# NOTE(review): the three lines that used to be here ("text stringlengths ... | domain ...")
# were a dataset-export table header accidentally pasted above the shebang; kept only as a
# comment so the module parses.
#!/usr/bin/python
"""
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import gzip
import logging
import math
import os
import time
# Prefer ujson (a much faster C JSON parser) when it is installed and fall
# back to the standard library. Catch only ImportError -- a bare except here
# would also hide unrelated startup failures inside ujson.
try:
    import ujson as json
except ImportError:
    import json
########################################################################################################################
# Trace processing
########################################################################################################################
class Trace():
    """Parser for Chrome trace captures and legacy dev tools timelines.

    Accumulates CPU slice utilization, user-timing marks, per-script JS
    timings, Blink feature first-use times, interactive windows and netlog
    summaries; each result is written out as JSON via the Write* helpers.
    """

    def __init__(self):
        # Removed the pointless bare ``return`` at the end of the original
        # constructor -- __init__ must return None anyway.
        self.thread_stack = {}        # per-thread stack of currently-open ('B') events
        self.ignore_threads = {}      # threads whose traffic targets the local test agent
        self.threads = {}             # thread id -> {event name -> numeric event id}
        self.user_timing = []         # raw blink.user_timing trace events
        self.event_names = {}         # event name -> numeric id
        self.event_name_lookup = {}   # numeric id -> event name
        self.scripts = None           # per-thread/script/event [start, end] ms pairs (lazy)
        self.timeline_events = []     # completed top-level timeline events
        self.trace_events = []        # filtered raw events buffered for sorting
        self.interactive = []         # [start_ms, end_ms] interactive windows
        self.interactive_start = 0
        self.interactive_end = None
        self.start_time = None        # trace start in usecs (first main-thread request)
        self.end_time = None          # trace end in usecs
        self.cpu = {'main_thread': None}
        self.feature_usage = None
        self.feature_usage_start_time = None
        self.netlog = {'bytes_in': 0, 'bytes_out': 0}
########################################################################################################################
# Output Logging
########################################################################################################################
def WriteJson(self, file, json_data):
try:
file_name, ext = os.path.splitext(file)
if ext.lower() == '.gz':
with gzip.open(file, 'wb') as f:
json.dump(json_data, f)
else:
with open(file, 'w') as f:
json.dump(json_data, f)
except:
logging.critical("Error writing to " + file)
def WriteUserTiming(self, file):
self.WriteJson(file, self.user_timing)
def WriteCPUSlices(self, file):
self.WriteJson(file, self.cpu)
def WriteScriptTimings(self, file):
if self.scripts is not None:
self.WriteJson(file, self.scripts)
def WriteFeatureUsage(self, file):
self.WriteJson(file, self.feature_usage)
def WriteInteractive(self, file):
self.WriteJson(file, self.interactive)
def WriteNetlog(self, file):
self.WriteJson(file, self.netlog)
########################################################################################################################
# Top-level processing
########################################################################################################################
def Process(self, trace):
f = None
line_mode = False
self.__init__()
try:
file_name, ext = os.path.splitext(trace)
if ext.lower() == '.gz':
f = gzip.open(trace, 'rb')
else:
f = open(trace, 'r')
for line in f:
try:
trace_event = json.loads(line.strip("\r\n\t ,"))
if not line_mode and 'traceEvents' in trace_event:
for sub_event in trace_event['traceEvents']:
self.FilterTraceEvent(sub_event)
else:
line_mode = True
self.FilterTraceEvent(trace_event)
except:
pass
except:
logging.critical("Error processing trace " + trace)
if f is not None:
f.close()
self.ProcessTraceEvents()
def ProcessTimeline(self, timeline):
self.__init__()
self.cpu['main_thread'] = '0'
self.threads['0'] = {}
events = None
f = None
try:
file_name, ext = os.path.splitext(timeline)
if ext.lower() == '.gz':
f = gzip.open(timeline, 'rb')
else:
f = open(timeline, 'r')
events = json.load(f)
if events:
# convert the old format timeline events into our internal representation
for event in events:
if 'method' in event and 'params' in event:
if self.start_time is None:
if event['method'] == 'Network.requestWillBeSent' and 'timestamp' in event['params']:
self.start_time = event['params']['timestamp'] * 1000000.0
self.end_time = event['params']['timestamp'] * 1000000.0
else:
if 'timestamp' in event['params']:
t = event['params']['timestamp'] * 1000000.0
if t > self.end_time:
self.end_time = t
if event['method'] == 'Timeline.eventRecorded' and 'record' in event['params']:
e = self.ProcessOldTimelineEvent(event['params']['record'], None)
if e is not None:
self.timeline_events.append(e)
self.ProcessTimelineEvents()
except:
logging.critical("Error processing timeline " + timeline)
if f is not None:
f.close()
def FilterTraceEvent(self, trace_event):
cat = trace_event['cat']
if cat == 'toplevel' or cat == 'ipc,toplevel':
return
if cat == 'devtools.timeline' or \
cat.find('devtools.timeline') >= 0 or \
cat.find('blink.feature_usage') >= 0 or \
cat.find('blink.user_timing') >= 0:
self.trace_events.append(trace_event)
def ProcessTraceEvents(self):
#sort the raw trace events by timestamp and then process them
if len(self.trace_events):
self.trace_events.sort(key=lambda trace_event: trace_event['ts'])
for trace_event in self.trace_events:
self.ProcessTraceEvent(trace_event)
self.trace_events = []
# Do the post-processing on timeline events
self.ProcessTimelineEvents()
def ProcessTraceEvent(self, trace_event):
cat = trace_event['cat']
if cat == 'devtools.timeline' or cat.find('devtools.timeline') >= 0:
self.ProcessTimelineTraceEvent(trace_event)
elif cat.find('blink.feature_usage') >= 0:
self.ProcessFeatureUsageEvent(trace_event)
elif cat.find('blink.user_timing') >= 0:
self.user_timing.append(trace_event)
#Netlog support is still in progress
#elif cat.find('netlog') >= 0:
# self.ProcessNetlogEvent(trace_event)
########################################################################################################################
# Timeline
########################################################################################################################
def ProcessTimelineTraceEvent(self, trace_event):
    """Fold one devtools.timeline trace event into the internal timeline model.

    Events are keyed by 'pid:tid'. The main thread is detected as the thread
    that issues the first ResourceSendRequest to a non-local URL; requests to
    the local agent (http://127.0.0.1:8888) mark that thread as ignored.
    'B'/'E' phase events are matched on a per-thread stack; complete events
    (with 'dur') get their end computed directly. Completed events either
    nest under the currently-open stack entry or become top-level
    timeline_events.
    """
    thread = '{0}:{1}'.format(trace_event['pid'], trace_event['tid'])
    # Keep track of the main thread
    if self.cpu['main_thread'] is None and trace_event['name'] == 'ResourceSendRequest' and 'args' in trace_event and \
            'data' in trace_event['args'] and 'url' in trace_event['args']['data']:
        if trace_event['args']['data']['url'][:21] == 'http://127.0.0.1:8888':
            # Traffic to the local test agent is instrumentation noise
            self.ignore_threads[thread] = True
        else:
            if thread not in self.threads:
                self.threads[thread] = {}
            # The trace clock starts at the first real request on the main thread
            if self.start_time is None or trace_event['ts'] < self.start_time:
                self.start_time = trace_event['ts']
            self.cpu['main_thread'] = thread
            if 'dur' not in trace_event:
                trace_event['dur'] = 1
    # Make sure each thread has a numerical ID
    if self.cpu['main_thread'] is not None and thread not in self.threads and thread not in self.ignore_threads and \
            trace_event['name'] != 'Program':
        self.threads[thread] = {}
    # Build timeline events on a stack. 'B' begins an event, 'E' ends an event
    if (thread in self.threads and ('dur' in trace_event or trace_event['ph'] == 'B' or trace_event['ph'] == 'E')):
        trace_event['thread'] = self.threads[thread]
        if thread not in self.thread_stack:
            self.thread_stack[thread] = []
        # Intern event names to small integer ids (stored in 'n')
        if trace_event['name'] not in self.event_names:
            self.event_names[trace_event['name']] = len(self.event_names)
            self.event_name_lookup[self.event_names[trace_event['name']]] = trace_event['name']
        if trace_event['name'] not in self.threads[thread]:
            self.threads[thread][trace_event['name']] = self.event_names[trace_event['name']]
        e = None
        if trace_event['ph'] == 'E':
            # Close the most recent open event, but only accept the end time
            # if the names match (guards against unbalanced B/E pairs)
            if len(self.thread_stack[thread]) > 0:
                e = self.thread_stack[thread].pop()
                if e['n'] == self.event_names[trace_event['name']]:
                    e['e'] = trace_event['ts']
        else:
            # 'B' or complete event: open a new record {thread, name id, start}
            e = {'t': thread, 'n': self.event_names[trace_event['name']], 's': trace_event['ts']}
            # Remember which script URL the JS work belongs to (http(s) only)
            if (trace_event['name'] == 'EvaluateScript' or trace_event['name'] == 'v8.compile' or trace_event['name'] == 'v8.parseOnBackground')\
                    and 'args' in trace_event and 'data' in trace_event['args'] and 'url' in trace_event['args']['data'] and\
                    trace_event['args']['data']['url'].startswith('http'):
                e['js'] = trace_event['args']['data']['url']
            if trace_event['name'] == 'FunctionCall' and 'args' in trace_event and 'data' in trace_event['args']:
                if 'scriptName' in trace_event['args']['data'] and trace_event['args']['data']['scriptName'].startswith('http'):
                    e['js'] = trace_event['args']['data']['scriptName']
                elif 'url' in trace_event['args']['data'] and trace_event['args']['data']['url'].startswith('http'):
                    e['js'] = trace_event['args']['data']['url']
            if trace_event['ph'] == 'B':
                # Leave it open on the stack until the matching 'E' arrives
                self.thread_stack[thread].append(e)
                e = None
            elif 'dur' in trace_event:
                e['e'] = e['s'] + trace_event['dur']
        # Only keep events that completed, started after the trace start and
        # have a non-negative duration
        if e is not None and 'e' in e and e['s'] >= self.start_time and e['e'] >= e['s']:
            if self.end_time is None or e['e'] > self.end_time:
                self.end_time = e['e']
            # attach it to a parent event if there is one
            if len(self.thread_stack[thread]) > 0:
                parent = self.thread_stack[thread].pop()
                if 'c' not in parent:
                    parent['c'] = []
                parent['c'].append(e)
                self.thread_stack[thread].append(parent)
            else:
                self.timeline_events.append(e)
def ProcessOldTimelineEvent(self, event, type):
    """Recursively convert one old-format timeline record to the internal
    {'t','n','s','e'[,'js','c']} representation, or return None if the
    record has no usable start/end times.

    NOTE: the ``type`` parameter shadows the builtin; it carries the event
    type inherited from the parent record when the child has none of its own.
    Old timelines are single-threaded, so everything lands on thread '0'.
    Times are seconds in the source and converted to usecs here.
    """
    e = None
    thread = '0'
    if 'type' in event:
        type = event['type']
    # Intern the type name to a numeric id, same scheme as the trace path
    if type not in self.event_names:
        self.event_names[type] = len(self.event_names)
        self.event_name_lookup[self.event_names[type]] = type
    if type not in self.threads[thread]:
        self.threads[thread][type] = self.event_names[type]
    start = None
    end = None
    if 'startTime' in event and 'endTime' in event:
        start = event['startTime'] * 1000000.0
        end = event['endTime'] * 1000000.0
    if 'callInfo' in event:
        # callInfo times take precedence when present
        if 'startTime' in event['callInfo'] and 'endTime' in event['callInfo']:
            start = event['callInfo']['startTime'] * 1000000.0
            end = event['callInfo']['endTime'] * 1000000.0
    if start is not None and end is not None and end >= start and type is not None:
        if end > self.end_time:
            self.end_time = end
        e = {'t': thread, 'n': self.event_names[type], 's': start, 'e': end}
        if 'callInfo' in event and 'url' in event and event['url'].startswith('http'):
            e['js'] = event['url']
        # Process profile child events
        if 'data' in event and 'profile' in event['data'] and 'rootNodes' in event['data']['profile']:
            for child in event['data']['profile']['rootNodes']:
                c = self.ProcessOldTimelineEvent(child, type)
                if c is not None:
                    if 'c' not in e:
                        e['c'] = []
                    e['c'].append(c)
        # recursively process any child events
        if 'children' in event:
            for child in event['children']:
                c = self.ProcessOldTimelineEvent(child, type)
                if c is not None:
                    if 'c' not in e:
                        e['c'] = []
                    e['c'].append(c)
    return e
def ProcessTimelineEvents(self):
    """Post-process the collected timeline events into CPU utilization slices.

    Picks a power-of-10 slice width that yields at most ~2000 slices over the
    trace duration, allocates zeroed per-thread/per-event-name slice arrays,
    accounts every timeline event (recursively) into them, records interactive
    windows, then converts the fractional slice values to integer usecs.
    """
    if len(self.timeline_events) and self.end_time > self.start_time:
        # Figure out how big each slice should be in usecs. Size it to a power of 10 where we have at least 2000 slices
        exp = 0
        last_exp = 0
        slice_count = self.end_time - self.start_time
        while slice_count > 2000:
            last_exp = exp
            exp += 1
            slice_count = int(math.ceil(float(self.end_time - self.start_time) / float(pow(10, exp))))
        self.cpu['total_usecs'] = self.end_time - self.start_time
        self.cpu['slice_usecs'] = int(pow(10, last_exp))
        slice_count = int(math.ceil(float(self.end_time - self.start_time) / float(self.cpu['slice_usecs'])))
        # Create the empty time slices for all of the threads
        self.cpu['slices'] = {}
        for thread in self.threads.keys():
            self.cpu['slices'][thread] = {'total': [0.0] * slice_count}
            for name in self.threads[thread].keys():
                self.cpu['slices'][thread][name] = [0.0] * slice_count
        # Go through all of the timeline events recursively and account for the time they consumed
        for timeline_event in self.timeline_events:
            self.ProcessTimelineEvent(timeline_event, None)
        # Close out a trailing interactive window (> 500ms) if one was open
        if self.interactive_end is not None and self.interactive_end - self.interactive_start > 500000:
            self.interactive.append([int(math.ceil(self.interactive_start / 1000.0)), int(math.floor(self.interactive_end / 1000.0))])
        # Go through all of the fractional times and convert the float fractional times to integer usecs
        for thread in self.cpu['slices'].keys():
            # 'total' was only needed for bookkeeping during accumulation
            del self.cpu['slices'][thread]['total']
            for name in self.cpu['slices'][thread].keys():
                for slice in range(len(self.cpu['slices'][thread][name])):
                    self.cpu['slices'][thread][name][slice] =\
                        int(self.cpu['slices'][thread][name][slice] * self.cpu['slice_usecs'])
def ProcessTimelineEvent(self, timeline_event, parent):
    """Account one timeline event (and its children) into the CPU slices.

    Also tracks main-thread interactive windows (>= 500ms with no task longer
    than 50ms) and per-script JS timing ranges for events tagged with 'js'.
    ``parent`` is the parent event's name (or None) so child time can be
    subtracted from the parent's slices in AdjustTimelineSlice.
    """
    # Times relative to the start of the trace (usecs)
    start = timeline_event['s'] - self.start_time
    end = timeline_event['e'] - self.start_time
    if end > start:
        elapsed = end - start
        thread = timeline_event['t']
        name = self.event_name_lookup[timeline_event['n']]
        # Keep track of periods on the main thread where at least 500ms are available with no tasks longer than 50ms
        if 'main_thread' in self.cpu and thread == self.cpu['main_thread']:
            if elapsed > 50000:
                # Long task: close the current interactive window if it was
                # at least 500ms long, then restart tracking after this task
                if start - self.interactive_start > 500000:
                    self.interactive.append([int(math.ceil(self.interactive_start / 1000.0)), int(math.floor(start / 1000.0))])
                self.interactive_start = end
                self.interactive_end = None
            else:
                self.interactive_end = end
        if 'js' in timeline_event:
            # Record the [start, end] (msec) range for this script/event name
            script = timeline_event['js']
            s = start / 1000.0
            e = end / 1000.0
            if self.scripts is None:
                self.scripts = {}
            if 'main_thread' not in self.scripts and 'main_thread' in self.cpu:
                self.scripts['main_thread'] = self.cpu['main_thread']
            if thread not in self.scripts:
                self.scripts[thread] = {}
            if script not in self.scripts[thread]:
                self.scripts[thread][script] = {}
            if name not in self.scripts[thread][script]:
                self.scripts[thread][script][name] = []
            # make sure the script duration isn't already covered by a parent event
            new_duration = True
            if len(self.scripts[thread][script][name]):
                for period in self.scripts[thread][script][name]:
                    if s >= period[0] and e <= period[1]:
                        new_duration = False
                        break
            if new_duration:
                self.scripts[thread][script][name].append([s, e])
        # Spread the elapsed time across every slice the event overlaps
        slice_usecs = self.cpu['slice_usecs']
        first_slice = int(float(start) / float(slice_usecs))
        last_slice = int(float(end) / float(slice_usecs))
        # NOTE: xrange is Python 2 only; this file targets Python 2
        for slice_number in xrange(first_slice, last_slice + 1):
            slice_start = slice_number * slice_usecs
            slice_end = slice_start + slice_usecs
            used_start = max(slice_start, start)
            used_end = min(slice_end, end)
            slice_elapsed = used_end - used_start
            self.AdjustTimelineSlice(thread, slice_number, name, parent, slice_elapsed)
        # Recursively process any child events
        if 'c' in timeline_event:
            for child in timeline_event['c']:
                self.ProcessTimelineEvent(child, name)
# Add the time to the given slice and subtract the time from a parent event
def AdjustTimelineSlice(self, thread, slice_number, name, parent, elapsed):
try:
# Don't bother adjusting if both the current event and parent are the same category
# since they would just cancel each other out.
if name != parent:
fraction = min(1.0, float(elapsed) / float(self.cpu['slice_usecs']))
self.cpu['slices'][thread][name][slice_number] += fraction
self.cpu['slices'][thread]['total'][slice_number] += fraction
if parent is not None and self.cpu['slices'][thread][parent][slice_number] >= fraction:
self.cpu['slices'][thread][parent][slice_number] -= fraction
self.cpu['slices'][thread]['total'][slice_number] -= fraction
# Make sure we didn't exceed 100% in this slice
self.cpu['slices'][thread][name][slice_number] = min(1.0, self.cpu['slices'][thread][name][slice_number])
# make sure we don't exceed 100% for any slot
if self.cpu['slices'][thread]['total'][slice_number] > 1.0:
available = max(0.0, 1.0 - fraction)
for slice_name in self.cpu['slices'][thread].keys():
if slice_name != name:
self.cpu['slices'][thread][slice_name][slice_number] =\
min(self.cpu['slices'][thread][slice_name][slice_number], available)
available = max(0.0, available - self.cpu['slices'][thread][slice_name][slice_number])
self.cpu['slices'][thread]['total'][slice_number] = min(1.0, max(0.0, 1.0 - available))
except:
pass
########################################################################################################################
# Blink Features
########################################################################################################################
def ProcessFeatureUsageEvent(self, trace_event):
    """Record the first-use time (msec, relative to the feature-usage start
    clock) of each Blink and CSS feature reported via blink.feature_usage."""
    global BLINK_FEATURES
    name = trace_event.get('name')
    if name not in ('FeatureFirstUsed', 'CSSFirstUsed'):
        return
    if 'args' not in trace_event or 'feature' not in trace_event['args']:
        return
    if self.feature_usage is None:
        self.feature_usage = {'Features': {}, 'CSSFeatures': {}}
    if self.feature_usage_start_time is None:
        # Anchor feature times to the trace start when known, otherwise to
        # the first feature event seen
        self.feature_usage_start_time = self.start_time if self.start_time is not None else trace_event['ts']
    feature_id = '{0:d}'.format(trace_event['args']['feature'])
    timestamp = float('{0:0.3f}'.format((trace_event['ts'] - self.feature_usage_start_time) / 1000.0))
    if name == 'FeatureFirstUsed':
        label = BLINK_FEATURES.get(feature_id, 'Feature_{0}'.format(feature_id))
        # Only the first use is interesting
        self.feature_usage['Features'].setdefault(label, timestamp)
    else:
        label = CSS_FEATURES.get(feature_id, 'CSSFeature_{0}'.format(feature_id))
        self.feature_usage['CSSFeatures'].setdefault(label, timestamp)
########################################################################################################################
# Netlog
########################################################################################################################
def ProcessNetlogEvent(self, trace_event):
if 'args' in trace_event and 'id' in trace_event and 'name' in trace_event and 'source_type' in trace_event['args']:
# Convert the source event id to hex if one exists
if 'params' in trace_event['args'] and 'source_dependency' in trace_event['args']['params'] and 'id' in trace_event['args']['params']['source_dependency']:
dependency_id = int(trace_event['args']['params']['source_dependency']['id'])
trace_event['args']['params']['source_dependency']['id'] = 'x%X' % dependency_id
if trace_event['args']['source_type'] == 'SOCKET':
self.ProcessNetlogSocketEvent(trace_event)
if trace_event['args']['source_type'] == 'HTTP2_SESSION':
self.ProcessNetlogHTTP2SessionEvent(trace_event)
def ProcessNetlogSocketEvent(self, s):
if 'sockets' not in self.netlog:
self.netlog['sockets'] = {}
if s['id'] not in self.netlog['sockets']:
self.netlog['sockets'][s['id']] = {'bytes_in': 0, 'bytes_out': 0}
if s['name'] == 'SOCKET_BYTES_RECEIVED' and 'params' in s['args'] and 'byte_count' in s['args']['params']:
self.netlog['sockets'][s['id']]['bytes_in'] += s['args']['params']['byte_count']
self.netlog['bytes_in'] += s['args']['params']['byte_count']
if s['name'] == 'SOCKET_BYTES_SENT' and 'params' in s['args'] and 'byte_count' in s['args']['params']:
self.netlog['sockets'][s['id']]['bytes_out'] += s['args']['params']['byte_count']
self.netlog['bytes_out'] += s['args']['params']['byte_count']
def ProcessNetlogHTTP2SessionEvent(self, s):
if 'params' in s['args'] and 'stream_id' in s['args']['params']:
if 'http2' not in self.netlog:
self.netlog['http2'] = {'bytes_in': 0, 'bytes_out': 0}
if s['id'] not in self.netlog['http2']:
self.netlog['http2'][s['id']] = {'bytes_in': 0, 'bytes_out': 0, 'streams':{}}
stream = '{0:d}'.format(s['args']['params']['stream_id'])
if stream not in self.netlog['http2'][s['id']]['streams']:
self.netlog['http2'][s['id']]['streams'][stream] = {'start': s['tts'], 'end': s['tts'], 'bytes_in': 0, 'bytes_out': 0}
if s['tts'] > self.netlog['http2'][s['id']]['streams'][stream]['end']:
self.netlog['http2'][s['id']]['streams'][stream]['end'] = s['tts']
if s['name'] == 'HTTP2_SESSION_SEND_HEADERS' and 'params' in s['args']:
if 'request' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['request'] = {}
if 'headers' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['headers'] = s['args']['params']['headers']
if 'parent_stream_id' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['parent_stream_id'] = s['args']['params']['parent_stream_id']
if 'exclusive' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['exclusive'] = s['args']['params']['exclusive']
if 'priority' in s['args']['params']:
self.netlog['http2'][s['id']]['streams'][stream]['request']['priority'] = s['args']['params']['priority']
if s['name'] == 'HTTP2_SESSION_RECV_HEADERS' and 'params' in s['args']:
if 'first_byte' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['first_byte'] = s['tts']
if 'response' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['response'] = {}
if 'headers' in s['args']['params']:
self.netlog['http2'][s['id']]['response']['streams'][stream]['headers'] = s['args']['params']['headers']
if s['name'] == 'HTTP2_SESSION_RECV_DATA' and 'params' in s['args'] and 'size' in s['args']['params']:
if 'first_byte' not in self.netlog['http2'][s['id']]['streams'][stream]:
self.netlog['http2'][s['id']]['streams'][stream]['first_byte'] = s['tts']
self.netlog['http2'][s['id']]['streams'][stream]['bytes_in'] += s['args']['params']['size']
self.netlog['http2'][s['id']]['bytes_in'] += s['args']['params']['size']
########################################################################################################################
# Main Entry Point
########################################################################################################################
def main():
    """Command-line entry point: parse a Chrome trace (-t) or legacy
    timeline (-l) and write whichever JSON reports were requested
    (CPU slices, JS timings, user timing, features, interactive, netlog)."""
    import argparse
    parser = argparse.ArgumentParser(description='Chrome trace parser.',
                                     prog='trace-parser')
    parser.add_argument('-v', '--verbose', action='count',
                        help="Increase verbosity (specify multiple times for more). -vvvv for full debug output.")
    parser.add_argument('-t', '--trace', help="Input trace file.")
    parser.add_argument('-l', '--timeline', help="Input timeline file (iOS or really old Chrome).")
    parser.add_argument('-c', '--cpu', help="Output CPU time slices file.")
    parser.add_argument('-j', '--js', help="Output Javascript per-script parse/evaluate/execute timings.")
    parser.add_argument('-u', '--user', help="Output user timing file.")
    parser.add_argument('-f', '--features', help="Output blink feature usage file.")
    parser.add_argument('-i', '--interactive', help="Output list of interactive times.")
    parser.add_argument('-n', '--netlog', help="Output netlog details file.")
    options, unknown = parser.parse_known_args()
    # Set up logging. action='count' leaves options.verbose as None when the
    # flag is absent, so normalize before comparing (None >= 4 is fragile and
    # a TypeError on Python 3).
    log_level = logging.CRITICAL
    verbosity = options.verbose or 0
    if verbosity == 1:
        log_level = logging.ERROR
    elif verbosity == 2:
        log_level = logging.WARNING
    elif verbosity == 3:
        log_level = logging.INFO
    elif verbosity >= 4:
        log_level = logging.DEBUG
    logging.basicConfig(level=log_level, format="%(asctime)s.%(msecs)03d - %(message)s", datefmt="%H:%M:%S")
    if not options.trace and not options.timeline:
        parser.error("Input trace or timeline file is not specified.")
    start = time.time()
    trace = Trace()
    if options.trace:
        trace.Process(options.trace)
    elif options.timeline:
        trace.ProcessTimeline(options.timeline)
    if options.user:
        trace.WriteUserTiming(options.user)
    if options.cpu:
        trace.WriteCPUSlices(options.cpu)
    if options.js:
        trace.WriteScriptTimings(options.js)
    if options.features:
        trace.WriteFeatureUsage(options.features)
    if options.interactive:
        trace.WriteInteractive(options.interactive)
    if options.netlog:
        trace.WriteNetlog(options.netlog)
    end = time.time()
    elapsed = end - start
    logging.debug("Elapsed Time: {0:0.4f}".format(elapsed))
########################################################################################################################
# Blink feature names from https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/frame/UseCounter.h
########################################################################################################################
BLINK_FEATURES = {
"0": "PageDestruction",
"3": "PrefixedIndexedDB",
"4": "WorkerStart",
"5": "SharedWorkerStart",
"9": "UnprefixedIndexedDB",
"10": "OpenWebDatabase",
"13": "UnprefixedRequestAnimationFrame",
"14": "PrefixedRequestAnimationFrame",
"15": "ContentSecurityPolicy",
"16": "ContentSecurityPolicyReportOnly",
"18": "PrefixedTransitionEndEvent",
"19": "UnprefixedTransitionEndEvent",
"20": "PrefixedAndUnprefixedTransitionEndEvent",
"21": "AutoFocusAttribute",
"23": "DataListElement",
"24": "FormAttribute",
"25": "IncrementalAttribute",
"26": "InputTypeColor",
"27": "InputTypeDate",
"29": "InputTypeDateTimeFallback",
"30": "InputTypeDateTimeLocal",
"31": "InputTypeEmail",
"32": "InputTypeMonth",
"33": "InputTypeNumber",
"34": "InputTypeRange",
"35": "InputTypeSearch",
"36": "InputTypeTel",
"37": "InputTypeTime",
"38": "InputTypeURL",
"39": "InputTypeWeek",
"40": "InputTypeWeekFallback",
"41": "ListAttribute",
"42": "MaxAttribute",
"43": "MinAttribute",
"44": "PatternAttribute",
"45": "PlaceholderAttribute",
"47": "PrefixedDirectoryAttribute",
"49": "RequiredAttribute",
"51": "StepAttribute",
"52": "PageVisits",
"53": "HTMLMarqueeElement",
"55": "Reflection",
"57": "PrefixedStorageInfo",
"58": "XFrameOptions",
"59": "XFrameOptionsSameOrigin",
"60": "XFrameOptionsSameOriginWithBadAncestorChain",
"61": "DeprecatedFlexboxWebContent",
"62": "DeprecatedFlexboxChrome",
"63": "DeprecatedFlexboxChromeExtension",
"65": "UnprefixedPerformanceTimeline",
"67": "UnprefixedUserTiming",
"69": "WindowEvent",
"70": "ContentSecurityPolicyWithBaseElement",
"74": "DocumentClear",
"77": "XMLDocument",
"78": "XSLProcessingInstruction",
"79": "XSLTProcessor",
"80": "SVGSwitchElement",
"83": "DocumentAll",
"84": "FormElement",
"85": "DemotedFormElement",
"90": "SVGAnimationElement",
"96": "LineClamp",
"97": "SubFrameBeforeUnloadRegistered",
"98": "SubFrameBeforeUnloadFired",
"102": "ConsoleMarkTimeline",
"111": "DocumentCreateAttribute",
"112": "DocumentCreateAttributeNS",
"113": "DocumentCreateCDATASection",
"115": "DocumentXMLEncoding",
"116": "DocumentXMLStandalone",
"117": "DocumentXMLVersion",
"123": "NavigatorProductSub",
"124": "NavigatorVendor",
"125": "NavigatorVendorSub",
"128": "PrefixedAnimationEndEvent",
"129": "UnprefixedAnimationEndEvent",
"130": "PrefixedAndUnprefixedAnimationEndEvent",
"131": "PrefixedAnimationStartEvent",
"132": "UnprefixedAnimationStartEvent",
"133": "PrefixedAndUnprefixedAnimationStartEvent",
"134": "PrefixedAnimationIterationEvent",
"135": "UnprefixedAnimationIterationEvent",
"136": "PrefixedAndUnprefixedAnimationIterationEvent",
"137": "EventReturnValue",
"138": "SVGSVGElement",
"143": "DOMSubtreeModifiedEvent",
"144": "DOMNodeInsertedEvent",
"145": "DOMNodeRemovedEvent",
"146": "DOMNodeRemovedFromDocumentEvent",
"147": "DOMNodeInsertedIntoDocumentEvent",
"148": "DOMCharacterDataModifiedEvent",
"150": "DocumentAllLegacyCall",
"152": "HTMLEmbedElementLegacyCall",
"153": "HTMLObjectElementLegacyCall",
"155": "GetMatchedCSSRules",
"160": "AttributeOwnerElement",
"162": "AttributeSpecified",
"164": "PrefixedAudioDecodedByteCount",
"165": "PrefixedVideoDecodedByteCount",
"166": "PrefixedVideoSupportsFullscreen",
"167": "PrefixedVideoDisplayingFullscreen",
"168": "PrefixedVideoEnterFullscreen",
"169": "PrefixedVideoExitFullscreen",
"170": "PrefixedVideoEnterFullScreen",
"171": "PrefixedVideoExitFullScreen",
"172": "PrefixedVideoDecodedFrameCount",
"173": "PrefixedVideoDroppedFrameCount",
"176": "PrefixedElementRequestFullscreen",
"177": "PrefixedElementRequestFullScreen",
"178": "BarPropLocationbar",
"179": "BarPropMenubar",
"180": "BarPropPersonalbar",
"181": "BarPropScrollbars",
"182": "BarPropStatusbar",
"183": "BarPropToolbar",
"184": "InputTypeEmailMultiple",
"185": "InputTypeEmailMaxLength",
"186": "InputTypeEmailMultipleMaxLength",
"190": "InputTypeText",
"191": "InputTypeTextMaxLength",
"192": "InputTypePassword",
"193": "InputTypePasswordMaxLength",
"196": "PrefixedPageVisibility",
"198": "CSSStyleSheetInsertRuleOptionalArg",
"200": "DocumentBeforeUnloadRegistered",
"201": "DocumentBeforeUnloadFired",
"202": "DocumentUnloadRegistered",
"203": "DocumentUnloadFired",
"204": "SVGLocatableNearestViewportElement",
"205": "SVGLocatableFarthestViewportElement",
"209": "SVGPointMatrixTransform",
"211": "DOMFocusInOutEvent",
"212": "FileGetLastModifiedDate",
"213": "HTMLElementInnerText",
"214": "HTMLElementOuterText",
"215": "ReplaceDocumentViaJavaScriptURL",
"217": "ElementPrefixedMatchesSelector",
"219": "CSSStyleSheetRules",
"220": "CSSStyleSheetAddRule",
"221": "CSSStyleSheetRemoveRule",
"222": "InitMessageEvent",
"233": "PrefixedDevicePixelRatioMediaFeature",
"234": "PrefixedMaxDevicePixelRatioMediaFeature",
"235": "PrefixedMinDevicePixelRatioMediaFeature",
"237": "PrefixedTransform3dMediaFeature",
"240": "PrefixedStorageQuota",
"243": "ResetReferrerPolicy",
"244": "CaseInsensitiveAttrSelectorMatch",
"246": "FormNameAccessForImageElement",
"247": "FormNameAccessForPastNamesMap",
"248": "FormAssociationByParser",
"250": "SVGSVGElementInDocument",
"251": "SVGDocumentRootElement",
"257": "WorkerSubjectToCSP",
"258": "WorkerAllowedByChildBlockedByScript",
"260": "DeprecatedWebKitGradient",
"261": "DeprecatedWebKitLinearGradient",
"262": "DeprecatedWebKitRepeatingLinearGradient",
"263": "DeprecatedWebKitRadialGradient",
"264": "DeprecatedWebKitRepeatingRadialGradient",
"267": "PrefixedImageSmoothingEnabled",
"268": "UnprefixedImageSmoothingEnabled",
"274": "TextAutosizing",
"276": "HTMLAnchorElementPingAttribute",
"279": "SVGClassName",
"281": "HTMLMediaElementSeekToFragmentStart",
"282": "HTMLMediaElementPauseAtFragmentEnd",
"283": "PrefixedWindowURL",
"285": "WindowOrientation",
"286": "DOMStringListContains",
"287": "DocumentCaptureEvents",
"288": "DocumentReleaseEvents",
"289": "WindowCaptureEvents",
"290": "WindowReleaseEvents",
"295": "DocumentXPathCreateExpression",
"296": "DocumentXPathCreateNSResolver",
"297": "DocumentXPathEvaluate",
"298": "AttrGetValue",
"299": "AttrSetValue",
"300": "AnimationConstructorKeyframeListEffectObjectTiming",
"302": "AnimationConstructorKeyframeListEffectNoTiming",
"303": "AttrSetValueWithElement",
"304": "PrefixedCancelAnimationFrame",
"305": "PrefixedCancelRequestAnimationFrame",
"306": "NamedNodeMapGetNamedItem",
"307": "NamedNodeMapSetNamedItem",
"308": "NamedNodeMapRemoveNamedItem",
"309": "NamedNodeMapItem",
"310": "NamedNodeMapGetNamedItemNS",
"311": "NamedNodeMapSetNamedItemNS",
"312": "NamedNodeMapRemoveNamedItemNS",
"318": "PrefixedDocumentIsFullscreen",
"320": "PrefixedDocumentCurrentFullScreenElement",
"321": "PrefixedDocumentCancelFullScreen",
"322": "PrefixedDocumentFullscreenEnabled",
"323": "PrefixedDocumentFullscreenElement",
"324": "PrefixedDocumentExitFullscreen",
"325": "SVGForeignObjectElement",
"327": "SelectionSetPosition",
"328": "AnimationFinishEvent",
"329": "SVGSVGElementInXMLDocument",
"341": "PrefixedPerformanceClearResourceTimings",
"342": "PrefixedPerformanceSetResourceTimingBufferSize",
"343": "EventSrcElement",
"344": "EventCancelBubble",
"345": "EventPath",
"347": "NodeIteratorDetach",
"348": "AttrNodeValue",
"349": "AttrTextContent",
"350": "EventGetReturnValueTrue",
"351": "EventGetReturnValueFalse",
"352": "EventSetReturnValueTrue",
"353": "EventSetReturnValueFalse",
"356": "WindowOffscreenBuffering",
"357": "WindowDefaultStatus",
"358": "WindowDefaultstatus",
"361": "PrefixedTransitionEventConstructor",
"362": "PrefixedMutationObserverConstructor",
"363": "PrefixedIDBCursorConstructor",
"364": "PrefixedIDBDatabaseConstructor",
"365": "PrefixedIDBFactoryConstructor",
"366": "PrefixedIDBIndexConstructor",
"367": "PrefixedIDBKeyRangeConstructor",
"368": "PrefixedIDBObjectStoreConstructor",
"369": "PrefixedIDBRequestConstructor",
"370": "PrefixedIDBTransactionConstructor",
"371": "NotificationPermission",
"372": "RangeDetach",
"386": "PrefixedFileRelativePath",
"387": "DocumentCaretRangeFromPoint",
"389": "ElementScrollIntoViewIfNeeded",
"393": "RangeExpand",
"396": "HTMLImageElementX",
"397": "HTMLImageElementY",
"400": "SelectionBaseNode",
"401": "SelectionBaseOffset",
"402": "SelectionExtentNode",
"403": "SelectionExtentOffset",
"404": "SelectionType",
"405": "SelectionModify",
"406": "SelectionSetBaseAndExtent",
"407": "SelectionEmpty",
"409": "VTTCue",
"410": "VTTCueRender",
"411": "VTTCueRenderVertical",
"412": "VTTCueRenderSnapToLinesFalse",
"413": "VTTCueRenderLineNotAuto",
"414": "VTTCueRenderPositionNot50",
"415": "VTTCueRenderSizeNot100",
"416": "VTTCueRenderAlignNotMiddle",
"417": "ElementRequestPointerLock",
"418": "VTTCueRenderRtl",
"419": "PostMessageFromSecureToInsecure",
"420": "PostMessageFromInsecureToSecure",
"421": "DocumentExitPointerLock",
"422": "DocumentPointerLockElement",
"424": "PrefixedCursorZoomIn",
"425": "PrefixedCursorZoomOut",
"429": "TextEncoderConstructor",
"430": "TextEncoderEncode",
"431": "TextDecoderConstructor",
"432": "TextDecoderDecode",
"433": "FocusInOutEvent",
"434": "MouseEventMovementX",
"435": "MouseEventMovementY",
"440": "DocumentFonts",
"441": "MixedContentFormsSubmitted",
"442": "FormsSubmitted",
"443": "TextInputEventOnInput",
"444": "TextInputEventOnTextArea",
"445": "TextInputEventOnContentEditable",
"446": "TextInputEventOnNotNode",
"447": "WebkitBeforeTextInsertedOnInput",
"448": "WebkitBeforeTextInsertedOnTextArea",
"449": "WebkitBeforeTextInsertedOnContentEditable",
"450": "WebkitBeforeTextInsertedOnNotNode",
"451": "WebkitEditableContentChangedOnInput",
"452": "WebkitEditableContentChangedOnTextArea",
"453": "WebkitEditableContentChangedOnContentEditable",
"454": "WebkitEditableContentChangedOnNotNode",
"455": "HTMLImports",
"456": "ElementCreateShadowRoot",
"457": "DocumentRegisterElement",
"458": "EditingAppleInterchangeNewline",
"459": "EditingAppleConvertedSpace",
"460": "EditingApplePasteAsQuotation",
"461": "EditingAppleStyleSpanClass",
"462": "EditingAppleTabSpanClass",
"463": "HTMLImportsAsyncAttribute",
"465": "XMLHttpRequestSynchronous",
"466": "CSSSelectorPseudoUnresolved",
"467": "CSSSelectorPseudoShadow",
"468": "CSSSelectorPseudoContent",
"469": "CSSSelectorPseudoHost",
"470": "CSSSelectorPseudoHostContext",
"471": "CSSDeepCombinator",
"473": "UseAsm",
"475": "DOMWindowOpen",
"476": "DOMWindowOpenFeatures",
"478": "MediaStreamTrackGetSources",
"479": "AspectRatioFlexItem",
"480": "DetailsElement",
"481": "DialogElement",
"482": "MapElement",
"483": "MeterElement",
"484": "ProgressElement",
"490": "PrefixedHTMLElementDropzone",
"491": "WheelEventWheelDeltaX",
"492": "WheelEventWheelDeltaY",
"493": "WheelEventWheelDelta",
"494": "SendBeacon",
"495": "SendBeaconQuotaExceeded",
"501": "SVGSMILElementInDocument",
"502": "MouseEventOffsetX",
"503": "MouseEventOffsetY",
"504": "MouseEventX",
"505": "MouseEventY",
"506": "MouseEventFromElement",
"507": "MouseEventToElement",
"508": "RequestFileSystem",
"509": "RequestFileSystemWorker",
"510": "RequestFileSystemSyncWorker",
"519": "SVGStyleElementTitle",
"520": "PictureSourceSrc",
"521": "Picture",
"522": "Sizes",
"523": "SrcsetXDescriptor",
"524": "SrcsetWDescriptor",
"525": "SelectionContainsNode",
"529": "XMLExternalResourceLoad",
"530": "MixedContentPrivateHostnameInPublicHostname",
"531": "LegacyProtocolEmbeddedAsSubresource",
"532": "RequestedSubresourceWithEmbeddedCredentials",
"533": "NotificationCreated",
"534": "NotificationClosed",
"535": "NotificationPermissionRequested",
"538": "ConsoleTimeline",
"539": "ConsoleTimelineEnd",
"540": "SRIElementWithMatchingIntegrityAttribute",
"541": "SRIElementWithNonMatchingIntegrityAttribute",
"542": "SRIElementWithUnparsableIntegrityAttribute",
"545": "V8Animation_StartTime_AttributeGetter",
"546": "V8Animation_StartTime_AttributeSetter",
"547": "V8Animation_CurrentTime_AttributeGetter",
"548": "V8Animation_CurrentTime_AttributeSetter",
"549": "V8Animation_PlaybackRate_AttributeGetter",
"550": "V8Animation_PlaybackRate_AttributeSetter",
"551": "V8Animation_PlayState_AttributeGetter",
"552": "V8Animation_Finish_Method",
"553": "V8Animation_Play_Method",
"554": "V8Animation_Pause_Method",
"555": "V8Animation_Reverse_Method",
"556": "BreakIterator",
"557": "ScreenOrientationAngle",
"558": "ScreenOrientationType",
"559": "ScreenOrientationLock",
"560": "ScreenOrientationUnlock",
"561": "GeolocationSecureOrigin",
"562": "GeolocationInsecureOrigin",
"563": "NotificationSecureOrigin",
"564": "NotificationInsecureOrigin",
"565": "NotificationShowEvent",
"569": "SVGTransformListConsolidate",
"570": "SVGAnimatedTransformListBaseVal",
"571": "QuotedAnimationName",
"572": "QuotedKeyframesRule",
"573": "SrcsetDroppedCandidate",
"574": "WindowPostMessage",
"575": "WindowPostMessageWithLegacyTargetOriginArgument",
"576": "RenderRuby",
"578": "ScriptElementWithInvalidTypeHasSrc",
"581": "XMLHttpRequestSynchronousInNonWorkerOutsideBeforeUnload",
"582": "CSSSelectorPseudoScrollbar",
"583": "CSSSelectorPseudoScrollbarButton",
"584": "CSSSelectorPseudoScrollbarThumb",
"585": "CSSSelectorPseudoScrollbarTrack",
"586": "CSSSelectorPseudoScrollbarTrackPiece",
"587": "LangAttribute",
"588": "LangAttributeOnHTML",
"589": "LangAttributeOnBody",
"590": "LangAttributeDoesNotMatchToUILocale",
"591": "InputTypeSubmit",
"592": "InputTypeSubmitWithValue",
"593": "SetReferrerPolicy",
"595": "MouseEventWhich",
"598": "UIEventWhich",
"599": "TextWholeText",
"603": "NotificationCloseEvent",
"606": "StyleMedia",
"607": "StyleMediaType",
"608": "StyleMediaMatchMedium",
"609": "MixedContentPresent",
"610": "MixedContentBlockable",
"611": "MixedContentAudio",
"612": "MixedContentDownload",
"613": "MixedContentFavicon",
"614": "MixedContentImage",
"615": "MixedContentInternal",
"616": "MixedContentPlugin",
"617": "MixedContentPrefetch",
"618": "MixedContentVideo",
"620": "AudioListenerDopplerFactor",
"621": "AudioListenerSpeedOfSound",
"622": "AudioListenerSetVelocity",
"628": "CSSSelectorPseudoFullScreenAncestor",
"629": "CSSSelectorPseudoFullScreen",
"630": "WebKitCSSMatrix",
"631": "AudioContextCreateAnalyser",
"632": "AudioContextCreateBiquadFilter",
"633": "AudioContextCreateBufferSource",
"634": "AudioContextCreateChannelMerger",
"635": "AudioContextCreateChannelSplitter",
"636": "AudioContextCreateConvolver",
"637": "AudioContextCreateDelay",
"638": "AudioContextCreateDynamicsCompressor",
"639": "AudioContextCreateGain",
"640": "AudioContextCreateMediaElementSource",
"641": "AudioContextCreateMediaStreamDestination",
"642": "AudioContextCreateMediaStreamSource",
"643": "AudioContextCreateOscillator",
"645": "AudioContextCreatePeriodicWave",
"646": "AudioContextCreateScriptProcessor",
"647": "AudioContextCreateStereoPanner",
"648": "AudioContextCreateWaveShaper",
"649": "AudioContextDecodeAudioData",
"650": "AudioContextResume",
"651": "AudioContextSuspend",
"652": "AudioContext",
"653": "OfflineAudioContext",
"654": "PrefixedAudioContext",
"655": "PrefixedOfflineAudioContext",
"661": "MixedContentInNonHTTPSFrameThatRestrictsMixedContent",
"662": "MixedContentInSecureFrameThatDoesNotRestrictMixedContent",
"663": "MixedContentWebSocket",
"664": "SyntheticKeyframesInCompositedCSSAnimation",
"665": "MixedContentFormPresent",
"666": "GetUserMediaInsecureOrigin",
"667": "GetUserMediaSecureOrigin",
"668": "DeviceMotionInsecureOrigin",
"669": "DeviceMotionSecureOrigin",
"670": "DeviceOrientationInsecureOrigin",
"671": "DeviceOrientationSecureOrigin",
"672": "SandboxViaIFrame",
"673": "SandboxViaCSP",
"674": "BlockedSniffingImageToScript",
"675": "Fetch",
"676": "FetchBodyStream",
"677": "XMLHttpRequestAsynchronous",
"679": "WhiteSpacePreFromXMLSpace",
"680": "WhiteSpaceNowrapFromXMLSpace",
"685": "SVGSVGElementForceRedraw",
"686": "SVGSVGElementSuspendRedraw",
"687": "SVGSVGElementUnsuspendRedraw",
"688": "SVGSVGElementUnsuspendRedrawAll",
"689": "AudioContextClose",
"691": "CSSZoomNotEqualToOne",
"694": "ClientRectListItem",
"695": "WindowClientInformation",
"696": "WindowFind",
"697": "WindowScreenLeft",
"698": "WindowScreenTop",
"699": "V8Animation_Cancel_Method",
"700": "V8Animation_Onfinish_AttributeGetter",
"701": "V8Animation_Onfinish_AttributeSetter",
"707": "V8Window_WebKitAnimationEvent_ConstructorGetter",
"710": "CryptoGetRandomValues",
"711": "SubtleCryptoEncrypt",
"712": "SubtleCryptoDecrypt",
"713": "SubtleCryptoSign",
"714": "SubtleCryptoVerify",
"715": "SubtleCryptoDigest",
"716": "SubtleCryptoGenerateKey",
"717": "SubtleCryptoImportKey",
"718": "SubtleCryptoExportKey",
"719": "SubtleCryptoDeriveBits",
"720": "SubtleCryptoDeriveKey",
"721": "SubtleCryptoWrapKey",
"722": "SubtleCryptoUnwrapKey",
"723": "CryptoAlgorithmAesCbc",
"724": "CryptoAlgorithmHmac",
"725": "CryptoAlgorithmRsaSsaPkcs1v1_5",
"726": "CryptoAlgorithmSha1",
"727": "CryptoAlgorithmSha256",
"728": "CryptoAlgorithmSha384",
"729": "CryptoAlgorithmSha512",
"730": "CryptoAlgorithmAesGcm",
"731": "CryptoAlgorithmRsaOaep",
"732": "CryptoAlgorithmAesCtr",
"733": "CryptoAlgorithmAesKw",
"734": "CryptoAlgorithmRsaPss",
"735": "CryptoAlgorithmEcdsa",
"736": "CryptoAlgorithmEcdh",
"737": "CryptoAlgorithmHkdf",
"738": "CryptoAlgorithmPbkdf2",
"739": "DocumentSetDomain",
"740": "UpgradeInsecureRequestsEnabled",
"741": "UpgradeInsecureRequestsUpgradedRequest",
"742": "DocumentDesignMode",
"743": "GlobalCacheStorage",
"744": "NetInfo",
"745": "BackgroundSync",
"748": "LegacyConst",
"750": "V8Permissions_Query_Method",
"754": "V8HTMLInputElement_Autocapitalize_AttributeGetter",
"755": "V8HTMLInputElement_Autocapitalize_AttributeSetter",
"756": "V8HTMLTextAreaElement_Autocapitalize_AttributeGetter",
"757": "V8HTMLTextAreaElement_Autocapitalize_AttributeSetter",
"758": "SVGHrefBaseVal",
"759": "SVGHrefAnimVal",
"760": "V8CSSRuleList_Item_Method",
"761": "V8MediaList_Item_Method",
"762": "V8StyleSheetList_Item_Method",
"763": "StyleSheetListAnonymousNamedGetter",
"764": "AutocapitalizeAttribute",
"765": "FullscreenSecureOrigin",
"766": "FullscreenInsecureOrigin",
"767": "DialogInSandboxedContext",
"768": "SVGSMILAnimationInImageRegardlessOfCache",
"770": "EncryptedMediaSecureOrigin",
"771": "EncryptedMediaInsecureOrigin",
"772": "PerformanceFrameTiming",
"773": "V8Element_Animate_Method",
"778": "V8SVGSVGElement_GetElementById_Method",
"779": "ElementCreateShadowRootMultiple",
"780": "V8MessageChannel_Constructor",
"781": "V8MessagePort_PostMessage_Method",
"782": "V8MessagePort_Start_Method",
"783": "V8MessagePort_Close_Method",
"784": "MessagePortsTransferred",
"785": "CSSKeyframesRuleAnonymousIndexedGetter",
"786": "V8Screen_AvailLeft_AttributeGetter",
"787": "V8Screen_AvailTop_AttributeGetter",
"791": "V8SVGFEConvolveMatrixElement_PreserveAlpha_AttributeGetter",
"798": "V8SVGStyleElement_Disabled_AttributeGetter",
"799": "V8SVGStyleElement_Disabled_AttributeSetter",
"801": "InputTypeFileSecureOrigin",
"802": "InputTypeFileInsecureOrigin",
"804": "ElementAttachShadow",
"806": "V8SecurityPolicyViolationEvent_DocumentURI_AttributeGetter",
"807": "V8SecurityPolicyViolationEvent_BlockedURI_AttributeGetter",
"808": "V8SecurityPolicyViolationEvent_StatusCode_AttributeGetter",
"809": "HTMLLinkElementDisabled",
"810": "V8HTMLLinkElement_Disabled_AttributeGetter",
"811": "V8HTMLLinkElement_Disabled_AttributeSetter",
"812": "V8HTMLStyleElement_Disabled_AttributeGetter",
"813": "V8HTMLStyleElement_Disabled_AttributeSetter",
"816": "V8DOMError_Constructor",
"817": "V8DOMError_Name_AttributeGetter",
"818": "V8DOMError_Message_AttributeGetter",
"823": "V8Location_AncestorOrigins_AttributeGetter",
"824": "V8IDBDatabase_ObjectStoreNames_AttributeGetter",
"825": "V8IDBObjectStore_IndexNames_AttributeGetter",
"826": "V8IDBTransaction_ObjectStoreNames_AttributeGetter",
"830": "TextInputFired",
"831": "V8TextEvent_Data_AttributeGetter",
"832": "V8TextEvent_InitTextEvent_Method",
"833": "V8SVGSVGElement_UseCurrentView_AttributeGetter",
"834": "V8SVGSVGElement_CurrentView_AttributeGetter",
"835": "ClientHintsDPR",
"836": "ClientHintsResourceWidth",
"837": "ClientHintsViewportWidth",
"838": "SRIElementIntegrityAttributeButIneligible",
"839": "FormDataAppendFile",
"840": "FormDataAppendFileWithFilename",
"841": "FormDataAppendBlob",
"842": "FormDataAppendBlobWithFilename",
"843": "FormDataAppendNull",
"844": "HTMLDocumentCreateAttributeNameNotLowercase",
"845": "NonHTMLElementSetAttributeNodeFromHTMLDocumentNameNotLowercase",
"846": "DOMStringList_Item_AttributeGetter_IndexedDB",
"847": "DOMStringList_Item_AttributeGetter_Location",
"848": "DOMStringList_Contains_Method_IndexedDB",
"849": "DOMStringList_Contains_Method_Location",
"850": "NavigatorVibrate",
"851": "NavigatorVibrateSubFrame",
"853": "V8XPathEvaluator_Constructor",
"854": "V8XPathEvaluator_CreateExpression_Method",
"855": "V8XPathEvaluator_CreateNSResolver_Method",
"856": "V8XPathEvaluator_Evaluate_Method",
"857": "RequestMIDIAccess",
"858": "V8MouseEvent_LayerX_AttributeGetter",
"859": "V8MouseEvent_LayerY_AttributeGetter",
"860": "InnerTextWithShadowTree",
"861": "SelectionToStringWithShadowTree",
"862": "WindowFindWithShadowTree",
"863": "V8CompositionEvent_InitCompositionEvent_Method",
"864": "V8CustomEvent_InitCustomEvent_Method",
"865": "V8DeviceMotionEvent_InitDeviceMotionEvent_Method",
"866": "V8DeviceOrientationEvent_InitDeviceOrientationEvent_Method",
"867": "V8Event_InitEvent_Method",
"868": "V8KeyboardEvent_InitKeyboardEvent_Method",
"869": "V8MouseEvent_InitMouseEvent_Method",
"870": "V8MutationEvent_InitMutationEvent_Method",
"871": "V8StorageEvent_InitStorageEvent_Method",
"872": "V8TouchEvent_InitTouchEvent_Method",
"873": "V8UIEvent_InitUIEvent_Method",
"874": "V8Document_CreateTouch_Method",
"876": "RequestFileSystemNonWebbyOrigin",
"879": "V8MemoryInfo_TotalJSHeapSize_AttributeGetter",
"880": "V8MemoryInfo_UsedJSHeapSize_AttributeGetter",
"881": "V8MemoryInfo_JSHeapSizeLimit_AttributeGetter",
"882": "V8Performance_Timing_AttributeGetter",
"883": "V8Performance_Navigation_AttributeGetter",
"884": "V8Performance_Memory_AttributeGetter",
"885": "V8SharedWorker_WorkerStart_AttributeGetter",
"886": "HTMLKeygenElement",
"892": "HTMLMediaElementPreloadNone",
"893": "HTMLMediaElementPreloadMetadata",
"894": "HTMLMediaElementPreloadAuto",
"895": "HTMLMediaElementPreloadDefault",
"896": "MixedContentBlockableAllowed",
"897": "PseudoBeforeAfterForInputElement",
"898": "V8Permissions_Revoke_Method",
"899": "LinkRelDnsPrefetch",
"900": "LinkRelPreconnect",
"901": "LinkRelPreload",
"902": "LinkHeaderDnsPrefetch",
"903": "LinkHeaderPreconnect",
"904": "ClientHintsMetaAcceptCH",
"905": "HTMLElementDeprecatedWidth",
"906": "ClientHintsContentDPR",
"907": "ElementAttachShadowOpen",
"908": "ElementAttachShadowClosed",
"909": "AudioParamSetValueAtTime",
"910": "AudioParamLinearRampToValueAtTime",
"911": "AudioParamExponentialRampToValueAtTime",
"912": "AudioParamSetTargetAtTime",
"913": "AudioParamSetValueCurveAtTime",
"914": "AudioParamCancelScheduledValues",
"915": "V8Permissions_Request_Method",
"917": "LinkRelPrefetch",
"918": "LinkRelPrerender",
"919": "LinkRelNext",
"920": "PrefixedPerformanceResourceTimingBufferFull",
"921": "CSSValuePrefixedMinContent",
"922": "CSSValuePrefixedMaxContent",
"923": "CSSValuePrefixedFitContent",
"924": "CSSValuePrefixedFillAvailable",
"926": "PresentationDefaultRequest",
"927": "PresentationAvailabilityChangeEventListener",
"928": "PresentationRequestConstructor",
"929": "PresentationRequestStart",
"930": "PresentationRequestReconnect",
"931": "PresentationRequestGetAvailability",
"932": "PresentationRequestConnectionAvailableEventListener",
"933": "PresentationConnectionTerminate",
"934": "PresentationConnectionSend",
"936": "PresentationConnectionMessageEventListener",
"937": "CSSAnimationsStackedNeutralKeyframe",
"938": "ReadingCheckedInClickHandler",
"939": "FlexboxIntrinsicSizeAlgorithmIsDifferent",
"940": "HTMLImportsHasStyleSheets",
"944": "ClipPathOfPositionedElement",
"945": "ClipCssOfPositionedElement",
"946": "NetInfoType",
"947": "NetInfoDownlinkMax",
"948": "NetInfoOnChange",
"949": "NetInfoOnTypeChange",
"950": "V8Window_Alert_Method",
"951": "V8Window_Confirm_Method",
"952": "V8Window_Prompt_Method",
"953": "V8Window_Print_Method",
"954": "V8Window_RequestIdleCallback_Method",
"955": "FlexboxPercentagePaddingVertical",
"956": "FlexboxPercentageMarginVertical",
"957": "BackspaceNavigatedBack",
"958": "BackspaceNavigatedBackAfterFormInteraction",
"959": "CSPSourceWildcardWouldMatchExactHost",
"960": "CredentialManagerGet",
"961": "CredentialManagerGetWithUI",
"962": "CredentialManagerGetWithoutUI",
"963": "CredentialManagerStore",
"964": "CredentialManagerRequireUserMediation",
"966": "BlockableMixedContentInSubframeBlocked",
"967": "AddEventListenerThirdArgumentIsObject",
"968": "RemoveEventListenerThirdArgumentIsObject",
"969": "CSSAtRuleCharset",
"970": "CSSAtRuleFontFace",
"971": "CSSAtRuleImport",
"972": "CSSAtRuleKeyframes",
"973": "CSSAtRuleMedia",
"974": "CSSAtRuleNamespace",
"975": "CSSAtRulePage",
"976": "CSSAtRuleSupports",
"977": "CSSAtRuleViewport",
"978": "CSSAtRuleWebkitKeyframes",
"979": "V8HTMLFieldSetElement_Elements_AttributeGetter",
"980": "HTMLMediaElementPreloadForcedNone",
"981": "ExternalAddSearchProvider",
"982": "ExternalIsSearchProviderInstalled",
"983": "V8Permissions_RequestAll_Method",
"987": "DeviceOrientationAbsoluteInsecureOrigin",
"988": "DeviceOrientationAbsoluteSecureOrigin",
"989": "FontFaceConstructor",
"990": "ServiceWorkerControlledPage",
"993": "MeterElementWithMeterAppearance",
"994": "MeterElementWithNoneAppearance",
"997": "SelectionAnchorNode",
"998": "SelectionAnchorOffset",
"999": "SelectionFocusNode",
"1000": "SelectionFocusOffset",
"1001": "SelectionIsCollapsed",
"1002": "SelectionRangeCount",
"1003": "SelectionGetRangeAt",
"1004": "SelectionAddRange",
"1005": "SelectionRemoveAllRanges",
"1006": "SelectionCollapse",
"1007": "SelectionCollapseToStart",
"1008": "SelectionCollapseToEnd",
"1009": "SelectionExtend",
"1010": "SelectionSelectAllChildren",
"1011": "SelectionDeleteDromDocument",
"1012": "SelectionDOMString",
"1013": "InputTypeRangeVerticalAppearance",
"1014": "CSSFilterReference",
"1015": "CSSFilterGrayscale",
"1016": "CSSFilterSepia",
"1017": "CSSFilterSaturate",
"1018": "CSSFilterHueRotate",
"1019": "CSSFilterInvert",
"1020": "CSSFilterOpacity",
"1021": "CSSFilterBrightness",
"1022": "CSSFilterContrast",
"1023": "CSSFilterBlur",
"1024": "CSSFilterDropShadow",
"1025": "BackgroundSyncRegister",
"1027": "ExecCommandOnInputOrTextarea",
"1028": "V8History_ScrollRestoration_AttributeGetter",
"1029": "V8History_ScrollRestoration_AttributeSetter",
"1030": "SVG1DOMFilter",
"1031": "OfflineAudioContextStartRendering",
"1032": "OfflineAudioContextSuspend",
"1033": "OfflineAudioContextResume",
"1034": "AttrCloneNode",
"1035": "SVG1DOMPaintServer",
"1036": "SVGSVGElementFragmentSVGView",
"1037": "SVGSVGElementFragmentSVGViewElement",
"1038": "PresentationConnectionClose",
"1039": "SVG1DOMShape",
"1040": "SVG1DOMText",
"1041": "RTCPeerConnectionConstructorConstraints",
"1042": "RTCPeerConnectionConstructorCompliant",
"1044": "RTCPeerConnectionCreateOfferLegacyFailureCallback",
"1045": "RTCPeerConnectionCreateOfferLegacyConstraints",
"1046": "RTCPeerConnectionCreateOfferLegacyOfferOptions",
"1047": "RTCPeerConnectionCreateOfferLegacyCompliant",
"1049": "RTCPeerConnectionCreateAnswerLegacyFailureCallback",
"1050": "RTCPeerConnectionCreateAnswerLegacyConstraints",
"1051": "RTCPeerConnectionCreateAnswerLegacyCompliant",
"1052": "RTCPeerConnectionSetLocalDescriptionLegacyNoSuccessCallback",
"1053": "RTCPeerConnectionSetLocalDescriptionLegacyNoFailureCallback",
"1054": "RTCPeerConnectionSetLocalDescriptionLegacyCompliant",
"1055": "RTCPeerConnectionSetRemoteDescriptionLegacyNoSuccessCallback",
"1056": "RTCPeerConnectionSetRemoteDescriptionLegacyNoFailureCallback",
"1057": "RTCPeerConnectionSetRemoteDescriptionLegacyCompliant",
"1058": "RTCPeerConnectionGetStatsLegacyNonCompliant",
"1059": "NodeFilterIsFunction",
"1060": "NodeFilterIsObject",
"1062": "CSSSelectorInternalPseudoListBox",
"1063": "CSSSelectorInternalMediaControlsCastButton",
"1064": "CSSSelectorInternalMediaControlsOverlayCastButton",
"1065": "CSSSelectorInternalPseudoSpatialNavigationFocus",
"1066": "SameOriginTextScript",
"1067": "SameOriginApplicationScript",
"1068": "SameOriginOtherScript",
"1069": "CrossOriginTextScript",
"1070": "CrossOriginApplicationScript",
"1071": "CrossOriginOtherScript",
"1072": "SVG1DOMSVGTests",
"1073": "V8SVGViewElement_ViewTarget_AttributeGetter",
"1074": "DisableRemotePlaybackAttribute",
"1075": "V8SloppyMode",
"1076": "V8StrictMode",
"1077": "V8StrongMode",
"1078": "AudioNodeConnectToAudioNode",
"1079": "AudioNodeConnectToAudioParam",
"1080": "AudioNodeDisconnectFromAudioNode",
"1081": "AudioNodeDisconnectFromAudioParam",
"1082": "V8CSSFontFaceRule_Style_AttributeGetter",
"1083": "SelectionCollapseNull",
"1084": "SelectionSetBaseAndExtentNull",
"1085": "V8SVGSVGElement_CreateSVGNumber_Method",
"1086": "V8SVGSVGElement_CreateSVGLength_Method",
"1087": "V8SVGSVGElement_CreateSVGAngle_Method",
"1088": "V8SVGSVGElement_CreateSVGPoint_Method",
"1089": "V8SVGSVGElement_CreateSVGMatrix_Method",
"1090": "V8SVGSVGElement_CreateSVGRect_Method",
"1091": "V8SVGSVGElement_CreateSVGTransform_Method",
"1092": "V8SVGSVGElement_CreateSVGTransformFromMatrix_Method",
"1093": "FormNameAccessForNonDescendantImageElement",
"1095": "V8SVGSVGElement_Viewport_AttributeGetter",
"1096": "V8RegExpPrototypeStickyGetter",
"1097": "V8RegExpPrototypeToString",
"1098": "V8InputDeviceCapabilities_FiresTouchEvents_AttributeGetter",
"1099": "DataElement",
"1100": "TimeElement",
"1101": "SVG1DOMUriReference",
"1102": "SVG1DOMZoomAndPan",
"1103": "V8SVGGraphicsElement_Transform_AttributeGetter",
"1104": "MenuItemElement",
"1105": "MenuItemCloseTag",
"1106": "SVG1DOMMarkerElement",
"1107": "SVG1DOMUseElement",
"1108": "SVG1DOMMaskElement",
"1109": "V8SVGAElement_Target_AttributeGetter",
"1110": "V8SVGClipPathElement_ClipPathUnits_AttributeGetter",
"1111": "SVG1DOMFitToViewBox",
"1112": "SVG1DOMCursorElement",
"1113": "V8SVGPathElement_PathLength_AttributeGetter",
"1114": "SVG1DOMSVGElement",
"1115": "SVG1DOMImageElement",
"1116": "SVG1DOMForeignObjectElement",
"1117": "AudioContextCreateIIRFilter",
"1118": "CSSSelectorPseudoSlotted",
"1119": "MediaDevicesEnumerateDevices",
"1120": "NonSecureSharedWorkerAccessedFromSecureContext",
"1121": "SecureSharedWorkerAccessedFromNonSecureContext",
"1123": "EventComposedPath",
"1124": "LinkHeaderPreload",
"1125": "MouseWheelEvent",
"1126": "WheelEvent",
"1127": "MouseWheelAndWheelEvent",
"1128": "BodyScrollsInAdditionToViewport",
"1129": "DocumentDesignModeEnabeld",
"1130": "ContentEditableTrue",
"1131": "ContentEditableTrueOnHTML",
"1132": "ContentEditablePlainTextOnly",
"1133": "V8RegExpPrototypeUnicodeGetter",
"1134": "V8IntlV8Parse",
"1135": "V8IntlPattern",
"1136": "V8IntlResolved",
"1137": "V8PromiseChain",
"1138": "V8PromiseAccept",
"1139": "V8PromiseDefer",
"1140": "EventComposed",
"1141": "GeolocationInsecureOriginIframe",
"1142": "GeolocationSecureOriginIframe",
"1143": "RequestMIDIAccessIframe",
"1144": "GetUserMediaInsecureOriginIframe",
"1145": "GetUserMediaSecureOriginIframe",
"1146": "ElementRequestPointerLockIframe",
"1147": "NotificationAPIInsecureOriginIframe",
"1148": "NotificationAPISecureOriginIframe",
"1149": "WebSocket",
"1150": "MediaStreamConstraintsNameValue",
"1151": "MediaStreamConstraintsFromDictionary",
"1152": "MediaStreamConstraintsConformant",
"1153": "CSSSelectorIndirectAdjacent",
"1156": "CreateImageBitmap",
"1157": "PresentationConnectionConnectEventListener",
"1158": "PresentationConnectionCloseEventListener",
"1159": "PresentationConnectionTerminateEventListener",
"1160": "DocumentCreateEventFontFaceSetLoadEvent",
"1161": "DocumentCreateEventMediaQueryListEvent",
"1162": "DocumentCreateEventAnimationEvent",
"1164": "DocumentCreateEventApplicationCacheErrorEvent",
"1166": "DocumentCreateEventBeforeUnloadEvent",
"1167": "DocumentCreateEventClipboardEvent",
"1168": "DocumentCreateEventCompositionEvent",
"1169": "DocumentCreateEventDragEvent",
"1170": "DocumentCreateEventErrorEvent",
"1171": "DocumentCreateEventFocusEvent",
"1172": "DocumentCreateEventHashChangeEvent",
"1173": "DocumentCreateEventMutationEvent",
"1174": "DocumentCreateEventPageTransitionEvent",
"1176": "DocumentCreateEventPopStateEvent",
"1177": "DocumentCreateEventProgressEvent",
"1178": "DocumentCreateEventPromiseRejectionEvent",
"1180": "DocumentCreateEventResourceProgressEvent",
"1181": "DocumentCreateEventSecurityPolicyViolationEvent",
"1182": "DocumentCreateEventTextEvent",
"1183": "DocumentCreateEventTransitionEvent",
"1184": "DocumentCreateEventWheelEvent",
"1186": "DocumentCreateEventTrackEvent",
"1187": "DocumentCreateEventWebKitAnimationEvent",
"1188": "DocumentCreateEventMutationEvents",
"1189": "DocumentCreateEventOrientationEvent",
"1190": "DocumentCreateEventSVGEvents",
"1191": "DocumentCreateEventWebKitTransitionEvent",
"1192": "DocumentCreateEventBeforeInstallPromptEvent",
"1193": "DocumentCreateEventSyncEvent",
"1195": "DocumentCreateEventDeviceMotionEvent",
"1196": "DocumentCreateEventDeviceOrientationEvent",
"1197": "DocumentCreateEventMediaEncryptedEvent",
"1198": "DocumentCreateEventMediaKeyMessageEvent",
"1199": "DocumentCreateEventGamepadEvent",
"1201": "DocumentCreateEventIDBVersionChangeEvent",
"1202": "DocumentCreateEventBlobEvent",
"1203": "DocumentCreateEventMediaStreamEvent",
"1204": "DocumentCreateEventMediaStreamTrackEvent",
"1205": "DocumentCreateEventRTCDTMFToneChangeEvent",
"1206": "DocumentCreateEventRTCDataChannelEvent",
"1207": "DocumentCreateEventRTCIceCandidateEvent",
"1209": "DocumentCreateEventNotificationEvent",
"1210": "DocumentCreateEventPresentationConnectionAvailableEvent",
"1211": "DocumentCreateEventPresentationConnectionCloseEvent",
"1212": "DocumentCreateEventPushEvent",
"1213": "DocumentCreateEventExtendableEvent",
"1214": "DocumentCreateEventExtendableMessageEvent",
"1215": "DocumentCreateEventFetchEvent",
"1217": "DocumentCreateEventServiceWorkerMessageEvent",
"1218": "DocumentCreateEventSpeechRecognitionError",
"1219": "DocumentCreateEventSpeechRecognitionEvent",
"1220": "DocumentCreateEventSpeechSynthesisEvent",
"1221": "DocumentCreateEventStorageEvent",
"1222": "DocumentCreateEventAudioProcessingEvent",
"1223": "DocumentCreateEventOfflineAudioCompletionEvent",
"1224": "DocumentCreateEventWebGLContextEvent",
"1225": "DocumentCreateEventMIDIConnectionEvent",
"1226": "DocumentCreateEventMIDIMessageEvent",
"1227": "DocumentCreateEventCloseEvent",
"1228": "DocumentCreateEventKeyboardEvents",
"1229": "HTMLMediaElement",
"1230": "HTMLMediaElementInDocument",
"1231": "HTMLMediaElementControlsAttribute",
"1233": "V8Animation_Oncancel_AttributeGetter",
"1234": "V8Animation_Oncancel_AttributeSetter",
"1235": "V8HTMLCommentInExternalScript",
"1236": "V8HTMLComment",
"1237": "V8SloppyModeBlockScopedFunctionRedefinition",
"1238": "V8ForInInitializer",
"1239": "V8Animation_Id_AttributeGetter",
"1240": "V8Animation_Id_AttributeSetter",
"1243": "WebAnimationHyphenatedProperty",
"1244": "FormControlsCollectionReturnsRadioNodeListForFieldSet",
"1245": "ApplicationCacheManifestSelectInsecureOrigin",
"1246": "ApplicationCacheManifestSelectSecureOrigin",
"1247": "ApplicationCacheAPIInsecureOrigin",
"1248": "ApplicationCacheAPISecureOrigin",
"1249": "CSSAtRuleApply",
"1250": "CSSSelectorPseudoAny",
"1251": "PannerNodeSetVelocity",
"1252": "DocumentAllItemNoArguments",
"1253": "DocumentAllItemNamed",
"1254": "DocumentAllItemIndexed",
"1255": "DocumentAllItemIndexedWithNonNumber",
"1256": "DocumentAllLegacyCallNoArguments",
"1257": "DocumentAllLegacyCallNamed",
"1258": "DocumentAllLegacyCallIndexed",
"1259": "DocumentAllLegacyCallIndexedWithNonNumber",
"1260": "DocumentAllLegacyCallTwoArguments",
"1263": "HTMLLabelElementControlForNonFormAssociatedElement",
"1265": "HTMLMediaElementLoadNetworkEmptyNotPaused",
"1267": "V8Window_WebkitSpeechGrammar_ConstructorGetter",
"1268": "V8Window_WebkitSpeechGrammarList_ConstructorGetter",
"1269": "V8Window_WebkitSpeechRecognition_ConstructorGetter",
"1270": "V8Window_WebkitSpeechRecognitionError_ConstructorGetter",
"1271": "V8Window_WebkitSpeechRecognitionEvent_ConstructorGetter",
"1272": "V8Window_SpeechSynthesis_AttributeGetter",
"1273": "V8IDBFactory_WebkitGetDatabaseNames_Method",
"1274": "ImageDocument",
"1275": "ScriptPassesCSPDynamic",
"1277": "CSPWithStrictDynamic",
"1278": "ScrollAnchored",
"1279": "AddEventListenerFourArguments",
"1280": "RemoveEventListenerFourArguments",
"1281": "InvalidReportUriDirectiveInMetaCSP",
"1282": "InvalidSandboxDirectiveInMetaCSP",
"1283": "InvalidFrameAncestorsDirectiveInMetaCSP",
"1287": "SVGCalcModeDiscrete",
"1288": "SVGCalcModeLinear",
"1289": "SVGCalcModePaced",
"1290": "SVGCalcModeSpline",
"1291": "FormSubmissionStarted",
"1292": "FormValidationStarted",
"1293": "FormValidationAbortedSubmission",
"1294": "FormValidationShowedMessage",
"1295": "WebAnimationsEasingAsFunctionLinear",
"1296": "WebAnimationsEasingAsFunctionOther",
"1297": "V8Document_Images_AttributeGetter",
"1298": "V8Document_Embeds_AttributeGetter",
"1299": "V8Document_Plugins_AttributeGetter",
"1300": "V8Document_Links_AttributeGetter",
"1301": "V8Document_Forms_AttributeGetter",
"1302": "V8Document_Scripts_AttributeGetter",
"1303": "V8Document_Anchors_AttributeGetter",
"1304": "V8Document_Applets_AttributeGetter",
"1305": "XMLHttpRequestCrossOriginWithCredentials",
"1306": "MediaStreamTrackRemote",
"1307": "V8Node_IsConnected_AttributeGetter",
"1308": "ShadowRootDelegatesFocus",
"1309": "MixedShadowRootV0AndV1",
"1310": "ImageDocumentInFrame",
"1311": "MediaDocument",
"1312": "MediaDocumentInFrame",
"1313": "PluginDocument",
"1314": "PluginDocumentInFrame",
"1315": "SinkDocument",
"1316": "SinkDocumentInFrame",
"1317": "TextDocument",
"1318": "TextDocumentInFrame",
"1319": "ViewSourceDocument",
"1320": "FileAPINativeLineEndings",
"1321": "PointerEventAttributeCount",
"1322": "CompositedReplication",
"1323": "EncryptedMediaAllSelectedContentTypesHaveCodecs",
"1324": "EncryptedMediaAllSelectedContentTypesMissingCodecs",
"1325": "V8DataTransferItem_WebkitGetAsEntry_Method",
"1326": "V8HTMLInputElement_WebkitEntries_AttributeGetter",
"1327": "Entry_Filesystem_AttributeGetter_IsolatedFileSystem",
"1328": "Entry_GetMetadata_Method_IsolatedFileSystem",
"1329": "Entry_MoveTo_Method_IsolatedFileSystem",
"1330": "Entry_CopyTo_Method_IsolatedFileSystem",
"1331": "Entry_Remove_Method_IsolatedFileSystem",
"1332": "Entry_GetParent_Method_IsolatedFileSystem",
"1333": "Entry_ToURL_Method_IsolatedFileSystem",
"1334": "During_Microtask_Alert",
"1335": "During_Microtask_Confirm",
"1336": "During_Microtask_Print",
"1337": "During_Microtask_Prompt",
"1338": "During_Microtask_SyncXHR",
"1342": "CredentialManagerGetReturnedCredential",
"1343": "GeolocationInsecureOriginDeprecatedNotRemoved",
"1344": "GeolocationInsecureOriginIframeDeprecatedNotRemoved",
"1345": "ProgressElementWithNoneAppearance",
"1346": "ProgressElementWithProgressBarAppearance",
"1347": "PointerEventAddListenerCount",
"1348": "EventCancelBubbleAffected",
"1349": "EventCancelBubbleWasChangedToTrue",
"1350": "EventCancelBubbleWasChangedToFalse",
"1351": "CSSValueAppearanceNone",
"1352": "CSSValueAppearanceNotNone",
"1353": "CSSValueAppearanceOthers",
"1354": "CSSValueAppearanceButton",
"1355": "CSSValueAppearanceCaret",
"1356": "CSSValueAppearanceCheckbox",
"1357": "CSSValueAppearanceMenulist",
"1358": "CSSValueAppearanceMenulistButton",
"1359": "CSSValueAppearanceListbox",
"1360": "CSSValueAppearanceRadio",
"1361": "CSSValueAppearanceSearchField",
"1362": "CSSValueAppearanceTextField",
"1363": "AudioContextCreatePannerAutomated",
"1364": "PannerNodeSetPosition",
"1365": "PannerNodeSetOrientation",
"1366": "AudioListenerSetPosition",
"1367": "AudioListenerSetOrientation",
"1368": "IntersectionObserver_Constructor",
"1369": "DurableStoragePersist",
"1370": "DurableStoragePersisted",
"1371": "DurableStorageEstimate",
"1372": "UntrustedEventDefaultHandled",
"1375": "CSSDeepCombinatorAndShadow",
"1376": "OpacityWithPreserve3DQuirk",
"1377": "CSSSelectorPseudoReadOnly",
"1378": "CSSSelectorPseudoReadWrite",
"1379": "UnloadHandler_Navigation",
"1380": "TouchStartUserGestureUtilized",
"1381": "TouchMoveUserGestureUtilized",
"1382": "TouchEndDuringScrollUserGestureUtilized",
"1383": "CSSSelectorPseudoDefined",
"1384": "RTCPeerConnectionAddIceCandidatePromise",
"1385": "RTCPeerConnectionAddIceCandidateLegacy",
"1386": "RTCIceCandidateDefaultSdpMLineIndex",
"1389": "MediaStreamConstraintsOldAndNew",
"1390": "V8ArrayProtectorDirtied",
"1391": "V8ArraySpeciesModified",
"1392": "V8ArrayPrototypeConstructorModified",
"1393": "V8ArrayInstanceProtoModified",
"1394": "V8ArrayInstanceConstructorModified",
"1395": "V8LegacyFunctionDeclaration",
"1396": "V8RegExpPrototypeSourceGetter",
"1397": "V8RegExpPrototypeOldFlagGetter",
"1398": "V8DecimalWithLeadingZeroInStrictMode",
"1399": "FormSubmissionNotInDocumentTree",
"1400": "GetUserMediaPrefixed",
"1401": "GetUserMediaLegacy",
"1402": "GetUserMediaPromise",
"1403": "CSSFilterFunctionNoArguments",
"1404": "V8LegacyDateParser",
"1405": "OpenSearchInsecureOriginInsecureTarget",
"1406": "OpenSearchInsecureOriginSecureTarget",
"1407": "OpenSearchSecureOriginInsecureTarget",
"1408": "OpenSearchSecureOriginSecureTarget",
"1409": "RegisterProtocolHandlerSecureOrigin",
"1410": "RegisterProtocolHandlerInsecureOrigin",
"1411": "CrossOriginWindowAlert",
"1412": "CrossOriginWindowConfirm",
"1413": "CrossOriginWindowPrompt",
"1414": "CrossOriginWindowPrint",
"1415": "MediaStreamOnActive",
"1416": "MediaStreamOnInactive",
"1417": "AddEventListenerPassiveTrue",
"1418": "AddEventListenerPassiveFalse",
"1419": "CSPReferrerDirective",
"1420": "DocumentOpen",
"1421": "ElementRequestPointerLockInShadow",
"1422": "ShadowRootPointerLockElement",
"1423": "DocumentPointerLockElementInV0Shadow",
"1424": "TextAreaMaxLength",
"1425": "TextAreaMinLength",
"1426": "TopNavigationFromSubFrame",
"1427": "PrefixedElementRequestFullscreenInShadow",
"1428": "MediaSourceAbortRemove",
"1429": "MediaSourceDurationTruncatingBuffered",
"1430": "AudioContextCrossOriginIframe",
"1431": "PointerEventSetCapture",
"1432": "PointerEventDispatch",
"1433": "MIDIMessageEventReceivedTime",
"1434": "SummaryElementWithDisplayBlockAuthorRule",
"1435": "V8MediaStream_Active_AttributeGetter",
"1436": "BeforeInstallPromptEvent",
"1437": "BeforeInstallPromptEventUserChoice",
"1438": "BeforeInstallPromptEventPreventDefault",
"1439": "BeforeInstallPromptEventPrompt",
"1440": "ExecCommandAltersHTMLStructure",
"1441": "SecureContextCheckPassed",
"1442": "SecureContextCheckFailed",
"1443": "SecureContextCheckForSandboxedOriginPassed",
"1444": "SecureContextCheckForSandboxedOriginFailed",
"1445": "V8DefineGetterOrSetterWouldThrow",
"1446": "V8FunctionConstructorReturnedUndefined",
"1447": "V8BroadcastChannel_Constructor",
"1448": "V8BroadcastChannel_PostMessage_Method",
"1449": "V8BroadcastChannel_Close_Method",
"1450": "TouchStartFired",
"1451": "MouseDownFired",
"1452": "PointerDownFired",
"1453": "PointerDownFiredForTouch",
"1454": "PointerEventDispatchPointerDown",
"1455": "SVGSMILBeginOrEndEventValue",
"1456": "SVGSMILBeginOrEndSyncbaseValue",
"1457": "SVGSMILElementInsertedAfterLoad",
"1458": "V8VisualViewport_ScrollLeft_AttributeGetter",
"1459": "V8VisualViewport_ScrollTop_AttributeGetter",
"1460": "V8VisualViewport_PageX_AttributeGetter",
"1461": "V8VisualViewport_PageY_AttributeGetter",
"1462": "V8VisualViewport_ClientWidth_AttributeGetter",
"1463": "V8VisualViewport_ClientHeight_AttributeGetter",
"1464": "V8VisualViewport_Scale_AttributeGetter",
"1465": "VisualViewportScrollFired",
"1466": "VisualViewportResizeFired",
"1467": "NodeGetRootNode",
"1468": "SlotChangeEventAddListener",
"1469": "CSSValueAppearanceButtonRendered",
"1470": "CSSValueAppearanceButtonForAnchor",
"1471": "CSSValueAppearanceButtonForButton",
"1472": "CSSValueAppearanceButtonForOtherButtons",
"1473": "CSSValueAppearanceTextFieldRendered",
"1474": "CSSValueAppearanceTextFieldForSearch",
"1475": "CSSValueAppearanceTextFieldForTextField",
"1476": "RTCPeerConnectionGetStats",
"1477": "SVGSMILAnimationAppliedEffect",
"1478": "PerformanceResourceTimingSizes",
"1479": "EventSourceDocument",
"1480": "EventSourceWorker",
"1481": "SingleOriginInTimingAllowOrigin",
"1482": "MultipleOriginsInTimingAllowOrigin",
"1483": "StarInTimingAllowOrigin",
"1484": "SVGSMILAdditiveAnimation",
"1485": "SendBeaconWithNonSimpleContentType",
"1486": "ChromeLoadTimesRequestTime",
"1487": "ChromeLoadTimesStartLoadTime",
"1488": "ChromeLoadTimesCommitLoadTime",
"1489": "ChromeLoadTimesFinishDocumentLoadTime",
"1490": "ChromeLoadTimesFinishLoadTime",
"1491": "ChromeLoadTimesFirstPaintTime",
"1492": "ChromeLoadTimesFirstPaintAfterLoadTime",
"1493": "ChromeLoadTimesNavigationType",
"1494": "ChromeLoadTimesWasFetchedViaSpdy",
"1495": "ChromeLoadTimesWasNpnNegotiated",
"1496": "ChromeLoadTimesNpnNegotiatedProtocol",
"1497": "ChromeLoadTimesWasAlternateProtocolAvailable",
"1498": "ChromeLoadTimesConnectionInfo",
"1499": "ChromeLoadTimesUnknown",
"1500": "SVGViewElement",
"1501": "WebShareShare",
"1502": "AuxclickAddListenerCount",
"1503": "HTMLCanvasElement",
"1504": "SVGSMILAnimationElementTiming",
"1505": "SVGSMILBeginEndAnimationElement",
"1506": "SVGSMILPausing",
"1507": "SVGSMILCurrentTime",
"1508": "HTMLBodyElementOnSelectionChangeAttribute",
"1509": "ForeignFetchInterception",
"1510": "MapNameMatchingStrict",
"1511": "MapNameMatchingASCIICaseless",
"1512": "MapNameMatchingUnicodeLower",
"1513": "RadioNameMatchingStrict",
"1514": "RadioNameMatchingASCIICaseless",
"1515": "RadioNameMatchingCaseFolding",
"1517": "InputSelectionGettersThrow",
"1519": "UsbGetDevices",
"1520": "UsbRequestDevice",
"1521": "UsbDeviceOpen",
"1522": "UsbDeviceClose",
"1523": "UsbDeviceSelectConfiguration",
"1524": "UsbDeviceClaimInterface",
"1525": "UsbDeviceReleaseInterface",
"1526": "UsbDeviceSelectAlternateInterface",
"1527": "UsbDeviceControlTransferIn",
"1528": "UsbDeviceControlTransferOut",
"1529": "UsbDeviceClearHalt",
"1530": "UsbDeviceTransferIn",
"1531": "UsbDeviceTransferOut",
"1532": "UsbDeviceIsochronousTransferIn",
"1533": "UsbDeviceIsochronousTransferOut",
"1534": "UsbDeviceReset",
"1535": "PointerEnterLeaveFired",
"1536": "PointerOverOutFired",
"1539": "DraggableAttribute",
"1540": "CleanScriptElementWithNonce",
"1541": "PotentiallyInjectedScriptElementWithNonce",
"1542": "PendingStylesheetAddedAfterBodyStarted",
"1543": "UntrustedMouseDownEventDispatchedToSelect",
"1544": "BlockedSniffingAudioToScript",
"1545": "BlockedSniffingVideoToScript",
"1546": "BlockedSniffingCSVToScript",
"1547": "MetaSetCookie",
"1548": "MetaRefresh",
"1549": "MetaSetCookieWhenCSPBlocksInlineScript",
"1550": "MetaRefreshWhenCSPBlocksInlineScript",
"1551": "MiddleClickAutoscrollStart",
"1552": "ClipCssOfFixedPositionElement",
"1553": "RTCPeerConnectionCreateOfferOptionsOfferToReceive",
"1554": "DragAndDropScrollStart",
"1555": "PresentationConnectionListConnectionAvailableEventListener",
"1556": "WebAudioAutoplayCrossOriginIframe",
"1557": "ScriptInvalidTypeOrLanguage",
"1558": "VRGetDisplays",
"1559": "VRPresent",
"1560": "VRDeprecatedGetPose",
"1561": "WebAudioAnalyserNode",
"1562": "WebAudioAudioBuffer",
"1563": "WebAudioAudioBufferSourceNode",
"1564": "WebAudioBiquadFilterNode",
"1565": "WebAudioChannelMergerNode",
"1566": "WebAudioChannelSplitterNode",
"1567": "WebAudioConvolverNode",
"1568": "WebAudioDelayNode",
"1569": "WebAudioDynamicsCompressorNode",
"1570": "WebAudioGainNode",
"1571": "WebAudioIIRFilterNode",
"1572": "WebAudioMediaElementAudioSourceNode",
"1573": "WebAudioOscillatorNode",
"1574": "WebAudioPannerNode",
"1575": "WebAudioPeriodicWave",
"1576": "WebAudioStereoPannerNode",
"1577": "WebAudioWaveShaperNode",
"1578": "CSSZoomReset",
"1579": "CSSZoomDocument",
"1580": "PaymentAddressCareOf",
"1581": "XSSAuditorBlockedScript",
"1582": "XSSAuditorBlockedEntirePage",
"1583": "XSSAuditorDisabled",
"1584": "XSSAuditorEnabledFilter",
"1585": "XSSAuditorEnabledBlock",
"1586": "XSSAuditorInvalid",
"1587": "SVGCursorElement",
"1588": "SVGCursorElementHasClient",
"1589": "TextInputEventOnInput",
"1590": "TextInputEventOnTextArea",
"1591": "TextInputEventOnContentEditable",
"1592": "TextInputEventOnNotNode",
"1593": "WebkitBeforeTextInsertedOnInput",
"1594": "WebkitBeforeTextInsertedOnTextArea",
"1595": "WebkitBeforeTextInsertedOnContentEditable",
"1596": "WebkitBeforeTextInsertedOnNotNode",
"1597": "WebkitEditableContentChangedOnInput",
"1598": "WebkitEditableContentChangedOnTextArea",
"1599": "WebkitEditableContentChangedOnContentEditable",
"1600": "WebkitEditableContentChangedOnNotNode",
"1601": "V8NavigatorUserMediaError_ConstraintName_AttributeGetter",
"1602": "V8HTMLMediaElement_SrcObject_AttributeGetter",
"1603": "V8HTMLMediaElement_SrcObject_AttributeSetter",
"1604": "CreateObjectURLBlob",
"1605": "CreateObjectURLMediaSource",
"1606": "CreateObjectURLMediaStream",
"1607": "DocumentCreateTouchWindowNull",
"1608": "DocumentCreateTouchWindowWrongType",
"1609": "DocumentCreateTouchTargetNull",
"1610": "DocumentCreateTouchTargetWrongType",
"1611": "DocumentCreateTouchLessThanSevenArguments",
"1612": "DocumentCreateTouchMoreThanSevenArguments",
"1613": "EncryptedMediaCapabilityProvided",
"1614": "EncryptedMediaCapabilityNotProvided",
"1615": "LongTaskObserver",
"1616": "CSSMotionInEffect",
"1617": "CSSOffsetInEffect",
"1618": "VRGetDisplaysInsecureOrigin",
"1619": "VRRequestPresent",
"1620": "VRRequestPresentInsecureOrigin",
"1621": "VRDeprecatedFieldOfView",
"1622": "VideoInCanvas",
"1623": "HiddenAutoplayedVideoInCanvas",
"1624": "OffscreenCanvas",
"1625": "GamepadPose",
"1626": "GamepadHand",
"1627": "GamepadDisplayId",
"1628": "GamepadButtonTouched",
"1629": "GamepadPoseHasOrientation",
"1630": "GamepadPoseHasPosition",
"1631": "GamepadPosePosition",
"1632": "GamepadPoseLinearVelocity",
"1633": "GamepadPoseLinearAcceleration",
"1634": "GamepadPoseOrientation",
"1635": "GamepadPoseAngularVelocity",
"1636": "GamepadPoseAngularAcceleration",
"1638": "V8RTCDataChannel_MaxRetransmitTime_AttributeGetter",
"1639": "V8RTCDataChannel_MaxRetransmits_AttributeGetter",
"1640": "V8RTCDataChannel_Reliable_AttributeGetter",
"1641": "V8RTCPeerConnection_AddStream_Method",
"1642": "V8RTCPeerConnection_CreateDTMFSender_Method",
"1643": "V8RTCPeerConnection_GetLocalStreams_Method",
"1644": "V8RTCPeerConnection_GetRemoteStreams_Method",
"1645": "V8RTCPeerConnection_GetStreamById_Method",
"1646": "V8RTCPeerConnection_RemoveStream_Method",
"1647": "V8RTCPeerConnection_UpdateIce_Method",
"1648": "RTCPeerConnectionCreateDataChannelMaxRetransmitTime",
"1649": "RTCPeerConnectionCreateDataChannelMaxRetransmits",
"1650": "AudioContextCreateConstantSource",
"1651": "WebAudioConstantSourceNode",
"1652": "LoopbackEmbeddedInSecureContext",
"1653": "LoopbackEmbeddedInNonSecureContext",
"1654": "BlinkMacSystemFont",
"1655": "RTCConfigurationIceTransportsNone",
"1656": "RTCIceServerURL",
"1657": "RTCIceServerURLs",
"1658": "OffscreenCanvasTransferToImageBitmap2D",
"1659": "OffscreenCanvasTransferToImageBitmapWebGL",
"1660": "OffscreenCanvasCommit2D",
"1661": "OffscreenCanvasCommitWebGL",
"1662": "RTCConfigurationIceTransportPolicy",
"1663": "RTCConfigurationIceTransportPolicyNone",
"1664": "RTCConfigurationIceTransports",
"1665": "DocumentFullscreenElementInV0Shadow",
"1666": "ScriptWithCSPBypassingSchemeParserInserted",
"1667": "ScriptWithCSPBypassingSchemeNotParserInserted",
"1668": "DocumentCreateElement2ndArgStringHandling",
"1669": "V8MediaRecorder_Start_Method",
"1670": "WebBluetoothRequestDevice",
"1671": "UnitlessPerspectiveInPerspectiveProperty",
"1672": "UnitlessPerspectiveInTransformProperty",
"1673": "V8RTCSessionDescription_Type_AttributeGetter",
"1674": "V8RTCSessionDescription_Type_AttributeSetter",
"1675": "V8RTCSessionDescription_Sdp_AttributeGetter",
"1676": "V8RTCSessionDescription_Sdp_AttributeSetter",
"1677": "RTCSessionDescriptionInitNoType",
"1678": "RTCSessionDescriptionInitNoSdp",
"1679": "HTMLMediaElementPreloadForcedMetadata",
"1680": "GenericSensorStart",
"1681": "GenericSensorStop",
"1682": "TouchEventPreventedNoTouchAction",
"1683": "TouchEventPreventedForcedDocumentPassiveNoTouchAction",
"1684": "V8Event_StopPropagation_Method",
"1685": "V8Event_StopImmediatePropagation_Method",
"1686": "ImageCaptureConstructor",
"1687": "V8Document_RootScroller_AttributeGetter",
"1688": "V8Document_RootScroller_AttributeSetter",
"1689": "CustomElementRegistryDefine",
"1690": "LinkHeaderServiceWorker",
"1691": "CSSShadowPiercingDescendantCombinator",
"1692": "CSSFlexibleBox",
"1693": "CSSGridLayout",
"1694": "V8BarcodeDetector_Detect_Method",
"1695": "V8FaceDetector_Detect_Method"
}
########################################################################################################################
# CSS feature names from https://cs.chromium.org/chromium/src/third_party/WebKit/Source/core/frame/UseCounter.cpp
########################################################################################################################
# Maps Blink UseCounter CSS property IDs (decimal strings, as reported in
# trace events) to their CSSProperty* feature names, mirroring the table in
# third_party/WebKit/Source/core/frame/UseCounter.cpp (see URL above).
# Gaps in the numbering (e.g. 130 -> 136, 253-256 absent) reflect entries
# missing from that upstream table; keys are kept as strings because the
# trace data supplies them as strings.
CSS_FEATURES = {
"2": "CSSPropertyColor",
"3": "CSSPropertyDirection",
"4": "CSSPropertyDisplay",
"5": "CSSPropertyFont",
"6": "CSSPropertyFontFamily",
"7": "CSSPropertyFontSize",
"8": "CSSPropertyFontStyle",
"9": "CSSPropertyFontVariant",
"10": "CSSPropertyFontWeight",
"11": "CSSPropertyTextRendering",
"12": "CSSPropertyAliasWebkitFontFeatureSettings",
"13": "CSSPropertyFontKerning",
"14": "CSSPropertyWebkitFontSmoothing",
"15": "CSSPropertyFontVariantLigatures",
"16": "CSSPropertyWebkitLocale",
"17": "CSSPropertyWebkitTextOrientation",
"18": "CSSPropertyWebkitWritingMode",
"19": "CSSPropertyZoom",
"20": "CSSPropertyLineHeight",
"21": "CSSPropertyBackground",
"22": "CSSPropertyBackgroundAttachment",
"23": "CSSPropertyBackgroundClip",
"24": "CSSPropertyBackgroundColor",
"25": "CSSPropertyBackgroundImage",
"26": "CSSPropertyBackgroundOrigin",
"27": "CSSPropertyBackgroundPosition",
"28": "CSSPropertyBackgroundPositionX",
"29": "CSSPropertyBackgroundPositionY",
"30": "CSSPropertyBackgroundRepeat",
"31": "CSSPropertyBackgroundRepeatX",
"32": "CSSPropertyBackgroundRepeatY",
"33": "CSSPropertyBackgroundSize",
"34": "CSSPropertyBorder",
"35": "CSSPropertyBorderBottom",
"36": "CSSPropertyBorderBottomColor",
"37": "CSSPropertyBorderBottomLeftRadius",
"38": "CSSPropertyBorderBottomRightRadius",
"39": "CSSPropertyBorderBottomStyle",
"40": "CSSPropertyBorderBottomWidth",
"41": "CSSPropertyBorderCollapse",
"42": "CSSPropertyBorderColor",
"43": "CSSPropertyBorderImage",
"44": "CSSPropertyBorderImageOutset",
"45": "CSSPropertyBorderImageRepeat",
"46": "CSSPropertyBorderImageSlice",
"47": "CSSPropertyBorderImageSource",
"48": "CSSPropertyBorderImageWidth",
"49": "CSSPropertyBorderLeft",
"50": "CSSPropertyBorderLeftColor",
"51": "CSSPropertyBorderLeftStyle",
"52": "CSSPropertyBorderLeftWidth",
"53": "CSSPropertyBorderRadius",
"54": "CSSPropertyBorderRight",
"55": "CSSPropertyBorderRightColor",
"56": "CSSPropertyBorderRightStyle",
"57": "CSSPropertyBorderRightWidth",
"58": "CSSPropertyBorderSpacing",
"59": "CSSPropertyBorderStyle",
"60": "CSSPropertyBorderTop",
"61": "CSSPropertyBorderTopColor",
"62": "CSSPropertyBorderTopLeftRadius",
"63": "CSSPropertyBorderTopRightRadius",
"64": "CSSPropertyBorderTopStyle",
"65": "CSSPropertyBorderTopWidth",
"66": "CSSPropertyBorderWidth",
"67": "CSSPropertyBottom",
"68": "CSSPropertyBoxShadow",
"69": "CSSPropertyBoxSizing",
"70": "CSSPropertyCaptionSide",
"71": "CSSPropertyClear",
"72": "CSSPropertyClip",
"73": "CSSPropertyAliasWebkitClipPath",
"74": "CSSPropertyContent",
"75": "CSSPropertyCounterIncrement",
"76": "CSSPropertyCounterReset",
"77": "CSSPropertyCursor",
"78": "CSSPropertyEmptyCells",
"79": "CSSPropertyFloat",
"80": "CSSPropertyFontStretch",
"81": "CSSPropertyHeight",
"82": "CSSPropertyImageRendering",
"83": "CSSPropertyLeft",
"84": "CSSPropertyLetterSpacing",
"85": "CSSPropertyListStyle",
"86": "CSSPropertyListStyleImage",
"87": "CSSPropertyListStylePosition",
"88": "CSSPropertyListStyleType",
"89": "CSSPropertyMargin",
"90": "CSSPropertyMarginBottom",
"91": "CSSPropertyMarginLeft",
"92": "CSSPropertyMarginRight",
"93": "CSSPropertyMarginTop",
"94": "CSSPropertyMaxHeight",
"95": "CSSPropertyMaxWidth",
"96": "CSSPropertyMinHeight",
"97": "CSSPropertyMinWidth",
"98": "CSSPropertyOpacity",
"99": "CSSPropertyOrphans",
"100": "CSSPropertyOutline",
"101": "CSSPropertyOutlineColor",
"102": "CSSPropertyOutlineOffset",
"103": "CSSPropertyOutlineStyle",
"104": "CSSPropertyOutlineWidth",
"105": "CSSPropertyOverflow",
"106": "CSSPropertyOverflowWrap",
"107": "CSSPropertyOverflowX",
"108": "CSSPropertyOverflowY",
"109": "CSSPropertyPadding",
"110": "CSSPropertyPaddingBottom",
"111": "CSSPropertyPaddingLeft",
"112": "CSSPropertyPaddingRight",
"113": "CSSPropertyPaddingTop",
"114": "CSSPropertyPage",
"115": "CSSPropertyPageBreakAfter",
"116": "CSSPropertyPageBreakBefore",
"117": "CSSPropertyPageBreakInside",
"118": "CSSPropertyPointerEvents",
"119": "CSSPropertyPosition",
"120": "CSSPropertyQuotes",
"121": "CSSPropertyResize",
"122": "CSSPropertyRight",
"123": "CSSPropertySize",
"124": "CSSPropertySrc",
"125": "CSSPropertySpeak",
"126": "CSSPropertyTableLayout",
"127": "CSSPropertyTabSize",
"128": "CSSPropertyTextAlign",
"129": "CSSPropertyTextDecoration",
"130": "CSSPropertyTextIndent",
"136": "CSSPropertyTextOverflow",
"142": "CSSPropertyTextShadow",
"143": "CSSPropertyTextTransform",
"149": "CSSPropertyTop",
"150": "CSSPropertyTransition",
"151": "CSSPropertyTransitionDelay",
"152": "CSSPropertyTransitionDuration",
"153": "CSSPropertyTransitionProperty",
"154": "CSSPropertyTransitionTimingFunction",
"155": "CSSPropertyUnicodeBidi",
"156": "CSSPropertyUnicodeRange",
"157": "CSSPropertyVerticalAlign",
"158": "CSSPropertyVisibility",
"159": "CSSPropertyWhiteSpace",
"160": "CSSPropertyWidows",
"161": "CSSPropertyWidth",
"162": "CSSPropertyWordBreak",
"163": "CSSPropertyWordSpacing",
"164": "CSSPropertyWordWrap",
"165": "CSSPropertyZIndex",
"166": "CSSPropertyAliasWebkitAnimation",
"167": "CSSPropertyAliasWebkitAnimationDelay",
"168": "CSSPropertyAliasWebkitAnimationDirection",
"169": "CSSPropertyAliasWebkitAnimationDuration",
"170": "CSSPropertyAliasWebkitAnimationFillMode",
"171": "CSSPropertyAliasWebkitAnimationIterationCount",
"172": "CSSPropertyAliasWebkitAnimationName",
"173": "CSSPropertyAliasWebkitAnimationPlayState",
"174": "CSSPropertyAliasWebkitAnimationTimingFunction",
"175": "CSSPropertyWebkitAppearance",
"176": "CSSPropertyWebkitAspectRatio",
"177": "CSSPropertyAliasWebkitBackfaceVisibility",
"178": "CSSPropertyWebkitBackgroundClip",
"179": "CSSPropertyWebkitBackgroundComposite",
"180": "CSSPropertyWebkitBackgroundOrigin",
"181": "CSSPropertyAliasWebkitBackgroundSize",
"182": "CSSPropertyWebkitBorderAfter",
"183": "CSSPropertyWebkitBorderAfterColor",
"184": "CSSPropertyWebkitBorderAfterStyle",
"185": "CSSPropertyWebkitBorderAfterWidth",
"186": "CSSPropertyWebkitBorderBefore",
"187": "CSSPropertyWebkitBorderBeforeColor",
"188": "CSSPropertyWebkitBorderBeforeStyle",
"189": "CSSPropertyWebkitBorderBeforeWidth",
"190": "CSSPropertyWebkitBorderEnd",
"191": "CSSPropertyWebkitBorderEndColor",
"192": "CSSPropertyWebkitBorderEndStyle",
"193": "CSSPropertyWebkitBorderEndWidth",
"194": "CSSPropertyWebkitBorderFit",
"195": "CSSPropertyWebkitBorderHorizontalSpacing",
"196": "CSSPropertyWebkitBorderImage",
"197": "CSSPropertyAliasWebkitBorderRadius",
"198": "CSSPropertyWebkitBorderStart",
"199": "CSSPropertyWebkitBorderStartColor",
"200": "CSSPropertyWebkitBorderStartStyle",
"201": "CSSPropertyWebkitBorderStartWidth",
"202": "CSSPropertyWebkitBorderVerticalSpacing",
"203": "CSSPropertyWebkitBoxAlign",
"204": "CSSPropertyWebkitBoxDirection",
"205": "CSSPropertyWebkitBoxFlex",
"206": "CSSPropertyWebkitBoxFlexGroup",
"207": "CSSPropertyWebkitBoxLines",
"208": "CSSPropertyWebkitBoxOrdinalGroup",
"209": "CSSPropertyWebkitBoxOrient",
"210": "CSSPropertyWebkitBoxPack",
"211": "CSSPropertyWebkitBoxReflect",
"212": "CSSPropertyAliasWebkitBoxShadow",
"215": "CSSPropertyWebkitColumnBreakAfter",
"216": "CSSPropertyWebkitColumnBreakBefore",
"217": "CSSPropertyWebkitColumnBreakInside",
"218": "CSSPropertyAliasWebkitColumnCount",
"219": "CSSPropertyAliasWebkitColumnGap",
"220": "CSSPropertyWebkitColumnProgression",
"221": "CSSPropertyAliasWebkitColumnRule",
"222": "CSSPropertyAliasWebkitColumnRuleColor",
"223": "CSSPropertyAliasWebkitColumnRuleStyle",
"224": "CSSPropertyAliasWebkitColumnRuleWidth",
"225": "CSSPropertyAliasWebkitColumnSpan",
"226": "CSSPropertyAliasWebkitColumnWidth",
"227": "CSSPropertyAliasWebkitColumns",
"228": "CSSPropertyWebkitBoxDecorationBreak",
"229": "CSSPropertyWebkitFilter",
"230": "CSSPropertyAlignContent",
"231": "CSSPropertyAlignItems",
"232": "CSSPropertyAlignSelf",
"233": "CSSPropertyFlex",
"234": "CSSPropertyFlexBasis",
"235": "CSSPropertyFlexDirection",
"236": "CSSPropertyFlexFlow",
"237": "CSSPropertyFlexGrow",
"238": "CSSPropertyFlexShrink",
"239": "CSSPropertyFlexWrap",
"240": "CSSPropertyJustifyContent",
"241": "CSSPropertyWebkitFontSizeDelta",
"242": "CSSPropertyGridTemplateColumns",
"243": "CSSPropertyGridTemplateRows",
"244": "CSSPropertyGridColumnStart",
"245": "CSSPropertyGridColumnEnd",
"246": "CSSPropertyGridRowStart",
"247": "CSSPropertyGridRowEnd",
"248": "CSSPropertyGridColumn",
"249": "CSSPropertyGridRow",
"250": "CSSPropertyGridAutoFlow",
"251": "CSSPropertyWebkitHighlight",
"252": "CSSPropertyWebkitHyphenateCharacter",
"257": "CSSPropertyWebkitLineBoxContain",
"258": "CSSPropertyWebkitLineAlign",
"259": "CSSPropertyWebkitLineBreak",
"260": "CSSPropertyWebkitLineClamp",
"261": "CSSPropertyWebkitLineGrid",
"262": "CSSPropertyWebkitLineSnap",
"263": "CSSPropertyWebkitLogicalWidth",
"264": "CSSPropertyWebkitLogicalHeight",
"265": "CSSPropertyWebkitMarginAfterCollapse",
"266": "CSSPropertyWebkitMarginBeforeCollapse",
"267": "CSSPropertyWebkitMarginBottomCollapse",
"268": "CSSPropertyWebkitMarginTopCollapse",
"269": "CSSPropertyWebkitMarginCollapse",
"270": "CSSPropertyWebkitMarginAfter",
"271": "CSSPropertyWebkitMarginBefore",
"272": "CSSPropertyWebkitMarginEnd",
"273": "CSSPropertyWebkitMarginStart",
"280": "CSSPropertyWebkitMask",
"281": "CSSPropertyWebkitMaskBoxImage",
"282": "CSSPropertyWebkitMaskBoxImageOutset",
"283": "CSSPropertyWebkitMaskBoxImageRepeat",
"284": "CSSPropertyWebkitMaskBoxImageSlice",
"285": "CSSPropertyWebkitMaskBoxImageSource",
"286": "CSSPropertyWebkitMaskBoxImageWidth",
"287": "CSSPropertyWebkitMaskClip",
"288": "CSSPropertyWebkitMaskComposite",
"289": "CSSPropertyWebkitMaskImage",
"290": "CSSPropertyWebkitMaskOrigin",
"291": "CSSPropertyWebkitMaskPosition",
"292": "CSSPropertyWebkitMaskPositionX",
"293": "CSSPropertyWebkitMaskPositionY",
"294": "CSSPropertyWebkitMaskRepeat",
"295": "CSSPropertyWebkitMaskRepeatX",
"296": "CSSPropertyWebkitMaskRepeatY",
"297": "CSSPropertyWebkitMaskSize",
"298": "CSSPropertyWebkitMaxLogicalWidth",
"299": "CSSPropertyWebkitMaxLogicalHeight",
"300": "CSSPropertyWebkitMinLogicalWidth",
"301": "CSSPropertyWebkitMinLogicalHeight",
"303": "CSSPropertyOrder",
"304": "CSSPropertyWebkitPaddingAfter",
"305": "CSSPropertyWebkitPaddingBefore",
"306": "CSSPropertyWebkitPaddingEnd",
"307": "CSSPropertyWebkitPaddingStart",
"308": "CSSPropertyAliasWebkitPerspective",
"309": "CSSPropertyAliasWebkitPerspectiveOrigin",
"310": "CSSPropertyWebkitPerspectiveOriginX",
"311": "CSSPropertyWebkitPerspectiveOriginY",
"312": "CSSPropertyWebkitPrintColorAdjust",
"313": "CSSPropertyWebkitRtlOrdering",
"314": "CSSPropertyWebkitRubyPosition",
"315": "CSSPropertyWebkitTextCombine",
"316": "CSSPropertyWebkitTextDecorationsInEffect",
"317": "CSSPropertyWebkitTextEmphasis",
"318": "CSSPropertyWebkitTextEmphasisColor",
"319": "CSSPropertyWebkitTextEmphasisPosition",
"320": "CSSPropertyWebkitTextEmphasisStyle",
"321": "CSSPropertyWebkitTextFillColor",
"322": "CSSPropertyWebkitTextSecurity",
"323": "CSSPropertyWebkitTextStroke",
"324": "CSSPropertyWebkitTextStrokeColor",
"325": "CSSPropertyWebkitTextStrokeWidth",
"326": "CSSPropertyAliasWebkitTransform",
"327": "CSSPropertyAliasWebkitTransformOrigin",
"328": "CSSPropertyWebkitTransformOriginX",
"329": "CSSPropertyWebkitTransformOriginY",
"330": "CSSPropertyWebkitTransformOriginZ",
"331": "CSSPropertyAliasWebkitTransformStyle",
"332": "CSSPropertyAliasWebkitTransition",
"333": "CSSPropertyAliasWebkitTransitionDelay",
"334": "CSSPropertyAliasWebkitTransitionDuration",
"335": "CSSPropertyAliasWebkitTransitionProperty",
"336": "CSSPropertyAliasWebkitTransitionTimingFunction",
"337": "CSSPropertyWebkitUserDrag",
"338": "CSSPropertyWebkitUserModify",
"339": "CSSPropertyAliasWebkitUserSelect",
"340": "CSSPropertyWebkitFlowInto",
"341": "CSSPropertyWebkitFlowFrom",
"342": "CSSPropertyWebkitRegionFragment",
"343": "CSSPropertyWebkitRegionBreakAfter",
"344": "CSSPropertyWebkitRegionBreakBefore",
"345": "CSSPropertyWebkitRegionBreakInside",
"346": "CSSPropertyShapeInside",
"347": "CSSPropertyShapeOutside",
"348": "CSSPropertyShapeMargin",
"349": "CSSPropertyShapePadding",
"350": "CSSPropertyWebkitWrapFlow",
"351": "CSSPropertyWebkitWrapThrough",
"355": "CSSPropertyClipPath",
"356": "CSSPropertyClipRule",
"357": "CSSPropertyMask",
"359": "CSSPropertyFilter",
"360": "CSSPropertyFloodColor",
"361": "CSSPropertyFloodOpacity",
"362": "CSSPropertyLightingColor",
"363": "CSSPropertyStopColor",
"364": "CSSPropertyStopOpacity",
"365": "CSSPropertyColorInterpolation",
"366": "CSSPropertyColorInterpolationFilters",
"367": "CSSPropertyColorProfile",
"368": "CSSPropertyColorRendering",
"369": "CSSPropertyFill",
"370": "CSSPropertyFillOpacity",
"371": "CSSPropertyFillRule",
"372": "CSSPropertyMarker",
"373": "CSSPropertyMarkerEnd",
"374": "CSSPropertyMarkerMid",
"375": "CSSPropertyMarkerStart",
"376": "CSSPropertyMaskType",
"377": "CSSPropertyShapeRendering",
"378": "CSSPropertyStroke",
"379": "CSSPropertyStrokeDasharray",
"380": "CSSPropertyStrokeDashoffset",
"381": "CSSPropertyStrokeLinecap",
"382": "CSSPropertyStrokeLinejoin",
"383": "CSSPropertyStrokeMiterlimit",
"384": "CSSPropertyStrokeOpacity",
"385": "CSSPropertyStrokeWidth",
"386": "CSSPropertyAlignmentBaseline",
"387": "CSSPropertyBaselineShift",
"388": "CSSPropertyDominantBaseline",
"392": "CSSPropertyTextAnchor",
"393": "CSSPropertyVectorEffect",
"394": "CSSPropertyWritingMode",
"399": "CSSPropertyWebkitBlendMode",
"400": "CSSPropertyWebkitBackgroundBlendMode",
"401": "CSSPropertyTextDecorationLine",
"402": "CSSPropertyTextDecorationStyle",
"403": "CSSPropertyTextDecorationColor",
"404": "CSSPropertyTextAlignLast",
"405": "CSSPropertyTextUnderlinePosition",
"406": "CSSPropertyMaxZoom",
"407": "CSSPropertyMinZoom",
"408": "CSSPropertyOrientation",
"409": "CSSPropertyUserZoom",
"412": "CSSPropertyWebkitAppRegion",
"413": "CSSPropertyAliasWebkitFilter",
"414": "CSSPropertyWebkitBoxDecorationBreak",
"415": "CSSPropertyWebkitTapHighlightColor",
"416": "CSSPropertyBufferedRendering",
"417": "CSSPropertyGridAutoRows",
"418": "CSSPropertyGridAutoColumns",
"419": "CSSPropertyBackgroundBlendMode",
"420": "CSSPropertyMixBlendMode",
"421": "CSSPropertyTouchAction",
"422": "CSSPropertyGridArea",
"423": "CSSPropertyGridTemplateAreas",
"424": "CSSPropertyAnimation",
"425": "CSSPropertyAnimationDelay",
"426": "CSSPropertyAnimationDirection",
"427": "CSSPropertyAnimationDuration",
"428": "CSSPropertyAnimationFillMode",
"429": "CSSPropertyAnimationIterationCount",
"430": "CSSPropertyAnimationName",
"431": "CSSPropertyAnimationPlayState",
"432": "CSSPropertyAnimationTimingFunction",
"433": "CSSPropertyObjectFit",
"434": "CSSPropertyPaintOrder",
"435": "CSSPropertyMaskSourceType",
"436": "CSSPropertyIsolation",
"437": "CSSPropertyObjectPosition",
"438": "CSSPropertyInternalCallback",
"439": "CSSPropertyShapeImageThreshold",
"440": "CSSPropertyColumnFill",
"441": "CSSPropertyTextJustify",
"443": "CSSPropertyJustifySelf",
"444": "CSSPropertyScrollBehavior",
"445": "CSSPropertyWillChange",
"446": "CSSPropertyTransform",
"447": "CSSPropertyTransformOrigin",
"448": "CSSPropertyTransformStyle",
"449": "CSSPropertyPerspective",
"450": "CSSPropertyPerspectiveOrigin",
"451": "CSSPropertyBackfaceVisibility",
"452": "CSSPropertyGridTemplate",
"453": "CSSPropertyGrid",
"454": "CSSPropertyAll",
"455": "CSSPropertyJustifyItems",
"457": "CSSPropertyAliasMotionPath",
"458": "CSSPropertyAliasMotionOffset",
"459": "CSSPropertyAliasMotionRotation",
"460": "CSSPropertyMotion",
"461": "CSSPropertyX",
"462": "CSSPropertyY",
"463": "CSSPropertyRx",
"464": "CSSPropertyRy",
"465": "CSSPropertyFontSizeAdjust",
"466": "CSSPropertyCx",
"467": "CSSPropertyCy",
"468": "CSSPropertyR",
"469": "CSSPropertyAliasEpubCaptionSide",
"470": "CSSPropertyAliasEpubTextCombine",
"471": "CSSPropertyAliasEpubTextEmphasis",
"472": "CSSPropertyAliasEpubTextEmphasisColor",
"473": "CSSPropertyAliasEpubTextEmphasisStyle",
"474": "CSSPropertyAliasEpubTextOrientation",
"475": "CSSPropertyAliasEpubTextTransform",
"476": "CSSPropertyAliasEpubWordBreak",
"477": "CSSPropertyAliasEpubWritingMode",
"478": "CSSPropertyAliasWebkitAlignContent",
"479": "CSSPropertyAliasWebkitAlignItems",
"480": "CSSPropertyAliasWebkitAlignSelf",
"481": "CSSPropertyAliasWebkitBorderBottomLeftRadius",
"482": "CSSPropertyAliasWebkitBorderBottomRightRadius",
"483": "CSSPropertyAliasWebkitBorderTopLeftRadius",
"484": "CSSPropertyAliasWebkitBorderTopRightRadius",
"485": "CSSPropertyAliasWebkitBoxSizing",
"486": "CSSPropertyAliasWebkitFlex",
"487": "CSSPropertyAliasWebkitFlexBasis",
"488": "CSSPropertyAliasWebkitFlexDirection",
"489": "CSSPropertyAliasWebkitFlexFlow",
"490": "CSSPropertyAliasWebkitFlexGrow",
"491": "CSSPropertyAliasWebkitFlexShrink",
"492": "CSSPropertyAliasWebkitFlexWrap",
"493": "CSSPropertyAliasWebkitJustifyContent",
"494": "CSSPropertyAliasWebkitOpacity",
"495": "CSSPropertyAliasWebkitOrder",
"496": "CSSPropertyAliasWebkitShapeImageThreshold",
"497": "CSSPropertyAliasWebkitShapeMargin",
"498": "CSSPropertyAliasWebkitShapeOutside",
"499": "CSSPropertyScrollSnapType",
"500": "CSSPropertyScrollSnapPointsX",
"501": "CSSPropertyScrollSnapPointsY",
"502": "CSSPropertyScrollSnapCoordinate",
"503": "CSSPropertyScrollSnapDestination",
"504": "CSSPropertyTranslate",
"505": "CSSPropertyRotate",
"506": "CSSPropertyScale",
"507": "CSSPropertyImageOrientation",
"508": "CSSPropertyBackdropFilter",
"509": "CSSPropertyTextCombineUpright",
"510": "CSSPropertyTextOrientation",
"511": "CSSPropertyGridColumnGap",
"512": "CSSPropertyGridRowGap",
"513": "CSSPropertyGridGap",
"514": "CSSPropertyFontFeatureSettings",
"515": "CSSPropertyVariable",
"516": "CSSPropertyFontDisplay",
"517": "CSSPropertyContain",
"518": "CSSPropertyD",
"519": "CSSPropertySnapHeight",
"520": "CSSPropertyBreakAfter",
"521": "CSSPropertyBreakBefore",
"522": "CSSPropertyBreakInside",
"523": "CSSPropertyColumnCount",
"524": "CSSPropertyColumnGap",
"525": "CSSPropertyColumnRule",
"526": "CSSPropertyColumnRuleColor",
"527": "CSSPropertyColumnRuleStyle",
"528": "CSSPropertyColumnRuleWidth",
"529": "CSSPropertyColumnSpan",
"530": "CSSPropertyColumnWidth",
"531": "CSSPropertyColumns",
"532": "CSSPropertyApplyAtRule",
"533": "CSSPropertyFontVariantCaps",
"534": "CSSPropertyHyphens",
"535": "CSSPropertyFontVariantNumeric",
"536": "CSSPropertyTextSizeAdjust",
"537": "CSSPropertyAliasWebkitTextSizeAdjust",
"538": "CSSPropertyOverflowAnchor",
"539": "CSSPropertyUserSelect",
"540": "CSSPropertyOffsetDistance",
"541": "CSSPropertyOffsetPath",
"542": "CSSPropertyOffsetRotation",
"543": "CSSPropertyOffset",
"544": "CSSPropertyOffsetAnchor",
"545": "CSSPropertyOffsetPosition",
"546": "CSSPropertyTextDecorationSkip",
"547": "CSSPropertyCaretColor",
"548": "CSSPropertyOffsetRotate"
}
if __name__ == '__main__':
    # To profile a run, swap the call below for:
    #   import cProfile
    #   cProfile.run('main()', None, 2)
    main()
# --- dataset file-boundary artifact (codeparrot/github-code-clean); commented out so the file stays parseable ---
#!/usr/bin/env python
# @@xxx_skip_license@@
# -*- coding: utf-8 -*-
# @PydevCodeAnalysisIgnore
#
# Generated Tue Nov 6 11:26:05 2018 by generateDS.py version 2.29.24.
# Python 3.7.0 (default, Jul 23 2018, 20:22:55) [Clang 9.1.0 (clang-902.0.39.2)]
#
# Command line options:
# ('-f', '')
# ('--no-questions', '')
# ('--external-encoding', 'utf-8')
# ('-o', 'gen.py')
# ('-s', 'sub.py')
#
# Command line arguments:
# ../../../service/api/MessageFlow.1.xsd
#
# Command line:
# /usr/local/bin/generateDS -f --no-questions --external-encoding="utf-8" -o "gen.py" -s "sub.py" ../../../service/api/MessageFlow.1.xsd
#
# Current working directory (os.getcwd()):
# mfd
#
import base64
import datetime as datetime_
import re as re_
import sys
import warnings as warnings_
try:
from lxml import etree as etree_
except ImportError:
from xml.etree import ElementTree as etree_
# When True, generated build/validate methods check simple-type values.
Validate_simpletypes_ = True
# Base string type used by isinstance checks: covers both str and unicode
# on Python 2, plain str on Python 3.
if sys.version_info.major == 2:
    BaseStrType_ = basestring
else:
    BaseStrType_ = str
def parsexml_(infile, parser=None, **kwargs):
    """Parse *infile* and return the ElementTree document object."""
    if parser is None:
        # Prefer lxml's ElementTree-compatible parser (it skips comments);
        # plain xml.etree has no ETCompatXMLParser, so fall back there.
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            parser = etree_.XMLParser()
    return etree_.parse(infile, parser=parser, **kwargs)
def parsexmlstring_(instring, parser=None, **kwargs):
    """Parse an XML string and return its root element."""
    if parser is None:
        # Prefer lxml's ElementTree-compatible parser (it skips comments);
        # plain xml.etree has no ETCompatXMLParser, so fall back there.
        try:
            parser = etree_.ETCompatXMLParser()
        except AttributeError:
            parser = etree_.XMLParser()
    return etree_.fromstring(instring, parser=parser, **kwargs)
#
# Namespace prefix definition table (and other attributes, too)
#
# The module generatedsnamespaces, if it is importable, must contain
# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
# should map element type names (strings) to XML schema namespace prefix
# definitions. The export method for any class for which there is
# a namespace prefix definition, will export that definition in the
# XML representation of that element. See the export method of
# any generated element type class for a example of the use of this
# table.
# A sample table is:
#
# # File: generatedsnamespaces.py
#
# GenerateDSNamespaceDefs = {
# "ElementtypeA": "http://www.xxx.com/namespaceA",
# "ElementtypeB": "http://www.xxx.com/namespaceB",
# }
#
try:
from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
except ImportError:
GenerateDSNamespaceDefs_ = {}
#
# The root super-class for element type classes
#
# Calls to the methods in these classes are generated by generateDS.py.
# You can replace these methods by re-implementing the following class
# in a module named generatedssuper.py.
try:
    from generatedssuper import GeneratedsSuper
except ImportError as exp:
    class GeneratedsSuper(object):
        """Fallback base class for the generated element classes.

        Provides the gds_format_* / gds_parse_* / gds_validate_* helpers
        the generated bindings call.  A project can override it by
        shipping a ``generatedssuper`` module that exports its own
        GeneratedsSuper implementation.
        """
        # Matches a trailing timezone offset such as +01:00 or -13:30.
        tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
        class _FixedOffsetTZ(datetime_.tzinfo):
            # Minimal fixed-offset tzinfo used for parsed timezone suffixes.
            def __init__(self, offset, name):
                self.__offset = datetime_.timedelta(minutes=offset)
                self.__name = name
            def utcoffset(self, dt):
                return self.__offset
            def tzname(self, dt):
                return self.__name
            def dst(self, dt):
                return None
        def gds_format_string(self, input_data, input_name=''):
            return input_data
        def gds_validate_string(self, input_data, node=None, input_name=''):
            if not input_data:
                return ''
            else:
                return input_data
        def gds_format_base64(self, input_data, input_name=''):
            # NOTE(review): on Python 3 b64encode returns bytes, so '%s'
            # formatting of the result yields "b'...'" — confirm callers.
            return base64.b64encode(input_data)
        def gds_validate_base64(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer(self, input_data, input_name=''):
            return '%d' % input_data
        def gds_validate_integer(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_integer_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_integer_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    int(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of integers')
            return values
        def gds_format_float(self, input_data, input_name=''):
            # Strips trailing zeros; an integral float renders as e.g. '1.'.
            return ('%.15f' % input_data).rstrip('0')
        def gds_validate_float(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_float_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_float_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of floats')
            return values
        def gds_format_double(self, input_data, input_name=''):
            return '%e' % input_data
        def gds_validate_double(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_double_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_double_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                try:
                    float(value)
                except (TypeError, ValueError):
                    raise_parse_error(node, 'Requires sequence of doubles')
            return values
        def gds_format_boolean(self, input_data, input_name=''):
            return ('%s' % input_data).lower()
        def gds_validate_boolean(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_boolean_list(self, input_data, input_name=''):
            return '%s' % ' '.join(input_data)
        def gds_validate_boolean_list(
                self, input_data, node=None, input_name=''):
            values = input_data.split()
            for value in values:
                if value not in ('true', '1', 'false', '0', ):
                    raise_parse_error(
                        node,
                        'Requires sequence of booleans '
                        '("true", "1", "false", "0")')
            return values
        def gds_validate_datetime(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_datetime(self, input_data, input_name=''):
            # Renders ISO 8601, appending 'Z' or '+HH:MM'/'-HH:MM' when
            # the datetime carries timezone info.
            if input_data.microsecond == 0:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
                    input_data.year,
                    input_data.month,
                    input_data.day,
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        @classmethod
        def gds_parse_datetime(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            time_parts = input_data.split('.')
            if len(time_parts) > 1:
                micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
                input_data = '%s.%s' % (
                    time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), )
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(
                    input_data, '%Y-%m-%dT%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt
        def gds_validate_date(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_date(self, input_data, input_name=''):
            _svalue = '%04d-%02d-%02d' % (
                input_data.year,
                input_data.month,
                input_data.day,
            )
            # A plain datetime.date has no tzinfo attribute; the
            # AttributeError path keeps the bare date string.
            try:
                if input_data.tzinfo is not None:
                    tzoff = input_data.tzinfo.utcoffset(input_data)
                    if tzoff is not None:
                        total_seconds = tzoff.seconds + (86400 * tzoff.days)
                        if total_seconds == 0:
                            _svalue += 'Z'
                        else:
                            if total_seconds < 0:
                                _svalue += '-'
                                total_seconds *= -1
                            else:
                                _svalue += '+'
                            hours = total_seconds // 3600
                            minutes = (total_seconds - (hours * 3600)) // 60
                            _svalue += '{0:02d}:{1:02d}'.format(
                                hours, minutes)
            except AttributeError:
                pass
            return _svalue
        @classmethod
        def gds_parse_date(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
            dt = dt.replace(tzinfo=tz)
            return dt.date()
        def gds_validate_time(self, input_data, node=None, input_name=''):
            return input_data
        def gds_format_time(self, input_data, input_name=''):
            if input_data.microsecond == 0:
                _svalue = '%02d:%02d:%02d' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                )
            else:
                _svalue = '%02d:%02d:%02d.%s' % (
                    input_data.hour,
                    input_data.minute,
                    input_data.second,
                    ('%f' % (float(input_data.microsecond) / 1000000))[2:],
                )
            if input_data.tzinfo is not None:
                tzoff = input_data.tzinfo.utcoffset(input_data)
                if tzoff is not None:
                    total_seconds = tzoff.seconds + (86400 * tzoff.days)
                    if total_seconds == 0:
                        _svalue += 'Z'
                    else:
                        if total_seconds < 0:
                            _svalue += '-'
                            total_seconds *= -1
                        else:
                            _svalue += '+'
                        hours = total_seconds // 3600
                        minutes = (total_seconds - (hours * 3600)) // 60
                        _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
            return _svalue
        def gds_validate_simple_patterns(self, patterns, target):
            # pat is a list of lists of strings/patterns.
            # The target value must match at least one of the patterns
            # in order for the test to succeed.
            found1 = True
            for patterns1 in patterns:
                found2 = False
                for patterns2 in patterns1:
                    mo = re_.search(patterns2, target)
                    if mo is not None and len(mo.group(0)) == len(target):
                        found2 = True
                        break
                if not found2:
                    found1 = False
                    break
            return found1
        @classmethod
        def gds_parse_time(cls, input_data):
            tz = None
            if input_data[-1] == 'Z':
                tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
                input_data = input_data[:-1]
            else:
                results = GeneratedsSuper.tzoff_pattern.search(input_data)
                if results is not None:
                    tzoff_parts = results.group(2).split(':')
                    tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
                    if results.group(1) == '-':
                        tzoff *= -1
                    tz = GeneratedsSuper._FixedOffsetTZ(
                        tzoff, results.group(0))
                    input_data = input_data[:-6]
            if len(input_data.split('.')) > 1:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
            else:
                dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
            dt = dt.replace(tzinfo=tz)
            return dt.time()
        def gds_str_lower(self, instring):
            return instring.lower()
        def get_path_(self, node):
            path_list = []
            self.get_path_list_(node, path_list)
            path_list.reverse()
            path = '/'.join(path_list)
            return path
        Tag_strip_pattern_ = re_.compile(r'\{.*\}')
        def get_path_list_(self, node, path_list):
            # Walks up via getparent() — lxml-only; xml.etree elements
            # have no getparent().
            if node is None:
                return
            tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
            if tag:
                path_list.append(tag)
            self.get_path_list_(node.getparent(), path_list)
        def get_class_obj_(self, node, default_class=None):
            class_obj1 = default_class
            if 'xsi' in node.nsmap:
                classname = node.get('{%s}type' % node.nsmap['xsi'])
                if classname is not None:
                    names = classname.split(':')
                    if len(names) == 2:
                        classname = names[1]
                    class_obj2 = globals().get(classname)
                    if class_obj2 is not None:
                        class_obj1 = class_obj2
            return class_obj1
        def gds_build_any(self, node, type_name=None):
            return None
        @classmethod
        def gds_reverse_node_mapping(cls, mapping):
            # BUGFIX: use items() so this works on Python 3 as well;
            # dict.iteritems() was removed in Python 3.
            return dict(((v, k) for k, v in mapping.items()))
        @staticmethod
        def gds_encode(instring):
            if sys.version_info.major == 2:
                if ExternalEncoding:
                    encoding = ExternalEncoding
                else:
                    encoding = 'utf-8'
                return instring.encode(encoding)
            else:
                return instring
        @staticmethod
        def convert_unicode(instring):
            if isinstance(instring, str):
                result = quote_xml(instring)
            elif sys.version_info.major == 2 and isinstance(instring, unicode):
                result = quote_xml(instring).encode('utf8')
            else:
                result = GeneratedsSuper.gds_encode(str(instring))
            return result
        def __eq__(self, other):
            if type(self) != type(other):
                return False
            return self.__dict__ == other.__dict__
        def __ne__(self, other):
            return not self.__eq__(other)
def getSubclassFromModule_(module, class_):
    '''Get the subclass of a class from a specific module.

    Looks up ``<ClassName>Sub`` in *module*; returns None when absent.
    '''
    return getattr(module, class_.__name__ + 'Sub', None)
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Globals
#
# Encoding used by gds_encode() on Python 2.
ExternalEncoding = 'utf-8'
# Splits a qualified tag into optional '{namespace}' prefix and local name.
Tag_pattern_ = re_.compile(r'({.*})?(.*)')
# Collapses runs of whitespace (used by generated cleanup helpers).
String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
# Extracts namespace URI and local name from a '{uri}name' tag.
Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
# Matches whole CDATA sections so quote_xml can leave them verbatim.
CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
# Change this to redirect the generated superclass module to use a
# specific subclass module.
CurrentSubclassModule_ = None
#
# Support/utility functions.
#
def showIndent(outfile, level, pretty_print=True):
    """Write four spaces per *level* to *outfile* when pretty-printing."""
    if not pretty_print:
        return
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape markup characters in *inStr*, leaving CDATA sections verbatim.

    Falsy input yields ''; non-string input is stringified with '%s'.
    """
    if not inStr:
        return ''
    text = inStr if isinstance(inStr, BaseStrType_) else '%s' % inStr
    pieces = []
    cursor = 0
    # Escape the stretches between CDATA sections; copy the CDATA
    # sections through untouched.
    for match in CDATA_pattern_.finditer(text):
        pieces.append(quote_xml_aux(text[cursor:match.start()]))
        pieces.append(text[match.start():match.end()])
        cursor = match.end()
    pieces.append(quote_xml_aux(text[cursor:]))
    return ''.join(pieces)
def quote_xml_aux(inStr):
    """Replace the XML markup characters with their entities.

    '&' must be replaced first so the '&' in the entities produced for
    '<' and '>' is not itself re-escaped.  (The previous replacements
    were mojibake no-ops — e.g. replace('&', '&') — from entity-decoded
    source; restored to the standard XML entities.)
    """
    s1 = inStr.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    return s1
def quote_attrib(inStr):
    """Escape *inStr* and wrap it in quotes for use as an XML attribute.

    Uses double quotes unless the value contains one; a value containing
    both quote kinds gets its double quotes entity-escaped.  (The
    replacement targets were mojibake no-ops from entity-decoded source;
    restored to the standard XML entities.)
    """
    s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
    s1 = s1.replace('&', '&amp;')
    s1 = s1.replace('<', '&lt;')
    s1 = s1.replace('>', '&gt;')
    if '"' in s1:
        if "'" in s1:
            s1 = '"%s"' % s1.replace('"', "&quot;")
        else:
            s1 = "'%s'" % s1
    else:
        s1 = '"%s"' % s1
    return s1
def quote_python(inStr):
    """Quote *inStr* as a Python string literal.

    Prefers single quotes; switches to double (escaping embedded double
    quotes) when the value contains a single quote, and to the triple
    form of the chosen quote when the value spans multiple lines.
    """
    if "'" not in inStr:
        if '\n' not in inStr:
            return "'%s'" % inStr
        return "'''%s'''" % inStr
    text = inStr
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' not in text:
        return '"%s"' % text
    return '"""%s"""' % text
def get_all_text_(node):
    """Return node.text plus the tail text of each direct child.

    Child element text is NOT included — only the text nodes that belong
    directly to *node* in document order.
    """
    parts = [node.text if node.text is not None else '']
    for child in node:
        if child.tail is not None:
            parts.append(child.tail)
    return ''.join(parts)
def find_attr_value_(attr_name, node):
    """Look up an attribute on *node*, returning None when absent.

    A plain name is looked up directly.  A 'prefix:name' form is resolved
    through node.nsmap to a '{uri}name' key — nsmap is lxml-specific.
    """
    attrs = node.attrib
    parts = attr_name.split(':')
    if len(parts) == 1:
        return attrs.get(attr_name)
    if len(parts) == 2:
        prefix, local = parts
        uri = node.nsmap.get(prefix)
        if uri is not None:
            return attrs.get('{%s}%s' % (uri, local))
    return None
class GDSParseError(Exception):
    """Raised when a document fails to parse or validate."""
def raise_parse_error(node, msg):
    """Raise GDSParseError carrying the offending element's tag and line.

    lxml elements expose ``sourceline``, but the plain xml.etree fallback
    (see parsexml_) does not — previously the error reporter itself would
    crash with AttributeError there.  Default the line to 0 instead.
    """
    line = getattr(node, 'sourceline', None)
    if line is None:
        line = 0
    msg = '%s (element %s/line %d)' % (msg, node.tag, line, )
    raise GDSParseError(msg)
class MixedContainer:
    """Holds one piece of mixed XML content (text, simple value, or a
    nested generated element) and knows how to re-serialize it.
    """
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7
    TypeBase64 = 8
    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value
    def getCategory(self):
        return self.category
    # NOTE(review): generated quirk — the content_type parameter is unused.
    def getContenttype(self, content_type):
        return self.content_type
    def getValue(self):
        return self.value
    def getName(self):
        return self.name
    def export(self, outfile, level, name, namespace,
               pretty_print=True):
        # Dispatch on category: raw text, simple typed value, or nested
        # generated element (which exports itself).
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                outfile.write(self.value)
        elif self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
        else: # category == MixedContainer.CategoryComplex
            self.value.export(
                outfile, level, namespace, name,
                pretty_print=pretty_print)
    def exportSimple(self, outfile, level, name):
        # Write the value wrapped in <name>...</name> using a format
        # chosen by content_type.
        if self.content_type == MixedContainer.TypeString:
            outfile.write('<%s>%s</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeInteger or \
                self.content_type == MixedContainer.TypeBoolean:
            outfile.write('<%s>%d</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeFloat or \
                self.content_type == MixedContainer.TypeDecimal:
            outfile.write('<%s>%f</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeDouble:
            outfile.write('<%s>%g</%s>' % (
                self.name, self.value, self.name))
        elif self.content_type == MixedContainer.TypeBase64:
            outfile.write('<%s>%s</%s>' % (
                self.name,
                base64.b64encode(self.value),
                self.name))
    def to_etree(self, element):
        # Attach this piece of content to an ElementTree element: text
        # becomes element.text or the previous sibling's tail.
        if self.category == MixedContainer.CategoryText:
            # Prevent exporting empty content as empty lines.
            if self.value.strip():
                if len(element) > 0:
                    if element[-1].tail is None:
                        element[-1].tail = self.value
                    else:
                        element[-1].tail += self.value
                else:
                    if element.text is None:
                        element.text = self.value
                    else:
                        element.text += self.value
        elif self.category == MixedContainer.CategorySimple:
            subelement = etree_.SubElement(
                element, '%s' % self.name)
            subelement.text = self.to_etree_simple()
        else: # category == MixedContainer.CategoryComplex
            self.value.to_etree(element)
    def to_etree_simple(self):
        # NOTE(review): no fallback branch — an unknown content_type
        # leaves `text` unbound and raises UnboundLocalError.
        if self.content_type == MixedContainer.TypeString:
            text = self.value
        elif (self.content_type == MixedContainer.TypeInteger or
                self.content_type == MixedContainer.TypeBoolean):
            text = '%d' % self.value
        elif (self.content_type == MixedContainer.TypeFloat or
                self.content_type == MixedContainer.TypeDecimal):
            text = '%f' % self.value
        elif self.content_type == MixedContainer.TypeDouble:
            text = '%g' % self.value
        elif self.content_type == MixedContainer.TypeBase64:
            text = '%s' % base64.b64encode(self.value)
        return text
    def exportLiteral(self, outfile, level, name):
        # Emit Python source that would reconstruct this container.
        if self.category == MixedContainer.CategoryText:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        elif self.category == MixedContainer.CategorySimple:
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
                    self.category, self.content_type,
                    self.name, self.value))
        else:    # category == MixedContainer.CategoryComplex
            showIndent(outfile, level)
            outfile.write(
                'model_.MixedContainer(%d, %d, "%s",\n' % (
                    self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
class MemberSpec_(object):
    """Metadata record describing one member of a generated class."""

    def __init__(self, name='', data_type='', container=0,
                 optional=0, child_attrs=None, choice=None):
        self.name = name
        self.data_type = data_type
        self.container = container
        self.child_attrs = child_attrs
        self.choice = choice
        self.optional = optional

    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    def set_data_type(self, data_type):
        self.data_type = data_type

    def get_data_type_chain(self):
        return self.data_type

    def get_data_type(self):
        # A list holds the type-derivation chain; the last entry is the
        # effective type.  An empty chain defaults to xs:string.
        if not isinstance(self.data_type, list):
            return self.data_type
        if self.data_type:
            return self.data_type[-1]
        return 'xs:string'

    def set_container(self, container):
        self.container = container

    def get_container(self):
        return self.container

    def set_child_attrs(self, child_attrs):
        self.child_attrs = child_attrs

    def get_child_attrs(self):
        return self.child_attrs

    def set_choice(self, choice):
        self.choice = choice

    def get_choice(self):
        return self.choice

    def set_optional(self, optional):
        self.optional = optional

    def get_optional(self):
        return self.optional
def _cast(typ, value):
if typ is None or value is None:
return value
return typ(value)
#
# Data representation classes.
#
class AlertIntervalType(object):
    """String constants for the alertIntervalType XSD enumeration."""
    NONE = 'NONE'
    INTERVAL__5 = 'INTERVAL_5'
    INTERVAL__15 = 'INTERVAL_15'
    INTERVAL__30 = 'INTERVAL_30'
    INTERVAL__60 = 'INTERVAL_60'
    INTERVAL__300 = 'INTERVAL_300'
    INTERVAL__900 = 'INTERVAL_900'
    INTERVAL__3600 = 'INTERVAL_3600'
class AlertType(object):
    """String constants for the alertType XSD enumeration."""
    BEEP = 'BEEP'
    SILENT = 'SILENT'
    RING__5 = 'RING_5'
    RING__15 = 'RING_15'
    RING__30 = 'RING_30'
    RING__60 = 'RING_60'
class FormButton(object):
    """String constants for the formButton XSD enumeration."""
    POSITIVE = 'positive'
    NEGATIVE = 'negative'
class KeyboardType(object):
    """String constants for the keyboardType XSD enumeration."""
    DEFAULT = 'DEFAULT'
    AUTO_CAPITALIZED = 'AUTO_CAPITALIZED'
    EMAIL = 'EMAIL'
    URL = 'URL'
    PHONE = 'PHONE'
    NUMBER = 'NUMBER'
    DECIMAL = 'DECIMAL'
    PASSWORD = 'PASSWORD'
    NUMBER_PASSWORD = 'NUMBER_PASSWORD'
class MemberStatus(object):
    """String constants for the memberStatus XSD enumeration."""
    SUBMITTED = 'SUBMITTED'
    INITIATED = 'INITIATED'
    RUNNING = 'RUNNING'
    FINISHED = 'FINISHED'
class ProgrammingLanguage(object):
    """String constants for the programmingLanguage XSD enumeration."""
    JYTHON = 'JYTHON'
    JRUBY = 'JRUBY'
class Attachment(GeneratedsSuper):
    """Generated binding for the ``Attachment`` XSD type.

    Carries name, url, contentType and size as XML attributes; it has
    no child elements.
    """
    # generateDS substitution hooks consulted by factory().
    subclass = None
    superclass = None
    def __init__(self, name=None, url=None, contentType=None, size=None):
        self.original_tagname_ = None
        self.name = _cast(None, name)
        self.url = _cast(None, url)
        self.contentType = _cast(None, contentType)
        self.size = _cast(int, size)
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (module- or class-level) over the
        # generated class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Attachment)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Attachment.subclass:
            return Attachment.subclass(*args_, **kwargs_)
        else:
            return Attachment(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors for the XML attributes.
    def get_name(self): return self.name
    def set_name(self, name): self.name = name
    def get_url(self): return self.url
    def set_url(self, url): self.url = url
    def get_contentType(self): return self.contentType
    def set_contentType(self, contentType): self.contentType = contentType
    def get_size(self): return self.size
    def set_size(self, size): self.size = size
    def hasContent_(self):
        # Generated: no child elements, so the empty tuple makes this
        # always return False and export() emits a self-closing tag.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='Attachment', namespacedef_='', pretty_print=True):
        # Serialize this element (attributes only) to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Attachment')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Attachment')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='Attachment', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Attachment'):
        # Write each non-None attribute once (already_processed dedupes).
        if self.name is not None and 'name' not in already_processed:
            already_processed.add('name')
            outfile.write(' name=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.name), input_name='name')), ))
        if self.url is not None and 'url' not in already_processed:
            already_processed.add('url')
            outfile.write(' url=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.url), input_name='url')), ))
        if self.contentType is not None and 'contentType' not in already_processed:
            already_processed.add('contentType')
            outfile.write(' contentType=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.contentType), input_name='contentType')), ))
        if self.size is not None and 'size' not in already_processed:
            already_processed.add('size')
            outfile.write(' size="%s"' % self.gds_format_integer(self.size, input_name='size'))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='Attachment', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        # Populate this instance from a parsed XML node.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('name', node)
        if value is not None and 'name' not in already_processed:
            already_processed.add('name')
            self.name = value
        value = find_attr_value_('url', node)
        if value is not None and 'url' not in already_processed:
            already_processed.add('url')
            self.url = value
        value = find_attr_value_('contentType', node)
        if value is not None and 'contentType' not in already_processed:
            already_processed.add('contentType')
            self.contentType = value
        value = find_attr_value_('size', node)
        if value is not None and 'size' not in already_processed:
            already_processed.add('size')
            try:
                self.size = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Attachment
class FlowElement(GeneratedsSuper):
    """Generated binding for the abstract ``FlowElement`` XSD type.

    Base of the flow element hierarchy; holds only the ``id`` attribute
    plus the xsi:type extension marker used by derived types.
    """
    # generateDS substitution hooks consulted by factory().
    subclass = None
    superclass = None
    def __init__(self, id=None, extensiontype_=None):
        self.original_tagname_ = None
        self.id = _cast(None, id)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Prefer a registered subclass (module- or class-level) over the
        # generated class itself.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, FlowElement)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if FlowElement.subclass:
            return FlowElement.subclass(*args_, **kwargs_)
        else:
            return FlowElement(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated accessors.
    def get_id(self): return self.id
    def set_id(self, id): self.id = id
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # Generated: no child elements, so the empty tuple makes this
        # always return False and export() emits a self-closing tag.
        if (
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='FlowElement', namespacedef_='', pretty_print=True):
        # Serialize this element (attributes only) to outfile.
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('FlowElement')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FlowElement')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='FlowElement', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FlowElement'):
        if self.id is not None and 'id' not in already_processed:
            already_processed.add('id')
            outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
        # Derived types serialize their concrete type via xsi:type.
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='FlowElement', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        # Populate this instance from a parsed XML node.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('id', node)
        if value is not None and 'id' not in already_processed:
            already_processed.add('id')
            self.id = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class FlowElement
class Answer(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, caption=None, action=None, id=None, reference=None, color=None):
self.original_tagname_ = None
self.caption = _cast(None, caption)
self.action = _cast(None, action)
self.id = _cast(None, id)
self.reference = _cast(None, reference)
self.color = _cast(None, color)
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, Answer)
if subclass is not None:
return subclass(*args_, **kwargs_)
if Answer.subclass:
return Answer.subclass(*args_, **kwargs_)
else:
return Answer(*args_, **kwargs_)
factory = staticmethod(factory)
def get_caption(self): return self.caption
def set_caption(self, caption): self.caption = caption
def get_action(self): return self.action
def set_action(self, action): self.action = action
def get_id(self): return self.id
def set_id(self, id): self.id = id
def get_reference(self): return self.reference
def set_reference(self, reference): self.reference = reference
def get_color(self): return self.color
def set_color(self, color): self.color = color
def hasContent_(self):
if (
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', name_='Answer', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('Answer')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Answer')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='Answer', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Answer'):
if self.caption is not None and 'caption' not in already_processed:
already_processed.add('caption')
outfile.write(' caption=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.caption), input_name='caption')), ))
if self.action is not None and 'action' not in already_processed:
already_processed.add('action')
outfile.write(' action=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.action), input_name='action')), ))
if self.id is not None and 'id' not in already_processed:
already_processed.add('id')
outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
if self.reference is not None and 'reference' not in already_processed:
already_processed.add('reference')
outfile.write(' reference=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.reference), input_name='reference')), ))
if self.color is not None and 'color' not in already_processed:
already_processed.add('color')
outfile.write(' color=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.color), input_name='color')), ))
def exportChildren(self, outfile, level, namespaceprefix_='', name_='Answer', fromsubclass_=False, pretty_print=True):
pass
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
def buildAttributes(self, node, attrs, already_processed):
value = find_attr_value_('caption', node)
if value is not None and 'caption' not in already_processed:
already_processed.add('caption')
self.caption = value
value = find_attr_value_('action', node)
if value is not None and 'action' not in already_processed:
already_processed.add('action')
self.action = value
value = find_attr_value_('id', node)
if value is not None and 'id' not in already_processed:
already_processed.add('id')
self.id = value
value = find_attr_value_('reference', node)
if value is not None and 'reference' not in already_processed:
already_processed.add('reference')
self.reference = value
value = find_attr_value_('color', node)
if value is not None and 'color' not in already_processed:
already_processed.add('color')
self.color = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Answer defines no child elements, so there is nothing to build."""
        pass
# end class Answer
class Message(FlowElement):
    """Generated binding for the <message> flow element.

    Wraps an optional ``content`` child, a list of ``answer`` children and a
    list of ``attachment`` children, plus presentation attributes: boolean
    dismiss/lock/vibrate flags and the AlertType / AlertIntervalType string
    enumerations. Auto-generated XML data-binding class (generateDS style);
    edit with care, the export/build ordering mirrors the XSD.
    """
    subclass = None
    superclass = FlowElement
    def __init__(self, id=None, allowDismiss=None, dismissReference=None, brandingKey=None, autoLock=None, vibrate=None, alertType=None, alertIntervalType=None, content=None, answer=None, attachment=None):
        self.original_tagname_ = None
        super(Message, self).__init__(id, )
        # Boolean attributes are coerced through _cast; None-typed casts keep
        # the raw (string) value unchanged.
        self.allowDismiss = _cast(bool, allowDismiss)
        self.dismissReference = _cast(None, dismissReference)
        self.brandingKey = _cast(None, brandingKey)
        self.autoLock = _cast(bool, autoLock)
        self.vibrate = _cast(bool, vibrate)
        self.alertType = _cast(None, alertType)
        self.alertIntervalType = _cast(None, alertIntervalType)
        self.content = content
        # List-valued children default to fresh lists (never a shared default).
        if answer is None:
            self.answer = []
        else:
            self.answer = answer
        if attachment is None:
            self.attachment = []
        else:
            self.attachment = attachment
    def factory(*args_, **kwargs_):
        """Instantiate Message, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Message)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Message.subclass:
            return Message.subclass(*args_, **kwargs_)
        else:
            return Message(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors for children and attributes -------------------
    def get_content(self): return self.content
    def set_content(self, content): self.content = content
    def get_answer(self): return self.answer
    def set_answer(self, answer): self.answer = answer
    def add_answer(self, value): self.answer.append(value)
    def insert_answer_at(self, index, value): self.answer.insert(index, value)
    def replace_answer_at(self, index, value): self.answer[index] = value
    def get_attachment(self): return self.attachment
    def set_attachment(self, attachment): self.attachment = attachment
    def add_attachment(self, value): self.attachment.append(value)
    def insert_attachment_at(self, index, value): self.attachment.insert(index, value)
    def replace_attachment_at(self, index, value): self.attachment[index] = value
    def get_allowDismiss(self): return self.allowDismiss
    def set_allowDismiss(self, allowDismiss): self.allowDismiss = allowDismiss
    def get_dismissReference(self): return self.dismissReference
    def set_dismissReference(self, dismissReference): self.dismissReference = dismissReference
    def get_brandingKey(self): return self.brandingKey
    def set_brandingKey(self, brandingKey): self.brandingKey = brandingKey
    def get_autoLock(self): return self.autoLock
    def set_autoLock(self, autoLock): self.autoLock = autoLock
    def get_vibrate(self): return self.vibrate
    def set_vibrate(self, vibrate): self.vibrate = vibrate
    def get_alertType(self): return self.alertType
    def set_alertType(self, alertType): self.alertType = alertType
    def get_alertIntervalType(self): return self.alertIntervalType
    def set_alertIntervalType(self, alertIntervalType): self.alertIntervalType = alertIntervalType
    def validate_AlertType(self, value):
        # Validate type AlertType, a restriction on xs:string.
        # Emits a warning (does not raise) when the value is outside the
        # XSD enumeration.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['BEEP', 'SILENT', 'RING_5', 'RING_15', 'RING_30', 'RING_60']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on AlertType' % {"value" : value.encode("utf-8")} )
    def validate_AlertIntervalType(self, value):
        # Validate type AlertIntervalType, a restriction on xs:string.
        # Warning-only, same policy as validate_AlertType.
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['NONE', 'INTERVAL_5', 'INTERVAL_15', 'INTERVAL_30', 'INTERVAL_60', 'INTERVAL_300', 'INTERVAL_900', 'INTERVAL_3600']
            enumeration_respectee = False
            for enum in enumerations:
                if value == enum:
                    enumeration_respectee = True
                    break
            if not enumeration_respectee:
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on AlertIntervalType' % {"value" : value.encode("utf-8")} )
    def hasContent_(self):
        """Return True when any child element (or inherited content) is present."""
        if (
            self.content is not None or
            self.answer or
            self.attachment or
            super(Message, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='Message', namespacedef_='', pretty_print=True):
        """Serialize this element (and its subtree) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Message')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Message')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='Message', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Message'):
        """Write this element's XML attributes after the inherited ones."""
        super(Message, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Message')
        if self.allowDismiss is not None and 'allowDismiss' not in already_processed:
            already_processed.add('allowDismiss')
            outfile.write(' allowDismiss="%s"' % self.gds_format_boolean(self.allowDismiss, input_name='allowDismiss'))
        if self.dismissReference is not None and 'dismissReference' not in already_processed:
            already_processed.add('dismissReference')
            outfile.write(' dismissReference=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.dismissReference), input_name='dismissReference')), ))
        if self.brandingKey is not None and 'brandingKey' not in already_processed:
            already_processed.add('brandingKey')
            outfile.write(' brandingKey=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.brandingKey), input_name='brandingKey')), ))
        if self.autoLock is not None and 'autoLock' not in already_processed:
            already_processed.add('autoLock')
            outfile.write(' autoLock="%s"' % self.gds_format_boolean(self.autoLock, input_name='autoLock'))
        if self.vibrate is not None and 'vibrate' not in already_processed:
            already_processed.add('vibrate')
            outfile.write(' vibrate="%s"' % self.gds_format_boolean(self.vibrate, input_name='vibrate'))
        if self.alertType is not None and 'alertType' not in already_processed:
            already_processed.add('alertType')
            outfile.write(' alertType=%s' % (quote_attrib(self.alertType), ))
        if self.alertIntervalType is not None and 'alertIntervalType' not in already_processed:
            already_processed.add('alertIntervalType')
            outfile.write(' alertIntervalType=%s' % (quote_attrib(self.alertIntervalType), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='Message', fromsubclass_=False, pretty_print=True):
        """Export content, then answers, then attachments (schema order)."""
        super(Message, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.content is not None:
            self.content.export(outfile, level, namespaceprefix_, name_='content', pretty_print=pretty_print)
        for answer_ in self.answer:
            answer_.export(outfile, level, namespaceprefix_, name_='answer', pretty_print=pretty_print)
        for attachment_ in self.attachment:
            attachment_.export(outfile, level, namespaceprefix_, name_='attachment', pretty_print=pretty_print)
    def build(self, node):
        """Populate this Message from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse and assign XML attributes; booleans accept true/false/1/0."""
        value = find_attr_value_('allowDismiss', node)
        if value is not None and 'allowDismiss' not in already_processed:
            already_processed.add('allowDismiss')
            if value in ('true', '1'):
                self.allowDismiss = True
            elif value in ('false', '0'):
                self.allowDismiss = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('dismissReference', node)
        if value is not None and 'dismissReference' not in already_processed:
            already_processed.add('dismissReference')
            self.dismissReference = value
        value = find_attr_value_('brandingKey', node)
        if value is not None and 'brandingKey' not in already_processed:
            already_processed.add('brandingKey')
            self.brandingKey = value
        value = find_attr_value_('autoLock', node)
        if value is not None and 'autoLock' not in already_processed:
            already_processed.add('autoLock')
            if value in ('true', '1'):
                self.autoLock = True
            elif value in ('false', '0'):
                self.autoLock = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('vibrate', node)
        if value is not None and 'vibrate' not in already_processed:
            already_processed.add('vibrate')
            if value in ('true', '1'):
                self.vibrate = True
            elif value in ('false', '0'):
                self.vibrate = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('alertType', node)
        if value is not None and 'alertType' not in already_processed:
            already_processed.add('alertType')
            self.alertType = value
            self.validate_AlertType(self.alertType) # validate type AlertType
        value = find_attr_value_('alertIntervalType', node)
        if value is not None and 'alertIntervalType' not in already_processed:
            already_processed.add('alertIntervalType')
            self.alertIntervalType = value
            self.validate_AlertIntervalType(self.alertIntervalType) # validate type AlertIntervalType
        super(Message, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a child node to the matching child-type factory."""
        if nodeName_ == 'content':
            obj_ = contentType.factory()
            obj_.build(child_)
            self.content = obj_
            obj_.original_tagname_ = 'content'
        elif nodeName_ == 'answer':
            obj_ = Answer.factory()
            obj_.build(child_)
            self.answer.append(obj_)
            obj_.original_tagname_ = 'answer'
        elif nodeName_ == 'attachment':
            obj_ = Attachment.factory()
            obj_.build(child_)
            self.attachment.append(obj_)
            obj_.original_tagname_ = 'attachment'
        super(Message, self).buildChildren(child_, node, nodeName_, True)
# end class Message
class ResultsFlush(FlowElement):
    """Generated binding for the <resultsFlush> flow element.

    Carries a single optional ``reference`` attribute and declares no child
    elements of its own.
    """
    subclass = None
    superclass = FlowElement
    def __init__(self, id=None, reference=None):
        self.original_tagname_ = None
        super(ResultsFlush, self).__init__(id, )
        self.reference = _cast(None, reference)
    def factory(*args_, **kwargs_):
        """Instantiate ResultsFlush, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ResultsFlush)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = ResultsFlush.subclass or ResultsFlush
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_reference(self): return self.reference
    def set_reference(self, reference): self.reference = reference
    def hasContent_(self):
        # No local members; content presence is decided by the base class.
        return bool(super(ResultsFlush, self).hasContent_())
    def export(self, outfile, level, namespaceprefix_='', name_='ResultsFlush', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ResultsFlush')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ResultsFlush')
        if not self.hasContent_():
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='ResultsFlush', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ResultsFlush'):
        """Write the inherited attributes, then ``reference`` if set."""
        super(ResultsFlush, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ResultsFlush')
        if self.reference is not None and 'reference' not in already_processed:
            already_processed.add('reference')
            outfile.write(' reference=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.reference), input_name='reference')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='ResultsFlush', fromsubclass_=False, pretty_print=True):
        # Only inherited children, if any.
        super(ResultsFlush, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this ResultsFlush from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            child_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        reference_value = find_attr_value_('reference', node)
        if reference_value is not None and 'reference' not in already_processed:
            already_processed.add('reference')
            self.reference = reference_value
        super(ResultsFlush, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(ResultsFlush, self).buildChildren(child_, node, nodeName_, True)
# end class ResultsFlush
class ResultsEmail(FlowElement):
    """Generated binding for the <resultsEmail> flow element.

    Holds a list of ``email`` children plus a ``reference`` string attribute
    and an ``emailAdmins`` boolean attribute. Auto-generated XML data-binding
    class (generateDS style).
    """
    subclass = None
    superclass = FlowElement
    def __init__(self, id=None, reference=None, emailAdmins=None, email=None):
        self.original_tagname_ = None
        super(ResultsEmail, self).__init__(id, )
        self.reference = _cast(None, reference)
        self.emailAdmins = _cast(bool, emailAdmins)
        # List-valued child defaults to a fresh list (never a shared default).
        if email is None:
            self.email = []
        else:
            self.email = email
    def factory(*args_, **kwargs_):
        """Instantiate ResultsEmail, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, ResultsEmail)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if ResultsEmail.subclass:
            return ResultsEmail.subclass(*args_, **kwargs_)
        else:
            return ResultsEmail(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors -----------------------------------------------
    def get_email(self): return self.email
    def set_email(self, email): self.email = email
    def add_email(self, value): self.email.append(value)
    def insert_email_at(self, index, value): self.email.insert(index, value)
    def replace_email_at(self, index, value): self.email[index] = value
    def get_reference(self): return self.reference
    def set_reference(self, reference): self.reference = reference
    def get_emailAdmins(self): return self.emailAdmins
    def set_emailAdmins(self, emailAdmins): self.emailAdmins = emailAdmins
    def hasContent_(self):
        """Return True when any email child (or inherited content) is present."""
        if (
            self.email or
            super(ResultsEmail, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='ResultsEmail', namespacedef_='', pretty_print=True):
        """Serialize this element (and its subtree) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('ResultsEmail')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ResultsEmail')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='ResultsEmail', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='ResultsEmail'):
        """Write the inherited attributes, then reference and emailAdmins."""
        super(ResultsEmail, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='ResultsEmail')
        if self.reference is not None and 'reference' not in already_processed:
            already_processed.add('reference')
            outfile.write(' reference=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.reference), input_name='reference')), ))
        if self.emailAdmins is not None and 'emailAdmins' not in already_processed:
            already_processed.add('emailAdmins')
            outfile.write(' emailAdmins="%s"' % self.gds_format_boolean(self.emailAdmins, input_name='emailAdmins'))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='ResultsEmail', fromsubclass_=False, pretty_print=True):
        """Export the email children after any inherited children."""
        super(ResultsEmail, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for email_ in self.email:
            email_.export(outfile, level, namespaceprefix_, name_='email', pretty_print=pretty_print)
    def build(self, node):
        """Populate this ResultsEmail from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse and assign XML attributes; emailAdmins accepts true/false/1/0."""
        value = find_attr_value_('reference', node)
        if value is not None and 'reference' not in already_processed:
            already_processed.add('reference')
            self.reference = value
        value = find_attr_value_('emailAdmins', node)
        if value is not None and 'emailAdmins' not in already_processed:
            already_processed.add('emailAdmins')
            if value in ('true', '1'):
                self.emailAdmins = True
            elif value in ('false', '0'):
                self.emailAdmins = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(ResultsEmail, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Build email children; the concrete class is resolved via xsi:type."""
        if nodeName_ == 'email':
            class_obj_ = self.get_class_obj_(child_, Value)
            obj_ = class_obj_.factory()
            obj_.build(child_)
            self.email.append(obj_)
            obj_.original_tagname_ = 'email'
        super(ResultsEmail, self).buildChildren(child_, node, nodeName_, True)
# end class ResultsEmail
class FlowCode(FlowElement):
    """Generated binding for the <flowCode> flow element.

    Holds a list of ``outlet`` children, an optional ``javascriptCode`` child
    and an ``exceptionReference`` string attribute. Auto-generated XML
    data-binding class (generateDS style).
    """
    subclass = None
    superclass = FlowElement
    def __init__(self, id=None, exceptionReference=None, outlet=None, javascriptCode=None):
        self.original_tagname_ = None
        super(FlowCode, self).__init__(id, )
        self.exceptionReference = _cast(None, exceptionReference)
        # List-valued child defaults to a fresh list (never a shared default).
        if outlet is None:
            self.outlet = []
        else:
            self.outlet = outlet
        self.javascriptCode = javascriptCode
    def factory(*args_, **kwargs_):
        """Instantiate FlowCode, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, FlowCode)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if FlowCode.subclass:
            return FlowCode.subclass(*args_, **kwargs_)
        else:
            return FlowCode(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors -----------------------------------------------
    def get_outlet(self): return self.outlet
    def set_outlet(self, outlet): self.outlet = outlet
    def add_outlet(self, value): self.outlet.append(value)
    def insert_outlet_at(self, index, value): self.outlet.insert(index, value)
    def replace_outlet_at(self, index, value): self.outlet[index] = value
    def get_javascriptCode(self): return self.javascriptCode
    def set_javascriptCode(self, javascriptCode): self.javascriptCode = javascriptCode
    def get_exceptionReference(self): return self.exceptionReference
    def set_exceptionReference(self, exceptionReference): self.exceptionReference = exceptionReference
    def hasContent_(self):
        """Return True when any child element (or inherited content) is present."""
        if (
            self.outlet or
            self.javascriptCode is not None or
            super(FlowCode, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='FlowCode', namespacedef_='', pretty_print=True):
        """Serialize this element (and its subtree) as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('FlowCode')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FlowCode')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='FlowCode', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FlowCode'):
        """Write the inherited attributes, then exceptionReference if set."""
        super(FlowCode, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FlowCode')
        if self.exceptionReference is not None and 'exceptionReference' not in already_processed:
            already_processed.add('exceptionReference')
            outfile.write(' exceptionReference=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.exceptionReference), input_name='exceptionReference')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='FlowCode', fromsubclass_=False, pretty_print=True):
        """Export outlets, then javascriptCode (schema order)."""
        super(FlowCode, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for outlet_ in self.outlet:
            outlet_.export(outfile, level, namespaceprefix_, name_='outlet', pretty_print=pretty_print)
        if self.javascriptCode is not None:
            self.javascriptCode.export(outfile, level, namespaceprefix_, name_='javascriptCode', pretty_print=pretty_print)
    def build(self, node):
        """Populate this FlowCode from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        value = find_attr_value_('exceptionReference', node)
        if value is not None and 'exceptionReference' not in already_processed:
            already_processed.add('exceptionReference')
            self.exceptionReference = value
        super(FlowCode, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Dispatch a child node to the matching child-type factory."""
        if nodeName_ == 'outlet':
            obj_ = Outlet.factory()
            obj_.build(child_)
            self.outlet.append(obj_)
            obj_.original_tagname_ = 'outlet'
        elif nodeName_ == 'javascriptCode':
            obj_ = javascriptCodeType.factory()
            obj_.build(child_)
            self.javascriptCode = obj_
            obj_.original_tagname_ = 'javascriptCode'
        super(FlowCode, self).buildChildren(child_, node, nodeName_, True)
# end class FlowCode
class Widget(GeneratedsSuper):
    """Generated abstract base for widget elements.

    Declares no attributes or children of its own; it only tracks the
    ``xsi:type`` extension marker used to resolve concrete subclasses.
    """
    subclass = None
    superclass = None
    def __init__(self, extensiontype_=None):
        self.original_tagname_ = None
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Instantiate Widget, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Widget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = Widget.subclass or Widget
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # Widget declares no element content of its own (the generated
        # original tested an empty condition, which is always falsy).
        return False
    def export(self, outfile, level, namespaceprefix_='', name_='Widget', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Widget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Widget')
        if not self.hasContent_():
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='Widget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Widget'):
        """Emit the xsi:type marker (with its namespace) when one is set."""
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='Widget', fromsubclass_=False, pretty_print=True):
        # No child elements to export.
        pass
    def build(self, node):
        """Populate this Widget from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            child_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        type_value = find_attr_value_('xsi:type', node)
        if type_value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = type_value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # No child elements to build.
        pass
# end class Widget
class BaseSliderWidget(Widget):
    """Generated base class for slider widgets.

    Carries the shared numeric attributes ``min``/``max``/``step`` (floats),
    ``precision`` (int) and ``unit`` (string), plus the inherited xsi:type
    extension marker. Auto-generated XML data-binding class (generateDS
    style).
    """
    subclass = None
    superclass = Widget
    def __init__(self, min=None, max=None, step=None, precision=None, unit=None, extensiontype_=None):
        self.original_tagname_ = None
        super(BaseSliderWidget, self).__init__(extensiontype_, )
        # Numeric attributes are coerced via _cast; unit stays a raw string.
        self.min = _cast(float, min)
        self.max = _cast(float, max)
        self.step = _cast(float, step)
        self.precision = _cast(int, precision)
        self.unit = _cast(None, unit)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Instantiate BaseSliderWidget, honouring any subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, BaseSliderWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if BaseSliderWidget.subclass:
            return BaseSliderWidget.subclass(*args_, **kwargs_)
        else:
            return BaseSliderWidget(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors -----------------------------------------------
    def get_min(self): return self.min
    def set_min(self, min): self.min = min
    def get_max(self): return self.max
    def set_max(self, max): self.max = max
    def get_step(self): return self.step
    def set_step(self, step): self.step = step
    def get_precision(self): return self.precision
    def set_precision(self, precision): self.precision = precision
    def get_unit(self): return self.unit
    def set_unit(self, unit): self.unit = unit
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        """Return True when inherited content is present (no local children)."""
        if (
            super(BaseSliderWidget, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='BaseSliderWidget', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('BaseSliderWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSliderWidget')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='BaseSliderWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BaseSliderWidget'):
        """Write inherited attributes, then min/max/step/precision/unit/xsi:type."""
        super(BaseSliderWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BaseSliderWidget')
        if self.min is not None and 'min' not in already_processed:
            already_processed.add('min')
            outfile.write(' min="%s"' % self.gds_format_float(self.min, input_name='min'))
        if self.max is not None and 'max' not in already_processed:
            already_processed.add('max')
            outfile.write(' max="%s"' % self.gds_format_float(self.max, input_name='max'))
        if self.step is not None and 'step' not in already_processed:
            already_processed.add('step')
            outfile.write(' step="%s"' % self.gds_format_float(self.step, input_name='step'))
        if self.precision is not None and 'precision' not in already_processed:
            already_processed.add('precision')
            outfile.write(' precision="%s"' % self.gds_format_integer(self.precision, input_name='precision'))
        if self.unit is not None and 'unit' not in already_processed:
            already_processed.add('unit')
            outfile.write(' unit=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.unit), input_name='unit')), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='BaseSliderWidget', fromsubclass_=False, pretty_print=True):
        """No local children; delegate entirely to the base class."""
        super(BaseSliderWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this BaseSliderWidget from an ElementTree node, return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse and assign XML attributes, validating numeric conversions.

        NOTE(review): float attributes raise ValueError directly while the
        integer attribute goes through raise_parse_error — inconsistent, but
        this matches the generator's output; confirm before changing.
        """
        value = find_attr_value_('min', node)
        if value is not None and 'min' not in already_processed:
            already_processed.add('min')
            try:
                self.min = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (min): %s' % exp)
        value = find_attr_value_('max', node)
        if value is not None and 'max' not in already_processed:
            already_processed.add('max')
            try:
                self.max = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (max): %s' % exp)
        value = find_attr_value_('step', node)
        if value is not None and 'step' not in already_processed:
            already_processed.add('step')
            try:
                self.step = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (step): %s' % exp)
        value = find_attr_value_('precision', node)
        if value is not None and 'precision' not in already_processed:
            already_processed.add('precision')
            try:
                self.precision = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('unit', node)
        if value is not None and 'unit' not in already_processed:
            already_processed.add('unit')
            self.unit = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
        super(BaseSliderWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """No local children; delegate entirely to the base class."""
        super(BaseSliderWidget, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class BaseSliderWidget
class SliderWidget(BaseSliderWidget):
    """Generated binding for a single-thumb slider widget.

    Extends BaseSliderWidget with one float attribute, ``value``.
    """
    subclass = None
    superclass = BaseSliderWidget
    def __init__(self, min=None, max=None, step=None, precision=None, unit=None, value=None):
        self.original_tagname_ = None
        super(SliderWidget, self).__init__(min, max, step, precision, unit, )
        self.value = _cast(float, value)
    def factory(*args_, **kwargs_):
        """Instantiate SliderWidget, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, SliderWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls = SliderWidget.subclass or SliderWidget
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def hasContent_(self):
        # Content presence is decided entirely by the base classes.
        return bool(super(SliderWidget, self).hasContent_())
    def export(self, outfile, level, namespaceprefix_='', name_='SliderWidget', namespacedef_='', pretty_print=True):
        """Serialize this element as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('SliderWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SliderWidget')
        if not self.hasContent_():
            # Self-closing form when there are no children.
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='SliderWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='SliderWidget'):
        """Write the inherited attributes, then ``value`` if set."""
        super(SliderWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='SliderWidget')
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value="%s"' % self.gds_format_float(self.value, input_name='value'))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='SliderWidget', fromsubclass_=False, pretty_print=True):
        # Only inherited children, if any.
        super(SliderWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this SliderWidget from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child_ in node:
            child_name = Tag_pattern_.match(child_.tag).groups()[-1]
            self.buildChildren(child_, node, child_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        raw_value = find_attr_value_('value', node)
        if raw_value is not None and 'value' not in already_processed:
            already_processed.add('value')
            try:
                self.value = float(raw_value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (value): %s' % exp)
        super(SliderWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(SliderWidget, self).buildChildren(child_, node, nodeName_, True)
# end class SliderWidget
class RangeSliderWidget(BaseSliderWidget):
    """Generated binding for the <RangeSliderWidget> element: a two-thumb slider.

    Extends BaseSliderWidget with float attributes ``lowValue`` and
    ``highValue``.
    """
    subclass = None
    superclass = BaseSliderWidget
    def __init__(self, min=None, max=None, step=None, precision=None, unit=None, lowValue=None, highValue=None):
        self.original_tagname_ = None
        super(RangeSliderWidget, self).__init__(min, max, step, precision, unit, )
        # _cast coerces to float, or leaves None untouched.
        self.lowValue = _cast(float, lowValue)
        self.highValue = _cast(float, highValue)
    def factory(*args_, **kwargs_):
        """Instantiate RangeSliderWidget or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, RangeSliderWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if RangeSliderWidget.subclass:
            return RangeSliderWidget.subclass(*args_, **kwargs_)
        else:
            return RangeSliderWidget(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_lowValue(self): return self.lowValue
    def set_lowValue(self, lowValue): self.lowValue = lowValue
    def get_highValue(self): return self.highValue
    def set_highValue(self, highValue): self.highValue = highValue
    def hasContent_(self):
        """Return True when this element has child content (delegates to base)."""
        if (
            super(RangeSliderWidget, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='RangeSliderWidget', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('RangeSliderWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element was originally parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RangeSliderWidget')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='RangeSliderWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='RangeSliderWidget'):
        """Write base-class attributes first, then lowValue/highValue."""
        super(RangeSliderWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='RangeSliderWidget')
        if self.lowValue is not None and 'lowValue' not in already_processed:
            already_processed.add('lowValue')
            outfile.write(' lowValue="%s"' % self.gds_format_float(self.lowValue, input_name='lowValue'))
        if self.highValue is not None and 'highValue' not in already_processed:
            already_processed.add('highValue')
            outfile.write(' highValue="%s"' % self.gds_format_float(self.highValue, input_name='highValue'))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='RangeSliderWidget', fromsubclass_=False, pretty_print=True):
        super(RangeSliderWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse float 'lowValue'/'highValue' attributes, then delegate to base."""
        value = find_attr_value_('lowValue', node)
        if value is not None and 'lowValue' not in already_processed:
            already_processed.add('lowValue')
            try:
                self.lowValue = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (lowValue): %s' % exp)
        value = find_attr_value_('highValue', node)
        if value is not None and 'highValue' not in already_processed:
            already_processed.add('highValue')
            try:
                self.highValue = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (highValue): %s' % exp)
        super(RangeSliderWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(RangeSliderWidget, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class RangeSliderWidget
class PhotoUploadWidget(Widget):
    """Generated binding for the <PhotoUploadWidget> element.

    Attributes: ``quality`` (string), ``gallery``/``camera`` (booleans
    selecting allowed photo sources), ``ratio`` (string).
    """
    subclass = None
    superclass = Widget
    def __init__(self, quality=None, gallery=None, camera=None, ratio=None):
        self.original_tagname_ = None
        super(PhotoUploadWidget, self).__init__()
        # _cast(None, x) stores x as-is (string attributes).
        self.quality = _cast(None, quality)
        self.gallery = _cast(bool, gallery)
        self.camera = _cast(bool, camera)
        self.ratio = _cast(None, ratio)
    def factory(*args_, **kwargs_):
        """Instantiate PhotoUploadWidget or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PhotoUploadWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PhotoUploadWidget.subclass:
            return PhotoUploadWidget.subclass(*args_, **kwargs_)
        else:
            return PhotoUploadWidget(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_quality(self): return self.quality
    def set_quality(self, quality): self.quality = quality
    def get_gallery(self): return self.gallery
    def set_gallery(self, gallery): self.gallery = gallery
    def get_camera(self): return self.camera
    def set_camera(self, camera): self.camera = camera
    def get_ratio(self): return self.ratio
    def set_ratio(self, ratio): self.ratio = ratio
    def hasContent_(self):
        """Return True when this element has child content (delegates to base)."""
        if (
            super(PhotoUploadWidget, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='PhotoUploadWidget', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PhotoUploadWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element was originally parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PhotoUploadWidget')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='PhotoUploadWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PhotoUploadWidget'):
        """Write base-class attributes first, then quality/gallery/camera/ratio.

        String attributes use quote_attrib (which supplies its own quoting);
        booleans are formatted inside explicit double quotes.
        """
        super(PhotoUploadWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PhotoUploadWidget')
        if self.quality is not None and 'quality' not in already_processed:
            already_processed.add('quality')
            outfile.write(' quality=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.quality), input_name='quality')), ))
        if self.gallery is not None and 'gallery' not in already_processed:
            already_processed.add('gallery')
            outfile.write(' gallery="%s"' % self.gds_format_boolean(self.gallery, input_name='gallery'))
        if self.camera is not None and 'camera' not in already_processed:
            already_processed.add('camera')
            outfile.write(' camera="%s"' % self.gds_format_boolean(self.camera, input_name='camera'))
        if self.ratio is not None and 'ratio' not in already_processed:
            already_processed.add('ratio')
            outfile.write(' ratio=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.ratio), input_name='ratio')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='PhotoUploadWidget', fromsubclass_=False, pretty_print=True):
        super(PhotoUploadWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse quality/gallery/camera/ratio attributes, then delegate to base.

        Boolean attributes accept only 'true'/'1'/'false'/'0'; anything else
        raises a parse error.
        """
        value = find_attr_value_('quality', node)
        if value is not None and 'quality' not in already_processed:
            already_processed.add('quality')
            self.quality = value
        value = find_attr_value_('gallery', node)
        if value is not None and 'gallery' not in already_processed:
            already_processed.add('gallery')
            if value in ('true', '1'):
                self.gallery = True
            elif value in ('false', '0'):
                self.gallery = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('camera', node)
        if value is not None and 'camera' not in already_processed:
            already_processed.add('camera')
            if value in ('true', '1'):
                self.camera = True
            elif value in ('false', '0'):
                self.camera = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('ratio', node)
        if value is not None and 'ratio' not in already_processed:
            already_processed.add('ratio')
            self.ratio = value
        super(PhotoUploadWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(PhotoUploadWidget, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class PhotoUploadWidget
class GPSLocationWidget(Widget):
    """Generated binding for the <GPSLocationWidget> element.

    Carries a single boolean attribute ``gps``.
    """
    subclass = None
    superclass = Widget
    def __init__(self, gps=None):
        self.original_tagname_ = None
        super(GPSLocationWidget, self).__init__()
        self.gps = _cast(bool, gps)
    def factory(*args_, **kwargs_):
        """Instantiate GPSLocationWidget or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, GPSLocationWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if GPSLocationWidget.subclass:
            return GPSLocationWidget.subclass(*args_, **kwargs_)
        else:
            return GPSLocationWidget(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_gps(self): return self.gps
    def set_gps(self, gps): self.gps = gps
    def hasContent_(self):
        """Return True when this element has child content (delegates to base)."""
        if (
            super(GPSLocationWidget, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='GPSLocationWidget', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('GPSLocationWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element was originally parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GPSLocationWidget')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='GPSLocationWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='GPSLocationWidget'):
        """Write base-class attributes first, then the boolean 'gps'."""
        super(GPSLocationWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='GPSLocationWidget')
        if self.gps is not None and 'gps' not in already_processed:
            already_processed.add('gps')
            outfile.write(' gps="%s"' % self.gds_format_boolean(self.gps, input_name='gps'))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='GPSLocationWidget', fromsubclass_=False, pretty_print=True):
        super(GPSLocationWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the boolean 'gps' attribute, then delegate to the base class."""
        value = find_attr_value_('gps', node)
        if value is not None and 'gps' not in already_processed:
            already_processed.add('gps')
            if value in ('true', '1'):
                self.gps = True
            elif value in ('false', '0'):
                self.gps = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        super(GPSLocationWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(GPSLocationWidget, self).buildChildren(child_, node, nodeName_, True)
        pass
# end class GPSLocationWidget
class TextWidget(Widget):
    """Generated binding for the <TextWidget> element.

    Carries maxChars (int), placeholder/value (strings) and keyboardType
    (KeyboardType enumeration) attributes, plus xsi:type extension support so
    concrete subclasses (e.g. TextLineWidget, TextBlockWidget) can be
    substituted in documents.
    """
    subclass = None
    superclass = Widget
    def __init__(self, maxChars=None, placeholder=None, value=None, keyboardType=None, extensiontype_=None):
        self.original_tagname_ = None
        super(TextWidget, self).__init__(extensiontype_, )
        # _cast coerces to the given type, or stores as-is when type is None.
        self.maxChars = _cast(int, maxChars)
        self.placeholder = _cast(None, placeholder)
        self.value = _cast(None, value)
        self.keyboardType = _cast(None, keyboardType)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Instantiate TextWidget or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if TextWidget.subclass:
            return TextWidget.subclass(*args_, **kwargs_)
        else:
            return TextWidget(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_maxChars(self): return self.maxChars
    def set_maxChars(self, maxChars): self.maxChars = maxChars
    def get_placeholder(self): return self.placeholder
    def set_placeholder(self, placeholder): self.placeholder = placeholder
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def get_keyboardType(self): return self.keyboardType
    def set_keyboardType(self, keyboardType): self.keyboardType = keyboardType
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def validate_KeyboardType(self, value):
        """Validate *value* against the KeyboardType xs:string enumeration.

        Emits a warning (does not raise) when validation is enabled and the
        value is not one of the allowed keyboard types.
        """
        if value is not None and Validate_simpletypes_:
            value = str(value)
            enumerations = ['DEFAULT', 'AUTO_CAPITALIZED', 'EMAIL', 'URL', 'PHONE', 'NUMBER', 'DECIMAL', 'PASSWORD', 'NUMBER_PASSWORD']
            # Membership test replaces the original manual search loop.
            if value not in enumerations:
                # Interpolate the str directly: the original used
                # value.encode("utf-8"), which renders as b'...' inside the
                # warning message on Python 3.
                warnings_.warn('Value "%(value)s" does not match xsd enumeration restriction on KeyboardType' % {"value": value})
    def hasContent_(self):
        """Return True when this element has child content (delegates to base)."""
        if (
            super(TextWidget, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='TextWidget', namespacedef_='', pretty_print=True):
        """Serialize this element, its attributes and children as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element was originally parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextWidget')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='TextWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextWidget'):
        """Write base-class attributes, this class's attributes, and xsi:type."""
        super(TextWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextWidget')
        if self.maxChars is not None and 'maxChars' not in already_processed:
            already_processed.add('maxChars')
            outfile.write(' maxChars="%s"' % self.gds_format_integer(self.maxChars, input_name='maxChars'))
        if self.placeholder is not None and 'placeholder' not in already_processed:
            already_processed.add('placeholder')
            outfile.write(' placeholder=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.placeholder), input_name='placeholder')), ))
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), ))
        if self.keyboardType is not None and 'keyboardType' not in already_processed:
            already_processed.add('keyboardType')
            outfile.write(' keyboardType=%s' % (quote_attrib(self.keyboardType), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='TextWidget', fromsubclass_=False, pretty_print=True):
        super(TextWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse this class's attributes (with validation), then delegate to base."""
        value = find_attr_value_('maxChars', node)
        if value is not None and 'maxChars' not in already_processed:
            already_processed.add('maxChars')
            try:
                self.maxChars = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('placeholder', node)
        if value is not None and 'placeholder' not in already_processed:
            already_processed.add('placeholder')
            self.placeholder = value
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            self.value = value
        value = find_attr_value_('keyboardType', node)
        if value is not None and 'keyboardType' not in already_processed:
            already_processed.add('keyboardType')
            self.keyboardType = value
            self.validate_KeyboardType(self.keyboardType)    # validate type KeyboardType
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
        super(TextWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(TextWidget, self).buildChildren(child_, node, nodeName_, True)
# end class TextWidget
class TextLineWidget(TextWidget):
    """Generated binding for <TextLineWidget>: a single-line text input.

    Adds nothing of its own; every attribute and child is handled by
    TextWidget.
    """
    subclass = None
    superclass = TextWidget
    def __init__(self, maxChars=None, placeholder=None, value=None, keyboardType=None):
        self.original_tagname_ = None
        super(TextLineWidget, self).__init__(maxChars, placeholder, value, keyboardType, )
    def factory(*args_, **kwargs_):
        """Instantiate TextLineWidget or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextLineWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = TextLineWidget.subclass or TextLineWidget
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        """Report whether this element has child content (inherited logic)."""
        return bool(super(TextLineWidget, self).hasContent_())
    def export(self, outfile, level, namespaceprefix_='', name_='TextLineWidget', namespacedef_='', pretty_print=True):
        """Serialize this element as XML, delegating attributes/children to base."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextLineWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name the element was parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextLineWidget')
        if not self.hasContent_():
            # Nothing inside: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='TextLineWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextLineWidget'):
        """All attributes come from TextWidget."""
        super(TextLineWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextLineWidget')
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='TextLineWidget', fromsubclass_=False, pretty_print=True):
        """All children come from TextWidget."""
        super(TextLineWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(TextLineWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(TextLineWidget, self).buildChildren(child_, node, nodeName_, True)
# end class TextLineWidget
class TextBlockWidget(TextWidget):
    """Generated binding for <TextBlockWidget>: a multi-line text input.

    Adds nothing of its own; every attribute and child is handled by
    TextWidget.
    """
    subclass = None
    superclass = TextWidget
    def __init__(self, maxChars=None, placeholder=None, value=None, keyboardType=None):
        self.original_tagname_ = None
        super(TextBlockWidget, self).__init__(maxChars, placeholder, value, keyboardType, )
    def factory(*args_, **kwargs_):
        """Instantiate TextBlockWidget or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, TextBlockWidget)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        cls_ = TextBlockWidget.subclass or TextBlockWidget
        return cls_(*args_, **kwargs_)
    factory = staticmethod(factory)
    def hasContent_(self):
        """Report whether this element has child content (inherited logic)."""
        return bool(super(TextBlockWidget, self).hasContent_())
    def export(self, outfile, level, namespaceprefix_='', name_='TextBlockWidget', namespacedef_='', pretty_print=True):
        """Serialize this element as XML, delegating attributes/children to base."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('TextBlockWidget')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        eol_ = '\n' if pretty_print else ''
        if self.original_tagname_ is not None:
            # Keep the tag name the element was parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextBlockWidget')
        if not self.hasContent_():
            # Nothing inside: self-closing tag.
            outfile.write('/>%s' % (eol_, ))
        else:
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='TextBlockWidget', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='TextBlockWidget'):
        """All attributes come from TextWidget."""
        super(TextBlockWidget, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='TextBlockWidget')
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='TextBlockWidget', fromsubclass_=False, pretty_print=True):
        """All children come from TextWidget."""
        super(TextBlockWidget, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for element in node:
            tag_name = Tag_pattern_.match(element.tag).groups()[-1]
            self.buildChildren(element, node, tag_name)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        super(TextBlockWidget, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        super(TextBlockWidget, self).buildChildren(child_, node, nodeName_, True)
# end class TextBlockWidget
class Value(GeneratedsSuper):
    """Generated binding for the <Value> element.

    A leaf element carrying one string attribute, ``value``, plus xsi:type
    extension support.
    """
    subclass = None
    superclass = None
    def __init__(self, value=None, extensiontype_=None):
        self.original_tagname_ = None
        # _cast(None, x) stores x as-is (string attribute).
        self.value = _cast(None, value)
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        """Instantiate Value or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, Value)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if Value.subclass:
            return Value.subclass(*args_, **kwargs_)
        else:
            return Value(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        """Value declares no child elements, so there is never element content.

        The generated original tested an empty tuple (``if ():``), which is
        always false and left an unreachable ``return True`` branch; return
        False directly instead.
        """
        return False
    def export(self, outfile, level, namespaceprefix_='', name_='Value', namespacedef_='', pretty_print=True):
        """Serialize this element and its attributes as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('Value')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element was originally parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='Value')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='Value', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content (always the case here): emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='Value'):
        """Write the 'value' attribute and, when extended, xsi:type."""
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.value), input_name='value')), ))
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='Value', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the 'value' and 'xsi:type' attributes."""
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            self.value = value
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class Value
class FloatValue(GeneratedsSuper):
    """Generated binding for the <FloatValue> element.

    A leaf element carrying one float attribute, ``value``.
    """
    subclass = None
    superclass = None
    def __init__(self, value=None):
        self.original_tagname_ = None
        # _cast coerces to float, or leaves None untouched.
        self.value = _cast(float, value)
    def factory(*args_, **kwargs_):
        """Instantiate FloatValue or a registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, FloatValue)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if FloatValue.subclass:
            return FloatValue.subclass(*args_, **kwargs_)
        else:
            return FloatValue(*args_, **kwargs_)
    factory = staticmethod(factory)
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def hasContent_(self):
        """FloatValue declares no child elements, so there is never content.

        The generated original tested an empty tuple (``if ():``), which is
        always false and left an unreachable ``return True`` branch; return
        False directly instead.
        """
        return False
    def export(self, outfile, level, namespaceprefix_='', name_='FloatValue', namespacedef_='', pretty_print=True):
        """Serialize this element and its attributes as XML to outfile."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('FloatValue')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.original_tagname_ is not None:
            # Preserve the tag name the element was originally parsed with.
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FloatValue')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='FloatValue', pretty_print=pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            # No content (always the case here): emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FloatValue'):
        """Write the float 'value' attribute."""
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value="%s"' % self.gds_format_float(self.value, input_name='value'))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='FloatValue', fromsubclass_=False, pretty_print=True):
        pass
    def build(self, node):
        """Populate this object from an ElementTree node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse the float 'value' attribute."""
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            try:
                self.value = float(value)
            except ValueError as exp:
                raise ValueError('Bad float/double attribute (value): %s' % exp)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        pass
# end class FloatValue
class AdvancedOrderCategory(FlowElement):
subclass = None
superclass = FlowElement
def __init__(self, id=None, name=None, item=None):
self.original_tagname_ = None
super(AdvancedOrderCategory, self).__init__(id, )
self.name = name
if item is None:
self.item = []
else:
self.item = item
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, AdvancedOrderCategory)
if subclass is not None:
return subclass(*args_, **kwargs_)
if AdvancedOrderCategory.subclass:
return AdvancedOrderCategory.subclass(*args_, **kwargs_)
else:
return AdvancedOrderCategory(*args_, **kwargs_)
factory = staticmethod(factory)
def get_name(self): return self.name
def set_name(self, name): self.name = name
def get_item(self): return self.item
def set_item(self, item): self.item = item
def add_item(self, value): self.item.append(value)
def insert_item_at(self, index, value): self.item.insert(index, value)
def replace_item_at(self, index, value): self.item[index] = value
def hasContent_(self):
if (
self.name is not None or
self.item or
super(AdvancedOrderCategory, self).hasContent_()
):
return True
else:
return False
def export(self, outfile, level, namespaceprefix_='', name_='AdvancedOrderCategory', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('AdvancedOrderCategory')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvancedOrderCategory')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='AdvancedOrderCategory', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdvancedOrderCategory'):
        """Write XML attributes; this class adds none beyond its superclass."""
        super(AdvancedOrderCategory, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvancedOrderCategory')
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='AdvancedOrderCategory', fromsubclass_=False, pretty_print=True):
        """Write child elements: inherited children first, then <name> and <item>s."""
        super(AdvancedOrderCategory, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.name is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<name>%s</name>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.name), input_name='name')), eol_))
        # Repeated <item> children delegate to their own export().
        for item_ in self.item:
            item_.export(outfile, level, namespaceprefix_, name_='item', pretty_print=pretty_print)
def build(self, node):
already_processed = set()
self.buildAttributes(node, node.attrib, already_processed)
for child in node:
nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
self.buildChildren(child, node, nodeName_)
return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; this class adds none beyond its superclass."""
        super(AdvancedOrderCategory, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member, then defer to super."""
        if nodeName_ == 'name':
            name_ = child_.text
            name_ = self.gds_validate_string(name_, node, 'name')
            self.name = name_
        elif nodeName_ == 'item':
            # Each <item> becomes an AdvancedOrderItem built recursively.
            obj_ = AdvancedOrderItem.factory()
            obj_.build(child_)
            self.item.append(obj_)
            obj_.original_tagname_ = 'item'
        super(AdvancedOrderCategory, self).buildChildren(child_, node, nodeName_, True)
# end class AdvancedOrderCategory
class AdvancedOrderItem(FlowElement):
    """Generated binding for the <AdvancedOrderItem> XML element.

    generateDS-style class: value, unit, unitPrice, hasPrice, step, stepUnit,
    stepUnitConversion and imageUrl are XML attributes; name and description
    are child elements.
    """
    subclass = None
    superclass = FlowElement
    def __init__(self, id=None, value=None, unit=None, unitPrice=None, hasPrice=True, step=None, stepUnit=None, stepUnitConversion=None, imageUrl=None, name=None, description=None):
        self.original_tagname_ = None
        super(AdvancedOrderItem, self).__init__(id, )
        # _cast coerces attribute values to the schema type (None = no coercion).
        self.value = _cast(int, value)
        self.unit = _cast(None, unit)
        self.unitPrice = _cast(int, unitPrice)
        # hasPrice defaults to True; only a False value is serialized
        # (see exportAttributes below).
        self.hasPrice = _cast(bool, hasPrice)
        self.step = _cast(int, step)
        self.stepUnit = _cast(None, stepUnit)
        self.stepUnitConversion = _cast(int, stepUnitConversion)
        self.imageUrl = _cast(None, imageUrl)
        self.name = name
        self.description = description
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, AdvancedOrderItem)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if AdvancedOrderItem.subclass:
            return AdvancedOrderItem.subclass(*args_, **kwargs_)
        else:
            return AdvancedOrderItem(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Simple accessors for the generated members.
    def get_name(self): return self.name
    def set_name(self, name): self.name = name
    def get_description(self): return self.description
    def set_description(self, description): self.description = description
    def get_value(self): return self.value
    def set_value(self, value): self.value = value
    def get_unit(self): return self.unit
    def set_unit(self, unit): self.unit = unit
    def get_unitPrice(self): return self.unitPrice
    def set_unitPrice(self, unitPrice): self.unitPrice = unitPrice
    def get_hasPrice(self): return self.hasPrice
    def set_hasPrice(self, hasPrice): self.hasPrice = hasPrice
    def get_step(self): return self.step
    def set_step(self, step): self.step = step
    def get_stepUnit(self): return self.stepUnit
    def set_stepUnit(self, stepUnit): self.stepUnit = stepUnit
    def get_stepUnitConversion(self): return self.stepUnitConversion
    def set_stepUnitConversion(self, stepUnitConversion): self.stepUnitConversion = stepUnitConversion
    def get_imageUrl(self): return self.imageUrl
    def set_imageUrl(self, imageUrl): self.imageUrl = imageUrl
    def hasContent_(self):
        # True when any child element would be serialized (attributes don't count).
        if (
            self.name is not None or
            self.description is not None or
            super(AdvancedOrderItem, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='AdvancedOrderItem', namespacedef_='', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('AdvancedOrderItem')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name the element was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvancedOrderItem')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='AdvancedOrderItem', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='AdvancedOrderItem'):
        """Write this element's XML attributes, skipping ones already emitted."""
        super(AdvancedOrderItem, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='AdvancedOrderItem')
        if self.value is not None and 'value' not in already_processed:
            already_processed.add('value')
            outfile.write(' value="%s"' % self.gds_format_integer(self.value, input_name='value'))
        # String attributes go through quote_attrib, which presumably supplies
        # the surrounding quotes itself (note the quote-less format strings) —
        # TODO confirm against the generateDS runtime helpers.
        if self.unit is not None and 'unit' not in already_processed:
            already_processed.add('unit')
            outfile.write(' unit=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.unit), input_name='unit')), ))
        if self.unitPrice is not None and 'unitPrice' not in already_processed:
            already_processed.add('unitPrice')
            outfile.write(' unitPrice="%s"' % self.gds_format_integer(self.unitPrice, input_name='unitPrice'))
        # hasPrice defaults to True, so only a False value is written out.
        if not self.hasPrice and 'hasPrice' not in already_processed:
            already_processed.add('hasPrice')
            outfile.write(' hasPrice="%s"' % self.gds_format_boolean(self.hasPrice, input_name='hasPrice'))
        if self.step is not None and 'step' not in already_processed:
            already_processed.add('step')
            outfile.write(' step="%s"' % self.gds_format_integer(self.step, input_name='step'))
        if self.stepUnit is not None and 'stepUnit' not in already_processed:
            already_processed.add('stepUnit')
            outfile.write(' stepUnit=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.stepUnit), input_name='stepUnit')), ))
        if self.stepUnitConversion is not None and 'stepUnitConversion' not in already_processed:
            already_processed.add('stepUnitConversion')
            outfile.write(' stepUnitConversion="%s"' % self.gds_format_integer(self.stepUnitConversion, input_name='stepUnitConversion'))
        if self.imageUrl is not None and 'imageUrl' not in already_processed:
            already_processed.add('imageUrl')
            outfile.write(' imageUrl=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.imageUrl), input_name='imageUrl')), ))
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='AdvancedOrderItem', fromsubclass_=False, pretty_print=True):
        """Write child elements: inherited children, then <name> and <description>."""
        super(AdvancedOrderItem, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.name is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<name>%s</name>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.name), input_name='name')), eol_))
        if self.description is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<description>%s</description>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.description), input_name='description')), eol_))
    def build(self, node):
        """Populate this element from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child's tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; integer/boolean parse failures raise via raise_parse_error."""
        value = find_attr_value_('value', node)
        if value is not None and 'value' not in already_processed:
            already_processed.add('value')
            try:
                self.value = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('unit', node)
        if value is not None and 'unit' not in already_processed:
            already_processed.add('unit')
            self.unit = value
        value = find_attr_value_('unitPrice', node)
        if value is not None and 'unitPrice' not in already_processed:
            already_processed.add('unitPrice')
            try:
                self.unitPrice = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('hasPrice', node)
        if value is not None and 'hasPrice' not in already_processed:
            already_processed.add('hasPrice')
            # Only the four literal forms below are accepted as booleans.
            if value in ('true', '1'):
                self.hasPrice = True
            elif value in ('false', '0'):
                self.hasPrice = False
            else:
                raise_parse_error(node, 'Bad boolean attribute')
        value = find_attr_value_('step', node)
        if value is not None and 'step' not in already_processed:
            already_processed.add('step')
            try:
                self.step = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('stepUnit', node)
        if value is not None and 'stepUnit' not in already_processed:
            already_processed.add('stepUnit')
            self.stepUnit = value
        value = find_attr_value_('stepUnitConversion', node)
        if value is not None and 'stepUnitConversion' not in already_processed:
            already_processed.add('stepUnitConversion')
            try:
                self.stepUnitConversion = int(value)
            except ValueError as exp:
                raise_parse_error(node, 'Bad integer attribute: %s' % exp)
        value = find_attr_value_('imageUrl', node)
        if value is not None and 'imageUrl' not in already_processed:
            already_processed.add('imageUrl')
            self.imageUrl = value
        super(AdvancedOrderItem, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member, then defer to super."""
        if nodeName_ == 'name':
            name_ = child_.text
            name_ = self.gds_validate_string(name_, node, 'name')
            self.name = name_
        elif nodeName_ == 'description':
            description_ = child_.text
            description_ = self.gds_validate_string(description_, node, 'description')
            self.description = description_
        super(AdvancedOrderItem, self).buildChildren(child_, node, nodeName_, True)
# end class AdvancedOrderItem
class BasePaymentMethod(FlowElement):
    """Generated binding for the abstract <BasePaymentMethod> XML element.

    currency, amount and precision are child elements; extensiontype_ carries
    the xsi:type marker used for schema-extension (subclass) substitution.
    """
    subclass = None
    superclass = FlowElement
    def __init__(self, id=None, currency=None, amount=None, precision=None, extensiontype_=None):
        self.original_tagname_ = None
        super(BasePaymentMethod, self).__init__(id, extensiontype_, )
        self.currency = currency
        self.amount = amount
        self.precision = precision
        self.extensiontype_ = extensiontype_
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, BasePaymentMethod)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if BasePaymentMethod.subclass:
            return BasePaymentMethod.subclass(*args_, **kwargs_)
        else:
            return BasePaymentMethod(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Simple accessors for the generated members.
    def get_currency(self): return self.currency
    def set_currency(self, currency): self.currency = currency
    def get_amount(self): return self.amount
    def set_amount(self, amount): self.amount = amount
    def get_precision(self): return self.precision
    def set_precision(self, precision): self.precision = precision
    def get_extensiontype_(self): return self.extensiontype_
    def set_extensiontype_(self, extensiontype_): self.extensiontype_ = extensiontype_
    def hasContent_(self):
        # True when any child element would be serialized.
        if (
            self.currency is not None or
            self.amount is not None or
            self.precision is not None or
            super(BasePaymentMethod, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='BasePaymentMethod', namespacedef_='', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('BasePaymentMethod')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name the element was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePaymentMethod')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='BasePaymentMethod', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BasePaymentMethod'):
        """Write attributes; emits xsi:type when this instance is a schema extension."""
        super(BasePaymentMethod, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BasePaymentMethod')
        if self.extensiontype_ is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            outfile.write(' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"')
            outfile.write(' xsi:type="%s"' % self.extensiontype_)
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='BasePaymentMethod', fromsubclass_=False, pretty_print=True):
        """Write child elements: inherited children, then currency/amount/precision."""
        super(BasePaymentMethod, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.currency is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<currency>%s</currency>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.currency), input_name='currency')), eol_))
        if self.amount is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<amount>%s</amount>%s' % (self.gds_format_integer(self.amount, input_name='amount'), eol_))
        if self.precision is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<precision>%s</precision>%s' % (self.gds_format_integer(self.precision, input_name='precision'), eol_))
    def build(self, node):
        """Populate this element from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child's tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse attributes; captures xsi:type for extension dispatch, then defers to super."""
        value = find_attr_value_('xsi:type', node)
        if value is not None and 'xsi:type' not in already_processed:
            already_processed.add('xsi:type')
            self.extensiontype_ = value
        super(BasePaymentMethod, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member, then defer to super."""
        if nodeName_ == 'currency':
            currency_ = child_.text
            currency_ = self.gds_validate_string(currency_, node, 'currency')
            self.currency = currency_
        elif nodeName_ == 'amount' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'amount')
            self.amount = ival_
        elif nodeName_ == 'precision' and child_.text:
            sval_ = child_.text
            try:
                ival_ = int(sval_)
            except (TypeError, ValueError) as exp:
                raise_parse_error(child_, 'requires integer: %s' % exp)
            ival_ = self.gds_validate_integer(ival_, node, 'precision')
            self.precision = ival_
        super(BasePaymentMethod, self).buildChildren(child_, node, nodeName_, True)
# end class BasePaymentMethod
class PaymentMethod(BasePaymentMethod):
    """Generated binding for the <PaymentMethod> XML element.

    Extends BasePaymentMethod with provider_id, calculateAmount and target
    child elements.
    """
    subclass = None
    superclass = BasePaymentMethod
    def __init__(self, id=None, currency=None, amount=None, precision=None, provider_id=None, calculateAmount=False, target=None):
        self.original_tagname_ = None
        super(PaymentMethod, self).__init__(id, currency, amount, precision, )
        self.provider_id = provider_id
        # calculateAmount defaults to False (see the NOTE in exportChildren).
        self.calculateAmount = calculateAmount
        self.target = target
    def factory(*args_, **kwargs_):
        # Honor a registered subclass override, if any.
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, PaymentMethod)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if PaymentMethod.subclass:
            return PaymentMethod.subclass(*args_, **kwargs_)
        else:
            return PaymentMethod(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Simple accessors for the generated members.
    def get_provider_id(self): return self.provider_id
    def set_provider_id(self, provider_id): self.provider_id = provider_id
    def get_calculateAmount(self): return self.calculateAmount
    def set_calculateAmount(self, calculateAmount): self.calculateAmount = calculateAmount
    def get_target(self): return self.target
    def set_target(self, target): self.target = target
    def hasContent_(self):
        # NOTE(review): a False calculateAmount counts as "no content" here,
        # yet exportChildren writes it whenever it is not None — asymmetric.
        if (
            self.provider_id is not None or
            self.calculateAmount or
            self.target is not None or
            super(PaymentMethod, self).hasContent_()
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespaceprefix_='', name_='PaymentMethod', namespacedef_='', pretty_print=True):
        """Write this element (tag, attributes, children) to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('PaymentMethod')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Re-emit under the tag name the element was originally parsed from.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PaymentMethod')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespaceprefix_='', name_='PaymentMethod', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
        else:
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PaymentMethod'):
        """Write XML attributes; this class adds none beyond its superclass."""
        super(PaymentMethod, self).exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PaymentMethod')
    def exportChildren(self, outfile, level, namespaceprefix_='', name_='PaymentMethod', fromsubclass_=False, pretty_print=True):
        """Write child elements: inherited children, then provider_id/calculateAmount/target."""
        super(PaymentMethod, self).exportChildren(outfile, level, namespaceprefix_, name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.provider_id is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<provider_id>%s</provider_id>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.provider_id), input_name='provider_id')), eol_))
        # NOTE(review): `is not None` means the default False is still written
        # whenever other content triggers exportChildren, while hasContent_
        # treats False as empty. Generated code — left as-is.
        if self.calculateAmount is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<calculateAmount>%s</calculateAmount>%s' % (self.gds_format_boolean(self.calculateAmount, input_name='calculateAmount'), eol_))
        if self.target is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<target>%s</target>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.target), input_name='target')), eol_))
    def build(self, node):
        """Populate this element from an ElementTree node and return self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            # Strip any namespace prefix from the child's tag.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Parse XML attributes; this class adds none beyond its superclass."""
        super(PaymentMethod, self).buildAttributes(node, attrs, already_processed)
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        """Parse one child element into the matching member, then defer to super."""
        if nodeName_ == 'provider_id':
            provider_id_ = child_.text
            provider_id_ = self.gds_validate_string(provider_id_, node, 'provider_id')
            self.provider_id = provider_id_
        elif nodeName_ == 'calculateAmount':
            sval_ = child_.text
            # Only the four literal forms below are accepted as booleans.
            if sval_ in ('true', '1'):
                ival_ = True
            elif sval_ in ('false', '0'):
                ival_ = False
            else:
                raise_parse_error(child_, 'requires boolean')
            ival_ = self.gds_validate_boolean(ival_, node, 'calculateAmount')
            self.calculateAmount = ival_
        elif nodeName_ == 'target':
            target_ = child_.text
            target_ = self.gds_validate_string(target_, node, 'target')
            self.target = target_
        super(PaymentMethod, self).buildChildren(child_, node, nodeName_, True)
# end class PaymentMethod
class TextAutocompleteWidget(TextWidget):
subclass = None
superclass = TextWidget
def __init__(self, maxChars=None, placeholder=None, value=None, keyboardType=None, suggestion=None):
self.original_tagname_ = None
super(TextAutocompleteWidget, self).__init__(maxChars, placeholder, value, keyboardType, )
if suggestion is None:
self.suggestion = []
else:
self.suggestion | codeparrot/github-code-clean |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras metrics functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import os
from absl.testing import parameterized
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import layers
from tensorflow.python.keras import metrics
from tensorflow.python.keras import testing_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training.checkpointable import util as checkpointable_utils
from tensorflow.python.training.rmsprop import RMSPropOptimizer
@test_util.run_all_in_graph_and_eager_modes
class KerasMeanTest(test.TestCase):
  """Tests for the stateful `metrics.Mean` metric (config, weighting, checkpointing)."""
  # TODO(b/120949004): Re-enable garbage collection check
  # @test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
  def test_mean(self):
    m = metrics.Mean(name='my_mean')
    # check config
    self.assertEqual(m.name, 'my_mean')
    self.assertTrue(m.stateful)
    self.assertEqual(m.dtype, dtypes.float32)
    self.assertEqual(len(m.variables), 2)
    self.evaluate(variables.variables_initializer(m.variables))
    # check initial state
    self.assertEqual(self.evaluate(m.total), 0)
    self.assertEqual(self.evaluate(m.count), 0)
    # check __call__()
    self.assertEqual(self.evaluate(m(100)), 100)
    self.assertEqual(self.evaluate(m.total), 100)
    self.assertEqual(self.evaluate(m.count), 1)
    # check update_state() and result() + state accumulation + tensor input
    update_op = m.update_state(ops.convert_n_to_tensor([1, 5]))
    self.evaluate(update_op)
    self.assertAlmostEqual(self.evaluate(m.result()), 106 / 3, 2)
    self.assertEqual(self.evaluate(m.total), 106)  # 100 + 1 + 5
    self.assertEqual(self.evaluate(m.count), 3)
    # check reset_states()
    m.reset_states()
    self.assertEqual(self.evaluate(m.total), 0)
    self.assertEqual(self.evaluate(m.count), 0)
    # Check save and restore config
    m2 = metrics.Mean.from_config(m.get_config())
    self.assertEqual(m2.name, 'my_mean')
    self.assertTrue(m2.stateful)
    self.assertEqual(m2.dtype, dtypes.float32)
    self.assertEqual(len(m2.variables), 2)
  def test_mean_with_sample_weight(self):
    # Weighted mean is total(weighted values) / total(weights); the inline
    # arithmetic comments track both accumulators across calls.
    m = metrics.Mean(dtype=dtypes.float64)
    self.assertEqual(m.dtype, dtypes.float64)
    self.evaluate(variables.variables_initializer(m.variables))
    # check scalar weight
    result_t = m(100, sample_weight=0.5)
    self.assertEqual(self.evaluate(result_t), 50 / 0.5)
    self.assertEqual(self.evaluate(m.total), 50)
    self.assertEqual(self.evaluate(m.count), 0.5)
    # check weights not scalar and weights rank matches values rank
    result_t = m([1, 5], sample_weight=[1, 0.2])
    result = self.evaluate(result_t)
    self.assertAlmostEqual(result, 52 / 1.7, 2)
    self.assertAlmostEqual(self.evaluate(m.total), 52, 2)  # 50 + 1 + 5 * 0.2
    self.assertAlmostEqual(self.evaluate(m.count), 1.7, 2)  # 0.5 + 1.2
    # check weights broadcast
    result_t = m([1, 2], sample_weight=0.5)
    self.assertAlmostEqual(self.evaluate(result_t), 53.5 / 2.7, 2)
    self.assertAlmostEqual(self.evaluate(m.total), 53.5, 2)  # 52 + 0.5 + 1
    self.assertAlmostEqual(self.evaluate(m.count), 2.7, 2)  # 1.7 + 0.5 + 0.5
    # check weights squeeze
    result_t = m([1, 5], sample_weight=[[1], [0.2]])
    self.assertAlmostEqual(self.evaluate(result_t), 55.5 / 3.9, 2)
    self.assertAlmostEqual(self.evaluate(m.total), 55.5, 2)  # 53.5 + 1 + 1
    self.assertAlmostEqual(self.evaluate(m.count), 3.9, 2)  # 2.7 + 1.2
    # check weights expand
    result_t = m([[1], [5]], sample_weight=[1, 0.2])
    self.assertAlmostEqual(self.evaluate(result_t), 57.5 / 5.1, 2)
    self.assertAlmostEqual(self.evaluate(m.total), 57.5, 2)  # 55.5 + 1 + 1
    self.assertAlmostEqual(self.evaluate(m.count), 5.1, 2)  # 3.9 + 1.2
    # check values reduced to the dimensions of weight
    result_t = m([[[1., 2.], [3., 2.], [0.5, 4.]]], sample_weight=[0.5])
    result = np.round(self.evaluate(result_t), decimals=2)  # 58.5 / 5.6
    self.assertEqual(result, 10.45)
    self.assertEqual(np.round(self.evaluate(m.total), decimals=2), 58.54)
    self.assertEqual(np.round(self.evaluate(m.count), decimals=2), 5.6)
  def test_mean_graph_with_placeholder(self):
    # Graph-mode path: feed values/weights through placeholders of unknown shape.
    with context.graph_mode(), self.cached_session() as sess:
      m = metrics.Mean()
      v = array_ops.placeholder(dtypes.float32)
      w = array_ops.placeholder(dtypes.float32)
      self.evaluate(variables.variables_initializer(m.variables))
      # check __call__()
      result_t = m(v, sample_weight=w)
      result = sess.run(result_t, feed_dict=({v: 100, w: 0.5}))
      self.assertEqual(self.evaluate(m.total), 50)
      self.assertEqual(self.evaluate(m.count), 0.5)
      self.assertEqual(result, 50 / 0.5)
      # check update_state() and result()
      result = sess.run(result_t, feed_dict=({v: [1, 5], w: [1, 0.2]}))
      self.assertAlmostEqual(self.evaluate(m.total), 52, 2)  # 50 + 1 + 5 * 0.2
      self.assertAlmostEqual(self.evaluate(m.count), 1.7, 2)  # 0.5 + 1.2
      self.assertAlmostEqual(result, 52 / 1.7, 2)
  def test_save_restore(self):
    # Metric variables (total, count) must round-trip through a checkpoint:
    # restoring discards updates made after the save.
    checkpoint_directory = self.get_temp_dir()
    checkpoint_prefix = os.path.join(checkpoint_directory, 'ckpt')
    m = metrics.Mean()
    checkpoint = checkpointable_utils.Checkpoint(mean=m)
    self.evaluate(variables.variables_initializer(m.variables))
    # update state
    self.evaluate(m(100.))
    self.evaluate(m(200.))
    # save checkpoint and then add an update
    save_path = checkpoint.save(checkpoint_prefix)
    self.evaluate(m(1000.))
    # restore to the same checkpoint mean object
    checkpoint.restore(save_path).assert_consumed().run_restore_ops()
    self.evaluate(m(300.))
    self.assertEqual(200., self.evaluate(m.result()))
    # restore to a different checkpoint mean object
    restore_mean = metrics.Mean()
    restore_checkpoint = checkpointable_utils.Checkpoint(mean=restore_mean)
    status = restore_checkpoint.restore(save_path)
    restore_update = restore_mean(300.)
    status.assert_consumed().run_restore_ops()
    self.evaluate(restore_update)
    self.assertEqual(200., self.evaluate(restore_mean.result()))
    self.assertEqual(3, self.evaluate(restore_mean.count))
@test_util.run_all_in_graph_and_eager_modes
class KerasAccuracyTest(test.TestCase):
def test_accuracy(self):
acc_obj = metrics.Accuracy(name='my acc')
# check config
self.assertEqual(acc_obj.name, 'my acc')
self.assertTrue(acc_obj.stateful)
self.assertEqual(len(acc_obj.variables), 2)
self.assertEqual(acc_obj.dtype, dtypes.float32)
self.evaluate(variables.variables_initializer(acc_obj.variables))
# verify that correct value is returned
update_op = acc_obj.update_state([[1], [2], [3], [4]], [[1], [2], [3], [4]])
self.evaluate(update_op)
result = self.evaluate(acc_obj.result())
self.assertEqual(result, 1) # 2/2
# Check save and restore config
a2 = metrics.Accuracy.from_config(acc_obj.get_config())
self.assertEqual(a2.name, 'my acc')
self.assertTrue(a2.stateful)
self.assertEqual(len(a2.variables), 2)
self.assertEqual(a2.dtype, dtypes.float32)
# check with sample_weight
result_t = acc_obj([[2], [1]], [[2], [0]], sample_weight=[[0.5], [0.2]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.96, 2) # 4.5/4.7
def test_binary_accuracy(self):
acc_obj = metrics.BinaryAccuracy(name='my acc')
# check config
self.assertEqual(acc_obj.name, 'my acc')
self.assertTrue(acc_obj.stateful)
self.assertEqual(len(acc_obj.variables), 2)
self.assertEqual(acc_obj.dtype, dtypes.float32)
self.evaluate(variables.variables_initializer(acc_obj.variables))
# verify that correct value is returned
update_op = acc_obj.update_state([[1], [0]], [[1], [0]])
self.evaluate(update_op)
result = self.evaluate(acc_obj.result())
self.assertEqual(result, 1) # 2/2
# check y_pred squeeze
update_op = acc_obj.update_state([[1], [1]], [[[1]], [[0]]])
self.evaluate(update_op)
result = self.evaluate(acc_obj.result())
self.assertAlmostEqual(result, 0.75, 2) # 3/4
# check y_true squeeze
result_t = acc_obj([[[1]], [[1]]], [[1], [0]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.67, 2) # 4/6
# check with sample_weight
result_t = acc_obj([[1], [1]], [[1], [0]], [[0.5], [0.2]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.67, 2) # 4.5/6.7
def test_binary_accuracy_threshold(self):
acc_obj = metrics.BinaryAccuracy(threshold=0.7)
self.evaluate(variables.variables_initializer(acc_obj.variables))
result_t = acc_obj([[1], [1], [0], [0]], [[0.9], [0.6], [0.4], [0.8]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.5, 2)
def test_categorical_accuracy(self):
acc_obj = metrics.CategoricalAccuracy(name='my acc')
# check config
self.assertEqual(acc_obj.name, 'my acc')
self.assertTrue(acc_obj.stateful)
self.assertEqual(len(acc_obj.variables), 2)
self.assertEqual(acc_obj.dtype, dtypes.float32)
self.evaluate(variables.variables_initializer(acc_obj.variables))
# verify that correct value is returned
update_op = acc_obj.update_state([[0, 0, 1], [0, 1, 0]],
[[0.1, 0.1, 0.8], [0.05, 0.95, 0]])
self.evaluate(update_op)
result = self.evaluate(acc_obj.result())
self.assertEqual(result, 1) # 2/2
# check with sample_weight
result_t = acc_obj([[0, 0, 1], [0, 1, 0]],
[[0.1, 0.1, 0.8], [0.05, 0, 0.95]], [[0.5], [0.2]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.93, 2) # 2.5/2.7
def test_sparse_categorical_accuracy(self):
acc_obj = metrics.SparseCategoricalAccuracy(name='my acc')
# check config
self.assertEqual(acc_obj.name, 'my acc')
self.assertTrue(acc_obj.stateful)
self.assertEqual(len(acc_obj.variables), 2)
self.assertEqual(acc_obj.dtype, dtypes.float32)
self.evaluate(variables.variables_initializer(acc_obj.variables))
# verify that correct value is returned
update_op = acc_obj.update_state([[2], [1]],
[[0.1, 0.1, 0.8], [0.05, 0.95, 0]])
self.evaluate(update_op)
result = self.evaluate(acc_obj.result())
self.assertEqual(result, 1) # 2/2
# check with sample_weight
result_t = acc_obj([[2], [1]], [[0.1, 0.1, 0.8], [0.05, 0, 0.95]],
[[0.5], [0.2]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.93, 2) # 2.5/2.7
def test_sparse_categorical_accuracy_mismatched_dims(self):
acc_obj = metrics.SparseCategoricalAccuracy(name='my acc')
# check config
self.assertEqual(acc_obj.name, 'my acc')
self.assertTrue(acc_obj.stateful)
self.assertEqual(len(acc_obj.variables), 2)
self.assertEqual(acc_obj.dtype, dtypes.float32)
self.evaluate(variables.variables_initializer(acc_obj.variables))
# verify that correct value is returned
update_op = acc_obj.update_state([2, 1], [[0.1, 0.1, 0.8], [0.05, 0.95, 0]])
self.evaluate(update_op)
result = self.evaluate(acc_obj.result())
self.assertEqual(result, 1) # 2/2
# check with sample_weight
result_t = acc_obj([2, 1], [[0.1, 0.1, 0.8], [0.05, 0, 0.95]],
[[0.5], [0.2]])
result = self.evaluate(result_t)
self.assertAlmostEqual(result, 0.93, 2) # 2.5/2.7
def test_sparse_categorical_accuracy_mismatched_dims_dynamic(self):
  """Rank-1 labels fed via placeholders, so shapes are dynamic (graph mode)."""
  with context.graph_mode(), self.cached_session() as sess:
    acc_obj = metrics.SparseCategoricalAccuracy(name='my acc')
    self.evaluate(variables.variables_initializer(acc_obj.variables))
    # Placeholders with no static shape exercise the dynamic-dim path.
    t = array_ops.placeholder(dtypes.float32)
    p = array_ops.placeholder(dtypes.float32)
    w = array_ops.placeholder(dtypes.float32)
    result_t = acc_obj(t, p, w)
    result = sess.run(
        result_t,
        feed_dict=({
            t: [2, 1],
            p: [[0.1, 0.1, 0.8], [0.05, 0, 0.95]],
            w: [[0.5], [0.2]]
        }))
    # Fresh metric object: only the first sample (argmax 2 == label 2) is
    # correct, so weighted accuracy = 0.5 / (0.5 + 0.2).
    self.assertAlmostEqual(result, 0.71, 2)  # 0.5/0.7
@test_util.run_all_in_graph_and_eager_modes
class FalsePositivesTest(test.TestCase):
  """Tests for the `metrics.FalsePositives` confusion-matrix metric."""

  def test_config(self):
    """Name, variables and thresholds survive a config round trip."""
    fp_obj = metrics.FalsePositives(name='my_fp', thresholds=[0.4, 0.9])
    self.assertEqual(fp_obj.name, 'my_fp')
    self.assertEqual(len(fp_obj.variables), 1)
    self.assertEqual(fp_obj.thresholds, [0.4, 0.9])

    # Check save and restore config.
    fp_obj2 = metrics.FalsePositives.from_config(fp_obj.get_config())
    self.assertEqual(fp_obj2.name, 'my_fp')
    self.assertEqual(len(fp_obj2.variables), 1)
    self.assertEqual(fp_obj2.thresholds, [0.4, 0.9])

  def test_unweighted(self):
    fp_obj = metrics.FalsePositives()
    self.evaluate(variables.variables_initializer(fp_obj.variables))

    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    update_op = fp_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = fp_obj.result()
    # Rows contribute 1 + 2 + 0 + 4 false positives at the default threshold.
    self.assertAllClose(7., result)

  def test_weighted(self):
    fp_obj = metrics.FalsePositives()
    self.evaluate(variables.variables_initializer(fp_obj.variables))
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row: 1*1 + 2*1.5 + 0*2 + 4*2.5 = 14.
    sample_weight = constant_op.constant((1., 1.5, 2., 2.5))
    result = fp_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose(14., self.evaluate(result))

  def test_unweighted_with_thresholds(self):
    # One false-positive count is accumulated per threshold.
    fp_obj = metrics.FalsePositives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(fp_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))

    update_op = fp_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = fp_obj.result()
    self.assertAllClose([7., 4., 2.], result)

  def test_weighted_with_thresholds(self):
    fp_obj = metrics.FalsePositives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(fp_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))
    # Full per-element weight matrix.
    sample_weight = ((1.0, 2.0, 3.0, 5.0), (7.0, 11.0, 13.0, 17.0),
                     (19.0, 23.0, 29.0, 31.0), (5.0, 15.0, 10.0, 0))

    result = fp_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose([125., 42., 12.], self.evaluate(result))

  def test_threshold_limit(self):
    """Thresholds outside [0, 1] (or None) are rejected at construction."""
    with self.assertRaisesRegexp(
        ValueError,
        r'Threshold values must be in \[0, 1\]. Invalid values: \[-1, 2\]'):
      metrics.FalsePositives(thresholds=[-1, 0.5, 2])

    with self.assertRaisesRegexp(
        ValueError,
        r'Threshold values must be in \[0, 1\]. Invalid values: \[None\]'):
      metrics.FalsePositives(thresholds=[None])
@test_util.run_all_in_graph_and_eager_modes
class FalseNegativesTest(test.TestCase):
  """Tests for the `metrics.FalseNegatives` confusion-matrix metric."""

  def test_config(self):
    """Name, variables and thresholds survive a config round trip."""
    fn_obj = metrics.FalseNegatives(name='my_fn', thresholds=[0.4, 0.9])
    self.assertEqual(fn_obj.name, 'my_fn')
    self.assertEqual(len(fn_obj.variables), 1)
    self.assertEqual(fn_obj.thresholds, [0.4, 0.9])

    # Check save and restore config.
    fn_obj2 = metrics.FalseNegatives.from_config(fn_obj.get_config())
    self.assertEqual(fn_obj2.name, 'my_fn')
    self.assertEqual(len(fn_obj2.variables), 1)
    self.assertEqual(fn_obj2.thresholds, [0.4, 0.9])

  def test_unweighted(self):
    fn_obj = metrics.FalseNegatives()
    self.evaluate(variables.variables_initializer(fn_obj.variables))

    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    update_op = fn_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = fn_obj.result()
    # Rows contribute 1 + 0 + 2 + 0 false negatives at the default threshold.
    self.assertAllClose(3., result)

  def test_weighted(self):
    fn_obj = metrics.FalseNegatives()
    self.evaluate(variables.variables_initializer(fn_obj.variables))
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row: 1*1 + 0*1.5 + 2*2 + 0*2.5 = 5.
    sample_weight = constant_op.constant((1., 1.5, 2., 2.5))
    result = fn_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose(5., self.evaluate(result))

  def test_unweighted_with_thresholds(self):
    # One false-negative count is accumulated per threshold.
    fn_obj = metrics.FalseNegatives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(fn_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))

    update_op = fn_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = fn_obj.result()
    self.assertAllClose([1., 4., 6.], result)

  def test_weighted_with_thresholds(self):
    fn_obj = metrics.FalseNegatives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(fn_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))
    # Shape (4, 1): one weight broadcast across each row.
    sample_weight = ((3.0,), (5.0,), (7.0,), (4.0,))

    result = fn_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose([4., 16., 23.], self.evaluate(result))
@test_util.run_all_in_graph_and_eager_modes
class TrueNegativesTest(test.TestCase):
  """Tests for the `metrics.TrueNegatives` confusion-matrix metric."""

  def test_config(self):
    """Name, variables and thresholds survive a config round trip."""
    tn_obj = metrics.TrueNegatives(name='my_tn', thresholds=[0.4, 0.9])
    self.assertEqual(tn_obj.name, 'my_tn')
    self.assertEqual(len(tn_obj.variables), 1)
    self.assertEqual(tn_obj.thresholds, [0.4, 0.9])

    # Check save and restore config.
    tn_obj2 = metrics.TrueNegatives.from_config(tn_obj.get_config())
    self.assertEqual(tn_obj2.name, 'my_tn')
    self.assertEqual(len(tn_obj2.variables), 1)
    self.assertEqual(tn_obj2.thresholds, [0.4, 0.9])

  def test_unweighted(self):
    tn_obj = metrics.TrueNegatives()
    self.evaluate(variables.variables_initializer(tn_obj.variables))

    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    update_op = tn_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = tn_obj.result()
    # Rows contribute 2 + 0 + 1 + 0 true negatives at the default threshold.
    self.assertAllClose(3., result)

  def test_weighted(self):
    tn_obj = metrics.TrueNegatives()
    self.evaluate(variables.variables_initializer(tn_obj.variables))
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row: 2*1 + 0*1.5 + 1*2 + 0*2.5 = 4.
    sample_weight = constant_op.constant((1., 1.5, 2., 2.5))
    result = tn_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose(4., self.evaluate(result))

  def test_unweighted_with_thresholds(self):
    # One true-negative count is accumulated per threshold.
    tn_obj = metrics.TrueNegatives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(tn_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))

    update_op = tn_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = tn_obj.result()
    self.assertAllClose([2., 5., 7.], result)

  def test_weighted_with_thresholds(self):
    tn_obj = metrics.TrueNegatives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(tn_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))
    # Shape (1, 4): one weight per column, broadcast across all rows.
    sample_weight = ((0.0, 2.0, 3.0, 5.0),)

    result = tn_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose([5., 15., 23.], self.evaluate(result))
@test_util.run_all_in_graph_and_eager_modes
class TruePositivesTest(test.TestCase):
  """Tests for the `metrics.TruePositives` confusion-matrix metric."""

  def test_config(self):
    """Name, variables and thresholds survive a config round trip."""
    tp_obj = metrics.TruePositives(name='my_tp', thresholds=[0.4, 0.9])
    self.assertEqual(tp_obj.name, 'my_tp')
    self.assertEqual(len(tp_obj.variables), 1)
    self.assertEqual(tp_obj.thresholds, [0.4, 0.9])

    # Check save and restore config.
    tp_obj2 = metrics.TruePositives.from_config(tp_obj.get_config())
    self.assertEqual(tp_obj2.name, 'my_tp')
    self.assertEqual(len(tp_obj2.variables), 1)
    self.assertEqual(tp_obj2.thresholds, [0.4, 0.9])

  def test_unweighted(self):
    tp_obj = metrics.TruePositives()
    self.evaluate(variables.variables_initializer(tp_obj.variables))

    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    update_op = tp_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = tp_obj.result()
    # Rows contribute 1 + 3 + 2 + 1 true positives at the default threshold.
    self.assertAllClose(7., result)

  def test_weighted(self):
    tp_obj = metrics.TruePositives()
    self.evaluate(variables.variables_initializer(tp_obj.variables))
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row: 1*1 + 3*1.5 + 2*2 + 1*2.5 = 12.
    sample_weight = constant_op.constant((1., 1.5, 2., 2.5))
    result = tp_obj(y_true, y_pred, sample_weight=sample_weight)
    self.assertAllClose(12., self.evaluate(result))

  def test_unweighted_with_thresholds(self):
    # One true-positive count is accumulated per threshold.
    tp_obj = metrics.TruePositives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(tp_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))

    update_op = tp_obj.update_state(y_true, y_pred)
    self.evaluate(update_op)
    result = tp_obj.result()
    self.assertAllClose([6., 3., 1.], result)

  def test_weighted_with_thresholds(self):
    tp_obj = metrics.TruePositives(thresholds=[0.15, 0.5, 0.85])
    self.evaluate(variables.variables_initializer(tp_obj.variables))

    y_pred = constant_op.constant(((0.9, 0.2, 0.8, 0.1), (0.2, 0.9, 0.7, 0.6),
                                   (0.1, 0.2, 0.4, 0.3), (0, 1, 0.7, 0.3)))
    y_true = constant_op.constant(((0, 1, 1, 0), (1, 0, 0, 0), (0, 0, 0, 0),
                                   (1, 1, 1, 1)))

    # Scalar weight: unweighted per-threshold counts (6, 3, 1) scaled by 37.
    result = tp_obj(y_true, y_pred, sample_weight=37.)
    self.assertAllClose([222., 111., 37.], self.evaluate(result))
@test_util.run_all_in_graph_and_eager_modes
class PrecisionTest(test.TestCase):
  """Tests for the `metrics.Precision` metric (TP / (TP + FP))."""

  def test_config(self):
    """Name, variables and thresholds survive a config round trip."""
    p_obj = metrics.Precision(name='my_precision', thresholds=[0.4, 0.9])
    self.assertEqual(p_obj.name, 'my_precision')
    self.assertEqual(len(p_obj.variables), 2)
    self.assertEqual([v.name for v in p_obj.variables],
                     ['true_positives:0', 'false_positives:0'])
    self.assertEqual(p_obj.thresholds, [0.4, 0.9])

    # Check save and restore config.
    p_obj2 = metrics.Precision.from_config(p_obj.get_config())
    self.assertEqual(p_obj2.name, 'my_precision')
    self.assertEqual(len(p_obj2.variables), 2)
    self.assertEqual(p_obj2.thresholds, [0.4, 0.9])

  def test_value_is_idempotent(self):
    """`result()` must not mutate state: repeated reads agree."""
    p_obj = metrics.Precision(thresholds=[0.3, 0.72])
    y_pred = random_ops.random_uniform(shape=(10, 3))
    y_true = random_ops.random_uniform(shape=(10, 3))
    update_op = p_obj.update_state(y_true, y_pred)
    self.evaluate(variables.variables_initializer(p_obj.variables))

    # Run several updates.
    for _ in range(10):
      self.evaluate(update_op)

    # Then verify idempotency.
    initial_precision = self.evaluate(p_obj.result())
    for _ in range(10):
      self.assertArrayNear(initial_precision, self.evaluate(p_obj.result()),
                           1e-3)

  def test_unweighted(self):
    p_obj = metrics.Precision()
    y_pred = constant_op.constant([1, 0, 1, 0], shape=(1, 4))
    y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
    self.evaluate(variables.variables_initializer(p_obj.variables))
    result = p_obj(y_true, y_pred)
    # 1 TP, 1 FP -> precision 0.5.
    self.assertAlmostEqual(0.5, self.evaluate(result))

  def test_unweighted_all_incorrect(self):
    p_obj = metrics.Precision(thresholds=[0.5])
    # Labels are the complement of predictions, so every prediction is wrong.
    inputs = np.random.randint(0, 2, size=(100, 1))
    y_pred = constant_op.constant(inputs)
    y_true = constant_op.constant(1 - inputs)
    self.evaluate(variables.variables_initializer(p_obj.variables))
    result = p_obj(y_true, y_pred)
    self.assertAlmostEqual(0, self.evaluate(result))

  def test_weighted(self):
    p_obj = metrics.Precision()
    y_pred = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
    y_true = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    self.evaluate(variables.variables_initializer(p_obj.variables))
    result = p_obj(
        y_true,
        y_pred,
        sample_weight=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))
    # TPs at positions with weights 3 and 4; predicted positives carry
    # weights (1 + 3) and (4 + 2).
    weighted_tp = 3.0 + 4.0
    weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
    expected_precision = weighted_tp / weighted_positives
    self.assertAlmostEqual(expected_precision, self.evaluate(result))

  def test_div_by_zero(self):
    """No predicted positives at all: precision is defined as 0, not NaN."""
    p_obj = metrics.Precision()
    y_pred = constant_op.constant([0, 0, 0, 0])
    y_true = constant_op.constant([0, 0, 0, 0])
    self.evaluate(variables.variables_initializer(p_obj.variables))
    result = p_obj(y_true, y_pred)
    self.assertEqual(0, self.evaluate(result))

  def test_unweighted_with_threshold(self):
    # One precision value per threshold; 0.6 is positive at 0.5 but not 0.7.
    p_obj = metrics.Precision(thresholds=[0.5, 0.7])
    y_pred = constant_op.constant([1, 0, 0.6, 0], shape=(1, 4))
    y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
    self.evaluate(variables.variables_initializer(p_obj.variables))
    result = p_obj(y_true, y_pred)
    # NOTE(review): the tolerance here is 0 (exact match); both expected
    # values (0.5 and 0) are exactly representable, so this passes, but a
    # small positive tolerance would be more robust.
    self.assertArrayNear([0.5, 0.], self.evaluate(result), 0)

  def test_weighted_with_threshold(self):
    p_obj = metrics.Precision(thresholds=[0.5, 1.])
    y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
    y_pred = constant_op.constant([[1, 0], [0.6, 0]],
                                  shape=(2, 2),
                                  dtype=dtypes.float32)
    weights = constant_op.constant([[4, 0], [3, 1]],
                                   shape=(2, 2),
                                   dtype=dtypes.float32)
    self.evaluate(variables.variables_initializer(p_obj.variables))
    result = p_obj(y_true, y_pred, sample_weight=weights)
    # At threshold 0.5: TP weight 3 (the 0.6 prediction), FP weight 4.
    weighted_tp = 0 + 3.
    weighted_positives = (0 + 3.) + (4. + 0.)
    expected_precision = weighted_tp / weighted_positives
    self.assertArrayNear([expected_precision, 0], self.evaluate(result), 1e-3)

  def test_multiple_updates(self):
    """Counts accumulate linearly across updates, leaving the ratio stable."""
    p_obj = metrics.Precision(thresholds=[0.5, 1.])
    y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
    y_pred = constant_op.constant([[1, 0], [0.6, 0]],
                                  shape=(2, 2),
                                  dtype=dtypes.float32)
    weights = constant_op.constant([[4, 0], [3, 1]],
                                   shape=(2, 2),
                                   dtype=dtypes.float32)
    self.evaluate(variables.variables_initializer(p_obj.variables))
    update_op = p_obj.update_state(y_true, y_pred, sample_weight=weights)
    for _ in range(2):
      self.evaluate(update_op)

    weighted_tp = (0 + 3.) + (0 + 3.)
    weighted_positives = ((0 + 3.) + (4. + 0.)) + ((0 + 3.) + (4. + 0.))
    expected_precision = weighted_tp / weighted_positives
    self.assertArrayNear([expected_precision, 0], self.evaluate(p_obj.result()),
                         1e-3)
@test_util.run_all_in_graph_and_eager_modes
class RecallTest(test.TestCase):
  """Tests for the `metrics.Recall` metric (TP / (TP + FN))."""

  def test_config(self):
    """Name, variables and thresholds survive a config round trip."""
    r_obj = metrics.Recall(name='my_recall', thresholds=[0.4, 0.9])
    self.assertEqual(r_obj.name, 'my_recall')
    self.assertEqual(len(r_obj.variables), 2)
    self.assertEqual([v.name for v in r_obj.variables],
                     ['true_positives:0', 'false_negatives:0'])
    self.assertEqual(r_obj.thresholds, [0.4, 0.9])

    # Check save and restore config.
    r_obj2 = metrics.Recall.from_config(r_obj.get_config())
    self.assertEqual(r_obj2.name, 'my_recall')
    self.assertEqual(len(r_obj2.variables), 2)
    self.assertEqual(r_obj2.thresholds, [0.4, 0.9])

  def test_value_is_idempotent(self):
    """`result()` must not mutate state: repeated reads agree."""
    r_obj = metrics.Recall(thresholds=[0.3, 0.72])
    y_pred = random_ops.random_uniform(shape=(10, 3))
    y_true = random_ops.random_uniform(shape=(10, 3))
    update_op = r_obj.update_state(y_true, y_pred)
    self.evaluate(variables.variables_initializer(r_obj.variables))

    # Run several updates.
    for _ in range(10):
      self.evaluate(update_op)

    # Then verify idempotency.
    initial_recall = self.evaluate(r_obj.result())
    for _ in range(10):
      self.assertArrayNear(initial_recall, self.evaluate(r_obj.result()), 1e-3)

  def test_unweighted(self):
    r_obj = metrics.Recall()
    y_pred = constant_op.constant([1, 0, 1, 0], shape=(1, 4))
    y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
    self.evaluate(variables.variables_initializer(r_obj.variables))
    result = r_obj(y_true, y_pred)
    # 1 TP, 1 FN -> recall 0.5.
    self.assertAlmostEqual(0.5, self.evaluate(result))

  def test_unweighted_all_incorrect(self):
    r_obj = metrics.Recall(thresholds=[0.5])
    # Labels are the complement of predictions, so every prediction is wrong.
    inputs = np.random.randint(0, 2, size=(100, 1))
    y_pred = constant_op.constant(inputs)
    y_true = constant_op.constant(1 - inputs)
    self.evaluate(variables.variables_initializer(r_obj.variables))
    result = r_obj(y_true, y_pred)
    self.assertAlmostEqual(0, self.evaluate(result))

  def test_weighted(self):
    r_obj = metrics.Recall()
    y_pred = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
    y_true = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    self.evaluate(variables.variables_initializer(r_obj.variables))
    result = r_obj(
        y_true,
        y_pred,
        sample_weight=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))
    # TPs carry weights 3 and 1; actual positives carry (2 + 3) and (4 + 1).
    weighted_tp = 3.0 + 1.0
    weighted_t = (2.0 + 3.0) + (4.0 + 1.0)
    expected_recall = weighted_tp / weighted_t
    self.assertAlmostEqual(expected_recall, self.evaluate(result))

  def test_div_by_zero(self):
    """No actual positives at all: recall is defined as 0, not NaN."""
    r_obj = metrics.Recall()
    y_pred = constant_op.constant([0, 0, 0, 0])
    y_true = constant_op.constant([0, 0, 0, 0])
    self.evaluate(variables.variables_initializer(r_obj.variables))
    result = r_obj(y_true, y_pred)
    self.assertEqual(0, self.evaluate(result))

  def test_unweighted_with_threshold(self):
    # One recall value per threshold; 0.6 is positive at 0.5 but not 0.7.
    r_obj = metrics.Recall(thresholds=[0.5, 0.7])
    y_pred = constant_op.constant([1, 0, 0.6, 0], shape=(1, 4))
    y_true = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
    self.evaluate(variables.variables_initializer(r_obj.variables))
    result = r_obj(y_true, y_pred)
    # NOTE(review): the tolerance here is 0 (exact match); both expected
    # values (0.5 and 0) are exactly representable, so this passes, but a
    # small positive tolerance would be more robust.
    self.assertArrayNear([0.5, 0.], self.evaluate(result), 0)

  def test_weighted_with_threshold(self):
    r_obj = metrics.Recall(thresholds=[0.5, 1.])
    y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
    y_pred = constant_op.constant([[1, 0], [0.6, 0]],
                                  shape=(2, 2),
                                  dtype=dtypes.float32)
    weights = constant_op.constant([[1, 4], [3, 2]],
                                   shape=(2, 2),
                                   dtype=dtypes.float32)
    self.evaluate(variables.variables_initializer(r_obj.variables))
    result = r_obj(y_true, y_pred, sample_weight=weights)
    # At threshold 0.5: TP weight 3 (the 0.6 prediction), FN weight 4, so
    # the denominator is the weighted count of actual positives (3 + 4).
    weighted_tp = 0 + 3.
    weighted_positives = (0 + 3.) + (4. + 0.)
    expected_recall = weighted_tp / weighted_positives
    self.assertArrayNear([expected_recall, 0], self.evaluate(result), 1e-3)

  def test_multiple_updates(self):
    """Counts accumulate linearly across updates, leaving the ratio stable."""
    r_obj = metrics.Recall(thresholds=[0.5, 1.])
    y_true = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
    y_pred = constant_op.constant([[1, 0], [0.6, 0]],
                                  shape=(2, 2),
                                  dtype=dtypes.float32)
    weights = constant_op.constant([[1, 4], [3, 2]],
                                   shape=(2, 2),
                                   dtype=dtypes.float32)
    self.evaluate(variables.variables_initializer(r_obj.variables))
    update_op = r_obj.update_state(y_true, y_pred, sample_weight=weights)
    for _ in range(2):
      self.evaluate(update_op)

    weighted_tp = (0 + 3.) + (0 + 3.)
    weighted_positives = ((0 + 3.) + (4. + 0.)) + ((0 + 3.) + (4. + 0.))
    expected_recall = weighted_tp / weighted_positives
    self.assertArrayNear([expected_recall, 0], self.evaluate(r_obj.result()),
                         1e-3)
@test_util.run_all_in_graph_and_eager_modes
class SensitivityAtSpecificityTest(test.TestCase, parameterized.TestCase):
  """Tests for the `metrics.SensitivityAtSpecificity` metric."""

  def test_config(self):
    """Name, variables, target specificity and num_thresholds round-trip."""
    s_obj = metrics.SensitivityAtSpecificity(
        0.4, num_thresholds=100, name='sensitivity_at_specificity_1')
    self.assertEqual(s_obj.name, 'sensitivity_at_specificity_1')
    self.assertLen(s_obj.variables, 4)
    self.assertEqual(s_obj.specificity, 0.4)
    self.assertEqual(s_obj.num_thresholds, 100)

    # Check save and restore config.
    s_obj2 = metrics.SensitivityAtSpecificity.from_config(s_obj.get_config())
    self.assertEqual(s_obj2.name, 'sensitivity_at_specificity_1')
    self.assertLen(s_obj2.variables, 4)
    self.assertEqual(s_obj2.specificity, 0.4)
    self.assertEqual(s_obj2.num_thresholds, 100)

  def test_value_is_idempotent(self):
    """`result()` must not mutate state: repeated reads agree."""
    s_obj = metrics.SensitivityAtSpecificity(0.7)
    y_pred = random_ops.random_uniform((10, 3),
                                       maxval=1,
                                       dtype=dtypes.float32,
                                       seed=1)
    y_true = random_ops.random_uniform((10, 3),
                                       maxval=2,
                                       dtype=dtypes.int64,
                                       seed=1)
    update_op = s_obj.update_state(y_true, y_pred)
    self.evaluate(variables.variables_initializer(s_obj.variables))

    # Run several updates.
    for _ in range(10):
      self.evaluate(update_op)

    # Then verify idempotency.
    # BUG FIX: `1e-3` was previously passed as the third positional argument
    # of assertAlmostEqual, which is `places` (an int), not a tolerance.
    # It only "passed" because the values compared exactly equal; any real
    # difference would have raised TypeError from round(x, 1e-3) instead of
    # a clean assertion failure. Pass it as `delta=` instead.
    initial_sensitivity = self.evaluate(s_obj.result())
    for _ in range(10):
      self.assertAlmostEqual(
          initial_sensitivity, self.evaluate(s_obj.result()), delta=1e-3)

  def test_unweighted_all_correct(self):
    s_obj = metrics.SensitivityAtSpecificity(0.7)
    # Predictions exactly equal labels -> sensitivity 1 at any specificity.
    inputs = np.random.randint(0, 2, size=(100, 1))
    y_pred = constant_op.constant(inputs, dtype=dtypes.float32)
    y_true = constant_op.constant(inputs)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred)
    self.assertAlmostEqual(1, self.evaluate(result))

  def test_unweighted_high_specificity(self):
    s_obj = metrics.SensitivityAtSpecificity(0.8)
    pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9]
    label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    y_pred = constant_op.constant(pred_values, dtype=dtypes.float32)
    y_true = constant_op.constant(label_values)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred)
    # At the threshold achieving specificity >= 0.8, 4 of 5 positives are
    # recovered.
    self.assertAlmostEqual(0.8, self.evaluate(result))

  def test_unweighted_low_specificity(self):
    s_obj = metrics.SensitivityAtSpecificity(0.4)
    pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    y_pred = constant_op.constant(pred_values, dtype=dtypes.float32)
    y_true = constant_op.constant(label_values)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred)
    self.assertAlmostEqual(0.6, self.evaluate(result))

  @parameterized.parameters([dtypes.bool, dtypes.int32, dtypes.float32])
  def test_weighted(self, label_dtype):
    """Weighted counts, with labels cast to several supported dtypes."""
    s_obj = metrics.SensitivityAtSpecificity(0.4)
    pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    weight_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

    y_pred = constant_op.constant(pred_values, dtype=dtypes.float32)
    y_true = math_ops.cast(label_values, dtype=label_dtype)
    weights = constant_op.constant(weight_values)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred, sample_weight=weights)
    self.assertAlmostEqual(0.675, self.evaluate(result))

  def test_invalid_specificity(self):
    """Target specificity outside [0, 1] is rejected at construction."""
    with self.assertRaisesRegexp(
        ValueError, r'`specificity` must be in the range \[0, 1\].'):
      metrics.SensitivityAtSpecificity(-1)

  def test_invalid_num_thresholds(self):
    """`num_thresholds` must be a positive integer."""
    with self.assertRaisesRegexp(ValueError, '`num_thresholds` must be > 0.'):
      metrics.SensitivityAtSpecificity(0.4, num_thresholds=-1)
@test_util.run_all_in_graph_and_eager_modes
class SpecificityAtSensitivityTest(test.TestCase, parameterized.TestCase):
  """Tests for the `metrics.SpecificityAtSensitivity` metric."""

  def test_config(self):
    """Name, variables, target sensitivity and num_thresholds round-trip."""
    s_obj = metrics.SpecificityAtSensitivity(
        0.4, num_thresholds=100, name='specificity_at_sensitivity_1')
    self.assertEqual(s_obj.name, 'specificity_at_sensitivity_1')
    self.assertLen(s_obj.variables, 4)
    self.assertEqual(s_obj.sensitivity, 0.4)
    self.assertEqual(s_obj.num_thresholds, 100)

    # Check save and restore config.
    s_obj2 = metrics.SpecificityAtSensitivity.from_config(s_obj.get_config())
    self.assertEqual(s_obj2.name, 'specificity_at_sensitivity_1')
    self.assertLen(s_obj2.variables, 4)
    self.assertEqual(s_obj2.sensitivity, 0.4)
    self.assertEqual(s_obj2.num_thresholds, 100)

  def test_value_is_idempotent(self):
    """`result()` must not mutate state: repeated reads agree."""
    s_obj = metrics.SpecificityAtSensitivity(0.7)
    y_pred = random_ops.random_uniform((10, 3),
                                       maxval=1,
                                       dtype=dtypes.float32,
                                       seed=1)
    y_true = random_ops.random_uniform((10, 3),
                                       maxval=2,
                                       dtype=dtypes.int64,
                                       seed=1)
    update_op = s_obj.update_state(y_true, y_pred)
    self.evaluate(variables.variables_initializer(s_obj.variables))

    # Run several updates.
    for _ in range(10):
      self.evaluate(update_op)

    # Then verify idempotency.
    # BUG FIX: `1e-3` was previously passed as the third positional argument
    # of assertAlmostEqual, which is `places` (an int), not a tolerance.
    # It only "passed" because the values compared exactly equal; any real
    # difference would have raised TypeError from round(x, 1e-3) instead of
    # a clean assertion failure. Pass it as `delta=` instead.
    initial_specificity = self.evaluate(s_obj.result())
    for _ in range(10):
      self.assertAlmostEqual(
          initial_specificity, self.evaluate(s_obj.result()), delta=1e-3)

  def test_unweighted_all_correct(self):
    s_obj = metrics.SpecificityAtSensitivity(0.7)
    # Predictions exactly equal labels -> specificity 1 at any sensitivity.
    inputs = np.random.randint(0, 2, size=(100, 1))
    y_pred = constant_op.constant(inputs, dtype=dtypes.float32)
    y_true = constant_op.constant(inputs)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred)
    self.assertAlmostEqual(1, self.evaluate(result))

  def test_unweighted_high_sensitivity(self):
    s_obj = metrics.SpecificityAtSensitivity(0.8)
    pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9]
    label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    y_pred = constant_op.constant(pred_values, dtype=dtypes.float32)
    y_true = constant_op.constant(label_values)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred)
    self.assertAlmostEqual(0.4, self.evaluate(result))

  def test_unweighted_low_sensitivity(self):
    s_obj = metrics.SpecificityAtSensitivity(0.4)
    pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    y_pred = constant_op.constant(pred_values, dtype=dtypes.float32)
    y_true = constant_op.constant(label_values)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred)
    self.assertAlmostEqual(0.6, self.evaluate(result))

  @parameterized.parameters([dtypes.bool, dtypes.int32, dtypes.float32])
  def test_weighted(self, label_dtype):
    """Weighted counts, with labels cast to several supported dtypes."""
    s_obj = metrics.SpecificityAtSensitivity(0.4)
    pred_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    label_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    weight_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

    y_pred = constant_op.constant(pred_values, dtype=dtypes.float32)
    y_true = math_ops.cast(label_values, dtype=label_dtype)
    weights = constant_op.constant(weight_values)
    self.evaluate(variables.variables_initializer(s_obj.variables))
    result = s_obj(y_true, y_pred, sample_weight=weights)
    self.assertAlmostEqual(0.4, self.evaluate(result))

  def test_invalid_sensitivity(self):
    """Target sensitivity outside [0, 1] is rejected at construction."""
    with self.assertRaisesRegexp(
        ValueError, r'`sensitivity` must be in the range \[0, 1\].'):
      metrics.SpecificityAtSensitivity(-1)

  def test_invalid_num_thresholds(self):
    """`num_thresholds` must be a positive integer."""
    with self.assertRaisesRegexp(ValueError, '`num_thresholds` must be > 0.'):
      metrics.SpecificityAtSensitivity(0.4, num_thresholds=-1)
@test_util.run_all_in_graph_and_eager_modes
class CosineProximityTest(test.TestCase):
  """Tests for the `metrics.CosineProximity` metric."""

  def test_config(self):
    """Name and dtype survive a get_config/from_config round trip."""
    cos = metrics.CosineProximity(name='my_cos', dtype=dtypes.int32)
    self.assertEqual(cos.name, 'my_cos')
    self.assertEqual(cos._dtype, dtypes.int32)

    restored = metrics.CosineProximity.from_config(cos.get_config())
    self.assertEqual(restored.name, 'my_cos')
    self.assertEqual(restored._dtype, dtypes.int32)

  def test_unweighted(self):
    cos = metrics.CosineProximity()
    self.evaluate(variables.variables_initializer(cos.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    self.evaluate(cos.update_state(labels, preds))
    self.assertAllClose(-0.60723, cos.result(), atol=1e-5)

  def test_weighted(self):
    cos = metrics.CosineProximity()
    self.evaluate(variables.variables_initializer(cos.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row.
    weights = constant_op.constant((1., 1.5, 2., 2.5))

    value = cos(labels, preds, sample_weight=weights)
    self.assertAllClose(-0.59916, self.evaluate(value), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class MeanAbsoluteErrorTest(test.TestCase):
  """Tests for the `metrics.MeanAbsoluteError` metric."""

  def test_config(self):
    """Name and dtype survive a get_config/from_config round trip."""
    mae = metrics.MeanAbsoluteError(name='my_mae', dtype=dtypes.int32)
    self.assertEqual(mae.name, 'my_mae')
    self.assertEqual(mae._dtype, dtypes.int32)

    restored = metrics.MeanAbsoluteError.from_config(mae.get_config())
    self.assertEqual(restored.name, 'my_mae')
    self.assertEqual(restored._dtype, dtypes.int32)

  def test_unweighted(self):
    mae = metrics.MeanAbsoluteError()
    self.evaluate(variables.variables_initializer(mae.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    # Half of the 20 entries disagree by exactly 1 -> mean error 0.5.
    self.evaluate(mae.update_state(labels, preds))
    self.assertAllClose(0.5, mae.result(), atol=1e-5)

  def test_weighted(self):
    mae = metrics.MeanAbsoluteError()
    self.evaluate(variables.variables_initializer(mae.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row.
    weights = constant_op.constant((1., 1.5, 2., 2.5))

    value = mae(labels, preds, sample_weight=weights)
    self.assertAllClose(0.54285, self.evaluate(value), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class MeanAbsolutePercentageErrorTest(test.TestCase):
  """Tests for the `metrics.MeanAbsolutePercentageError` metric."""

  def test_config(self):
    """Name and dtype survive a get_config/from_config round trip."""
    mape = metrics.MeanAbsolutePercentageError(
        name='my_mape', dtype=dtypes.int32)
    self.assertEqual(mape.name, 'my_mape')
    self.assertEqual(mape._dtype, dtypes.int32)

    restored = metrics.MeanAbsolutePercentageError.from_config(
        mape.get_config())
    self.assertEqual(restored.name, 'my_mape')
    self.assertEqual(restored._dtype, dtypes.int32)

  def test_unweighted(self):
    mape = metrics.MeanAbsolutePercentageError()
    self.evaluate(variables.variables_initializer(mape.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    # Zero labels make the percentage error blow up toward the epsilon
    # clamp, hence the huge expected value.
    self.evaluate(mape.update_state(labels, preds))
    self.assertAllClose(35e7, mape.result(), atol=1e-5)

  def test_weighted(self):
    mape = metrics.MeanAbsolutePercentageError()
    self.evaluate(variables.variables_initializer(mape.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row.
    weights = constant_op.constant((1., 1.5, 2., 2.5))

    value = mape(labels, preds, sample_weight=weights)
    self.assertAllClose(40e7, self.evaluate(value), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class MeanSquaredErrorTest(test.TestCase):
  """Tests for the `metrics.MeanSquaredError` metric."""

  def test_config(self):
    """Name and dtype survive a get_config/from_config round trip."""
    mse = metrics.MeanSquaredError(name='my_mse', dtype=dtypes.int32)
    self.assertEqual(mse.name, 'my_mse')
    self.assertEqual(mse._dtype, dtypes.int32)

    restored = metrics.MeanSquaredError.from_config(mse.get_config())
    self.assertEqual(restored.name, 'my_mse')
    self.assertEqual(restored._dtype, dtypes.int32)

  def test_unweighted(self):
    mse = metrics.MeanSquaredError()
    self.evaluate(variables.variables_initializer(mse.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))

    # Binary values, so squared error equals absolute error: mean 0.5.
    self.evaluate(mse.update_state(labels, preds))
    self.assertAllClose(0.5, mse.result(), atol=1e-5)

  def test_weighted(self):
    mse = metrics.MeanSquaredError()
    self.evaluate(variables.variables_initializer(mse.variables))

    labels = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    preds = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                  (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    # One weight per row.
    weights = constant_op.constant((1., 1.5, 2., 2.5))

    value = mse(labels, preds, sample_weight=weights)
    self.assertAllClose(0.54285, self.evaluate(value), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class MeanSquaredLogarithmicErrorTest(test.TestCase):
  """Tests for the MeanSquaredLogarithmicError metric."""

  def _fixture(self):
    # Shared 4x5 binary labels/predictions used by both value tests.
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    return y_true, y_pred

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.MeanSquaredLogarithmicError(
        name='my_msle', dtype=dtypes.int32)
    restored = metrics.MeanSquaredLogarithmicError.from_config(
        original.get_config())
    for msle_obj in (original, restored):
      self.assertEqual(msle_obj.name, 'my_msle')
      self.assertEqual(msle_obj._dtype, dtypes.int32)

  def test_unweighted(self):
    msle_obj = metrics.MeanSquaredLogarithmicError()
    self.evaluate(variables.variables_initializer(msle_obj.variables))
    y_true, y_pred = self._fixture()
    self.evaluate(msle_obj.update_state(y_true, y_pred))
    self.assertAllClose(0.24022, msle_obj.result(), atol=1e-5)

  def test_weighted(self):
    msle_obj = metrics.MeanSquaredLogarithmicError()
    self.evaluate(variables.variables_initializer(msle_obj.variables))
    y_true, y_pred = self._fixture()
    weights = constant_op.constant((1., 1.5, 2., 2.5))
    result = msle_obj(y_true, y_pred, sample_weight=weights)
    self.assertAllClose(0.26082, self.evaluate(result), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class HingeTest(test.TestCase):
  """Tests for the Hinge metric."""

  def _fixture(self):
    # Shared 4x5 binary labels/predictions used by both value tests.
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    return y_true, y_pred

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.Hinge(name='hinge', dtype=dtypes.int32)
    restored = metrics.Hinge.from_config(original.get_config())
    for hinge_obj in (original, restored):
      self.assertEqual(hinge_obj.name, 'hinge')
      self.assertEqual(hinge_obj._dtype, dtypes.int32)

  def test_unweighted(self):
    hinge_obj = metrics.Hinge()
    self.evaluate(variables.variables_initializer(hinge_obj.variables))
    y_true, y_pred = self._fixture()
    self.evaluate(hinge_obj.update_state(y_true, y_pred))
    self.assertAllClose(0.65, hinge_obj.result(), atol=1e-5)

  def test_weighted(self):
    hinge_obj = metrics.Hinge()
    self.evaluate(variables.variables_initializer(hinge_obj.variables))
    y_true, y_pred = self._fixture()
    weights = constant_op.constant((1., 1.5, 2., 2.5))
    result = hinge_obj(y_true, y_pred, sample_weight=weights)
    self.assertAllClose(0.65714, self.evaluate(result), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class SquaredHingeTest(test.TestCase):
  """Tests for the SquaredHinge metric."""

  def _fixture(self):
    # Shared 4x5 binary labels/predictions used by both value tests.
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    return y_true, y_pred

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.SquaredHinge(name='sq_hinge', dtype=dtypes.int32)
    restored = metrics.SquaredHinge.from_config(original.get_config())
    for sq_hinge_obj in (original, restored):
      self.assertEqual(sq_hinge_obj.name, 'sq_hinge')
      self.assertEqual(sq_hinge_obj._dtype, dtypes.int32)

  def test_unweighted(self):
    sq_hinge_obj = metrics.SquaredHinge()
    self.evaluate(variables.variables_initializer(sq_hinge_obj.variables))
    y_true, y_pred = self._fixture()
    self.evaluate(sq_hinge_obj.update_state(y_true, y_pred))
    self.assertAllClose(0.65, sq_hinge_obj.result(), atol=1e-5)

  def test_weighted(self):
    sq_hinge_obj = metrics.SquaredHinge()
    self.evaluate(variables.variables_initializer(sq_hinge_obj.variables))
    y_true, y_pred = self._fixture()
    weights = constant_op.constant((1., 1.5, 2., 2.5))
    result = sq_hinge_obj(y_true, y_pred, sample_weight=weights)
    self.assertAllClose(0.65714, self.evaluate(result), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class CategoricalHingeTest(test.TestCase):
  """Tests for the CategoricalHinge metric."""

  def _fixture(self):
    # Shared 4x5 binary labels/predictions used by both value tests.
    y_true = constant_op.constant(((0, 1, 0, 1, 0), (0, 0, 1, 1, 1),
                                   (1, 1, 1, 1, 0), (0, 0, 0, 0, 1)))
    y_pred = constant_op.constant(((0, 0, 1, 1, 0), (1, 1, 1, 1, 1),
                                   (0, 1, 0, 1, 0), (1, 1, 1, 1, 1)))
    return y_true, y_pred

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.CategoricalHinge(name='cat_hinge', dtype=dtypes.int32)
    restored = metrics.CategoricalHinge.from_config(original.get_config())
    for cat_hinge_obj in (original, restored):
      self.assertEqual(cat_hinge_obj.name, 'cat_hinge')
      self.assertEqual(cat_hinge_obj._dtype, dtypes.int32)

  def test_unweighted(self):
    cat_hinge_obj = metrics.CategoricalHinge()
    self.evaluate(variables.variables_initializer(cat_hinge_obj.variables))
    y_true, y_pred = self._fixture()
    self.evaluate(cat_hinge_obj.update_state(y_true, y_pred))
    self.assertAllClose(0.5, cat_hinge_obj.result(), atol=1e-5)

  def test_weighted(self):
    cat_hinge_obj = metrics.CategoricalHinge()
    self.evaluate(variables.variables_initializer(cat_hinge_obj.variables))
    y_true, y_pred = self._fixture()
    weights = constant_op.constant((1., 1.5, 2., 2.5))
    result = cat_hinge_obj(y_true, y_pred, sample_weight=weights)
    self.assertAllClose(0.5, self.evaluate(result), atol=1e-5)
@test_util.run_all_in_graph_and_eager_modes
class RootMeanSquaredErrorTest(test.TestCase):
  """Tests for the RootMeanSquaredError metric."""

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.RootMeanSquaredError(name='rmse', dtype=dtypes.int32)
    restored = metrics.RootMeanSquaredError.from_config(original.get_config())
    for rmse_obj in (original, restored):
      self.assertEqual(rmse_obj.name, 'rmse')
      self.assertEqual(rmse_obj._dtype, dtypes.int32)

  def test_unweighted(self):
    rmse_obj = metrics.RootMeanSquaredError()
    self.evaluate(variables.variables_initializer(rmse_obj.variables))
    self.evaluate(rmse_obj.update_state(
        constant_op.constant((2, 4, 6)), constant_op.constant((1, 3, 2))))
    # error = [-1, -1, -4], square(error) = [1, 1, 16], mean = 18/3 = 6
    self.assertAllClose(math.sqrt(6), rmse_obj.result(), atol=1e-3)

  def test_weighted(self):
    rmse_obj = metrics.RootMeanSquaredError()
    self.evaluate(variables.variables_initializer(rmse_obj.variables))
    # Zero weights mask out samples 1 and 3: mean((4-3)^2, (8-3)^2) = 13.
    result = rmse_obj(
        constant_op.constant((2, 4, 6, 8)),
        constant_op.constant((1, 3, 2, 3)),
        sample_weight=constant_op.constant((0, 1, 0, 1)))
    self.assertAllClose(math.sqrt(13), self.evaluate(result), atol=1e-3)
@test_util.run_all_in_graph_and_eager_modes
class TopKCategoricalAccuracyTest(test.TestCase):
  """Tests for the TopKCategoricalAccuracy metric."""

  def _accuracy(self, a_obj, y_true, y_pred):
    # Initialize the metric's variables, run a single update and return the
    # evaluated scalar result.
    self.evaluate(variables.variables_initializer(a_obj.variables))
    return self.evaluate(a_obj(y_true, y_pred))

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.TopKCategoricalAccuracy(name='topkca', dtype=dtypes.int32)
    restored = metrics.TopKCategoricalAccuracy.from_config(original.get_config())
    for a_obj in (original, restored):
      self.assertEqual(a_obj.name, 'topkca')
      self.assertEqual(a_obj._dtype, dtypes.int32)

  def test_correctness(self):
    y_true = constant_op.constant([[0, 0, 1], [0, 1, 0]])
    y_pred = constant_op.constant([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
    # Default k: both the samples match.
    self.assertEqual(
        1, self._accuracy(metrics.TopKCategoricalAccuracy(), y_true, y_pred))
    # With `k` < 5: only sample #2 matches.
    self.assertEqual(
        0.5,
        self._accuracy(metrics.TopKCategoricalAccuracy(k=1), y_true, y_pred))
    # With `k` > 5: only 1 sample matches.
    y_true = constant_op.constant([[0, 0, 1, 0, 0, 0, 0],
                                   [0, 1, 0, 0, 0, 0, 0]])
    y_pred = constant_op.constant([[0.5, 0.9, 0.1, 0.7, 0.6, 0.5, 0.4],
                                   [0.05, 0.95, 0, 0, 0, 0, 0]])
    self.assertEqual(
        0.5,
        self._accuracy(metrics.TopKCategoricalAccuracy(k=6), y_true, y_pred))
@test_util.run_all_in_graph_and_eager_modes
class SparseTopKCategoricalAccuracyTest(test.TestCase):
  """Tests for the SparseTopKCategoricalAccuracy metric."""

  def _accuracy(self, a_obj, y_true, y_pred):
    # Initialize the metric's variables, run a single update and return the
    # evaluated scalar result.
    self.evaluate(variables.variables_initializer(a_obj.variables))
    return self.evaluate(a_obj(y_true, y_pred))

  def test_config(self):
    # Constructor arguments must survive a get_config/from_config round trip.
    original = metrics.SparseTopKCategoricalAccuracy(
        name='stopkca', dtype=dtypes.int32)
    restored = metrics.SparseTopKCategoricalAccuracy.from_config(
        original.get_config())
    for a_obj in (original, restored):
      self.assertEqual(a_obj.name, 'stopkca')
      self.assertEqual(a_obj._dtype, dtypes.int32)

  def test_correctness(self):
    # Labels are sparse class indices rather than one-hot rows.
    y_true = constant_op.constant([2, 1])
    y_pred = constant_op.constant([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
    # Default k: both the samples match.
    self.assertEqual(
        1,
        self._accuracy(metrics.SparseTopKCategoricalAccuracy(), y_true, y_pred))
    # With `k` < 5: only sample #2 matches.
    self.assertEqual(
        0.5,
        self._accuracy(
            metrics.SparseTopKCategoricalAccuracy(k=1), y_true, y_pred))
    # With `k` > 5: only 1 sample matches.
    y_pred = constant_op.constant([[0.5, 0.9, 0.1, 0.7, 0.6, 0.5, 0.4],
                                   [0.05, 0.95, 0, 0, 0, 0, 0]])
    self.assertEqual(
        0.5,
        self._accuracy(
            metrics.SparseTopKCategoricalAccuracy(k=6), y_true, y_pred))
def _get_model(compile_metrics):
  """Builds a tiny two-layer model compiled with the given metrics.

  Used by ResetStatesTest to drive metric accumulators through
  model.evaluate().
  """
  model = testing_utils.get_model_from_layers(
      [layers.Dense(3, activation='relu', kernel_initializer='ones'),
       layers.Dense(1, activation='sigmoid', kernel_initializer='ones')],
      input_shape=(4,))
  model.compile(
      loss='mae',
      metrics=compile_metrics,
      optimizer=RMSPropOptimizer(learning_rate=0.001),
      run_eagerly=testing_utils.should_run_eagerly())
  return model
@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
class ResetStatesTest(keras_parameterized.TestCase):
  """Verifies metric state is reset between successive evaluate() calls."""

  def _evaluate_twice(self, model, x, y, check_state):
    # Run two evaluations over the same data. If reset worked, the second
    # run observes the same accumulator values as the first (not doubled).
    for _ in range(2):
      model.evaluate(x, y)
      check_state()

  def test_reset_states_false_positives(self):
    fp_obj = metrics.FalsePositives()
    model = _get_model([fp_obj])
    x = np.ones((100, 4))
    y = np.zeros((100, 1))
    self._evaluate_twice(model, x, y, lambda: self.assertEqual(
        self.evaluate(fp_obj.accumulator), 100.))

  def test_reset_states_false_negatives(self):
    fn_obj = metrics.FalseNegatives()
    model = _get_model([fn_obj])
    x = np.zeros((100, 4))
    y = np.ones((100, 1))
    self._evaluate_twice(model, x, y, lambda: self.assertEqual(
        self.evaluate(fn_obj.accumulator), 100.))

  def test_reset_states_true_negatives(self):
    tn_obj = metrics.TrueNegatives()
    model = _get_model([tn_obj])
    x = np.zeros((100, 4))
    y = np.zeros((100, 1))
    self._evaluate_twice(model, x, y, lambda: self.assertEqual(
        self.evaluate(tn_obj.accumulator), 100.))

  def test_reset_states_true_positives(self):
    tp_obj = metrics.TruePositives()
    model = _get_model([tp_obj])
    x = np.ones((100, 4))
    y = np.ones((100, 1))
    self._evaluate_twice(model, x, y, lambda: self.assertEqual(
        self.evaluate(tp_obj.accumulator), 100.))

  def test_reset_states_precision(self):
    p_obj = metrics.Precision()
    model = _get_model([p_obj])
    x = np.concatenate((np.ones((50, 4)), np.ones((50, 4))))
    y = np.concatenate((np.ones((50, 1)), np.zeros((50, 1))))

    def check():
      self.assertEqual(self.evaluate(p_obj.tp), 50.)
      self.assertEqual(self.evaluate(p_obj.fp), 50.)

    self._evaluate_twice(model, x, y, check)

  def test_reset_states_recall(self):
    r_obj = metrics.Recall()
    model = _get_model([r_obj])
    x = np.concatenate((np.ones((50, 4)), np.zeros((50, 4))))
    y = np.concatenate((np.ones((50, 1)), np.ones((50, 1))))

    def check():
      self.assertEqual(self.evaluate(r_obj.tp), 50.)
      self.assertEqual(self.evaluate(r_obj.fn), 50.)

    self._evaluate_twice(model, x, y, check)

  def _check_confusion_matrix_reset(self, s_obj):
    # Data puts 25 samples in each confusion-matrix quadrant.
    model = _get_model([s_obj])
    x = np.concatenate((np.ones((25, 4)), np.zeros((25, 4)), np.zeros((25, 4)),
                        np.ones((25, 4))))
    y = np.concatenate((np.ones((25, 1)), np.zeros((25, 1)), np.ones((25, 1)),
                        np.zeros((25, 1))))

    def check():
      for var in (s_obj.tp, s_obj.fp, s_obj.fn, s_obj.tn):
        self.assertEqual(self.evaluate(var), 25.)

    self._evaluate_twice(model, x, y, check)

  def test_reset_states_sensitivity_at_specificity(self):
    self._check_confusion_matrix_reset(
        metrics.SensitivityAtSpecificity(0.5, num_thresholds=1))

  def test_reset_states_specificity_at_sensitivity(self):
    self._check_confusion_matrix_reset(
        metrics.SpecificityAtSensitivity(0.5, num_thresholds=1))
# Standard TensorFlow test entry point.
if __name__ == '__main__':
  test.main()
| codeparrot/github-code-clean |
# coding: utf-8
"""test_isort.py.
Tests all major functionality of the isort library
Should be run using py.test by simply running py.test in the isort project directory
Copyright (C) 2013 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import codecs
import os
import shutil
import tempfile
from isort.isort import SortImports
from isort.pie_slice import *
from isort.settings import WrapModes
# Fixture import statements shared across the tests below.
SHORT_IMPORT = "from third_party import lib1, lib2, lib3, lib4"
# An import wide enough to force every wrap mode to split it across lines.
REALLY_LONG_IMPORT = ("from third_party import lib1, lib2, lib3, lib4, lib5, lib6, lib7, lib8, lib9, lib10, lib11,"
                      "lib12, lib13, lib14, lib15, lib16, lib17, lib18, lib20, lib21, lib22")
# Same long import with a trailing comment, to exercise comment placement
# while wrapping.
REALLY_LONG_IMPORT_WITH_COMMENT = ("from third_party import lib1, lib2, lib3, lib4, lib5, lib6, lib7, lib8, lib9, "
                                   "lib10, lib11, lib12, lib13, lib14, lib15, lib16, lib17, lib18, lib20, lib21, lib22"
                                   " # comment")
def test_happy_path():
    """Test the most basic use case, straight imports no code, simply not organized by category."""
    source = ("import sys\n"
              "import os\n"
              "import myproject.test\n"
              "import django.settings")
    expected = ("import os\n"
                "import sys\n"
                "\n"
                "import django.settings\n"
                "\n"
                "import myproject.test\n")
    assert SortImports(file_contents=source, known_third_party=['django']).output == expected
def test_code_intermixed():
    """Defines what should happen when isort encounters imports intermixed with
    code.

    (it should pull them all to the top)
    """
    source = ("import sys\n"
              "print('yo')\n"
              "print('I like to put code between imports cause I want stuff to break')\n"
              "import myproject.test\n")
    expected = ("import sys\n"
                "\n"
                "import myproject.test\n"
                "\n"
                "print('yo')\n"
                "print('I like to put code between imports cause I want stuff to break')\n")
    assert SortImports(file_contents=source).output == expected
def test_correct_space_between_imports():
    """Ensure after imports a correct amount of space (in newlines) is
    enforced.

    (2 for method, class, or decorator definitions 1 for anything else)
    """
    # (source, expected) pairs: definitions get two blank lines after the
    # imports; anything else gets one.
    cases = (
        ("import sys\n"
         "def my_method():\n"
         " print('hello world')\n",
         "import sys\n"
         "\n"
         "\n"
         "def my_method():\n"
         " print('hello world')\n"),
        ("import sys\n"
         "@my_decorator\n"
         "def my_method():\n"
         " print('hello world')\n",
         "import sys\n"
         "\n"
         "\n"
         "@my_decorator\n"
         "def my_method():\n"
         " print('hello world')\n"),
        ("import sys\n"
         "class MyClass(object):\n"
         " pass\n",
         "import sys\n"
         "\n"
         "\n"
         "class MyClass(object):\n"
         " pass\n"),
        ("import sys\n"
         "print('yo')\n",
         "import sys\n"
         "\n"
         "print('yo')\n"),
    )
    for source, expected in cases:
        assert SortImports(file_contents=source).output == expected
def test_sort_on_number():
    """Ensure numbers get sorted logically (10 > 9 not the other way around)"""
    source = ("import lib10\n"
              "import lib9\n")
    expected = ("import lib9\n"
                "import lib10\n")
    assert SortImports(file_contents=source).output == expected
def test_line_length():
    """Ensure isort enforces the set line_length."""
    # The first wrapped line must never exceed the configured limit.
    assert len(SortImports(file_contents=REALLY_LONG_IMPORT, line_length=80).output.split("\n")[0]) <= 80
    assert len(SortImports(file_contents=REALLY_LONG_IMPORT, line_length=120).output.split("\n")[0]) <= 120
    # Exact wrapped output at a small limit.
    test_output = SortImports(file_contents=REALLY_LONG_IMPORT, line_length=42).output
    assert test_output == ("from third_party import (lib1, lib2, lib3,\n"
                           " lib4, lib5, lib6,\n"
                           " lib7, lib8, lib9,\n"
                           " lib10, lib11,\n"
                           " lib12, lib13,\n"
                           " lib14, lib15,\n"
                           " lib16, lib17,\n"
                           " lib18, lib20,\n"
                           " lib21, lib22)\n")
    # A wrap_length below line_length forces one import per continuation line.
    test_output = SortImports(file_contents=REALLY_LONG_IMPORT, line_length=42, wrap_length=32).output
    assert test_output == ("from third_party import (lib1,\n"
                           " lib2,\n"
                           " lib3,\n"
                           " lib4,\n"
                           " lib5,\n"
                           " lib6,\n"
                           " lib7,\n"
                           " lib8,\n"
                           " lib9,\n"
                           " lib10,\n"
                           " lib11,\n"
                           " lib12,\n"
                           " lib13,\n"
                           " lib14,\n"
                           " lib15,\n"
                           " lib16,\n"
                           " lib17,\n"
                           " lib18,\n"
                           " lib20,\n"
                           " lib21,\n"
                           " lib22)\n")
def test_output_modes():
    """Test setting isort to use various output modes works as expected"""
    # GRID mode: fill each parenthesized line up to line_length.
    test_output_grid = SortImports(file_contents=REALLY_LONG_IMPORT,
                                   multi_line_output=WrapModes.GRID, line_length=40).output
    assert test_output_grid == ("from third_party import (lib1, lib2,\n"
                                " lib3, lib4,\n"
                                " lib5, lib6,\n"
                                " lib7, lib8,\n"
                                " lib9, lib10,\n"
                                " lib11, lib12,\n"
                                " lib13, lib14,\n"
                                " lib15, lib16,\n"
                                " lib17, lib18,\n"
                                " lib20, lib21,\n"
                                " lib22)\n")
    # VERTICAL mode: one import per parenthesized line.
    test_output_vertical = SortImports(file_contents=REALLY_LONG_IMPORT,
                                       multi_line_output=WrapModes.VERTICAL, line_length=40).output
    assert test_output_vertical == ("from third_party import (lib1,\n"
                                    " lib2,\n"
                                    " lib3,\n"
                                    " lib4,\n"
                                    " lib5,\n"
                                    " lib6,\n"
                                    " lib7,\n"
                                    " lib8,\n"
                                    " lib9,\n"
                                    " lib10,\n"
                                    " lib11,\n"
                                    " lib12,\n"
                                    " lib13,\n"
                                    " lib14,\n"
                                    " lib15,\n"
                                    " lib16,\n"
                                    " lib17,\n"
                                    " lib18,\n"
                                    " lib20,\n"
                                    " lib21,\n"
                                    " lib22)\n")
    # VERTICAL mode keeps a trailing comment on the first line.
    comment_output_vertical = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
                                          multi_line_output=WrapModes.VERTICAL, line_length=40).output
    assert comment_output_vertical == ("from third_party import (lib1, # comment\n"
                                       " lib2,\n"
                                       " lib3,\n"
                                       " lib4,\n"
                                       " lib5,\n"
                                       " lib6,\n"
                                       " lib7,\n"
                                       " lib8,\n"
                                       " lib9,\n"
                                       " lib10,\n"
                                       " lib11,\n"
                                       " lib12,\n"
                                       " lib13,\n"
                                       " lib14,\n"
                                       " lib15,\n"
                                       " lib16,\n"
                                       " lib17,\n"
                                       " lib18,\n"
                                       " lib20,\n"
                                       " lib21,\n"
                                       " lib22)\n")
    # HANGING_INDENT mode: backslash continuations instead of parentheses.
    test_output_hanging_indent = SortImports(file_contents=REALLY_LONG_IMPORT,
                                             multi_line_output=WrapModes.HANGING_INDENT,
                                             line_length=40, indent=" ").output
    assert test_output_hanging_indent == ("from third_party import lib1, lib2, \\\n"
                                          " lib3, lib4, lib5, lib6, lib7, \\\n"
                                          " lib8, lib9, lib10, lib11, lib12, \\\n"
                                          " lib13, lib14, lib15, lib16, lib17, \\\n"
                                          " lib18, lib20, lib21, lib22\n")
    comment_output_hanging_indent = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
                                                multi_line_output=WrapModes.HANGING_INDENT,
                                                line_length=40, indent=" ").output
    assert comment_output_hanging_indent == ("from third_party import lib1, \\ # comment\n"
                                             " lib2, lib3, lib4, lib5, lib6, \\\n"
                                             " lib7, lib8, lib9, lib10, lib11, \\\n"
                                             " lib12, lib13, lib14, lib15, lib16, \\\n"
                                             " lib17, lib18, lib20, lib21, lib22\n")
    # VERTICAL_HANGING_INDENT mode: opening paren alone, one import per line.
    test_output_vertical_indent = SortImports(file_contents=REALLY_LONG_IMPORT,
                                              multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
                                              line_length=40, indent=" ").output
    assert test_output_vertical_indent == ("from third_party import (\n"
                                           " lib1,\n"
                                           " lib2,\n"
                                           " lib3,\n"
                                           " lib4,\n"
                                           " lib5,\n"
                                           " lib6,\n"
                                           " lib7,\n"
                                           " lib8,\n"
                                           " lib9,\n"
                                           " lib10,\n"
                                           " lib11,\n"
                                           " lib12,\n"
                                           " lib13,\n"
                                           " lib14,\n"
                                           " lib15,\n"
                                           " lib16,\n"
                                           " lib17,\n"
                                           " lib18,\n"
                                           " lib20,\n"
                                           " lib21,\n"
                                           " lib22\n"
                                           ")\n")
    comment_output_vertical_indent = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
                                                 multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
                                                 line_length=40, indent=" ").output
    assert comment_output_vertical_indent == ("from third_party import ( # comment\n"
                                              " lib1,\n"
                                              " lib2,\n"
                                              " lib3,\n"
                                              " lib4,\n"
                                              " lib5,\n"
                                              " lib6,\n"
                                              " lib7,\n"
                                              " lib8,\n"
                                              " lib9,\n"
                                              " lib10,\n"
                                              " lib11,\n"
                                              " lib12,\n"
                                              " lib13,\n"
                                              " lib14,\n"
                                              " lib15,\n"
                                              " lib16,\n"
                                              " lib17,\n"
                                              " lib18,\n"
                                              " lib20,\n"
                                              " lib21,\n"
                                              " lib22\n"
                                              ")\n")
    # VERTICAL_GRID mode: opening paren alone, then grid-filled lines.
    test_output_vertical_grid = SortImports(file_contents=REALLY_LONG_IMPORT,
                                            multi_line_output=WrapModes.VERTICAL_GRID,
                                            line_length=40, indent=" ").output
    assert test_output_vertical_grid == ("from third_party import (\n"
                                         " lib1, lib2, lib3, lib4, lib5, lib6,\n"
                                         " lib7, lib8, lib9, lib10, lib11,\n"
                                         " lib12, lib13, lib14, lib15, lib16,\n"
                                         " lib17, lib18, lib20, lib21, lib22)\n")
    comment_output_vertical_grid = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
                                               multi_line_output=WrapModes.VERTICAL_GRID,
                                               line_length=40, indent=" ").output
    assert comment_output_vertical_grid == ("from third_party import ( # comment\n"
                                            " lib1, lib2, lib3, lib4, lib5, lib6,\n"
                                            " lib7, lib8, lib9, lib10, lib11,\n"
                                            " lib12, lib13, lib14, lib15, lib16,\n"
                                            " lib17, lib18, lib20, lib21, lib22)\n")
    # VERTICAL_GRID_GROUPED mode: like VERTICAL_GRID but the closing paren
    # gets its own line.
    test_output_vertical_grid_grouped = SortImports(file_contents=REALLY_LONG_IMPORT,
                                                    multi_line_output=WrapModes.VERTICAL_GRID_GROUPED,
                                                    line_length=40, indent=" ").output
    assert test_output_vertical_grid_grouped == ("from third_party import (\n"
                                                 " lib1, lib2, lib3, lib4, lib5, lib6,\n"
                                                 " lib7, lib8, lib9, lib10, lib11,\n"
                                                 " lib12, lib13, lib14, lib15, lib16,\n"
                                                 " lib17, lib18, lib20, lib21, lib22\n"
                                                 ")\n")
    comment_output_vertical_grid_grouped = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
                                                       multi_line_output=WrapModes.VERTICAL_GRID_GROUPED,
                                                       line_length=40, indent=" ").output
    assert comment_output_vertical_grid_grouped == ("from third_party import ( # comment\n"
                                                    " lib1, lib2, lib3, lib4, lib5, lib6,\n"
                                                    " lib7, lib8, lib9, lib10, lib11,\n"
                                                    " lib12, lib13, lib14, lib15, lib16,\n"
                                                    " lib17, lib18, lib20, lib21, lib22\n"
                                                    ")\n")
    # NOQA mode: leave the line unwrapped and tag it with a NOQA comment.
    output_noqa = SortImports(file_contents=REALLY_LONG_IMPORT_WITH_COMMENT,
                              multi_line_output=WrapModes.NOQA).output
    assert output_noqa == "from third_party import lib1, lib2, lib3, lib4, lib5, lib6, lib7, lib8, lib9, lib10, lib11, lib12, lib13, lib14, lib15, lib16, lib17, lib18, lib20, lib21, lib22 # NOQA comment\n" # NOQA
def test_qa_comment_case():
    """NOQA mode leaves already-commented import lines unwrapped and intact."""
    cases = ("from veryveryveryveryveryveryveryveryveryveryvery import X # NOQA",
             "import veryveryveryveryveryveryveryveryveryveryvery # NOQA")
    for source in cases:
        output = SortImports(file_contents=source, line_length=40,
                             multi_line_output=WrapModes.NOQA).output
        assert output == source + "\n"
def test_length_sort():
    """Test setting isort to sort on length instead of alphabetically."""
    source = ("import medium_sizeeeeeeeeeeeeee\n"
              "import shortie\n"
              "import looooooooooooooooooooooooooooooooooooooong\n"
              "import medium_sizeeeeeeeeeeeeea\n")
    expected = ("import shortie\n"
                "import medium_sizeeeeeeeeeeeeea\n"
                "import medium_sizeeeeeeeeeeeeee\n"
                "import looooooooooooooooooooooooooooooooooooooong\n")
    assert SortImports(file_contents=source, length_sort=True).output == expected
def test_convert_hanging():
    """Ensure that isort will convert hanging indents to correct indent
    method."""
    # Input uses backslash continuations; GRID mode must re-wrap it using
    # parentheses instead.
    test_input = ("from third_party import lib1, lib2, \\\n"
                  " lib3, lib4, lib5, lib6, lib7, \\\n"
                  " lib8, lib9, lib10, lib11, lib12, \\\n"
                  " lib13, lib14, lib15, lib16, lib17, \\\n"
                  " lib18, lib20, lib21, lib22\n")
    test_output = SortImports(file_contents=test_input, multi_line_output=WrapModes.GRID,
                              line_length=40).output
    assert test_output == ("from third_party import (lib1, lib2,\n"
                           " lib3, lib4,\n"
                           " lib5, lib6,\n"
                           " lib7, lib8,\n"
                           " lib9, lib10,\n"
                           " lib11, lib12,\n"
                           " lib13, lib14,\n"
                           " lib15, lib16,\n"
                           " lib17, lib18,\n"
                           " lib20, lib21,\n"
                           " lib22)\n")
def test_custom_indent():
    """Ensure setting a custom indent will work as expected."""
    # Plain string indent.
    test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
                              line_length=40, indent=" ", balanced_wrapping=False).output
    assert test_output == ("from third_party import lib1, lib2, \\\n"
                           " lib3, lib4, lib5, lib6, lib7, lib8, \\\n"
                           " lib9, lib10, lib11, lib12, lib13, \\\n"
                           " lib14, lib15, lib16, lib17, lib18, \\\n"
                           " lib20, lib21, lib22\n")
    # Quoted indent string ("' '") is accepted and unquoted.
    test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
                              line_length=40, indent="' '", balanced_wrapping=False).output
    assert test_output == ("from third_party import lib1, lib2, \\\n"
                           " lib3, lib4, lib5, lib6, lib7, lib8, \\\n"
                           " lib9, lib10, lib11, lib12, lib13, \\\n"
                           " lib14, lib15, lib16, lib17, lib18, \\\n"
                           " lib20, lib21, lib22\n")
    # The literal word "tab" produces tab-character indentation.
    test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
                              line_length=40, indent="tab", balanced_wrapping=False).output
    assert test_output == ("from third_party import lib1, lib2, \\\n"
                           "\tlib3, lib4, lib5, lib6, lib7, lib8, \\\n"
                           "\tlib9, lib10, lib11, lib12, lib13, \\\n"
                           "\tlib14, lib15, lib16, lib17, lib18, \\\n"
                           "\tlib20, lib21, lib22\n")
    # Integer indent.
    test_output = SortImports(file_contents=REALLY_LONG_IMPORT, multi_line_output=WrapModes.HANGING_INDENT,
                              line_length=40, indent=2, balanced_wrapping=False).output
    assert test_output == ("from third_party import lib1, lib2, \\\n"
                           " lib3, lib4, lib5, lib6, lib7, lib8, \\\n"
                           " lib9, lib10, lib11, lib12, lib13, \\\n"
                           " lib14, lib15, lib16, lib17, lib18, \\\n"
                           " lib20, lib21, lib22\n")
def test_use_parentheses():
    """With use_parentheses set, wrapped imports use parentheses rather than backslashes."""
    source = (
        "from fooooooooooooooooooooooooo.baaaaaaaaaaaaaaaaaaarrrrrrr import \\"
        " my_custom_function as my_special_function"
    )
    output = SortImports(
        file_contents=source, known_third_party=['django'],
        line_length=79, use_parentheses=True,
    ).output
    assert '(' in output
def test_skip():
    """Ensure skipping a single import will work as expected."""
    source = ("import myproject\n"
              "import django\n"
              "print('hey')\n"
              "import sys # isort:skip this import needs to be placed here\n\n\n\n\n\n\n")
    expected = ("import django\n"
                "\n"
                "import myproject\n"
                "\n"
                "print('hey')\n"
                "import sys # isort:skip this import needs to be placed here\n")
    assert SortImports(file_contents=source, known_third_party=['django']).output == expected
def test_skip_with_file_name():
    """Ensure skipping a file works even when file_contents is provided."""
    sort_attempt = SortImports(file_path='/baz.py',
                               file_contents="import django\nimport myproject\n",
                               known_third_party=['django'], skip=['baz.py'])
    assert sort_attempt.skipped
def test_force_to_top():
    """Ensure forcing a single import to the top of its category works as expected."""
    source = ("import lib6\n"
              "import lib2\n"
              "import lib5\n"
              "import lib1\n")
    expected = ("import lib5\n"
                "import lib1\n"
                "import lib2\n"
                "import lib6\n")
    assert SortImports(file_contents=source, force_to_top=['lib5']).output == expected
def test_add_imports():
    """Ensures adding imports works as expected."""
    source = ("import lib6\n"
              "import lib2\n"
              "import lib5\n"
              "import lib1\n\n")
    expected = ("import lib1\n"
                "import lib2\n"
                "import lib4\n"
                "import lib5\n"
                "import lib6\n"
                "import lib7\n")
    assert SortImports(file_contents=source,
                       add_imports=['import lib4', 'import lib7']).output == expected
    # Using simplified syntax
    expected = ("import lib1\n"
                "import lib2\n"
                "import lib4\n"
                "import lib5\n"
                "import lib6\n"
                "import lib7\n"
                "from lib8 import a\n")
    assert SortImports(file_contents=source,
                       add_imports=['lib4', 'lib7', 'lib8.a']).output == expected
    # On a file that has no pre-existing imports
    source = ('"""Module docstring"""\n'
              '\n'
              'class MyClass(object):\n'
              ' pass\n')
    expected = ('"""Module docstring"""\n'
                'from __future__ import print_function\n'
                '\n'
                '\n'
                'class MyClass(object):\n'
                ' pass\n')
    assert SortImports(
        file_contents=source,
        add_imports=['from __future__ import print_function']).output == expected
    # On a file that has no pre-existing imports, and no doc-string
    source = ('class MyClass(object):\n'
              ' pass\n')
    expected = ('from __future__ import print_function\n'
                '\n'
                '\n'
                'class MyClass(object):\n'
                ' pass\n')
    assert SortImports(
        file_contents=source,
        add_imports=['from __future__ import print_function']).output == expected
    # On a file with no content whatsoever
    assert SortImports(file_contents="", add_imports=['lib4']).output == ""
    # On a file with no content whatsoever, after force_adds is set to True
    assert SortImports(file_contents="", add_imports=['lib4'],
                       force_adds=True).output == "import lib4\n"
def test_remove_imports():
    """Ensures removing imports works as expected.

    Both bare-module names and full import statements may be passed to
    remove_imports.
    """
    test_input = ("import lib6\n"
                  "import lib2\n"
                  "import lib5\n"
                  "import lib1")
    test_output = SortImports(file_contents=test_input, remove_imports=['lib2', 'lib6']).output
    assert test_output == ("import lib1\n"
                           "import lib5\n")
    # Using natural syntax: full statements, including a from-import
    test_input = ("import lib6\n"
                  "import lib2\n"
                  "import lib5\n"
                  "import lib1\n"
                  "from lib8 import a")
    test_output = SortImports(file_contents=test_input, remove_imports=['import lib2', 'import lib6',
                                                                        'from lib8 import a']).output
    assert test_output == ("import lib1\n"
                           "import lib5\n")
def test_explicitly_local_import():
    """Ensure that explicitly local imports are separated."""
    test_input = ("import lib1\n"
                  "import lib2\n"
                  "import .lib6\n"
                  "from . import lib7")
    expected = ("import lib1\n"
                "import lib2\n"
                "\n"
                "import .lib6\n"
                "from . import lib7\n")
    # Local (dot-relative) imports must end up in their own trailing section.
    assert SortImports(file_contents=test_input).output == expected
def test_quotes_in_file():
    """Ensure imports within triple quotes don't get imported."""
    test_input = ('import os\n'
                  '\n'
                  '"""\n'
                  'Let us\n'
                  'import foo\n'
                  'okay?\n'
                  '"""\n')
    assert SortImports(file_contents=test_input).output == test_input
    # A quoted triple-quote is an ordinary string literal, not a delimiter.
    test_input = ('import os\n'
                  '\n'
                  "'\"\"\"'\n"
                  'import foo\n')
    assert SortImports(file_contents=test_input).output == ('import os\n'
                                                            '\n'
                                                            'import foo\n'
                                                            '\n'
                                                            "'\"\"\"'\n")
    # One-line docstrings open and close on the same line.
    test_input = ('import os\n'
                  '\n'
                  '"""Let us"""\n'
                  'import foo\n'
                  '"""okay?"""\n')
    assert SortImports(file_contents=test_input).output == ('import os\n'
                                                            '\n'
                                                            'import foo\n'
                                                            '\n'
                                                            '"""Let us"""\n'
                                                            '"""okay?"""\n')
    # Triple quotes inside a comment must not open a string.
    test_input = ('import os\n'
                  '\n'
                  '#"""\n'
                  'import foo\n'
                  '#"""')
    assert SortImports(file_contents=test_input).output == ('import os\n'
                                                            '\n'
                                                            'import foo\n'
                                                            '\n'
                                                            '#"""\n'
                                                            '#"""\n')
    # A backslash-continued single-quoted string spanning two lines.
    test_input = ('import os\n'
                  '\n'
                  "'\\\n"
                  "import foo'\n")
    assert SortImports(file_contents=test_input).output == test_input
    # An escaped quote inside a triple-quoted string must not close it.
    test_input = ('import os\n'
                  '\n'
                  "'''\n"
                  "\\'''\n"
                  'import junk\n'
                  "'''\n")
    assert SortImports(file_contents=test_input).output == test_input
def test_check_newline_in_imports(capsys):
    """Ensure check mode works correctly when newlines occur within imports."""
    test_input = ('from lib1 import (\n'
                  '    sub1,\n'
                  '    sub2,\n'
                  '    sub3\n)\n')
    # check=True only reports; verbose=True makes it print SUCCESS on stdout.
    SortImports(file_contents=test_input, multi_line_output=WrapModes.VERTICAL_HANGING_INDENT, line_length=20,
                check=True, verbose=True)
    captured_out, _captured_err = capsys.readouterr()
    assert 'SUCCESS' in captured_out
def test_forced_separate():
    """Ensure that forcing certain sub modules to show separately works as expected."""
    test_input = ('import sys\n'
                  'import warnings\n'
                  'from collections import OrderedDict\n'
                  '\n'
                  'from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation\n'
                  'from django.core.paginator import InvalidPage\n'
                  'from django.core.urlresolvers import reverse\n'
                  'from django.db import models\n'
                  'from django.db.models.fields import FieldDoesNotExist\n'
                  'from django.utils import six\n'
                  'from django.utils.deprecation import RenameMethodsBase\n'
                  'from django.utils.encoding import force_str, force_text\n'
                  'from django.utils.http import urlencode\n'
                  'from django.utils.translation import ugettext, ugettext_lazy\n'
                  '\n'
                  'from django.contrib.admin import FieldListFilter\n'
                  'from django.contrib.admin.exceptions import DisallowedModelAdminLookup\n'
                  'from django.contrib.admin.options import IncorrectLookupParameters, IS_POPUP_VAR, TO_FIELD_VAR\n')
    # django.contrib imports stay in their own trailing group; output is stable.
    assert SortImports(file_contents=test_input, forced_separate=['django.contrib'],
                       known_third_party=['django'], line_length=120, order_by_type=False).output == test_input
    # forced_separate also works for relative (dot-prefixed) modules.
    test_input = ('from .foo import bar\n'
                  '\n'
                  'from .y import ca\n')
    assert SortImports(file_contents=test_input, forced_separate=['.y'],
                       line_length=120, order_by_type=False).output == test_input
def test_default_section():
    """Test to ensure changing the default section works as expected.

    Unclassified modules fall into default_section; here myproject is routed
    to FIRSTPARTY first, then to STDLIB.
    """
    test_input = ("import sys\n"
                  "import os\n"
                  "import myproject.test\n"
                  "import django.settings")
    test_output = SortImports(file_contents=test_input, known_third_party=['django'],
                              default_section="FIRSTPARTY").output
    assert test_output == ("import os\n"
                           "import sys\n"
                           "\n"
                           "import django.settings\n"
                           "\n"
                           "import myproject.test\n")
    test_output_custom = SortImports(file_contents=test_input, known_third_party=['django'],
                                     default_section="STDLIB").output
    assert test_output_custom == ("import myproject.test\n"
                                  "import os\n"
                                  "import sys\n"
                                  "\n"
                                  "import django.settings\n")
def test_first_party_overrides_standard_section():
    """Test to ensure known_first_party takes priority over stdlib classification.

    'profile' is a real stdlib module; listing it in known_first_party must
    move it into the first-party section.
    """
    test_input = ("import sys\n"
                  "import os\n"
                  "import profile.test\n")
    test_output = SortImports(file_contents=test_input, known_first_party=['profile']).output
    assert test_output == ("import os\n"
                           "import sys\n"
                           "\n"
                           "import profile.test\n")
def test_thirdy_party_overrides_standard_section():
    """Test to ensure known_third_party takes priority over stdlib classification.

    NOTE(review): 'thirdy' in the function name is a long-standing typo;
    renaming would change the test id, so it is kept.
    """
    test_input = ("import sys\n"
                  "import os\n"
                  "import profile.test\n")
    test_output = SortImports(file_contents=test_input, known_third_party=['profile']).output
    assert test_output == ("import os\n"
                           "import sys\n"
                           "\n"
                           "import profile.test\n")
def test_force_single_line_imports():
    """Test to ensure forcing imports to each have their own line works as expected."""
    # One backslash-continued from-import with many names...
    test_input = ("from third_party import lib1, lib2, \\\n"
                  "    lib3, lib4, lib5, lib6, lib7, \\\n"
                  "    lib8, lib9, lib10, lib11, lib12, \\\n"
                  "    lib13, lib14, lib15, lib16, lib17, \\\n"
                  "    lib18, lib20, lib21, lib22\n")
    test_output = SortImports(file_contents=test_input, multi_line_output=WrapModes.GRID,
                              line_length=40, force_single_line=True).output
    # ...becomes one import per line, sorted naturally (lib2 before lib10).
    assert test_output == ("from third_party import lib1\n"
                           "from third_party import lib2\n"
                           "from third_party import lib3\n"
                           "from third_party import lib4\n"
                           "from third_party import lib5\n"
                           "from third_party import lib6\n"
                           "from third_party import lib7\n"
                           "from third_party import lib8\n"
                           "from third_party import lib9\n"
                           "from third_party import lib10\n"
                           "from third_party import lib11\n"
                           "from third_party import lib12\n"
                           "from third_party import lib13\n"
                           "from third_party import lib14\n"
                           "from third_party import lib15\n"
                           "from third_party import lib16\n"
                           "from third_party import lib17\n"
                           "from third_party import lib18\n"
                           "from third_party import lib20\n"
                           "from third_party import lib21\n"
                           "from third_party import lib22\n")
def test_force_single_line_long_imports():
    """Single-line forcing still applies when a line exceeds the length limit;
    NOQA mode marks the overlong line instead of wrapping it."""
    test_input = ("from veryveryveryveryveryvery import small, big\n")
    test_output = SortImports(file_contents=test_input, multi_line_output=WrapModes.NOQA,
                              line_length=40, force_single_line=True).output
    assert test_output == ("from veryveryveryveryveryvery import big\n"
                           "from veryveryveryveryveryvery import small  # NOQA\n")
def test_titled_imports():
    """Tests setting custom titled/commented import sections."""
    test_input = ("import sys\n"
                  "import unicodedata\n"
                  "import statistics\n"
                  "import os\n"
                  "import myproject.test\n"
                  "import django.settings")
    test_output = SortImports(file_contents=test_input, known_third_party=['django'],
                              import_heading_stdlib="Standard Library", import_heading_firstparty="My Stuff").output
    assert test_output == ("# Standard Library\n"
                           "import os\n"
                           "import statistics\n"
                           "import sys\n"
                           "import unicodedata\n"
                           "\n"
                           "import django.settings\n"
                           "\n"
                           "# My Stuff\n"
                           "import myproject.test\n")
    # Idempotence: re-sorting already-titled output must not duplicate headings.
    test_second_run = SortImports(file_contents=test_output, known_third_party=['django'],
                                  import_heading_stdlib="Standard Library", import_heading_firstparty="My Stuff").output
    assert test_second_run == test_output
def test_balanced_wrapping():
    """Tests balanced wrapping mode, where the length of individual lines maintain width."""
    test_input = ("from __future__ import (absolute_import, division, print_function,\n"
                  "                        unicode_literals)")
    expected = ("from __future__ import (absolute_import, division,\n"
                "                        print_function, unicode_literals)\n")
    result = SortImports(file_contents=test_input, line_length=70, balanced_wrapping=True).output
    assert result == expected
def test_relative_import_with_space():
    """Tests the case where the relation and the module that is being imported from is separated with a space."""
    source = ("from ... fields.sproqet import SproqetCollection")
    # The stray space after the relative dots must be normalized away.
    normalized = SortImports(file_contents=source).output
    assert normalized == ("from ...fields.sproqet import SproqetCollection\n")
def test_multiline_import():
    """Test the case where import spawns multiple lines with inconsistent indentation."""
    test_input = ("from pkg \\\n"
                  "    import stuff, other_suff \\\n"
                  "               more_stuff")
    assert SortImports(file_contents=test_input).output == ("from pkg import more_stuff, other_suff, stuff\n")
    # test again with a custom configuration
    custom_configuration = {'force_single_line': True,
                            'line_length': 120,
                            'known_first_party': ['asdf', 'qwer'],
                            'default_section': 'THIRDPARTY',
                            'forced_separate': 'asdf'}
    expected_output = ("from pkg import more_stuff\n"
                       "from pkg import other_suff\n"
                       "from pkg import stuff\n")
    assert SortImports(file_contents=test_input, **custom_configuration).output == expected_output
def test_single_multiline():
    """Test the case where a single import spawns multiple lines."""
    test_input = ("from os import\\\n"
                  "    getuid\n"
                  "\n"
                  "print getuid()\n")
    expected = ("from os import getuid\n"
                "\n"
                "print getuid()\n")
    # The backslash-continued import collapses onto a single line.
    assert SortImports(file_contents=test_input).output == expected
def test_atomic_mode():
    """Atomic mode must leave the input untouched when it fails to parse."""
    # without syntax error, everything works OK
    test_input = ("from b import d, c\n"
                  "from a import f, e\n")
    assert SortImports(file_contents=test_input, atomic=True).output == ("from a import e, f\n"
                                                                         "from b import c, d\n")
    # with syntax error content is not changed
    test_input += "while True print 'Hello world'"  # blatant syntax error
    assert SortImports(file_contents=test_input, atomic=True).output == test_input
def test_order_by_type():
    """Ensure order_by_type sorts CONSTANTS, then Classes, then functions
    within a from-import, while plain imports keep natural sorting."""
    test_input = "from module import Class, CONSTANT, function"
    assert SortImports(file_contents=test_input,
                       order_by_type=True).output == ("from module import CONSTANT, Class, function\n")
    # More complex sample data
    test_input = "from module import Class, CONSTANT, function, BASIC, Apple"
    assert SortImports(file_contents=test_input,
                       order_by_type=True).output == ("from module import BASIC, CONSTANT, Apple, Class, function\n")
    # Really complex sample data, to verify we don't mess with top level imports, only nested ones
    test_input = ("import StringIO\n"
                  "import glob\n"
                  "import os\n"
                  "import shutil\n"
                  "import tempfile\n"
                  "import time\n"
                  "from subprocess import PIPE, Popen, STDOUT\n")
    assert SortImports(file_contents=test_input, order_by_type=True).output == \
           ("import glob\n"
            "import os\n"
            "import shutil\n"
            "import StringIO\n"
            "import tempfile\n"
            "import time\n"
            "from subprocess import PIPE, STDOUT, Popen\n")
def test_custom_lines_after_import_section():
    """Test the case where the number of lines to output after imports has been explicitly set."""
    test_input = ("from a import b\n"
                  "foo = 'bar'\n")
    # default case is one space if not method or class after imports
    assert SortImports(file_contents=test_input).output == ("from a import b\n"
                                                            "\n"
                                                            "foo = 'bar'\n")
    # test again with a custom number of lines after the import section
    assert SortImports(file_contents=test_input, lines_after_imports=2).output == ("from a import b\n"
                                                                                   "\n"
                                                                                   "\n"
                                                                                   "foo = 'bar'\n")
def test_smart_lines_after_import_section():
    """Tests the default 'smart' behavior for dealing with lines after the import section"""
    # one space if not method or class after imports
    test_input = ("from a import b\n"
                  "foo = 'bar'\n")
    assert SortImports(file_contents=test_input).output == ("from a import b\n"
                                                            "\n"
                                                            "foo = 'bar'\n")
    # two spaces if a method or class after imports
    test_input = ("from a import b\n"
                  "def my_function():\n"
                  "    pass\n")
    assert SortImports(file_contents=test_input).output == ("from a import b\n"
                                                            "\n"
                                                            "\n"
                                                            "def my_function():\n"
                                                            "    pass\n")
    # two spaces if a method or class after imports - even if comment before function
    test_input = ("from a import b\n"
                  "# comment should be ignored\n"
                  "def my_function():\n"
                  "    pass\n")
    assert SortImports(file_contents=test_input).output == ("from a import b\n"
                                                            "\n"
                                                            "\n"
                                                            "# comment should be ignored\n"
                                                            "def my_function():\n"
                                                            "    pass\n")
    # ensure logic works with both style comments
    test_input = ("from a import b\n"
                  '"""\n'
                  "    comment should be ignored\n"
                  '"""\n'
                  "def my_function():\n"
                  "    pass\n")
    assert SortImports(file_contents=test_input).output == ("from a import b\n"
                                                            "\n"
                                                            "\n"
                                                            '"""\n'
                                                            "    comment should be ignored\n"
                                                            '"""\n'
                                                            "def my_function():\n"
                                                            "    pass\n")
def test_settings_combine_instead_of_overwrite():
    """Test to ensure settings combine logically, instead of fully overwriting."""
    defaults = SortImports().config['known_standard_library']
    # Adding an entry extends the default list...
    extended = SortImports(known_standard_library=['not_std_library']).config['known_standard_library']
    assert set(extended) == set(defaults + ['not_std_library'])
    # ...and a not_known_* entry subtracts from it.
    reduced = SortImports(not_known_standard_library=['thread']).config['known_standard_library']
    assert set(reduced) == set(item for item in defaults if item != 'thread')
def test_combined_from_and_as_imports():
    """Test to ensure it's possible to combine from and as imports."""
    test_input = ("from translate.misc.multistring import multistring\n"
                  "from translate.storage import base, factory\n"
                  "from translate.storage.placeables import general, parse as rich_parse\n")
    # With combine_as_imports enabled the already-combined input is stable.
    result = SortImports(file_contents=test_input, combine_as_imports=True).output
    assert result == test_input
def test_as_imports_with_line_length():
    """Ensure as-imports are split (not combined) and wrapped under a tight line length."""
    test_input = ("from translate.storage import base as storage_base\n"
                  "from translate.storage.placeables import general, parse as rich_parse\n")
    assert SortImports(file_contents=test_input, combine_as_imports=False, line_length=40).output == \
           ("from translate.storage import \\\n    base as storage_base\n"
            "from translate.storage.placeables import \\\n    parse as rich_parse\n"
            "from translate.storage.placeables import \\\n    general\n")
def test_keep_comments():
    """Test to ensure isort properly keeps comments intact after sorting."""
    # Straight Import
    test_input = ("import foo  # bar\n")
    assert SortImports(file_contents=test_input).output == test_input
    # Star import
    test_input_star = ("from foo import *  # bar\n")
    assert SortImports(file_contents=test_input_star).output == test_input_star
    # Force Single Line From Import
    test_input = ("from foo import bar  # comment\n")
    assert SortImports(file_contents=test_input, force_single_line=True).output == test_input
    # From import
    test_input = ("from foo import bar  # My Comment\n")
    assert SortImports(file_contents=test_input).output == test_input
    # More complicated case
    test_input = ("from a import b  # My Comment1\n"
                  "from a import c  # My Comment2\n")
    assert SortImports(file_contents=test_input).output == \
           ("from a import b  # My Comment1\n"
            "from a import c  # My Comment2\n")
    # Test case where imports comments make imports extend pass the line length
    test_input = ("from a import b  # My Comment1\n"
                  "from a import c  # My Comment2\n"
                  "from a import d\n")
    assert SortImports(file_contents=test_input, line_length=45).output == \
           ("from a import b  # My Comment1\n"
            "from a import c  # My Comment2\n"
            "from a import d\n")
    # Test case where imports with comments will be beyond line length limit
    test_input = ("from a import b, c  # My Comment1\n"
                  "from a import c, d  # My Comment2 is really really really really long\n")
    assert SortImports(file_contents=test_input, line_length=45).output == \
           ("from a import (b,  # My Comment1; My Comment2 is really really really really long\n"
            "               c, d)\n")
    # Test that comments are not stripped from 'import ... as ...' by default
    test_input = ("from a import b as bb  # b comment\n"
                  "from a import c as cc  # c comment\n")
    assert SortImports(file_contents=test_input).output == test_input
    # Test that 'import ... as ...' comments are not collected inappropriately
    test_input = ("from a import b as bb  # b comment\n"
                  "from a import c as cc  # c comment\n"
                  "from a import d\n")
    assert SortImports(file_contents=test_input).output == test_input
    assert SortImports(file_contents=test_input, combine_as_imports=True).output == (
        "from a import b as bb, c as cc, d  # b comment; c comment\n"
    )
def test_multiline_split_on_dot():
    """Test to ensure isort correctly handles multiline imports, even when split right after a '.'"""
    test_input = ("from my_lib.my_package.test.level_1.level_2.level_3.level_4.level_5.\\\n"
                  "    my_module import my_function")
    # The dotted path is re-joined; the wrap lands after 'import' instead.
    assert SortImports(file_contents=test_input, line_length=70).output == \
           ("from my_lib.my_package.test.level_1.level_2.level_3.level_4.level_5.my_module import \\\n"
            "    my_function\n")
def test_import_star():
    """Test to ensure isort handles star imports correctly"""
    test_input = ("from blah import *\n"
                  "from blah import _potato\n")
    # Without combine_star both lines survive as-is.
    expected_default = ("from blah import *\n"
                        "from blah import _potato\n")
    assert SortImports(file_contents=test_input).output == expected_default
    # With combine_star the explicit import folds into the star import.
    combined = SortImports(file_contents=test_input, combine_star=True).output
    assert combined == ("from blah import *\n")
def test_include_trailing_comma():
    """Test for the include_trailing_comma option, across every wrap mode."""
    test_output_grid = SortImports(
        file_contents=SHORT_IMPORT,
        multi_line_output=WrapModes.GRID,
        line_length=40,
        include_trailing_comma=True,
    ).output
    assert test_output_grid == (
        "from third_party import (lib1, lib2,\n"
        "                         lib3, lib4,)\n"
    )
    test_output_vertical = SortImports(
        file_contents=SHORT_IMPORT,
        multi_line_output=WrapModes.VERTICAL,
        line_length=40,
        include_trailing_comma=True,
    ).output
    assert test_output_vertical == (
        "from third_party import (lib1,\n"
        "                         lib2,\n"
        "                         lib3,\n"
        "                         lib4,)\n"
    )
    test_output_vertical_indent = SortImports(
        file_contents=SHORT_IMPORT,
        multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
        line_length=40,
        include_trailing_comma=True,
    ).output
    assert test_output_vertical_indent == (
        "from third_party import (\n"
        "    lib1,\n"
        "    lib2,\n"
        "    lib3,\n"
        "    lib4,\n"
        ")\n"
    )
    test_output_vertical_grid = SortImports(
        file_contents=SHORT_IMPORT,
        multi_line_output=WrapModes.VERTICAL_GRID,
        line_length=40,
        include_trailing_comma=True,
    ).output
    assert test_output_vertical_grid == (
        "from third_party import (\n"
        "    lib1, lib2, lib3, lib4,)\n"
    )
    test_output_vertical_grid_grouped = SortImports(
        file_contents=SHORT_IMPORT,
        multi_line_output=WrapModes.VERTICAL_GRID_GROUPED,
        line_length=40,
        include_trailing_comma=True,
    ).output
    assert test_output_vertical_grid_grouped == (
        "from third_party import (\n"
        "    lib1, lib2, lib3, lib4,\n"
        ")\n"
    )
def test_similar_to_std_library():
    """Test to ensure modules that are named similarly to a standard library import don't end up clobbered"""
    # 'times' must not be mistaken for the stdlib 'time' module.
    source = ("import datetime\n"
              "\n"
              "import requests\n"
              "import times\n")
    result = SortImports(file_contents=source, known_third_party=["requests", "times"]).output
    assert result == source
def test_correctly_placed_imports():
    """Test to ensure comments stay on correct placement after being sorted"""
    test_input = ("from a import b  # comment for b\n"
                  "from a import c  # comment for c\n")
    assert SortImports(file_contents=test_input, force_single_line=True).output == \
           ("from a import b  # comment for b\n"
            "from a import c  # comment for c\n")
    assert SortImports(file_contents=test_input).output == ("from a import b  # comment for b\n"
                                                            "from a import c  # comment for c\n")
    # Full example test from issue #143
    test_input = ("from itertools import chain\n"
                  "\n"
                  "from django.test import TestCase\n"
                  "from model_mommy import mommy\n"
                  "\n"
                  "from apps.clientman.commands.download_usage_rights import associate_right_for_item_product\n"
                  "from apps.clientman.commands.download_usage_rights import associate_right_for_item_product_d"
                  "efinition\n"
                  "from apps.clientman.commands.download_usage_rights import associate_right_for_item_product_d"
                  "efinition_platform\n"
                  "from apps.clientman.commands.download_usage_rights import associate_right_for_item_product_p"
                  "latform\n"
                  "from apps.clientman.commands.download_usage_rights import associate_right_for_territory_reta"
                  "il_model\n"
                  "from apps.clientman.commands.download_usage_rights import associate_right_for_territory_reta"
                  "il_model_definition_platform_provider  # noqa\n"
                  "from apps.clientman.commands.download_usage_rights import clear_right_for_item_product\n"
                  "from apps.clientman.commands.download_usage_rights import clear_right_for_item_product_defini"
                  "tion\n"
                  "from apps.clientman.commands.download_usage_rights import clear_right_for_item_product_defini"
                  "tion_platform\n"
                  "from apps.clientman.commands.download_usage_rights import clear_right_for_item_product_platfo"
                  "rm\n"
                  "from apps.clientman.commands.download_usage_rights import clear_right_for_territory_retail_mo"
                  "del\n"
                  "from apps.clientman.commands.download_usage_rights import clear_right_for_territory_retail_mo"
                  "del_definition_platform_provider  # noqa\n"
                  "from apps.clientman.commands.download_usage_rights import create_download_usage_right\n"
                  "from apps.clientman.commands.download_usage_rights import delete_download_usage_right\n"
                  "from apps.clientman.commands.download_usage_rights import disable_download_for_item_product\n"
                  "from apps.clientman.commands.download_usage_rights import disable_download_for_item_product_d"
                  "efinition\n"
                  "from apps.clientman.commands.download_usage_rights import disable_download_for_item_product_d"
                  "efinition_platform\n"
                  "from apps.clientman.commands.download_usage_rights import disable_download_for_item_product_p"
                  "latform\n"
                  "from apps.clientman.commands.download_usage_rights import disable_download_for_territory_reta"
                  "il_model\n"
                  "from apps.clientman.commands.download_usage_rights import disable_download_for_territory_reta"
                  "il_model_definition_platform_provider  # noqa\n"
                  "from apps.clientman.commands.download_usage_rights import get_download_rights_for_item\n"
                  "from apps.clientman.commands.download_usage_rights import get_right\n")
    assert SortImports(file_contents=test_input, force_single_line=True, line_length=140,
                       known_third_party=["django", "model_mommy"]).output == test_input
def test_auto_detection():
    """Initial test to ensure isort auto-detection works correctly - will grow over time as new issues are raised."""
    # Issue 157
    test_input = ("import binascii\n"
                  "import os\n"
                  "\n"
                  "import cv2\n"
                  "import requests\n")
    assert SortImports(file_contents=test_input, known_third_party=["cv2", "requests"]).output == test_input
    # alternative solution: route all unknown modules to THIRDPARTY
    assert SortImports(file_contents=test_input, default_section="THIRDPARTY").output == test_input
def test_same_line_statements():
    """Ensure isort correctly handles the case where a single line contains multiple statements including an import"""
    # Two imports on one line get separated and sectioned.
    test_input = ("import pdb; import nose\n")
    expected = ("import pdb\n"
                "\n"
                "import nose\n")
    assert SortImports(file_contents=test_input).output == expected
    # An import followed by ordinary code on the same line is left alone.
    test_input = ("import pdb; pdb.set_trace()\n"
                  "import nose; nose.run()\n")
    assert SortImports(file_contents=test_input).output == test_input
def test_long_line_comments():
    """Ensure isort correctly handles comments at the end of extremely long lines"""
    test_input = ("from foo.utils.fabric_stuff.live import check_clean_live, deploy_live, sync_live_envdir, "
                  "update_live_app, update_live_cron  # noqa\n"
                  "from foo.utils.fabric_stuff.stage import check_clean_stage, deploy_stage, sync_stage_envdir, "
                  "update_stage_app, update_stage_cron  # noqa\n")
    # The trailing '# noqa' stays attached to the first wrapped line.
    assert SortImports(file_contents=test_input).output == \
           ("from foo.utils.fabric_stuff.live import (check_clean_live, deploy_live,  # noqa\n"
            "                                          sync_live_envdir, update_live_app, update_live_cron)\n"
            "from foo.utils.fabric_stuff.stage import (check_clean_stage, deploy_stage,  # noqa\n"
            "                                           sync_stage_envdir, update_stage_app, update_stage_cron)\n")
def test_tab_character_in_import():
    """Ensure isort correctly handles import statements that contain a tab character"""
    source = ("from __future__ import print_function\n"
              "from __future__ import\tprint_function\n")
    # The tab-separated duplicate collapses into a single normalized import.
    deduplicated = SortImports(file_contents=source).output
    assert deduplicated == "from __future__ import print_function\n"
def test_split_position():
    """Ensure isort splits on import instead of . when possible"""
    test_input = ("from p24.shared.exceptions.master.host_state_flag_unchanged import HostStateUnchangedException\n")
    assert SortImports(file_contents=test_input, line_length=80).output == \
           ("from p24.shared.exceptions.master.host_state_flag_unchanged import \\\n"
            "    HostStateUnchangedException\n")
def test_place_comments():
    """Ensure manually placing imports works as expected.

    '# isort:imports-<section>' markers pin each section's output location;
    a second pass over the output must be a no-op (idempotence).
    """
    test_input = ("import sys\n"
                  "import os\n"
                  "import myproject.test\n"
                  "import django.settings\n"
                  "\n"
                  "# isort:imports-thirdparty\n"
                  "# isort:imports-firstparty\n"
                  "print('code')\n"
                  "\n"
                  "# isort:imports-stdlib\n")
    expected_output = ("\n# isort:imports-thirdparty\n"
                       "import django.settings\n"
                       "\n"
                       "# isort:imports-firstparty\n"
                       "import myproject.test\n"
                       "\n"
                       "print('code')\n"
                       "\n"
                       "# isort:imports-stdlib\n"
                       "import os\n"
                       "import sys\n")
    test_output = SortImports(file_contents=test_input, known_third_party=['django']).output
    assert test_output == expected_output
    test_output = SortImports(file_contents=test_output, known_third_party=['django']).output
    assert test_output == expected_output
def test_placement_control():
    """Ensure that most specific placement control match wins.

    p24.imports._VERSION (known_first_party) is more specific than
    p24.imports (known_standard_library), so it lands in FIRSTPARTY.
    """
    test_input = ("import os\n"
                  "import sys\n"
                  "from bottle import Bottle, redirect, response, run\n"
                  "import p24.imports._argparse as argparse\n"
                  "import p24.imports._subprocess as subprocess\n"
                  "import p24.imports._VERSION as VERSION\n"
                  "import p24.shared.media_wiki_syntax as syntax\n")
    test_output = SortImports(file_contents=test_input,
                              known_first_party=['p24', 'p24.imports._VERSION'],
                              known_standard_library=['p24.imports'],
                              known_third_party=['bottle'],
                              default_section="THIRDPARTY").output
    assert test_output == ("import os\n"
                           "import p24.imports._argparse as argparse\n"
                           "import p24.imports._subprocess as subprocess\n"
                           "import sys\n"
                           "\n"
                           "from bottle import Bottle, redirect, response, run\n"
                           "\n"
                           "import p24.imports._VERSION as VERSION\n"
                           "import p24.shared.media_wiki_syntax as syntax\n")
def test_custom_sections():
    """Ensure user-defined sections (DJANGO, PANDAS) and their headings work
    alongside the built-in sections, in the configured order."""
    test_input = ("import os\n"
                  "import sys\n"
                  "from django.conf import settings\n"
                  "from bottle import Bottle, redirect, response, run\n"
                  "import p24.imports._argparse as argparse\n"
                  "from django.db import models\n"
                  "import p24.imports._subprocess as subprocess\n"
                  "import pandas as pd\n"
                  "import p24.imports._VERSION as VERSION\n"
                  "import numpy as np\n"
                  "import p24.shared.media_wiki_syntax as syntax\n")
    test_output = SortImports(file_contents=test_input,
                              known_first_party=['p24', 'p24.imports._VERSION'],
                              import_heading_stdlib='Standard Library',
                              import_heading_thirdparty='Third Party',
                              import_heading_firstparty='First Party',
                              import_heading_django='Django',
                              import_heading_pandas='Pandas',
                              known_standard_library=['p24.imports'],
                              known_third_party=['bottle'],
                              known_django=['django'],
                              known_pandas=['pandas', 'numpy'],
                              default_section="THIRDPARTY",
                              sections=["FUTURE", "STDLIB", "DJANGO", "THIRDPARTY", "PANDAS", "FIRSTPARTY", "LOCALFOLDER"]).output
    assert test_output == ("# Standard Library\n"
                           "import os\n"
                           "import p24.imports._argparse as argparse\n"
                           "import p24.imports._subprocess as subprocess\n"
                           "import sys\n"
                           "\n"
                           "# Django\n"
                           "from django.conf import settings\n"
                           "from django.db import models\n"
                           "\n"
                           "# Third Party\n"
                           "from bottle import Bottle, redirect, response, run\n"
                           "\n"
                           "# Pandas\n"
                           "import numpy as np\n"
                           "import pandas as pd\n"
                           "\n"
                           "# First Party\n"
                           "import p24.imports._VERSION as VERSION\n"
                           "import p24.shared.media_wiki_syntax as syntax\n")
def test_sticky_comments():
    """Test to ensure it is possible to make comments 'stick' above imports"""
    test_input = ("import os\n"
                  "\n"
                  "# Used for type-hinting (ref: https://github.com/davidhalter/jedi/issues/414).\n"
                  "from selenium.webdriver.remote.webdriver import WebDriver  # noqa\n")
    assert SortImports(file_contents=test_input).output == test_input
    # Multi-line sticky comments must also stay with their import.
    test_input = ("from django import forms\n"
                  "# While this couples the geographic forms to the GEOS library,\n"
                  "# it decouples from database (by not importing SpatialBackend).\n"
                  "from django.contrib.gis.geos import GEOSException, GEOSGeometry\n"
                  "from django.utils.translation import ugettext_lazy as _\n")
    assert SortImports(file_contents=test_input).output == test_input
def test_zipimport():
    """Imports ending in "import" shouldn't be clobbered"""
    source = "from zipimport import zipimport\n"
    sorted_output = SortImports(file_contents=source).output
    assert sorted_output == source
def test_from_ending():
    """Imports ending in "from" shouldn't be clobbered."""
    source = "from foo import get_foo_from, get_foo\n"
    sorted_output = SortImports(file_contents=source).output
    assert sorted_output == "from foo import get_foo, get_foo_from\n"
def test_from_first():
    """Tests the setting from_first works correctly"""
    source = "from os import path\nimport os\n"
    # from-imports sort ahead of straight imports when from_first is on.
    assert SortImports(file_contents=source, from_first=True).output == source
def test_top_comments():
    """Ensure correct behavior with top comments (encoding cookies, banners)."""
    test_input = ("# -*- encoding: utf-8 -*-\n"
                  "# Test comment\n"
                  "#\n"
                  "from __future__ import unicode_literals\n")
    assert SortImports(file_contents=test_input).output == test_input
    test_input = ("# -*- coding: utf-8 -*-\n"
                  "from django.db import models\n"
                  "from django.utils.encoding import python_2_unicode_compatible\n")
    assert SortImports(file_contents=test_input).output == test_input
    test_input = ("# Comment\n"
                  "import sys\n")
    assert SortImports(file_contents=test_input).output == test_input
    # A partial/malformed coding line must still be treated as a top comment.
    test_input = ("# -*- coding\n"
                  "import sys\n")
    assert SortImports(file_contents=test_input).output == test_input
def test_consistency():
    """Ensures consistency of handling even when dealing with non ordered-by-type imports"""
    source = "from sqlalchemy.dialects.postgresql import ARRAY, array\n"
    result = SortImports(file_contents=source, order_by_type=True).output
    assert result == source
def test_force_grid_wrap():
    """Ensure force_grid_wrap wraps every multi-name from-import onto a grid."""
    test_input = (
        "from foo import lib6, lib7\n"
        "from bar import lib2\n"
    )
    test_output = SortImports(
        file_contents=test_input,
        force_grid_wrap=True,
        multi_line_output=WrapModes.VERTICAL_HANGING_INDENT
    ).output
    assert test_output == """from bar import lib2
from foo import (
    lib6,
    lib7
)
"""
def test_force_grid_wrap_long():
    """Ensure that force grid wrap still happens with long line length"""
    test_input = (
        "from foo import lib6, lib7\n"
        "from bar import lib2\n"
        "from babar import something_that_is_kind_of_long"
    )
    test_output = SortImports(
        file_contents=test_input,
        force_grid_wrap=True,
        multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
        # huge limit: proves wrapping is forced, not length-triggered
        line_length=9999,
    ).output
    assert test_output == """from babar import something_that_is_kind_of_long
from bar import lib2
from foo import (
    lib6,
    lib7
)
"""
def test_uses_jinja_variables():
    """Test a basic set of imports that use jinja variables"""
    test_input = ("import sys\n"
                  "import os\n"
                  "import myproject.{ test }\n"
                  "import django.{ settings }")
    test_output = SortImports(file_contents=test_input, known_third_party=['django'],
                              known_first_party=['myproject']).output
    assert test_output == ("import os\n"
                           "import sys\n"
                           "\n"
                           "import django.{ settings }\n"
                           "\n"
                           "import myproject.{ test }\n")
    # Double-brace (cookiecutter-style) templates must also survive untouched.
    test_input = ("import {{ cookiecutter.repo_name }}\n"
                  "from foo import {{ cookiecutter.bar }}\n")
    assert SortImports(file_contents=test_input).output == test_input
def test_fcntl():
    """Test to ensure fcntl gets correctly recognized as stdlib import"""
    source = ("import fcntl\n"
              "import os\n"
              "import sys\n")
    # Already sorted and all stdlib, so the output must be unchanged.
    assert SortImports(file_contents=source).output == source
def test_import_split_is_word_boundary_aware():
    """Test to ensure that isort splits words in a boundary-aware manner"""
    test_input = ("from mycompany.model.size_value_array_import_func import \\\n"
                  "    get_size_value_array_import_func_jobs")
    test_output = SortImports(file_contents=test_input,
                              multi_line_output=WrapModes.VERTICAL_HANGING_INDENT,
                              line_length=79).output
    assert test_output == ("from mycompany.model.size_value_array_import_func import (\n"
                           "    get_size_value_array_import_func_jobs\n"
                           ")\n")
def test_other_file_encodings():
    """Test to ensure file encoding is respected"""
    # Create the directory BEFORE entering the try block: in the original,
    # if tempfile.mkdtemp() itself raised, the finally clause referenced a
    # tmp_dir name that was never bound (NameError masking the real error).
    tmp_dir = tempfile.mkdtemp()
    try:
        for encoding in ('latin1', 'utf8'):
            tmp_fname = os.path.join(tmp_dir, 'test_{0}.py'.format(encoding))
            file_contents = "# coding: {0}\n\ns = u'ã'\n".format(encoding)
            with codecs.open(tmp_fname, mode='w', encoding=encoding) as f:
                f.write(file_contents)
            # Sorting a file with a non-ascii literal must honor its declared
            # coding header and return the bytes-equivalent text unchanged.
            assert SortImports(file_path=tmp_fname).output == file_contents
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)
def test_comment_at_top_of_file():
    """Test to ensure isort correctly handles top of file comments"""
    # Each case is already sorted; top-of-file comments must be neither
    # moved nor dropped.
    cases = [
        '# Comment one\n'
        'from django import forms\n'
        '# Comment two\n'
        'from django.contrib.gis.geos import GEOSException\n',

        '# -*- coding: utf-8 -*-\n'
        'from django.db import models\n',
    ]
    for case in cases:
        assert SortImports(file_contents=case).output == case
def test_alphabetic_sorting():
    """Test to ensure forced alphabetical sorting within sections is stable"""
    # Input is already in the case-insensitive alphabetical order that
    # force_alphabetical_sort_within_sections + force_single_line produce,
    # so sorting must be a no-op.
    test_input = ("import unittest\n"
                  "\n"
                  "import ABC\n"
                  "import Zope\n"
                  "from django.contrib.gis.geos import GEOSException\n"
                  "from plone.app.testing import getRoles\n"
                  "from plone.app.testing import ManageRoles\n"
                  "from plone.app.testing import setRoles\n"
                  "from Products.CMFPlone import utils\n"
                  )
    options = {'force_single_line': True,
               'force_alphabetical_sort_within_sections': True, }
    output = SortImports(file_contents=test_input, **options).output
    assert output == test_input
    # Sanity check with default settings: a file with only a coding header
    # and one import is untouched.
    test_input = ("# -*- coding: utf-8 -*-\n"
                  "from django.db import models\n")
    assert SortImports(file_contents=test_input).output == test_input
def test_alphabetic_sorting_multi_line():
    """Test to ensure isort correctly handles multiline import see: issue 364"""
    # A wrapped from-import that is already alphabetically ordered must
    # round-trip unchanged under force_alphabetical_sort_within_sections
    # (regression for issue 364).
    test_input = ("from a import (CONSTANT_A, cONSTANT_B, CONSTANT_C, CONSTANT_D, CONSTANT_E,\n"
                  "               CONSTANT_F, CONSTANT_G, CONSTANT_H, CONSTANT_I, CONSTANT_J)\n")
    options = {'force_alphabetical_sort_within_sections': True, }
    assert SortImports(file_contents=test_input, **options).output == test_input
def test_comments_not_duplicated():
    """Test to ensure comments aren't duplicated: issue 303"""
    source = ('from flask import url_for\n'
              "# Whole line comment\n"
              'from service import demo # inline comment\n'
              'from service import settings\n')
    result = SortImports(file_contents=source).output
    # Both the standalone and the inline comment must survive exactly once.
    for comment in ("# Whole line comment\n", "# inline comment\n"):
        assert result.count(comment) == 1
def test_top_of_line_comments():
    """Test to ensure top of line comments stay where they should: issue 260"""
    test_input = ('# -*- coding: utf-8 -*-\n'
                  'from django.db import models\n'
                  '#import json as simplejson\n'
                  'from myproject.models import Servidor\n'
                  '\n'
                  'import reversion\n'
                  '\n'
                  'import logging\n')
    output = SortImports(file_contents=test_input).output
    # Only the position of the encoding header is pinned here: it must stay
    # the very first line even though imports get reordered.
    assert output.startswith('# -*- coding: utf-8 -*-\n')
def test_basic_comment():
    """Test to ensure a basic comment wont crash isort"""
    # Already sorted; a bare comment between imports must be preserved
    # in place.
    source = 'import logging\n# Foo\nimport os\n'
    assert SortImports(file_contents=source).output == source
def test_shouldnt_add_lines():
    """Ensure that isort doesn't add a blank line when a top of import comment is present, issue #316"""
    source = '"""Text"""\n# This is a comment\nimport pkg_resources\n'
    # The docstring/comment/import sequence must round-trip byte-for-byte.
    assert SortImports(file_contents=source).output == source
def test_sections_parsed_correct():
    """Ensure that modules for custom sections parsed as list from config file and isort result is correct"""
    tmp_conf_dir = None
    # Config declares a custom trailing COMMON section that owns 'nose',
    # plus import headings for both COMMON and STDLIB.
    conf_file_data = (
        '[settings]\n'
        'sections=FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER,COMMON\n'
        'known_common=nose\n'
        'import_heading_common=Common Library\n'
        'import_heading_stdlib=Standard Library\n'
    )
    test_input = (
        'import os\n'
        'from nose import *\n'
        'import nose\n'
        'from os import path'
    )
    # Expected: os imports under the STDLIB heading, nose imports moved to
    # the custom COMMON section under its heading.
    correct_output = (
        '# Standard Library\n'
        'import os\n'
        'from os import path\n'
        '\n'
        '# Common Library\n'
        'import nose\n'
        'from nose import *\n'
    )
    try:
        # Write the settings into a throwaway directory and point isort's
        # settings_path at it so the .isort.cfg file gets picked up.
        tmp_conf_dir = tempfile.mkdtemp()
        tmp_conf_name = os.path.join(tmp_conf_dir, '.isort.cfg')
        with codecs.open(tmp_conf_name, 'w') as test_config:
            test_config.writelines(conf_file_data)
        assert SortImports(file_contents=test_input, settings_path=tmp_conf_dir).output == correct_output
    finally:
        shutil.rmtree(tmp_conf_dir, ignore_errors=True)
def test_alphabetic_sorting_no_newlines():
    '''Test to ensure that alphabetical sort does not erroneously introduce new lines (issue #328)'''
    # Minimal case: a single import must round-trip exactly.
    single_import = "import os\n"
    assert single_import == SortImports(
        file_contents=single_import,
        force_alphabetical_sort_within_sections=True).output
    # Larger case: sections plus trailing code, with lines_after_imports
    # pinned to two blank lines — still no extra newlines may appear.
    sorted_source = ('import os\n'
                     'import unittest\n'
                     '\n'
                     'from a import b\n'
                     '\n'
                     '\n'
                     'print(1)\n')
    assert sorted_source == SortImports(
        file_contents=sorted_source,
        force_alphabetical_sort_within_sections=True,
        lines_after_imports=2).output
def test_sort_within_section():
    '''Test to ensure its possible to force isort to sort within sections'''
    # With default type ordering, this input is already in the order
    # force_sort_within_sections produces, so it must round-trip unchanged.
    test_input = ('from Foob import ar\n'
                  'import foo\n'
                  'from foo import bar\n'
                  'from foo.bar import Quux, baz\n')
    test_output = SortImports(file_contents=test_input,force_sort_within_sections=True).output
    assert test_output == test_input
    # With order_by_type disabled and single-line imports forced, a
    # different (case-insensitive) order is the fixed point.
    test_input = ('import foo\n'
                  'from foo import bar\n'
                  'from foo.bar import baz\n'
                  'from foo.bar import Quux\n'
                  'from Foob import ar\n')
    test_output = SortImports(file_contents=test_input,force_sort_within_sections=True, order_by_type=False,
                              force_single_line=True).output
    assert test_output == test_input
def test_sorting_with_two_top_comments():
    '''Test to ensure isort will sort files that contain 2 top comments'''
    # The shebang-style comment and the triple-quoted comment must both stay
    # on top; only the imports below them get reordered.
    header = ('#! comment1\n'
              "''' comment2\n"
              "'''\n")
    unsorted = header + 'import b\nimport a\n'
    expected = header + 'import a\nimport b\n'
    assert SortImports(file_contents=unsorted).output == expected
def test_lines_between_sections():
    """Test to ensure lines_between_sections works"""
    source = ('from bar import baz\n'
              'import os\n')
    # Number of blank lines between the stdlib and third-party sections is
    # controlled directly by lines_between_sections.
    expected_by_gap = {
        0: 'import os\nfrom bar import baz\n',
        2: 'import os\n\n\nfrom bar import baz\n',
    }
    for gap, expected in expected_by_gap.items():
        assert SortImports(file_contents=source, lines_between_sections=gap).output == expected
def test_forced_sepatate_globs():
    """Test to ensure that forced_separate glob matches lines"""
    # NOTE(review): "sepatate" is a typo for "separate" in the test name;
    # kept as-is because renaming would change which test the suite reports.
    test_input = ('import os\n'
                  '\n'
                  'from myproject.foo.models import Foo\n'
                  '\n'
                  'from myproject.utils import util_method\n'
                  '\n'
                  'from myproject.bar.models import Bar\n'
                  '\n'
                  'import sys\n')
    # Everything matching the '*.models' glob must be pulled into its own
    # trailing group, after the remaining first-party imports.
    test_output = SortImports(file_contents=test_input, forced_separate=['*.models'],
                              line_length=120).output
    assert test_output == ('import os\n'
                           'import sys\n'
                           '\n'
                           'from myproject.utils import util_method\n'
                           '\n'
                           'from myproject.bar.models import Bar\n'
                           'from myproject.foo.models import Foo\n')
def test_no_additional_lines_issue_358():
    """Test to ensure issue 358 is resolved and running isort multiple times does not add extra newlines"""
    # Each input below is already in isort's own output format, so sorting
    # must be a fixed point.  The bug in issue 358 added a newline on every
    # pass, so idempotence is checked over repeated runs.
    #
    # The original wrote expected_output as a verbatim copy of test_input
    # and ran the same assertion twice by hand plus five more times in a
    # loop, for each of the two inputs; a single seven-iteration loop per
    # input is behaviorally identical and removes the duplication.
    fixed_points = (
        ('"""This is a docstring"""\n'
         '# This is a comment\n'
         'from __future__ import (\n'
         '    absolute_import,\n'
         '    division,\n'
         '    print_function,\n'
         '    unicode_literals\n'
         ')\n'),
        ('"""This is a docstring"""\n'
         '\n'
         '# This is a comment\n'
         'from __future__ import (\n'
         '    absolute_import,\n'
         '    division,\n'
         '    print_function,\n'
         '    unicode_literals\n'
         ')\n'),
    )
    for expected_output in fixed_points:
        test_output = expected_output
        for attempt in range(7):
            test_output = SortImports(file_contents=test_output, multi_line_output=3, line_length=20).output
            assert test_output == expected_output
def test_import_by_paren_issue_375():
    """Test to ensure isort can correctly handle sorting imports where the paren is directly by the import body"""
    # No space between "import" and "(" — must still parse, and the short
    # name list collapses back onto one sorted line.
    unsorted = ('from .models import(\n'
                '    Foo,\n'
                '    Bar,\n'
                ')\n')
    expected = 'from .models import Bar, Foo\n'
    assert SortImports(file_contents=unsorted).output == expected
def test_function_with_docstring():
    """Test to ensure isort can correctly sort imports when the first found content is a function with a docstring"""
    # The added import must land at the top of the file with two blank
    # lines between the import block and the existing def.
    add_imports = ['from __future__ import unicode_literals']
    test_input = ('def foo():\n'
                  '    """ Single line triple quoted doctring """\n'
                  '    pass\n')
    expected_output = ('from __future__ import unicode_literals\n'
                       '\n'
                       '\n'
                       'def foo():\n'
                       '    """ Single line triple quoted doctring """\n'
                       '    pass\n')
    assert SortImports(file_contents=test_input, add_imports=add_imports).output == expected_output
def test_plone_style():
    """Test to ensure isort correctly handles plone-style imports"""
    # Plone convention: single-line imports, fully alphabetical sort (case
    # folded across sections).  This input is already in that order, so it
    # must round-trip unchanged.
    test_input = ("from django.contrib.gis.geos import GEOSException\n"
                  "from plone.app.testing import getRoles\n"
                  "from plone.app.testing import ManageRoles\n"
                  "from plone.app.testing import setRoles\n"
                  "from Products.CMFPlone import utils\n"
                  "\n"
                  "import ABC\n"
                  "import unittest\n"
                  "import Zope\n")
    options = {'force_single_line': True,
               'force_alphabetical_sort': True}
    assert SortImports(file_contents=test_input, **options).output == test_input
# --- dataset artifact: a second, unrelated file (Sikuli button definitions) begins below ---
from Regions import *
from Classes import button
import Settings
GlobalCustomButtonThreshold = Settings.GlobalCustomButtonThreshold


def _make_button(image, region, similarity=None):
    """Create a ``button``, bind its template *image* and search *region*.

    If *similarity* is not None it is forwarded to ``button.similar``.
    The project's ``similar`` API takes its threshold through a keyword
    literally named ``int``; that oddity is preserved here unchanged.
    """
    btn = button()
    btn.Image(image)
    btn.Region(region)
    if similarity is not None:
        btn.similar(int=similarity)
    return btn


# Every button's template file is exactly "<name>.png", so the buttons are
# declared as data and instantiated in one loop below.  Entries are either a
# bare name (no explicit threshold) or a (name, threshold) pair.
#
# NOTE(review): thresholds 50 and 90 below look like missing decimal points
# (.50 / .90), since every other explicit threshold is a fraction — confirm
# against Classes.button.similar before changing; preserved as-is.
# NOTE(review): the original file created ``mainwindow`` twice with identical
# arguments; the duplicate was dropped here on the assumption that button
# construction has no external side effects — TODO confirm.
_CONFIGURATOR_BUTTONS = (
    'source1', 'showunsupporteddevices', 'elangm88ethernet', 'source5',
    'source6', 'source7', 'source8', 'source9', 'senseinput',
    'factoryresettheultramatrix', 'paging', 'device', 'ok2',
    'addnewirlibrarysource', ('comport', .29), 'copylayoutfromtemplate',
    'hdmiinput', 'constant', 'audioinput', 'energyefficiencymode',
    'videooutput', 'audiooutput', 'analogtodigital', 'audiooutputtype',
    'zones', 'zone1', 'audioinputtype', 'automationcomp', 'apple',
    'lgbluray', 'av1', 'addnewlibrarysource', 'appletv',
    ('ultra6x4192168842', .29), ('ultra6x41921681026', .95), 'page',
    'pagelevel', ('ultra6x419216810263', .95), ('ultra6x419216810262', .95),
    'interfacelgbluray', 'interfaceappletv',
    ('interfacesonysinglebluray', 90), 'analogblueray', 'coaxappletv',
    ('interfacesonymultibluray', 90), 'sonymultibluray', 'sonysinglebluray',
    'irsender', 'audio1', 'sourcedevice', 'sources2', 'discovergmvdevices',
    'minusone', ('cancel', GlobalCustomButtonThreshold), 'minustwo',
    'minusthree', 'gmvdevice', 'ultra6x4', 'activeclientcon', 'minus',
    'activeclientconnections', 'connections', 'addnew', 'blank',
    'addnewcommunicationdevice', 'addnewdevice', 'admin', 'apply',
    'audiosharefolders', 'audiolibraryscanoptions', 'audiolibraryoptions',
    'backupconfigurationtofile', 'configureipinfo', 'closefive',
    'BackupCoreMo', 'backupcoremodueversionandconfigurationtofile',
    'backupfile', 'restorebackupfile', 'backupobjects', 'booleans',
    'calendarsystemmodes', 'channelgroups', 'channels', 'clientconnections',
    'clienthomelogicsystem', 'climate', 'communciationdevices',
    'Communicatio', 'communicationdevices', 'configuration', 'configurator',
    'configuripinfo', 'content', 'controller', 'cpuusage', 'customimages',
    'customizablescenes', 'custompages', 'default', 'DefaultRescJ',
    'delete', 'devicename', 'devices', 'discoverdevices',
    'dispalysettings', 'displaysettings', 'black', 'blackhivis', 'classic',
    'documents', 'doorlocks', 'emailaccounts', 'EmailMessage',
    'emailmessagesoutbound', 'textspeechmessages', 'ents', 'mainwindow',
    'two', 'three', 'EventMapper', 'eventmaps', 'eventmapper',
    'exceptions', 'factoreconfiguration', 'family', 'GenericSeria',
    'genericserialdevices', 'gernericserialdevices',
    'globalirrigationperiods', 'globaloptions', 'graphobjects', 'gTools',
    'heatingcoolingunits', 'hhrzoneheaders', 'homelogicsystems',
    'homepages', 'hrrzoneheaders', 'inputoutput', 'interface',
    'InterfaceDev', 'interfacedevices', 'interfacedeviceshrr',
    'hr2zoneheaders', 'interfacedeviceskp7', 'interfacedevicesosd',
    'interfacedevicestouchscreen', 'interfacegroups',
    ('zonecontrollers', GlobalCustomButtonThreshold), 'tvchannelgroups',
    'tvchannels', 'interfacetemplates', 'internetradiofavoritegenres',
    'ipaddress', 'webpictures',
)

_VIEWER_BUTTONS = (
    'v_home', 'v_appletvdevice', ('v_sonysinglebluraydevice', 50),
    'v_farright', ('bah', .40), 'v_sonysinglebluray',
    ('v_sonymultibluraydevice', GlobalCustomButtonThreshold), 'v_appletv',
    'v_play', 'v_lgblueraydevice', ('v_disc', .30), 'v_musicone', 'v_off',
    'v_musictwo', 'v_musicthree', 'v_allzonesoff', 'v_musicfour',
    'v_zone4off', 'v_zone3off', 'v_zone2off',
    ('v_downsource', GlobalCustomButtonThreshold), 'v_g', 'v_zone1off',
    ('v_zone1', GlobalCustomButtonThreshold), 'v_back', 'v_next',
    'v_zone2', 'v_zone3', 'v_zone4', 'v_shuffle',
    ('v_musicsymbol', .20), 'v_mute', 'v_pandora', 'v_mainzone',
)

_NILES_BUTTONS = (
    'n_scrollbar', 'n_showfavorites',
    ('n_bluraydisc', GlobalCustomButtonThreshold),
    'n_configurerlesinterfacesutilisateur', 'n_appliquer',
    'n_sourceconfiguration', 'n_yes', 'n_allmedia',
)

_BUTTONS_BY_REGION = (
    (CONFIGURATOR_REGION, _CONFIGURATOR_BUTTONS),
    (VIEWER_REGION, _VIEWER_BUTTONS),
    (NILES_REGION, _NILES_BUTTONS),
    (CONFIGURATOR_REGION_MAX, ('cmax_edit',)),
    (ADDEDITZONE, ('addeditzone_page',)),
    (GTOOLS_REGION, (('g_configurator', GlobalCustomButtonThreshold),)),
)

# Materialize each declared button as a module-level name so existing
# importers (e.g. ``from <this module> import v_home``) keep working.
for _region, _entries in _BUTTONS_BY_REGION:
    for _entry in _entries:
        _name, _threshold = _entry if isinstance(_entry, tuple) else (_entry, None)
        globals()[_name] = _make_button(_name + '.png', _region, _threshold)
del _region, _entries, _entry, _name, _threshold
local = button()
local.Image("local.png")
local.Region(CONFIGURATOR_REGION)
picturesharefolders = button()
picturesharefolders.Image("picturesharefolders.png")
picturesharefolders.Region(CONFIGURATOR_REGION)
picturelibraryscanoptions = button()
picturelibraryscanoptions.Image("picturelibraryscanoptions.png")
picturelibraryscanoptions.Region(CONFIGURATOR_REGION)
iroutputs = button()
iroutputs.Image("iroutputs.png")
iroutputs.Region(CONFIGURATOR_REGION)
irriagationcontrollers = button()
irriagationcontrollers.Image("irriagationcontrollers.png")
irriagationcontrollers.Region(CONFIGURATOR_REGION)
irreceivers = button()
irreceivers.Image("irreceivers.png")
irreceivers.Region(CONFIGURATOR_REGION)
irdevices = button()
irdevices.Image("irdevices.png")
irdevices.Region(CONFIGURATOR_REGION)
irrigation = button()
irrigation.Image("irrigation.png")
irrigation.Region(CONFIGURATOR_REGION)
irrigationgroups = button()
irrigationgroups.Image("irrigationgroups.png")
irrigationgroups.Region(CONFIGURATOR_REGION)
irrigationcontrollers = button()
irrigationcontrollers.Image("irrigationcontrollers.png")
irrigationcontrollers.Region(CONFIGURATOR_REGION)
irrigationzones = button()
irrigationzones.Image("irrigationzones.png")
irrigationzones.Region(CONFIGURATOR_REGION)
keypadtemplates = button()
keypadtemplates.Image("keypadtemplates.png")
keypadtemplates.Region(CONFIGURATOR_REGION)
kids = button()
kids.Image("kids.png")
kids.Region(CONFIGURATOR_REGION)
language = button()
language.Image("language.png")
language.Region(CONFIGURATOR_REGION)
libraryoptions = button()
libraryoptions.Image("libraryoptions.png")
libraryoptions.Region(CONFIGURATOR_REGION)
libraryscanoptions = button()
libraryscanoptions.Image("libraryscanoptions.png")
libraryscanoptions.Region(CONFIGURATOR_REGION)
lighting = button()
lighting.Image("lighting.png")
lighting.Region(CONFIGURATOR_REGION)
lightinginterfaces = button()
lightinginterfaces.Image("lightinginterfaces.png")
lightinginterfaces.Region(CONFIGURATOR_REGION)
lntemetRadio = button()
lntemetRadio.Image("lntemetRadio.png")
lntemetRadio.Region(CONFIGURATOR_REGION)
loadinterfaces = button()
loadinterfaces.Image("loadinterfaces.png")
loadinterfaces.Region(CONFIGURATOR_REGION)
locationandweather = button()
locationandweather.Image("locationandweather.png")
locationandweather.Region(CONFIGURATOR_REGION)
locationdevices = button()
locationdevices.Image("locationdevices.png")
locationdevices.Region(CONFIGURATOR_REGION)
lockgroups = button()
lockgroups.Image("lockgroups.png")
lockgroups.Region(CONFIGURATOR_REGION)
logitechmediaserver = button()
logitechmediaserver.Image("logitechmediaserver.png")
logitechmediaserver.Region(CONFIGURATOR_REGION)
lookuplatlonfromlocation = button()
lookuplatlonfromlocation.Image("lookuplatlonfromlocation.png")
lookuplatlonfromlocation.Region(CONFIGURATOR_REGION)
maintabs = button()
maintabs.Image("maintabs.png")
maintabs.Region(CONFIGURATOR_REGION)
media = button()
media.Image("media.png")
media.Region(CONFIGURATOR_REGION)
media.similar(GlobalCustomButtonThreshold)
g_cancel = button()
g_cancel.Image("g_cancel.png")
g_cancel.Region(GTOOLS_REGION)
g_viewer = button()
g_viewer.Image("g_viewer.png")
g_viewer.Region(GTOOLS_REGION)
g_viewer.similar(GlobalCustomButtonThreshold)
messaging = button()
messaging.Image("messaging.png")
messaging.Region(CONFIGURATOR_REGION)
min = button()
min.Image("min.png")
min.Region(CONFIGURATOR_REGION)
minus = button()
minus.Image("minus.png")
minus.Region(CONFIGURATOR_REGION)
minus.similar(int = .80)
moduleconfiguration = button()
moduleconfiguration.Image("moduleconfiguration.png")
moduleconfiguration.Region(CONFIGURATOR_REGION)
movies = button()
movies.Image("movies.png")
movies.Region(CONFIGURATOR_REGION)
music = button()
music.Image("music.png")
music.Region(CONFIGURATOR_REGION)
mysystems = button()
mysystems.Image("mysystems.png")
mysystems.Region(CONFIGURATOR_REGION)
name = button()
name.Image("name.png")
name.Region(CONFIGURATOR_REGION)
news = button()
news.Image("news.png")
news.Region(CONFIGURATOR_REGION)
numerictriggers = button()
numerictriggers.Image("numerictriggers.png")
numerictriggers.Region(CONFIGURATOR_REGION)
objects = button()
objects.Image("objects.png")
objects.Region(CONFIGURATOR_REGION)
ondevices = button()
ondevices.Image("ondevices.png")
ondevices.Region(CONFIGURATOR_REGION)
one = button()
one.Image("one.png")
one.Region(CONFIGURATOR_REGION)
n_sourcetwo = button()
n_sourcetwo.Image("n_sourcetwo.png")
n_sourcetwo.Region(NILES_REGION)
n_zone1 = button()
n_zone1.Image("n_zone1.png")
n_zone1.Region(NILES_REGION)
n_off = button()
n_off.Image("n_off.png")
n_off.Region(NILES_REGION)
n_mainzoneoff = button()
n_mainzoneoff.Image("n_mainzoneoff.png")
n_mainzoneoff.Region(NILES_REGION)
n_zone1off = button()
n_zone1off.Image("n_zone1off.png")
n_zone1off.Region(NILES_REGION)
n_notinstalled = button()
n_notinstalled.Image("n_notinstalled.png")
n_notinstalled.Region(NILES_REGION)
n_notinstalled.similar(GlobalCustomButtonThreshold)
n_none = button()
n_none.Image("n_none.png")
n_none.Region(NILES_REGION)
n_none.similar(GlobalCustomButtonThreshold)
n_configurekeypads = button()
n_configurekeypads.Image("n_configurekeypads.png")
n_configurekeypads.Region(NILES_REGION)
n_remove = button()
n_remove.Image("n_remove.png")
n_remove.Region(NILES_REGION)
n_remove.similar(GlobalCustomButtonThreshold)
n_poweron = button()
n_poweron.Image("n_poweron.png")
n_poweron.Region(NILES_REGION)
n_poweroff = button()
n_poweroff.Image("n_poweroff.png")
n_poweroff.Region(NILES_REGION)
n_home = button()
n_home.Image("n_home.png")
n_home.Region(NILES_REGION)
n_mainzone = button()
n_mainzone.Image("n_mainzone.png")
n_mainzone.Region(NILES_REGION)
n_mainzone.similar(int = GlobalCustomButtonThreshold)
n_scrollright = button()
n_scrollright.Image("n_scrollright.png")
n_scrollright.Region(NILES_REGION)
n_scrollright.similar(int = GlobalCustomButtonThreshold)
outcontroller = button()
outcontroller.Image("outcontroller.png")
outcontroller.Region(CONFIGURATOR_REGION)
elangmv64ethernet = button()
elangmv64ethernet.Image("elangmv64ethernet.png")
elangmv64ethernet.Region(CONFIGURATOR_REGION)
v_forward = button()
v_forward.Image("v_forward.png")
v_forward.Region(VIEWER_REGION)
outputs = button()
outputs.Image("outputs.png")
outputs.Region(CONFIGURATOR_REGION)
pictures = button()
pictures.Image("pictures.png")
pictures.Region(CONFIGURATOR_REGION)
picuturesharefolders = button()
picuturesharefolders.Image("picuturesharefolders.png")
picuturesharefolders.Region(CONFIGURATOR_REGION)
plus = button()
plus.Image("plus.png")
plus.Region(CONFIGURATOR_REGION)
poolcontrol = button()
poolcontrol.Image("poolcontrol.png")
poolcontrol.Region(CONFIGURATOR_REGION)
virtualsecuritycontroller = button()
virtualsecuritycontroller.Image("virtualsecuritycontroller.png")
virtualsecuritycontroller.Region(CONFIGURATOR_REGION)
virtualdoorlock = button()
virtualdoorlock.Image("virtualdoorlock.png")
virtualdoorlock.Region(CONFIGURATOR_REGION)
poolcontrollers = button()
poolcontrollers.Image("poolcontrollers.png")
poolcontrollers.Region(CONFIGURATOR_REGION)
yamahayncaethernet = button()
yamahayncaethernet.Image("yamahayncaethernet.png")
yamahayncaethernet.Region(CONFIGURATOR_REGION)
yamahayncaethernet.similar(int = GlobalCustomButtonThreshold)
ipzero = button()
ipzero.Image("ipzero.png")
ipzero.Region(CONFIGURATOR_REGION)
ipzero.similar(int = GlobalCustomButtonThreshold)
v_settings = button()
v_settings.Image("v_settings.png")
v_settings.Region(VIEWER_REGION)
v_three = button()
v_three.Image("v_three.png")
v_three.Region(VIEWER_REGION)
v_six = button()
v_six.Image("v_six.png")
v_six.Region(VIEWER_REGION)
v_six.similar(int = .50)
v_five = button()
v_five.Image("v_five.png")
v_five.Region(VIEWER_REGION)
v_one = button()
v_one.Image("v_one.png")
v_one.Region(VIEWER_REGION)
v_four = button()
v_four.Image("v_four.png")
v_four.Region(VIEWER_REGION)
v_two = button()
v_two.Image("v_two.png")
v_two.Region(VIEWER_REGION)
v_media = button()
v_media.Image("v_media.png")
v_media.Region(VIEWER_REGION)
morning = button()
morning.Image("morning.png")
morning.Region(CONFIGURATOR_REGION)
reboottargetsystemhardware = button()
reboottargetsystemhardware.Image("reboottargetsystemhardware.png")
reboottargetsystemhardware.Region(CONFIGURATOR_REGION)
hardwaretype = button()
hardwaretype.Image("hardwaretype.png")
hardwaretype.Region(CONFIGURATOR_REGION)
hardwaretype.similar(GlobalCustomButtonThreshold)
receivers = button()
receivers.Image("receivers.png")
receivers.Region(CONFIGURATOR_REGION)
RecordResolu = button()
RecordResolu.Image("RecordResolu.png")
RecordResolu.Region(CONFIGURATOR_REGION)
RegOne = button()
RegOne.Image("RegOne.png")
RegOne.Region(CONFIGURATOR_REGION)
RegThree = button()
RegThree.Image("RegThree.png")
RegThree.Region(CONFIGURATOR_REGION)
RegTwo = button()
RegTwo.Image("RegTwo.png")
RegTwo.Region(CONFIGURATOR_REGION)
relayoutputs = button()
relayoutputs.Image("relayoutputs.png")
relayoutputs.Region(CONFIGURATOR_REGION)
remoteusers = button()
remoteusers.Image("remoteusers.png")
remoteusers.Region(CONFIGURATOR_REGION)
repeatingsystemtimers = button()
repeatingsystemtimers.Image("repeatingsystemtimers.png")
repeatingsystemtimers.Region(CONFIGURATOR_REGION)
repeatingsystemtimes = button()
repeatingsystemtimes.Image("repeatingsystemtimes.png")
repeatingsystemtimes.Region(CONFIGURATOR_REGION)
resettofactoryconfiguration = button()
resettofactoryconfiguration.Image("resettofactoryconfiguration.png")
resettofactoryconfiguration.Region(CONFIGURATOR_REGION)
restarttargersystemsoftwareonly = button()
restarttargersystemsoftwareonly.Image("restarttargersystemsoftwareonly.png")
restarttargersystemsoftwareonly.Region(CONFIGURATOR_REGION)
RunOnceSyste = button()
RunOnceSyste.Image("RunOnceSyste.png")
RunOnceSyste.Region(CONFIGURATOR_REGION)
runoncesystemtimers = button()
runoncesystemtimers.Image("runoncesystemtimers.png")
runoncesystemtimers.Region(CONFIGURATOR_REGION)
reapeatingsystemtimers = button()
reapeatingsystemtimers.Image("reapeatingsystemtimers.png")
reapeatingsystemtimers.Region(CONFIGURATOR_REGION)
schedule = button()
schedule.Image("schedule.png")
schedule.Region(CONFIGURATOR_REGION)
security = button()
security.Image("security.png")
security.Region(CONFIGURATOR_REGION)
securitypanels = button()
securitypanels.Image("securitypanels.png")
securitypanels.Region(CONFIGURATOR_REGION)
select = button()
select.Image("select.png")
select.Region(CONFIGURATOR_REGION)
senders = button()
senders.Image("senders.png")
senders.Region(CONFIGURATOR_REGION)
senseinputs = button()
senseinputs.Image("senseinputs.png")
senseinputs.Region(CONFIGURATOR_REGION)
hc6controller = button()
hc6controller.Image("hc6controller.png")
hc6controller.Region(CONFIGURATOR_REGION)
serialdevices = button()
serialdevices.Image("serialdevices.png")
serialdevices.Region(CONFIGURATOR_REGION)
sharing = button()
sharing.Image("sharing.png")
sharing.Region(CONFIGURATOR_REGION)
sources = button()
sources.Image("sources.png")
sources.Region(CONFIGURATOR_REGION)
connecttoexistingdevice = button()
connecttoexistingdevice.Image("connecttoexistingdevice.png")
connecttoexistingdevice.Region(CONFIGURATOR_REGION)
connecttoexistingdevice.similar(int = .30)
none = button()
none.Image("none.png")
none.Region(CONFIGURATOR_REGION)
sources3 = button()
sources3.Image("sources3.png")
sources3.Region(CONFIGURATOR_REGION)
sports = button()
sports.Image("sports.png")
sports.Region(CONFIGURATOR_REGION)
g_ok = button()
g_ok.Image("g_ok.png")
g_ok.Region(GTOOLS_REGION)
system = button()
system.Image("system.png")
system.Region(CONFIGURATOR_REGION)
system.similar(int = GlobalCustomButtonThreshold)
systemimages = button()
systemimages.Image("systemimages.png")
systemimages.Region(CONFIGURATOR_REGION)
systemlogs = button()
systemlogs.Image("systemlogs.png")
systemlogs.Region(CONFIGURATOR_REGION)
systemmodes = button()
systemmodes.Image("systemmodes.png")
systemmodes.Region(CONFIGURATOR_REGION)
systemsounds = button()
systemsounds.Image("systemsounds.png")
systemsounds.Region(CONFIGURATOR_REGION)
onscreendisplay = button()
onscreendisplay.Image("onscreendisplay.png")
onscreendisplay.Region(CONFIGURATOR_REGION)
systemtimers = button()
systemtimers.Image("systemtimers.png")
systemtimers.Region(CONFIGURATOR_REGION)
systemtime = button()
systemtime.Image("systemtime.png")
systemtime.Region(CONFIGURATOR_REGION)
timedevents = button()
timedevents.Image("timedevents.png")
timedevents.Region(CONFIGURATOR_REGION)
ftpfoldertriggers = button()
ftpfoldertriggers.Image("ftpfoldertriggers.png")
ftpfoldertriggers.Region(CONFIGURATOR_REGION)
vianet = button()
vianet.Image("vianet.png")
vianet.Region(CONFIGURATOR_REGION)
tabs = button()
tabs.Image("tabs.png")
tabs.Region(CONFIGURATOR_REGION)
telephonesystems = button()
telephonesystems.Image("telephonesystems.png")
telephonesystems.Region(CONFIGURATOR_REGION)
thermostats = button()
thermostats.Image("thermostats.png")
thermostats.Region(CONFIGURATOR_REGION)
three = button()
three.Image("three.png")
three.Region(CONFIGURATOR_REGION)
timesevents = button()
timesevents.Image("timesevents.png")
timesevents.Region(CONFIGURATOR_REGION)
tpdevices = button()
tpdevices.Image("tpdevices.png")
tpdevices.Region(CONFIGURATOR_REGION)
triggers = button()
triggers.Image("triggers.png")
triggers.Region(CONFIGURATOR_REGION)
two = button()
two.Image("two.png")
two.Region(CONFIGURATOR_REGION)
ups = button()
ups.Image("ups.png")
ups.Region(CONFIGURATOR_REGION)
UPSPowerSupp = button()
UPSPowerSupp.Image("UPSPowerSupp.png")
UPSPowerSupp.Region(CONFIGURATOR_REGION)
upspowersupplies = button()
upspowersupplies.Image("upspowersupplies.png")
upspowersupplies.Region(CONFIGURATOR_REGION)
variables = button()
variables.Image("variables.png")
variables.Region(CONFIGURATOR_REGION)
version = button()
version.Image("version.png")
version.Region(CONFIGURATOR_REGION)
version.similar(GlobalCustomButtonThreshold)
video = button()
video.Image("video.png")
video.Region(CONFIGURATOR_REGION)
videocamerasources = button()
videocamerasources.Image("videocamerasources.png")
videocamerasources.Region(CONFIGURATOR_REGION)
audiobreakout = button()
audiobreakout.Image("audiobreakout.png")
audiobreakout.Region(CONFIGURATOR_REGION)
source2 = button()
source2.Image("source2.png")
source2.Region(CONFIGURATOR_REGION)
source3 = button()
source3.Image("source3.png")
source3.Region(CONFIGURATOR_REGION)
source4 = button()
source4.Image("source4.png")
source4.Region(CONFIGURATOR_REGION)
videodisplays = button()
videodisplays.Image("videodisplays.png")
videodisplays.Region(CONFIGURATOR_REGION)
displayname = button()
displayname.Image("displayname.png")
displayname.Region(CONFIGURATOR_REGION)
viewer = button()
viewer.Image("viewer.png")
viewer.Region(CONFIGURATOR_REGION)
voicemailboxes = button()
voicemailboxes.Image("voicemailboxes.png")
voicemailboxes.Region(CONFIGURATOR_REGION)
house = button()
house.Image("house.png")
house.Region(CONFIGURATOR_REGION)
WeatherCalen = button()
WeatherCalen.Image("WeatherCalen.png")
WeatherCalen.Region(CONFIGURATOR_REGION)
weathercalendar = button()
weathercalendar.Image("weathercalendar.png")
weathercalendar.Region(CONFIGURATOR_REGION)
weathercalendarsystemmo = button()
weathercalendarsystemmo.Image("weathercalendarsystemmo.png")
weathercalendarsystemmo.Region(CONFIGURATOR_REGION)
weathersystemmodes = button()
weathersystemmodes.Image("weathersystemmodes.png")
weathersystemmodes.Region(CONFIGURATOR_REGION)
windows = button()
windows.Image("windows.png")
windows.Region(CONFIGURATOR_REGION)
works = button()
works.Image("works.png")
works.Region(CONFIGURATOR_REGION)
x = button()
x.Image("x.png")
x.Region(CONFIGURATOR_REGION)
############################################################
# ----------------------------------------------------------------------
# "Add new ..." dialog buttons plus a handful of Niles TV/set-top-box
# buttons. Same template-image/region pattern as the other sections.
# ----------------------------------------------------------------------
def _make_button(image_file, region, threshold=None):
    """Build a button bound to a template image and a search region.

    image_file -- file name of the template image (e.g. "ok.png")
    region     -- screen region the image is searched in
    threshold  -- optional match-similarity override. Passed positionally
                  to similar(); the original code sometimes wrote
                  similar(int = value), which shadows the builtin 'int'
                  and only works if the parameter is literally named
                  'int' — the positional call is correct either way.
    """
    btn = button()
    btn.Image(image_file)
    btn.Region(region)
    if threshold is not None:
        btn.similar(threshold)
    return btn

addnewaudiosharefolder = _make_button("addnewaudiosharefolder.png", CONFIGURATOR_REGION)
addnewbackupprocedure = _make_button("addnewbackupprocedure.png", CONFIGURATOR_REGION)
addnewcommunicationdevice = _make_button("addnewcommunicationdevice.png", CONFIGURATOR_REGION)
addnewcustompage = _make_button("addnewcustompage.png", CONFIGURATOR_REGION)
addnewdevice = _make_button("addnewdevice.png", CONFIGURATOR_REGION)
addnewdisplaysettings = _make_button("addnewdisplaysettings.png", CONFIGURATOR_REGION)
addnewdvdplayer = _make_button("addnewdvdplayer.png", CONFIGURATOR_REGION)
addnewemailaccount = _make_button("addnewemailaccount.png", CONFIGURATOR_REGION)
addnewemailmessage = _make_button("addnewemailmessage.png", CONFIGURATOR_REGION)
addneweventmap = _make_button("addneweventmap.png", CONFIGURATOR_REGION)
addneweventmapboolean = _make_button("addneweventmapboolean.png", CONFIGURATOR_REGION)
addneweventmapvariable = _make_button("addneweventmapvariable.png", CONFIGURATOR_REGION)
addnewftpfoldertrigger = _make_button("addnewftpfoldertrigger.png", CONFIGURATOR_REGION)
addnewgenericserialdevice = _make_button("addnewgenericserialdevice.png", CONFIGURATOR_REGION)
addnewgraphobject = _make_button("addnewgraphobject.png", CONFIGURATOR_REGION)
addnewgroup = _make_button("addnewgroup.png", CONFIGURATOR_REGION)
addnewinputcontroller = _make_button("addnewinputcontroller.png", CONFIGURATOR_REGION)
addnewinternetradiogenre = _make_button("addnewinternetradiogenre.png", CONFIGURATOR_REGION)
addnewirdevice = _make_button("addnewirdevice.png", CONFIGURATOR_REGION)
addnewirreceiver = _make_button("addnewirreceiver.png", CONFIGURATOR_REGION)
addnewirrigationcontroller = _make_button("addnewirrigationcontroller.png", CONFIGURATOR_REGION)
addnewirrigationgroup = _make_button("addnewirrigationgroup.png", CONFIGURATOR_REGION)
addnewirrigationperiod = _make_button("addnewirrigationperiod.png", CONFIGURATOR_REGION)
addnewirsender = _make_button("addnewirsender.png", CONFIGURATOR_REGION)
addnewkeypad = _make_button("addnewkeypad.png", CONFIGURATOR_REGION)
addnewkeypadcontroller = _make_button("addnewkeypadcontroller.png", CONFIGURATOR_REGION)
addnewkeypadtemplate = _make_button("addnewkeypadtemplate.png", CONFIGURATOR_REGION)
addnewlightinginterface = _make_button("addnewlightinginterface.png", CONFIGURATOR_REGION)
addnewlockgroup = _make_button("addnewlockgroup.png", CONFIGURATOR_REGION)
addnewmediaplayer = _make_button("addnewmediaplayer.png", CONFIGURATOR_REGION)
addnewmp3player = _make_button("addnewmp3player.png", CONFIGURATOR_REGION)
addnewnumerictrigger = _make_button("addnewnumerictrigger.png", CONFIGURATOR_REGION)
addnewoutputcontroller = _make_button("addnewoutputcontroller.png", CONFIGURATOR_REGION)
addnewpicuturesharefolder = _make_button("addnewpicuturesharefolder.png", CONFIGURATOR_REGION)
addnewpoolcontroller = _make_button("addnewpoolcontroller.png", CONFIGURATOR_REGION)
addnewpowersupply = _make_button("addnewpowersupply.png", CONFIGURATOR_REGION)
addnewrepeatingsystemtimer = _make_button("addnewrepeatingsystemtimer.png", CONFIGURATOR_REGION)
addnewrunoncesystemtimer = _make_button("addnewrunoncesystemtimer.png", CONFIGURATOR_REGION)
addnewscene = _make_button("addnewscene.png", CONFIGURATOR_REGION)
addnewsource = _make_button("addnewsource.png", CONFIGURATOR_REGION)
addnewtextspeechmessage = _make_button("addnewtextspeechmessage.png", CONFIGURATOR_REGION)
addnewthermostat = _make_button("addnewthermostat.png", CONFIGURATOR_REGION)
addnewtimedevent = _make_button("addnewtimedevent.png", CONFIGURATOR_REGION)
addnewtouchscreen = _make_button("addnewtouchscreen.png", CONFIGURATOR_REGION)
addnewtouchscreenhomepage = _make_button("addnewtouchscreenhomepage.png", CONFIGURATOR_REGION)
addnewtuner = _make_button("addnewtuner.png", CONFIGURATOR_REGION)
addnewtvchannel = _make_button("addnewtvchannel.png", CONFIGURATOR_REGION)
addnewtvchannelgroup = _make_button("addnewtvchannelgroup.png", CONFIGURATOR_REGION)
addnewvideodisplay = _make_button("addnewvideodisplay.png", CONFIGURATOR_REGION)
addnewvideosourcecamera = _make_button("addnewvideosourcecamera.png", CONFIGURATOR_REGION)
addnewvoicemailbox = _make_button("addnewvoicemailbox.png", CONFIGURATOR_REGION)
addnewwebpicture = _make_button("addnewwebpicture.png", CONFIGURATOR_REGION)
n_tvpower = _make_button("n_tvpower.png", NILES_REGION)
# BUG FIX: was similar(int = GlobalCustomButtonThreshold) — now positional.
n_turnon = _make_button("n_turnon.png", NILES_REGION, GlobalCustomButtonThreshold)
n_settopbox = _make_button("n_settopbox.png", NILES_REGION)
n_xfinity = _make_button("n_xfinity.png", NILES_REGION)
n_scrollbar2 = _make_button("n_scrollbar2.png", NILES_REGION)
n_mx011anmcablebox = _make_button("n_mx011anmcablebox.png", NILES_REGION)
n_tvsource = _make_button("n_tvsource.png", NILES_REGION)
addnewzonecontroller = _make_button("addnewzonecontroller.png", CONFIGURATOR_REGION)
addnewzoneheaderinterface = _make_button("addnewzoneheaderinterface.png", CONFIGURATOR_REGION)
browseimagefolder = _make_button("browseimagefolder.png", CONFIGURATOR_REGION)
browseshare = _make_button("browseshare.png", CONFIGURATOR_REGION)
browsesoundsfolder = _make_button("browsesoundsfolder.png", CONFIGURATOR_REGION)
configurecircuits = _make_button("configurecircuits.png", CONFIGURATOR_REGION)
copycolors = _make_button("copycolors.png", CONFIGURATOR_REGION)
copysettings = _make_button("copysettings.png", CONFIGURATOR_REGION)
createeventmapfor = _make_button("createeventmapfor.png", CONFIGURATOR_REGION)
deletehcseriesosd = _make_button("deletehcseriesosd.png", CONFIGURATOR_REGION)
deletelocal = _make_button("deletelocal.png", CONFIGURATOR_REGION)
deletemorning = _make_button("deletemorning.png", CONFIGURATOR_REGION)
deletenewsettings = _make_button("deletenewsettings.png", CONFIGURATOR_REGION)
exportchannelsandgroups = _make_button("exportchannelsandgroups.png", CONFIGURATOR_REGION)
exporttofile = _make_button("exporttofile.png", CONFIGURATOR_REGION)
importchannelsandgroups = _make_button("importchannelsandgroups.png", CONFIGURATOR_REGION)
importnewirdevicefromfile = _make_button("importnewirdevicefromfile.png", CONFIGURATOR_REGION)
importnewirinterfacefromfile = _make_button("importnewirinterfacefromfile.png", CONFIGURATOR_REGION)
importtofile = _make_button("importtofile.png", CONFIGURATOR_REGION)
moveup = _make_button("moveup.png", CONFIGURATOR_REGION)
scansharenow = _make_button("scansharenow.png", CONFIGURATOR_REGION)
showcommunicationstatus = _make_button("showcommunicationstatus.png", CONFIGURATOR_REGION)
showeventmaptraceinfo = _make_button("showeventmaptraceinfo.png", CONFIGURATOR_REGION)
#####################NILES##########################################
# ----------------------------------------------------------------------
# Niles-receiver UI buttons (plus a few configurator extras interleaved
# in the original source; their order is preserved).
# BUG FIX: the original had a stray bare expression `gmvdevice` on its
# own line BEFORE the name was assigned — a guaranteed NameError at
# import time. That line is removed.
# ----------------------------------------------------------------------
def _make_button(image_file, region, threshold=None):
    """Build a button bound to a template image and a search region.

    image_file -- file name of the template image (e.g. "ok.png")
    region     -- screen region the image is searched in
    threshold  -- optional match-similarity override. Passed positionally
                  to similar(); the original code sometimes wrote
                  similar(int = value), which shadows the builtin 'int'
                  and only works if the parameter is literally named
                  'int' — the positional call is correct either way.
    """
    btn = button()
    btn.Image(image_file)
    btn.Region(region)
    if threshold is not None:
        btn.similar(threshold)
    return btn

n_add = _make_button("n_add.png", NILES_REGION)
n_allmodels = _make_button("n_allmodels.png", NILES_REGION)
n_apple = _make_button("n_apple.png", NILES_REGION)
n_appletv = _make_button("n_appletv.png", NILES_REGION)
n_av1 = _make_button("n_av1.png", NILES_REGION)
n_av2 = _make_button("n_av2.png", NILES_REGION)
n_bluray = _make_button("n_bluray.png", NILES_REGION)
n_bosa58e001 = _make_button("n_bosa58e001.png", NILES_REGION)
n_configurechassis = _make_button("n_configurechassis.png", NILES_REGION)
n_configurehometheatertv = _make_button("n_configurehometheatertv.png", NILES_REGION)
n_configuresources = _make_button("n_configuresources.png", NILES_REGION)
n_finish = _make_button("n_finish.png", NILES_REGION)
n_five = _make_button("n_five.png", NILES_REGION)
n_forward = _make_button("n_forward.png", NILES_REGION, GlobalCustomButtonThreshold)
# BUG FIX: was similar(int = .30) — threshold now positional.
n_gointo = _make_button("n_gointo.png", NILES_REGION, .30)
n_ipcontrolledsource = _make_button("n_ipcontrolledsource.png", NILES_REGION)
n_ircontrolledsource = _make_button("n_ircontrolledsource.png", NILES_REGION)
n_iroutput01 = _make_button("n_iroutput01.png", NILES_REGION)
n_iroutput02 = _make_button("n_iroutput02.png", NILES_REGION)
n_iroutput03 = _make_button("n_iroutput03.png", NILES_REGION)
n_logitechmediaserver = _make_button("n_logitechmediaserver.png", NILES_REGION)
n_mediamanager = _make_button("n_mediamanager.png", NILES_REGION)
n_mediaserver = _make_button("n_mediaserver.png", NILES_REGION)
n_next = _make_button("n_next.png", NILES_REGION, .50)
n_nileslogo = _make_button("n_nileslogo.png", NILES_REGION)
n_ok = _make_button("n_ok.png", NILES_REGION)
n_one = _make_button("n_one.png", NILES_REGION)
ok = _make_button("ok.png", CONFIGURATOR_REGION)
scrollwhitespace = _make_button("scrollwhitespace.png", CONFIGURATOR_REGION)
n_removeall = _make_button("n_removeall.png", NILES_REGION)
n_abc = _make_button("n_abc.png", NILES_REGION)
n_abcfamily = _make_button("n_abcfamily.png", NILES_REGION)
n_aetv = _make_button("n_aetv.png", NILES_REGION)
n_cancel = _make_button("n_cancel.png", NILES_REGION)
n_test = _make_button("n_test.png", NILES_REGION)
n_cursordown = _make_button("n_cursordown.png", NILES_REGION)
n_cursorenter = _make_button("n_cursorenter.png", NILES_REGION)
gmvdevice = _make_button("gmvdevice.png", CONFIGURATOR_REGION)
n_cursorleft = _make_button("n_cursorleft.png", NILES_REGION)
n_cursorright = _make_button("n_cursorright.png", NILES_REGION)
n_cursorup = _make_button("n_cursorup.png", NILES_REGION)
n_irrepeat = _make_button("n_irrepeat.png", NILES_REGION)
n_iroutput04 = _make_button("n_iroutput04.png", NILES_REGION)
n_sonyallmodels = _make_button("n_sonyallmodels.png", NILES_REGION, .60)
n_sonyallmodels2 = _make_button("n_sonyallmodels2.png", NILES_REGION, .60)
n_back = _make_button("n_back.png", NILES_REGION)
n_english = _make_button("n_english.png", NILES_REGION)
n_vuegenerale = _make_button("n_vuegenerale.png", NILES_REGION)
n_french = _make_button("n_french.png", NILES_REGION)
n_apply = _make_button("n_apply.png", NILES_REGION)
n_squeezeboxtouch = _make_button("n_squeezeboxtouch.png", NILES_REGION)
n_configureuserinterfaces = _make_button("n_configureuserinterfaces.png", NILES_REGION)
n_at = _make_button("n_at.png", NILES_REGION, GlobalCustomButtonThreshold)
n_configuresources2 = _make_button("n_configuresources2.png", NILES_REGION)
n_systemconfiguration = _make_button("n_systemconfiguration.png", NILES_REGION)
n_amc = _make_button("n_amc.png", NILES_REGION)
n_four = _make_button("n_four.png", NILES_REGION)
n_loadunitedstates = _make_button("n_loadunitedstates.png", NILES_REGION)
n_unitedstates = _make_button("n_unitedstates.png", NILES_REGION)
n_panasonic = _make_button("n_panasonic.png", NILES_REGION)
n_scrolldown = button()
n_scrolldown.Image("n_scrolldown.png")
n_scrolldown.Region(NILES_REGION)
n_six = button()
n_six.Image("n_six.png")
n_six.Region(NILES_REGION)
n_softsqueeze = button()
n_softsqueeze.Image("n_softsqueeze.png")
n_softsqueeze.Region(NILES_REGION)
n_sony = button()
n_sony.Image("n_sony.png")
n_sony.Region(NILES_REGION)
n_sourceone = button()
n_sourceone.Image("n_sourceone.png")
n_sourceone.Region(NILES_REGION)
n_three = button()
n_three.Image("n_three.png")
n_three.Region(NILES_REGION)
n_two = button()
n_two.Image("n_two.png")
n_two.Region(NILES_REGION)
n_yamahrxa2040ynca = button()
n_yamahrxa2040ynca.Image("n_yamahrxa2040ynca.png")
n_yamahrxa2040ynca.Region(NILES_REGION)
n_zeroip = button()
n_zeroip.Image("n_zeroip.png")
n_zeroip.Region(NILES_REGION)
adasuite1616videozones = button()
adasuite1616videozones.Image("adasuite1616videozones.png")
adasuite1616videozones.Region(CONFIGURATOR_REGION)
adasuite1616zone = button()
adasuite1616zone.Image("adasuite1616zone.png")
adasuite1616zone.Region(CONFIGURATOR_REGION)
adasuite1632videozones = button()
adasuite1632videozones.Image("adasuite1632videozones.png")
adasuite1632videozones.Region(CONFIGURATOR_REGION)
adasuite1632zone = button()
adasuite1632zone.Image("adasuite1632zone.png")
adasuite1632zone.Region(CONFIGURATOR_REGION)
adasuite1648zone = button()
adasuite1648zone.Image("adasuite1648zone.png")
adasuite1648zone.Region(CONFIGURATOR_REGION)
adasuite1664zone = button()
adasuite1664zone.Image("adasuite1664zone.png")
adasuite1664zone.Region(CONFIGURATOR_REGION)
adasuite1696zone = button()
adasuite1696zone.Image("adasuite1696zone.png")
adasuite1696zone.Region(CONFIGURATOR_REGION)
adasuite3232zone = button()
adasuite3232zone.Image("adasuite3232zone.png")
adasuite3232zone.Region(CONFIGURATOR_REGION)
ziporpostalcode = button()
ziporpostalcode.Image("ziporpostalcode.png")
ziporpostalcode.Region(CONFIGURATOR_REGION)
keypadinterfaces = button()
keypadinterfaces.Image("keypadinterfaces.png")
keypadinterfaces.Region(CONFIGURATOR_REGION)
locationtype = button()
locationtype.Image("locationtype.png")
locationtype.Region(CONFIGURATOR_REGION)
adasuite3264zone = button()
adasuite3264zone.Image("adasuite3264zone.png")
adasuite3264zone.Region(CONFIGURATOR_REGION)
adasuite3296zone = button()
adasuite3296zone.Image("adasuite3296zone.png")
adasuite3296zone.Region(CONFIGURATOR_REGION)
adasuite71 = button()
adasuite71.Image("adasuite71.png")
adasuite71.Region(CONFIGURATOR_REGION)
n_address = button()
n_address.Image("n_address.png")
n_address.Region(NILES_REGION)
adasuite8100 = button()
adasuite8100.Image("adasuite8100.png")
adasuite8100.Region(CONFIGURATOR_REGION)
adasuite8200 = button()
adasuite8200.Image("adasuite8200.png")
adasuite8200.Region(CONFIGURATOR_REGION)
ah66tsinglechassis = button()
ah66tsinglechassis.Image("ah66tsinglechassis.png")
ah66tsinglechassis.Region(CONFIGURATOR_REGION)
atlonaath2h44m4x4hdmi = button()
atlonaath2h44m4x4hdmi.Image("atlonaath2h44m4x4hdmi.png")
atlonaath2h44m4x4hdmi.Region(CONFIGURATOR_REGION)
atlonaath2h88m8x8hdmi = button()
atlonaath2h88m8x8hdmi.Image("atlonaath2h88m8x8hdmi.png")
atlonaath2h88m8x8hdmi.Region(CONFIGURATOR_REGION)
atlonaathdv1616m16x16hdmi = button()
atlonaathdv1616m16x16hdmi.Image("atlonaathdv1616m16x16hdmi.png")
atlonaathdv1616m16x16hdmi.Region(CONFIGURATOR_REGION)
atlonaathdv44m4x4hdmi = button()
atlonaathdv44m4x4hdmi.Image("atlonaathdv44m4x4hdmi.png")
atlonaathdv44m4x4hdmi.Region(CONFIGURATOR_REGION)
atlonaatpro2hd1616m16x16hdbaset = button()
atlonaatpro2hd1616m16x16hdbaset.Image("atlonaatpro2hd1616m16x16hdbaset.png")
atlonaatpro2hd1616m16x16hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro2hd44m4x4hdbaset = button()
atlonaatpro2hd44m4x4hdbaset.Image("atlonaatpro2hd44m4x4hdbaset.png")
atlonaatpro2hd44m4x4hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro2hd88m8x8hdbaset = button()
atlonaatpro2hd88m8x8hdbaset.Image("atlonaatpro2hd88m8x8hdbaset.png")
atlonaatpro2hd88m8x8hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro3hd44m4x4hdbaset = button()
atlonaatpro3hd44m4x4hdbaset.Image("atlonaatpro3hd44m4x4hdbaset.png")
atlonaatpro3hd44m4x4hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro3hd66m6x6hdbaset = button()
atlonaatpro3hd66m6x6hdbaset.Image("atlonaatpro3hd66m6x6hdbaset.png")
atlonaatpro3hd66m6x6hdbaset.Region(CONFIGURATOR_REGION)
denon28053805 = button()
denon28053805.Image("denon28053805.png")
denon28053805.Region(CONFIGURATOR_REGION)
denonavpa1hdci = button()
denonavpa1hdci.Image("denonavpa1hdci.png")
denonavpa1hdci.Region(CONFIGURATOR_REGION)
denonavr1613 = button()
denonavr1613.Image("denonavr1613.png")
denonavr1613.Region(CONFIGURATOR_REGION)
denonavr1713 = button()
denonavr1713.Image("denonavr1713.png")
denonavr1713.Region(CONFIGURATOR_REGION)
denonavr1912ci = button()
denonavr1912ci.Image("denonavr1912ci.png")
denonavr1912ci.Region(CONFIGURATOR_REGION)
denonavr1913 = button()
denonavr1913.Image("denonavr1913.png")
denonavr1913.Region(CONFIGURATOR_REGION)
denonavr2112ci = button()
denonavr2112ci.Image("denonavr2112ci.png")
denonavr2112ci.Region(CONFIGURATOR_REGION)
denonavr2113ci = button()
denonavr2113ci.Image("denonavr2113ci.png")
denonavr2113ci.Region(CONFIGURATOR_REGION)
denonavr2310ci = button()
denonavr2310ci.Image("denonavr2310ci.png")
denonavr2310ci.Region(CONFIGURATOR_REGION)
denonavr2311ci = button()
denonavr2311ci.Image("denonavr2311ci.png")
denonavr2311ci.Region(CONFIGURATOR_REGION)
denonavr2312ci = button()
denonavr2312ci.Image("denonavr2312ci.png")
denonavr2312ci.Region(CONFIGURATOR_REGION)
denonavr2313ci = button()
denonavr2313ci.Image("denonavr2313ci.png")
denonavr2313ci.Region(CONFIGURATOR_REGION)
denonavr2808ci = button()
denonavr2808ci.Image("denonavr2808ci.png")
denonavr2808ci.Region(CONFIGURATOR_REGION)
denonavr3310ci = button()
denonavr3310ci.Image("denonavr3310ci.png")
denonavr3310ci.Region(CONFIGURATOR_REGION)
denonavr3311ci = button()
denonavr3311ci.Image("denonavr3311ci.png")
denonavr3311ci.Region(CONFIGURATOR_REGION)
denonavr3312ci = button()
denonavr3312ci.Image("denonavr3312ci.png")
denonavr3312ci.Region(CONFIGURATOR_REGION)
denonavr3313ci = button()
denonavr3313ci.Image("denonavr3313ci.png")
denonavr3313ci.Region(CONFIGURATOR_REGION)
denonavr3806 = button()
denonavr3806.Image("denonavr3806.png")
denonavr3806.Region(CONFIGURATOR_REGION)
denonavr3808ci = button()
denonavr3808ci.Image("denonavr3808ci.png")
denonavr3808ci.Region(CONFIGURATOR_REGION)
denonavr4308ci = button()
denonavr4308ci.Image("denonavr4308ci.png")
denonavr4308ci.Region(CONFIGURATOR_REGION)
denonavr4310ci = button()
denonavr4310ci.Image("denonavr4310ci.png")
denonavr4310ci.Region(CONFIGURATOR_REGION)
denonavr4311ci = button()
denonavr4311ci.Image("denonavr4311ci.png")
denonavr4311ci.Region(CONFIGURATOR_REGION)
denonavr45204520ci = button()
denonavr45204520ci.Image("denonavr45204520ci.png")
denonavr45204520ci.Region(CONFIGURATOR_REGION)
denonavr4806 = button()
denonavr4806.Image("denonavr4806.png")
denonavr4806.Region(CONFIGURATOR_REGION)
denonavr4810ci = button()
denonavr4810ci.Image("denonavr4810ci.png")
denonavr4810ci.Region(CONFIGURATOR_REGION)
denonavr5308ci = button()
denonavr5308ci.Image("denonavr5308ci.png")
denonavr5308ci.Region(CONFIGURATOR_REGION)
denonavr5805 = button()
denonavr5805.Image("denonavr5805.png")
denonavr5805.Region(CONFIGURATOR_REGION)
denonavrx1000international = button()
denonavrx1000international.Image("denonavrx1000international.png")
denonavrx1000international.Region(CONFIGURATOR_REGION)
denonavrx1000us = button()
denonavrx1000us.Image("denonavrx1000us.png")
denonavrx1000us.Region(CONFIGURATOR_REGION)
denonavrx1100 = button()
denonavrx1100.Image("denonavrx1100.png")
denonavrx1100.Region(CONFIGURATOR_REGION)
denonavrx2000 = button()
denonavrx2000.Image("denonavrx2000.png")
denonavrx2000.Region(CONFIGURATOR_REGION)
denonavrx2100 = button()
denonavrx2100.Image("denonavrx2100.png")
denonavrx2100.Region(CONFIGURATOR_REGION)
denonavrx3000 = button()
denonavrx3000.Image("denonavrx3000.png")
denonavrx3000.Region(CONFIGURATOR_REGION)
denonavrx3100 = button()
denonavrx3100.Image("denonavrx3100.png")
denonavrx3100.Region(CONFIGURATOR_REGION)
denonavrx4000 = button()
denonavrx4000.Image("denonavrx4000.png")
denonavrx4000.Region(CONFIGURATOR_REGION)
denonavrx4100 = button()
denonavrx4100.Image("denonavrx4100.png")
denonavrx4100.Region(CONFIGURATOR_REGION)
denonavrx5200us = button()
denonavrx5200us.Image("denonavrx5200us.png")
denonavrx5200us.Region(CONFIGURATOR_REGION)
elangm64ethernet = button()
elangm64ethernet.Image("elangm64ethernet.png")
elangm64ethernet.Region(CONFIGURATOR_REGION)
elangm64ethernet.similar(int = .50)
addnewcustomsource = button()
addnewcustomsource.Image("addnewcustomsource.png")
addnewcustomsource.Region(CONFIGURATOR_REGION)
elangmv64rs232 = button()
elangmv64rs232.Image("elangmv64rs232.png")
elangmv64rs232.Region(CONFIGURATOR_REGION)
elanm86a12zones = button()
elanm86a12zones.Image("elanm86a12zones.png")
elanm86a12zones.Region(CONFIGURATOR_REGION)
elanm86a18zones = button()
elanm86a18zones.Image("elanm86a18zones.png")
elanm86a18zones.Region(CONFIGURATOR_REGION)
selectall = button()
selectall.Image("selectall.png")
selectall.Region(CONFIGURATOR_REGION)
# NOTE(review): 0.18 is an extremely low similarity threshold -- this
# will match almost anything vaguely resembling the image.  Presumably
# intentional for a highly variable widget, but worth confirming; most
# other buttons here use 0.50+.
selectall.similar(int = .18)
elanm86a24zones = button()
elanm86a24zones.Image("elanm86a24zones.png")
elanm86a24zones.Region(CONFIGURATOR_REGION)
elanm86a6zone = button()
elanm86a6zone.Image("elanm86a6zone.png")
elanm86a6zone.Region(CONFIGURATOR_REGION)
elans1616adualchassismode = button()
elans1616adualchassismode.Image("elans1616adualchassismode.png")
elans1616adualchassismode.Region(CONFIGURATOR_REGION)
elans1616asinglechassismode = button()
elans1616asinglechassismode.Image("elans1616asinglechassismode.png")
elans1616asinglechassismode.Region(CONFIGURATOR_REGION)
elans86ap = button()
elans86ap.Image("elans86ap.png")
elans86ap.Region(CONFIGURATOR_REGION)
elans86ap12zones = button()
elans86ap12zones.Image("elans86ap12zones.png")
elans86ap12zones.Region(CONFIGURATOR_REGION)
elans86ap18zones = button()
elans86ap18zones.Image("elans86ap18zones.png")
elans86ap18zones.Region(CONFIGURATOR_REGION)
elans86ap24zones = button()
elans86ap24zones.Image("elans86ap24zones.png")
elans86ap24zones.Region(CONFIGURATOR_REGION)
elansystem1208zones = button()
elansystem1208zones.Image("elansystem1208zones.png")
elansystem1208zones.Region(CONFIGURATOR_REGION)
elansystem1216zones = button()
elansystem1216zones.Image("elansystem1216zones.png")
elansystem1216zones.Region(CONFIGURATOR_REGION)
elansystem1224zones = button()
elansystem1224zones.Image("elansystem1224zones.png")
elansystem1224zones.Region(CONFIGURATOR_REGION)
elansystem1232zones = button()
elansystem1232zones.Image("elansystem1232zones.png")
elansystem1232zones.Region(CONFIGURATOR_REGION)
elanv8 = button()
elanv8.Image("elanv8.png")
elanv8.Region(CONFIGURATOR_REGION)
elanv85 = button()
elanv85.Image("elanv85.png")
elanv85.Region(CONFIGURATOR_REGION)
elanv883 = button()
elanv883.Image("elanv883.png")
elanv883.Region(CONFIGURATOR_REGION)
gefenhdfst4444elr = button()
gefenhdfst4444elr.Image("gefenhdfst4444elr.png")
gefenhdfst4444elr.Region(CONFIGURATOR_REGION)
gefenhdfst848 = button()
gefenhdfst848.Image("gefenhdfst848.png")
gefenhdfst848.Region(CONFIGURATOR_REGION)
genericsinglezonecontroller = button()
genericsinglezonecontroller.Image("genericsinglezonecontroller.png")
genericsinglezonecontroller.Region(CONFIGURATOR_REGION)
integradhc806 = button()
integradhc806.Image("integradhc806.png")
integradhc806.Region(CONFIGURATOR_REGION)
integradtr203 = button()
integradtr203.Image("integradtr203.png")
integradtr203.Region(CONFIGURATOR_REGION)
integradtr303 = button()
integradtr303.Image("integradtr303.png")
integradtr303.Region(CONFIGURATOR_REGION)
integradtr306 = button()
integradtr306.Image("integradtr306.png")
integradtr306.Region(CONFIGURATOR_REGION)
integradtr401onkyotxnr1007 = button()
integradtr401onkyotxnr1007.Image("integradtr401onkyotxnr1007.png")
integradtr401onkyotxnr1007.Region(CONFIGURATOR_REGION)
integradtr403 = button()
integradtr403.Image("integradtr403.png")
integradtr403.Region(CONFIGURATOR_REGION)
integradtr404 = button()
integradtr404.Image("integradtr404.png")
integradtr404.Region(CONFIGURATOR_REGION)
integradtr405 = button()
integradtr405.Image("integradtr405.png")
integradtr405.Region(CONFIGURATOR_REGION)
integradtr406 = button()
integradtr406.Image("integradtr406.png")
integradtr406.Region(CONFIGURATOR_REGION)
integradtr46dtr56dtr66dtr76 = button()
integradtr46dtr56dtr66dtr76.Image("integradtr46dtr56dtr66dtr76.png")
integradtr46dtr56dtr66dtr76.Region(CONFIGURATOR_REGION)
integradtr49 = button()
integradtr49.Image("integradtr49.png")
integradtr49.Region(CONFIGURATOR_REGION)
integradtr501 = button()
integradtr501.Image("integradtr501.png")
integradtr501.Region(CONFIGURATOR_REGION)
integradtr503 = button()
integradtr503.Image("integradtr503.png")
integradtr503.Region(CONFIGURATOR_REGION)
integradtr504 = button()
integradtr504.Image("integradtr504.png")
integradtr504.Region(CONFIGURATOR_REGION)
integradtr505 = button()
integradtr505.Image("integradtr505.png")
integradtr505.Region(CONFIGURATOR_REGION)
integradtr506 = button()
integradtr506.Image("integradtr506.png")
integradtr506.Region(CONFIGURATOR_REGION)
integradtr59 = button()
integradtr59.Image("integradtr59.png")
integradtr59.Region(CONFIGURATOR_REGION)
integradtr605 = button()
integradtr605.Image("integradtr605.png")
integradtr605.Region(CONFIGURATOR_REGION)
integradtr606 = button()
integradtr606.Image("integradtr606.png")
integradtr606.Region(CONFIGURATOR_REGION)
integradtr701onkyotxnr3007 = button()
integradtr701onkyotxnr3007.Image("integradtr701onkyotxnr3007.png")
integradtr701onkyotxnr3007.Region(CONFIGURATOR_REGION)
integradtr703dtr803 = button()
integradtr703dtr803.Image("integradtr703dtr803.png")
integradtr703dtr803.Region(CONFIGURATOR_REGION)
integradtr704 = button()
integradtr704.Image("integradtr704.png")
integradtr704.Region(CONFIGURATOR_REGION)
integradtr706 = button()
integradtr706.Image("integradtr706.png")
integradtr706.Region(CONFIGURATOR_REGION)
integradtr74dtr54 = button()
integradtr74dtr54.Image("integradtr74dtr54.png")
integradtr74dtr54.Region(CONFIGURATOR_REGION)
integradtr79dtr69onkyotxsr806txsr706 = button()
integradtr79dtr69onkyotxsr806txsr706.Image("integradtr79dtr69onkyotxsr806txsr706.png")
integradtr79dtr69onkyotxsr806txsr706.Region(CONFIGURATOR_REGION)
integradtr801onkyotxnr5007 = button()
integradtr801onkyotxnr5007.Image("integradtr801onkyotxnr5007.png")
integradtr801onkyotxnr5007.Region(CONFIGURATOR_REGION)
integradtr89onkyotxsr876prsc886 = button()
integradtr89onkyotxsr876prsc886.Image("integradtr89onkyotxsr876prsc886.png")
integradtr89onkyotxsr876prsc886.Region(CONFIGURATOR_REGION)
integradtr99onkyotxnr906 = button()
integradtr99onkyotxnr906.Image("integradtr99onkyotxnr906.png")
integradtr99onkyotxnr906.Region(CONFIGURATOR_REGION)
japhdoveripswitch = button()
japhdoveripswitch.Image("japhdoveripswitch.png")
japhdoveripswitch.Region(CONFIGURATOR_REGION)
marantzav7701dtype = button()
marantzav7701dtype.Image("marantzav7701dtype.png")
marantzav7701dtype.Region(CONFIGURATOR_REGION)
marantzav7702dtype = button()
marantzav7702dtype.Image("marantzav7702dtype.png")
marantzav7702dtype.Region(CONFIGURATOR_REGION)
marantzav8801dtype = button()
marantzav8801dtype.Image("marantzav8801dtype.png")
marantzav8801dtype.Region(CONFIGURATOR_REGION)
marantznr1504dtype = button()
marantznr1504dtype.Image("marantznr1504dtype.png")
marantznr1504dtype.Region(CONFIGURATOR_REGION)
marantznr1602dtype = button()
marantznr1602dtype.Image("marantznr1602dtype.png")
marantznr1602dtype.Region(CONFIGURATOR_REGION)
marantznr1603dtype = button()
marantznr1603dtype.Image("marantznr1603dtype.png")
marantznr1603dtype.Region(CONFIGURATOR_REGION)
marantznr1604dtype = button()
marantznr1604dtype.Image("marantznr1604dtype.png")
marantznr1604dtype.Region(CONFIGURATOR_REGION)
marantznr1605dtype = button()
marantznr1605dtype.Image("marantznr1605dtype.png")
marantznr1605dtype.Region(CONFIGURATOR_REGION)
marantzsr5004 = button()
marantzsr5004.Image("marantzsr5004.png")
marantzsr5004.Region(CONFIGURATOR_REGION)
marantzsr5005 = button()
marantzsr5005.Image("marantzsr5005.png")
marantzsr5005.Region(CONFIGURATOR_REGION)
marantzsr5006dtype = button()
marantzsr5006dtype.Image("marantzsr5006dtype.png")
marantzsr5006dtype.Region(CONFIGURATOR_REGION)
marantzsr5007dtype = button()
marantzsr5007dtype.Image("marantzsr5007dtype.png")
marantzsr5007dtype.Region(CONFIGURATOR_REGION)
marantzsr5008dtype = button()
marantzsr5008dtype.Image("marantzsr5008dtype.png")
marantzsr5008dtype.Region(CONFIGURATOR_REGION)
marantzsr5009dtype = button()
marantzsr5009dtype.Image("marantzsr5009dtype.png")
marantzsr5009dtype.Region(CONFIGURATOR_REGION)
marantzsr5500560075008500 = button()
marantzsr5500560075008500.Image("marantzsr5500560075008500.png")
marantzsr5500560075008500.Region(CONFIGURATOR_REGION)
marantzsr6004 = button()
marantzsr6004.Image("marantzsr6004.png")
marantzsr6004.Region(CONFIGURATOR_REGION)
marantzsr6005dtype = button()
marantzsr6005dtype.Image("marantzsr6005dtype.png")
marantzsr6005dtype.Region(CONFIGURATOR_REGION)
marantzsr6006dtype = button()
marantzsr6006dtype.Image("marantzsr6006dtype.png")
marantzsr6006dtype.Region(CONFIGURATOR_REGION)
marantzsr6007dtype = button()
marantzsr6007dtype.Image("marantzsr6007dtype.png")
marantzsr6007dtype.Region(CONFIGURATOR_REGION)
marantzsr6008dtype = button()
marantzsr6008dtype.Image("marantzsr6008dtype.png")
marantzsr6008dtype.Region(CONFIGURATOR_REGION)
marantzsr6009dtype = button()
marantzsr6009dtype.Image("marantzsr6009dtype.png")
marantzsr6009dtype.Region(CONFIGURATOR_REGION)
marantzsr7002 = button()
marantzsr7002.Image("marantzsr7002.png")
marantzsr7002.Region(CONFIGURATOR_REGION)
marantzsr7005av7005dtype = button()
marantzsr7005av7005dtype.Image("marantzsr7005av7005dtype.png")
marantzsr7005av7005dtype.Region(CONFIGURATOR_REGION)
marantzsr7007dtype = button()
marantzsr7007dtype.Image("marantzsr7007dtype.png")
marantzsr7007dtype.Region(CONFIGURATOR_REGION)
marantzsr7008dtype = button()
marantzsr7008dtype.Image("marantzsr7008dtype.png")
marantzsr7008dtype.Region(CONFIGURATOR_REGION)
marantzsr7009dtype = button()
marantzsr7009dtype.Image("marantzsr7009dtype.png")
marantzsr7009dtype.Region(CONFIGURATOR_REGION)
marantzsr8001 = button()
marantzsr8001.Image("marantzsr8001.png")
marantzsr8001.Region(CONFIGURATOR_REGION)
marantzsr8002 = button()
marantzsr8002.Image("marantzsr8002.png")
marantzsr8002.Region(CONFIGURATOR_REGION)
marantzsr9600 = button()
marantzsr9600.Image("marantzsr9600.png")
marantzsr9600.Region(CONFIGURATOR_REGION)
nilesgxr2ethernet = button()
nilesgxr2ethernet.Image("nilesgxr2ethernet.png")
nilesgxr2ethernet.Region(CONFIGURATOR_REGION)
nilesmrc6430 = button()
nilesmrc6430.Image("nilesmrc6430.png")
nilesmrc6430.Region(CONFIGURATOR_REGION)
nuvoconcerto = button()
nuvoconcerto.Image("nuvoconcerto.png")
nuvoconcerto.Region(CONFIGURATOR_REGION)
nuvoessentia = button()
nuvoessentia.Image("nuvoessentia.png")
nuvoessentia.Region(CONFIGURATOR_REGION)
nuvoessentianve6g12zone = button()
nuvoessentianve6g12zone.Image("nuvoessentianve6g12zone.png")
nuvoessentianve6g12zone.Region(CONFIGURATOR_REGION)
nuvoessentianve6g6zone = button()
nuvoessentianve6g6zone.Image("nuvoessentianve6g6zone.png")
nuvoessentianve6g6zone.Region(CONFIGURATOR_REGION)
nuvograndconcerto16zone = button()
nuvograndconcerto16zone.Image("nuvograndconcerto16zone.png")
nuvograndconcerto16zone.Region(CONFIGURATOR_REGION)
nuvograndconcerto8zone = button()
nuvograndconcerto8zone.Image("nuvograndconcerto8zone.png")
nuvograndconcerto8zone.Region(CONFIGURATOR_REGION)
onkyoprsc5530 = button()
onkyoprsc5530.Image("onkyoprsc5530.png")
onkyoprsc5530.Region(CONFIGURATOR_REGION)
onkyotxnr1009txnr3009txnr5009 = button()
onkyotxnr1009txnr3009txnr5009.Image("onkyotxnr1009txnr3009txnr5009.png")
onkyotxnr1009txnr3009txnr5009.Region(CONFIGURATOR_REGION)
onkyotxnr1010 = button()
onkyotxnr1010.Image("onkyotxnr1010.png")
onkyotxnr1010.Region(CONFIGURATOR_REGION)
onkyotxnr1030 = button()
onkyotxnr1030.Image("onkyotxnr1030.png")
onkyotxnr1030.Region(CONFIGURATOR_REGION)
onkyotxnr3010 = button()
onkyotxnr3010.Image("onkyotxnr3010.png")
onkyotxnr3010.Region(CONFIGURATOR_REGION)
onkyotxnr3030 = button()
onkyotxnr3030.Image("onkyotxnr3030.png")
onkyotxnr3030.Region(CONFIGURATOR_REGION)
onkyotxnr5010 = button()
onkyotxnr5010.Image("onkyotxnr5010.png")
onkyotxnr5010.Region(CONFIGURATOR_REGION)
onkyotxnr515 = button()
onkyotxnr515.Image("onkyotxnr515.png")
onkyotxnr515.Region(CONFIGURATOR_REGION)
onkyotxnr525 = button()
onkyotxnr525.Image("onkyotxnr525.png")
onkyotxnr525.Region(CONFIGURATOR_REGION)
onkyotxnr535 = button()
onkyotxnr535.Image("onkyotxnr535.png")
onkyotxnr535.Region(CONFIGURATOR_REGION)
onkyotxnr609 = button()
onkyotxnr609.Image("onkyotxnr609.png")
onkyotxnr609.Region(CONFIGURATOR_REGION)
onkyotxnr616 = button()
onkyotxnr616.Image("onkyotxnr616.png")
onkyotxnr616.Region(CONFIGURATOR_REGION)
onkyotxnr626 = button()
onkyotxnr626.Image("onkyotxnr626.png")
onkyotxnr626.Region(CONFIGURATOR_REGION)
onkyotxnr636htrc660 = button()
onkyotxnr636htrc660.Image("onkyotxnr636htrc660.png")
onkyotxnr636htrc660.Region(CONFIGURATOR_REGION)
onkyotxnr709 = button()
onkyotxnr709.Image("onkyotxnr709.png")
onkyotxnr709.Region(CONFIGURATOR_REGION)
onkyotxnr717 = button()
onkyotxnr717.Image("onkyotxnr717.png")
onkyotxnr717.Region(CONFIGURATOR_REGION)
onkyotxnr727 = button()
onkyotxnr727.Image("onkyotxnr727.png")
onkyotxnr727.Region(CONFIGURATOR_REGION)
onkyotxnr727.similar(.50)
onkyotxnr737 = button()
onkyotxnr737.Image("onkyotxnr737.png")
onkyotxnr737.Region(CONFIGURATOR_REGION)
onkyotxnr809 = button()
onkyotxnr809.Image("onkyotxnr809.png")
onkyotxnr809.Region(CONFIGURATOR_REGION)
onkyotxnr818 = button()
onkyotxnr818.Image("onkyotxnr818.png")
onkyotxnr818.Region(CONFIGURATOR_REGION)
onkyotxnr818.similar(.50)
onkyotxnr828 = button()
onkyotxnr828.Image("onkyotxnr828.png")
onkyotxnr828.Region(CONFIGURATOR_REGION)
onkyotxnr828.similar(.50)
onkyotxnr838 = button()
onkyotxnr838.Image("onkyotxnr838.png")
onkyotxnr838.Region(CONFIGURATOR_REGION)
onkyotxnr838.similar(.50)
onkyotxnr929 = button()
onkyotxnr929.Image("onkyotxnr929.png")
onkyotxnr929.Region(CONFIGURATOR_REGION)
onkyotxnr929.similar(.50)
pioneersc1223k = button()
pioneersc1223k.Image("pioneersc1223k.png")
pioneersc1223k.Region(CONFIGURATOR_REGION)
pioneersc1323k = button()
pioneersc1323k.Image("pioneersc1323k.png")
pioneersc1323k.Region(CONFIGURATOR_REGION)
pioneersc1523k = button()
pioneersc1523k.Image("pioneersc1523k.png")
pioneersc1523k.Region(CONFIGURATOR_REGION)
pioneersc2023k = button()
pioneersc2023k.Image("pioneersc2023k.png")
pioneersc2023k.Region(CONFIGURATOR_REGION)
pioneersc55 = button()
pioneersc55.Image("pioneersc55.png")
pioneersc55.Region(CONFIGURATOR_REGION)
pioneersc57 = button()
pioneersc57.Image("pioneersc57.png")
pioneersc57.Region(CONFIGURATOR_REGION)
pioneersc61 = button()
pioneersc61.Image("pioneersc61.png")
pioneersc61.Region(CONFIGURATOR_REGION)
pioneersc63 = button()
pioneersc63.Image("pioneersc63.png")
pioneersc63.Region(CONFIGURATOR_REGION)
pioneersc65 = button()
pioneersc65.Image("pioneersc65.png")
pioneersc65.Region(CONFIGURATOR_REGION)
pioneersc67 = button()
pioneersc67.Image("pioneersc67.png")
pioneersc67.Region(CONFIGURATOR_REGION)
pioneersc68 = button()
pioneersc68.Image("pioneersc68.png")
pioneersc68.Region(CONFIGURATOR_REGION)
pioneersc71 = button()
pioneersc71.Image("pioneersc71.png")
pioneersc71.Region(CONFIGURATOR_REGION)
pioneersc72 = button()
pioneersc72.Image("pioneersc72.png")
pioneersc72.Region(CONFIGURATOR_REGION)
pioneersc75 = button()
pioneersc75.Image("pioneersc75.png")
pioneersc75.Region(CONFIGURATOR_REGION)
pioneersc77 = button()
pioneersc77.Image("pioneersc77.png")
pioneersc77.Region(CONFIGURATOR_REGION)
pioneersc79 = button()
pioneersc79.Image("pioneersc79.png")
pioneersc79.Region(CONFIGURATOR_REGION)
pioneersclx57k = button()
pioneersclx57k.Image("pioneersclx57k.png")
pioneersclx57k.Region(CONFIGURATOR_REGION)
pioneersclx77k = button()
pioneersclx77k.Image("pioneersclx77k.png")
pioneersclx77k.Region(CONFIGURATOR_REGION)
pioneersclx87k = button()
pioneersclx87k.Image("pioneersclx87k.png")
pioneersclx87k.Region(CONFIGURATOR_REGION)
pioneervsx1123k = button()
pioneervsx1123k.Image("pioneervsx1123k.png")
pioneervsx1123k.Region(CONFIGURATOR_REGION)
pioneervsx50 = button()
pioneervsx50.Image("pioneervsx50.png")
pioneervsx50.Region(CONFIGURATOR_REGION)
pioneervsx51 = button()
pioneervsx51.Image("pioneervsx51.png")
pioneervsx51.Region(CONFIGURATOR_REGION)
pioneervsx52 = button()
pioneervsx52.Image("pioneervsx52.png")
pioneervsx52.Region(CONFIGURATOR_REGION)
pioneervsx53 = button()
pioneervsx53.Image("pioneervsx53.png")
pioneervsx53.Region(CONFIGURATOR_REGION)
pioneervsx60 = button()
pioneervsx60.Image("pioneervsx60.png")
pioneervsx60.Region(CONFIGURATOR_REGION)
pioneervsx70k = button()
pioneervsx70k.Image("pioneervsx70k.png")
pioneervsx70k.Region(CONFIGURATOR_REGION)
pioneervsx923k = button()
pioneervsx923k.Image("pioneervsx923k.png")
pioneervsx923k.Region(CONFIGURATOR_REGION)
snapavb100b3004x4or8x8 = button()
snapavb100b3004x4or8x8.Image("snapavb100b3004x4or8x8.png")
snapavb100b3004x4or8x8.Region(CONFIGURATOR_REGION)
speakercraftmra664 = button()
speakercraftmra664.Image("speakercraftmra664.png")
speakercraftmra664.Region(CONFIGURATOR_REGION)
speakercraftmzc64 = button()
speakercraftmzc64.Image("speakercraftmzc64.png")
speakercraftmzc64.Region(CONFIGURATOR_REGION)
speakercraftmzc648zone = button()
speakercraftmzc648zone.Image("speakercraftmzc648zone.png")
speakercraftmzc648zone.Region(CONFIGURATOR_REGION)
speakercraftmzc66 = button()
speakercraftmzc66.Image("speakercraftmzc66.png")
speakercraftmzc66.Region(CONFIGURATOR_REGION)
speakercraftmzc6612zone = button()
speakercraftmzc6612zone.Image("speakercraftmzc6612zone.png")
speakercraftmzc6612zone.Region(CONFIGURATOR_REGION)
speakercraftmzc6618zone = button()
speakercraftmzc6618zone.Image("speakercraftmzc6618zone.png")
speakercraftmzc6618zone.Region(CONFIGURATOR_REGION)
speakercraftmzc6624zone = button()
speakercraftmzc6624zone.Image("speakercraftmzc6624zone.png")
speakercraftmzc6624zone.Region(CONFIGURATOR_REGION)
speakercraftmzc88 = button()
speakercraftmzc88.Image("speakercraftmzc88.png")
speakercraftmzc88.Region(CONFIGURATOR_REGION)
speakercraftmzc8816zone = button()
speakercraftmzc8816zone.Image("speakercraftmzc8816zone.png")
speakercraftmzc8816zone.Region(CONFIGURATOR_REGION)
speakercraftmzc8824zone = button()
speakercraftmzc8824zone.Image("speakercraftmzc8824zone.png")
speakercraftmzc8824zone.Region(CONFIGURATOR_REGION)
speakercraftmzc8832zone = button()
speakercraftmzc8832zone.Image("speakercraftmzc8832zone.png")
speakercraftmzc8832zone.Region(CONFIGURATOR_REGION)
sunfiretgr3tgp5 = button()
sunfiretgr3tgp5.Image("sunfiretgr3tgp5.png")
sunfiretgr3tgp5.Region(CONFIGURATOR_REGION)
sunfiretgr401tgp401 = button()
sunfiretgr401tgp401.Image("sunfiretgr401tgp401.png")
sunfiretgr401tgp401.Region(CONFIGURATOR_REGION)
wyrestormmx0404 = button()
wyrestormmx0404.Image("wyrestormmx0404.png")
wyrestormmx0404.Region(CONFIGURATOR_REGION)
wyrestormmx0606 = button()
wyrestormmx0606.Image("wyrestormmx0606.png")
wyrestormmx0606.Region(CONFIGURATOR_REGION)
wyrestormmx0804 = button()
wyrestormmx0804.Image("wyrestormmx0804.png")
wyrestormmx0804.Region(CONFIGURATOR_REGION)
wyrestormmx0808 = button()
wyrestormmx0808.Image("wyrestormmx0808.png")
wyrestormmx0808.Region(CONFIGURATOR_REGION)
wyrestormmx0808310 = button()
wyrestormmx0808310.Image("wyrestormmx0808310.png")
wyrestormmx0808310.Region(CONFIGURATOR_REGION)
wyrestormmx0816310 = button()
wyrestormmx0816310.Image("wyrestormmx0816310.png")
wyrestormmx0816310.Region(CONFIGURATOR_REGION)
wyrestormmx1616310 = button()
wyrestormmx1616310.Image("wyrestormmx1616310.png")
wyrestormmx1616310.Region(CONFIGURATOR_REGION)
xantechhd44cc514units = button()
xantechhd44cc514units.Image("xantechhd44cc514units.png")
xantechhd44cc514units.Region(CONFIGURATOR_REGION)
xantechhd88cc514units = button()
xantechhd88cc514units.Image("xantechhd88cc514units.png")
xantechhd88cc514units.Region(CONFIGURATOR_REGION)
yamaharxa1000ynca = button()
yamaharxa1000ynca.Image("yamaharxa1000ynca.png")
yamaharxa1000ynca.Region(CONFIGURATOR_REGION)
yamaharxa1010ynca = button()
yamaharxa1010ynca.Image("yamaharxa1010ynca.png")
yamaharxa1010ynca.Region(CONFIGURATOR_REGION)
yamaharxa1020ynca = button()
yamaharxa1020ynca.Image("yamaharxa1020ynca.png")
yamaharxa1020ynca.Region(CONFIGURATOR_REGION)
yamaharxa1030ynca = button()
yamaharxa1030ynca.Image("yamaharxa1030ynca.png")
yamaharxa1030ynca.Region(CONFIGURATOR_REGION)
yamaharxa1040ynca = button()
yamaharxa1040ynca.Image("yamaharxa1040ynca.png")
yamaharxa1040ynca.Region(CONFIGURATOR_REGION)
yamaharxa2000ynca = button()
yamaharxa2000ynca.Image("yamaharxa2000ynca.png")
yamaharxa2000ynca.Region(CONFIGURATOR_REGION)
yamaharxa2010ynca = button()
yamaharxa2010ynca.Image("yamaharxa2010ynca.png")
yamaharxa2010ynca.Region(CONFIGURATOR_REGION)
yamaharxa2020ynca = button()
yamaharxa2020ynca.Image("yamaharxa2020ynca.png")
yamaharxa2020ynca.Region(CONFIGURATOR_REGION)
yamaharxa2030ynca = button()
yamaharxa2030ynca.Image("yamaharxa2030ynca.png")
yamaharxa2030ynca.Region(CONFIGURATOR_REGION)
yamaharxa2040ynca = button()
yamaharxa2040ynca.Image("yamaharxa2040ynca.png")
yamaharxa2040ynca.Region(CONFIGURATOR_REGION)
yamaharxa3000ynca = button()
yamaharxa3000ynca.Image("yamaharxa3000ynca.png")
yamaharxa3000ynca.Region(CONFIGURATOR_REGION)
yamaharxa3010ynca = button()
yamaharxa3010ynca.Image("yamaharxa3010ynca.png")
yamaharxa3010ynca.Region(CONFIGURATOR_REGION)
yamaharxa3020ynca = button()
yamaharxa3020ynca.Image("yamaharxa3020ynca.png")
yamaharxa3020ynca.Region(CONFIGURATOR_REGION)
yamaharxa3030ynca = button()
yamaharxa3030ynca.Image("yamaharxa3030ynca.png")
yamaharxa3030ynca.Region(CONFIGURATOR_REGION)
yamaharxa3040ynca = button()
yamaharxa3040ynca.Image("yamaharxa3040ynca.png")
yamaharxa3040ynca.Region(CONFIGURATOR_REGION)
yamaharxa710ynca = button()
yamaharxa710ynca.Image("yamaharxa710ynca.png")
yamaharxa710ynca.Region(CONFIGURATOR_REGION)
yamaharxa720ynca = button()
yamaharxa720ynca.Image("yamaharxa720ynca.png")
yamaharxa720ynca.Region(CONFIGURATOR_REGION)
yamaharxa730ynca = button()
yamaharxa730ynca.Image("yamaharxa730ynca.png")
yamaharxa730ynca.Region(CONFIGURATOR_REGION)
yamaharxa740ynca = button()
yamaharxa740ynca.Image("yamaharxa740ynca.png")
yamaharxa740ynca.Region(CONFIGURATOR_REGION)
yamaharxa800ynca = button()
yamaharxa800ynca.Image("yamaharxa800ynca.png")
yamaharxa800ynca.Region(CONFIGURATOR_REGION)
yamaharxa810ynca = button()
yamaharxa810ynca.Image("yamaharxa810ynca.png")
yamaharxa810ynca.Region(CONFIGURATOR_REGION)
yamaharxa820ynca = button()
yamaharxa820ynca.Image("yamaharxa820ynca.png")
yamaharxa820ynca.Region(CONFIGURATOR_REGION)
yamaharxa830ynca = button()
yamaharxa830ynca.Image("yamaharxa830ynca.png")
yamaharxa830ynca.Region(CONFIGURATOR_REGION)
yamaharxa840ynca = button()
yamaharxa840ynca.Image("yamaharxa840ynca.png")
yamaharxa840ynca.Region(CONFIGURATOR_REGION)
yamaharxv1600v2600 = button()
yamaharxv1600v2600.Image("yamaharxv1600v2600.png")
yamaharxv1600v2600.Region(CONFIGURATOR_REGION)
yamaharxv1700v2700 = button()
yamaharxv1700v2700.Image("yamaharxv1700v2700.png")
yamaharxv1700v2700.Region(CONFIGURATOR_REGION)
yamaharxv2065ethernet = button()
yamaharxv2065ethernet.Image("yamaharxv2065ethernet.png")
yamaharxv2065ethernet.Region(CONFIGURATOR_REGION)
yamaharxv2065rs232 = button()
yamaharxv2065rs232.Image("yamaharxv2065rs232.png")
yamaharxv2065rs232.Region(CONFIGURATOR_REGION)
yamaharxv3900ethernet = button()
yamaharxv3900ethernet.Image("yamaharxv3900ethernet.png")
yamaharxv3900ethernet.Region(CONFIGURATOR_REGION)
yamaharxz7ethernet = button()
yamaharxz7ethernet.Image("yamaharxz7ethernet.png")
yamaharxz7ethernet.Region(CONFIGURATOR_REGION)
yamaharxz9 = button()
yamaharxz9.Image("yamaharxz9.png")
yamaharxz9.Region(CONFIGURATOR_REGION)
# codeparrot/github-code-clean -- dataset file boundary; the anaconda
# yuminstall.py source begins below.  (Left as a comment: the original
# bare "| ... |" line was a Python syntax error.)
#
# yuminstall.py
#
# Copyright (C) 2005, 2006, 2007 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from flags import flags
from errors import *
import sys
import os
import os.path
import shutil
import time
import warnings
import types
import locale
import glob
import tempfile
import itertools
import re
import anaconda_log
import rpm
import rpmUtils
import urlgrabber.progress
import urlgrabber.grabber
from urlgrabber.grabber import URLGrabber, URLGrabError
import yum
import iniparse
from yum.constants import *
from yum.Errors import *
from yum.misc import to_unicode
from yum.yumRepo import YumRepository
from backend import AnacondaBackend
from product import *
from sortedtransaction import SplitMediaTransactionData
from constants import *
from image import *
from compssort import *
import packages
import gettext
# Translation helpers bound to the "anaconda" gettext domain.
_ = lambda x: gettext.ldgettext("anaconda", x)
P_ = lambda x, y, z: gettext.ldngettext("anaconda", x, y, z)
import network

# specspo stuff: let rpm pull translated package summaries/descriptions
# from the redhat-dist domain.
rpm.addMacro("_i18ndomains", "redhat-dist")

import logging
log = logging.getLogger("anaconda")

import urlparse
# Allow "#media" fragments to survive urlparse splitting for media repos.
urlparse.uses_fragment.append('media')

# Identify anaconda (with product/version) in outgoing HTTP requests.
urlgrabber.grabber.default_grabber.opts.user_agent = "%s (anaconda)/%s" %(productName, productVersion)

import iutil
import isys
def size_string (size):
    """Return a localized, human-readable string for a size in bytes."""
    def number_format(s):
        # Locale-aware numeric formatting with digit grouping enabled.
        return locale.format("%s", s, 1)

    if size > 1024 * 1024:
        text = _("%s MB") % (number_format(size / (1024*1024)),)
    elif size > 1024:
        text = _("%s KB") % (number_format(size / 1024),)
    else:
        # Singular/plural form chosen by the byte count itself.
        text = P_("%s Byte", "%s Bytes", size) % (number_format(size),)

    return to_unicode(text)
class AnacondaCallback:
    """rpm transaction callback that drives anaconda's install progress UI.

    Instances are handed to rpm via yum's runTransaction(); rpm invokes
    callback() with RPMCALLBACK_* events as the transaction proceeds.
    """

    def __init__(self, ayum, anaconda, instLog, modeText):
        self.repos = ayum.repos
        self.ts = ayum.ts
        self.ayum = ayum

        # UI entry points, captured from anaconda's interface object.
        self.messageWindow = anaconda.intf.messageWindow
        self.pulseWindow = anaconda.intf.progressWindow
        self.progress = anaconda.intf.instProgress
        self.progressWindowClass = anaconda.intf.progressWindow
        self.rootPath = anaconda.rootPath

        # Pulse window used during the uninstall phase of upgrades.
        self.initWindow = None

        # Transaction-preparation progress window state.
        self.progressWindow = None
        self.lastprogress = 0
        self.incr = 20

        self.instLog = instLog
        self.modeText = modeText

        # File object for the package rpm currently open for install.
        self.openfile = None
        # Package object currently being installed.
        self.inProgressPo = None

    def setSizes(self, numpkgs, totalSize, totalFiles):
        """Record transaction totals and reset the completed counters."""
        self.numpkgs = numpkgs
        self.totalSize = totalSize
        self.totalFiles = totalFiles

        self.donepkgs = 0
        self.doneSize = 0
        self.doneFiles = 0

    def callback(self, what, amount, total, h, user):
        """Dispatch one rpm callback event.

        For INST_OPEN_FILE the return value is the file descriptor rpm
        should read the package from; other events return nothing.
        """
        if what == rpm.RPMCALLBACK_TRANS_START:
            # step 6 is the bulk of the ts processing time
            if amount == 6:
                self.progressWindow = \
                    self.progressWindowClass (_("Preparing to install"),
                                              _("Preparing transaction from installation source"),
                                              total)
                self.incr = total / 10

        if what == rpm.RPMCALLBACK_TRANS_PROGRESS:
            # Only redraw every self.incr steps to keep the UI cheap.
            if self.progressWindow and amount > self.lastprogress + self.incr:
                self.progressWindow.set(amount)
                self.lastprogress = amount

        if what == rpm.RPMCALLBACK_TRANS_STOP and self.progressWindow:
            self.progressWindow.pop()

        if what == rpm.RPMCALLBACK_INST_OPEN_FILE:
            (hdr, rpmloc) = h

            # hate hate hate at epochs...
            epoch = hdr['epoch']
            if epoch is not None:
                epoch = str(epoch)

            # Map the rpm header back to the yum transaction member.
            txmbrs = self.ayum.tsInfo.matchNaevr(hdr['name'], hdr['arch'],
                                                 epoch, hdr['version'],
                                                 hdr['release'])
            if len(txmbrs) == 0:
                raise RuntimeError, "Unable to find package %s-%s-%s.%s" %(hdr['name'], hdr['version'], hdr['release'], hdr['arch'])
            po = txmbrs[0].po

            repo = self.repos.getRepo(po.repoid)

            pkgStr = "%s-%s-%s.%s" % (po.name, po.version, po.release, po.arch)
            s = to_unicode(_("<b>Installing %(pkgStr)s</b> (%(size)s)\n")) \
                % {'pkgStr': pkgStr, 'size': size_string(hdr['size'])}
            summary = to_unicode(gettext.ldgettext("redhat-dist", hdr['summary'] or ""))
            s += summary.strip()
            self.progress.set_label(s)

            self.instLog.write(self.modeText % str(pkgStr))
            self.instLog.flush()

            self.openfile = None

            # Keep retrying the download until we have an open package file;
            # _handleFailure() either exits or arranges for another attempt.
            while self.openfile is None:
                try:
                    fn = repo.getPackage(po)

                    f = open(fn, 'r')
                    self.openfile = f
                except yum.Errors.NoMoreMirrorsRepoError:
                    self.ayum._handleFailure(po)
                except IOError:
                    self.ayum._handleFailure(po)
                except yum.Errors.RepoError, e:
                    continue
            self.inProgressPo = po

            # rpm reads the package payload from this descriptor.
            return self.openfile.fileno()

        elif what == rpm.RPMCALLBACK_INST_CLOSE_FILE:
            if self.initWindow:
                self.initWindow.pop()
                self.initWindow = None

            (hdr, rpmloc) = h

            fn = self.openfile.name
            self.openfile.close()
            self.openfile = None

            # Drop downloaded packages from the cache as we go so disk
            # space isn't consumed twice.
            if os.path.dirname(fn).startswith("%s/var/cache/yum/" % self.rootPath):
                try:
                    os.unlink(fn)
                except OSError as e:
                    log.debug("unable to remove file %s" %(e.strerror,))

            self.donepkgs += 1
            self.doneSize += self.inProgressPo.returnSimple("installedsize") / 1024.0
            self.doneFiles += len(hdr[rpm.RPMTAG_BASENAMES])

            if self.donepkgs <= self.numpkgs:
                self.progress.set_text(P_("Packages completed: "
                                          "%(donepkgs)d of %(numpkgs)d",
                                          "Packages completed: "
                                          "%(donepkgs)d of %(numpkgs)d",
                                          self.numpkgs)
                                       % {'donepkgs': self.donepkgs,
                                          'numpkgs': self.numpkgs})

            self.progress.set_fraction(float(self.doneSize / self.totalSize))
            self.progress.processEvents()

            self.inProgressPo = None

        elif what in (rpm.RPMCALLBACK_UNINST_START,
                      rpm.RPMCALLBACK_UNINST_STOP):
            # Upgrade cleanup phase: show (or pulse) an indeterminate window.
            if self.initWindow is None:
                self.initWindow = self.pulseWindow(_("Finishing upgrade"),
                                                   _("Finishing upgrade process. This may take a little while."),
                                                   0, pulse=True)
            else:
                self.initWindow.pulse()

        elif what in (rpm.RPMCALLBACK_CPIO_ERROR,
                      rpm.RPMCALLBACK_UNPACK_ERROR,
                      rpm.RPMCALLBACK_SCRIPT_ERROR):
            if not isinstance(h, types.TupleType):
                h = (h, None)

            (hdr, rpmloc) = h

            # If this is a cleanup/remove, then hdr is a string not a header.
            if isinstance(hdr, rpm.hdr):
                name = hdr['name']
            else:
                name = hdr

            # Script errors store whether or not they're fatal in "total". So,
            # we should only error out for fatal script errors or the cpio and
            # unpack problems.
            if what != rpm.RPMCALLBACK_SCRIPT_ERROR or total:
                self.messageWindow(_("Error Installing Package"),
                                   _("A fatal error occurred when installing the %s "
                                     "package. This could indicate errors when reading "
                                     "the installation media. Installation cannot "
                                     "continue.") % name,
                                   type="custom", custom_icon="error",
                                   custom_buttons=[_("_Exit installer")])
                sys.exit(1)

        if self.initWindow is None:
            # Keep the UI responsive between interesting events.
            self.progress.processEvents()
class AnacondaYumRepo(YumRepository):
    """YumRepository subclass with installer-specific extras: group data
    enabled by default, cache-directory cleanup, and storage for source
    URLs (e.g. nfs:) that yum itself cannot parse."""

    def __init__(self, *args, **kwargs):
        YumRepository.__init__(self, *args, **kwargs)
        self.enablegroups = True
        self._anacondaBaseURLs = []

    def needsNetwork(self):
        """Return True if fetching from this repo requires networking."""
        def _remote(url):
            return url.startswith("http") or url.startswith("ftp")

        if len(self.baseurl) > 0:
            remote = [u for u in self.baseurl if _remote(u)]
            return len(remote) > 0
        if self.mirrorlist:
            return _remote(self.mirrorlist)
        return False

    def dirCleanup(self):
        """Remove cached data for this repo, keeping metadata for remote
        repos (only headers/ and packages/ are dropped there)."""
        cachedir = self.getAttribute('cachedir')

        if not os.path.isdir(cachedir):
            return

        if not self.needsNetwork() or self.name == "Installation Repo":
            shutil.rmtree(cachedir)
            return

        for subdir in ("headers", "packages"):
            path = "%s/%s" % (cachedir, subdir)
            if os.path.exists(path):
                shutil.rmtree(path)

    # needed to store nfs: repo url that yum doesn't know
    def _getAnacondaBaseURLs(self):
        return self._anacondaBaseURLs or self.baseurl or [self.mirrorlist]

    def _setAnacondaBaseURLs(self, value):
        self._anacondaBaseURLs = value

    anacondaBaseURLs = property(_getAnacondaBaseURLs, _setAnacondaBaseURLs,
                                doc="Extends AnacondaYum.baseurl to store non-yum urls:")
class YumSorter(yum.YumBase):
    """YumBase subclass whose transaction data is split per source medium,
    so packages can be processed disc by disc."""

    def _transactionDataFactory(self):
        # yum hook: substitute the media-aware transaction data object.
        return SplitMediaTransactionData()
class AnacondaYum(YumSorter):
    """Installer-specific yum object.

    Extends YumSorter with anaconda's media handling (CD/DVD swapping,
    loopback-mounted ISO sets, NFS trees), interactive error recovery
    through anaconda's interface, and repository configuration derived
    from the method=/repo= boot options and kickstart data.
    """

    def __init__(self, anaconda):
        YumSorter.__init__(self)
        self.anaconda = anaconda
        self._timestamp = None

        self.repoIDcounter = itertools.count()

        # Only needed for hard drive and nfsiso installs.
        self._discImages = {}
        self.isodir = None

        # Only needed for media installs.
        self.currentMedia = None
        self.mediagrabber = None

        # Where is the source media mounted? This is the directory
        # where Packages/ is located.
        self.tree = "/mnt/source"

        self.macros = {}

        if flags.selinux:
            # Pick the first readable file_contexts candidate.
            for directory in ("/tmp/updates",
                              "/etc/selinux/targeted/contexts/files",
                              "/etc/security/selinux/src/policy/file_contexts",
                              "/etc/security/selinux"):
                fn = "%s/file_contexts" %(directory,)
                if os.access(fn, os.R_OK):
                    break
            # NOTE(review): if no candidate is readable, fn is left as the
            # last path tried — confirm that is the intended fallback.
            self.macros["__file_context_path"] = fn
        else:
            self.macros["__file_context_path"] = "%{nil}"

        self.updates = []
        self.localPackages = []

    def setup(self):
        """Configure the base repository, retrying through the UI on
        failure, then run yum's config with anaconda's install root."""
        # yum doesn't understand all our method URLs, so use this for all
        # except FTP and HTTP installs.
        self._baseRepoURL = "file://%s" % self.tree

        while True:
            try:
                self.configBaseURL()
                break
            except SystemError, e:
                self.anaconda.intf.messageWindow(_("Error Setting Up Repository"),
                    _("The following error occurred while setting up the "
                      "installation repository:\n\n%(e)s\n\nPlease provide the "
                      "correct information for installing %(productName)s.")
                    % {'e': e, 'productName': productName})

                # Ask the user for a corrected method string and retry.
                self.anaconda.methodstr = self.anaconda.intf.methodstrRepoWindow(self.anaconda.methodstr or "cdrom:")

        self.doConfigSetup(root=self.anaconda.rootPath)
        self.conf.installonlypkgs = []

    def _switchCD(self, discnum):
        """Prompt-and-verify loop to get physical disc `discnum` mounted
        at self.tree; updates self.currentMedia on success."""
        if os.access("%s/.discinfo" % self.tree, os.R_OK):
            f = open("%s/.discinfo" % self.tree)
            self._timestamp = f.readline().strip()
            f.close()

        dev = self.anaconda.storage.devicetree.getDeviceByName(self.anaconda.mediaDevice)
        dev.format.mountpoint = self.tree

        # If self.currentMedia is None, then there shouldn't be anything
        # mounted. Before going further, see if the correct disc is already
        # in the drive. This saves a useless eject and insert if the user
        # has for some reason already put the disc in the drive.
        if self.currentMedia is None:
            try:
                dev.format.mount()

                if verifyMedia(self.tree, discnum, None):
                    self.currentMedia = discnum
                    return

                dev.format.unmount()
            except:
                pass
        else:
            unmountCD(dev, self.anaconda.intf.messageWindow)
            self.currentMedia = None

        dev.eject()

        while True:
            if self.anaconda.intf:
                self.anaconda.intf.beep()

            self.anaconda.intf.messageWindow(_("Change Disc"),
                _("Please insert %(productName)s disc %(discnum)d to continue.")
                % {'productName': productName, 'discnum': discnum})

            try:
                dev.format.mount()

                if verifyMedia(self.tree, discnum, self._timestamp):
                    self.currentMedia = discnum
                    break

                self.anaconda.intf.messageWindow(_("Wrong Disc"),
                        _("That's not the correct %s disc.")
                        % (productName,))

                dev.format.unmount()
                dev.eject()
            except:
                self.anaconda.intf.messageWindow(_("Error"),
                        _("Unable to access the disc."))

    def _switchImage(self, discnum):
        """Swap loopback-mounted ISO images so disc `discnum` is mounted
        at self.tree."""
        umountImage(self.tree, self.currentMedia)
        self.currentMedia = None

        # mountDirectory checks before doing anything, so it's safe to
        # call this repeatedly.
        mountDirectory(self.anaconda.methodstr,
                       self.anaconda.intf.messageWindow)

        self._discImages = mountImage(self.isodir, self.tree, discnum,
                                      self.anaconda.intf.messageWindow,
                                      discImages=self._discImages)
        self.currentMedia = discnum

    def configBaseURL(self):
        """Derive the base repo URL and media state from
        anaconda.methodstr (hd:, nfsiso:, nfs:, http/ftp, cdrom:) or, with
        no method string, by scanning for install media."""
        # We only have a methodstr if method= or repo= was passed to
        # anaconda. No source for this base repo (the CD media, NFS,
        # whatever) is mounted yet since loader only mounts the source
        # for the stage2 image. We need to set up the source mount
        # now.
        if flags.cmdline.has_key("preupgrade"):
            path = "/var/cache/yum/preupgrade"
            self.anaconda.methodstr = "hd::%s" % path

            self._baseRepoURL = "file:///mnt/sysimage/%s" % path
        elif self.anaconda.methodstr:
            m = self.anaconda.methodstr

            if m.startswith("hd:"):
                # hd:<device>[:<fstype>]:<path>
                if m.count(":") == 2:
                    (device, path) = m[3:].split(":")
                else:
                    (device, fstype, path) = m[3:].split(":")

                self.isodir = "/mnt/isodir/%s" % path

                # This takes care of mounting /mnt/isodir first.
                self._switchImage(1)
                self.mediagrabber = self.mediaHandler
            elif m.startswith("nfsiso:"):
                self.isodir = "/mnt/isodir"

                # Calling _switchImage takes care of mounting /mnt/isodir first.
                if not network.hasActiveNetDev():
                    if not self.anaconda.intf.enableNetwork():
                        self._baseRepoURL = None
                        return

                    urlgrabber.grabber.reset_curl_obj()

                self._switchImage(1)
                self.mediagrabber = self.mediaHandler
            elif m.startswith("http") or m.startswith("ftp:"):
                self._baseRepoURL = m
            elif m.startswith("nfs:"):
                if not network.hasActiveNetDev():
                    if not self.anaconda.intf.enableNetwork():
                        self._baseRepoURL = None
                        # NOTE(review): unlike the nfsiso branch there is no
                        # return here, so the mount below is still attempted
                        # without a network — confirm whether intentional.

                    urlgrabber.grabber.reset_curl_obj()

                (opts, server, path) = iutil.parseNfsUrl(m)
                isys.mount(server+":"+path, self.tree, "nfs", options=opts)

                # This really should be fixed in loader instead but for now see
                # if there's images and if so go with this being an NFSISO
                # install instead.
                images = findIsoImages(self.tree, self.anaconda.intf.messageWindow)
                if images != {}:
                    isys.umount(self.tree, removeDir=False)
                    self.anaconda.methodstr = "nfsiso:%s" % m[4:]
                    self.configBaseURL()
                    return
            elif m.startswith("cdrom:"):
                self._switchCD(1)
                self.mediagrabber = self.mediaHandler
                self._baseRepoURL = "file://%s" % self.tree
        else:
            # No methodstr was given. In order to find an installation source,
            # we should first check to see if there's a CD/DVD with packages
            # on it, and then default to the mirrorlist URL. The user can
            # always change the repo with the repo editor later.
            cdr = scanForMedia(self.tree, self.anaconda.storage)
            if cdr:
                self.mediagrabber = self.mediaHandler
                self.anaconda.mediaDevice = cdr
                self.currentMedia = 1
                log.info("found installation media on %s" % cdr)
            else:
                # No CD with media on it and no repo=/method= parameter, so
                # default to using whatever's enabled in /etc/yum.repos.d/
                self._baseRepoURL = None

    def configBaseRepo(self, root='/'):
        """Create and register the base installation repo(s), if a base
        URL was determined by configBaseURL()."""
        # Create the "base" repo object, assuming there is one. Otherwise we
        # just skip all this and use the defaults from /etc/yum.repos.d.
        if not self._baseRepoURL:
            return

        # add default repos
        anacondabaseurl = (self.anaconda.methodstr or
                           "cdrom:%s" % (self.anaconda.mediaDevice))
        anacondabasepaths = self.anaconda.instClass.getPackagePaths(anacondabaseurl)
        for (name, uri) in self.anaconda.instClass.getPackagePaths(self._baseRepoURL).items():
            rid = name.replace(" ", "")

            repo = AnacondaYumRepo("anaconda-%s-%s" % (rid, productStamp))
            repo.baseurl = uri
            repo.anacondaBaseURLs = anacondabasepaths[name]

            repo.name = name
            repo.cost = 100

            if self.anaconda.mediaDevice or self.isodir:
                repo.mediaid = getMediaId(self.tree)
                log.info("set mediaid of repo %s to: %s" % (rid, repo.mediaid))

            repo.enable()
            self.repos.add(repo)

    def mediaHandler(self, *args, **kwargs):
        """urlgrabber media hook: switch to the disc that holds the
        requested package, then grab it from the mounted tree."""
        mediaid = kwargs["mediaid"]
        discnum = kwargs["discnum"]
        relative = kwargs["relative"]

        # The package exists on media other than what's mounted right now.
        if discnum != self.currentMedia:
            log.info("switching from media #%s to #%s for %s" %
                     (self.currentMedia, discnum, relative))

            # Unmount any currently mounted ISO images and mount the one
            # containing the requested packages.
            if self.isodir:
                self._switchImage(discnum)
            else:
                self._switchCD(discnum)

        ug = URLGrabber(checkfunc=kwargs["checkfunc"])
        ug.urlgrab("%s/%s" % (self.tree, kwargs["relative"]), kwargs["local"],
                   text=kwargs["text"], range=kwargs["range"], copy_local=1)
        return kwargs["local"]

    # XXX: This is straight out of yum, but we need to override it here in
    # order to use our own repo class.
    def readRepoConfig(self, parser, section):
        '''Parse an INI file section for a repository.
        @param parser: ConfParser or similar to read INI file values from.
        @param section: INI file section to read.
        @return: YumRepository instance.
        '''
        repo = AnacondaYumRepo(section)
        repo.populate(parser, section, self.conf)

        # Ensure that the repo name is set
        if not repo.name:
            repo.name = section
            self.logger.error(_('Repository %r is missing name in configuration, '
                                'using id') % section)

        # Set attributes not from the config file
        repo.yumvar.update(self.conf.yumvar)
        repo.cfg = parser

        # Source/debuginfo repos are never wanted during installation.
        if "-source" in repo.id or "-debuginfo" in repo.id:
            name = repo.name
            del(repo)
            raise RepoError, "Repo %s contains -source or -debuginfo, excluding" % name

        # this is a little hard-coded, but it's effective
        if not BETANAG and ("rawhide" in repo.id or "development" in repo.id):
            name = repo.name
            del(repo)
            raise RepoError, "Excluding devel repo %s for non-devel anaconda" % name

        if BETANAG and not repo.enabled:
            name = repo.name
            del(repo)
            raise RepoError, "Excluding disabled repo %s for prerelease" % name

        # If repo=/method= was passed in, we want to default these extra
        # repos to off.
        if self._baseRepoURL:
            repo.enabled = False

        return repo

    # We need to make sure $releasever gets set up before .repo files are
    # read. Since there's no redhat-release package in /mnt/sysimage (and
    # won't be for quite a while), we need to do our own substutition.
    def _getReleasever(self):
        """Read [general]/version from the tree's .treeinfo, falling back
        to productVersion on any failure."""
        from ConfigParser import ConfigParser
        c = ConfigParser()

        try:
            if os.access("%s/.treeinfo" % self.anaconda.methodstr, os.R_OK):
                # Unbound-style call; equivalent to c.read(path).
                ConfigParser.read(c, "%s/.treeinfo" % self.anaconda.methodstr)
            else:
                ug = URLGrabber()
                ug.urlgrab("%s/.treeinfo" % self.anaconda.methodstr,
                           "/tmp/.treeinfo", copy_local=1)
                ConfigParser.read(c, "/tmp/.treeinfo")

            return c.get("general", "version")
        except:
            return productVersion

    # Override this method so yum doesn't nuke our existing logging config.
    def doLoggingSetup(self, *args, **kwargs):
        """Route yum's loggers to /tmp/yum.log and tty3 instead of letting
        yum install its own handlers."""
        import yum.logginglevels

        file_handler = logging.FileHandler("/tmp/yum.log")
        file_formatter = logging.Formatter("[%(asctime)s] %(levelname)-8s: %(message)s")
        file_handler.setFormatter(file_formatter)

        tty3_handler = logging.FileHandler("/dev/tty3")
        tty3_formatter = logging.Formatter(anaconda_log.TTY_FORMAT,
                                           anaconda_log.DATE_FORMAT)
        tty3_handler.setFormatter(tty3_formatter)

        verbose = logging.getLogger("yum.verbose")
        verbose.setLevel(logging.DEBUG)
        verbose.propagate = False
        verbose.addHandler(file_handler)

        logger = logging.getLogger("yum")
        logger.propagate = False
        logger.setLevel(yum.logginglevels.INFO_2)
        logger.addHandler(file_handler)
        anaconda_log.autoSetLevel(tty3_handler, True)
        tty3_handler.setLevel(anaconda_log.logger.tty_loglevel)
        logger.addHandler(tty3_handler)

        # XXX filelogger is set in setFileLog - do we or user want it?
        filelogger = logging.getLogger("yum.filelogging")
        filelogger.setLevel(logging.INFO)
        filelogger.propagate = False

    def doConfigSetup(self, fn='/tmp/anaconda-yum.conf', root='/'):
        """Run yum's config, then register the base repo, driver-disk
        repos, and kickstart-defined repos."""
        if hasattr(self, "preconf"):
            self.preconf.fn = fn
            self.preconf.root = root
            self.preconf.releasever = self._getReleasever()
            self.preconf.enabled_plugins = ["whiteout", "blacklist"]
            YumSorter._getConfig(self)
        else:
            YumSorter._getConfig(self, fn=fn, root=root,
                                 enabled_plugins=["whiteout", "blacklist"])
        self.configBaseRepo(root=root)

        extraRepos = []

        ddArch = os.uname()[4]

        #Add the Driver disc repos to Yum
        for d in glob.glob(DD_RPMS):
            dirname = os.path.basename(d)
            rid = "anaconda-%s" % dirname

            repo = AnacondaYumRepo(rid)
            repo.baseurl = [ "file:///%s" % d ]
            repo.name = "Driver Disk %s" % dirname.split("-")[1]
            repo.enable()
            extraRepos.append(repo)

        if self.anaconda.ksdata:
            # This is the same pattern as from loader/urls.c:splitProxyParam.
            # NOTE(review): Python's re module does not support POSIX classes
            # like [[:alpha:]]; inside [...] these match the literal
            # characters instead — confirm the proxy regex behaves as intended.
            pattern = re.compile("([[:alpha:]]+://)?(([[:alnum:]]+)(:[^:@]+)?@)?([^:]+)(:[[:digit:]]+)?(/.*)?")

            for ksrepo in self.anaconda.ksdata.repo.repoList:
                anacondaBaseURLs = [ksrepo.baseurl]

                # yum doesn't understand nfs:// and doesn't want to. We need
                # to first do the mount, then translate it into a file:// that
                # yum does understand.
                # "nfs:" and "nfs://" prefixes are accepted in ks repo --baseurl
                if ksrepo.baseurl and ksrepo.baseurl.startswith("nfs:"):
                    if not network.hasActiveNetDev() and not self.anaconda.intf.enableNetwork():
                        self.anaconda.intf.messageWindow(_("No Network Available"),
                            _("Some of your software repositories require "
                              "networking, but there was an error enabling the "
                              "network on your system."),
                            type="custom", custom_icon="error",
                            custom_buttons=[_("_Exit installer")])
                        sys.exit(1)

                    urlgrabber.grabber.reset_curl_obj()

                    dest = tempfile.mkdtemp("", ksrepo.name.replace(" ", ""), "/mnt")

                    # handle "nfs://" prefix
                    if ksrepo.baseurl[4:6] == '//':
                        ksrepo.baseurl = ksrepo.baseurl.replace('//', '', 1)
                        anacondaBaseURLs = [ksrepo.baseurl]

                    try:
                        isys.mount(ksrepo.baseurl[4:], dest, "nfs")
                    except Exception as e:
                        log.error("error mounting NFS repo: %s" % e)

                    ksrepo.baseurl = "file://%s" % dest

                repo = AnacondaYumRepo(ksrepo.name)
                repo.mirrorlist = ksrepo.mirrorlist
                repo.name = ksrepo.name

                if not ksrepo.baseurl:
                    repo.baseurl = []
                else:
                    repo.baseurl = [ ksrepo.baseurl ]
                repo.anacondaBaseURLs = anacondaBaseURLs

                if ksrepo.cost:
                    repo.cost = ksrepo.cost

                if ksrepo.excludepkgs:
                    repo.exclude = ksrepo.excludepkgs

                if ksrepo.includepkgs:
                    repo.include = ksrepo.includepkgs

                if ksrepo.proxy:
                    m = pattern.match(ksrepo.proxy)

                    if m and m.group(5):
                        # If both a host and port was found, just paste them
                        # together using the colon at the beginning of the port
                        # match as a separator. Otherwise, just use the host.
                        if m.group(6):
                            repo.proxy = m.group(5) + m.group(6)
                        else:
                            repo.proxy = m.group(5)

                        # yum also requires a protocol. If none was given,
                        # default to http.
                        if m.group(1):
                            repo.proxy = m.group(1) + repo.proxy
                        else:
                            repo.proxy = "http://" + repo.proxy

                    if m and m.group(3):
                        repo.proxy_username = m.group(3)

                    if m and m.group(4):
                        # Skip the leading colon.
                        repo.proxy_password = m.group(4)[1:]

                repo.enable()
                extraRepos.append(repo)

        for repo in extraRepos:
            try:
                self.repos.add(repo)
                log.info("added repository %s with URL %s" % (repo.name, repo.mirrorlist or repo.baseurl))
            except:
                log.warning("ignoring duplicate repository %s with URL %s" % (repo.name, repo.mirrorlist or repo.baseurl))

        self.repos.setCacheDir(self.conf.cachedir)

        if os.path.exists("%s/boot/upgrade/install.img" % self.anaconda.rootPath):
            log.info("REMOVING stage2 image from %s /boot/upgrade" % self.anaconda.rootPath )
            try:
                os.unlink("%s/boot/upgrade/install.img" % self.anaconda.rootPath)
            except:
                log.warning("failed to clean /boot/upgrade")

    def downloadHeader(self, po):
        """Download a package header, retrying (or failing interactively)
        until it succeeds."""
        while True:
            # retrying version of download header
            try:
                YumSorter.downloadHeader(self, po)
                break
            except yum.Errors.NoMoreMirrorsRepoError:
                self._handleFailure(po)
            except IOError:
                self._handleFailure(po)
            except yum.Errors.RepoError, e:
                continue

    def _handleFailure(self, package):
        """Tell the user a package could not be read and offer reboot or
        retry (eject+retry for physical media)."""
        if not self.isodir and self.currentMedia:
            buttons = [_("Re_boot"), _("_Eject")]
        else:
            buttons = [_("Re_boot"), _("_Retry")]

        pkgFile = to_unicode(os.path.basename(package.remote_path))
        rc = self.anaconda.intf.messageWindow(_("Error"),
                _("The file %s cannot be opened. This is due to a missing "
                  "file, a corrupt package or corrupt media. Please "
                  "verify your installation source.\n\n"
                  "If you exit, your system will be left in an inconsistent "
                  "state that will likely require reinstallation.\n\n") %
                              (pkgFile,),
                type="custom", custom_icon="error",
                custom_buttons=buttons)

        if rc == 0:
            sys.exit(0)
        else:
            # Remove any partially downloaded copy before retrying.
            if os.path.exists(package.localPkg()):
                os.unlink(package.localPkg())

            if not self.isodir and self.currentMedia:
                self._switchCD(self.currentMedia)
            else:
                return

    def mirrorFailureCB (self, obj, *args, **kwargs):
        """yum mirror-failure hook: log the failure and unmount media so a
        retry starts clean."""
        # This gets called when a mirror fails, but it cannot know whether
        # or not there are other mirrors left to try, since it cannot know
        # which mirror we were on when we started this particular download.
        # Whenever we have run out of mirrors the grabber's get/open/retrieve
        # method will raise a URLGrabError exception with errno 256.
        grab = self.repos.getRepo(kwargs["repo"]).grab
        log.warning("Failed to get %s from mirror %d/%d, "
                    "or downloaded file is corrupt" % (obj.url, grab._next + 1,
                                                       len(grab.mirrors)))

        if self.currentMedia:
            dev = self.anaconda.storage.devicetree.getDeviceByName(self.anaconda.mediaDevice)
            dev.format.mountpoint = self.tree
            unmountCD(dev, self.anaconda.intf.messageWindow)
            self.currentMedia = None

    def urlgrabberFailureCB (self, obj, *args, **kwargs):
        """urlgrabber retry hook: log the failed attempt and back off
        exponentially (0.25s * 2^(tries-1)) before the next one."""
        if hasattr(obj, "exception"):
            log.warning("Try %s/%s for %s failed: %s" % (obj.tries, obj.retry, obj.url, obj.exception))
        else:
            log.warning("Try %s/%s for %s failed" % (obj.tries, obj.retry, obj.url))

        if obj.tries == obj.retry:
            return

        delay = 0.25*(2**(obj.tries-1))
        if delay > 1:
            w = self.anaconda.intf.waitWindow(_("Retrying"), _("Retrying download."))
            time.sleep(delay)
            w.pop()
        else:
            time.sleep(delay)

    def getDownloadPkgs(self):
        """Return (packages, total KB installed size, total file count)
        for everything slated to be installed."""
        downloadpkgs = []
        totalSize = 0
        totalFiles = 0
        for txmbr in self.tsInfo.getMembersWithState(output_states=TS_INSTALL_STATES):
            if txmbr.po:
                totalSize += int(txmbr.po.returnSimple("installedsize")) / 1024
                for filetype in txmbr.po.returnFileTypes():
                    totalFiles += len(txmbr.po.returnFileEntries(ftype=filetype))
                downloadpkgs.append(txmbr.po)

        return (downloadpkgs, totalSize, totalFiles)

    def setColor(self):
        # On multilib arches, let rpm track both 32- and 64-bit colors.
        if rpmUtils.arch.isMultiLibArch():
            self.ts.ts.setColor(3)

    def run(self, instLog, cb, intf, id):
        """Populate and run the transaction medium by medium; returns
        DISPATCH_BACK if the user backs out."""
        def mediasort(a, b):
            # sort so that first CD comes first, etc. -99 is a magic number
            # to tell us that the cd should be last
            if a == -99:
                return 1
            elif b == -99:
                return -1

            if a < b:
                return -1
            elif a > b:
                return 1
            return 0

        self.initActionTs()
        if self.anaconda.upgrade:
            self.ts.ts.setProbFilter(~rpm.RPMPROB_FILTER_DISKSPACE)
        self.setColor()

        # If we don't have any required media assume single disc
        if self.tsInfo.reqmedia == {}:
            self.tsInfo.reqmedia[0] = None
        mkeys = self.tsInfo.reqmedia.keys()
        mkeys.sort(mediasort)

        stage2img = "%s/images/install.img" % self.tree
        if os.path.exists(stage2img):
            if self.anaconda.backend.mountInstallImage(self.anaconda, stage2img):
                self.anaconda.storage.umountFilesystems()
                return DISPATCH_BACK

        for i in mkeys:
            self.tsInfo.curmedia = i

            if i > 0:
                pkgtup = self.tsInfo.reqmedia[i][0]

            try:
                self.dsCallback = DownloadHeaderProgress(intf, self)
                self.populateTs(keepold=0)
                self.dsCallback.pop()
                self.dsCallback = None
            except RepoError, e:
                msg = _("There was an error running your transaction for "
                        "the following reason: %s\n") % str(e)

                if self.anaconda.upgrade:
                    rc = intf.messageWindow(_("Error"), msg, type="custom",
                                            custom_icon="error",
                                            custom_buttons=[_("_Exit installer")])
                    sys.exit(1)
                else:
                    rc = intf.messageWindow(_("Error"), msg,
                            type="custom", custom_icon="error",
                            custom_buttons=[_("_Back"), _("_Exit installer")])

                    if rc == 1:
                        sys.exit(1)
                    else:
                        self.tsInfo.curmedia = None
                        return DISPATCH_BACK

        self.ts.check()
        self.ts.order()

        if self._run(instLog, cb, intf) == DISPATCH_BACK:
            self.tsInfo.curmedia = None
            return DISPATCH_BACK

        self.ts.close()

    def _run(self, instLog, cb, intf):
        """Execute the rpm transaction, translating rpm problem sets into
        user-facing error dialogs."""
        # set log fd. FIXME: this is ugly. see changelog entry from 2005-09-13
        self.ts.ts.scriptFd = instLog.fileno()
        rpm.setLogFile(instLog)

        uniqueProbs = {}
        spaceneeded = {}
        spaceprob = ""
        fileConflicts = []
        fileprob = ""

        try:
            self.runTransaction(cb=cb)
        except YumBaseError, probs:
            # FIXME: we need to actually look at these problems...
            probTypes = { rpm.RPMPROB_NEW_FILE_CONFLICT : _('file conflicts'),
                          rpm.RPMPROB_FILE_CONFLICT : _('file conflicts'),
                          rpm.RPMPROB_OLDPACKAGE: _('older package(s)'),
                          rpm.RPMPROB_DISKSPACE: _('insufficient disk space'),
                          rpm.RPMPROB_DISKNODES: _('insufficient disk inodes'),
                          rpm.RPMPROB_CONFLICT: _('package conflicts'),
                          rpm.RPMPROB_PKG_INSTALLED: _('package already installed'),
                          rpm.RPMPROB_REQUIRES: _('required package'),
                          rpm.RPMPROB_BADARCH: _('package for incorrect arch'),
                          rpm.RPMPROB_BADOS: _('package for incorrect os'),
            }

            for (descr, (ty, mount, need)) in probs.value: # FIXME: probs.value???
                log.error("%s: %s" %(probTypes[ty], descr))

                if not uniqueProbs.has_key(ty) and probTypes.has_key(ty):
                    uniqueProbs[ty] = probTypes[ty]

                if ty == rpm.RPMPROB_DISKSPACE:
                    spaceneeded[mount] = need
                elif ty in [rpm.RPMPROB_NEW_FILE_CONFLICT, rpm.RPMPROB_FILE_CONFLICT]:
                    fileConflicts.append(descr)

            if spaceneeded:
                spaceprob = _("You need more space on the following "
                              "file systems:\n")

                for (mount, need) in spaceneeded.items():
                    log.info("(%s, %s)" %(mount, need))

                    if mount.startswith("/mnt/sysimage/"):
                        # NOTE(review): str.replace() returns a new string;
                        # this result is discarded, so this branch is a no-op.
                        # The elif below suggests the intent was
                        # mount = mount.replace(...) — TODO confirm.
                        mount.replace("/mnt/sysimage", "")
                    elif mount.startswith("/mnt/sysimage"):
                        mount = "/" + mount.replace("/mnt/sysimage", "")

                    spaceprob += "%d M on %s\n" % (need / (1024*1024), mount)
            elif fileConflicts:
                fileprob = _("There were file conflicts when checking the "
                             "packages to be installed:\n%s\n") % ("\n".join(fileConflicts),)

            msg = _("There was an error running your transaction for "
                    "the following reason(s): %s.\n") % ', '.join(uniqueProbs.values())

            spaceprob = to_unicode(spaceprob)
            fileprob = to_unicode(fileprob)

            if len(self.anaconda.backend.getRequiredMedia()) > 1 or self.anaconda.upgrade:
                intf.detailedMessageWindow(_("Error Running Transaction"),
                   msg, spaceprob + "\n" + fileprob, type="custom",
                   custom_icon="error", custom_buttons=[_("_Exit installer")])
                sys.exit(1)
            else:
                rc = intf.detailedMessageWindow(_("Error Running Transaction"),
                        msg, spaceprob + "\n" + fileprob, type="custom",
                        custom_icon="error",
                        custom_buttons=[_("_Back"), _("_Exit installer")])

            if rc == 1:
                sys.exit(1)
            else:
                self._undoDepInstalls()
                return DISPATCH_BACK

    def doMacros(self):
        """Push our collected macros (e.g. __file_context_path) into rpm."""
        for (key, val) in self.macros.items():
            rpm.addMacro(key, val)

    def simpleDBInstalled(self, name, arch=None):
        """Fast rpmdb check for whether `name` (optionally for `arch`) is
        installed."""
        # FIXME: doing this directly instead of using self.rpmdb.installed()
        # speeds things up by 400%
        mi = self.ts.ts.dbMatch('name', name)
        if mi.count() == 0:
            return False

        if arch is None:
            return True

        if arch in map(lambda h: h['arch'], mi):
            return True

        return False

    def isPackageInstalled(self, name = None, epoch = None, version = None,
                           release = None, arch = None, po = None):
        """Return whether the package will be installed at the end of the
        transaction (installed now and not being removed, or queued for
        install)."""
        # FIXME: this sucks. we should probably suck it into yum proper
        # but it'll need a bit of cleanup first.
        if po is not None:
            (name, epoch, version, release, arch) = po.returnNevraTuple()

        installed = False
        if name and not (epoch or version or release or arch):
            installed = self.simpleDBInstalled(name)
        elif self.rpmdb.installed(name = name, epoch = epoch, ver = version,
                                  rel = release, arch = arch):
            installed = True

        lst = self.tsInfo.matchNaevr(name = name, epoch = epoch,
                                     ver = version, rel = release,
                                     arch = arch)
        for txmbr in lst:
            if txmbr.output_state in TS_INSTALL_STATES:
                return True
        if installed and len(lst) > 0:
            # if we get here, then it was installed, but it's in the tsInfo
            # for an erase or obsoleted --> not going to be installed at end
            return False

        return installed

    def isGroupInstalled(self, grp):
        """Return whether the group is (or will be) installed."""
        if grp.selected:
            return True
        elif grp.installed and not grp.toremove:
            return True
        return False

    def _pkgExists(self, pkg):
        """Whether or not a given package exists in our universe."""
        try:
            pkgs = self.pkgSack.returnNewestByName(pkg)
            return True
        except yum.Errors.PackageSackError:
            pass

        try:
            pkgs = self.rpmdb.returnNewestByName(pkg)
            return True
        except (IndexError, yum.Errors.PackageSackError):
            pass

        return False

    def _groupHasPackages(self, grp):
        # this checks to see if the given group has any packages available
        # (ie, already installed or in the sack of available packages)
        # so that we don't show empty groups. also, if there are mandatory
        # packages and we have none of them, don't show
        for pkg in grp.mandatory_packages.keys():
            if self._pkgExists(pkg):
                return True
        if len(grp.mandatory_packages) > 0:
            return False
        for pkg in grp.default_packages.keys() + grp.optional_packages.keys():
            if self._pkgExists(pkg):
                return True
        return False
class YumBackend(AnacondaBackend):
    def __init__ (self, anaconda):
        """Write anaconda's yum configuration to /tmp/anaconda-yum.conf
        (install root, cache, plugins, optional proxy) for later setup."""
        AnacondaBackend.__init__(self, anaconda)
        self.supportsPackageSelection = True

        buf = """
[main]
installroot=%s
cachedir=/var/cache/yum/$basearch/$releasever
keepcache=0
logfile=/tmp/yum.log
metadata_expire=0
obsoletes=True
pluginpath=/usr/lib/yum-plugins,/tmp/updates/yum-plugins
pluginconfpath=/etc/yum/pluginconf.d,/tmp/updates/pluginconf.d
plugins=1
reposdir=/etc/anaconda.repos.d,/tmp/updates/anaconda.repos.d,/tmp/product/anaconda.repos.d
""" % (anaconda.rootPath)

        if anaconda.proxy:
            buf += "proxy=%s\n" % anaconda.proxy

            if anaconda.proxyUsername:
                buf += "proxy_username=%s\n" % anaconda.proxyUsername

            if anaconda.proxyPassword:
                buf += "proxy_password=%s\n" % anaconda.proxyPassword

        fd = open("/tmp/anaconda-yum.conf", "w")
        fd.write(buf)
        fd.close()
    def complete(self, anaconda):
        """Tear down after installation: unmount the source tree, remove
        the install image, and clear stale rpmdb locks."""
        if not anaconda.mediaDevice and os.path.ismount(self.ayum.tree):
            isys.umount(self.ayum.tree)

        anaconda.backend.removeInstallImage()

        # clean up rpmdb locks so that kickstart %post scripts aren't
        # unhappy (#496961)
        iutil.resetRpmDb(anaconda.rootPath)
    def doBackendSetup(self, anaconda):
        """Create and configure the AnacondaYum object, bring up the
        network if any enabled repo needs it, then set up repos, package
        sacks, and groups."""
        if anaconda.dir == DISPATCH_BACK:
            return DISPATCH_BACK

        if anaconda.upgrade:
            # FIXME: make sure that the rpmdb doesn't have stale locks :/
            iutil.resetRpmDb(anaconda.rootPath)

        anaconda.backend.freetmp(anaconda)
        self.ayum = AnacondaYum(anaconda)
        self.ayum.setup()

        self.ayum.doMacros()

        # If any enabled repositories require networking, go ahead and bring
        # it up now. No need to have people wait for the timeout when we
        # know this in advance.
        for repo in self.ayum.repos.listEnabled():
            if repo.needsNetwork() and not network.hasActiveNetDev():
                if not anaconda.intf.enableNetwork():
                    anaconda.intf.messageWindow(_("No Network Available"),
                        _("Some of your software repositories require "
                          "networking, but there was an error enabling the "
                          "network on your system."),
                        type="custom", custom_icon="error",
                        custom_buttons=[_("_Exit installer")])
                    sys.exit(1)

                urlgrabber.grabber.reset_curl_obj()
                break

        self.doRepoSetup(anaconda)
        self.doSackSetup(anaconda)
        self.doGroupSetup(anaconda)

        self.ayum.doMacros()
def doGroupSetup(self, anaconda):
    """Load comps group data, retrying until success or user exit.

    On failure the user may exit the installer or retry; a retry first
    clears any partially-loaded group data.
    """
    while True:
        try:
            self.ayum.doGroupSetup()
        except (GroupsError, NoSuchGroup, RepoError), e:
            buttons = [_("_Exit installer"), _("_Retry")]
        else:
            break # success

        rc = anaconda.intf.messageWindow(_("Error"),
                                _("Unable to read group information "
                                  "from repositories. This is "
                                  "a problem with the generation "
                                  "of your install tree."),
                                type="custom", custom_icon="error",
                                custom_buttons = buttons)
        if rc == 0:
            sys.exit(0)
        else:
            # drop any partially-loaded group data before retrying
            self.ayum._setGroups(None)
            continue
def doRepoSetup(self, anaconda, thisrepo = None, fatalerrors = True):
    """Run yum repository setup for one repo (or all enabled repos),
    using the shared per-repo error-handling driver."""
    def setup_one(repo):
        # per-repo worker invoked by the shared driver
        self.ayum.doRepoSetup(thisrepo=repo.id)

    self.__withFuncDo(anaconda, setup_one,
                      thisrepo=thisrepo, fatalerrors=fatalerrors)
def doSackSetup(self, anaconda, thisrepo = None, fatalerrors = True):
    """Run yum package-sack setup for one repo (or all enabled repos),
    using the shared per-repo error-handling driver."""
    def setup_one(repo):
        # per-repo worker invoked by the shared driver
        self.ayum.doSackSetup(thisrepo=repo.id)

    self.__withFuncDo(anaconda, setup_one,
                      thisrepo=thisrepo, fatalerrors=fatalerrors)
def __withFuncDo(self, anaconda, fn, thisrepo=None, fatalerrors=True):
    """Apply fn(repo) to one repo (or every enabled repo), wrapping each
    call in a wait window plus an error/retry UI.

    On RepoError the user may exit, edit the repo, retry, or -- for
    kickstart installs -- drop the repo and continue.  When fatalerrors
    is False the RepoError is re-raised instead of being handled here.
    """
    # Don't do this if we're being called as a dispatcher step (instead
    # of being called when a repo is added via the UI) and we're going
    # back.
    if thisrepo is None and anaconda.dir == DISPATCH_BACK:
        return

    # We want to call the function one repo at a time so we have some
    # concept of which repo didn't set up correctly.
    if thisrepo is not None:
        repos = [self.ayum.repos.getRepo(thisrepo)]
    else:
        repos = self.ayum.repos.listEnabled()

    for repo in repos:
        if repo.name is None:
            txt = _("Retrieving installation information.")
        else:
            txt = _("Retrieving installation information for %s.")%(repo.name)

        waitwin = anaconda.intf.waitWindow(_("Installation Progress"), txt)

        while True:
            try:
                fn(repo)
                waitwin.pop()
            except RepoError, e:
                waitwin.pop()
                buttons = [_("_Exit installer"), _("Edit"), _("_Retry")]
            else:
                break # success

            # kickstart installs additionally get the option to drop the
            # broken repo and carry on
            if anaconda.ksdata:
                buttons.append(_("_Continue"))

            if not fatalerrors:
                raise RepoError, e

            rc = anaconda.intf.messageWindow(_("Error"),
                               _("Unable to read package metadata. This may be "
                                 "due to a missing repodata directory. Please "
                                 "ensure that your install tree has been "
                                 "correctly generated.\n\n%s" % e),
                               type="custom", custom_icon="error",
                               custom_buttons=buttons)
            if rc == 0:
                # abort
                sys.exit(0)
            elif rc == 1:
                # edit
                anaconda.intf.editRepoWindow(repo)
                break
            elif rc == 2:
                # retry, but only if button is present
                continue
            else:
                # continue, but only if button is present
                self.ayum.repos.delete(repo.id)
                break

        # if we're in kickstart the repo may have been deleted just above
        try:
            self.ayum.repos.getRepo(repo.id)
        except RepoError:
            log.debug("repo %s has been removed" % (repo.id,))
            continue

        repo.setFailureObj(self.ayum.urlgrabberFailureCB)
        repo.setMirrorFailureObj((self.ayum.mirrorFailureCB, (),
                                  {"repo": repo.id}))

    self.ayum.repos.callback = None
def getDefaultGroups(self, anaconda):
    """Return the list of comps group ids selected by default.

    A group is included when it is flagged default in comps, or when
    its ``langonly`` tag matches one of the current language's search
    names.
    """
    langs = anaconda.instLanguage.getCurrentLangSearchList()

    # Use a list comprehension instead of map(filter(...)): the old
    # form only worked on Python 2, where map() returned a list that
    # could be .append()ed to afterwards.
    rc = [g.groupid for g in self.ayum.comps.groups if g.default]

    # add language-specific groups for the active language(s)
    for g in self.ayum.comps.groups:
        if g.langonly in langs:
            rc.append(g.groupid)

    return rc
def resetPackageSelections(self):
    """Clear every queued transaction member, conditional dependency,
    and comps group selection, leaving the selection empty."""
    tsinfo = self.ayum.tsInfo
    for member in tsinfo:
        tsinfo.remove(member.pkgtup)
    tsinfo.conditionals.clear()

    for group in self.ayum.comps.groups:
        group.selected = False
def selectModulePackages(self, anaconda, kernelPkgName):
    """Select the packages providing driver-disc modules, flavored for
    the chosen kernel variant (e.g. kernel-PAE)."""
    (base, sep, ext) = kernelPkgName.partition("-")

    # provides advertised by driver-update packages, variant-suffixed
    # when the kernel name carries a variant suffix
    if ext != "":
        moduleProvides = ["dud-%s-%s" % (name, ext)
                          for (path, name) in anaconda.extraModules]
    else:
        moduleProvides = ["dud-%s" % name
                          for (path, name) in anaconda.extraModules]

    #We need to install the packages which contain modules from DriverDiscs
    for modPath in isys.modulesWithPaths():
        if modPath.startswith(DD_EXTRACTED):
            moduleProvides.append(modPath[len(DD_EXTRACTED):])

    for module in moduleProvides:
        providers = self.ayum.returnPackagesByDep(module)
        if not providers:
            log.warning("Didn't find any package providing %s" % module)
        for pkg in providers:
            log.info("selecting package %s for %s" % (pkg.name, module))
            self.ayum.install(po=pkg)
def selectBestKernel(self, anaconda):
    """Find the best kernel package which is available and select it."""
    def getBestKernelByArch(pkgname, ayum):
        """Convenience func to find the best arch of a kernel by name"""
        try:
            pkgs = ayum.pkgSack.returnNewestByName(pkgname)
        except yum.Errors.PackageSackError:
            return None

        # Bug fix: use the ayum argument consistently -- the original
        # referenced self.ayum here, silently ignoring its parameter.
        pkgs = ayum.bestPackagesFromList(pkgs)
        if len(pkgs) == 0:
            return None

        return pkgs[0]

    def selectKernel(pkgname):
        """Select pkgname (plus its modules, and -devel when gcc is
        queued); return True on success."""
        try:
            pkg = getBestKernelByArch(pkgname, self.ayum)
        except PackageSackError:
            log.debug("no %s package" % pkgname)
            return False

        if not pkg:
            return False

        log.info("selected %s package for kernel" % pkg.name)
        self.ayum.install(po=pkg)
        self.selectModulePackages(anaconda, pkg.name)

        # pull in kernel headers when a compiler is being installed
        if len(self.ayum.tsInfo.matchNaevr(name="gcc")) > 0:
            log.debug("selecting %s-devel" % pkg.name)
            self.selectPackage("%s-devel.%s" % (pkg.name, pkg.arch))

        return True

    foundkernel = False

    # prefer the PAE kernel on hardware that supports it
    if not foundkernel and isys.isPaeAvailable():
        if selectKernel("kernel-PAE"):
            foundkernel = True

    if not foundkernel:
        selectKernel("kernel")
def selectFSPackages(self, storage):
    """Select every package needed by the configured devices and
    filesystems (e.g. mkfs tools, device support)."""
    for device in storage.fsset.devices:
        # this takes care of device and filesystem packages.
        # Explicit loop instead of map(): map() was used purely for its
        # side effect, which silently does nothing on Python 3 where
        # map is lazy.
        for pkgname in device.packages:
            self.selectPackage(pkgname)
# anaconda requires several programs on the installed system to complete
# installation, but we have no guarantees that some of these will be
# installed (they could have been removed in kickstart). So we'll force
# it.
def selectAnacondaNeeds(self):
    """Force-select the tools anaconda needs on the installed system."""
    required = ('authconfig', 'chkconfig', 'system-config-firewall-base')
    for toolpkg in required:
        self.selectPackage(toolpkg)
def doPostSelection(self, anaconda):
    """Resolve dependencies for the selected package set.

    Selects kernel/platform/filesystem packages on new installs (or
    marks everything for update on upgrades), runs buildTransaction
    with an interactive retry loop, then sanity-checks download size
    against the target filesystem and prompts for required media.
    Returns DISPATCH_BACK when the user chooses to go back.
    """
    # Only solve dependencies on the way through the installer, not the way back.
    if anaconda.dir == DISPATCH_BACK:
        return

    dscb = YumDepSolveProgress(anaconda.intf, self.ayum)
    self.ayum.dsCallback = dscb

    # do some sanity checks for kernel and bootloader
    if not anaconda.upgrade:
        # New installs only - upgrades will already have all this stuff.
        self.selectBestKernel(anaconda)
        map(self.selectPackage, anaconda.platform.packages)
        self.selectFSPackages(anaconda.storage)
        self.selectAnacondaNeeds()
    else:
        self.ayum.update()

    while True:
        try:
            (code, msgs) = self.ayum.buildTransaction()

            # If %packages --ignoremissing was given, don't bother
            # prompting for missing dependencies.
            if anaconda.ksdata and anaconda.ksdata.packages.handleMissing == KS_MISSING_IGNORE:
                break

            if code == 1 and not anaconda.upgrade:
                # resolveDeps returns 0 if empty transaction, 1 if error,
                # 2 if success
                depprob = "\n".join(msgs)

                rc = anaconda.intf.detailedMessageWindow(_("Warning"),
                        _("Some of the packages you have selected for "
                          "install are missing dependencies. You can "
                          "exit the installation, go back and change "
                          "your package selections, or continue "
                          "installing these packages without their "
                          "dependencies. If you continue, these packages "
                          "may not work correctly due to missing components."),
                        depprob + "\n", type="custom", custom_icon="error",
                        custom_buttons=[_("_Exit installer"), _("_Back"),
                                        _("_Continue")])
                dscb.pop()

                if rc == 0:
                    sys.exit(1)
                elif rc == 1:
                    self.ayum._undoDepInstalls()
                    return DISPATCH_BACK
            # transaction built (or user chose to continue): leave loop
            break
        except RepoError, e:
            # FIXME: would be nice to be able to recover here
            rc = anaconda.intf.messageWindow(_("Error"),
                       _("Unable to read package metadata. This may be "
                         "due to a missing repodata directory. Please "
                         "ensure that your install tree has been "
                         "correctly generated.\n\n%s" % e),
                       type="custom", custom_icon="error",
                       custom_buttons=[_("_Exit installer"), _("_Retry")])
            dscb.pop()

            if rc == 0:
                sys.exit(0)
            else:
                continue
        else:
            break

    (self.dlpkgs, self.totalSize, self.totalFiles) = self.ayum.getDownloadPkgs()

    if not anaconda.upgrade:
        # sanity-check free space on whichever mount will hold /usr
        largePart = anaconda.storage.mountpoints.get("/usr", anaconda.storage.rootDevice)

        if largePart and largePart.size < self.totalSize / 1024:
            rc = anaconda.intf.messageWindow(_("Error"),
                    _("Your selected packages require %d MB "
                      "of free space for installation, but "
                      "you do not have enough available. "
                      "You can change your selections or "
                      "exit the installer." % (self.totalSize / 1024)),
                    type="custom", custom_icon="error",
                    custom_buttons=[_("_Back"), _("_Exit installer")])
            dscb.pop()

            if rc == 1:
                sys.exit(1)
            else:
                self.ayum._undoDepInstalls()
                return DISPATCH_BACK

    dscb.pop()

    # interactive media installs: confirm all required discs are at hand
    if anaconda.mediaDevice and not anaconda.ksdata:
        rc = presentRequiredMediaMessage(anaconda)
        if rc == 0:
            rc2 = anaconda.intf.messageWindow(_("Reboot?"),
                                  _("The system will be rebooted now."),
                                  type="custom", custom_icon="warning",
                                  custom_buttons=[_("_Back"), _("_Reboot")])
            if rc2 == 1:
                sys.exit(0)
            else:
                return DISPATCH_BACK
        elif rc == 1: # they asked to go back
            return DISPATCH_BACK

    self.ayum.dsCallback = None
def doPreInstall(self, anaconda):
    """Prepare the target root before the rpm transaction runs.

    Creates the directory skeleton, installs the /var/lib/rpm symlink
    hack, mounts selinuxfs/usbfs into the chroot, and writes fstab,
    network, storage and keyboard configuration.  When stepping
    backwards it instead unmounts the special filesystems.
    """
    if anaconda.dir == DISPATCH_BACK:
        # going back: undo the special mounts made further below
        for d in ("/selinux", "/dev", "/proc/bus/usb"):
            try:
                isys.umount(anaconda.rootPath + d, removeDir = False)
            except Exception, e:
                log.error("unable to unmount %s: %s" %(d, e))
        return

    if anaconda.upgrade:
        # An old mtab can cause confusion (esp if loop devices are
        # in it). Be extra special careful and delete any mtab first,
        # in case the user has done something funny like make it into
        # a symlink.
        if os.access(anaconda.rootPath + "/etc/mtab", os.F_OK):
            os.remove(anaconda.rootPath + "/etc/mtab")

        f = open(anaconda.rootPath + "/etc/mtab", "w+")
        f.close()

        # we really started writing modprobe.conf out before things were
        # all completely ready. so now we need to nuke old modprobe.conf's
        # if you're upgrading from a 2.4 dist so that we can get the
        # transition right
        if (os.path.exists(anaconda.rootPath + "/etc/modules.conf") and
            os.path.exists(anaconda.rootPath + "/etc/modprobe.conf") and
            not os.path.exists(anaconda.rootPath + "/etc/modprobe.conf.anacbak")):
            log.info("renaming old modprobe.conf -> modprobe.conf.anacbak")
            os.rename(anaconda.rootPath + "/etc/modprobe.conf",
                      anaconda.rootPath + "/etc/modprobe.conf.anacbak")

    # directory skeleton rpm expects to exist before files are laid down
    dirList = ['/var', '/var/lib', '/var/lib/rpm', '/tmp', '/dev', '/etc',
               '/etc/sysconfig', '/etc/sysconfig/network-scripts',
               '/etc/X11', '/root', '/var/tmp', '/etc/rpm', '/var/cache',
               '/var/cache/yum', '/etc/modprobe.d']

    # If there are any protected partitions we want to mount, create their
    # mount points now.
    for protected in anaconda.storage.protectedDevices:
        if getattr(protected.format, "mountpoint", None):
            dirList.append(protected.format.mountpoint)

    for i in dirList:
        try:
            os.mkdir(anaconda.rootPath + i)
        except os.error, (errno, msg):
            # directory probably exists already; non-fatal
            pass
        # log.error("Error making directory %s: %s" % (i, msg))

    self.initLog(anaconda.rootPath)

    try:
        # FIXME: making the /var/lib/rpm symlink here is a hack to
        # workaround db->close() errors from rpm
        iutil.mkdirChain("/var/lib")
        for path in ("/var/tmp", "/var/lib/rpm"):
            if os.path.exists(path) and not os.path.islink(path):
                shutil.rmtree(path)
            if not os.path.islink(path):
                os.symlink("%s/%s" %(anaconda.rootPath, path), "%s" %(path,))
            else:
                log.warning("%s already exists as a symlink to %s" %(path, os.readlink(path),))
    except Exception, e:
        # how this could happen isn't entirely clear; log it in case
        # it does and causes problems later
        log.error("error creating symlink, continuing anyway: %s" %(e,))

    # SELinux hackery (#121369)
    if flags.selinux:
        try:
            os.mkdir(anaconda.rootPath + "/selinux")
        except Exception, e:
            pass

        try:
            isys.mount("/selinux", anaconda.rootPath + "/selinux", "selinuxfs")
        except Exception, e:
            log.error("error mounting selinuxfs: %s" %(e,))

    # For usbfs
    try:
        isys.mount("/proc/bus/usb", anaconda.rootPath + "/proc/bus/usb", "usbfs")
    except Exception, e:
        log.error("error mounting usbfs: %s" %(e,))

    # write out the fstab
    if not anaconda.upgrade:
        anaconda.storage.fsset.write(anaconda.rootPath)
        if os.access("/etc/modprobe.d/anaconda.conf", os.R_OK):
            shutil.copyfile("/etc/modprobe.d/anaconda.conf",
                            anaconda.rootPath + "/etc/modprobe.d/anaconda.conf")
        anaconda.network.write(instPath=anaconda.rootPath, anaconda=anaconda)
        anaconda.storage.write(anaconda.rootPath)
        if not anaconda.isHeadless:
            anaconda.keyboard.write(anaconda.rootPath)

    # make a /etc/mtab so mkinitrd can handle certain hw (usb) correctly
    f = open(anaconda.rootPath + "/etc/mtab", "w+")
    f.write(anaconda.storage.mtab)
    f.close()
def checkSupportedUpgrade(self, anaconda):
    """Run the upgrade sanity checks (release version, then arch),
    unless the user is stepping backwards through the installer."""
    if anaconda.dir != DISPATCH_BACK:
        self._checkUpgradeVersion(anaconda)
        self._checkUpgradeArch(anaconda)
def _checkUpgradeVersion(self, anaconda):
    """Warn (and allow aborting) when the installed redhat-release is
    older than the supported upgrade baseline (EVR 3-1).

    The check is only enforced for Red Hat Enterprise Linux products.
    """
    # Figure out current version for upgrade nag and for determining weird
    # upgrade cases
    supportedUpgradeVersion = -1
    for pkgtup in self.ayum.rpmdb.whatProvides('redhat-release', None, None):
        n, a, e, v, r = pkgtup
        if supportedUpgradeVersion <= 0:
            # compare the installed EVR against the minimum (3, 1)
            val = rpmUtils.miscutils.compareEVR((None, '3', '1'),
                                                (e, v,r))
            if val > 0:
                supportedUpgradeVersion = 0
            else:
                supportedUpgradeVersion = 1
                break

    # non-RHEL products skip the minimum-version nag entirely
    if "Red Hat Enterprise Linux" not in productName:
        supportedUpgradeVersion = 1

    if supportedUpgradeVersion == 0:
        rc = anaconda.intf.messageWindow(_("Warning"),
                            _("You appear to be upgrading from a system "
                              "which is too old to upgrade to this "
                              "version of %s. Are you sure you wish to "
                              "continue the upgrade "
                              "process?") %(productName,),
                            type = "yesno")
        if rc == 0:
            # user backed out: drop rpmdb locks before quitting
            iutil.resetRpmDb(anaconda.rootPath)
            sys.exit(0)
def _checkUpgradeArch(self, anaconda):
    """Warn when the arch being upgraded to differs from the installed
    arch; all i?86 flavors are treated as equivalent."""
    def compareArch(a, b):
        # i386/i486/i586/i686 count as the same family
        if re.match("i.86", a) and re.match("i.86", b):
            return True
        else:
            return a == b

    # get the arch of the initscripts package
    try:
        pkgs = self.ayum.pkgSack.returnNewestByName('initscripts')
    except yum.Errors.PackageSackError:
        log.info("no packages named initscripts")
        return None

    pkgs = self.ayum.bestPackagesFromList(pkgs)
    if len(pkgs) == 0:
        log.info("no best package")
        return
    myarch = pkgs[0].arch

    log.info("initscripts is arch: %s" %(myarch,))
    for po in self.ayum.rpmdb.getProvides('initscripts'):
        log.info("po.arch is arch: %s" %(po.arch,))
        if not compareArch(po.arch, myarch):
            rc = anaconda.intf.messageWindow(_("Warning"),
                     _("The arch of the release of %(productName)s you "
                       "are upgrading to appears to be %(myarch)s which "
                       "does not match your previously installed arch of "
                       "%(arch)s. This is likely to not succeed. Are "
                       "you sure you wish to continue the upgrade "
                       "process?")
                     % {'productName': productName,
                        'myarch': myarch,
                        'arch': po.arch},
                     type="yesno")
            if rc == 0:
                # user backed out: drop rpmdb locks before quitting
                iutil.resetRpmDb(anaconda.rootPath)
                sys.exit(0)
            else:
                log.warning("upgrade between possibly incompatible "
                            "arches %s -> %s" %(po.arch, myarch))
                break
def doInstall(self, anaconda):
    """Run the rpm transaction that installs the selected packages.

    Configures rpm macros, drives the install through AnacondaCallback
    for UI progress, and returns DISPATCH_BACK when the run is aborted
    backwards.
    """
    log.info("Preparing to install packages")

    if not anaconda.upgrade:
        # fresh installs can use a faster, non-fsync'd rpmdb config
        rpm.addMacro("__dbi_htconfig",
                     "hash nofsync %{__dbi_other} %{__dbi_perms}")

    if anaconda.ksdata and anaconda.ksdata.packages.excludeDocs:
        rpm.addMacro("_excludedocs", "1")

    cb = AnacondaCallback(self.ayum, anaconda,
                          self.instLog, self.modeText)
    cb.setSizes(len(self.dlpkgs), self.totalSize, self.totalFiles)

    rc = self.ayum.run(self.instLog, cb, anaconda.intf, anaconda.id)

    if cb.initWindow is not None:
        cb.initWindow.pop()

    self.instLog.write("*** FINISHED INSTALLING PACKAGES ***")
    self.instLog.close ()

    anaconda.intf.setInstallProgressClass(None)

    if rc == DISPATCH_BACK:
        return DISPATCH_BACK
def doPostInstall(self, anaconda):
    """Post-transaction cleanup behind a wait window.

    Sets up the graphical system, cleans per-repo scratch space,
    expires yum caches after an upgrade, removes any preupgrade cache,
    then chains to AnacondaBackend.doPostInstall.
    """
    if anaconda.upgrade:
        w = anaconda.intf.waitWindow(_("Post Upgrade"),
                                     _("Performing post-upgrade configuration"))
    else:
        w = anaconda.intf.waitWindow(_("Post Installation"),
                                     _("Performing post-installation configuration"))

    packages.rpmSetupGraphicalSystem(anaconda)

    for repo in self.ayum.repos.listEnabled():
        repo.dirCleanup()

    # expire yum caches on upgrade
    if anaconda.upgrade and os.path.exists("%s/var/cache/yum" %(anaconda.rootPath,)):
        log.info("Expiring yum caches")
        try:
            iutil.execWithRedirect("yum", ["clean", "all"],
                                   stdout="/dev/tty5", stderr="/dev/tty5",
                                   root = anaconda.rootPath)
        except Exception:
            # best effort -- narrowed from a bare "except:" so that
            # SystemExit/KeyboardInterrupt still propagate
            pass

    # nuke preupgrade
    # ("in" replaces the Python-2-only dict.has_key(), same semantics)
    if "preupgrade" in flags.cmdline and os.path.exists("%s/var/cache/yum/anaconda-upgrade" %(anaconda.rootPath,)):
        try:
            shutil.rmtree("%s/var/cache/yum/anaconda-upgrade" %(anaconda.rootPath,))
        except Exception:
            # best effort -- narrowed from a bare "except:"
            pass

    # XXX: write proper lvm config

    AnacondaBackend.doPostInstall(self, anaconda)
    w.pop()
def kernelVersionList(self, rootPath="/"):
    """Return the installed kernel versions found under rootPath,
    via packages.rpmKernelVersionList."""
    # FIXME: using rpm here is a little lame, but otherwise, we'd
    # be pulling in filelists
    return packages.rpmKernelVersionList(rootPath)
def __getGroupId(self, group):
    """Get the groupid for the given name (english or translated).

    Returns None when no group matches.
    """
    for candidate in self.ayum.comps.groups:
        # match either the canonical name or any translation of it
        known_names = [candidate.name] + list(candidate.translated_name.values())
        if group in known_names:
            return candidate.groupid
def isGroupSelected(self, group):
    """Return True when the comps group is currently selected.

    Unknown groups (GroupsError) are reported as not selected.
    """
    # modernized from the Python-2-only "except X, e" spelling; the
    # bound exception was unused, so it is dropped entirely
    try:
        grp = self.ayum.comps.return_group(group)
        if grp.selected:
            return True
    except yum.Errors.GroupsError:
        pass
    return False
def selectGroup(self, group, *args):
    """Select a comps group for installation.

    args, when given, is expected to carry a pair of booleans as its
    first element: (include default packages, include optional
    packages).  Mandatory packages are always included; without args,
    defaults are included too.  Falls back to resolving `group` as a
    (possibly translated) group name.  Raises NoSuchGroup when nothing
    matches.
    """
    if not self.ayum.comps.has_group(group):
        log.debug("no such group %s" % group)
        raise NoSuchGroup, group

    types = ["mandatory"]

    if args:
        if args[0][0]:
            types.append("default")
        if args[0][1]:
            types.append("optional")
    else:
        types.append("default")

    try:
        mbrs = self.ayum.selectGroup(group, group_package_types=types)
        # zero members is fine if the group is already selected
        if len(mbrs) == 0 and self.isGroupSelected(group):
            return
    except yum.Errors.GroupsError, e:
        # try to find out if it's the name or translated name
        gid = self.__getGroupId(group)
        if gid is not None:
            mbrs = self.ayum.selectGroup(gid, group_package_types=types)
            if len(mbrs) == 0 and self.isGroupSelected(gid):
                return
        else:
            log.debug("no such group %s" %(group,))
            raise NoSuchGroup, group
def deselectGroup(self, group, *args):
    """Deselect a comps group, falling back to resolving `group` as a
    (possibly translated) group name when the direct id fails."""
    # modernized from the Python-2-only "except X, e" spelling; the
    # bound exception was unused, so it is dropped entirely
    try:
        self.ayum.deselectGroup(group)
    except yum.Errors.GroupsError:
        # try to find out if it's the name or translated name
        gid = self.__getGroupId(group)
        if gid is not None:
            self.ayum.deselectGroup(gid)
        else:
            log.debug("no such group %s" %(group,))
def selectPackage(self, pkg, *args):
    """Queue a package spec for installation.

    Returns the number of transaction members added; 0 when the
    package is already queued or nothing matches the spec.
    """
    already_queued = self.ayum.tsInfo.matchNaevr(name=pkg)
    if already_queued:
        return 0

    try:
        added = self.ayum.install(pattern=pkg)
    except yum.Errors.InstallError:
        log.debug("no package matching %s" %(pkg,))
        return 0

    return len(added)
def deselectPackage(self, pkg, *args):
    """Remove a package (optionally given as "name.arch") from the
    transaction, including its conditional-dependency entries.

    Returns the number of transaction members removed.
    """
    # NOTE(review): maxsplit=2 yields 3 parts for dotted names, which
    # skips the name/arch fast path below; presumably maxsplit=1 was
    # intended -- confirm before changing.
    sp = pkg.rsplit(".", 2)
    txmbrs = []
    if len(sp) == 2:
        txmbrs = self.ayum.tsInfo.matchNaevr(name=sp[0], arch=sp[1])

    if len(txmbrs) == 0:
        # fall back to full pattern matching over the package sack
        exact, match, unmatch = yum.packages.parsePackages(self.ayum.pkgSack.returnPackages(), [pkg], casematch=1)
        for p in exact + match:
            txmbrs.append(p)

    if len(txmbrs) > 0:
        for x in txmbrs:
            self.ayum.tsInfo.remove(x.pkgtup)
            # we also need to remove from the conditionals
            # dict so that things don't get pulled back in as a result
            # of them. yes, this is ugly. conditionals should die.
            for req, pkgs in self.ayum.tsInfo.conditionals.iteritems():
                if x in pkgs:
                    pkgs.remove(x)
                    self.ayum.tsInfo.conditionals[req] = pkgs

        return len(txmbrs)
    else:
        log.debug("no such package %s to remove" %(pkg,))
        return 0
def groupListExists(self, grps):
    """Returns bool of whether all of the given groups exist."""
    lookup = self.ayum.comps.return_group
    return all(lookup(gid) for gid in grps)
def groupListDefault(self, grps):
    """Returns bool of whether all of the given groups are default.

    Unknown groups are ignored; at least one known group must be
    default for the result to be True.
    """
    sawDefault = False
    for gid in grps:
        grp = self.ayum.comps.return_group(gid)
        if not grp:
            continue
        if not grp.default:
            return False
        sawDefault = True
    return sawDefault
def writeKS(self, f):
    """Write a kickstart 'repo' line for every enabled repository,
    skipping the built-in installation repo."""
    for repo in self.ayum.repos.listEnabled():
        if repo.name == "Installation Repo":
            continue

        label = repo.name or repo.repoid
        if repo.baseurl:
            source = " --baseurl=%s\n" % repo.baseurl[0]
        else:
            source = " --mirrorlist=%s\n" % repo.mirrorlist

        f.write("repo --name=\"%s\" " % label + source)
def writePackagesKS(self, f, anaconda):
    """Write the %packages section of the generated kickstart file.

    With existing kickstart data the stored section is echoed verbatim.
    Otherwise the section is reconstructed from the selected comps
    groups, listing optional packages that were added and default/
    mandatory packages that were removed.
    """
    if anaconda.ksdata:
        f.write(anaconda.ksdata.packages.__str__())
        return

    groups = []
    installed = []
    removed = []

    # Faster to grab all the package names up front rather than call
    # searchNevra in the loop below.
    allPkgNames = map(lambda pkg: pkg.name, self.ayum.pkgSack.returnPackages())
    allPkgNames.sort()

    # On CD/DVD installs, we have one transaction per CD and will end up
    # checking allPkgNames against a very short list of packages. So we
    # have to reset to media #0, which is an all packages transaction.
    old = self.ayum.tsInfo.curmedia
    self.ayum.tsInfo.curmedia = 0

    self.ayum.tsInfo.makelists()
    txmbrNames = map (lambda x: x.name, self.ayum.tsInfo.getMembers())

    # restore the original media selection
    self.ayum.tsInfo.curmedia = old

    if len(self.ayum.tsInfo.instgroups) == 0 and len(txmbrNames) == 0:
        return

    f.write("\n%packages\n")

    for grp in filter(lambda x: x.selected, self.ayum.comps.groups):
        groups.append(grp.groupid)

        defaults = grp.default_packages.keys() + grp.mandatory_packages.keys()
        optionals = grp.optional_packages.keys()

        # default/mandatory members that are available but NOT in the
        # transaction were explicitly deselected
        for pkg in filter(lambda x: x in defaults and (not x in txmbrNames and x in allPkgNames), grp.packages):
            removed.append(pkg)

        # optional members that ARE in the transaction were added
        for pkg in filter(lambda x: x in txmbrNames, optionals):
            installed.append(pkg)

    for grp in groups:
        f.write("@%s\n" % grp)

    for pkg in installed:
        f.write("%s\n" % pkg)

    for pkg in removed:
        f.write("-%s\n" % pkg)

    f.write("%end")
def writeConfiguration(self):
    """Intentionally a no-op: this backend has no extra configuration
    of its own to persist."""
    return
def getRequiredMedia(self):
    # the media (disc) numbers required by the queued transaction
    return self.ayum.tsInfo.reqmedia.keys()
class DownloadHeaderProgress:
    """Progress reporter shown while transaction headers are fetched.

    Wraps an intf progress window and advances by a fixed increment for
    every package added to the transaction.
    """
    def __init__(self, intf, ayum=None):
        self.window = intf.progressWindow(_("Installation Starting"),
                                          _("Starting installation process"),
                                          1.0, 0.01)
        self.ayum = ayum
        self.current = self.loopstart = 0
        self.incr = 1

        members = None
        if ayum is not None and ayum.tsInfo is not None:
            members = ayum.tsInfo.getMembers()
        self.numpkgs = len(members) if members is not None else 0
        if self.numpkgs:
            # spread the remaining bar evenly over all packages
            self.incr = (1.0 / self.numpkgs) * (1.0 - self.loopstart)

        self.refresh()

        # yum callback hooks that map onto refresh / noop
        self.restartLoop = self.downloadHeader = self.transactionPopulation = self.refresh
        self.procReq = self.procConflict = self.unresolved = self.noop

    def noop(self, *args, **kwargs):
        pass

    def pkgAdded(self, *args):
        # advance one package's worth, but only when the total is known
        if self.numpkgs:
            self.set(self.current + self.incr)

    def pop(self):
        self.window.pop()

    def refresh(self, *args):
        self.window.refresh()

    def set(self, value):
        self.current = value
        self.window.set(self.current)
class YumDepSolveProgress:
    """Progress reporter for yum's dependency-resolution phase.

    The transaction check (tscheck) sizes its increments to cover half
    of the remaining progress bar.
    """
    def __init__(self, intf, ayum = None):
        self.window = intf.progressWindow(_("Dependency Check"),
            _("Checking dependencies in packages selected for installation"),
            1.0, 0.01)

        self.numpkgs = None
        self.loopstart = None
        self.incr = None
        self.ayum = ayum
        self.current = 0

        # yum callback hooks that map onto refresh / noop
        self.restartLoop = self.downloadHeader = self.transactionPopulation = self.refresh
        self.procReq = self.procConflict = self.unresolved = self.noop

    def tscheck(self, num = None):
        self.refresh()
        if num is None and self.ayum is not None and self.ayum.tsInfo is not None:
            num = len(self.ayum.tsInfo.getMembers())

        if num:
            self.numpkgs = num
            self.loopstart = self.current
            # the transaction check owns half of the remaining bar
            self.incr = (1.0 / num) * ((1.0 - self.loopstart) / 2)

    def pkgAdded(self, *args):
        # advance one package's worth once tscheck sized the increments
        if self.numpkgs:
            self.set(self.current + self.incr)

    def noop(self, *args, **kwargs):
        pass

    def refresh(self, *args):
        self.window.refresh()

    def set(self, value):
        self.current = value
        self.window.set(self.current)

    def start(self):
        self.set(0.0)
        self.refresh()

    def end(self):
        self.window.set(1.0)
        self.window.refresh()

    def pop(self):
        self.window.pop()
| codeparrot/github-code-clean |
"""
This module defines the SFrame class which provides the
ability to create, access and manipulate a remote scalable dataframe object.
SFrame acts similarly to pandas.DataFrame, but the data is completely immutable
and is stored column wise on the GraphLab Server side.
"""
'''
Copyright (C) 2015 Dato, Inc.
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the DATO-PYTHON-LICENSE file for details.
'''
import graphlab.connect as _mt
import graphlab.connect.main as glconnect
from graphlab.cython.cy_type_utils import infer_type_of_list
from graphlab.cython.context import debug_trace as cython_context
from graphlab.cython.cy_sframe import UnitySFrameProxy
from graphlab.util import _check_canvas_enabled, _make_internal_url, _is_callable
from graphlab.data_structures.sarray import SArray, _create_sequential_sarray
import graphlab.aggregate
import graphlab
import array
from prettytable import PrettyTable
from textwrap import wrap
import datetime
import inspect
from graphlab.deps import pandas, HAS_PANDAS
import time
import itertools
import os
import subprocess
import uuid
import platform
__all__ = ['SFrame']
# Holds references that keep otherwise-unreferenced SFrames alive.
SFRAME_GARBAGE_COLLECTOR = []

# Footer lines appended when a materialized SFrame printout is truncated.
FOOTER_STRS = ['Note: Only the head of the SFrame is printed.',
               'You can use print_rows(num_rows=m, num_columns=n) to print more rows and columns.']

# Footer lines appended when the printed SFrame is still lazily evaluated.
LAZY_FOOTER_STRS = ['Note: Only the head of the SFrame is printed. This SFrame is lazily evaluated.',
                    'You can use len(sf) to force materialization.']

# Candidate directories searched, in order, for the Spark-integration
# binaries named in SPARK_SUPPORT_NAMES below.
SFRAME_ROOTS = [# Binary/lib location in production egg
                os.path.abspath(os.path.join(os.path.dirname(
                    os.path.realpath(__file__)), '..')),
                # Build tree location of SFrame binaries
                os.path.abspath(os.path.join(os.path.dirname(
                    os.path.realpath(__file__)),
                    '..', '..', '..', '..', 'sframe')),
                # Location of python sources
                os.path.abspath(os.path.join(os.path.dirname(
                    os.path.realpath(__file__)),
                    '..', '..', '..', '..', 'unity', 'python', 'graphlab')),
                # Build tree dependency location
                os.path.abspath(os.path.join(os.path.dirname(
                    os.path.realpath(__file__)),
                    '..', '..', '..', '..', '..', '..', 'deps', 'local', 'lib'))
                ]

# File names of the helper binaries used for RDD <-> SFrame conversion.
RDD_SFRAME_PICKLE = "rddtosf_pickle"
RDD_SFRAME_NONPICKLE = "rddtosf_nonpickle"
SFRAME_RDD_PICKLE = "sftordd_pickle"
HDFS_LIB = "libhdfs.so"
RDD_JAR_FILE = "graphlab-create-spark-integration.jar"
SYS_UTIL_PY = "sys_util.py"

# Mutable module state for Spark/RDD support; BINARY_PATHS is filled by
# the discovery loop below, the rest by __rdd_support_init__.
RDD_SUPPORT_INITED = False
BINARY_PATHS = {}
STAGING_DIR = None
RDD_SUPPORT = True
PRODUCTION_RUN = False
YARN_OS = None

# Logical name -> on-disk file name for each required support binary.
SPARK_SUPPORT_NAMES = {'RDD_SFRAME_PATH':'rddtosf_pickle',
                       'RDD_SFRAME_NONPICKLE_PATH':'rddtosf_nonpickle',
                       'SFRAME_RDD_PATH':'sftordd_pickle',
                       'HDFS_LIB_PATH':'libhdfs.so',
                       'RDD_JAR_PATH':'graphlab-create-spark-integration.jar',
                       'SYS_UTIL_PY_PATH':'sys_util.py',
                       'SPARK_PIPE_WRAPPER_PATH':'spark_pipe_wrapper'}

# Search each root for the support binaries.  PRODUCTION_RUN is set only
# when the first root (the production egg) already supplies all of them.
first = True
for i in SFRAME_ROOTS:
    for key,val in SPARK_SUPPORT_NAMES.iteritems():
        tmp_path = os.path.join(i, val)
        if key not in BINARY_PATHS and os.path.isfile(tmp_path):
            BINARY_PATHS[key] = tmp_path
    if all(name in BINARY_PATHS for name in SPARK_SUPPORT_NAMES.keys()):
        if first:
            PRODUCTION_RUN = True
        break
    first = False

# Spark/RDD conversion is only offered when every binary was found.
if not all(name in BINARY_PATHS for name in SPARK_SUPPORT_NAMES.keys()):
    RDD_SUPPORT = False
def get_spark_integration_jar_path():
    """
    The absolute path of the jar file required to enable GraphLab Create's
    integration with Apache Spark.
    """
    jar_path = BINARY_PATHS.get('RDD_JAR_PATH')
    if jar_path is None:
        raise RuntimeError("Could not find a spark integration jar. "\
            "Does your version of GraphLab Create support Spark Integration (is it >= 1.0)?")
    return jar_path
def __rdd_support_init__(sprk_ctx):
    """One-time initialization of RDD <-> SFrame conversion support.

    Verifies the GraphLabUtil jar is reachable from the Spark driver,
    points BINARY_PATHS at OS-specific binaries for yarn-client
    clusters, creates/locates a staging directory, and ships the
    support files to the Spark context.  No-op if RDD support is
    disabled or already initialized.
    """
    global YARN_OS
    global RDD_SUPPORT_INITED
    global STAGING_DIR
    global BINARY_PATHS

    if not RDD_SUPPORT or RDD_SUPPORT_INITED:
        return

    # Make sure our GraphLabUtil scala functions are accessible from the driver
    try:
        tmp = sprk_ctx._jvm.org.graphlab.create.GraphLabUtil.EscapeString(sprk_ctx._jvm.java.lang.String("1,2,3,4"))
    except:
        raise RuntimeError("Could not execute RDD translation functions. "\
                "Please make sure you have started Spark "\
                "(either with spark-submit or pyspark) with the following flag set:\n"\
                "'--driver-class-path " + BINARY_PATHS['RDD_JAR_PATH']+"'\n"\
                "OR set the property spark.driver.extraClassPath in spark-defaults.conf")

    # tiny RDD used to run probes on a cluster node
    dummy_rdd = sprk_ctx.parallelize([1])

    if PRODUCTION_RUN and sprk_ctx.master == 'yarn-client':
        # Get cluster operating system
        os_rdd = dummy_rdd.map(lambda x: platform.system())
        YARN_OS = os_rdd.collect()[0]

        # Set binary path
        for i in BINARY_PATHS.keys():
            s = BINARY_PATHS[i]
            if os.path.basename(s) == SPARK_SUPPORT_NAMES['SYS_UTIL_PY_PATH']:
                # the python helper is platform independent
                continue
            if YARN_OS == 'Linux':
                BINARY_PATHS[i] = os.path.join(os.path.dirname(s), 'linux', os.path.basename(s))
            elif YARN_OS == 'Darwin':
                BINARY_PATHS[i] = os.path.join(os.path.dirname(s), 'osx', os.path.basename(s))
            else:
                raise RuntimeError("YARN cluster has unsupported operating system "\
                        "(something other than Linux or Mac OS X). "\
                        "Cannot convert RDDs on this cluster to SFrame.")

    # Create staging directory
    staging_dir = '.graphlabStaging'
    if sprk_ctx.master == 'yarn-client':
        tmp_loc = None

        # Get that staging directory's full name
        tmp_loc = dummy_rdd.map(
            lambda x: subprocess.check_output(
                ["hdfs", "getconf", "-confKey", "fs.defaultFS"]).rstrip()).collect()[0]

        STAGING_DIR = os.path.join(tmp_loc, "user", sprk_ctx.sparkUser(), staging_dir)
        if STAGING_DIR is None:
            raise RuntimeError("Failed to create a staging directory on HDFS. "\
                "Do your cluster nodes have a working hdfs client?")

        # Actually create the staging dir
        unity = glconnect.get_unity()
        unity.__mkdir__(STAGING_DIR)
        # 0777 (Python-2 octal): world-writable so jobs running as other
        # cluster users can write into the staging area
        unity.__chmod__(STAGING_DIR, 0777)
    elif sprk_ctx.master[0:5] == 'local':
        # Save the output sframes to the same temp workspace this engine is
        # using
        #TODO: Consider cases where server and client aren't on the same machine
        unity = glconnect.get_unity()
        STAGING_DIR = unity.get_current_cache_file_location()
        if STAGING_DIR is None:
            raise RuntimeError("Could not retrieve local staging directory! \
                Please contact us on http://forum.dato.com.")
    else:
        raise RuntimeError("Your spark context's master is '" +
                           str(sprk_ctx.master) +
                           "'. Only 'local' and 'yarn-client' are supported.")

    # ship the support binaries/scripts to the executors
    if sprk_ctx.master == 'yarn-client':
        sprk_ctx.addFile(BINARY_PATHS['RDD_SFRAME_PATH'])
        sprk_ctx.addFile(BINARY_PATHS['HDFS_LIB_PATH'])
        sprk_ctx.addFile(BINARY_PATHS['SFRAME_RDD_PATH'])
        sprk_ctx.addFile(BINARY_PATHS['RDD_SFRAME_NONPICKLE_PATH'])
        sprk_ctx.addFile(BINARY_PATHS['SYS_UTIL_PY_PATH'])
        sprk_ctx.addFile(BINARY_PATHS['SPARK_PIPE_WRAPPER_PATH'])
        sprk_ctx._jsc.addJar(BINARY_PATHS['RDD_JAR_PATH'])

    RDD_SUPPORT_INITED = True
def load_sframe(filename):
    """
    Load an SFrame. The filename extension is used to determine the format
    automatically. This function is particularly useful for SFrames previously
    saved in binary format. For CSV imports the ``SFrame.read_csv`` function
    provides greater control. If the SFrame is in binary format, ``filename`` is
    actually a directory, created when the SFrame is saved.

    Parameters
    ----------
    filename : string
        Location of the file to load. Can be a local path or a remote URL.

    Returns
    -------
    out : SFrame

    See Also
    --------
    SFrame.save, SFrame.read_csv

    Examples
    --------
    >>> sf = graphlab.SFrame({'id':[1,2,3], 'val':['A','B','C']})
    >>> sf.save('my_sframe')        # 'my_sframe' is a directory
    >>> sf_loaded = graphlab.load_sframe('my_sframe')
    """
    return SFrame(data=filename)
class SFrame(object):
"""
A tabular, column-mutable dataframe object that can scale to big data. The
data in SFrame is stored column-wise on the GraphLab Server side, and is
stored on persistent storage (e.g. disk) to avoid being constrained by
memory size. Each column in an SFrame is a size-immutable
:class:`~graphlab.SArray`, but SFrames are mutable in that columns can be
added and subtracted with ease. An SFrame essentially acts as an ordered
dict of SArrays.
Currently, we support constructing an SFrame from the following data
formats:
* csv file (comma separated value)
* sframe directory archive (A directory where an sframe was saved
previously)
* general text file (with csv parsing options, See :py:meth:`read_csv()`)
* a Python dictionary
* pandas.DataFrame
* JSON
* Apache Avro
* PySpark RDD
and from the following sources:
* your local file system
* the GraphLab Server's file system
* HDFS
* Amazon S3
* HTTP(S).
Only basic examples of construction are covered here. For more information
and examples, please see the `User Guide <https://dato.com/learn/user
guide/index.html#Working_with_data_Tabular_data>`_, `API Translator
<https://dato.com/learn/translator>`_, `How-Tos
<https://dato.com/learn/how-to>`_, and data science `Gallery
<https://dato.com/learn/gallery>`_.
Parameters
----------
data : array | pandas.DataFrame | string | dict, optional
The actual interpretation of this field is dependent on the ``format``
parameter. If ``data`` is an array or Pandas DataFrame, the contents are
stored in the SFrame. If ``data`` is a string, it is interpreted as a
file. Files can be read from local file system or urls (local://,
hdfs://, s3://, http://).
format : string, optional
Format of the data. The default, "auto" will automatically infer the
input data format. The inference rules are simple: If the data is an
array or a dataframe, it is associated with 'array' and 'dataframe'
respectively. If the data is a string, it is interpreted as a file, and
the file extension is used to infer the file format. The explicit
options are:
- "auto"
- "array"
- "dict"
- "sarray"
- "dataframe"
- "csv"
- "tsv"
- "sframe".
See Also
--------
read_csv:
Create a new SFrame from a csv file. Preferred for text and CSV formats,
because it has a lot more options for controlling the parser.
save : Save an SFrame for later use.
Notes
-----
- When working with the GraphLab EC2 instance (see
:py:func:`graphlab.aws.launch_EC2()`), an SFrame cannot be constructed
using local file path, because it involves a potentially large amount of
data transfer from client to server. However, it is still okay to use a
remote file path. See the examples below. A similar restriction applies to
:py:class:`graphlab.SGraph` and :py:class:`graphlab.SArray`.
- When reading from HDFS on Linux we must guess the location of your java
installation. By default, we will use the location pointed to by the
JAVA_HOME environment variable. If this is not set, we check many common
installation paths. You may use two environment variables to override
this behavior. GRAPHLAB_JAVA_HOME allows you to specify a specific java
installation and overrides JAVA_HOME. GRAPHLAB_LIBJVM_DIRECTORY
overrides all and expects the exact directory that your preferred
libjvm.so file is located. Use this ONLY if you'd like to use a
non-standard JVM.
Examples
--------
>>> import graphlab
>>> from graphlab import SFrame
**Construction**
Construct an SFrame from a dataframe and transfers the dataframe object
across the network.
>>> df = pandas.DataFrame()
>>> sf = SFrame(data=df)
Construct an SFrame from a local csv file (only works for local server).
>>> sf = SFrame(data='~/mydata/foo.csv')
Construct an SFrame from a csv file on Amazon S3. This requires the
environment variables: *AWS_ACCESS_KEY_ID* and *AWS_SECRET_ACCESS_KEY* to be
set before the python session started. Alternatively, you can use
:py:func:`graphlab.aws.set_credentials()` to set the credentials after
python is started and :py:func:`graphlab.aws.get_credentials()` to verify
these environment variables.
>>> sf = SFrame(data='s3://mybucket/foo.csv')
Read from HDFS using a specific java installation (environment variable
only applies when using Linux)
>>> import os
>>> os.environ['GRAPHLAB_JAVA_HOME'] = '/my/path/to/java'
>>> from graphlab import SFrame
>>> sf = SFrame("hdfs://mycluster.example.com:8020/user/myname/coolfile.txt")
An SFrame can be constructed from a dictionary of values or SArrays:
>>> sf = gl.SFrame({'id':[1,2,3],'val':['A','B','C']})
>>> sf
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
2 3 C
Or equivalently:
>>> ids = SArray([1,2,3])
>>> vals = SArray(['A','B','C'])
>>> sf = SFrame({'id':ids,'val':vals})
It can also be constructed from an array of SArrays in which case column
names are automatically assigned.
>>> ids = SArray([1,2,3])
>>> vals = SArray(['A','B','C'])
>>> sf = SFrame([ids, vals])
>>> sf
Columns:
X1 int
X2 str
Rows: 3
Data:
X1 X2
0 1 A
1 2 B
2 3 C
If the SFrame is constructed from a list of values, an SFrame of a single
column is constructed.
>>> sf = SFrame([1,2,3])
>>> sf
Columns:
X1 int
Rows: 3
Data:
X1
0 1
1 2
2 3
**Parsing**
The :py:func:`graphlab.SFrame.read_csv()` is quite powerful and can be
used to import a variety of row-based formats.
First, some simple cases:
>>> !cat ratings.csv
user_id,movie_id,rating
10210,1,1
10213,2,5
10217,2,2
10102,1,3
10109,3,4
10117,5,2
10122,2,4
10114,1,5
10125,1,1
>>> gl.SFrame.read_csv('ratings.csv')
Columns:
user_id int
movie_id int
rating int
Rows: 9
Data:
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 10210 | 1 | 1 |
| 10213 | 2 | 5 |
| 10217 | 2 | 2 |
| 10102 | 1 | 3 |
| 10109 | 3 | 4 |
| 10117 | 5 | 2 |
| 10122 | 2 | 4 |
| 10114 | 1 | 5 |
| 10125 | 1 | 1 |
+---------+----------+--------+
[9 rows x 3 columns]
Delimiters can be specified, if "," is not the delimiter, for instance
space ' ' in this case. Only single character delimiters are supported.
>>> !cat ratings.csv
user_id movie_id rating
10210 1 1
10213 2 5
10217 2 2
10102 1 3
10109 3 4
10117 5 2
10122 2 4
10114 1 5
10125 1 1
>>> gl.SFrame.read_csv('ratings.csv', delimiter=' ')
By default, "NA" or a missing element is interpreted as a missing value.
>>> !cat ratings2.csv
user,movie,rating
"tom",,1
harry,5,
jack,2,2
bill,,
>>> gl.SFrame.read_csv('ratings2.csv')
Columns:
user str
movie int
rating int
Rows: 4
Data:
+---------+-------+--------+
| user | movie | rating |
+---------+-------+--------+
| tom | None | 1 |
| harry | 5 | None |
| jack | 2 | 2 |
| missing | None | None |
+---------+-------+--------+
[4 rows x 3 columns]
Furthermore due to the dictionary types and list types, can handle parsing
of JSON-like formats.
>>> !cat ratings3.csv
business, categories, ratings
"Restaurant 1", [1 4 9 10], {"funny":5, "cool":2}
"Restaurant 2", [], {"happy":2, "sad":2}
"Restaurant 3", [2, 11, 12], {}
>>> gl.SFrame.read_csv('ratings3.csv')
Columns:
business str
categories array
ratings dict
Rows: 3
Data:
+--------------+--------------------------------+-------------------------+
| business | categories | ratings |
+--------------+--------------------------------+-------------------------+
| Restaurant 1 | array('d', [1.0, 4.0, 9.0, ... | {'funny': 5, 'cool': 2} |
| Restaurant 2 | array('d') | {'sad': 2, 'happy': 2} |
| Restaurant 3 | array('d', [2.0, 11.0, 12.0]) | {} |
+--------------+--------------------------------+-------------------------+
[3 rows x 3 columns]
The list and dictionary parsers are quite flexible and can absorb a
variety of poorly formatted inputs. Also, note that the list and dictionary
types are recursive, allowing for arbitrary values to be contained.
All these are valid lists:
>>> !cat interesting_lists.csv
list
[]
[1,2,3]
[1;2,3]
[1 2 3]
[{a:b}]
["c",d, e]
[[a]]
>>> gl.SFrame.read_csv('interesting_lists.csv')
Columns:
list list
Rows: 7
Data:
+-----------------+
| list |
+-----------------+
| [] |
| [1, 2, 3] |
| [1, 2, 3] |
| [1, 2, 3] |
| [{'a': 'b'}] |
| ['c', 'd', 'e'] |
| [['a']] |
+-----------------+
[7 rows x 1 columns]
All these are valid dicts:
>>> !cat interesting_dicts.csv
dict
{"classic":1,"dict":1}
{space:1 seperated:1}
{emptyvalue:}
{}
{:}
{recursive1:[{a:b}]}
{:[{:[a]}]}
>>> gl.SFrame.read_csv('interesting_dicts.csv')
Columns:
dict dict
Rows: 7
Data:
+------------------------------+
| dict |
+------------------------------+
| {'dict': 1, 'classic': 1} |
| {'seperated': 1, 'space': 1} |
| {'emptyvalue': None} |
| {} |
| {None: None} |
| {'recursive1': [{'a': 'b'}]} |
| {None: [{None: array('d')}]} |
+------------------------------+
[7 rows x 1 columns]
**Saving**
Save and load the sframe in native format.
>>> sf.save('mysframedir')
>>> sf2 = graphlab.load_sframe('mysframedir')
**Column Manipulation**
An SFrame is composed of a collection of columns of SArrays, and individual
SArrays can be extracted easily. For instance given an SFrame:
>>> sf = SFrame({'id':[1,2,3],'val':['A','B','C']})
>>> sf
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
2 3 C
The "id" column can be extracted using:
>>> sf["id"]
dtype: int
Rows: 3
[1, 2, 3]
And can be deleted using:
>>> del sf["id"]
Multiple columns can be selected by passing a list of column names:
>>> sf = SFrame({'id':[1,2,3],'val':['A','B','C'],'val2':[5,6,7]})
>>> sf
Columns:
id int
val str
val2 int
Rows: 3
Data:
id val val2
0 1 A 5
1 2 B 6
2 3 C 7
>>> sf2 = sf[['id','val']]
>>> sf2
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
2 3 C
The same mechanism can be used to re-order columns:
>>> sf = SFrame({'id':[1,2,3],'val':['A','B','C']})
>>> sf
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
2 3 C
>>> sf = sf[['val','id']]
>>> sf
Columns:
val str
id int
Rows: 3
Data:
val id
0 A 1
1 B 2
2 C 3
**Element Access and Slicing**
SFrames can be accessed by integer keys just like a regular python list.
Such operations may not be fast on large datasets so looping over an SFrame
should be avoided.
>>> sf = SFrame({'id':[1,2,3],'val':['A','B','C']})
>>> sf[0]
{'id': 1, 'val': 'A'}
>>> sf[2]
{'id': 3, 'val': 'C'}
>>> sf[5]
IndexError: SFrame index out of range
Negative indices can be used to access elements from the tail of the array
>>> sf[-1] # returns the last element
{'id': 3, 'val': 'C'}
>>> sf[-2] # returns the second to last element
{'id': 2, 'val': 'B'}
The SFrame also supports the full range of python slicing operators:
>>> sf[1000:] # Returns an SFrame containing rows 1000 to the end
>>> sf[:1000] # Returns an SFrame containing rows 0 to row 999 inclusive
>>> sf[0:1000:2] # Returns an SFrame containing rows 0 to row 1000 in steps of 2
>>> sf[-100:] # Returns an SFrame containing last 100 rows
>>> sf[-100:len(sf):2] # Returns an SFrame containing last 100 rows in steps of 2
**Logical Filter**
An SFrame can be filtered using
>>> sframe[binary_filter]
where sframe is an SFrame and binary_filter is an SArray of the same length.
The result is a new SFrame which contains only rows of the SFrame where its
matching row in the binary_filter is non zero.
This permits the use of boolean operators that can be used to perform
logical filtering operations. For instance, given an SFrame
>>> sf
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
2 3 C
>>> sf[(sf['id'] >= 1) & (sf['id'] <= 2)]
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
See :class:`~graphlab.SArray` for more details on the use of the logical
filter.
This can also be used more generally to provide filtering capability which
is otherwise not expressible with simple boolean functions. For instance:
>>> sf[sf['id'].apply(lambda x: math.log(x) <= 1)]
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
Or alternatively:
>>> sf[sf.apply(lambda x: math.log(x['id']) <= 1)]
Create an SFrame from a Python dictionary.
>>> from graphlab import SFrame
>>> sf = SFrame({'id':[1,2,3], 'val':['A','B','C']})
>>> sf
Columns:
id int
val str
Rows: 3
Data:
id val
0 1 A
1 2 B
2 3 C
"""
__slots__ = ['shape', '__proxy__', '_proxy']
    def __init__(self, data=None,
                 format='auto',
                 _proxy=None):
        """__init__(data=list(), format='auto')
        Construct a new SFrame from a url or a pandas.DataFrame.

        Parameters
        ----------
        data : array | pandas.DataFrame | string | dict | SArray | SFrame, optional
            Contents of the frame; interpretation is controlled by ``format``.
        format : string, optional
            'auto' (default) infers the format from ``data``'s type and, for
            file paths, from the extension. Explicit values handled below:
            'dataframe', 'sframe_obj', 'sarray', 'array', 'dict', 'csv',
            'tsv', 'sframe', 'empty'.
        _proxy : optional
            Internal use only: adopt an existing server-side proxy object
            instead of creating a new one.
        """
        # emit metrics for num_rows, num_columns, and type (local://, s3, hdfs, http)
        tracker = _mt._get_metric_tracker()
        # Either adopt the supplied server-side proxy or create a fresh one
        # through the current client connection.
        if (_proxy):
            self.__proxy__ = _proxy
        else:
            self.__proxy__ = UnitySFrameProxy(glconnect.get_client())
        _format = None
        if (format == 'auto'):
            if (HAS_PANDAS and isinstance(data, pandas.DataFrame)):
                _format = 'dataframe'
                tracker.track('sframe.location.memory', value=1)
            elif (isinstance(data, str) or isinstance(data, unicode)):
                # A string is a path/URL; record where the data lives
                # (no scheme means a local path).
                if data.find('://') == -1:
                    suffix = 'local'
                else:
                    suffix = data.split('://')[0]
                tracker.track(('sframe.location.%s' % (suffix)), value=1)
                if data.endswith(('.csv', '.csv.gz')):
                    _format = 'csv'
                elif data.endswith(('.tsv', '.tsv.gz')):
                    _format = 'tsv'
                elif data.endswith(('.txt', '.txt.gz')):
                    print "Assuming file is csv. For other delimiters, " + \
                        "please use `SFrame.read_csv`."
                    _format = 'csv'
                else:
                    # No recognized extension: assume a saved SFrame directory.
                    _format = 'sframe'
            elif type(data) == SArray:
                _format = 'sarray'
            elif isinstance(data, SFrame):
                _format = 'sframe_obj'
            elif (hasattr(data, 'iteritems')):
                # Duck-typed dict (Python 2 mapping protocol).
                _format = 'dict'
                tracker.track('sframe.location.memory', value=1)
            elif hasattr(data, '__iter__'):
                _format = 'array'
                tracker.track('sframe.location.memory', value=1)
            elif data is None:
                _format = 'empty'
            else:
                raise ValueError('Cannot infer input type for data ' + str(data))
        else:
            _format = format
        tracker.track(('sframe.format.%s' % _format), value=1)
        with cython_context():
            if (_format == 'dataframe'):
                self.__proxy__.load_from_dataframe(data)
            elif (_format == 'sframe_obj'):
                # Share the source SFrame's underlying columns.
                for col in data.column_names():
                    self.__proxy__.add_column(data[col].__proxy__, col)
            elif (_format == 'sarray'):
                self.__proxy__.add_column(data.__proxy__, "")
            elif (_format == 'array'):
                if len(data) > 0:
                    unique_types = set([type(x) for x in data if x is not None])
                    if len(unique_types) == 1 and SArray in unique_types:
                        # A list of SArrays: one column per SArray, with
                        # auto-generated names (X1, X2, ...).
                        for arr in data:
                            self.add_column(arr)
                    elif SArray in unique_types:
                        raise ValueError("Cannot create SFrame from mix of regular values and SArrays")
                    else:
                        # A list of plain values becomes a single column.
                        self.__proxy__.add_column(SArray(data).__proxy__, "")
            elif (_format == 'dict'):
                # Sort keys so the column order is deterministic.
                for key,val in iter(sorted(data.iteritems())):
                    if (type(val) == SArray):
                        self.__proxy__.add_column(val.__proxy__, key)
                    else:
                        self.__proxy__.add_column(SArray(val).__proxy__, key)
            elif (_format == 'csv'):
                url = _make_internal_url(data)
                tmpsf = SFrame.read_csv(url, delimiter=',', header=True)
                self.__proxy__ = tmpsf.__proxy__
            elif (_format == 'tsv'):
                url = _make_internal_url(data)
                tmpsf = SFrame.read_csv(url, delimiter='\t', header=True)
                self.__proxy__ = tmpsf.__proxy__
            elif (_format == 'sframe'):
                url = _make_internal_url(data)
                self.__proxy__.load_from_sframe_index(url)
            elif (_format == 'empty'):
                pass
            else:
                raise ValueError('Unknown input type: ' + format)
        # Record the resulting size (row count only when it is cheaply known).
        sframe_size = -1
        if self.__has_size__():
            sframe_size = self.num_rows()
        tracker.track('sframe.row.size', value=sframe_size)
        tracker.track('sframe.col.size', value=self.num_cols())
    @staticmethod
    def _infer_column_types_from_lines(first_rows):
        """
        Infer a per-column type list from a small sample SFrame that was
        parsed without type hints.

        Combines the observed Python types row by row (int+float -> float,
        array+list -> list, None defers to the other type, anything else
        falls back to str). Returns either a list of types (one per column)
        or the single type ``str`` when inference is impossible.
        """
        if (len(first_rows.column_names()) < 1):
            print "Insufficient number of columns to perform type inference"
            raise RuntimeError("Insufficient columns ")
        if len(first_rows) < 1:
            print "Insufficient number of rows to perform type inference"
            raise RuntimeError("Insufficient rows")
        # gets all the values column-wise
        all_column_values_transposed = [list(first_rows[col])
                                        for col in first_rows.column_names()]
        # transpose
        all_column_values = [list(x) for x in zip(*all_column_values_transposed)]
        all_column_type_hints = [[type(t) for t in vals] for vals in all_column_values]
        # collect the hints
        # if every line was inferred to have a different number of elements, die
        if len(set(len(x) for x in all_column_type_hints)) != 1:
            print "Unable to infer column types. Defaulting to str"
            return str
        import types
        # Start with the first row's types and merge the remaining rows in.
        column_type_hints = all_column_type_hints[0]
        # now perform type combining across rows
        for i in range(1, len(all_column_type_hints)):
            currow = all_column_type_hints[i]
            for j in range(len(column_type_hints)):
                # combine types
                d = set([currow[j], column_type_hints[j]])
                if (len(d) == 1):
                    # easy case. both agree on the type
                    continue
                if ((int in d) and (float in d)):
                    # one is an int, one is a float. its a float
                    column_type_hints[j] = float
                elif ((array.array in d) and (list in d)):
                    # one is an array , one is a list. its a list
                    column_type_hints[j] = list
                elif types.NoneType in d:
                    # one is a NoneType. assign to other type
                    if currow[j] != types.NoneType:
                        column_type_hints[j] = currow[j]
                else:
                    # incompatible non-None types: fall back to str
                    column_type_hints[j] = str
        # final pass. everything which is still NoneType is now a str
        for i in range(len(column_type_hints)):
            if column_type_hints[i] == types.NoneType:
                column_type_hints[i] = str
        return column_type_hints
    @classmethod
    def _read_csv_impl(cls,
                       url,
                       delimiter=',',
                       header=True,
                       error_bad_lines=False,
                       comment_char='',
                       escape_char='\\',
                       double_quote=True,
                       quote_char='\"',
                       skip_initial_space=True,
                       column_type_hints=None,
                       na_values=["NA"],
                       nrows=None,
                       verbose=True,
                       store_errors=True):
        """
        Constructs an SFrame from a CSV file or a path to multiple CSVs, and
        returns a pair containing the SFrame and optionally
        (if store_errors=True) a dict of filenames to SArrays
        indicating for each file, what are the incorrectly parsed lines
        encountered.

        Parameters
        ----------
        store_errors : bool
            If true, the output errors dict will be filled.

        See `read_csv` for the rest of the parameters.
        """
        # Assemble the parser configuration dict understood by the backend.
        parsing_config = dict()
        parsing_config["delimiter"] = delimiter
        parsing_config["use_header"] = header
        parsing_config["continue_on_failure"] = not error_bad_lines
        parsing_config["comment_char"] = comment_char
        parsing_config["escape_char"] = escape_char
        parsing_config["double_quote"] = double_quote
        parsing_config["quote_char"] = quote_char
        parsing_config["skip_initial_space"] = skip_initial_space
        parsing_config["store_errors"] = store_errors
        # Normalize a single NA string to a one-element list.
        if type(na_values) is str:
            na_values = [na_values]
        if na_values is not None and len(na_values) > 0:
            parsing_config["na_values"] = na_values
        if nrows != None:
            parsing_config["row_limit"] = nrows
        proxy = UnitySFrameProxy(glconnect.get_client())
        internal_url = _make_internal_url(url)
        if (not verbose):
            glconnect.get_client().set_log_progress(False)
        # Attempt to automatically detect the column types. Either produce a
        # list of types; otherwise default to all str types.
        column_type_inference_was_used = False
        if column_type_hints is None:
            try:
                # Get the first 100 rows (using all the desired arguments).
                # column_type_hints=type(None) forces a raw parse of the
                # sample so types can be inferred from the values.
                first_rows = graphlab.SFrame.read_csv(url, nrows=100,
                                                      column_type_hints=type(None),
                                                      header=header,
                                                      delimiter=delimiter,
                                                      comment_char=comment_char,
                                                      escape_char=escape_char,
                                                      double_quote=double_quote,
                                                      quote_char=quote_char,
                                                      skip_initial_space=skip_initial_space,
                                                      na_values = na_values)
                column_type_hints = SFrame._infer_column_types_from_lines(first_rows)
                typelist = '[' + ','.join(t.__name__ for t in column_type_hints) + ']'
                print "------------------------------------------------------"
                print "Inferred types from first line of file as "
                print "column_type_hints="+ typelist
                print "If parsing fails due to incorrect types, you can correct"
                print "the inferred type list above and pass it to read_csv in"
                print "the column_type_hints argument"
                print "------------------------------------------------------"
                column_type_inference_was_used = True
            except Exception as e:
                # A user-initiated cancel must propagate; any other failure
                # only means inference failed, so fall back to str columns.
                if type(e) == RuntimeError and "CSV parsing cancelled" in e.message:
                    raise e
                # If the above fails, default back to str for all columns.
                column_type_hints = str
                print 'Could not detect types. Using str for each column.'
        # Translate the hints into the backend's type_hints dict: a single
        # type applies to every column, a list applies positionally, and a
        # dict maps column names to types.
        if type(column_type_hints) is type:
            type_hints = {'__all_columns__': column_type_hints}
        elif type(column_type_hints) is list:
            type_hints = dict(zip(['__X%d__' % i for i in range(len(column_type_hints))], column_type_hints))
        elif type(column_type_hints) is dict:
            type_hints = column_type_hints
        else:
            raise TypeError("Invalid type for column_type_hints. Must be a dictionary, list or a single type.")
        _mt._get_metric_tracker().track('sframe.csv.parse')
        # Record whether the source is local or remote (s3/hdfs/http...).
        suffix=''
        if url.find('://') == -1:
            suffix = 'local'
        else:
            suffix = url.split('://')[0]
        _mt._get_metric_tracker().track(('sframe.location.%s' % (suffix)), value=1)
        try:
            with cython_context():
                errors = proxy.load_from_csvs(internal_url, parsing_config, type_hints)
        except Exception as e:
            if type(e) == RuntimeError and "CSV parsing cancelled" in e.message:
                raise e
            if column_type_inference_was_used:
                # try again
                # The inferred types failed on the full file; retry once
                # with everything parsed as str before giving up.
                print "Unable to parse the file with automatic type inference."
                print "Defaulting to column_type_hints=str"
                type_hints = {'__all_columns__': str}
                try:
                    with cython_context():
                        errors = proxy.load_from_csvs(internal_url, parsing_config, type_hints)
                except:
                    raise
            else:
                raise
        # Restore progress logging regardless of the `verbose` flag above.
        glconnect.get_client().set_log_progress(True)
        return (cls(_proxy=proxy), { f: SArray(_proxy = es) for (f, es) in errors.iteritems() })
@classmethod
def read_csv_with_errors(cls,
url,
delimiter=',',
header=True,
comment_char='',
escape_char='\\',
double_quote=True,
quote_char='\"',
skip_initial_space=True,
column_type_hints=None,
na_values=["NA"],
nrows=None,
verbose=True):
"""
Constructs an SFrame from a CSV file or a path to multiple CSVs, and
returns a pair containing the SFrame and a dict of filenames to SArrays
indicating for each file, what are the incorrectly parsed lines
encountered.
Parameters
----------
url : string
Location of the CSV file or directory to load. If URL is a directory
or a "glob" pattern, all matching files will be loaded.
delimiter : string, optional
This describes the delimiter used for parsing csv files.
header : bool, optional
If true, uses the first row as the column names. Otherwise use the
default column names: 'X1, X2, ...'.
comment_char : string, optional
The character which denotes that the
remainder of the line is a comment.
escape_char : string, optional
Character which begins a C escape sequence
double_quote : bool, optional
If True, two consecutive quotes in a string are parsed to a single
quote.
quote_char : string, optional
Character sequence that indicates a quote.
skip_initial_space : bool, optional
Ignore extra spaces at the start of a field
column_type_hints : None, type, list[type], dict[string, type], optional
This provides type hints for each column. By default, this method
attempts to detect the type of each column automatically.
Supported types are int, float, str, list, dict, and array.array.
* If a single type is provided, the type will be
applied to all columns. For instance, column_type_hints=float
will force all columns to be parsed as float.
* If a list of types is provided, the types applies
to each column in order, e.g.[int, float, str]
will parse the first column as int, second as float and third as
string.
* If a dictionary of column name to type is provided,
each type value in the dictionary is applied to the key it
belongs to.
For instance {'user':int} will hint that the column called "user"
should be parsed as an integer, and the rest will default to
string.
na_values : str | list of str, optional
A string or list of strings to be interpreted as missing values.
nrows : int, optional
If set, only this many rows will be read from the file.
verbose : bool, optional
If True, print the progress.
Returns
-------
out : tuple
The first element is the SFrame with good data. The second element
is a dictionary of filenames to SArrays indicating for each file,
what are the incorrectly parsed lines encountered.
See Also
--------
read_csv, SFrame
Examples
--------
>>> bad_url = 'https://s3.amazonaws.com/gl-testdata/bad_csv_example.csv'
>>> (sf, bad_lines) = graphlab.SFrame.read_csv_with_errors(bad_url)
>>> sf
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | 3 |
| 25907 | 1663 | 3 |
| 25923 | 1663 | 3 |
| 25924 | 1663 | 3 |
| 25928 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[98 rows x 3 columns]
>>> bad_lines
{'https://s3.amazonaws.com/gl-testdata/bad_csv_example.csv': dtype: str
Rows: 1
['x,y,z,a,b,c']}
"""
return cls._read_csv_impl(url,
delimiter=delimiter,
header=header,
error_bad_lines=False, # we are storing errors,
# thus we must not fail
# on bad lines
comment_char=comment_char,
escape_char=escape_char,
double_quote=double_quote,
quote_char=quote_char,
skip_initial_space=skip_initial_space,
column_type_hints=column_type_hints,
na_values=na_values,
nrows=nrows,
verbose=verbose,
store_errors=True)
@classmethod
def read_csv(cls,
url,
delimiter=',',
header=True,
error_bad_lines=False,
comment_char='',
escape_char='\\',
double_quote=True,
quote_char='\"',
skip_initial_space=True,
column_type_hints=None,
na_values=["NA"],
nrows=None,
verbose=True):
"""
Constructs an SFrame from a CSV file or a path to multiple CSVs.
Parameters
----------
url : string
Location of the CSV file or directory to load. If URL is a directory
or a "glob" pattern, all matching files will be loaded.
delimiter : string, optional
This describes the delimiter used for parsing csv files.
header : bool, optional
If true, uses the first row as the column names. Otherwise use the
default column names : 'X1, X2, ...'.
error_bad_lines : bool
If true, will fail upon encountering a bad line. If false, will
continue parsing skipping lines which fail to parse correctly.
A sample of the first 10 encountered bad lines will be printed.
comment_char : string, optional
The character which denotes that the remainder of the line is a
comment.
escape_char : string, optional
Character which begins a C escape sequence
double_quote : bool, optional
If True, two consecutive quotes in a string are parsed to a single
quote.
quote_char : string, optional
Character sequence that indicates a quote.
skip_initial_space : bool, optional
Ignore extra spaces at the start of a field
column_type_hints : None, type, list[type], dict[string, type], optional
This provides type hints for each column. By default, this method
attempts to detect the type of each column automatically.
Supported types are int, float, str, list, dict, and array.array.
* If a single type is provided, the type will be
applied to all columns. For instance, column_type_hints=float
will force all columns to be parsed as float.
* If a list of types is provided, the types applies
to each column in order, e.g.[int, float, str]
will parse the first column as int, second as float and third as
string.
* If a dictionary of column name to type is provided,
each type value in the dictionary is applied to the key it
belongs to.
For instance {'user':int} will hint that the column called "user"
should be parsed as an integer, and the rest will default to
string.
na_values : str | list of str, optional
A string or list of strings to be interpreted as missing values.
nrows : int, optional
If set, only this many rows will be read from the file.
verbose : bool, optional
If True, print the progress.
Returns
-------
out : SFrame
See Also
--------
read_csv_with_errors, SFrame
Examples
--------
Read a regular csv file, with all default options, automatically
determine types:
>>> url = 'http://s3.amazonaws.com/gl-testdata/rating_data_example.csv'
>>> sf = graphlab.SFrame.read_csv(url)
>>> sf
Columns:
user_id int
movie_id int
rating int
Rows: 10000
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | 3 |
| 25907 | 1663 | 3 |
| 25923 | 1663 | 3 |
| 25924 | 1663 | 3 |
| 25928 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[10000 rows x 3 columns]
Read only the first 100 lines of the csv file:
>>> sf = graphlab.SFrame.read_csv(url, nrows=100)
>>> sf
Columns:
user_id int
movie_id int
rating int
Rows: 100
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | 3 |
| 25907 | 1663 | 3 |
| 25923 | 1663 | 3 |
| 25924 | 1663 | 3 |
| 25928 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[100 rows x 3 columns]
Read all columns as str type
>>> sf = graphlab.SFrame.read_csv(url, column_type_hints=str)
>>> sf
Columns:
user_id str
movie_id str
rating str
Rows: 10000
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | 3 |
| 25907 | 1663 | 3 |
| 25923 | 1663 | 3 |
| 25924 | 1663 | 3 |
| 25928 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[10000 rows x 3 columns]
Specify types for a subset of columns and leave the rest to be str.
>>> sf = graphlab.SFrame.read_csv(url,
... column_type_hints={
... 'user_id':int, 'rating':float
... })
>>> sf
Columns:
user_id str
movie_id str
rating float
Rows: 10000
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | 3.0 |
| 25907 | 1663 | 3.0 |
| 25923 | 1663 | 3.0 |
| 25924 | 1663 | 3.0 |
| 25928 | 1663 | 2.0 |
| ... | ... | ... |
+---------+----------+--------+
[10000 rows x 3 columns]
Not treat first line as header:
>>> sf = graphlab.SFrame.read_csv(url, header=False)
>>> sf
Columns:
X1 str
X2 str
X3 str
Rows: 10001
+---------+----------+--------+
| X1 | X2 | X3 |
+---------+----------+--------+
| user_id | movie_id | rating |
| 25904 | 1663 | 3 |
| 25907 | 1663 | 3 |
| 25923 | 1663 | 3 |
| 25924 | 1663 | 3 |
| 25928 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[10001 rows x 3 columns]
Treat '3' as missing value:
>>> sf = graphlab.SFrame.read_csv(url, na_values=['3'], column_type_hints=str)
>>> sf
Columns:
user_id str
movie_id str
rating str
Rows: 10000
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | None |
| 25907 | 1663 | None |
| 25923 | 1663 | None |
| 25924 | 1663 | None |
| 25928 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[10000 rows x 3 columns]
Throw error on parse failure:
>>> bad_url = 'https://s3.amazonaws.com/gl-testdata/bad_csv_example.csv'
>>> sf = graphlab.SFrame.read_csv(bad_url, error_bad_lines=True)
RuntimeError: Runtime Exception. Unable to parse line "x,y,z,a,b,c"
Set error_bad_lines=False to skip bad lines
"""
return cls._read_csv_impl(url,
delimiter=delimiter,
header=header,
error_bad_lines=error_bad_lines,
comment_char=comment_char,
escape_char=escape_char,
double_quote=double_quote,
quote_char=quote_char,
skip_initial_space=skip_initial_space,
column_type_hints=column_type_hints,
na_values=na_values,
nrows=nrows,
verbose=verbose,
store_errors=False)[0]
def to_schema_rdd(self,sc,sql,number_of_partitions=4):
"""
Convert the current SFrame to the Spark SchemaRDD.
To enable this function, you must add the jar file bundled with GraphLab
Create to the Spark driver's classpath. This must happen BEFORE Spark
launches its JVM, or else it will have no effect. To do this, first get
the location of the packaged jar with
`graphlab.get_spark_integration_jar_path`. You then have two options:
1. Add the path to the jar to your spark-defaults.conf file. The
property to set is 'spark.driver.extraClassPath'.
OR
2. Add the jar's path as a command line option to your favorite way to
start pyspark (either spark-submit or pyspark). For this, use the
command line option '--driver-class-path'.
Parameters
----------
sc : SparkContext
sc is an existing SparkContext.
sql : SQLContext
sql is an existing SQLContext.
number_of_partitions : int
number of partitions for the output rdd
Returns
----------
out: SchemaRDD
Examples
--------
>>> from pyspark import SparkContext, SQLContext
>>> from graphlab import SFrame
>>> sc = SparkContext('local')
>>> sqlc = SQLContext(sc)
>>> sf = SFrame({'x': [1,2,3], 'y': ['fish', 'chips', 'salad']})
>>> rdd = sf.to_schema_rdd(sc, sqlc)
>>> rdd.collect()
[Row(x=1, y=u'fish'), Row(x=2, y=u'chips'), Row(x=3, y=u'salad')]
"""
def homogeneous_type(seq):
if seq is None or len(seq) == 0:
return True
iseq = iter(seq)
first_type = type(next(iseq))
return True if all( (type(x) is first_type) for x in iseq ) else False
if len(self) == 0:
raise ValueError("SFrame is empty")
column_names = self.column_names()
first_row = self.head(1)[0]
for name in column_names:
if hasattr(first_row[name],'__iter__') and homogeneous_type(first_row[name]) is not True:
raise TypeError("Support for translation to Spark SchemaRDD not enabled for heterogeneous iterable type (column: %s). Use SFrame.to_rdd()." % name)
for _type in self.column_types():
if(_type.__name__ == 'datetime'):
raise TypeError("Support for translation to Spark SchemaRDD not enabled for datetime type. Use SFrame.to_rdd() ")
rdd = self.to_rdd(sc,number_of_partitions);
from pyspark.sql import Row
rowRdd = rdd.map(lambda x: Row(**x))
return sql.inferSchema(rowRdd)
def to_rdd(self, sc, number_of_partitions=4):
"""
Convert the current SFrame to the Spark RDD.
To enable this function, you must add the jar file bundled with GraphLab
Create to the Spark driver's classpath. This must happen BEFORE Spark
launches its JVM, or else it will have no effect. To do this, first get
the location of the packaged jar with
`graphlab.get_spark_integration_jar_path`. You then have two options:
1. Add the path to the jar to your spark-defaults.conf file. The
property to set is 'spark.driver.extraClassPath'.
OR
2. Add the jar's path as a command line option to your favorite way to
start pyspark (either spark-submit or pyspark). For this, use the
command line option '--driver-class-path'.
Parameters
----------
sc : SparkContext
sc is an existing SparkContext.
number_of_partitions: int
number of partitions for the output rdd
Returns
----------
out: RDD
Examples
--------
>>> from pyspark import SparkContext
>>> from graphlab import SFrame
>>> sc = SparkContext('local')
>>> sf = SFrame({'x': [1,2,3], 'y': ['fish', 'chips', 'salad']})
>>> rdd = sf.to_rdd(sc)
>>> rdd.collect()
[{'x': 1L, 'y': 'fish'}, {'x': 2L, 'y': 'chips'}, {'x': 3L, 'y': 'salad'}]
"""
_mt._get_metric_tracker().track('sframe.to_rdd')
if not RDD_SUPPORT:
raise Exception("Support for translation to Spark RDDs not enabled.")
for _type in self.column_types():
if(_type.__name__ == 'Image'):
raise TypeError("Support for translation to Spark RDDs not enabled for Image type.")
if type(number_of_partitions) is not int:
raise ValueError("number_of_partitions parameter expects an integer type")
if number_of_partitions == 0:
raise ValueError("number_of_partitions can not be initialized to zero")
# Save SFrame in a temporary place
tmp_loc = self.__get_staging_dir__(sc)
sf_loc = os.path.join(tmp_loc, str(uuid.uuid4()))
self.save(sf_loc)
# Keep track of the temporary sframe that is saved(). We need to delete it eventually.
dummysf = load_sframe(sf_loc)
dummysf.__proxy__.delete_on_close()
SFRAME_GARBAGE_COLLECTOR.append(dummysf)
sframe_len = self.__len__()
small_partition_size = sframe_len/number_of_partitions
big_partition_size = small_partition_size + 1
num_big_partition_size = sframe_len % number_of_partitions
num_small_partition_size = number_of_partitions - num_big_partition_size
count = 0
start_index = 0
ranges = []
while(count < number_of_partitions):
if(count < num_big_partition_size):
ranges.append((str(start_index)+":"+str(start_index + big_partition_size)))
start_index = start_index + big_partition_size
else:
ranges.append((str(start_index)+":"+str(start_index + small_partition_size)))
start_index = start_index + small_partition_size
count+=1
from pyspark import RDD
rdd = sc.parallelize(ranges,number_of_partitions)
if sc.master[0:5] == 'local':
pipeRdd = sc._jvm.org.graphlab.create.GraphLabUtil.pythonToJava(
rdd._jrdd).pipe(
BINARY_PATHS['SPARK_PIPE_WRAPPER_PATH'] + \
" " + BINARY_PATHS['SFRAME_RDD_PATH'] + " " + sf_loc)
elif sc.master == 'yarn-client':
pipeRdd = sc._jvm.org.graphlab.create.GraphLabUtil.pythonToJava(
rdd._jrdd).pipe(
"./" + SPARK_SUPPORT_NAMES['SPARK_PIPE_WRAPPER_PATH'] + \
" " + "./" + SPARK_SUPPORT_NAMES['SFRAME_RDD_PATH'] + \
" " + sf_loc)
serializedRdd = sc._jvm.org.graphlab.create.GraphLabUtil.stringToByte(pipeRdd)
import pyspark
output_rdd = RDD(serializedRdd,sc,pyspark.serializers.PickleSerializer())
return output_rdd
@classmethod
def __get_staging_dir__(cls,cur_sc):
    """
    Return the staging directory used to exchange SFrame files with
    Spark, initializing Spark/RDD support on first use.
    """
    # Lazily run the one-time RDD support initialization; STAGING_DIR is
    # presumably populated by __rdd_support_init__ -- confirm in its source.
    if not RDD_SUPPORT_INITED:
        __rdd_support_init__(cur_sc)
    return STAGING_DIR
@classmethod
def from_rdd(cls, rdd):
    """
    Convert a Spark RDD into a GraphLab Create SFrame.

    To enable this function, you must add the jar file bundled with GraphLab
    Create to the Spark driver's classpath. This must happen BEFORE Spark
    launches its JVM, or else it will have no effect. To do this, first get
    the location of the packaged jar with
    `graphlab.get_spark_integration_jar_path`. You then have two options:

    1. Add the path to the jar to your spark-defaults.conf file. The
       property to set is 'spark.driver.extraClassPath'.

    OR

    2. Add the jar's path as a command line option to your favorite way to
       start pyspark (either spark-submit or pyspark). For this, use the
       command line option '--driver-class-path'.

    Parameters
    ----------
    rdd : pyspark.rdd.RDD

    Returns
    -------
    out : SFrame

    Examples
    --------
    >>> from pyspark import SparkContext
    >>> from graphlab import SFrame
    >>> sc = SparkContext('local')
    >>> rdd = sc.parallelize([1,2,3])
    >>> sf = SFrame.from_rdd(rdd)
    >>> sf
    Data:
    +-----+
    |  X1 |
    +-----+
    | 1.0 |
    | 2.0 |
    | 3.0 |
    +-----+
    [3 rows x 1 columns]
    """
    _mt._get_metric_tracker().track('sframe.from_rdd')
    if not RDD_SUPPORT:
        raise Exception("Support for translation to Spark RDDs not enabled.")
    checkRes = rdd.take(1);
    # A bare RDD of pyspark.sql.Row objects (not wrapped in a
    # SchemaRDD/DataFrame) carries no usable schema -- reject it.
    if len(checkRes) > 0 and checkRes[0].__class__.__name__ == 'Row' and rdd.__class__.__name__ not in {'SchemaRDD','DataFrame'}:
        raise Exception("Conversion from RDD(pyspark.sql.Row) to SFrame not supported. Please call inferSchema(RDD) first.")
    # String RDDs use a different deserializer; handled by a separate path.
    if(rdd._jrdd_deserializer.__class__.__name__ == 'UTF8Deserializer'):
        return SFrame.__from_UTF8Deserialized_rdd__(rdd)
    sf_names = None
    rdd_type = "rdd"
    if rdd.__class__.__name__ in {'SchemaRDD','DataFrame'}:
        rdd_type = "schemardd"
        first_row = rdd.take(1)[0]
        # Column names come from Row.keys() or, on older pyspark
        # versions, from the __FIELDS__ attribute.
        if hasattr(first_row, 'keys'):
            sf_names = first_row.keys()
        else:
            sf_names = first_row.__FIELDS__
        sf_names = [str(i) for i in sf_names]
    cur_sc = rdd.ctx
    tmp_loc = SFrame.__get_staging_dir__(cur_sc)
    if tmp_loc is None:
        raise RuntimeError("Could not determine staging directory for SFrame files.")
    mode = "batch"
    if(rdd._jrdd_deserializer.__class__.__name__ == 'PickleSerializer'):
        mode = "pickle"
    # Pipe the partitions through the external RDD->SFrame binary. Local
    # masters address the binaries by absolute path; otherwise they are
    # assumed to be shipped with the job and addressed relative to the
    # executor working directory ("./").
    if cur_sc.master[0:5] == 'local':
        t = cur_sc._jvm.org.graphlab.create.GraphLabUtil.byteToString(
            rdd._jrdd).pipe(
                BINARY_PATHS['SPARK_PIPE_WRAPPER_PATH'] + " " + \
                BINARY_PATHS['RDD_SFRAME_PATH'] + " " + tmp_loc +\
                " " + mode + " " + rdd_type)
    else:
        t = cur_sc._jvm.org.graphlab.create.GraphLabUtil.byteToString(
            rdd._jrdd).pipe(
                "./" + SPARK_SUPPORT_NAMES['SPARK_PIPE_WRAPPER_PATH'] +\
                " " + "./" + SPARK_SUPPORT_NAMES['RDD_SFRAME_PATH'] + " " +\
                tmp_loc + " " + mode + " " + rdd_type)
    # We get the location of an SFrame index file per Spark partition in
    # the result. We assume that this is in partition order.
    res = t.collect()
    out_sf = cls()
    sframe_list = []
    for url in res:
        sf = SFrame()
        sf.__proxy__.load_from_sframe_index(_make_internal_url(url))
        sf.__proxy__.delete_on_close()
        out_sf_coltypes = out_sf.column_types()
        # If a partition's column types disagree with what has been
        # accumulated so far, coerce both sides to str so append() works.
        if(len(out_sf_coltypes) != 0):
            sf_coltypes = sf.column_types()
            sf_temp_names = sf.column_names()
            out_sf_temp_names = out_sf.column_names()
            for i in range(len(sf_coltypes)):
                if sf_coltypes[i] != out_sf_coltypes[i]:
                    print "mismatch for types %s and %s" % (sf_coltypes[i],out_sf_coltypes[i])
                    sf[sf_temp_names[i]] = sf[sf_temp_names[i]].astype(str)
                    out_sf[out_sf_temp_names[i]] = out_sf[out_sf_temp_names[i]].astype(str)
        out_sf = out_sf.append(sf)
    out_sf.__proxy__.delete_on_close()
    # Restore the original column names recorded from the schema, if any.
    if sf_names is not None:
        out_names = out_sf.column_names()
        if(set(out_names) != set(sf_names)):
            out_sf = out_sf.rename(dict(zip(out_names, sf_names)))
    return out_sf
@classmethod
def __from_UTF8Deserialized_rdd__(cls, rdd):
    """
    Convert a UTF8-deserialized (string) RDD or SchemaRDD/DataFrame into
    an SFrame by piping it through the external non-pickle converter
    binary. Internal helper for from_rdd().
    """
    _mt._get_metric_tracker().track('sframe.__from_UTF8Deserialized_rdd__')
    if not RDD_SUPPORT:
        raise Exception("Support for translation to Spark RDDs not enabled.")
    cur_sc = rdd.ctx
    sf_names = None
    sf_types = None
    tmp_loc = SFrame.__get_staging_dir__(cur_sc)
    if tmp_loc is None:
        raise RuntimeError("Could not determine staging directory for SFrame files.")
    if(rdd.__class__.__name__ in {'SchemaRDD','DataFrame'}):
        # Schema'd input: recover column names and value types from the
        # first row (Row.keys() or the legacy __FIELDS__ attribute).
        first_row = rdd.take(1)[0]
        if hasattr(first_row, 'keys'):
            sf_names = first_row.keys()
            sf_types = [type(i) for i in first_row.values()]
        else:
            sf_names = first_row.__FIELDS__
            sf_types = [type(i) for i in first_row]
        sf_names = [str(i) for i in sf_names]
        # Only scalar text/numeric types survive the string pipe.
        for _type in sf_types:
            if(_type != int and _type != str and _type != float and _type != unicode):
                raise TypeError("Only int, str, and float are supported for now")
        # Comma-separated type-name list passed to the converter binary.
        types = ""
        for i in sf_types:
            types += i.__name__ + ","
        if cur_sc.master[0:5] == 'local':
            t = rdd._jschema_rdd.toJavaStringOfValues().pipe(
                BINARY_PATHS['SPARK_PIPE_WRAPPER_PATH'] + " " +\
                BINARY_PATHS['RDD_SFRAME_NONPICKLE_PATH'] + " " + tmp_loc +\
                " " + types)
        else:
            t = cur_sc._jvm.org.graphlab.create.GraphLabUtil.toJavaStringOfValues(
                rdd._jschema_rdd).pipe(
                    "./" + SPARK_SUPPORT_NAMES['SPARK_PIPE_WRAPPER_PATH'] +\
                    " " + "./" +\
                    SPARK_SUPPORT_NAMES['RDD_SFRAME_NONPICKLE_PATH'] + " " +\
                    tmp_loc + " " + types)
    else:
        # Plain string RDD: no schema, so no type list is passed.
        if cur_sc.master[0:5] == 'local':
            t = cur_sc._jvm.org.graphlab.create.GraphLabUtil.pythonToJava(
                rdd._jrdd).pipe(
                    BINARY_PATHS['SPARK_PIPE_WRAPPER_PATH'] + " " +\
                    BINARY_PATHS['RDD_SFRAME_NONPICKLE_PATH'] + " " +\
                    tmp_loc)
        else:
            t = cur_sc._jvm.org.graphlab.create.GraphLabUtil.pythonToJava(
                rdd._jrdd).pipe(
                    "./" + SPARK_SUPPORT_NAMES['SPARK_PIPE_WRAPPER_PATH'] +\
                    " " + "./" +\
                    SPARK_SUPPORT_NAMES['RDD_SFRAME_NONPICKLE_PATH'] + " " +\
                    tmp_loc)
    # We get the location of an SFrame index file per Spark partition in
    # the result. We assume that this is in partition order.
    res = t.collect()
    out_sf = cls()
    sframe_list = []
    for url in res:
        sf = SFrame()
        sf.__proxy__.load_from_sframe_index(_make_internal_url(url))
        sf.__proxy__.delete_on_close()
        out_sf = out_sf.append(sf)
    out_sf.__proxy__.delete_on_close()
    # Restore the schema column names, if any were recorded.
    if sf_names is not None:
        out_names = out_sf.column_names()
        if(set(out_names) != set(sf_names)):
            out_sf = out_sf.rename(dict(zip(out_names, sf_names)))
    return out_sf
@classmethod
def from_odbc(cls, db, sql, verbose=False):
    """
    Convert a table or query from a database to an SFrame.

    This function does not do any checking on the given SQL query, and
    cannot know what effect it will have on the database. Any side effects
    from the query will be reflected on the database. If no result
    rows are returned, an empty SFrame is created.

    Keep in mind the default case your database stores table names in. In
    some cases, you may need to add quotation marks (or whatever character
    your database uses to quote identifiers), especially if you created the
    table using `to_odbc`.

    Parameters
    ----------
    db : `graphlab.extensions._odbc_connection.unity_odbc_connection`
        An ODBC connection object. This can only be obtained by calling
        `graphlab.connect_odbc`. Check that documentation for how to create
        this object.

    sql : str
        A SQL query. The query must be acceptable by the ODBC driver used by
        `graphlab.extensions._odbc_connection.unity_odbc_connection`.

    verbose : bool, optional
        Currently unused; kept for interface compatibility.

    Returns
    -------
    out : SFrame

    Raises
    ------
    RuntimeError
        If the query produced no result set.

    Notes
    -----
    This functionality is only supported when using GraphLab Create
    entirely on your local machine. Therefore, GraphLab Create's EC2 and
    Hadoop execution modes will not be able to use ODBC. Note that this
    does not apply to the machine your database is running, which can (and
    often will) be running on a separate machine.

    Examples
    --------
    >>> db = graphlab.connect_odbc("DSN=my_awesome_dsn;UID=user;PWD=mypassword")
    >>> a_table = graphlab.SFrame.from_odbc(db, "SELECT * FROM a_table")
    >>> join_result = graphlab.SFrame.from_odbc(db, 'SELECT * FROM "MyTable" a, "AnotherTable" b WHERE a.id=b.id')
    """
    result = db.execute_query(sql)
    if not isinstance(result, SFrame):
        raise RuntimeError("Cannot create an SFrame for query. No result set.")
    # Return the result directly instead of rebinding `cls`, which
    # confusingly shadowed the classmethod's class parameter.
    return result
def to_odbc(self, db, table_name, append_if_exists=False, verbose=True):
    """
    Convert an SFrame to a table in a database.

    By default, searches for a table in the database with the given name.
    If found, this will attempt to append all the rows of the SFrame to the
    end of the table. If not, this will create a new table with the given
    name. This behavior is toggled with the `append_if_exists` flag.

    When creating a new table, GraphLab Create uses a heuristic approach to
    pick a corresponding type for each column in the SFrame using the type
    information supplied by the database's ODBC driver. Your driver must
    support giving this type information for GraphLab Create to support
    writing to the database.

    To allow more expressive and accurate naming, `to_odbc` puts quotes
    around each identifier (table names and column names). Depending on
    your database, you may need to refer to the created table with quote
    characters around the name. This character is not the same for all
    databases, but '"' is the most common.

    Parameters
    ----------
    db : `graphlab.extensions._odbc_connection.unity_odbc_connection`
        An ODBC connection object. This can only be obtained by calling
        `graphlab.connect_odbc`. Check that documentation for how to create
        this object.

    table_name : str
        The name of the table you would like to create/append to.

    append_if_exists : bool
        If True, this will attempt to append to the table named `table_name`
        if it is found to exist in the database.

    verbose : bool
        Print progress updates on the insertion process.

    Notes
    -----
    This functionality is only supported when using GraphLab Create
    entirely on your local machine. Therefore, GraphLab Create's EC2 and
    Hadoop execution modes will not be able to use ODBC. Note that this
    "local machine" rule does not apply to the machine your database is
    running on, which can (and often will) be running on a separate
    machine.

    Examples
    --------
    >>> db = graphlab.connect_odbc("DSN=my_awesome_dsn;UID=user;PWD=mypassword")
    >>> sf = graphlab.SFrame({'a':[1,2,3],'b':['hi','pika','bye']})
    >>> sf.to_odbc(db, 'a_cool_table')
    """
    if (not verbose):
        glconnect.get_client().set_log_progress(False)
    try:
        db._insert_sframe(self, table_name, append_if_exists)
    finally:
        # Always restore progress logging: previously an exception from
        # _insert_sframe left it disabled for the whole session.
        if (not verbose):
            glconnect.get_client().set_log_progress(True)
def __repr__(self):
    """
    Return a textual description of the frame: its column names/types,
    its row count (or "Unknown" for lazy frames), and its data preview.
    """
    stringified = self._imagecols_to_stringcols()
    pieces = [self.__get_column_description__()]
    if self.__has_size__():
        pieces.append("Rows: " + str(len(self)) + "\n\n")
    else:
        pieces.append("Rows: Unknown" + "\n\n")
    pieces.append("Data:\n")
    # Show the pretty-printed data only when there is at least one row.
    if len(stringified.head()) > 0:
        pieces.append(str(self))
    else:
        pieces.append("\t[]")
    return "".join(pieces)
def __get_column_description__(self):
    """
    Build the "Columns:" section of the frame description: one
    tab-indented "name<TAB>typename" line per column, or a "None" line
    when the frame has no columns.
    """
    names = self.column_names()
    types = self.column_types()
    if not names:
        return "Columns:\n" + "\tNone\n\n"
    body = "".join("\t" + name + "\t" + typ.__name__ + "\n"
                   for name, typ in zip(names, types))
    return "Columns:\n" + body + "\n"
def __get_pretty_tables__(self, wrap_text=False, max_row_width=80,
                          max_column_width=30, max_columns=20,
                          max_rows_to_display=60):
    """
    Returns a list of pretty print tables representing the current SFrame.
    If the number of columns is larger than max_columns, the last pretty
    table will contain an extra column of "...".

    Parameters
    ----------
    wrap_text : bool, optional
        Wrap long cell values onto a second line instead of truncating.
    max_row_width : int, optional
        Max number of characters per table.
    max_column_width : int, optional
        Max number of characters per column.
    max_columns : int, optional
        Max number of columns per table.
    max_rows_to_display : int, optional
        Max number of rows to display.

    Returns
    -------
    out : list[PrettyTable]
    """
    headsf = self.head(max_rows_to_display)
    if headsf.shape == (0, 0):
        return [PrettyTable()]
    # convert array.array column to list column so they print like [...]
    # and not array('d', ...)
    for col in headsf.column_names():
        if headsf[col].dtype() is array.array:
            headsf[col] = headsf[col].astype(list)
    def _value_to_str(value):
        # Render cell values; lists are rendered recursively so nested
        # arrays also print in [...] form.
        if (type(value) is array.array):
            return str(list(value))
        elif (type(value) is list):
            return '[' + ", ".join(_value_to_str(x) for x in value) + ']'
        else:
            return str(value)
    def _escape_space(s):
        # Make whitespace visible (e.g. '\t', '\n') so cells stay one line.
        return "".join([ch.encode('string_escape') if ch.isspace() else ch for ch in s])
    def _truncate_respect_unicode(s, max_length):
        # Truncate by unicode code points, not raw bytes, so multi-byte
        # UTF-8 sequences are not cut in half.
        if (len(s) <= max_length):
            return s
        else:
            u = unicode(s, 'utf-8', errors='replace')
            return u[:max_length].encode('utf-8')
    def _truncate_str(s, wrap_str=False):
        """
        Truncate and optionally wrap the input string as unicode, replace
        unconvertible character with a diamond ?.
        """
        s = _escape_space(s)
        if len(s) <= max_column_width:
            return unicode(s, 'utf-8', errors='replace')
        else:
            ret = ''
            # if wrap_str is true, wrap the text and take at most 2 rows
            if wrap_str:
                wrapped_lines = wrap(s, max_column_width)
                if len(wrapped_lines) == 1:
                    return wrapped_lines[0]
                last_line = wrapped_lines[1]
                if len(last_line) >= max_column_width:
                    last_line = _truncate_respect_unicode(last_line, max_column_width - 4)
                ret = wrapped_lines[0] + '\n' + last_line + ' ...'
            else:
                ret = _truncate_respect_unicode(s, max_column_width - 4) + '...'
            return unicode(ret, 'utf-8', errors='replace')
    columns = self.column_names()[:max_columns]
    columns.reverse()  # reverse the order of columns and we will pop from the end
    num_column_of_last_table = 0
    row_of_tables = []
    # let's build a list of tables with max_columns
    # each table should satisfy, max_row_width, and max_column_width
    while len(columns) > 0:
        tbl = PrettyTable()
        table_width = 0
        num_column_of_last_table = 0
        while len(columns) > 0:
            col = columns.pop()
            # check the max length of element in the column
            if len(headsf) > 0:
                col_width = min(max_column_width, max(len(str(x)) for x in headsf[col]))
            else:
                col_width = max_column_width
            if (table_width + col_width < max_row_width):
                # truncate the header if necessary
                header = _truncate_str(col, wrap_text)
                tbl.add_column(header, [_truncate_str(_value_to_str(x), wrap_text) for x in headsf[col]])
                # re-measure the rendered width from the first line of output
                table_width = str(tbl).find('\n')
                num_column_of_last_table += 1
            else:
                # the column does not fit in the current table, push it back to columns
                columns.append(col)
                break
        tbl.align = 'c'
        row_of_tables.append(tbl)
    # add a column of all "..." if there are more columns than displayed
    if self.num_cols() > max_columns:
        row_of_tables[-1].add_column('...', ['...'] * len(headsf))
        num_column_of_last_table += 1
    # add a row of all "..." if there are more rows than displayed
    if self.__has_size__() and self.num_rows() > headsf.num_rows():
        row_of_tables[-1].add_row(['...'] * num_column_of_last_table)
    return row_of_tables
def print_rows(self, num_rows=10, num_columns=40, max_column_width=30,
max_row_width=80):
"""
Print the first M rows and N columns of the SFrame in human readable
format.
Parameters
----------
num_rows : int, optional
Number of rows to print.
num_columns : int, optional
Number of columns to print.
max_column_width : int, optional
Maximum width of a column. Columns use fewer characters if possible.
max_row_width : int, optional
Maximum width of a printed row. Columns beyond this width wrap to a
new line. `max_row_width` is automatically reset to be the
larger of itself and `max_column_width`.
See Also
--------
head, tail
"""
max_row_width = max(max_row_width, max_column_width + 1)
printed_sf = self._imagecols_to_stringcols(num_rows)
row_of_tables = printed_sf.__get_pretty_tables__(wrap_text=False,
max_rows_to_display=num_rows,
max_columns=num_columns,
max_column_width=max_column_width,
max_row_width=max_row_width)
footer = "[%d rows x %d columns]\n" % self.shape
print '\n'.join([str(tb) for tb in row_of_tables]) + "\n" + footer
def _imagecols_to_stringcols(self, num_rows=10):
    """
    Return an SFrame suitable for printing in which every Image-typed
    column is replaced by its string representation (first `num_rows`
    rows). If the frame has no Image columns, return self unchanged.
    """
    names = self.column_names()
    types = self.column_types()
    image_cols = set(name for name, typ in zip(names, types)
                     if typ == graphlab.Image)
    if not image_cols:
        # Nothing to convert; avoid copying the frame.
        return self
    converted = SFrame()
    for name in names:
        if name in image_cols:
            converted[name] = self[name]._head_str(num_rows)
        else:
            converted[name] = self[name].head(num_rows)
    return converted
def __str__(self, num_rows=10, footer=True):
    """
    Return the first `num_rows` rows of the frame as pretty-printed
    tables, optionally followed by a footer describing the frame's size.
    """
    display_limit = num_rows
    stringified = self._imagecols_to_stringcols(display_limit)
    tables = stringified.__get_pretty_tables__(wrap_text=False,
                                               max_rows_to_display=display_limit)
    body = '\n'.join([str(tb) for tb in tables])
    if not footer:
        return body
    if self.__has_size__():
        suffix = '[%d rows x %d columns]\n' % self.shape
        if self.num_rows() > display_limit:
            suffix += '\n'.join(FOOTER_STRS)
    else:
        # Lazy frame: the row count is unknown without materializing.
        suffix = '[? rows x %d columns]\n' % self.num_columns()
        suffix += '\n'.join(LAZY_FOOTER_STRS)
    return body + "\n" + suffix
def _repr_html_(self):
    """
    HTML representation used by IPython/Jupyter: the first 10 rows
    rendered as HTML tables inside a scrollable <div>, followed by a
    footer describing the frame's size.
    """
    display_limit = 10
    stringified = self._imagecols_to_stringcols(display_limit)
    tables = stringified.__get_pretty_tables__(wrap_text=True,
                                               max_row_width=120,
                                               max_columns=40,
                                               max_column_width=25,
                                               max_rows_to_display=display_limit)
    if self.__has_size__():
        footer = '[%d rows x %d columns]<br/>' % self.shape
        if self.num_rows() > display_limit:
            footer += '<br/>'.join(FOOTER_STRS)
    else:
        # Lazy frame: the row count is unknown without materializing.
        footer = '[? rows x %d columns]<br/>' % self.num_columns()
        footer += '<br/>'.join(LAZY_FOOTER_STRS)
    opening = '<div style="max-height:1000px;max-width:1500px;overflow:auto;">'
    closing = '\n</div>'
    rendered = '\n'.join([tb.get_html_string(format=True) for tb in tables])
    return opening + rendered + "\n" + footer + closing
def __nonzero__(self):
    """
    Returns true if the frame is not empty (Python 2 truth protocol).
    """
    return self.num_rows() != 0
def __len__(self):
    """
    Returns the number of rows of the sframe.
    """
    # Delegates to num_rows(); lazy frames may need to materialize here.
    return self.num_rows()
def __copy__(self):
    """
    Returns a shallow copy of the sframe.
    """
    # A new SFrame built by selecting every existing column.
    return self.select_columns(self.column_names())
def __eq__(self, other):
    # Whole-frame equality is intentionally unsupported.
    raise NotImplementedError
def __ne__(self, other):
    # Whole-frame inequality is intentionally unsupported.
    raise NotImplementedError
def _row_selector(self, other):
    """
    Where other is an SArray of identical length as the current Frame,
    this returns a selection of a subset of rows in the current SFrame
    where the corresponding row in the selector is non-zero.
    """
    # NOTE(review): if `other` is not an SArray, this falls through and
    # implicitly returns None -- confirm callers only pass SArrays here.
    if type(other) is SArray:
        if len(other) != len(self):
            raise IndexError("Cannot perform logical indexing on arrays of different length.")
        with cython_context():
            return SFrame(_proxy=self.__proxy__.logical_filter(other.__proxy__))
def dtype(self):
    """
    The type of each column.

    Returns
    -------
    out : list[type]
        Column types of the SFrame.

    See Also
    --------
    column_types
    """
    # Alias for column_types(), mirroring the SArray.dtype() API.
    return self.column_types()
def num_rows(self):
    """
    The number of rows in this SFrame.

    Returns
    -------
    out : int
        Number of rows in the SFrame.

    See Also
    --------
    num_columns
    """
    # Row count is provided by the underlying proxy.
    return self.__proxy__.num_rows()
def num_cols(self):
    """
    The number of columns in this SFrame.

    Returns
    -------
    out : int
        Number of columns in the SFrame.

    See Also
    --------
    num_columns, num_rows
    """
    # Short alias for num_columns().
    return self.__proxy__.num_columns()
def num_columns(self):
    """
    The number of columns in this SFrame.

    Returns
    -------
    out : int
        Number of columns in the SFrame.

    See Also
    --------
    num_cols, num_rows
    """
    # Column count is provided by the underlying proxy.
    return self.__proxy__.num_columns()
def column_names(self):
    """
    The name of each column in the SFrame.

    Returns
    -------
    out : list[string]
        Column names of the SFrame.

    See Also
    --------
    rename
    """
    # Names are provided by the underlying proxy, in column order.
    return self.__proxy__.column_names()
def column_types(self):
    """
    The type of each column in the SFrame.

    Returns
    -------
    out : list[type]
        Column types of the SFrame.

    See Also
    --------
    dtype
    """
    # Types are provided by the underlying proxy, in column order.
    return self.__proxy__.dtype()
def head(self, n=10):
    """
    The first n rows of the SFrame.

    Parameters
    ----------
    n : int, optional
        The number of rows to fetch.

    Returns
    -------
    out : SFrame
        A new SFrame which contains the first n rows of the current SFrame

    See Also
    --------
    tail, print_rows
    """
    # Delegates to the proxy; returns a new frame, self is unchanged.
    return SFrame(_proxy=self.__proxy__.head(n))
def to_dataframe(self):
    """
    Convert this SFrame to pandas.DataFrame.

    This operation will construct a pandas.DataFrame in memory. Care must
    be taken when size of the returned object is big.

    Returns
    -------
    out : pandas.DataFrame
        The dataframe which contains all rows of SFrame
    """
    assert HAS_PANDAS
    df = pandas.DataFrame()
    # Fetch names/types once instead of calling column_names()/
    # column_types() on every loop iteration.
    names = self.column_names()
    types = self.column_types()
    for column_name, column_type in zip(names, types):
        df[column_name] = list(self[column_name])
        if len(df[column_name]) == 0:
            # Preserve the column dtype even when there are no rows.
            df[column_name] = df[column_name].astype(column_type)
    return df
def tail(self, n=10):
    """
    The last n rows of the SFrame.

    Parameters
    ----------
    n : int, optional
        The number of rows to fetch.

    Returns
    -------
    out : SFrame
        A new SFrame which contains the last n rows of the current SFrame

    See Also
    --------
    head, print_rows
    """
    # Delegates to the proxy; returns a new frame, self is unchanged.
    return SFrame(_proxy=self.__proxy__.tail(n))
def apply(self, fn, dtype=None, seed=None):
    """
    Transform each row to an :class:`~graphlab.SArray` according to a
    specified function. Returns a new SArray of ``dtype`` where each element
    in this SArray is transformed by `fn(x)` where `x` is a single row in
    the sframe represented as a dictionary. The ``fn`` should return
    exactly one value which can be cast into type ``dtype``. If ``dtype`` is
    not specified, the first 100 rows of the SFrame are used to make a guess
    of the target data type.

    Parameters
    ----------
    fn : function
        The function to transform each row of the SFrame. The return
        type should be convertible to `dtype` if `dtype` is not None.
        This can also be a toolkit extension function which is compiled
        as a native shared library using SDK.

    dtype : dtype, optional
        The dtype of the new SArray. If None, the first 100
        elements of the array are used to guess the target
        data type.

    seed : int, optional
        Used as the seed if a random number generator is included in `fn`.

    Returns
    -------
    out : SArray
        The SArray transformed by fn. Each element of the SArray is of
        type ``dtype``

    Examples
    --------
    Concatenate strings from several columns:

    >>> sf = graphlab.SFrame({'user_id': [1, 2, 3], 'movie_id': [3, 3, 6],
                              'rating': [4, 5, 1]})
    >>> sf.apply(lambda x: str(x['user_id']) + str(x['movie_id']) + str(x['rating']))
    dtype: str
    Rows: 3
    ['134', '235', '361']

    Using native toolkit extension function:

    .. code-block:: c++

        #include <graphlab/sdk/toolkit_function_macros.hpp>
        double mean(const std::map<flexible_type, flexible_type>& dict) {
          double sum = 0.0;
          for (const auto& kv: dict) sum += (double)kv.second;
          return sum / dict.size();
        }

        BEGIN_FUNCTION_REGISTRATION
        REGISTER_FUNCTION(mean, "row");
        END_FUNCTION_REGISTRATION

    compiled into example.so

    >>> import example

    >>> sf = graphlab.SFrame({'x0': [1, 2, 3], 'x1': [2, 3, 1],
    ...                       'x2': [3, 1, 2]})
    >>> sf.apply(example.mean)
    dtype: float
    Rows: 3
    [2.0,2.0,2.0]
    """
    assert _is_callable(fn), "Input must be a function"
    # Dry-run on the first 10 rows to surface errors early and, when no
    # dtype is given, to infer the output type from the sample results.
    test_sf = self[:10]
    dryrun = [fn(row) for row in test_sf]
    if dtype is None:
        dtype = SArray(dryrun).dtype()
    # Use `is None` so an explicit seed of 0 is honored; a falsy check
    # would silently replace it with a time-based seed.
    if seed is None:
        seed = int(time.time())
    _mt._get_metric_tracker().track('sframe.apply')
    # If fn is a compiled toolkit extension, run it natively; otherwise
    # fall back to the regular Python transform path.
    nativefn = None
    try:
        import graphlab.extensions as extensions
        nativefn = extensions._build_native_function_call(fn)
    except:
        # Best-effort probe: fn is an ordinary Python callable.
        pass
    if nativefn is not None:
        # this is a toolkit lambda. We can do something about it
        with cython_context():
            return SArray(_proxy=self.__proxy__.transform_native(nativefn, dtype, seed))
    with cython_context():
        return SArray(_proxy=self.__proxy__.transform(fn, dtype, seed))
def flat_map(self, column_names, fn, column_types='auto', seed=None):
    """
    Map each row of the SFrame to multiple rows in a new SFrame via a
    function.

    The output of `fn` must have type List[List[...]]. Each inner list
    will be a single row in the new output, and the collection of these
    rows within the outer list make up the data for the output SFrame.
    All rows must have the same length and the same order of types to
    make sure the result columns are homogeneously typed. For example, if
    the first element emitted into in the outer list by `fn` is
    [43, 2.3, 'string'], then all other elements emitted into the outer
    list must be a list with three elements, where the first is an int,
    second is a float, and third is a string. If column_types is not
    specified, the first 10 rows of the SFrame are used to determine the
    column types of the returned sframe.

    Parameters
    ----------
    column_names : list[str]
        The column names for the returned SFrame.

    fn : function
        The function that maps each of the sframe row into multiple rows,
        returning List[List[...]]. All outputted rows must have the same
        length and order of types.

    column_types : list[type], optional
        The column types of the output SFrame. Default value will be
        automatically inferred by running `fn` on the first 10 rows of the
        input. If the types cannot be inferred from the first 10 rows, an
        error is raised.

    seed : int, optional
        Used as the seed if a random number generator is included in `fn`.

    Returns
    -------
    out : SFrame
        A new SFrame containing the results of the flat_map of the
        original SFrame.

    Examples
    ---------
    Repeat each row according to the value in the 'number' column.

    >>> sf = graphlab.SFrame({'letter': ['a', 'b', 'c'],
    ...                       'number': [1, 2, 3]})
    >>> sf.flat_map(['number', 'letter'],
    ...             lambda x: [list(x.itervalues()) for i in range(0, x['number'])])
    +--------+--------+
    | number | letter |
    +--------+--------+
    |   1    |   a    |
    |   2    |   b    |
    |   2    |   b    |
    |   3    |   c    |
    |   3    |   c    |
    |   3    |   c    |
    +--------+--------+
    [6 rows x 2 columns]
    """
    assert inspect.isfunction(fn), "Input must be a function"
    # Use `is None` so an explicit seed of 0 is honored.
    if seed is None:
        seed = int(time.time())
    _mt._get_metric_tracker().track('sframe.flat_map')
    # determine the column_types
    if column_types == 'auto':
        # Run fn over a 10-row sample and collect the distinct tuples of
        # per-row value types; exactly one tuple must be observed.
        types = set()
        sample = self[0:10]
        results = [fn(row) for row in sample]
        for rows in results:
            if type(rows) is not list:
                raise TypeError("Output type of the lambda function must be a list of lists")
            # note: this skips empty lists
            for row in rows:
                if type(row) is not list:
                    raise TypeError("Output type of the lambda function must be a list of lists")
                types.add(tuple([type(v) for v in row]))
        if len(types) == 0:
            # Call-style raise: the old `raise TypeError, msg` statement
            # form is legacy Python 2 syntax.
            raise TypeError(
                "Could not infer output column types from the first ten rows " +
                "of the SFrame. Please use the 'column_types' parameter to " +
                "set the types.")
        if len(types) > 1:
            raise TypeError("Mapped rows must have the same length and types")
        column_types = list(types.pop())
    assert type(column_types) is list
    assert len(column_types) == len(column_names), "Number of output columns must match the size of column names"
    with cython_context():
        return SFrame(_proxy=self.__proxy__.flat_map(fn, column_names, column_types, seed))
def sample(self, fraction, seed=None):
    """
    Sample the current SFrame's rows.

    Parameters
    ----------
    fraction : float
        Approximate fraction of the rows to fetch. Must be between 0 and 1.
        The number of rows returned is approximately the fraction times the
        number of rows.

    seed : int, optional
        Seed for the random number generator used to sample.

    Returns
    -------
    out : SFrame
        A new SFrame containing sampled rows of the current SFrame.

    Raises
    ------
    ValueError
        If `fraction` is outside [0, 1].

    Examples
    --------
    Suppose we have an SFrame with 6,145 rows.

    >>> import random
    >>> sf = SFrame({'id': range(0, 6145)})

    Retrieve about 30% of the SFrame rows with repeatable results by
    setting the random seed.

    >>> len(sf.sample(.3, seed=5))
    1783
    """
    # Validate before doing any other work.
    if (fraction > 1 or fraction < 0):
        raise ValueError('Invalid sampling rate: ' + str(fraction))
    # Use `is None` so an explicit seed of 0 is honored; a falsy check
    # would silently replace it with a time-based seed.
    if seed is None:
        seed = int(time.time())
    _mt._get_metric_tracker().track('sframe.sample')
    if (self.num_rows() == 0 or self.num_cols() == 0):
        # Sampling an empty frame is a no-op.
        return self
    else:
        with cython_context():
            return SFrame(_proxy=self.__proxy__.sample(fraction, seed))
def random_split(self, fraction, seed=None):
    """
    Randomly split the rows of an SFrame into two SFrames. The first SFrame
    contains *M* rows, sampled uniformly (without replacement) from the
    original SFrame. *M* is approximately the fraction times the original
    number of rows. The second SFrame contains the remaining rows of the
    original SFrame.

    Parameters
    ----------
    fraction : float
        Approximate fraction of the rows to fetch for the first returned
        SFrame. Must be between 0 and 1.

    seed : int, optional
        Seed for the random number generator used to split.

    Returns
    -------
    out : tuple [SFrame]
        Two new SFrames.

    Raises
    ------
    ValueError
        If `fraction` is outside [0, 1], or if `seed` cannot be
        converted to an int.

    Examples
    --------
    Suppose we have an SFrame with 1,024 rows and we want to randomly split
    it into training and testing datasets with about a 90%/10% split.

    >>> sf = graphlab.SFrame({'id': range(1024)})
    >>> sf_train, sf_test = sf.random_split(.9, seed=5)
    >>> print len(sf_train), len(sf_test)
    922 102
    """
    if (fraction > 1 or fraction < 0):
        raise ValueError('Invalid sampling rate: ' + str(fraction))
    if (self.num_rows() == 0 or self.num_cols() == 0):
        return (SFrame(), SFrame())
    # Use `is None` so an explicit seed of 0 is honored; a falsy check
    # would silently replace it with a time-based seed.
    if seed is None:
        seed = int(time.time())
    # The server side requires this to be an int, so cast if we can.
    # TypeError is included so non-numeric seeds (e.g. a list) also get
    # the intended ValueError instead of an opaque TypeError.
    try:
        seed = int(seed)
    except (ValueError, TypeError):
        raise ValueError('The \'seed\' parameter must be of type int.')
    _mt._get_metric_tracker().track('sframe.random_split')
    with cython_context():
        proxy_pair = self.__proxy__.random_split(fraction, seed)
        return (SFrame(data=[], _proxy=proxy_pair[0]), SFrame(data=[], _proxy=proxy_pair[1]))
def topk(self, column_name, k=10, reverse=False):
    """
    Get top k rows according to the given column. Result is according to and
    sorted by `column_name` in the given order (default is descending).
    When `k` is small, `topk` is more efficient than `sort`.

    Parameters
    ----------
    column_name : string
        The column to sort on

    k : int, optional
        The number of rows to return

    reverse : bool, optional
        If True, return the top k rows in ascending order, otherwise, in
        descending order.

    Returns
    -------
    out : SFrame
        an SFrame containing the top k rows sorted by column_name.

    See Also
    --------
    sort

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': range(1000)})
    >>> sf['value'] = -sf['id']
    >>> sf.topk('id', k=3)
    +--------+--------+
    |   id   |  value |
    +--------+--------+
    |  999   |  -999  |
    |  998   |  -998  |
    |  997   |  -997  |
    +--------+--------+
    [3 rows x 2 columns]

    >>> sf.topk('value', k=3)
    +--------+--------+
    |   id   |  value |
    +--------+--------+
    |   1    |   -1   |
    |   2    |   -2   |
    |   3    |   -3   |
    +--------+--------+
    [3 rows x 2 columns]
    """
    if type(column_name) is not str:
        raise TypeError("column_name must be a string")
    _mt._get_metric_tracker().track('sframe.topk')
    # Select the k extreme rows via the column's topk index, then sort
    # just those k rows into the requested order.
    selected = self[self[column_name].topk_index(k, reverse)]
    return selected.sort(column_name, ascending=reverse)
def save(self, filename, format=None):
    """
    Save the SFrame to a file system for later use.

    Parameters
    ----------
    filename : string
        The location to save the SFrame. Either a local directory or a
        remote URL. If the format is 'binary', a directory will be created
        at the location which will contain the sframe.

    format : {'binary', 'csv'}, optional
        Format in which to save the SFrame. Binary saved SFrames can be
        loaded much faster and without any format conversion losses. If not
        given, will try to infer the format from filename given. If file
        name ends with 'csv' or '.csv.gz', then save as 'csv' format,
        otherwise save as 'binary' format.

    Raises
    ------
    ValueError
        If `format` is neither 'csv' nor 'binary'.

    See Also
    --------
    load_sframe, SFrame

    Examples
    --------
    >>> # Save the sframe into binary format
    >>> sf.save('data/training_data_sframe')

    >>> # Save the sframe into csv format
    >>> sf.save('data/training_data.csv', format='csv')
    """
    _mt._get_metric_tracker().track('sframe.save', properties={'format':format})
    # NOTE: formats are compared with `==`/`!=`. The previous code used
    # `is`, which tests object identity and only worked by accident of
    # small-string interning in CPython.
    if format is None:
        # Infer the format from the filename extension.
        if filename.endswith(('.csv', '.csv.gz')):
            format = 'csv'
        else:
            format = 'binary'
    else:
        if format == 'csv':
            if not filename.endswith(('.csv', '.csv.gz')):
                filename = filename + '.csv'
        elif format != 'binary':
            raise ValueError("Invalid format: {}. Supported formats are 'csv' and 'binary'".format(format))
    ## Save the SFrame
    url = _make_internal_url(filename)
    with cython_context():
        if format == 'binary':
            self.__proxy__.save(url)
        elif format == 'csv':
            assert filename.endswith(('.csv', '.csv.gz'))
            self.__proxy__.save_as_csv(url, {})
        else:
            raise ValueError("Unsupported format: {}".format(format))
def select_column(self, key):
    """
    Return the :class:`~graphlab.SArray` backing the column named ``key``.

    Parameters
    ----------
    key : str
        The column name.

    Returns
    -------
    out : SArray
        The SArray referred to by ``key``.

    Raises
    ------
    TypeError
        If ``key`` is not a string.

    See Also
    --------
    select_columns

    Examples
    --------
    >>> sf = graphlab.SFrame({'user_id': [1,2,3],
    ...                       'user_name': ['alice', 'bob', 'charlie']})
    >>> # Equivalent to `sa = sf['user_name']`
    >>> sa = sf.select_column('user_name')
    """
    if isinstance(key, str):
        with cython_context():
            return SArray(data=[], _proxy=self.__proxy__.select_column(key))
    raise TypeError("Invalid key type: must be str")
def select_columns(self, keylist):
    """
    Get SFrame composed only of the columns referred to in the given list of
    keys. Throws an exception if ANY of the keys are not in this SFrame or
    if ``keylist`` is anything other than a list of strings.

    Parameters
    ----------
    keylist : list[str]
        The list of column names.

    Returns
    -------
    out : SFrame
        A new SFrame that is made up of the columns referred to in
        ``keylist`` from the current SFrame.

    Raises
    ------
    TypeError
        If ``keylist`` is not iterable or contains a non-string.
    ValueError
        If ``keylist`` contains the same column name more than once.

    See Also
    --------
    select_column

    Examples
    --------
    >>> sf = graphlab.SFrame({'user_id': [1,2,3],
    ...                       'user_name': ['alice', 'bob', 'charlie'],
    ...                       'zipcode': [98101, 98102, 98103]})
    >>> # Equivalent to `sf2 = sf[['user_id', 'zipcode']]`
    >>> sf2 = sf.select_columns(['user_id', 'zipcode'])
    """
    if not hasattr(keylist, '__iter__'):
        raise TypeError("keylist must be an iterable")
    if not all([isinstance(x, str) for x in keylist]):
        raise TypeError("Invalid key type: must be str")

    # PERF FIX: the original detected duplicates with list.count() inside a
    # loop over the key set (O(n*m)); a single pass with a set is O(n) and
    # deterministically reports the first duplicate in list order.
    seen = set()
    for key in keylist:
        if key in seen:
            raise ValueError("There are duplicate keys in key list: '" + key + "'")
        seen.add(key)

    with cython_context():
        return SFrame(data=[], _proxy=self.__proxy__.select_columns(keylist))
def add_column(self, data, name=""):
    """
    Append one column to this SFrame, in place, and return self.

    The given SArray must have the same number of elements as every
    existing column. If ``name`` is empty, the backend picks a default
    column name.

    Parameters
    ----------
    data : SArray
        The 'column' of data to add.
    name : string, optional
        The name of the column. If no name is given, a default name is
        chosen.

    Returns
    -------
    out : SFrame
        The current SFrame.

    Raises
    ------
    TypeError
        If ``data`` is not an SArray or ``name`` is not a string.

    See Also
    --------
    add_columns

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> sa = graphlab.SArray(['cat', 'dog', 'fossa'])
    >>> # Equivalent to `sf['species'] = sa`
    >>> sf.add_column(sa, name='species')
    """
    # Validate argument types before touching the backend.
    if not isinstance(data, SArray):
        raise TypeError("Must give column as SArray")
    if not isinstance(name, str):
        raise TypeError("Invalid column name: must be str")
    column_proxy = data.__proxy__
    with cython_context():
        self.__proxy__.add_column(column_proxy, name)
    return self
def add_columns(self, data, namelist=None):
    """
    Append several columns to this SFrame, in place, and return self.

    ``data`` may be either another SFrame (whose columns and names are
    taken wholesale, ignoring ``namelist``) or a list of SArrays paired
    with a list of column names. All added columns must have the same
    length as the existing columns.

    Parameters
    ----------
    data : list[SArray] or SFrame
        The columns to add.
    namelist : list of string, optional
        A list of column names. All names must be specified. ``namelist``
        is ignored if data is an SFrame.

    Returns
    -------
    out : SFrame
        The current SFrame.

    Raises
    ------
    ValueError
        If an SFrame is given and one of its column names already exists.
    TypeError
        If the lists are not iterable, or contain the wrong element types.

    See Also
    --------
    add_column

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> sf2 = graphlab.SFrame({'species': ['cat', 'dog', 'fossa'],
    ...                        'age': [3, 5, 9]})
    >>> sf.add_columns(sf2)
    """
    if isinstance(data, SFrame):
        # Take every column of the other frame, keeping its own names.
        namelist = data.column_names()
        datalist = [data.select_column(col) for col in namelist]
        existing = set(self.column_names())
        for col in namelist:
            if col in existing:
                raise ValueError("Column '" + col + "' already exists in current SFrame")
    else:
        datalist = data
        if not hasattr(datalist, '__iter__'):
            raise TypeError("datalist must be an iterable")
        if not hasattr(namelist, '__iter__'):
            raise TypeError("namelist must be an iterable")
        if not all(isinstance(col, SArray) for col in datalist):
            raise TypeError("Must give column as SArray")
        if not all(isinstance(col, str) for col in namelist):
            raise TypeError("Invalid column name in list : must all be str")

    with cython_context():
        self.__proxy__.add_columns([col.__proxy__ for col in datalist], namelist)
    return self
def remove_column(self, name):
    """
    Remove the column named ``name`` from this SFrame, in place, and
    return self.

    Parameters
    ----------
    name : string
        The name of the column to remove.

    Returns
    -------
    out : SFrame
        The SFrame with given column removed.

    Raises
    ------
    KeyError
        If no column with that name exists.

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> # Equivalent to `del sf['val']`
    >>> sf.remove_column('val')
    """
    current_columns = self.column_names()
    if name not in current_columns:
        raise KeyError('Cannot find column %s' % name)
    # The backend removes columns by position, not by name.
    position = current_columns.index(name)
    with cython_context():
        self.__proxy__.remove_column(position)
    return self
def remove_columns(self, column_names):
    """
    Remove one or more columns from this SFrame, in place, and return
    self.

    Parameters
    ----------
    column_names : list or iterable
        A list or iterable of column names.

    Returns
    -------
    out : SFrame
        The SFrame with given columns removed.

    Raises
    ------
    KeyError
        If any requested column does not exist.

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': [1, 2, 3], 'val1': ['A', 'B', 'C'], 'val2' : [10, 11, 12]})
    >>> sf.remove_columns(['val1', 'val2'])
    """
    column_names = list(column_names)
    # Map each existing column name to its position.
    name_to_pos = {col: pos for pos, col in enumerate(self.column_names())}
    for col in column_names:
        if col not in name_to_pos:
            raise KeyError('Cannot find column %s' % col)
    # Remove from the highest index down so earlier removals do not shift
    # the positions of columns still pending deletion.
    for pos in sorted((name_to_pos[col] for col in column_names), reverse=True):
        with cython_context():
            self.__proxy__.remove_column(pos)
    return self
def swap_columns(self, column_1, column_2):
    """
    Swap the positions of the two named columns, in place, and return
    self.

    Parameters
    ----------
    column_1 : string
        Name of column to swap
    column_2 : string
        Name of other column to swap

    Returns
    -------
    out : SFrame
        The SFrame with swapped columns.

    Raises
    ------
    ValueError
        If either column name is not present (from ``list.index``).

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> sf.swap_columns('id', 'val')
    """
    names = self.column_names()
    # Resolve both positions before touching the backend so a missing
    # name fails fast.
    first_pos = names.index(column_1)
    second_pos = names.index(column_2)
    with cython_context():
        self.__proxy__.swap_columns(first_pos, second_pos)
    return self
def rename(self, names):
    """
    Rename columns, in place, and return self.

    ``names`` maps each existing column name to its replacement; columns
    not mentioned keep their names.

    Parameters
    ----------
    names : dict [string, string]
        Dictionary of [old_name, new_name]

    Returns
    -------
    out : SFrame
        The current SFrame.

    Raises
    ------
    TypeError
        If ``names`` is not a dict.
    ValueError
        If any key is not an existing column name.

    See Also
    --------
    column_names

    Examples
    --------
    >>> sf = SFrame({'X1': ['Alice','Bob'],
    ...              'X2': ['123 Fake Street','456 Fake Street']})
    >>> sf.rename({'X1': 'name', 'X2':'address'})
    """
    if type(names) is not dict:
        raise TypeError('names must be a dictionary: oldname -> newname')
    # Validate every old name up front so no partial rename happens.
    known = set(self.column_names())
    for old_name in names:
        if old_name not in known:
            raise ValueError('Cannot find column %s in the SFrame' % old_name)
    with cython_context():
        for old_name, new_name in names.items():
            # Re-query names each iteration: earlier renames in this loop
            # change the column list.
            position = self.column_names().index(old_name)
            self.__proxy__.set_column_name(position, new_name)
    return self
def __getitem__(self, key):
    """
    This method does things based on the type of `key`.

    If `key` is:
        * str
            Calls `select_column` on `key`
        * SArray
            Performs a logical filter. Expects given SArray to be the same
            length as all columns in current SFrame. Every row
            corresponding with an entry in the given SArray that is
            equivalent to False is filtered from the result.
        * int
            Returns a single row of the SFrame (the `key`th one) as a
            dictionary. Negative indices count from the end.
        * slice
            Returns an SFrame including only the sliced rows.

    Raises
    ------
    IndexError
        If an integer index is out of range.
    TypeError
        If `key` is of an unsupported type.
    """
    if type(key) is SArray:
        return self._row_selector(key)
    elif type(key) is list:
        return self.select_columns(key)
    elif type(key) is str:
        return self.select_column(key)
    elif type(key) is int:
        if key < 0:
            key = len(self) + key
        # BUG FIX: the original only rejected indices >= len(self); a
        # negative index below -len(self) stayed negative after the
        # adjustment above and was passed through to copy_range.
        if key < 0 or key >= len(self):
            raise IndexError("SFrame index out of range")
        return list(SFrame(_proxy = self.__proxy__.copy_range(key, 1, key+1)))[0]
    elif type(key) is slice:
        start = key.start
        stop = key.stop
        step = key.step
        if start is None:
            start = 0
        if stop is None:
            stop = len(self)
        if step is None:
            step = 1
        # handle negative indices
        if start < 0:
            start = len(self) + start
        if stop < 0:
            stop = len(self) + stop
        return SFrame(_proxy = self.__proxy__.copy_range(start, step, stop))
    else:
        # BUG FIX: the message now lists every supported key type (the
        # original omitted int and slice).
        raise TypeError("Invalid index type: must be SArray, list, str, int, or slice")
def __setitem__(self, key, value):
    """
    A wrapper around add_column(s). Key can be either a list or a str. If
    value is an SArray, it is added to the SFrame as a column. If it is a
    constant value (int, str, or float), then a column is created where
    every entry is equal to the constant value. Existing columns can also
    be replaced using this wrapper.

    Raises
    ------
    TypeError
        If `key` is neither a list nor a str.
    """
    if type(key) is list:
        # Multiple columns at once: value supplies the data, key the names.
        self.add_columns(value, key)
    elif type(key) is str:
        # Normalize `value` into an SArray first.
        sa_value = None
        if (type(value) is SArray):
            sa_value = value
        elif hasattr(value, '__iter__'):  # wrap list, array... to sarray
            sa_value = SArray(value)
        else: # create an sarray  of constant value
            sa_value = SArray.from_const(value, self.num_rows())
        # set new column
        if not key in self.column_names():
            with cython_context():
                self.add_column(sa_value, key)
        else:
            # special case if replacing the only column.
            # server would fail the replacement if the new column has different
            # length than current one, which doesn't make sense if we are replacing
            # the only column. To support this, we first take out the only column
            # and then put it back if exception happens
            single_column = (self.num_cols() == 1)
            if (single_column):
                tmpname = key
                saved_column = self.select_column(key)
                self.remove_column(key)
            else:
                # add the column to a unique column name.
                # Joining all existing names guarantees the temporary name
                # cannot collide with any current column.
                tmpname = '__' + '-'.join(self.column_names())
            try:
                self.add_column(sa_value, tmpname)
            except Exception as e:
                # Restore the column we removed above before re-raising, so
                # a failed replacement leaves the SFrame unchanged.
                if (single_column):
                    self.add_column(saved_column, key)
                raise
            if (not single_column):
                # if add succeeded, remove the column name and rename tmpname->columnname.
                # swap first so the replacement ends up at the original
                # column's position.
                self.swap_columns(key, tmpname)
                self.remove_column(key)
                self.rename({tmpname: key})
    else:
        raise TypeError('Cannot set column with key type ' + str(type(key)))
def __delitem__(self, key):
    """
    Wrapper around remove_column.

    Enables ``del sf['column_name']``. Raises KeyError (via
    remove_column) if the column does not exist.
    """
    self.remove_column(key)
def __materialize__(self):
    """
    For an SFrame that is lazily evaluated, force the persistence of the
    SFrame to disk, committing all lazy evaluated operations.

    This is a no-op for an already-materialized SFrame; it delegates
    entirely to the backend proxy.
    """
    with cython_context():
        self.__proxy__.materialize()
def __is_materialized__(self):
    """
    Returns whether or not the SFrame has been materialized.

    Delegates to the backend proxy; see __materialize__ to force
    materialization.
    """
    return self.__proxy__.is_materialized()
def __has_size__(self):
    """
    Returns whether or not the size of the SFrame is known.

    Delegates to the backend proxy; for a lazily evaluated SFrame the row
    count may not be known until materialization.
    """
    return self.__proxy__.has_size()
def __iter__(self):
    """
    Iterate over the rows of the SFrame.

    Yields each row as a dict mapping column name -> value. Rows are
    fetched from the backend in large batches to amortize proxy-call
    overhead.
    """
    _mt._get_metric_tracker().track('sframe.__iter__')

    def row_generator():
        batch_size = 262144
        self.__proxy__.begin_iterator()
        batch = self.__proxy__.iterator_get_next(batch_size)
        names = self.column_names()
        while True:
            for values in batch:
                yield dict(zip(names, values))
            # A short batch means the backend iterator is exhausted.
            if len(batch) < batch_size:
                break
            batch = self.__proxy__.iterator_get_next(batch_size)

    return row_generator()
def append(self, other):
    """
    Add the rows of an SFrame to the end of this SFrame.

    Both SFrames must have the same set of columns with the same column
    names and column types.

    Parameters
    ----------
    other : SFrame
        Another SFrame whose rows are appended to the current SFrame.

    Returns
    -------
    out : SFrame
        The result SFrame from the append operation.

    Raises
    ------
    RuntimeError
        If ``other`` is not an SFrame, or the column sets / types differ.

    Examples
    --------
    >>> sf = graphlab.SFrame({'id': [4, 6, 8], 'val': ['D', 'F', 'H']})
    >>> sf2 = graphlab.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> sf = sf.append(sf2)
    """
    _mt._get_metric_tracker().track('sframe.append')
    if type(other) is not SFrame:
        raise RuntimeError("SFrame append can only work with SFrame")

    left_empty = len(self.column_names()) == 0
    right_empty = len(other.column_names()) == 0
    if (left_empty and right_empty):
        return SFrame()
    if (left_empty or right_empty):
        # Appending to/from a column-less frame is a no-op; return the
        # side that actually has columns.
        return self if right_empty else other

    my_column_names = self.column_names()
    my_column_types = self.column_types()
    other_column_names = other.column_names()
    if (len(my_column_names) != len(other_column_names)):
        raise RuntimeError("Two SFrames have to have the same number of columns")

    # check if the order of column names is the same
    column_name_order_match = (my_column_names == other_column_names)

    processed_other_frame = other
    if not column_name_order_match:
        # we allow name order of two sframes to be different, so we create a
        # new sframe from "other" to make it match this frame's layout
        processed_other_frame = SFrame()
        for i, col_name in enumerate(my_column_names):
            if col_name not in other_column_names:
                raise RuntimeError("Column " + col_name + " does not exist in second SFrame")

            other_column = other.select_column(col_name)
            processed_other_frame.add_column(other_column, col_name)

            # check column type
            if my_column_types[i] != other_column.dtype():
                raise RuntimeError("Column " + col_name + " type is not the same in two SFrames, one is " + str(my_column_types[i]) + ", the other is " + str(other_column.dtype()))
    else:
        # BUG FIX: the original skipped type validation entirely when the
        # column order already matched, silently passing mismatched types
        # through to the backend. Validate types in this case as well.
        other_column_types = other.column_types()
        for i in range(len(my_column_names)):
            if my_column_types[i] != other_column_types[i]:
                raise RuntimeError("Column " + my_column_names[i] + " type is not the same in two SFrames, one is " + str(my_column_types[i]) + ", the other is " + str(other_column_types[i]))

    with cython_context():
        processed_other_frame.__materialize__()
        return SFrame(_proxy=self.__proxy__.append(processed_other_frame.__proxy__))
def groupby(self, key_columns, operations, *args):
"""
Perform a group on the key_columns followed by aggregations on the
columns listed in operations.
The operations parameter is a dictionary that indicates which
aggregation operators to use and which columns to use them on. The
available operators are SUM, MAX, MIN, COUNT, AVG, VAR, STDV, CONCAT,
SELECT_ONE, ARGMIN, ARGMAX, and QUANTILE. For convenience, aggregators
MEAN, STD, and VARIANCE are available as synonyms for AVG, STDV, and
VAR. See :mod:`~graphlab.aggregate` for more detail on the aggregators.
Parameters
----------
key_columns : string | list[string]
Column(s) to group by. Key columns can be of any type other than
dictionary.
operations : dict, list
Dictionary of columns and aggregation operations. Each key is a
output column name and each value is an aggregator. This can also
be a list of aggregators, in which case column names will be
automatically assigned.
*args
All other remaining arguments will be interpreted in the same
way as the operations argument.
Returns
-------
out_sf : SFrame
A new SFrame, with a column for each groupby column and each
aggregation operation.
See Also
--------
aggregate
Examples
--------
Suppose we have an SFrame with movie ratings by many users.
>>> import graphlab.aggregate as agg
>>> url = 'http://s3.amazonaws.com/gl-testdata/rating_data_example.csv'
>>> sf = graphlab.SFrame.read_csv(url)
>>> sf
+---------+----------+--------+
| user_id | movie_id | rating |
+---------+----------+--------+
| 25904 | 1663 | 3 |
| 25907 | 1663 | 3 |
| 25923 | 1663 | 3 |
| 25924 | 1663 | 3 |
| 25928 | 1663 | 2 |
| 25933 | 1663 | 4 |
| 25934 | 1663 | 4 |
| 25935 | 1663 | 4 |
| 25936 | 1663 | 5 |
| 25937 | 1663 | 2 |
| ... | ... | ... |
+---------+----------+--------+
[10000 rows x 3 columns]
Compute the number of occurrences of each user.
>>> user_count = sf.groupby(key_columns='user_id',
... operations={'count': agg.COUNT()})
>>> user_count
+---------+-------+
| user_id | count |
+---------+-------+
| 62361 | 1 |
| 30727 | 1 |
| 40111 | 1 |
| 50513 | 1 |
| 35140 | 1 |
| 42352 | 1 |
| 29667 | 1 |
| 46242 | 1 |
| 58310 | 1 |
| 64614 | 1 |
| ... | ... |
+---------+-------+
[9852 rows x 2 columns]
Compute the mean and standard deviation of ratings per user.
>>> user_rating_stats = sf.groupby(key_columns='user_id',
... operations={
... 'mean_rating': agg.MEAN('rating'),
... 'std_rating': agg.STD('rating')
... })
>>> user_rating_stats
+---------+-------------+------------+
| user_id | mean_rating | std_rating |
+---------+-------------+------------+
| 62361 | 5.0 | 0.0 |
| 30727 | 4.0 | 0.0 |
| 40111 | 2.0 | 0.0 |
| 50513 | 4.0 | 0.0 |
| 35140 | 4.0 | 0.0 |
| 42352 | 5.0 | 0.0 |
| 29667 | 4.0 | 0.0 |
| 46242 | 5.0 | 0.0 |
| 58310 | 2.0 | 0.0 |
| 64614 | 2.0 | 0.0 |
| ... | ... | ... |
+---------+-------------+------------+
[9852 rows x 3 columns]
Compute the movie with the minimum rating per user.
>>> chosen_movies = sf.groupby(key_columns='user_id',
... operations={
... 'worst_movies': agg.ARGMIN('rating','movie_id')
... })
>>> chosen_movies
+---------+-------------+
| user_id | worst_movies |
+---------+-------------+
| 62361 | 1663 |
| 30727 | 1663 |
| 40111 | 1663 |
| 50513 | 1663 |
| 35140 | 1663 |
| 42352 | 1663 |
| 29667 | 1663 |
| 46242 | 1663 |
| 58310 | 1663 |
| 64614 | 1663 |
| ... | ... |
+---------+-------------+
[9852 rows x 2 columns]
Compute the movie with the max rating per user and also the movie with
the maximum imdb-ranking per user.
>>> sf['imdb-ranking'] = sf['rating'] * 10
>>> chosen_movies = sf.groupby(key_columns='user_id',
... operations={('max_rating_movie','max_imdb_ranking_movie'): agg.ARGMAX(('rating','imdb-ranking'),'movie_id')})
>>> chosen_movies
+---------+------------------+------------------------+
| user_id | max_rating_movie | max_imdb_ranking_movie |
+---------+------------------+------------------------+
| 62361 | 1663 | 16630 |
| 30727 | 1663 | 16630 |
| 40111 | 1663 | 16630 |
| 50513 | 1663 | 16630 |
| 35140 | 1663 | 16630 |
| 42352 | 1663 | 16630 |
| 29667 | 1663 | 16630 |
| 46242 | 1663 | 16630 |
| 58310 | 1663 | 16630 |
| 64614 | 1663 | 16630 |
| ... | ... | ... |
+---------+------------------+------------------------+
[9852 rows x 3 columns]
Compute the movie with the max rating per user.
>>> chosen_movies = sf.groupby(key_columns='user_id',
operations={'best_movies': agg.ARGMAX('rating','movie')})
Compute the movie with the max rating per user and also the movie with the maximum imdb-ranking per user.
>>> chosen_movies = sf.groupby(key_columns='user_id',
operations={('max_rating_movie','max_imdb_ranking_movie'): agg.ARGMAX(('rating','imdb-ranking'),'movie')})
Compute the count, mean, and standard deviation of ratings per (user,
time), automatically assigning output column names.
>>> sf['time'] = sf.apply(lambda x: (x['user_id'] + x['movie_id']) % 11 + 2000)
>>> user_rating_stats = sf.groupby(['user_id', 'time'],
... [agg.COUNT(),
... agg.AVG('rating'),
... agg.STDV('rating')])
>>> user_rating_stats
+------+---------+-------+---------------+----------------+
| time | user_id | Count | Avg of rating | Stdv of rating |
+------+---------+-------+---------------+----------------+
| 2006 | 61285 | 1 | 4.0 | 0.0 |
| 2000 | 36078 | 1 | 4.0 | 0.0 |
| 2003 | 47158 | 1 | 3.0 | 0.0 |
| 2007 | 34446 | 1 | 3.0 | 0.0 |
| 2010 | 47990 | 1 | 3.0 | 0.0 |
| 2003 | 42120 | 1 | 5.0 | 0.0 |
| 2007 | 44940 | 1 | 4.0 | 0.0 |
| 2008 | 58240 | 1 | 4.0 | 0.0 |
| 2002 | 102 | 1 | 1.0 | 0.0 |
| 2009 | 52708 | 1 | 3.0 | 0.0 |
| ... | ... | ... | ... | ... |
+------+---------+-------+---------------+----------------+
[10000 rows x 5 columns]
The groupby function can take a variable length list of aggregation
specifiers so if we want the count and the 0.25 and 0.75 quantiles of
ratings:
>>> user_rating_stats = sf.groupby(['user_id', 'time'], agg.COUNT(),
... {'rating_quantiles': agg.QUANTILE('rating',[0.25, 0.75])})
>>> user_rating_stats
+------+---------+-------+------------------------+
| time | user_id | Count | rating_quantiles |
+------+---------+-------+------------------------+
| 2006 | 61285 | 1 | array('d', [4.0, 4.0]) |
| 2000 | 36078 | 1 | array('d', [4.0, 4.0]) |
| 2003 | 47158 | 1 | array('d', [3.0, 3.0]) |
| 2007 | 34446 | 1 | array('d', [3.0, 3.0]) |
| 2010 | 47990 | 1 | array('d', [3.0, 3.0]) |
| 2003 | 42120 | 1 | array('d', [5.0, 5.0]) |
| 2007 | 44940 | 1 | array('d', [4.0, 4.0]) |
| 2008 | 58240 | 1 | array('d', [4.0, 4.0]) |
| 2002 | 102 | 1 | array('d', [1.0, 1.0]) |
| 2009 | 52708 | 1 | array('d', [3.0, 3.0]) |
| ... | ... | ... | ... |
+------+---------+-------+------------------------+
[10000 rows x 4 columns]
To put all items a user rated into one list value by their star rating:
>>> user_rating_stats = sf.groupby(["user_id", "rating"],
... {"rated_movie_ids":agg.CONCAT("movie_id")})
>>> user_rating_stats
+--------+---------+----------------------+
| rating | user_id | rated_movie_ids |
+--------+---------+----------------------+
| 3 | 31434 | array('d', [1663.0]) |
| 5 | 25944 | array('d', [1663.0]) |
| 4 | 38827 | array('d', [1663.0]) |
| 4 | 51437 | array('d', [1663.0]) |
| 4 | 42549 | array('d', [1663.0]) |
| 4 | 49532 | array('d', [1663.0]) |
| 3 | 26124 | array('d', [1663.0]) |
| 4 | 46336 | array('d', [1663.0]) |
| 4 | 52133 | array('d', [1663.0]) |
| 5 | 62361 | array('d', [1663.0]) |
| ... | ... | ... |
+--------+---------+----------------------+
[9952 rows x 3 columns]
To put all items and rating of a given user together into a dictionary
value:
>>> user_rating_stats = sf.groupby("user_id",
... {"movie_rating":agg.CONCAT("movie_id", "rating")})
>>> user_rating_stats
+---------+--------------+
| user_id | movie_rating |
+---------+--------------+
| 62361 | {1663: 5} |
| 30727 | {1663: 4} |
| 40111 | {1663: 2} |
| 50513 | {1663: 4} |
| 35140 | {1663: 4} |
| 42352 | {1663: 5} |
| 29667 | {1663: 4} |
| 46242 | {1663: 5} |
| 58310 | {1663: 2} |
| 64614 | {1663: 2} |
| ... | ... |
+---------+--------------+
[9852 rows x 2 columns]
"""
# some basic checking first
# make sure key_columns is a list
if isinstance(key_columns, str):
key_columns = [key_columns]
# check that every column is a string, and is a valid column name
my_column_names = self.column_names()
key_columns_array = []
for column in key_columns:
if not isinstance(column, str):
raise TypeError("Column name must be a string")
if column not in my_column_names:
raise KeyError("Column " + column + " does not exist in SFrame")
if self[column].dtype() == dict:
raise TypeError("Cannot group on a dictionary column.")
key_columns_array.append(column)
group_output_columns = []
group_columns = []
group_ops = []
all_ops = [operations] + list(args)
for op_entry in all_ops:
# if it is not a dict, nor a list, it is just a single aggregator
# element (probably COUNT). wrap it in a list so we can reuse the
# list processing code
operation = op_entry
if not(isinstance(operation, list) or isinstance(operation, dict)):
operation = [operation]
if isinstance(operation, dict):
# now sweep the dict and add to group_columns and group_ops
for key in operation:
val = operation[key]
if type(val) is tuple:
(op, column) = val
if (op == '__builtin__avg__' and self[column[0]].dtype() is array.array):
op = '__builtin__vector__avg__'
if (op == '__builtin__sum__' and self[column[0]].dtype() is array.array):
op = '__builtin__vector__sum__'
if (op == '__builtin__argmax__' or op == '__builtin__argmin__') and ((type(column[0]) is tuple | codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
"""
pygments.lexers._lasso_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Built-in Lasso types, traits, methods, and members.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
BUILTINS = {
'Types': (
'array',
'atbegin',
'boolean',
'bson_iter',
'bson',
'bytes_document_body',
'bytes',
'cache_server_element',
'cache_server',
'capture',
'client_address',
'client_ip',
'component_container',
'component_render_state',
'component',
'curl',
'curltoken',
'currency',
'custom',
'data_document',
'database_registry',
'date',
'dateandtime',
'dbgp_packet',
'dbgp_server',
'debugging_stack',
'decimal',
'delve',
'dir',
'dirdesc',
'dns_response',
'document_base',
'document_body',
'document_header',
'dsinfo',
'duration',
'eacher',
'email_compose',
'email_parse',
'email_pop',
'email_queue_impl_base',
'email_queue_impl',
'email_smtp',
'email_stage_impl_base',
'email_stage_impl',
'fastcgi_each_fcgi_param',
'fastcgi_server',
'fcgi_record',
'fcgi_request',
'file',
'filedesc',
'filemaker_datasource',
'generateforeachkeyed',
'generateforeachunkeyed',
'generateseries',
'hash_map',
'html_atomic_element',
'html_attr',
'html_base',
'html_binary',
'html_br',
'html_cdata',
'html_container_element',
'html_div',
'html_document_body',
'html_document_head',
'html_eol',
'html_fieldset',
'html_form',
'html_h1',
'html_h2',
'html_h3',
'html_h4',
'html_h5',
'html_h6',
'html_hr',
'html_img',
'html_input',
'html_json',
'html_label',
'html_legend',
'html_link',
'html_meta',
'html_object',
'html_option',
'html_raw',
'html_script',
'html_select',
'html_span',
'html_style',
'html_table',
'html_td',
'html_text',
'html_th',
'html_tr',
'http_document_header',
'http_document',
'http_error',
'http_header_field',
'http_server_connection_handler_globals',
'http_server_connection_handler',
'http_server_request_logger_thread',
'http_server_web_connection',
'http_server',
'image',
'include_cache',
'inline_type',
'integer',
'java_jnienv',
'jbyte',
'jbytearray',
'jchar',
'jchararray',
'jfieldid',
'jfloat',
'jint',
'jmethodid',
'jobject',
'jshort',
'json_decode',
'json_encode',
'json_literal',
'json_object',
'keyword',
'lassoapp_compiledsrc_appsource',
'lassoapp_compiledsrc_fileresource',
'lassoapp_content_rep_halt',
'lassoapp_dirsrc_appsource',
'lassoapp_dirsrc_fileresource',
'lassoapp_installer',
'lassoapp_livesrc_appsource',
'lassoapp_livesrc_fileresource',
'lassoapp_long_expiring_bytes',
'lassoapp_manualsrc_appsource',
'lassoapp_zip_file_server',
'lassoapp_zipsrc_appsource',
'lassoapp_zipsrc_fileresource',
'ldap',
'library_thread_loader',
'list_node',
'list',
'locale',
'log_impl_base',
'log_impl',
'magick_image',
'map_node',
'map',
'memberstream',
'memory_session_driver_impl_entry',
'memory_session_driver_impl',
'memory_session_driver',
'mime_reader',
'mongo_client',
'mongo_collection',
'mongo_cursor',
'mustache_ctx',
'mysql_session_driver_impl',
'mysql_session_driver',
'net_named_pipe',
'net_tcp_ssl',
'net_tcp',
'net_udp_packet',
'net_udp',
'null',
'odbc_session_driver_impl',
'odbc_session_driver',
'opaque',
'os_process',
'pair_compare',
'pair',
'pairup',
'pdf_barcode',
'pdf_chunk',
'pdf_color',
'pdf_doc',
'pdf_font',
'pdf_hyphenator',
'pdf_image',
'pdf_list',
'pdf_paragraph',
'pdf_phrase',
'pdf_read',
'pdf_table',
'pdf_text',
'pdf_typebase',
'percent',
'portal_impl',
'queriable_groupby',
'queriable_grouping',
'queriable_groupjoin',
'queriable_join',
'queriable_orderby',
'queriable_orderbydescending',
'queriable_select',
'queriable_selectmany',
'queriable_skip',
'queriable_take',
'queriable_thenby',
'queriable_thenbydescending',
'queriable_where',
'queue',
'raw_document_body',
'regexp',
'repeat',
'scientific',
'security_registry',
'serialization_element',
'serialization_object_identity_compare',
'serialization_reader',
'serialization_writer_ref',
'serialization_writer_standin',
'serialization_writer',
'session_delete_expired_thread',
'set',
'signature',
'sourcefile',
'sqlite_column',
'sqlite_currentrow',
'sqlite_db',
'sqlite_results',
'sqlite_session_driver_impl_entry',
'sqlite_session_driver_impl',
'sqlite_session_driver',
'sqlite_table',
'sqlite3_stmt',
'sqlite3',
'staticarray',
'string',
'sys_process',
'tag',
'text_document',
'tie',
'timeonly',
'trait',
'tree_base',
'tree_node',
'tree_nullnode',
'ucal',
'usgcpu',
'usgvm',
'void',
'web_error_atend',
'web_node_base',
'web_node_content_representation_css_specialized',
'web_node_content_representation_html_specialized',
'web_node_content_representation_js_specialized',
'web_node_content_representation_xhr_container',
'web_node_echo',
'web_node_root',
'web_request_impl',
'web_request',
'web_response_impl',
'web_response',
'web_router',
'websocket_handler',
'worker_pool',
'xml_attr',
'xml_cdatasection',
'xml_characterdata',
'xml_comment',
'xml_document',
'xml_documentfragment',
'xml_documenttype',
'xml_domimplementation',
'xml_element',
'xml_entity',
'xml_entityreference',
'xml_namednodemap_attr',
'xml_namednodemap_ht',
'xml_namednodemap',
'xml_node',
'xml_nodelist',
'xml_notation',
'xml_processinginstruction',
'xml_text',
'xmlstream',
'zip_file_impl',
'zip_file',
'zip_impl',
'zip',
),
'Traits': (
'any',
'formattingbase',
'html_attributed',
'html_element_coreattrs',
'html_element_eventsattrs',
'html_element_i18nattrs',
'lassoapp_capabilities',
'lassoapp_resource',
'lassoapp_source',
'queriable_asstring',
'session_driver',
'trait_array',
'trait_asstring',
'trait_backcontractible',
'trait_backended',
'trait_backexpandable',
'trait_close',
'trait_contractible',
'trait_decompose_assignment',
'trait_doubleended',
'trait_each_sub',
'trait_encodeurl',
'trait_endedfullymutable',
'trait_expandable',
'trait_file',
'trait_finite',
'trait_finiteforeach',
'trait_foreach',
'trait_foreachtextelement',
'trait_frontcontractible',
'trait_frontended',
'trait_frontexpandable',
'trait_fullymutable',
'trait_generator',
'trait_generatorcentric',
'trait_hashable',
'trait_json_serialize',
'trait_keyed',
'trait_keyedfinite',
'trait_keyedforeach',
'trait_keyedmutable',
'trait_list',
'trait_map',
'trait_net',
'trait_pathcomponents',
'trait_positionallykeyed',
'trait_positionallysearchable',
'trait_queriable',
'trait_queriablelambda',
'trait_readbytes',
'trait_readstring',
'trait_scalar',
'trait_searchable',
'trait_serializable',
'trait_setencoding',
'trait_setoperations',
'trait_stack',
'trait_treenode',
'trait_writebytes',
'trait_writestring',
'trait_xml_elementcompat',
'trait_xml_nodecompat',
'web_connection',
'web_node_container',
'web_node_content_css_specialized',
'web_node_content_document',
'web_node_content_html_specialized',
'web_node_content_js_specialized',
'web_node_content_json_specialized',
'web_node_content_representation',
'web_node_content',
'web_node_postable',
'web_node',
),
'Unbound Methods': (
'abort_clear',
'abort_now',
'abort',
'action_param',
'action_params',
'action_statement',
'admin_authorization',
'admin_currentgroups',
'admin_currentuserid',
'admin_currentusername',
'admin_getpref',
'admin_initialize',
'admin_lassoservicepath',
'admin_removepref',
'admin_setpref',
'admin_userexists',
'all',
'auth_admin',
'auth_check',
'auth_custom',
'auth_group',
'auth_prompt',
'auth_user',
'bom_utf16be',
'bom_utf16le',
'bom_utf32be',
'bom_utf32le',
'bom_utf8',
'bw',
'capture_nearestloopabort',
'capture_nearestloopcontinue',
'capture_nearestloopcount',
'checked',
'cipher_decrypt_private',
'cipher_decrypt_public',
'cipher_decrypt',
'cipher_digest',
'cipher_encrypt_private',
'cipher_encrypt_public',
'cipher_encrypt',
'cipher_generate_key',
'cipher_hmac',
'cipher_keylength',
'cipher_list',
'cipher_open',
'cipher_seal',
'cipher_sign',
'cipher_verify',
'client_addr',
'client_authorization',
'client_browser',
'client_contentlength',
'client_contenttype',
'client_cookielist',
'client_cookies',
'client_encoding',
'client_formmethod',
'client_getargs',
'client_getparam',
'client_getparams',
'client_headers',
'client_integertoip',
'client_iptointeger',
'client_password',
'client_postargs',
'client_postparam',
'client_postparams',
'client_type',
'client_url',
'client_username',
'cn',
'column_name',
'column_names',
'column_type',
'column',
'compress',
'content_addheader',
'content_body',
'content_encoding',
'content_header',
'content_replaceheader',
'content_type',
'cookie_set',
'cookie',
'curl_easy_cleanup',
'curl_easy_duphandle',
'curl_easy_getinfo',
'curl_easy_init',
'curl_easy_reset',
'curl_easy_setopt',
'curl_easy_strerror',
'curl_getdate',
'curl_http_version_1_0',
'curl_http_version_1_1',
'curl_http_version_none',
'curl_ipresolve_v4',
'curl_ipresolve_v6',
'curl_ipresolve_whatever',
'curl_multi_perform',
'curl_multi_result',
'curl_netrc_ignored',
'curl_netrc_optional',
'curl_netrc_required',
'curl_version_asynchdns',
'curl_version_debug',
'curl_version_gssnegotiate',
'curl_version_idn',
'curl_version_info',
'curl_version_ipv6',
'curl_version_kerberos4',
'curl_version_largefile',
'curl_version_libz',
'curl_version_ntlm',
'curl_version_spnego',
'curl_version_ssl',
'curl_version',
'curlauth_any',
'curlauth_anysafe',
'curlauth_basic',
'curlauth_digest',
'curlauth_gssnegotiate',
'curlauth_none',
'curlauth_ntlm',
'curle_aborted_by_callback',
'curle_bad_calling_order',
'curle_bad_content_encoding',
'curle_bad_download_resume',
'curle_bad_function_argument',
'curle_bad_password_entered',
'curle_couldnt_connect',
'curle_couldnt_resolve_host',
'curle_couldnt_resolve_proxy',
'curle_failed_init',
'curle_file_couldnt_read_file',
'curle_filesize_exceeded',
'curle_ftp_access_denied',
'curle_ftp_cant_get_host',
'curle_ftp_cant_reconnect',
'curle_ftp_couldnt_get_size',
'curle_ftp_couldnt_retr_file',
'curle_ftp_couldnt_set_ascii',
'curle_ftp_couldnt_set_binary',
'curle_ftp_couldnt_use_rest',
'curle_ftp_port_failed',
'curle_ftp_quote_error',
'curle_ftp_ssl_failed',
'curle_ftp_user_password_incorrect',
'curle_ftp_weird_227_format',
'curle_ftp_weird_pass_reply',
'curle_ftp_weird_pasv_reply',
'curle_ftp_weird_server_reply',
'curle_ftp_weird_user_reply',
'curle_ftp_write_error',
'curle_function_not_found',
'curle_got_nothing',
'curle_http_post_error',
'curle_http_range_error',
'curle_http_returned_error',
'curle_interface_failed',
'curle_ldap_cannot_bind',
'curle_ldap_invalid_url',
'curle_ldap_search_failed',
'curle_library_not_found',
'curle_login_denied',
'curle_malformat_user',
'curle_obsolete',
'curle_ok',
'curle_operation_timeouted',
'curle_out_of_memory',
'curle_partial_file',
'curle_read_error',
'curle_recv_error',
'curle_send_error',
'curle_send_fail_rewind',
'curle_share_in_use',
'curle_ssl_cacert',
'curle_ssl_certproblem',
'curle_ssl_cipher',
'curle_ssl_connect_error',
'curle_ssl_engine_initfailed',
'curle_ssl_engine_notfound',
'curle_ssl_engine_setfailed',
'curle_ssl_peer_certificate',
'curle_telnet_option_syntax',
'curle_too_many_redirects',
'curle_unknown_telnet_option',
'curle_unsupported_protocol',
'curle_url_malformat_user',
'curle_url_malformat',
'curle_write_error',
'curlftpauth_default',
'curlftpauth_ssl',
'curlftpauth_tls',
'curlftpssl_all',
'curlftpssl_control',
'curlftpssl_last',
'curlftpssl_none',
'curlftpssl_try',
'curlinfo_connect_time',
'curlinfo_content_length_download',
'curlinfo_content_length_upload',
'curlinfo_content_type',
'curlinfo_effective_url',
'curlinfo_filetime',
'curlinfo_header_size',
'curlinfo_http_connectcode',
'curlinfo_httpauth_avail',
'curlinfo_namelookup_time',
'curlinfo_num_connects',
'curlinfo_os_errno',
'curlinfo_pretransfer_time',
'curlinfo_proxyauth_avail',
'curlinfo_redirect_count',
'curlinfo_redirect_time',
'curlinfo_request_size',
'curlinfo_response_code',
'curlinfo_size_download',
'curlinfo_size_upload',
'curlinfo_speed_download',
'curlinfo_speed_upload',
'curlinfo_ssl_engines',
'curlinfo_ssl_verifyresult',
'curlinfo_starttransfer_time',
'curlinfo_total_time',
'curlmsg_done',
'curlopt_autoreferer',
'curlopt_buffersize',
'curlopt_cainfo',
'curlopt_capath',
'curlopt_connecttimeout',
'curlopt_cookie',
'curlopt_cookiefile',
'curlopt_cookiejar',
'curlopt_cookiesession',
'curlopt_crlf',
'curlopt_customrequest',
'curlopt_dns_use_global_cache',
'curlopt_egdsocket',
'curlopt_encoding',
'curlopt_failonerror',
'curlopt_filetime',
'curlopt_followlocation',
'curlopt_forbid_reuse',
'curlopt_fresh_connect',
'curlopt_ftp_account',
'curlopt_ftp_create_missing_dirs',
'curlopt_ftp_response_timeout',
'curlopt_ftp_ssl',
'curlopt_ftp_use_eprt',
'curlopt_ftp_use_epsv',
'curlopt_ftpappend',
'curlopt_ftplistonly',
'curlopt_ftpport',
'curlopt_ftpsslauth',
'curlopt_header',
'curlopt_http_version',
'curlopt_http200aliases',
'curlopt_httpauth',
'curlopt_httpget',
'curlopt_httpheader',
'curlopt_httppost',
'curlopt_httpproxytunnel',
'curlopt_infilesize_large',
'curlopt_infilesize',
'curlopt_interface',
'curlopt_ipresolve',
'curlopt_krb4level',
'curlopt_low_speed_limit',
'curlopt_low_speed_time',
'curlopt_mail_from',
'curlopt_mail_rcpt',
'curlopt_maxconnects',
'curlopt_maxfilesize_large',
'curlopt_maxfilesize',
'curlopt_maxredirs',
'curlopt_netrc_file',
'curlopt_netrc',
'curlopt_nobody',
'curlopt_noprogress',
'curlopt_port',
'curlopt_post',
'curlopt_postfields',
'curlopt_postfieldsize_large',
'curlopt_postfieldsize',
'curlopt_postquote',
'curlopt_prequote',
'curlopt_proxy',
'curlopt_proxyauth',
'curlopt_proxyport',
'curlopt_proxytype',
'curlopt_proxyuserpwd',
'curlopt_put',
'curlopt_quote',
'curlopt_random_file',
'curlopt_range',
'curlopt_readdata',
'curlopt_referer',
'curlopt_resume_from_large',
'curlopt_resume_from',
'curlopt_ssl_cipher_list',
'curlopt_ssl_verifyhost',
'curlopt_ssl_verifypeer',
'curlopt_sslcert',
'curlopt_sslcerttype',
'curlopt_sslengine_default',
'curlopt_sslengine',
'curlopt_sslkey',
'curlopt_sslkeypasswd',
'curlopt_sslkeytype',
'curlopt_sslversion',
'curlopt_tcp_nodelay',
'curlopt_timecondition',
'curlopt_timeout',
'curlopt_timevalue',
'curlopt_transfertext',
'curlopt_unrestricted_auth',
'curlopt_upload',
'curlopt_url',
'curlopt_use_ssl',
'curlopt_useragent',
'curlopt_userpwd',
'curlopt_verbose',
'curlopt_writedata',
'curlproxy_http',
'curlproxy_socks4',
'curlproxy_socks5',
'database_adddefaultsqlitehost',
'database_database',
'database_initialize',
'database_name',
'database_qs',
'database_table_database_tables',
'database_table_datasource_databases',
'database_table_datasource_hosts',
'database_table_datasources',
'database_table_table_fields',
'database_util_cleanpath',
'dbgp_stop_stack_name',
'debugging_break',
'debugging_breakpoint_get',
'debugging_breakpoint_list',
'debugging_breakpoint_remove',
'debugging_breakpoint_set',
'debugging_breakpoint_update',
'debugging_context_locals',
'debugging_context_self',
'debugging_context_vars',
'debugging_detach',
'debugging_enabled',
'debugging_get_context',
'debugging_get_stack',
'debugging_run',
'debugging_step_in',
'debugging_step_out',
'debugging_step_over',
'debugging_stop',
'debugging_terminate',
'decimal_random',
'decompress',
'decrypt_blowfish',
'define_atbegin',
'define_atend',
'dns_default',
'dns_lookup',
'document',
'email_attachment_mime_type',
'email_batch',
'email_digestchallenge',
'email_digestresponse',
'email_extract',
'email_findemails',
'email_fix_address_list',
'email_fix_address',
'email_fs_error_clean',
'email_immediate',
'email_initialize',
'email_merge',
'email_mxlookup',
'email_pop_priv_extract',
'email_pop_priv_quote',
'email_pop_priv_substring',
'email_queue',
'email_result',
'email_safeemail',
'email_send',
'email_status',
'email_token',
'email_translatebreakstocrlf',
'encode_qheader',
'encoding_iso88591',
'encoding_utf8',
'encrypt_blowfish',
'encrypt_crammd5',
'encrypt_hmac',
'encrypt_md5',
'eol',
'eq',
'error_code_aborted',
'error_code_dividebyzero',
'error_code_filenotfound',
'error_code_invalidparameter',
'error_code_methodnotfound',
'error_code_networkerror',
'error_code_noerror',
'error_code_resnotfound',
'error_code_runtimeassertion',
'error_code',
'error_msg_aborted',
'error_msg_dividebyzero',
'error_msg_filenotfound',
'error_msg_invalidparameter',
'error_msg_methodnotfound',
'error_msg_networkerror',
'error_msg_noerror',
'error_msg_resnotfound',
'error_msg_runtimeassertion',
'error_msg',
'error_obj',
'error_pop',
'error_push',
'error_reset',
'error_stack',
'escape_tag',
'evdns_resolve_ipv4',
'evdns_resolve_ipv6',
'evdns_resolve_reverse_ipv6',
'evdns_resolve_reverse',
'ew',
'fail_if',
'fail_ifnot',
'fail_now',
'fail',
'failure_clear',
'fastcgi_createfcgirequest',
'fastcgi_handlecon',
'fastcgi_handlereq',
'fastcgi_initialize',
'fastcgi_initiate_request',
'fcgi_abort_request',
'fcgi_authorize',
'fcgi_begin_request',
'fcgi_bodychunksize',
'fcgi_cant_mpx_conn',
'fcgi_data',
'fcgi_end_request',
'fcgi_filter',
'fcgi_get_values_result',
'fcgi_get_values',
'fcgi_keep_conn',
'fcgi_makeendrequestbody',
'fcgi_makestdoutbody',
'fcgi_max_conns',
'fcgi_max_reqs',
'fcgi_mpxs_conns',
'fcgi_null_request_id',
'fcgi_overloaded',
'fcgi_params',
'fcgi_read_timeout_seconds',
'fcgi_readparam',
'fcgi_request_complete',
'fcgi_responder',
'fcgi_stderr',
'fcgi_stdin',
'fcgi_stdout',
'fcgi_unknown_role',
'fcgi_unknown_type',
'fcgi_version_1',
'fcgi_x_stdin',
'field_name',
'field_names',
'field',
'file_copybuffersize',
'file_defaultencoding',
'file_forceroot',
'file_modechar',
'file_modeline',
'file_stderr',
'file_stdin',
'file_stdout',
'file_tempfile',
'filemakerds_initialize',
'filemakerds',
'found_count',
'ft',
'ftp_deletefile',
'ftp_getdata',
'ftp_getfile',
'ftp_getlisting',
'ftp_putdata',
'ftp_putfile',
'full',
'generateforeach',
'gt',
'gte',
'handle_failure',
'handle',
'hash_primes',
'html_comment',
'http_char_colon',
'http_char_cr',
'http_char_htab',
'http_char_lf',
'http_char_question',
'http_char_space',
'http_default_files',
'http_read_headers',
'http_read_timeout_secs',
'http_server_apps_path',
'http_server_request_logger',
'if_empty',
'if_false',
'if_null',
'if_true',
'include_cache_compare',
'include_currentpath',
'include_filepath',
'include_localpath',
'include_once',
'include_path',
'include_raw',
'include_url',
'include',
'includes',
'inline_colinfo_name_pos',
'inline_colinfo_type_pos',
'inline_colinfo_valuelist_pos',
'inline_columninfo_pos',
'inline_foundcount_pos',
'inline_namedget',
'inline_namedput',
'inline_resultrows_pos',
'inline_scopeget',
'inline_scopepop',
'inline_scopepush',
'inline',
'integer_bitor',
'integer_random',
'io_dir_dt_blk',
'io_dir_dt_chr',
'io_dir_dt_dir',
'io_dir_dt_fifo',
'io_dir_dt_lnk',
'io_dir_dt_reg',
'io_dir_dt_sock',
'io_dir_dt_unknown',
'io_dir_dt_wht',
'io_file_access',
'io_file_chdir',
'io_file_chmod',
'io_file_chown',
'io_file_dirname',
'io_file_f_dupfd',
'io_file_f_getfd',
'io_file_f_getfl',
'io_file_f_getlk',
'io_file_f_rdlck',
'io_file_f_setfd',
'io_file_f_setfl',
'io_file_f_setlk',
'io_file_f_setlkw',
'io_file_f_test',
'io_file_f_tlock',
'io_file_f_ulock',
'io_file_f_unlck',
'io_file_f_wrlck',
'io_file_fd_cloexec',
'io_file_fioasync',
'io_file_fioclex',
'io_file_fiodtype',
'io_file_fiogetown',
'io_file_fionbio',
'io_file_fionclex',
'io_file_fionread',
'io_file_fiosetown',
'io_file_getcwd',
'io_file_lchown',
'io_file_link',
'io_file_lockf',
'io_file_lstat_atime',
'io_file_lstat_mode',
'io_file_lstat_mtime',
'io_file_lstat_size',
'io_file_mkdir',
'io_file_mkfifo',
'io_file_mkstemp',
'io_file_o_append',
'io_file_o_async',
'io_file_o_creat',
'io_file_o_excl',
'io_file_o_exlock',
'io_file_o_fsync',
'io_file_o_nofollow',
'io_file_o_nonblock',
'io_file_o_rdonly',
'io_file_o_rdwr',
'io_file_o_shlock',
'io_file_o_sync',
'io_file_o_trunc',
'io_file_o_wronly',
'io_file_pipe',
'io_file_readlink',
'io_file_realpath',
'io_file_remove',
'io_file_rename',
'io_file_rmdir',
'io_file_s_ifblk',
'io_file_s_ifchr',
'io_file_s_ifdir',
'io_file_s_ififo',
'io_file_s_iflnk',
'io_file_s_ifmt',
'io_file_s_ifreg',
'io_file_s_ifsock',
'io_file_s_irgrp',
'io_file_s_iroth',
'io_file_s_irusr',
'io_file_s_irwxg',
'io_file_s_irwxo',
'io_file_s_irwxu',
'io_file_s_isgid',
'io_file_s_isuid',
'io_file_s_isvtx',
'io_file_s_iwgrp',
'io_file_s_iwoth',
'io_file_s_iwusr',
'io_file_s_ixgrp',
'io_file_s_ixoth',
'io_file_s_ixusr',
'io_file_seek_cur',
'io_file_seek_end',
'io_file_seek_set',
'io_file_stat_atime',
'io_file_stat_mode',
'io_file_stat_mtime',
'io_file_stat_size',
'io_file_stderr',
'io_file_stdin',
'io_file_stdout',
'io_file_symlink',
'io_file_tempnam',
'io_file_truncate',
'io_file_umask',
'io_file_unlink',
'io_net_accept',
'io_net_af_inet',
'io_net_af_inet6',
'io_net_af_unix',
'io_net_bind',
'io_net_connect',
'io_net_getpeername',
'io_net_getsockname',
'io_net_ipproto_ip',
'io_net_ipproto_udp',
'io_net_listen',
'io_net_msg_oob',
'io_net_msg_peek',
'io_net_msg_waitall',
'io_net_recv',
'io_net_recvfrom',
'io_net_send',
'io_net_sendto',
'io_net_shut_rd',
'io_net_shut_rdwr',
'io_net_shut_wr',
'io_net_shutdown',
'io_net_so_acceptconn',
'io_net_so_broadcast',
'io_net_so_debug',
'io_net_so_dontroute',
'io_net_so_error',
'io_net_so_keepalive',
'io_net_so_linger',
'io_net_so_oobinline',
'io_net_so_rcvbuf',
'io_net_so_rcvlowat',
'io_net_so_rcvtimeo',
'io_net_so_reuseaddr',
'io_net_so_sndbuf',
'io_net_so_sndlowat',
'io_net_so_sndtimeo',
'io_net_so_timestamp',
'io_net_so_type',
'io_net_so_useloopback',
'io_net_sock_dgram',
'io_net_sock_raw',
'io_net_sock_rdm',
'io_net_sock_seqpacket',
'io_net_sock_stream',
'io_net_socket',
'io_net_sol_socket',
'io_net_ssl_accept',
'io_net_ssl_begin',
'io_net_ssl_connect',
'io_net_ssl_end',
'io_net_ssl_error',
'io_net_ssl_errorstring',
'io_net_ssl_funcerrorstring',
'io_net_ssl_liberrorstring',
'io_net_ssl_read',
'io_net_ssl_reasonerrorstring',
'io_net_ssl_setacceptstate',
'io_net_ssl_setconnectstate',
'io_net_ssl_setverifylocations',
'io_net_ssl_shutdown',
'io_net_ssl_usecertificatechainfile',
'io_net_ssl_useprivatekeyfile',
'io_net_ssl_write',
'java_jvm_create',
'java_jvm_getenv',
'jdbc_initialize',
'json_back_slash',
'json_back_space',
'json_close_array',
'json_close_object',
'json_colon',
'json_comma',
'json_consume_array',
'json_consume_object',
'json_consume_string',
'json_consume_token',
'json_cr',
'json_debug',
'json_deserialize',
'json_e_lower',
'json_e_upper',
'json_f_lower',
'json_form_feed',
'json_forward_slash',
'json_lf',
'json_n_lower',
'json_negative',
'json_open_array',
'json_open_object',
'json_period',
'json_quote_double',
'json_rpccall',
'json_serialize',
'json_t_lower',
'json_tab',
'json_white_space',
'keycolumn_name',
'keycolumn_value',
'keyfield_name',
'keyfield_value',
'lasso_currentaction',
'lasso_errorreporting',
'lasso_executiontimelimit',
'lasso_methodexists',
'lasso_tagexists',
'lasso_uniqueid',
'lasso_version',
'lassoapp_current_app',
'lassoapp_current_include',
'lassoapp_do_with_include',
'lassoapp_exists',
'lassoapp_find_missing_file',
'lassoapp_format_mod_date',
'lassoapp_get_capabilities_name',
'lassoapp_include_current',
'lassoapp_include',
'lassoapp_initialize_db',
'lassoapp_initialize',
'lassoapp_invoke_resource',
'lassoapp_issourcefileextension',
'lassoapp_link',
'lassoapp_load_module',
'lassoapp_mime_get',
'lassoapp_mime_type_appcache',
'lassoapp_mime_type_css',
'lassoapp_mime_type_csv',
'lassoapp_mime_type_doc',
'lassoapp_mime_type_docx',
'lassoapp_mime_type_eof',
'lassoapp_mime_type_eot',
'lassoapp_mime_type_gif',
'lassoapp_mime_type_html',
'lassoapp_mime_type_ico',
'lassoapp_mime_type_jpg',
'lassoapp_mime_type_js',
'lassoapp_mime_type_lasso',
'lassoapp_mime_type_map',
'lassoapp_mime_type_pdf',
'lassoapp_mime_type_png',
'lassoapp_mime_type_ppt',
'lassoapp_mime_type_rss',
'lassoapp_mime_type_svg',
'lassoapp_mime_type_swf',
'lassoapp_mime_type_tif',
'lassoapp_mime_type_ttf',
'lassoapp_mime_type_txt',
'lassoapp_mime_type_woff',
'lassoapp_mime_type_xaml',
'lassoapp_mime_type_xap',
'lassoapp_mime_type_xbap',
'lassoapp_mime_type_xhr',
'lassoapp_mime_type_xml',
'lassoapp_mime_type_zip',
'lassoapp_path_to_method_name',
'lassoapp_settingsdb',
'layout_name',
'lcapi_datasourceadd',
'lcapi_datasourcecloseconnection',
'lcapi_datasourcedelete',
'lcapi_datasourceduplicate',
'lcapi_datasourceexecsql',
'lcapi_datasourcefindall',
'lcapi_datasourceimage',
'lcapi_datasourceinfo',
'lcapi_datasourceinit',
'lcapi_datasourcematchesname',
'lcapi_datasourcenames',
'lcapi_datasourcenothing',
'lcapi_datasourceopand',
'lcapi_datasourceopany',
'lcapi_datasourceopbw',
'lcapi_datasourceopct',
'lcapi_datasourceopeq',
'lcapi_datasourceopew',
'lcapi_datasourceopft',
'lcapi_datasourceopgt',
'lcapi_datasourceopgteq',
'lcapi_datasourceopin',
'lcapi_datasourceoplt',
'lcapi_datasourceoplteq',
'lcapi_datasourceopnbw',
'lcapi_datasourceopnct',
'lcapi_datasourceopneq',
'lcapi_datasourceopnew',
'lcapi_datasourceopnin',
'lcapi_datasourceopno',
'lcapi_datasourceopnot',
'lcapi_datasourceopnrx',
'lcapi_datasourceopor',
'lcapi_datasourceoprx',
'lcapi_datasourcepreparesql',
'lcapi_datasourceprotectionnone',
'lcapi_datasourceprotectionreadonly',
'lcapi_datasourcerandom',
'lcapi_datasourceschemanames',
'lcapi_datasourcescripts',
'lcapi_datasourcesearch',
'lcapi_datasourcesortascending',
'lcapi_datasourcesortcustom',
'lcapi_datasourcesortdescending',
'lcapi_datasourcetablenames',
'lcapi_datasourceterm',
'lcapi_datasourcetickle',
'lcapi_datasourcetypeblob',
'lcapi_datasourcetypeboolean',
'lcapi_datasourcetypedate',
'lcapi_datasourcetypedecimal',
'lcapi_datasourcetypeinteger',
'lcapi_datasourcetypestring',
'lcapi_datasourceunpreparesql',
'lcapi_datasourceupdate',
'lcapi_fourchartointeger',
'lcapi_listdatasources',
'lcapi_loadmodule',
'lcapi_loadmodules',
'lcapi_updatedatasourceslist',
'ldap_scope_base',
'ldap_scope_onelevel',
'ldap_scope_subtree',
'library_once',
'library',
'ljapi_initialize',
'locale_availablelocales',
'locale_canada',
'locale_canadafrench',
'locale_china',
'locale_chinese',
'locale_default',
'locale_english',
'locale_format_style_date_time',
'locale_format_style_default',
'locale_format_style_full',
'locale_format_style_long',
'locale_format_style_medium',
'locale_format_style_none',
'locale_format_style_short',
'locale_format',
'locale_france',
'locale_french',
'locale_german',
'locale_germany',
'locale_isocountries',
'locale_isolanguages',
'locale_italian',
'locale_italy',
'locale_japan',
'locale_japanese',
'locale_korea',
'locale_korean',
'locale_prc',
'locale_setdefault',
'locale_simplifiedchinese',
'locale_taiwan',
'locale_traditionalchinese',
'locale_uk',
'locale_us',
'log_always',
'log_critical',
'log_deprecated',
'log_destination_console',
'log_destination_database',
'log_destination_file',
'log_detail',
'log_initialize',
'log_level_critical',
'log_level_deprecated',
'log_level_detail',
'log_level_sql',
'log_level_warning',
'log_max_file_size',
'log_setdestination',
'log_sql',
'log_trim_file_size',
'log_warning',
'log',
'loop_abort',
'loop_continue',
'loop_count',
'loop_key_pop',
'loop_key_push',
'loop_key',
'loop_pop',
'loop_push',
'loop_value_pop',
'loop_value_push',
'loop_value',
'loop',
'lt',
'lte',
'main_thread_only',
'max',
'maxrecords_value',
'median',
'method_name',
'micros',
'millis',
'min',
'minimal',
'mongo_insert_continue_on_error',
'mongo_insert_no_validate',
'mongo_insert_none',
'mongo_query_await_data',
'mongo_query_exhaust',
'mongo_query_no_cursor_timeout',
'mongo_query_none',
'mongo_query_oplog_replay',
'mongo_query_partial',
'mongo_query_slave_ok',
'mongo_query_tailable_cursor',
'mongo_remove_none',
'mongo_remove_single_remove',
'mongo_update_multi_update',
'mongo_update_no_validate',
'mongo_update_none',
'mongo_update_upsert',
'mustache_compile_file',
'mustache_compile_string',
'mustache_include',
'mysqlds',
'namespace_global',
'namespace_import',
'namespace_using',
'nbw',
'ncn',
'neq',
'net_connectinprogress',
'net_connectok',
'net_typessl',
'net_typessltcp',
'net_typessludp',
'net_typetcp',
'net_typeudp',
'net_waitread',
'net_waittimeout',
'net_waitwrite',
'new',
'none',
'nrx',
'nslookup',
'odbc_session_driver_mssql',
'odbc',
'output_none',
'output',
'pdf_package',
'pdf_rectangle',
'pdf_serve',
'pi',
'portal',
'postgresql',
'process',
'protect_now',
'protect',
'queriable_average',
'queriable_defaultcompare',
'queriable_do',
'queriable_internal_combinebindings',
'queriable_max',
'queriable_min',
'queriable_qsort',
'queriable_reversecompare',
'queriable_sum',
'random_seed',
'range',
'records_array',
'records_map',
'records',
'redirect_url',
'referer_url',
'referrer_url',
'register_thread',
'register',
'response_filepath',
'response_localpath',
'response_path',
'response_realm',
'response_root',
'resultset_count',
'resultset',
'resultsets',
'rows_array',
'rows_impl',
'rows',
'rx',
'schema_name',
'security_database',
'security_default_realm',
'security_initialize',
'security_table_groups',
'security_table_ug_map',
'security_table_users',
'selected',
'series',
'server_admin',
'server_ip',
'server_name',
'server_port',
'server_protocol',
'server_push',
'server_signature',
'server_software',
'session_abort',
'session_addvar',
'session_decorate',
'session_deleteexpired',
'session_end',
'session_getdefaultdriver',
'session_id',
'session_initialize',
'session_removevar',
'session_result',
'session_setdefaultdriver',
'session_start',
'shown_count',
'shown_first',
'shown_last',
'site_id',
'site_name',
'skiprecords_value',
'sleep',
'split_thread',
'sqlite_abort',
'sqlite_auth',
'sqlite_blob',
'sqlite_busy',
'sqlite_cantopen',
'sqlite_constraint',
'sqlite_corrupt',
'sqlite_createdb',
'sqlite_done',
'sqlite_empty',
'sqlite_error',
'sqlite_float',
'sqlite_format',
'sqlite_full',
'sqlite_integer',
'sqlite_internal',
'sqlite_interrupt',
'sqlite_ioerr',
'sqlite_locked',
'sqlite_mismatch',
'sqlite_misuse',
'sqlite_nolfs',
'sqlite_nomem',
'sqlite_notadb',
'sqlite_notfound',
'sqlite_null',
'sqlite_ok',
'sqlite_perm',
'sqlite_protocol',
'sqlite_range',
'sqlite_readonly',
'sqlite_row',
'sqlite_schema',
'sqlite_setsleepmillis',
'sqlite_setsleeptries',
'sqlite_text',
'sqlite_toobig',
'sqliteconnector',
'staticarray_join',
'stdout',
'stdoutnl',
'string_validcharset',
'suspend',
'sys_appspath',
'sys_chroot',
'sys_clock',
'sys_clockspersec',
'sys_credits',
'sys_databasespath',
'sys_detach_exec',
'sys_difftime',
'sys_dll_ext',
'sys_drand48',
'sys_environ',
'sys_eol',
'sys_erand48',
'sys_errno',
'sys_exec_pid_to_os_pid',
'sys_exec',
'sys_exit',
'sys_fork',
'sys_garbagecollect',
'sys_getbytessincegc',
'sys_getchar',
'sys_getegid',
'sys_getenv',
'sys_geteuid',
'sys_getgid',
'sys_getgrnam',
'sys_getheapfreebytes',
'sys_getheapsize',
'sys_getlogin',
'sys_getpid',
'sys_getppid',
'sys_getpwnam',
'sys_getpwuid',
'sys_getstartclock',
'sys_getthreadcount',
'sys_getuid',
'sys_growheapby',
'sys_homepath',
'sys_is_full_path',
'sys_is_windows',
'sys_isfullpath',
'sys_iswindows',
'sys_iterate',
'sys_jrand48',
'sys_kill_exec',
'sys_kill',
'sys_lcong48',
'sys_librariespath',
'sys_listtraits',
'sys_listtypes',
'sys_listunboundmethods',
'sys_loadlibrary',
'sys_lrand48',
'sys_masterhomepath',
'sys_mrand48',
'sys_nrand48',
'sys_pid_exec',
'sys_pointersize',
'sys_rand',
'sys_random',
'sys_seed48',
'sys_setenv',
'sys_setgid',
'sys_setsid',
'sys_setuid',
'sys_sigabrt',
'sys_sigalrm',
'sys_sigbus',
'sys_sigchld',
'sys_sigcont',
'sys_sigfpe',
'sys_sighup',
'sys_sigill',
'sys_sigint',
'sys_sigkill',
'sys_sigpipe',
'sys_sigprof',
'sys_sigquit',
'sys_sigsegv',
'sys_sigstop',
'sys_sigsys',
'sys_sigterm',
'sys_sigtrap',
'sys_sigtstp',
'sys_sigttin',
'sys_sigttou',
'sys_sigurg',
'sys_sigusr1',
'sys_sigusr2',
'sys_sigvtalrm',
'sys_sigxcpu',
'sys_sigxfsz',
'sys_srand',
'sys_srand48',
'sys_srandom',
'sys_strerror',
'sys_supportpath',
'sys_test_exec',
'sys_time',
'sys_uname',
'sys_unsetenv',
'sys_usercapimodulepath',
'sys_userstartuppath',
'sys_version',
'sys_wait_exec',
'sys_waitpid',
'sys_wcontinued',
'sys_while',
'sys_wnohang',
'sys_wuntraced',
'table_name',
'tag_exists',
'tag_name',
'thread_var_get',
'thread_var_pop',
'thread_var_push',
'threadvar_find',
'threadvar_get',
'threadvar_set_asrt',
'threadvar_set',
'timer',
'token_value',
'treemap',
'u_lb_alphabetic',
'u_lb_ambiguous',
'u_lb_break_after',
'u_lb_break_before',
'u_lb_break_both',
'u_lb_break_symbols',
'u_lb_carriage_return',
'u_lb_close_punctuation',
'u_lb_combining_mark',
'u_lb_complex_context',
'u_lb_contingent_break',
'u_lb_exclamation',
'u_lb_glue',
'u_lb_h2',
'u_lb_h3',
'u_lb_hyphen',
'u_lb_ideographic',
'u_lb_infix_numeric',
'u_lb_inseparable',
'u_lb_jl',
'u_lb_jt',
'u_lb_jv',
'u_lb_line_feed',
'u_lb_mandatory_break',
'u_lb_next_line',
'u_lb_nonstarter',
'u_lb_numeric',
'u_lb_open_punctuation',
'u_lb_postfix_numeric',
'u_lb_prefix_numeric',
'u_lb_quotation',
'u_lb_space',
'u_lb_surrogate',
'u_lb_unknown',
'u_lb_word_joiner',
'u_lb_zwspace',
'u_nt_decimal',
'u_nt_digit',
'u_nt_none',
'u_nt_numeric',
'u_sb_aterm',
'u_sb_close',
'u_sb_format',
'u_sb_lower',
'u_sb_numeric',
'u_sb_oletter',
'u_sb_other',
'u_sb_sep',
'u_sb_sp',
'u_sb_sterm',
'u_sb_upper',
'u_wb_aletter',
'u_wb_extendnumlet',
'u_wb_format',
'u_wb_katakana',
'u_wb_midletter',
'u_wb_midnum',
'u_wb_numeric',
'u_wb_other',
'ucal_ampm',
'ucal_dayofmonth',
'ucal_dayofweek',
'ucal_dayofweekinmonth',
'ucal_dayofyear',
'ucal_daysinfirstweek',
'ucal_dowlocal',
'ucal_dstoffset',
'ucal_era',
'ucal_extendedyear',
'ucal_firstdayofweek',
'ucal_hour',
'ucal_hourofday',
'ucal_julianday',
'ucal_lenient',
'ucal_listtimezones',
'ucal_millisecond',
'ucal_millisecondsinday',
'ucal_minute',
'ucal_month',
'ucal_second',
'ucal_weekofmonth',
'ucal_weekofyear',
'ucal_year',
'ucal_yearwoy',
'ucal_zoneoffset',
'uchar_age',
'uchar_alphabetic',
'uchar_ascii_hex_digit',
'uchar_bidi_class',
'uchar_bidi_control',
'uchar_bidi_mirrored',
'uchar_bidi_mirroring_glyph',
'uchar_block',
'uchar_canonical_combining_class',
'uchar_case_folding',
'uchar_case_sensitive',
'uchar_dash',
'uchar_decomposition_type',
'uchar_default_ignorable_code_point',
'uchar_deprecated',
'uchar_diacritic',
'uchar_east_asian_width',
'uchar_extender',
'uchar_full_composition_exclusion',
'uchar_general_category_mask',
'uchar_general_category',
'uchar_grapheme_base',
'uchar_grapheme_cluster_break',
'uchar_grapheme_extend',
'uchar_grapheme_link',
'uchar_hangul_syllable_type',
'uchar_hex_digit',
'uchar_hyphen',
'uchar_id_continue',
'uchar_ideographic',
'uchar_ids_binary_operator',
'uchar_ids_trinary_operator',
'uchar_iso_comment',
'uchar_join_control',
'uchar_joining_group',
'uchar_joining_type',
'uchar_lead_canonical_combining_class',
'uchar_line_break',
'uchar_logical_order_exception',
'uchar_lowercase_mapping',
'uchar_lowercase',
'uchar_math',
'uchar_name',
'uchar_nfc_inert',
'uchar_nfc_quick_check',
'uchar_nfd_inert',
'uchar_nfd_quick_check',
'uchar_nfkc_inert',
'uchar_nfkc_quick_check',
'uchar_nfkd_inert',
'uchar_nfkd_quick_check',
'uchar_noncharacter_code_point',
'uchar_numeric_type',
'uchar_numeric_value',
'uchar_pattern_syntax',
'uchar_pattern_white_space',
'uchar_posix_alnum',
'uchar_posix_blank',
'uchar_posix_graph',
'uchar_posix_print',
'uchar_posix_xdigit',
'uchar_quotation_mark',
'uchar_radical',
'uchar_s_term',
'uchar_script',
'uchar_segment_starter',
'uchar_sentence_break',
'uchar_simple_case_folding',
'uchar_simple_lowercase_mapping',
'uchar_simple_titlecase_mapping',
'uchar_simple_uppercase_mapping',
'uchar_soft_dotted',
'uchar_terminal_punctuation',
'uchar_titlecase_mapping',
'uchar_trail_canonical_combining_class',
'uchar_unicode_1_name',
'uchar_unified_ideograph',
'uchar_uppercase_mapping',
'uchar_uppercase',
'uchar_variation_selector',
'uchar_white_space',
'uchar_word_break',
'uchar_xid_continue',
'uncompress',
'usage',
'uuid_compare',
'uuid_copy',
'uuid_generate_random',
'uuid_generate_time',
'uuid_generate',
'uuid_is_null',
'uuid_parse',
'uuid_unparse_lower',
'uuid_unparse_upper',
'uuid_unparse',
'value_list',
'value_listitem',
'valuelistitem',
'var_keys',
'var_values',
'wap_isenabled',
'wap_maxbuttons',
'wap_maxcolumns',
'wap_maxhorzpixels',
'wap_maxrows',
'wap_maxvertpixels',
'web_handlefcgirequest',
'web_node_content_representation_css',
'web_node_content_representation_html',
'web_node_content_representation_js',
'web_node_content_representation_xhr',
'web_node_forpath',
'web_nodes_initialize',
'web_nodes_normalizeextension',
'web_nodes_processcontentnode',
'web_nodes_requesthandler',
'web_response_nodesentry',
'web_router_database',
'web_router_initialize',
'websocket_handler_timeout',
'wexitstatus',
'wifcontinued',
'wifexited',
'wifsignaled',
'wifstopped',
'wstopsig',
'wtermsig',
'xml_transform',
'xml',
'zip_add_dir',
'zip_add',
'zip_checkcons',
'zip_close',
'zip_cm_bzip2',
'zip_cm_default',
'zip_cm_deflate',
'zip_cm_deflate64',
'zip_cm_implode',
'zip_cm_pkware_implode',
'zip_cm_reduce_1',
'zip_cm_reduce_2',
'zip_cm_reduce_3',
'zip_cm_reduce_4',
'zip_cm_shrink',
'zip_cm_store',
'zip_create',
'zip_delete',
'zip_em_3des_112',
'zip_em_3des_168',
'zip_em_aes_128',
'zip_em_aes_192',
'zip_em_aes_256',
'zip_em_des',
'zip_em_none',
'zip_em_rc2_old',
'zip_em_rc2',
'zip_em_rc4',
'zip_em_trad_pkware',
'zip_em_unknown',
'zip_er_changed',
'zip_er_close',
'zip_er_compnotsupp',
'zip_er_crc',
'zip_er_deleted',
'zip_er_eof',
'zip_er_exists',
'zip_er_incons',
'zip_er_internal',
'zip_er_inval',
'zip_er_memory',
'zip_er_multidisk',
'zip_er_noent',
'zip_er_nozip',
'zip_er_ok',
'zip_er_open',
'zip_er_read',
'zip_er_remove',
'zip_er_rename',
'zip_er_seek',
'zip_er_tmpopen',
'zip_er_write',
'zip_er_zipclosed',
'zip_er_zlib',
'zip_error_get_sys_type',
'zip_error_get',
'zip_error_to_str',
'zip_et_none',
'zip_et_sys',
'zip_et_zlib',
'zip_excl',
'zip_fclose',
'zip_file_error_get',
'zip_file_strerror',
'zip_fl_compressed',
'zip_fl_nocase',
'zip_fl_nodir',
'zip_fl_unchanged',
'zip_fopen_index',
'zip_fopen',
'zip_fread',
'zip_get_archive_comment',
'zip_get_file_comment',
'zip_get_name',
'zip_get_num_files',
'zip_name_locate',
'zip_open',
'zip_rename',
'zip_replace',
'zip_set_archive_comment',
'zip_set_file_comment',
'zip_stat_index',
'zip_stat',
'zip_strerror',
'zip_unchange_all',
'zip_unchange_archive',
'zip_unchange',
'zlib_version',
),
'Lasso 8 Tags': (
'__char',
'__sync_timestamp__',
'_admin_addgroup',
'_admin_adduser',
'_admin_defaultconnector',
'_admin_defaultconnectornames',
'_admin_defaultdatabase',
'_admin_defaultfield',
'_admin_defaultgroup',
'_admin_defaulthost',
'_admin_defaulttable',
'_admin_defaultuser',
'_admin_deleteconnector',
'_admin_deletedatabase',
'_admin_deletefield',
'_admin_deletegroup',
'_admin_deletehost',
'_admin_deletetable',
'_admin_deleteuser',
'_admin_duplicategroup',
'_admin_internaldatabase',
'_admin_listconnectors',
'_admin_listdatabases',
'_admin_listfields',
'_admin_listgroups',
'_admin_listhosts',
'_admin_listtables',
'_admin_listusers',
'_admin_refreshconnector',
'_admin_refreshsecurity',
'_admin_servicepath',
'_admin_updateconnector',
'_admin_updatedatabase',
'_admin_updatefield',
'_admin_updategroup',
'_admin_updatehost',
'_admin_updatetable',
'_admin_updateuser',
'_chartfx_activation_string',
'_chartfx_getchallengestring',
'_chop_args',
'_chop_mimes',
'_client_addr_old',
'_client_address_old',
'_client_ip_old',
'_database_names',
'_datasource_reload',
'_date_current',
'_date_format',
'_date_msec',
'_date_parse',
'_execution_timelimit',
'_file_chmod',
'_initialize',
'_jdbc_acceptsurl',
'_jdbc_debug',
'_jdbc_deletehost',
'_jdbc_driverclasses',
'_jdbc_driverinfo',
'_jdbc_metainfo',
'_jdbc_propertyinfo',
'_jdbc_setdriver',
'_lasso_param',
'_log_helper',
'_proc_noparam',
'_proc_withparam',
'_recursion_limit',
'_request_param',
'_security_binaryexpiration',
'_security_flushcaches',
'_security_isserialized',
'_security_serialexpiration',
'_srand',
'_strict_literals',
'_substring',
'_xmlrpc_exconverter',
'_xmlrpc_inconverter',
'_xmlrpc_xmlinconverter',
'abort',
'action_addinfo',
'action_addrecord',
'action_param',
'action_params',
'action_setfoundcount',
'action_setrecordid',
'action_settotalcount',
'action_statement',
'admin_allowedfileroots',
'admin_changeuser',
'admin_createuser',
'admin_currentgroups',
'admin_currentuserid',
'admin_currentusername',
'admin_getpref',
'admin_groupassignuser',
'admin_grouplistusers',
'admin_groupremoveuser',
'admin_lassoservicepath',
'admin_listgroups',
'admin_refreshlicensing',
'admin_refreshsecurity',
'admin_reloaddatasource',
'admin_removepref',
'admin_setpref',
'admin_userexists',
'admin_userlistgroups',
'all',
'and',
'array',
'array_iterator',
'auth',
'auth_admin',
'auth_auth',
'auth_custom',
'auth_group',
'auth_prompt',
'auth_user',
'base64',
'bean',
'bigint',
'bom_utf16be',
'bom_utf16le',
'bom_utf32be',
'bom_utf32le',
'bom_utf8',
'boolean',
'bw',
'bytes',
'cache',
'cache_delete',
'cache_empty',
'cache_exists',
'cache_fetch',
'cache_internal',
'cache_maintenance',
'cache_object',
'cache_preferences',
'cache_store',
'case',
'chartfx',
'chartfx_records',
'chartfx_serve',
'checked',
'choice_list',
'choice_listitem',
'choicelistitem',
'cipher_decrypt',
'cipher_digest',
'cipher_encrypt',
'cipher_hmac',
'cipher_keylength',
'cipher_list',
'click_text',
'client_addr',
'client_address',
'client_authorization',
'client_browser',
'client_contentlength',
'client_contenttype',
'client_cookielist',
'client_cookies',
'client_encoding',
'client_formmethod',
'client_getargs',
'client_getparams',
'client_headers',
'client_ip',
'client_ipfrominteger',
'client_iptointeger',
'client_password',
'client_postargs',
'client_postparams',
'client_type',
'client_url',
'client_username',
'cn',
'column',
'column_name',
'column_names',
'compare_beginswith',
'compare_contains',
'compare_endswith',
'compare_equalto',
'compare_greaterthan',
'compare_greaterthanorequals',
'compare_greaterthanorequls',
'compare_lessthan',
'compare_lessthanorequals',
'compare_notbeginswith',
'compare_notcontains',
'compare_notendswith',
'compare_notequalto',
'compare_notregexp',
'compare_regexp',
'compare_strictequalto',
'compare_strictnotequalto',
'compiler_removecacheddoc',
'compiler_setdefaultparserflags',
'compress',
'content_body',
'content_encoding',
'content_header',
'content_type',
'cookie',
'cookie_set',
'curl_ftp_getfile',
'curl_ftp_getlisting',
'curl_ftp_putfile',
'curl_include_url',
'currency',
'database_changecolumn',
'database_changefield',
'database_createcolumn',
'database_createfield',
'database_createtable',
'database_fmcontainer',
'database_hostinfo',
'database_inline',
'database_name',
'database_nameitem',
'database_names',
'database_realname',
'database_removecolumn',
'database_removefield',
'database_removetable',
'database_repeating',
'database_repeating_valueitem',
'database_repeatingvalueitem',
'database_schemanameitem',
'database_schemanames',
'database_tablecolumn',
'database_tablenameitem',
'database_tablenames',
'datasource_name',
'datasource_register',
'date',
'date__date_current',
'date__date_format',
'date__date_msec',
'date__date_parse',
'date_add',
'date_date',
'date_difference',
'date_duration',
'date_format',
'date_getcurrentdate',
'date_getday',
'date_getdayofweek',
'date_gethour',
'date_getlocaltimezone',
'date_getminute',
'date_getmonth',
'date_getsecond',
'date_gettime',
'date_getyear',
'date_gmttolocal',
'date_localtogmt',
'date_maximum',
'date_minimum',
'date_msec',
'date_setformat',
'date_subtract',
'db_layoutnameitem',
'db_layoutnames',
'db_nameitem',
'db_names',
'db_tablenameitem',
'db_tablenames',
'dbi_column_names',
'dbi_field_names',
'decimal',
'decimal_setglobaldefaultprecision',
'decode_base64',
'decode_bheader',
'decode_hex',
'decode_html',
'decode_json',
'decode_qheader',
'decode_quotedprintable',
'decode_quotedprintablebytes',
'decode_url',
'decode_xml',
'decompress',
'decrypt_blowfish',
'decrypt_blowfish2',
'default',
'define_atbegin',
'define_atend',
'define_constant',
'define_prototype',
'define_tag',
'define_tagp',
'define_type',
'define_typep',
'deserialize',
'directory_directorynameitem',
'directory_lister',
'directory_nameitem',
'directorynameitem',
'dns_default',
'dns_lookup',
'dns_response',
'duration',
'else',
'email_batch',
'email_compose',
'email_digestchallenge',
'email_digestresponse',
'email_extract',
'email_findemails',
'email_immediate',
'email_merge',
'email_mxerror',
'email_mxlookup',
'email_parse',
'email_pop',
'email_queue',
'email_result',
'email_safeemail',
'email_send',
'email_smtp',
'email_status',
'email_token',
'email_translatebreakstocrlf',
'encode_base64',
'encode_bheader',
'encode_break',
'encode_breaks',
'encode_crc32',
'encode_hex',
'encode_html',
'encode_htmltoxml',
'encode_json',
'encode_qheader',
'encode_quotedprintable',
'encode_quotedprintablebytes',
'encode_set',
'encode_smart',
'encode_sql',
'encode_sql92',
'encode_stricturl',
'encode_url',
'encode_xml',
'encrypt_blowfish',
'encrypt_blowfish2',
'encrypt_crammd5',
'encrypt_hmac',
'encrypt_md5',
'eq',
'error_adderror',
'error_code',
'error_code_aborted',
'error_code_assert',
'error_code_bof',
'error_code_connectioninvalid',
'error_code_couldnotclosefile',
'error_code_couldnotcreateoropenfile',
'error_code_couldnotdeletefile',
'error_code_couldnotdisposememory',
'error_code_couldnotlockmemory',
'error_code_couldnotreadfromfile',
'error_code_couldnotunlockmemory',
'error_code_couldnotwritetofile',
'error_code_criterianotmet',
'error_code_datasourceerror',
'error_code_directoryfull',
'error_code_diskfull',
'error_code_dividebyzero',
'error_code_eof',
'error_code_failure',
'error_code_fieldrestriction',
'error_code_file',
'error_code_filealreadyexists',
'error_code_filecorrupt',
'error_code_fileinvalid',
'error_code_fileinvalidaccessmode',
'error_code_fileisclosed',
'error_code_fileisopen',
'error_code_filelocked',
'error_code_filenotfound',
'error_code_fileunlocked',
'error_code_httpfilenotfound',
'error_code_illegalinstruction',
'error_code_illegaluseoffrozeninstance',
'error_code_invaliddatabase',
'error_code_invalidfilename',
'error_code_invalidmemoryobject',
'error_code_invalidparameter',
'error_code_invalidpassword',
'error_code_invalidpathname',
'error_code_invalidusername',
'error_code_ioerror',
'error_code_loopaborted',
'error_code_memory',
'error_code_network',
'error_code_nilpointer',
'error_code_noerr',
'error_code_nopermission',
'error_code_outofmemory',
'error_code_outofstackspace',
'error_code_overflow',
'error_code_postconditionfailed',
'error_code_preconditionfailed',
'error_code_resnotfound',
'error_code_resource',
'error_code_streamreaderror',
'error_code_streamwriteerror',
'error_code_syntaxerror',
'error_code_tagnotfound',
'error_code_unknownerror',
'error_code_varnotfound',
'error_code_volumedoesnotexist',
'error_code_webactionnotsupported',
'error_code_webadderror',
'error_code_webdeleteerror',
'error_code_webmodulenotfound',
'error_code_webnosuchobject',
'error_code_webrepeatingrelatedfield',
'error_code_webrequiredfieldmissing',
'error_code_webtimeout',
'error_code_webupdateerror',
'error_columnrestriction',
'error_currenterror',
'error_databaseconnectionunavailable',
'error_databasetimeout',
'error_deleteerror',
'error_fieldrestriction',
'error_filenotfound',
'error_invaliddatabase',
'error_invalidpassword',
'error_invalidusername',
'error_modulenotfound',
'error_msg',
'error_msg_aborted',
'error_msg_assert',
'error_msg_bof',
'error_msg_connectioninvalid',
'error_msg_couldnotclosefile',
'error_msg_couldnotcreateoropenfile',
'error_msg_couldnotdeletefile',
'error_msg_couldnotdisposememory',
'error_msg_couldnotlockmemory',
'error_msg_couldnotreadfromfile',
'error_msg_couldnotunlockmemory',
'error_msg_couldnotwritetofile',
'error_msg_criterianotmet',
'error_msg_datasourceerror',
'error_msg_directoryfull',
'error_msg_diskfull',
'error_msg_dividebyzero',
'error_msg_eof',
'error_msg_failure',
'error_msg_fieldrestriction',
'error_msg_file',
'error_msg_filealreadyexists',
'error_msg_filecorrupt',
'error_msg_fileinvalid',
'error_msg_fileinvalidaccessmode',
'error_msg_fileisclosed',
'error_msg_fileisopen',
'error_msg_filelocked',
'error_msg_filenotfound',
'error_msg_fileunlocked',
'error_msg_httpfilenotfound',
'error_msg_illegalinstruction',
'error_msg_illegaluseoffrozeninstance',
'error_msg_invaliddatabase',
'error_msg_invalidfilename',
'error_msg_invalidmemoryobject',
'error_msg_invalidparameter',
'error_msg_invalidpassword',
'error_msg_invalidpathname',
'error_msg_invalidusername',
'error_msg_ioerror',
'error_msg_loopaborted',
'error_msg_memory',
'error_msg_network',
'error_msg_nilpointer',
'error_msg_noerr',
'error_msg_nopermission',
'error_msg_outofmemory',
'error_msg_outofstackspace',
'error_msg_overflow',
'error_msg_postconditionfailed',
'error_msg_preconditionfailed',
'error_msg_resnotfound',
'error_msg_resource',
'error_msg_streamreaderror',
'error_msg_streamwriteerror',
'error_msg_syntaxerror',
'error_msg_tagnotfound',
'error_msg_unknownerror',
'error_msg_varnotfound',
'error_msg_volumedoesnotexist',
'error_msg_webactionnotsupported',
'error_msg_webadderror',
'error_msg_webdeleteerror',
'error_msg_webmodulenotfound',
'error_msg_webnosuchobject',
'error_msg_webrepeatingrelatedfield',
'error_msg_webrequiredfieldmissing',
'error_msg_webtimeout',
'error_msg_webupdateerror',
'error_noerror',
'error_nopermission',
'error_norecordsfound',
'error_outofmemory',
'error_pop',
'error_push',
'error_reqcolumnmissing',
'error_reqfieldmissing',
'error_requiredcolumnmissing',
'error_requiredfieldmissing',
'error_reset',
'error_seterrorcode',
'error_seterrormessage',
'error_updateerror',
'euro',
'event_schedule',
'ew',
'fail',
'fail_if',
'false',
'field',
'field_name',
'field_names',
'file',
'file_autoresolvefullpaths',
'file_chmod',
'file_control',
'file_copy',
'file_create',
'file_creationdate',
'file_currenterror',
'file_delete',
'file_exists',
'file_getlinecount',
'file_getsize',
'file_isdirectory',
'file_listdirectory',
'file_moddate',
'file_modechar',
'file_modeline',
'file_move',
'file_openread',
'file_openreadwrite',
'file_openwrite',
'file_openwriteappend',
'file_openwritetruncate',
'file_probeeol',
'file_processuploads',
'file_read',
'file_readline',
'file_rename',
'file_serve',
'file_setsize',
'file_stream',
'file_streamcopy',
'file_uploads',
'file_waitread',
'file_waittimeout',
'file_waitwrite',
'file_write',
'find_soap_ops',
'form_param',
'found_count',
'ft',
'ftp_getfile',
'ftp_getlisting',
'ftp_putfile',
'full',
'global',
'global_defined',
'global_remove',
'global_reset',
'globals',
'gt',
'gte',
'handle',
'handle_error',
'header',
'html_comment',
'http_getfile',
'ical_alarm',
'ical_attribute',
'ical_calendar',
'ical_daylight',
'ical_event',
'ical_freebusy',
'ical_item',
'ical_journal',
'ical_parse',
'ical_standard',
'ical_timezone',
'ical_todo',
'if',
'if_empty',
'if_false',
'if_null',
'if_true',
'image',
'image_url',
'img',
'include',
'include_cgi',
'include_currentpath',
'include_once',
'include_raw',
'include_url',
'inline',
'integer',
'iterate',
'iterator',
'java',
'java_bean',
'json_records',
'json_rpccall',
'keycolumn_name',
'keycolumn_value',
'keyfield_name',
'keyfield_value',
'lasso_comment',
'lasso_currentaction',
'lasso_datasourceis',
'lasso_datasourceis4d',
'lasso_datasourceisfilemaker',
'lasso_datasourceisfilemaker7',
'lasso_datasourceisfilemaker9',
'lasso_datasourceisfilemakersa',
'lasso_datasourceisjdbc',
'lasso_datasourceislassomysql',
'lasso_datasourceismysql',
'lasso_datasourceisodbc',
'lasso_datasourceisopenbase',
'lasso_datasourceisoracle',
'lasso_datasourceispostgresql',
'lasso_datasourceisspotlight',
'lasso_datasourceissqlite',
'lasso_datasourceissqlserver',
'lasso_datasourcemodulename',
'lasso_datatype',
'lasso_disableondemand',
'lasso_errorreporting',
'lasso_executiontimelimit',
'lasso_parser',
'lasso_process',
'lasso_sessionid',
'lasso_siteid',
'lasso_siteisrunning',
'lasso_sitename',
'lasso_siterestart',
'lasso_sitestart',
'lasso_sitestop',
'lasso_tagexists',
'lasso_tagmodulename',
'lasso_uniqueid',
'lasso_updatecheck',
'lasso_uptime',
'lasso_version',
'lassoapp_create',
'lassoapp_dump',
'lassoapp_flattendir',
'lassoapp_getappdata',
'lassoapp_link',
'lassoapp_list',
'lassoapp_process',
'lassoapp_unitize',
'layout_name',
'ldap',
'ldap_scope_base',
'ldap_scope_onelevel',
'ldap_scope_subtree',
'ldml',
'ldml_ldml',
'library',
'library_once',
'link',
'link_currentaction',
'link_currentactionparams',
'link_currentactionurl',
'link_currentgroup',
'link_currentgroupparams',
'link_currentgroupurl',
'link_currentrecord',
'link_currentrecordparams',
'link_currentrecordurl',
'link_currentsearch',
'link_currentsearchparams',
'link_currentsearchurl',
'link_detail',
'link_detailparams',
'link_detailurl',
'link_firstgroup',
'link_firstgroupparams',
'link_firstgroupurl',
'link_firstrecord',
'link_firstrecordparams',
'link_firstrecordurl',
'link_lastgroup',
'link_lastgroupparams',
'link_lastgroupurl',
'link_lastrecord',
'link_lastrecordparams',
'link_lastrecordurl',
'link_nextgroup',
'link_nextgroupparams',
'link_nextgroupurl',
'link_nextrecord',
'link_nextrecordparams',
'link_nextrecordurl',
'link_params',
'link_prevgroup',
'link_prevgroupparams',
'link_prevgroupurl',
'link_prevrecord',
'link_prevrecordparams',
'link_prevrecordurl',
'link_setformat',
'link_url',
'list',
'list_additem',
'list_fromlist',
'list_fromstring',
'list_getitem',
'list_itemcount',
'list_iterator',
'list_removeitem',
'list_replaceitem',
'list_reverseiterator',
'list_tostring',
'literal',
'ljax_end',
'ljax_hastarget',
'ljax_include',
'ljax_start',
'ljax_target',
'local',
'local_defined',
'local_remove',
'local_reset',
'locale_format',
'locals',
'log',
'log_always',
'log_critical',
'log_deprecated',
'log_destination_console',
'log_destination_database',
'log_destination_file',
'log_detail',
'log_level_critical',
'log_level_deprecated',
'log_level_detail',
'log_level_sql',
'log_level_warning',
'log_setdestination',
'log_sql',
'log_warning',
'logicalop_value',
'logicaloperator_value',
'loop',
'loop_abort',
'loop_continue',
'loop_count',
'lt',
'lte',
'magick_image',
'map',
'map_iterator',
'match_comparator',
'match_notrange',
'match_notregexp',
'match_range',
'match_regexp',
'math_abs',
'math_acos',
'math_add',
'math_asin',
'math_atan',
'math_atan2',
'math_ceil',
'math_converteuro',
'math_cos',
'math_div',
'math_exp',
'math_floor',
'math_internal_rand',
'math_internal_randmax',
'math_internal_srand',
'math_ln',
'math_log',
'math_log10',
'math_max',
'math_min',
'math_mod',
'math_mult',
'math_pow',
'math_random',
'math_range',
'math_rint',
'math_roman',
'math_round',
'math_sin',
'math_sqrt',
'math_sub',
'math_tan',
'maxrecords_value',
'memory_session_driver',
'mime_type',
'minimal',
'misc__srand',
'misc_randomnumber',
'misc_roman',
'misc_valid_creditcard',
'mysql_session_driver',
'named_param',
'namespace_current',
'namespace_delimiter',
'namespace_exists',
'namespace_file_fullpathexists',
'namespace_global',
'namespace_import',
'namespace_load',
'namespace_page',
'namespace_unload',
'namespace_using',
'neq',
'net',
'net_connectinprogress',
'net_connectok',
'net_typessl',
'net_typessltcp',
'net_typessludp',
'net_typetcp',
'net_typeudp',
'net_waitread',
'net_waittimeout',
'net_waitwrite',
'no_default_output',
'none',
'noprocess',
'not',
'nrx',
'nslookup',
'null',
'object',
'once',
'oneoff',
'op_logicalvalue',
'operator_logicalvalue',
'option',
'or',
'os_process',
'output',
'output_none',
'pair',
'params_up',
'pdf_barcode',
'pdf_color',
'pdf_doc',
'pdf_font',
'pdf_image',
'pdf_list',
'pdf_read',
'pdf_serve',
'pdf_table',
'pdf_text',
'percent',
'portal',
'postcondition',
'precondition',
'prettyprintingnsmap',
'prettyprintingtypemap',
'priorityqueue',
'private',
'proc_convert',
'proc_convertbody',
'proc_convertone',
'proc_extract',
'proc_extractone',
'proc_find',
'proc_first',
'proc_foreach',
'proc_get',
'proc_join',
'proc_lasso',
'proc_last',
'proc_map_entry',
'proc_null',
'proc_regexp',
'proc_xml',
'proc_xslt',
'process',
'protect',
'queue',
'rand',
'randomnumber',
'raw',
'recid_value',
'record_count',
'recordcount',
'recordid_value',
'records',
'records_array',
'records_map',
'redirect_url',
'reference',
'referer',
'referer_url',
'referrer',
'referrer_url',
'regexp',
'repeating',
'repeating_valueitem',
'repeatingvalueitem',
'repetition',
'req_column',
'req_field',
'required_column',
'required_field',
'response_fileexists',
'response_filepath',
'response_localpath',
'response_path',
'response_realm',
'resultset',
'resultset_count',
'return',
'return_value',
'reverseiterator',
'roman',
'row_count',
'rows',
'rows_array',
'run_children',
'rx',
'schema_name',
'scientific',
'search_args',
'search_arguments',
'search_columnitem',
'search_fielditem',
'search_operatoritem',
'search_opitem',
'search_valueitem',
'searchfielditem',
'searchoperatoritem',
'searchopitem',
'searchvalueitem',
'select',
'selected',
'self',
'serialize',
'series',
'server_date',
'server_day',
'server_ip',
'server_name',
'server_port',
'server_push',
'server_siteisrunning',
'server_sitestart',
'server_sitestop',
'server_time',
'session_abort',
'session_addoutputfilter',
'session_addvar',
'session_addvariable',
'session_deleteexpired',
'session_driver',
'session_end',
'session_id',
'session_removevar',
'session_removevariable',
'session_result',
'session_setdriver',
'session_start',
'set',
'set_iterator',
'set_reverseiterator',
'shown_count',
'shown_first',
'shown_last',
'site_atbegin',
'site_id',
'site_name',
'site_restart',
'skiprecords_value',
'sleep',
'soap_convertpartstopairs',
'soap_definetag',
'soap_info',
'soap_lastrequest',
'soap_lastresponse',
'soap_stub',
'sort_args',
'sort_arguments',
'sort_columnitem',
'sort_fielditem',
'sort_orderitem',
'sortcolumnitem',
'sortfielditem',
'sortorderitem',
'sqlite_createdb',
'sqlite_session_driver',
'sqlite_setsleepmillis',
'sqlite_setsleeptries',
'srand',
'stack',
'stock_quote',
'string',
'string_charfromname',
'string_concatenate',
'string_countfields',
'string_endswith',
'string_extract',
'string_findposition',
'string_findregexp',
'string_fordigit',
'string_getfield',
'string_getunicodeversion',
'string_insert',
'string_isalpha',
'string_isalphanumeric',
'string_isdigit',
'string_ishexdigit',
'string_islower',
'string_isnumeric',
'string_ispunctuation',
'string_isspace',
'string_isupper',
'string_length',
'string_lowercase',
'string_remove',
'string_removeleading',
'string_removetrailing',
'string_replace',
'string_replaceregexp',
'string_todecimal',
'string_tointeger',
'string_uppercase',
'string_validcharset',
'table_name',
'table_realname',
'tag',
'tag_name',
'tags',
'tags_find',
'tags_list',
'tcp_close',
'tcp_open',
'tcp_send',
'tcp_tcp_close',
'tcp_tcp_open',
'tcp_tcp_send',
'thread_abort',
'thread_atomic',
'thread_event',
'thread_exists',
'thread_getcurrentid',
'thread_getpriority',
'thread_info',
'thread_list',
'thread_lock',
'thread_pipe',
'thread_priority_default',
'thread_priority_high',
'thread_priority_low',
'thread_rwlock',
'thread_semaphore',
'thread_setpriority',
'token_value',
'total_records',
'treemap',
'treemap_iterator',
'true',
'url_rewrite',
'valid_creditcard',
'valid_date',
'valid_email',
'valid_url',
'value_list',
'value_listitem',
'valuelistitem',
'var',
'var_defined',
'var_remove',
'var_reset',
'var_set',
'variable',
'variable_defined',
'variable_set',
'variables',
'variant_count',
'vars',
'wap_isenabled',
'wap_maxbuttons',
'wap_maxcolumns',
'wap_maxhorzpixels',
'wap_maxrows',
'wap_maxvertpixels',
'while',
'wsdl_extract',
'wsdl_getbinding',
'wsdl_getbindingforoperation',
'wsdl_getbindingoperations',
'wsdl_getmessagenamed',
'wsdl_getmessageparts',
'wsdl_getmessagetriofromporttype',
'wsdl_getopbodystyle',
'wsdl_getopbodyuse',
'wsdl_getoperation',
'wsdl_getoplocation',
'wsdl_getopmessagetypes',
'wsdl_getopsoapaction',
'wsdl_getportaddress',
'wsdl_getportsforservice',
'wsdl_getporttype',
'wsdl_getporttypeoperation',
'wsdl_getservicedocumentation',
'wsdl_getservices',
'wsdl_gettargetnamespace',
'wsdl_issoapoperation',
'wsdl_listoperations',
'wsdl_maketest',
'xml',
'xml_extract',
'xml_rpc',
'xml_rpccall',
'xml_rw',
'xml_serve',
'xml_transform',
'xml_xml',
'xml_xmlstream',
'xmlstream',
'xsd_attribute',
'xsd_blankarraybase',
'xsd_blankbase',
'xsd_buildtype',
'xsd_cache',
'xsd_checkcardinality',
'xsd_continueall',
'xsd_continueannotation',
'xsd_continueany',
'xsd_continueanyattribute',
'xsd_continueattribute',
'xsd_continueattributegroup',
'xsd_continuechoice',
'xsd_continuecomplexcontent',
'xsd_continuecomplextype',
'xsd_continuedocumentation',
'xsd_continueextension',
'xsd_continuegroup',
'xsd_continuekey',
'xsd_continuelist',
'xsd_continuerestriction',
'xsd_continuesequence',
'xsd_continuesimplecontent',
'xsd_continuesimpletype',
'xsd_continueunion',
'xsd_deserialize',
'xsd_fullyqualifyname',
'xsd_generate',
'xsd_generateblankfromtype',
'xsd_generateblanksimpletype',
'xsd_generatetype',
'xsd_getschematype',
'xsd_issimpletype',
'xsd_loadschema',
'xsd_lookupnamespaceuri',
'xsd_lookuptype',
'xsd_processany',
'xsd_processattribute',
'xsd_processattributegroup',
'xsd_processcomplextype',
'xsd_processelement',
'xsd_processgroup',
'xsd_processimport',
'xsd_processinclude',
'xsd_processschema',
'xsd_processsimpletype',
'xsd_ref',
'xsd_type',
)
}
MEMBERS = {
'Member Methods': (
'abort',
'abs',
'accept_charset',
'accept',
'acceptconnections',
'acceptdeserializedelement',
'acceptnossl',
'acceptpost',
'accesskey',
'acos',
'acosh',
'action',
'actionparams',
'active_tick',
'add',
'addatend',
'addattachment',
'addbarcode',
'addchapter',
'addcheckbox',
'addcolumninfo',
'addcombobox',
'addcomment',
'addcomponent',
'addcomponents',
'addcss',
'adddatabasetable',
'adddatasource',
'adddatasourcedatabase',
'adddatasourcehost',
'adddir',
'adddirpath',
'addendjs',
'addendjstext',
'adderror',
'addfavicon',
'addfile',
'addgroup',
'addheader',
'addhiddenfield',
'addhtmlpart',
'addimage',
'addjavascript',
'addjs',
'addjstext',
'addlist',
'addmathfunctions',
'addmember',
'addoneheaderline',
'addpage',
'addparagraph',
'addpart',
'addpasswordfield',
'addphrase',
'addpostdispatch',
'addpredispatch',
'addradiobutton',
'addradiogroup',
'addresetbutton',
'addrow',
'addsection',
'addselectlist',
'addset',
'addsubmitbutton',
'addsubnode',
'addtable',
'addtask',
'addtext',
'addtextarea',
'addtextfield',
'addtextpart',
'addtobuffer',
'addtrait',
'adduser',
'addusertogroup',
'addwarning',
'addzip',
'allocobject',
'am',
'ampm',
'annotate',
'answer',
'apop',
'append',
'appendarray',
'appendarraybegin',
'appendarrayend',
'appendbool',
'appendbytes',
'appendchar',
'appendchild',
'appendcolon',
'appendcomma',
'appenddata',
'appenddatetime',
'appenddbpointer',
'appenddecimal',
'appenddocument',
'appendimagetolist',
'appendinteger',
'appendnowutc',
'appendnull',
'appendoid',
'appendregex',
'appendreplacement',
'appendstring',
'appendtail',
'appendtime',
'applyheatcolors',
'appmessage',
'appname',
'appprefix',
'appstatus',
'arc',
'archive',
'arguments',
'argumentvalue',
'asarray',
'asarraystring',
'asasync',
'asbytes',
'ascopy',
'ascopydeep',
'asdecimal',
'asgenerator',
'asin',
'asinh',
'asinteger',
'askeyedgenerator',
'aslazystring',
'aslist',
'asraw',
'asstaticarray',
'asstring',
'asstringhex',
'asstringoct',
'asxml',
'atan',
'atan2',
'atanh',
'atend',
'atends',
'atime',
'attributecount',
'attributes',
'attrs',
'auth',
'authenticate',
'authorize',
'autocollectbuffer',
'average',
'back',
'basename',
'basepaths',
'baseuri',
'bcc',
'beginssl',
'beginswith',
'begintls',
'bestcharset',
'bind_blob',
'bind_double',
'bind_int',
'bind_null',
'bind_parameter_index',
'bind_text',
'bind',
'bindcount',
'bindone',
'bindparam',
'bitand',
'bitclear',
'bitflip',
'bitformat',
'bitnot',
'bitor',
'bitset',
'bitshiftleft',
'bitshiftright',
'bittest',
'bitxor',
'blur',
'body',
'bodybytes',
'boundary',
'bptoxml',
'bptypetostr',
'bucketnumber',
'buff',
'buildquery',
'businessdaysbetween',
'by',
'bytes',
'cachedappprefix',
'cachedroot',
'callboolean',
'callbooleanmethod',
'callbytemethod',
'callcharmethod',
'calldoublemethod',
'calledname',
'callfirst',
'callfloat',
'callfloatmethod',
'callint',
'callintmethod',
'calllongmethod',
'callnonvirtualbooleanmethod',
'callnonvirtualbytemethod',
'callnonvirtualcharmethod',
'callnonvirtualdoublemethod',
'callnonvirtualfloatmethod',
'callnonvirtualintmethod',
'callnonvirtuallongmethod',
'callnonvirtualobjectmethod',
'callnonvirtualshortmethod',
'callnonvirtualvoidmethod',
'callobject',
'callobjectmethod',
'callshortmethod',
'callsite_col',
'callsite_file',
'callsite_line',
'callstack',
'callstaticboolean',
'callstaticbooleanmethod',
'callstaticbytemethod',
'callstaticcharmethod',
'callstaticdoublemethod',
'callstaticfloatmethod',
'callstaticint',
'callstaticintmethod',
'callstaticlongmethod',
'callstaticobject',
'callstaticobjectmethod',
'callstaticshortmethod',
'callstaticstring',
'callstaticvoidmethod',
'callstring',
'callvoid',
'callvoidmethod',
'cancel',
'cap',
'capa',
'capabilities',
'capi',
'cbrt',
'cc',
'ceil',
'chardigitvalue',
'charname',
'charset',
'chartype',
'checkdebugging',
'checked',
'checkuser',
'childnodes',
'chk',
'chmod',
'choosecolumntype',
'chown',
'chunked',
'circle',
'class',
'classid',
'clear',
'clonenode',
'close',
'closepath',
'closeprepared',
'closewrite',
'code',
'codebase',
'codetype',
'colmap',
'colorspace',
'column_blob',
'column_count',
'column_decltype',
'column_double',
'column_int64',
'column_name',
'column_text',
'column_type',
'command',
'comments',
'compare',
'comparecodepointorder',
'componentdelimiter',
'components',
'composite',
'compress',
'concat',
'condtoint',
'configureds',
'configuredskeys',
'connect',
'connection',
'connectionhandler',
'connhandler',
'consume_domain',
'consume_label',
'consume_message',
'consume_rdata',
'consume_string',
'contains',
'content_disposition',
'content_transfer_encoding',
'content_type',
'content',
'contentlength',
'contents',
'contenttype',
'continuation',
'continuationpacket',
'continuationpoint',
'continuationstack',
'continue',
'contrast',
'conventionaltop',
'convert',
'cookie',
'cookies',
'cookiesarray',
'cookiesary',
'copyto',
'cos',
'cosh',
'count',
'countkeys',
'country',
'countusersbygroup',
'crc',
'create',
'createattribute',
'createattributens',
'createcdatasection',
'createcomment',
'createdocument',
'createdocumentfragment',
'createdocumenttype',
'createelement',
'createelementns',
'createentityreference',
'createindex',
'createprocessinginstruction',
'createtable',
'createtextnode',
'criteria',
'crop',
'csscontent',
'curl',
'current',
'currentfile',
'curveto',
'd',
'data',
'databasecolumnnames',
'databasecolumns',
'databasemap',
'databasename',
'datasourcecolumnnames',
'datasourcecolumns',
'datasourcemap',
'date',
'day',
'dayofmonth',
'dayofweek',
'dayofweekinmonth',
'dayofyear',
'days',
'daysbetween',
'db',
'dbtablestable',
'debug',
'declare',
'decodebase64',
'decodehex',
'decodehtml',
'decodeqp',
'decodeurl',
'decodexml',
'decompose',
'decomposeassignment',
'defaultcontentrepresentation',
'defer',
'deg2rad',
'dele',
'delete',
'deletedata',
'deleteglobalref',
'deletelocalref',
'delim',
'depth',
'dereferencepointer',
'describe',
'description',
'deserialize',
'detach',
'detectcharset',
'didinclude',
'difference',
'digit',
'dir',
'displaycountry',
'displaylanguage',
'displayname',
'displayscript',
'displayvariant',
'div',
'dns_response',
'do',
'doatbegins',
'doatends',
'doccomment',
'doclose',
'doctype',
'document',
'documentelement',
'documentroot',
'domainbody',
'done',
'dosessions',
'dowithclose',
'dowlocal',
'download',
'drawtext',
'drop',
'dropindex',
'dsdbtable',
'dshoststable',
'dsinfo',
'dst',
'dstable',
'dstoffset',
'dtdid',
'dup',
'dup2',
'each',
'eachbyte',
'eachcharacter',
'eachchild',
'eachcomponent',
'eachdir',
'eachdirpath',
'eachdirpathrecursive',
'eachentry',
'eachfile',
'eachfilename',
'eachfilepath',
'eachfilepathrecursive',
'eachkey',
'eachline',
'eachlinebreak',
'eachmatch',
'eachnode',
'eachpair',
'eachpath',
'eachpathrecursive',
'eachrow',
'eachsub',
'eachword',
'eachwordbreak',
'element',
'eligiblepath',
'eligiblepaths',
'encodebase64',
'encodehex',
'encodehtml',
'encodehtmltoxml',
'encodemd5',
'encodepassword',
'encodeqp',
'encodesql',
'encodesql92',
'encodeurl',
'encodevalue',
'encodexml',
'encoding',
'enctype',
'end',
'endjs',
'endssl',
'endswith',
'endtls',
'enhance',
'ensurestopped',
'entities',
'entry',
'env',
'equals',
'era',
'erf',
'erfc',
'err',
'errcode',
'errmsg',
'error',
'errors',
'errstack',
'escape_member',
'establisherrorstate',
'exceptioncheck',
'exceptionclear',
'exceptiondescribe',
'exceptionoccurred',
'exchange',
'execinits',
'execinstalls',
'execute',
'executelazy',
'executenow',
'exists',
'exit',
'exitcode',
'exp',
'expire',
'expireminutes',
'expiresminutes',
'expm1',
'export16bits',
'export32bits',
'export64bits',
'export8bits',
'exportas',
'exportbytes',
'exportfdf',
'exportpointerbits',
'exportsigned16bits',
'exportsigned32bits',
'exportsigned64bits',
'exportsigned8bits',
'exportstring',
'expose',
'extendedyear',
'extensiondelimiter',
'extensions',
'extract',
'extractfast',
'extractfastone',
'extractimage',
'extractone',
'f',
'fabs',
'fail',
'failnoconnectionhandler',
'family',
'fatalerror',
'fcgireq',
'fchdir',
'fchmod',
'fchown',
'fd',
'features',
'fetchdata',
'fieldnames',
'fieldposition',
'fieldstable',
'fieldtype',
'fieldvalue',
'file',
'filename',
'filenames',
'filequeue',
'fileuploads',
'fileuploadsary',
'filterinputcolumn',
'finalize',
'find',
'findall',
'findandmodify',
'findbucket',
'findcase',
'findclass',
'findcount',
'finddescendant',
'findfirst',
'findinclude',
'findinctx',
'findindex',
'findlast',
'findpattern',
'findposition',
'findsymbols',
'first',
'firstchild',
'firstcomponent',
'firstdayofweek',
'firstnode',
'fixformat',
'flags',
'fliph',
'flipv',
'floor',
'flush',
'foldcase',
'foo',
'for',
'forcedrowid',
'foreach',
'foreachaccept',
'foreachbyte',
'foreachcharacter',
'foreachchild',
'foreachday',
'foreachentry',
'foreachfile',
'foreachfilename',
'foreachkey',
'foreachline',
'foreachlinebreak',
'foreachmatch',
'foreachnode',
'foreachpair',
'foreachpathcomponent',
'foreachrow',
'foreachspool',
'foreachsub',
'foreachwordbreak',
'form',
'format',
'formatas',
'formatcontextelement',
'formatcontextelements',
'formatnumber',
'free',
'frexp',
'from',
'fromname',
'fromport',
'fromreflectedfield',
'fromreflectedmethod',
'front',
'fsync',
'ftpdeletefile',
'ftpgetlisting',
'ftruncate',
'fullpath',
'fx',
'gamma',
'gatewayinterface',
'gen',
'generatechecksum',
'get',
'getabswidth',
'getalignment',
'getappsource',
'getarraylength',
'getattr',
'getattribute',
'getattributenamespace',
'getattributenode',
'getattributenodens',
'getattributens',
'getbarheight',
'getbarmultiplier',
'getbarwidth',
'getbaseline',
'getbold',
'getbooleanarrayelements',
'getbooleanarrayregion',
'getbooleanfield',
'getbordercolor',
'getborderwidth',
'getbytearrayelements',
'getbytearrayregion',
'getbytefield',
'getchararrayelements',
'getchararrayregion',
'getcharfield',
'getclass',
'getcode',
'getcolor',
'getcolumn',
'getcolumncount',
'getcolumns',
'getdatabasebyalias',
'getdatabasebyid',
'getdatabasebyname',
'getdatabasehost',
'getdatabasetable',
'getdatabasetablebyalias',
'getdatabasetablebyid',
'getdatabasetablepart',
'getdatasource',
'getdatasourcedatabase',
'getdatasourcedatabasebyid',
'getdatasourcehost',
'getdatasourceid',
'getdatasourcename',
'getdefaultstorage',
'getdoublearrayelements',
'getdoublearrayregion',
'getdoublefield',
'getelementbyid',
'getelementsbytagname',
'getelementsbytagnamens',
'getencoding',
'getface',
'getfield',
'getfieldid',
'getfile',
'getfloatarrayelements',
'getfloatarrayregion',
'getfloatfield',
'getfont',
'getformat',
'getfullfontname',
'getgroup',
'getgroupid',
'getheader',
'getheaders',
'gethostdatabase',
'gethtmlattr',
'gethtmlattrstring',
'getinclude',
'getintarrayelements',
'getintarrayregion',
'getintfield',
'getisocomment',
'getitalic',
'getlasterror',
'getlcapitype',
'getlibrary',
'getlongarrayelements',
'getlongarrayregion',
'getlongfield',
'getmargins',
'getmethodid',
'getmode',
'getnameditem',
'getnameditemns',
'getnode',
'getnumericvalue',
'getobjectarrayelement',
'getobjectclass',
'getobjectfield',
'getpadding',
'getpagenumber',
'getparts',
'getprefs',
'getpropertyvalue',
'getprowcount',
'getpsfontname',
'getrange',
'getrowcount',
'getset',
'getshortarrayelements',
'getshortarrayregion',
'getshortfield',
'getsize',
'getsortfieldspart',
'getspacing',
'getstaticbooleanfield',
'getstaticbytefield',
'getstaticcharfield',
'getstaticdoublefield',
'getstaticfieldid',
'getstaticfloatfield',
'getstaticintfield',
'getstaticlongfield',
'getstaticmethodid',
'getstaticobjectfield',
'getstaticshortfield',
'getstatus',
'getstringchars',
'getstringlength',
'getstyle',
'getsupportedencodings',
'gettablebyid',
'gettext',
'gettextalignment',
'gettextsize',
'gettrigger',
'gettype',
'getunderline',
'getuniquealiasname',
'getuser',
'getuserbykey',
'getuserid',
'getversion',
'getzipfilebytes',
'givenblock',
'gmt',
'gotconnection',
'gotfileupload',
'groupby',
'groupcolumns',
'groupcount',
'groupjoin',
'handlebreakpointget',
'handlebreakpointlist',
'handlebreakpointremove',
'handlebreakpointset',
'handlebreakpointupdate',
'handlecontextget',
'handlecontextnames',
'handlecontinuation',
'handledefinitionbody',
'handledefinitionhead',
'handledefinitionresource',
'handledevconnection',
'handleevalexpired',
'handlefeatureget',
'handlefeatureset',
'handlelassoappcontent',
'handlelassoappresponse',
'handlenested',
'handlenormalconnection',
'handlepop',
'handleresource',
'handlesource',
'handlestackget',
'handlestderr',
'handlestdin',
'handlestdout',
'handshake',
'hasattribute',
'hasattributens',
'hasattributes',
'hasbinaryproperty',
'haschildnodes',
'hasexpired',
'hasfeature',
'hasfield',
'hash',
'hashtmlattr',
'hasmethod',
'hastable',
'hastrailingcomponent',
'hasvalue',
'head',
'header',
'headerbytes',
'headers',
'headersarray',
'headersmap',
'height',
'histogram',
'home',
'host',
'hostcolumnnames',
'hostcolumnnames2',
'hostcolumns',
'hostcolumns2',
'hostdatasource',
'hostextra',
'hostid',
'hostisdynamic',
'hostmap',
'hostmap2',
'hostname',
'hostpassword',
'hostport',
'hostschema',
'hosttableencoding',
'hosttonet16',
'hosttonet32',
'hosttonet64',
'hostusername',
'hour',
'hourofampm',
'hourofday',
'hoursbetween',
'href',
'hreflang',
'htmlcontent',
'htmlizestacktrace',
'htmlizestacktracelink',
'httpaccept',
'httpacceptencoding',
'httpacceptlanguage',
'httpauthorization',
'httpcachecontrol',
'httpconnection',
'httpcookie',
'httpequiv',
'httphost',
'httpreferer',
'httpreferrer',
'httpuseragent',
'hypot',
'id',
'idealinmemory',
'idle',
'idmap',
'ifempty',
'ifkey',
'ifnotempty',
'ifnotkey',
'ignorecase',
'ilogb',
'imgptr',
'implementation',
'import16bits',
'import32bits',
'import64bits',
'import8bits',
'importas',
'importbytes',
'importfdf',
'importnode',
'importpointer',
'importstring',
'in',
'include',
'includebytes',
'includelibrary',
'includelibraryonce',
'includeonce',
'includes',
'includestack',
'indaylighttime',
'index',
'init',
'initialize',
'initrequest',
'inits',
'inneroncompare',
'input',
'inputcolumns',
'inputtype',
'insert',
'insertback',
'insertbefore',
'insertdata',
'insertfirst',
'insertfrom',
'insertfront',
'insertinternal',
'insertlast',
'insertpage',
'install',
'installs',
'integer',
'internalsubset',
'interrupt',
'intersection',
'inttocond',
'invoke',
'invokeautocollect',
'invokeuntil',
'invokewhile',
'ioctl',
'isa',
'isalive',
'isallof',
'isalnum',
'isalpha',
'isanyof',
'isbase',
'isblank',
'iscntrl',
'isdigit',
'isdir',
'isempty',
'isemptyelement',
'isfirststep',
'isfullpath',
'isgraph',
'ishttps',
'isidle',
'isinstanceof',
'islink',
'islower',
'ismultipart',
'isnan',
'isnota',
'isnotempty',
'isnothing',
'iso3country',
'iso3language',
'isopen',
'isprint',
'ispunct',
'issameobject',
'isset',
'issourcefile',
'isspace',
'isssl',
'issupported',
'istitle',
'istruetype',
'istype',
'isualphabetic',
'isulowercase',
'isupper',
'isuuppercase',
'isuwhitespace',
'isvalid',
'iswhitespace',
'isxdigit',
'isxhr',
'item',
'j0',
'j1',
'javascript',
'jbarcode',
'jcolor',
'jfont',
'jimage',
'jlist',
'jn',
'jobjectisa',
'join',
'jread',
'jscontent',
'jsonfornode',
'jsonhtml',
'jsonisleaf',
'jsonlabel',
'jtable',
'jtext',
'julianday',
'kernel',
'key',
'keycolumns',
'keys',
'keywords',
'kill',
'label',
'lang',
'language',
'last_insert_rowid',
'last',
'lastaccessdate',
'lastaccesstime',
'lastchild',
'lastcomponent',
'lasterror',
'lastinsertid',
'lastnode',
'lastpoint',
'lasttouched',
'lazyvalue',
'ldexp',
'leaveopen',
'left',
'length',
'lgamma',
'line',
'linediffers',
'linkto',
'linktype',
'list',
'listactivedatasources',
'listalldatabases',
'listalltables',
'listdatabasetables',
'listdatasourcedatabases',
'listdatasourcehosts',
'listdatasources',
'listen',
'listgroups',
'listgroupsbyuser',
'listhostdatabases',
'listhosts',
'listmethods',
'listnode',
'listusers',
'listusersbygroup',
'loadcerts',
'loaddatasourcehostinfo',
'loaddatasourceinfo',
'loadlibrary',
'localaddress',
'localname',
'locals',
'lock',
'log',
'log10',
'log1p',
'logb',
'lookupnamespace',
'lop',
'lowagiefont',
'lowercase',
'makecolor',
'makecolumnlist',
'makecolumnmap',
'makecookieyumyum',
'makefullpath',
'makeinheritedcopy',
'makenonrelative',
'makeurl',
'map',
'marker',
'matches',
'matchesstart',
'matchposition',
'matchstring',
'matchtriggers',
'max',
'maxinmemory',
'maxlength',
'maxrows',
'maxworkers',
'maybeslash',
'maybevalue',
'md5hex',
'media',
'members',
'merge',
'meta',
'method',
'methodname',
'millisecond',
'millisecondsinday',
'mime_boundary',
'mime_contenttype',
'mime_hdrs',
'mime',
'mimes',
'min',
'minute',
'minutesbetween',
'moddatestr',
'mode',
'modf',
'modificationdate',
'modificationtime',
'modulate',
'monitorenter',
'monitorexit',
'month',
'moveto',
'movetoattribute',
'movetoattributenamespace',
'movetoelement',
'movetofirstattribute',
'movetonextattribute',
'msg',
'mtime',
'multiple',
'n',
'name',
'named',
'namespaceuri',
'needinitialization',
'net',
'nettohost16',
'nettohost32',
'nettohost64',
'new',
'newbooleanarray',
'newbytearray',
'newchararray',
'newdoublearray',
'newfloatarray',
'newglobalref',
'newintarray',
'newlongarray',
'newobject',
'newobjectarray',
'newshortarray',
'newstring',
'next',
'nextafter',
'nextnode',
'nextprime',
'nextprune',
'nextprunedelta',
'nextsibling',
'nodeforpath',
'nodelist',
'nodename',
'nodetype',
'nodevalue',
'noop',
'normalize',
'notationname',
'notations',
'novaluelists',
'numsets',
'object',
'objects',
'objecttype',
'onclick',
'oncompare',
'oncomparestrict',
'onconvert',
'oncreate',
'ondblclick',
'onkeydown',
'onkeypress',
'onkeyup',
'onmousedown',
'onmousemove',
'onmouseout',
'onmouseover',
'onmouseup',
'onreset',
'onsubmit',
'ontop',
'open',
'openappend',
'openread',
'opentruncate',
'openwith',
'openwrite',
'openwriteonly',
'orderby',
'orderbydescending',
'out',
'output',
'outputencoding',
'ownerdocument',
'ownerelement',
'padleading',
'padtrailing',
'padzero',
'pagecount',
'pagerotation',
'pagesize',
'param',
'paramdescs',
'params',
'parent',
'parentdir',
'parentnode',
'parse_body',
'parse_boundary',
'parse_charset',
'parse_content_disposition',
'parse_content_transfer_encoding',
'parse_content_type',
'parse_hdrs',
'parse_mode',
'parse_msg',
'parse_parts',
'parse_rawhdrs',
'parse',
'parseas',
'parsedocument',
'parsenumber',
'parseoneheaderline',
'pass',
'path',
'pathinfo',
'pathtouri',
'pathtranslated',
'pause',
'payload',
'pdifference',
'perform',
'performonce',
'perms',
'pid',
'pixel',
'pm',
'polldbg',
'pollide',
'pop_capa',
'pop_cmd',
'pop_debug',
'pop_err',
'pop_get',
'pop_ids',
'pop_index',
'pop_log',
'pop_mode',
'pop_net',
'pop_res',
'pop_server',
'pop_timeout',
'pop_token',
'pop',
'popctx',
'popinclude',
'populate',
'port',
'position',
'postdispatch',
'postparam',
'postparams',
'postparamsary',
'poststring',
'pow',
'predispatch',
'prefix',
'preflight',
'prepare',
'prepared',
'pretty',
'prev',
'previoussibling',
'printsimplemsg',
'private_compare',
'private_find',
'private_findlast',
'private_merge',
'private_rebalanceforinsert',
'private_rebalanceforremove',
'private_replaceall',
'private_replacefirst',
'private_rotateleft',
'private_rotateright',
'private_setrange',
'private_split',
'probemimetype',
'provides',
'proxying',
'prune',
'publicid',
'pullhttpheader',
'pullmimepost',
'pulloneheaderline',
'pullpost',
'pullrawpost',
'pullrawpostchunks',
'pullrequest',
'pullrequestline',
'push',
'pushctx',
'pushinclude',
'qdarray',
'qdcount',
'queryparam',
'queryparams',
'queryparamsary',
'querystring',
'queue_maintenance',
'queue_messages',
'queue_status',
'queue',
'quit',
'r',
'raw',
'rawcontent',
'rawdiff',
'rawheader',
'rawheaders',
'rawinvokable',
'read',
'readattributevalue',
'readbytes',
'readbytesfully',
'readdestinations',
'readerror',
'readidobjects',
'readline',
'readmessage',
'readnumber',
'readobject',
'readobjecttcp',
'readpacket',
'readsomebytes',
'readstring',
'ready',
'realdoc',
'realpath',
'receivefd',
'recipients',
'recover',
'rect',
'rectype',
'red',
'redirectto',
'referrals',
'refid',
'refobj',
'refresh',
'rel',
'remainder',
'remoteaddr',
'remoteaddress',
'remoteport',
'remove',
'removeall',
'removeattribute',
'removeattributenode',
'removeattributens',
'removeback',
'removechild',
'removedatabasetable',
'removedatasource',
'removedatasourcedatabase',
'removedatasourcehost',
'removefield',
'removefirst',
'removefront',
'removegroup',
'removelast',
'removeleading',
'removenameditem',
'removenameditemns',
'removenode',
'removesubnode',
'removetrailing',
'removeuser',
'removeuserfromallgroups',
'removeuserfromgroup',
'rename',
'renderbytes',
'renderdocumentbytes',
'renderstring',
'replace',
'replaceall',
'replacechild',
'replacedata',
'replacefirst',
'replaceheader',
'replacepattern',
'representnode',
'representnoderesult',
'reqid',
'requestid',
'requestmethod',
'requestparams',
'requesturi',
'requires',
'reserve',
'reset',
'resize',
'resolutionh',
'resolutionv',
'resolvelinks',
'resourcedata',
'resourceinvokable',
'resourcename',
'resources',
'respond',
'restart',
'restname',
'result',
'results',
'resume',
'retr',
'retrieve',
'returncolumns',
'returntype',
'rev',
'reverse',
'rewind',
'right',
'rint',
'roll',
'root',
'rootmap',
'rotate',
'route',
'rowsfound',
'rset',
'rule',
'rules',
'run',
'running',
'runonce',
's',
'sa',
'safeexport8bits',
'sameas',
'save',
'savedata',
'scalb',
'scale',
'scanfordatasource',
'scantasks',
'scanworkers',
'schemaname',
'scheme',
'script',
'scriptextensions',
'scriptfilename',
'scriptname',
'scripttype',
'scripturi',
'scripturl',
'scrubkeywords',
'search',
'searchinbucket',
'searchurl',
'second',
'secondsbetween',
'seek',
'select',
'selected',
'selectmany',
'self',
'send',
'sendchunk',
'sendfd',
'sendfile',
'sendpacket',
'sendresponse',
'separator',
'serializationelements',
'serialize',
'serveraddr',
'serveradmin',
'servername',
'serverport',
'serverprotocol',
'serversignature',
'serversoftware',
'sessionsdump',
'sessionsmap',
'set',
'setalignment',
'setattr',
'setattribute',
'setattributenode',
'setattributenodens',
'setattributens',
'setbarheight',
'setbarmultiplier',
'setbarwidth',
'setbaseline',
'setbold',
'setbooleanarrayregion',
'setbooleanfield',
'setbordercolor',
'setborderwidth',
'setbytearrayregion',
'setbytefield',
'setchararrayregion',
'setcharfield',
'setcode',
'setcolor',
'setcolorspace',
'setcookie',
'setcwd',
'setdefaultstorage',
'setdestination',
'setdoublearrayregion',
'setdoublefield',
'setencoding',
'setface',
'setfieldvalue',
'setfindpattern',
'setfloatarrayregion',
'setfloatfield',
'setfont',
'setformat',
'setgeneratechecksum',
'setheaders',
'sethtmlattr',
'setignorecase',
'setinput',
'setintarrayregion',
'setintfield',
'setitalic',
'setlinewidth',
'setlongarrayregion',
'setlongfield',
'setmarker',
'setmaxfilesize',
'setmode',
'setname',
'setnameditem',
'setnameditemns',
'setobjectarrayelement',
'setobjectfield',
'setpadding',
'setpagenumber',
'setpagerange',
'setposition',
'setrange',
'setreplacepattern',
'setshortarrayregion',
'setshortfield',
'setshowchecksum',
'setsize',
'setspacing',
'setstaticbooleanfield',
'setstaticbytefield',
'setstaticcharfield',
'setstaticdoublefield',
'setstaticfloatfield',
'setstaticintfield',
'setstaticlongfield',
'setstaticobjectfield',
'setstaticshortfield',
'setstatus',
'settextalignment',
'settextsize',
'settimezone',
'settrait',
'setunderline',
'sharpen',
'shouldabort',
'shouldclose',
'showchecksum',
'showcode39startstop',
'showeanguardbars',
'shutdownrd',
'shutdownrdwr',
'shutdownwr',
'sin',
'sinh',
'size',
'skip',
'skiprows',
'sort',
'sortcolumns',
'source',
'sourcecolumn',
'sourcefile',
'sourceline',
'specified',
'split',
'splitconnection',
'splitdebuggingthread',
'splitextension',
'splittext',
'splitthread',
'splittoprivatedev',
'splituppath',
'sql',
'sqlite3',
'sqrt',
'src',
'srcpath',
'sslerrfail',
'stack',
'standby',
'start',
'startone',
'startup',
'stat',
'statement',
'statementonly',
'stats',
'status',
'statuscode',
'statusmsg',
'stdin',
'step',
'stls',
'stop',
'stoprunning',
'storedata',
'stripfirstcomponent',
'striplastcomponent',
'style',
'styletype',
'sub',
'subject',
'subnode',
'subnodes',
'substringdata',
'subtract',
'subtraits',
'sum',
'supportscontentrepresentation',
'swapbytes',
'systemid',
't',
'tabindex',
'table',
'tablecolumnnames',
'tablecolumns',
'tablehascolumn',
'tableizestacktrace',
'tableizestacktracelink',
'tablemap',
'tablename',
'tables',
'tabs',
'tabstr',
'tag',
'tagname',
'take',
'tan',
'tanh',
'target',
'tasks',
'tb',
'tell',
'testexitcode',
'testlock',
'textwidth',
'thenby',
'thenbydescending',
'threadreaddesc',
'throw',
'thrownew',
'time',
'timezone',
'title',
'titlecase',
'to',
'token',
'tolower',
'top',
'toreflectedfield',
'toreflectedmethod',
'total_changes',
'totitle',
'touch',
'toupper',
'toxmlstring',
'trace',
'trackingid',
'trait',
'transform',
'trigger',
'trim',
'trunk',
'tryfinderrorfile',
'trylock',
'tryreadobject',
'type',
'typename',
'uidl',
'uncompress',
'unescape',
'union',
'uniqueid',
'unlock',
'unspool',
'up',
'update',
'updategroup',
'upload',
'uppercase',
'url',
'used',
'usemap',
'user',
'usercolumns',
'valid',
'validate',
'validatesessionstable',
'value',
'values',
'valuetype',
'variant',
'version',
'wait',
'waitforcompletion',
'warnings',
'week',
'weekofmonth',
'weekofyear',
'where',
'width',
'workers',
'workinginputcolumns',
'workingkeycolumns',
'workingkeyfield_name',
'workingreturncolumns',
'workingsortcolumns',
'write',
'writebodybytes',
'writebytes',
'writeheader',
'writeheaderbytes',
'writeheaderline',
'writeid',
'writemessage',
'writeobject',
'writeobjecttcp',
'writestring',
'wroteheaders',
'xhtml',
'xmllang',
'y0',
'y1',
'year',
'yearwoy',
'yn',
'z',
'zip',
'zipfile',
'zipfilename',
'zipname',
'zips',
'zoneoffset',
),
'Lasso 8 Member Tags': (
'accept',
'add',
'addattachment',
'addattribute',
'addbarcode',
'addchapter',
'addcheckbox',
'addchild',
'addcombobox',
'addcomment',
'addcontent',
'addhiddenfield',
'addhtmlpart',
'addimage',
'addjavascript',
'addlist',
'addnamespace',
'addnextsibling',
'addpage',
'addparagraph',
'addparenttype',
'addpart',
'addpasswordfield',
'addphrase',
'addprevsibling',
'addradiobutton',
'addradiogroup',
'addresetbutton',
'addsection',
'addselectlist',
'addsibling',
'addsubmitbutton',
'addtable',
'addtext',
'addtextarea',
'addtextfield',
'addtextpart',
'alarms',
'annotate',
'answer',
'append',
'appendreplacement',
'appendtail',
'arc',
'asasync',
'astype',
'atbegin',
'atbottom',
'atend',
'atfarleft',
'atfarright',
'attop',
'attributecount',
'attributes',
'authenticate',
'authorize',
'backward',
'baseuri',
'bcc',
'beanproperties',
'beginswith',
'bind',
'bitand',
'bitclear',
'bitflip',
'bitformat',
'bitnot',
'bitor',
'bitset',
'bitshiftleft',
'bitshiftright',
'bittest',
'bitxor',
'blur',
'body',
'boundary',
'bytes',
'call',
'cancel',
'capabilities',
'cc',
'chardigitvalue',
'charname',
'charset',
'chartype',
'children',
'circle',
'close',
'closepath',
'closewrite',
'code',
'colorspace',
'command',
'comments',
'compare',
'comparecodepointorder',
'compile',
'composite',
'connect',
'contains',
'content_disposition',
'content_transfer_encoding',
'content_type',
'contents',
'contrast',
'convert',
'crop',
'curveto',
'data',
'date',
'day',
'daylights',
'dayofweek',
'dayofyear',
'decrement',
'delete',
'depth',
'describe',
'description',
'deserialize',
'detach',
'detachreference',
'difference',
'digit',
'document',
'down',
'drawtext',
'dst',
'dump',
'endswith',
'enhance',
'equals',
'errors',
'eval',
'events',
'execute',
'export16bits',
'export32bits',
'export64bits',
'export8bits',
'exportfdf',
'exportstring',
'extract',
'extractone',
'fieldnames',
'fieldtype',
'fieldvalue',
'file',
'find',
'findindex',
'findnamespace',
'findnamespacebyhref',
'findpattern',
'findposition',
'first',
'firstchild',
'fliph',
'flipv',
'flush',
'foldcase',
'foreach',
'format',
'forward',
'freebusies',
'freezetype',
'freezevalue',
'from',
'fulltype',
'generatechecksum',
'get',
'getabswidth',
'getalignment',
'getattribute',
'getattributenamespace',
'getbarheight',
'getbarmultiplier',
'getbarwidth',
'getbaseline',
'getbordercolor',
'getborderwidth',
'getcode',
'getcolor',
'getcolumncount',
'getencoding',
'getface',
'getfont',
'getformat',
'getfullfontname',
'getheaders',
'getmargins',
'getmethod',
'getnumericvalue',
'getpadding',
'getpagenumber',
'getparams',
'getproperty',
'getpsfontname',
'getrange',
'getrowcount',
'getsize',
'getspacing',
'getsupportedencodings',
'gettextalignment',
'gettextsize',
'gettype',
'gmt',
'groupcount',
'hasattribute',
'haschildren',
'hasvalue',
'header',
'headers',
'height',
'histogram',
'hosttonet16',
'hosttonet32',
'hour',
'id',
'ignorecase',
'import16bits',
'import32bits',
'import64bits',
'import8bits',
'importfdf',
'importstring',
'increment',
'input',
'insert',
'insertatcurrent',
'insertfirst',
'insertfrom',
'insertlast',
'insertpage',
'integer',
'intersection',
'invoke',
'isa',
'isalnum',
'isalpha',
'isbase',
'iscntrl',
'isdigit',
'isemptyelement',
'islower',
'isopen',
'isprint',
'isspace',
'istitle',
'istruetype',
'isualphabetic',
'isulowercase',
'isupper',
'isuuppercase',
'isuwhitespace',
'iswhitespace',
'iterator',
'javascript',
'join',
'journals',
'key',
'keys',
'last',
'lastchild',
'lasterror',
'left',
'length',
'line',
'listen',
'localaddress',
'localname',
'lock',
'lookupnamespace',
'lowercase',
'marker',
'matches',
'matchesstart',
'matchposition',
'matchstring',
'merge',
'millisecond',
'minute',
'mode',
'modulate',
'month',
'moveto',
'movetoattributenamespace',
'movetoelement',
'movetofirstattribute',
'movetonextattribute',
'name',
'namespaces',
'namespaceuri',
'nettohost16',
'nettohost32',
'newchild',
'next',
'nextsibling',
'nodetype',
'open',
'output',
'padleading',
'padtrailing',
'pagecount',
'pagesize',
'paraminfo',
'params',
'parent',
'path',
'pixel',
'position',
'prefix',
'previoussibling',
'properties',
'rawheaders',
'read',
'readattributevalue',
'readerror',
'readfrom',
'readline',
'readlock',
'readstring',
'readunlock',
'recipients',
'rect',
'refcount',
'referrals',
'remoteaddress',
'remove',
'removeall',
'removeattribute',
'removechild',
'removecurrent',
'removefirst',
'removelast',
'removeleading',
'removenamespace',
'removetrailing',
'render',
'replace',
'replaceall',
'replacefirst',
'replacepattern',
'replacewith',
'reserve',
'reset',
'resolutionh',
'resolutionv',
'response',
'results',
'retrieve',
'returntype',
'reverse',
'reverseiterator',
'right',
'rotate',
'run',
'save',
'scale',
'search',
'second',
'send',
'serialize',
'set',
'setalignment',
'setbarheight',
'setbarmultiplier',
'setbarwidth',
'setbaseline',
'setblocking',
'setbordercolor',
'setborderwidth',
'setbytes',
'setcode',
'setcolor',
'setcolorspace',
'setdatatype',
'setencoding',
'setface',
'setfieldvalue',
'setfont',
'setformat',
'setgeneratechecksum',
'setheight',
'setlassodata',
'setlinewidth',
'setmarker',
'setmode',
'setname',
'setpadding',
'setpagenumber',
'setpagerange',
'setposition',
'setproperty',
'setrange',
'setshowchecksum',
'setsize',
'setspacing',
'settemplate',
'settemplatestr',
'settextalignment',
'settextdata',
'settextsize',
'settype',
'setunderline',
'setwidth',
'setxmldata',
'sharpen',
'showchecksum',
'showcode39startstop',
'showeanguardbars',
'signal',
'signalall',
'size',
'smooth',
'sort',
'sortwith',
'split',
'standards',
'steal',
'subject',
'substring',
'subtract',
'swapbytes',
'textwidth',
'time',
'timezones',
'titlecase',
'to',
'todos',
'tolower',
'totitle',
'toupper',
'transform',
'trim',
'type',
'unescape',
'union',
'uniqueid',
'unlock',
'unserialize',
'up',
'uppercase',
'value',
'values',
'valuetype',
'wait',
'waskeyword',
'week',
'width',
'write',
'writelock',
'writeto',
'writeunlock',
'xmllang',
'xmlschematype',
'year',
)
}
| codeparrot/github-code-clean |
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""
__version__ = "$Revision: 70528 $"
# $Source$
version = "0.9.0"
__author__ = "Lars Gustäbel (lars@gustaebel.de)"
__date__ = "$Date: 2009-03-22 17:29:48 -0400 (Sun, 22 Mar 2009) $"
__cvsid__ = "$Id: tarfile.py 70528 2009-03-22 21:29:48Z lars.gustaebel $"
__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
#---------
# Imports
#---------
import sys
import os
import shutil
import stat
import errno
import time
import struct
import copy
import re
import operator
if sys.platform == 'mac':
# This module needs work for MacOS9, especially in the area of pathname
# handling. In many places it is assumed a simple substitution of / by the
# local os.path.sep is good enough to convert pathnames, but this does not
# work with the mac rooted:path:name versus :nonrooted:path:name syntax
raise ImportError, "tarfile does not work for platform==mac"
try:
import grp, pwd
except ImportError:
grp = pwd = None
# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = "\0"                 # the null character
BLOCKSIZE = 512            # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20  # length of records (tar writes in record units)
# NOTE(review): upstream tarfile.py uses "ustar  \0" (two spaces) for the GNU
# magic; the literal below may have lost a space in transit -- verify.
GNU_MAGIC = "ustar \0"     # magic gnu tar string
POSIX_MAGIC = "ustar\x0000"  # magic posix tar string (magic + version "00")
LENGTH_NAME = 100          # maximum length of a filename
LENGTH_LINK = 100          # maximum length of a linkname
LENGTH_PREFIX = 155        # maximum length of the prefix field
# One-character type flags stored in the header's typeflag field.
REGTYPE = "0"              # regular file
AREGTYPE = "\0"            # regular file (old V7 format)
LNKTYPE = "1"              # link (inside tarfile)
SYMTYPE = "2"              # symbolic link
CHRTYPE = "3"              # character special device
BLKTYPE = "4"              # block special device
DIRTYPE = "5"              # directory
FIFOTYPE = "6"             # fifo special device
CONTTYPE = "7"             # contiguous file
GNUTYPE_LONGNAME = "L"     # GNU tar longname
GNUTYPE_LONGLINK = "K"     # GNU tar longlink
GNUTYPE_SPARSE = "S"       # GNU tar sparse file
XHDTYPE = "x"              # POSIX.1-2001 extended header
XGLTYPE = "g"              # POSIX.1-2001 global header
SOLARIS_XHDTYPE = "X"      # Solaris extended header
# Archive format identifiers understood by TarFile/TarInfo.
USTAR_FORMAT = 0           # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1             # GNU tar format
PAX_FORMAT = 2             # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT
#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)
# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)
# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)
# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")
# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}
#---------------------------------------------------------
# Bits used in the mode field, values in octal.
# (Python 2 octal literals: leading 0.)
#---------------------------------------------------------
S_IFLNK = 0120000        # symbolic link
S_IFREG = 0100000        # regular file
S_IFBLK = 0060000        # block device
S_IFDIR = 0040000        # directory
S_IFCHR = 0020000        # character device
S_IFIFO = 0010000        # fifo
TSUID = 04000            # set UID on execution
TSGID = 02000            # set GID on execution
TSVTX = 01000            # reserved ("sticky" bit)
TUREAD = 0400            # read by owner
TUWRITE = 0200           # write by owner
TUEXEC = 0100            # execute/search by owner
TGREAD = 0040            # read by group
TGWRITE = 0020           # write by group
TGEXEC = 0010            # execute/search by group
TOREAD = 0004            # read by other
TOWRITE = 0002           # write by other
TOEXEC = 0001            # execute/search by other
#---------------------------------------------------------
# initialization
#---------------------------------------------------------
# Encoding used to translate unicode member names to filesystem names;
# fall back to the interpreter default when the platform reports none.
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = sys.getdefaultencoding()
#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length):
    """Return *s* as a fixed-width, NUL-padded string buffer of exactly
    *length* characters (strings longer than *length* are truncated).
    """
    clipped = s[:length]
    return clipped + NUL * (length - len(clipped))
def nts(s):
    """Return the portion of *s* up to (excluding) the first NUL byte;
    the whole string if it contains no NUL.
    """
    # split() with maxsplit 1 stops scanning at the first terminator.
    return s.split("\0", 1)[0]
def nti(s):
    """Convert a number field to a python number.

    Supports both encodings produced by itn(): the POSIX form
    (NUL-terminated octal digits) and the GNU binary form (a leading
    0200 / 0x80 marker byte followed by big-endian bytes).

    Raises HeaderError if the octal form contains invalid digits.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0200):
        # POSIX octal form; an empty field counts as zero.
        try:
            n = int(nts(s) or "0", 8)
        except ValueError:
            raise HeaderError("invalid header")
    else:
        # GNU binary form: accumulate the remaining bytes big-endian.
        n = 0L
        for i in xrange(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field of *digits* characters.

    Uses the POSIX octal encoding when the value fits; otherwise, for
    GNU_FORMAT archives, falls back to GNU tar's base-256 encoding.
    Raises ValueError when the value cannot be represented in the
    requested format/width.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = "%0*o" % (digits - 1, n) + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")
        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            # Reinterpret the signed value as unsigned (two's complement).
            n = struct.unpack("L", struct.pack("l", n))[0]
        s = ""
        # Prepend one byte per iteration, lowest byte first, which
        # yields a big-endian byte string.
        for i in xrange(digits - 1):
            s = chr(n & 0377) + s
            n >>= 8
        s = chr(0200) + s
    return s
def uts(s, encoding, errors):
    """Convert a unicode object to a string.

    The special *errors* value "utf-8" mimics the POSIX.1-2001
    -o invalid=UTF-8 option: characters that cannot be represented in
    *encoding* are replaced by their UTF-8 encoding.  Any other value
    is passed straight through to unicode.encode().
    """
    if errors != "utf-8":
        return s.encode(encoding, errors)
    try:
        # Fast path: the whole string is representable as-is.
        return s.encode(encoding, "strict")
    except UnicodeEncodeError:
        # Slow path: encode character by character, substituting the
        # UTF-8 representation for untranslatable characters.
        pieces = []
        for ch in s:
            try:
                pieces.append(ch.encode(encoding, "strict"))
            except UnicodeEncodeError:
                pieces.append(ch.encode("utf8"))
        return "".join(pieces)
def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
    characters except for the chksum field which is treated as if
    it was filled with spaces. According to the GNU tar sources,
    some tars (Sun and NeXT) calculate chksum with signed char,
    which will be different if there are chars in the buffer with
    the high bit set. So we calculate two checksums, unsigned and
    signed.
    """
    # The chksum field occupies bytes 148..155; 8 spaces sum to 256.
    head = buf[:148]
    tail = buf[156:512]
    unsigned = 256 + sum(struct.unpack("148B", head)) + sum(struct.unpack("356B", tail))
    signed = 256 + sum(struct.unpack("148b", head)) + sum(struct.unpack("356b", tail))
    return unsigned, signed
def copyfileobj(src, dst, length=None):
    """Copy *length* bytes from fileobj *src* to fileobj *dst*.

    If *length* is None the entire remaining content is copied.
    Raises IOError when *src* is exhausted before *length* bytes
    could be read.
    """
    if length == 0:
        return
    if length is None:
        # Unbounded copy: delegate to the standard library.
        shutil.copyfileobj(src, dst)
        return
    bufsize = 16 * 1024
    full_blocks, remainder = divmod(length, bufsize)
    # Copy whole buffer-sized blocks first ...
    while full_blocks > 0:
        buf = src.read(bufsize)
        if len(buf) < bufsize:
            raise IOError("end of file reached")
        dst.write(buf)
        full_blocks -= 1
    # ... then the trailing partial block, if any.
    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise IOError("end of file reached")
        dst.write(buf)
# Lookup table used by filemode(): one tuple of (bit-mask, character)
# alternatives per column of an ls-style mode string.  Within a column
# the first mask that is fully set in the mode wins; "-" otherwise.
filemode_table = (
    ((S_IFLNK, "l"),
     (S_IFREG, "-"),
     (S_IFBLK, "b"),
     (S_IFDIR, "d"),
     (S_IFCHR, "c"),
     (S_IFIFO, "p")),
    ((TUREAD, "r"),),
    ((TUWRITE, "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID, "S"),
     (TUEXEC, "x")),
    ((TGREAD, "r"),),
    ((TGWRITE, "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID, "S"),
     (TGEXEC, "x")),
    ((TOREAD, "r"),),
    ((TOWRITE, "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX, "T"),
     (TOEXEC, "x"))
)
def filemode(mode):
    """Convert a file's mode to a string of the form
    -rwxrwxrwx.
    Used by TarFile.list()
    """
    def pick(alternatives):
        # First fully-set mask in the column wins; "-" if none match.
        for mask, char in alternatives:
            if mode & mask == mask:
                return char
        return "-"
    return "".join(pick(column) for column in filemode_table)
# Archive member names always use forward slashes; on platforms whose
# path separator differs, translate it after normalizing.
if os.sep != "/":
    def normpath(path):
        return os.path.normpath(path).replace(os.sep, "/")
else:
    normpath = os.path.normpath
class TarError(Exception):
    """Base exception for all tarfile errors."""
class ExtractError(TarError):
    """General exception for extract errors."""
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods."""
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
class HeaderError(TarError):
    """Exception for invalid headers."""
#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
"""Low-level file object. Supports reading and writing.
It is used instead of a regular file object for streaming
access.
"""
def __init__(self, name, mode):
mode = {
"r": os.O_RDONLY,
"w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
}[mode]
if hasattr(os, "O_BINARY"):
mode |= os.O_BINARY
self.fd = os.open(name, mode)
def close(self):
os.close(self.fd)
def read(self, size):
return os.read(self.fd, size)
def write(self, s):
os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.
    _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.

        name     -- archive name (may be empty)
        mode     -- "r" for reading, "w" for writing
        comptype -- "tar", "gz", "bz2", or "*" for auto-detection on read
        fileobj  -- external stream-like object, or None to open `name'
        bufsize  -- block size for raw reads/writes on the stream
        """
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            # We opened the file ourselves, so close() must close it too.
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = ""       # raw (compressed) byte buffer
        self.pos = 0L       # logical (uncompressed) stream position
        self.closed = False

        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            # Running CRC of the uncompressed data, needed for the
            # gzip trailer on write.
            self.crc = zlib.crc32("") & 0xffffffffL
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = ""      # decompressed byte buffer
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        # hasattr() guard: __init__ may have raised before self.closed
        # was assigned.
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        # Raw deflate stream (negative wbits); the gzip header and
        # trailer are written by hand below and in close().
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", long(time.time()))
        # gzip header: magic, deflate method, FNAME flag set, mtime,
        # extra flags, OS byte.
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # NUL-terminated original file name field (FNAME).
        self.__write(self.name + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.comptype != "tar":
            # Flush whatever the compressor is still holding.
            self.buf += self.cmp.flush()

        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))

        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != "\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        # Skip mtime (4 bytes), XFL and OS byte.
        self.__read(6)

        if flag & 4:
            # FEXTRA: skip the extra field.
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            # FNAME: skip the zero-terminated file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: skip the zero-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: skip the header CRC.
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            # Skip forward by reading and discarding data.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
        If size is not defined, return all bytes of the stream
        up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            # No compression layer: read straight from the raw buffer.
            return self.__read(size)

        # Decompress raw blocks until at least size bytes are available.
        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
class _StreamProxy(object):
"""Small proxy class that enables transparent compression
detection for the Stream interface (mode 'r|*').
"""
def __init__(self, fileobj):
self.fileobj = fileobj
self.buf = self.fileobj.read(BLOCKSIZE)
def read(self, size):
self.read = self.fileobj.read
return self.buf
def getcomptype(self):
if self.buf.startswith("\037\213\010"):
return "gz"
if self.buf.startswith("BZh91"):
return "bz2"
return "tar"
def close(self):
self.fileobj.close()
# class StreamProxy
class _BZ2Proxy(object):
    """Small proxy class that enables external file object
    support for "r:bz2" and "w:bz2" modes. This is actually
    a workaround for a limitation in bz2 module's BZ2File
    class which (unlike gzip.GzipFile) has no support for
    a file object argument.
    """

    # Number of raw bytes fetched from the underlying file object
    # per read.
    blocksize = 16 * 1024

    def __init__(self, fileobj, mode):
        self.fileobj = fileobj
        self.mode = mode    # "r" or "w"
        self.name = getattr(self.fileobj, "name", None)
        self.init()

    def init(self):
        """(Re)create the (de)compressor state and rewind the stream."""
        import bz2
        self.pos = 0        # uncompressed stream position
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            self.buf = ""   # decompressed look-ahead buffer
        else:
            self.bz2obj = bz2.BZ2Compressor()

    def read(self, size):
        """Return up to size decompressed bytes."""
        b = [self.buf]
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            try:
                data = self.bz2obj.decompress(raw)
            except EOFError:
                # Data past the end of the bzip2 stream; stop reading.
                break
            b.append(data)
            x += len(data)
        self.buf = "".join(b)

        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf

    def seek(self, pos):
        """Seek to pos.  Seeking backwards restarts decompression from
        the beginning of the file and reads forward to pos."""
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)

    def tell(self):
        """Return the current uncompressed position."""
        return self.pos

    def write(self, data):
        """Compress data and write it to the underlying file object."""
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)

    def close(self):
        """Flush remaining compressed data in write mode."""
        if self.mode == "w":
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
    provides a part of its data as an individual file
    object.
    """

    def __init__(self, fileobj, offset, size, sparse=None):
        self.fileobj = fileobj
        self.offset = offset        # start of the member's data in fileobj
        self.size = size            # logical size of the member
        self.sparse = sparse        # sparse section map, or None
        self.position = 0           # logical read position within the member

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position):
        """Seek to a position in the file.
        """
        self.position = position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            # Never read past the end of the member.
            size = min(size, self.size - self.position)

        if self.sparse is None:
            return self.readnormal(size)
        else:
            return self.readsparse(size)

    def readnormal(self, size):
        """Read operation for regular files.
        """
        self.fileobj.seek(self.offset + self.position)
        self.position += size
        return self.fileobj.read(size)

    def readsparse(self, size):
        """Read operation for sparse files.
        """
        data = []
        while size > 0:
            buf = self.readsparsesection(size)
            if not buf:
                break
            size -= len(buf)
            data.append(buf)
        return "".join(data)

    def readsparsesection(self, size):
        """Read a single section of a sparse file.
        """
        section = self.sparse.find(self.position)

        if section is None:
            return ""

        # Clamp the read to the end of the current section.
        size = min(size, section.offset + section.size - self.position)

        if isinstance(section, _data):
            # Data section: read the stored bytes from the archive.
            realpos = section.realpos + self.position - section.offset
            self.fileobj.seek(self.offset + realpos)
            self.position += size
            return self.fileobj.read(size)
        else:
            # Hole: synthesize zero bytes.
            self.position += size
            return NUL * size
#class _FileInFile
class ExFileObject(object):
    """File-like object for reading an archive member.
    Is returned by TarFile.extractfile().
    """
    # Chunk size used by readline() when scanning for a newline.
    blocksize = 1024

    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   getattr(tarinfo, "sparse", None))
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size
        self.position = 0
        self.buffer = ""    # read-ahead buffer used by readline()

    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
        present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        buf = ""
        if self.buffer:
            # Serve data from the read-ahead buffer first.
            if size is None:
                buf = self.buffer
                self.buffer = ""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]

        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))

        self.position += len(buf)
        return buf

    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
        and non-negative, return a string with at most that
        size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if "\n" in self.buffer:
            pos = self.buffer.find("\n") + 1
        else:
            # Fill the buffer until a newline (or EOF) shows up.
            buffers = [self.buffer]
            while True:
                buf = self.fileobj.read(self.blocksize)
                buffers.append(buf)
                if not buf or "\n" in buf:
                    self.buffer = "".join(buffers)
                    pos = self.buffer.find("\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break

        if size != -1:
            pos = min(size, pos)

        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf

    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        return self.position

    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")

        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")

        # Drop the read-ahead buffer; it no longer matches the position.
        self.buffer = ""
        self.fileobj.seek(self.position)

    def close(self):
        """Close the file object.
        """
        self.closed = True

    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject
#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
    archive member given by a tar header block.
    TarInfo objects are returned by TarFile.getmember(),
    TarFile.getmembers() and TarFile.gettarinfo() and are
    usually created internally.
    """

    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
        of the member.
        """
        self.name = name        # member name
        self.mode = 0644        # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = "root"     # user name
        self.gname = "root"     # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number

        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here

        self.pax_headers = {}   # pax header information

    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name

    def _setpath(self, name):
        self.name = name

    path = property(_getpath, _setpath)

    def _getlinkpath(self):
        return self.linkname

    def _setlinkpath(self, linkname):
        self.linkname = linkname

    linkpath = property(_getlinkpath, _setlinkpath)

    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))

    def get_info(self, encoding, errors):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name": normpath(self.name),
            "mode": self.mode & 07777,
            "uid": self.uid,
            "gid": self.gid,
            "size": self.size,
            "mtime": self.mtime,
            "chksum": self.chksum,
            "type": self.type,
            "linkname": normpath(self.linkname) if self.linkname else "",
            "uname": self.uname,
            "gname": self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }

        # Directories always carry a trailing slash in the archive.
        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"

        # Encode unicode fields to 8-bit strings for the header.
        for key in ("name", "linkname", "uname", "gname"):
            if type(info[key]) is unicode:
                info[key] = info[key].encode(encoding, errors)

        return info

    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info(encoding, errors)

        if format == USTAR_FORMAT:
            return self.create_ustar_header(info)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding, errors)
        else:
            raise ValueError("invalid format")

    def create_ustar_header(self, info):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC

        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")

        if len(info["name"]) > LENGTH_NAME:
            # Long names are split into a prefix and a name field.
            info["prefix"], info["name"] = self._posix_split_name(info["name"])

        return self._create_header(info, USTAR_FORMAT)

    def create_gnu_header(self, info):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC

        buf = ""
        # Too-long names/linknames get preceding longname/longlink members.
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)

        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)

        return buf + self._create_header(info, GNU_FORMAT)

    def create_pax_header(self, info, encoding, errors):
        """Return the object as a ustar header block. If it cannot be
        represented this way, prepend a pax extended header sequence
        with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()

        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):

            if hname in pax_headers:
                # The pax header has priority.
                continue

            val = info[name].decode(encoding, errors)

            # Try to encode the string as ASCII.
            try:
                val.encode("ascii")
            except UnicodeEncodeError:
                pax_headers[hname] = val
                continue

            if len(info[name]) > length:
                pax_headers[hname] = val

        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue

            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = unicode(val)
                info[name] = 0

        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers)
        else:
            buf = ""

        return buf + self._create_header(info, USTAR_FORMAT)

    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)

    def _posix_split_name(self, name):
        """Split a name longer than 100 chars into a prefix
        and a name part.
        """
        # Split at the last "/" that still fits into the prefix field.
        prefix = name[:LENGTH_PREFIX + 1]
        while prefix and prefix[-1] != "/":
            prefix = prefix[:-1]

        name = name[len(prefix):]
        prefix = prefix[:-1]

        if not prefix or len(name) > LENGTH_NAME:
            raise ValueError("name is too long")
        return prefix, name

    @staticmethod
    def _create_header(info, format):
        """Return a header block. info is a dictionary with file
        information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100),
            itn(info.get("mode", 0) & 07777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            "        ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100),
            stn(info.get("magic", POSIX_MAGIC), 8),
            stn(info.get("uname", "root"), 32),
            stn(info.get("gname", "root"), 32),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155)
        ]

        buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
        # Patch the checksum into the header (field at offset 148,
        # i.e. 364 bytes from the end of the 512-byte block).
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
        return buf

    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
        up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload

    @classmethod
    def _create_gnu_long_header(cls, name, type):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
        for name.
        """
        name += NUL

        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC

        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
                cls._create_payload(name)

    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
        """Return a POSIX.1-2001 extended or global header sequence
        that contains a list of keyword, value pairs. The values
        must be unicode objects.
        """
        records = []
        for keyword, value in pax_headers.iteritems():
            keyword = keyword.encode("utf8")
            value = value.encode("utf8")
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            n = p = 0
            # The record length includes its own decimal representation,
            # so iterate until the length value is self-consistent.
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records.append("%d %s=%s\n" % (p, keyword, value))

        records = "".join(records)

        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC

        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
                cls._create_payload(records)

    @classmethod
    def frombuf(cls, buf):
        """Construct a TarInfo object from a 512 byte string buffer.
        """
        if len(buf) != BLOCKSIZE:
            raise HeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise HeaderError("empty header")

        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise HeaderError("bad checksum")

        obj = cls()
        obj.buf = buf
        obj.name = nts(buf[0:100])
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257])
        obj.uname = nts(buf[265:297])
        obj.gname = nts(buf[297:329])
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500])

        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE

        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")

        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj

    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
        tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        if not buf:
            return
        obj = cls.frombuf(buf)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)

    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following
    # operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
        the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)

    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
        will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset

        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)

        return self

    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
        or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))

        # Fetch the next header and process it.
        next = self.fromtarfile(tarfile)
        if next is None:
            raise HeaderError("missing subsequent header")

        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf)

        return next

    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        buf = self.buf
        sp = _ringbuffer()
        pos = 386           # offset of the sparse structs in the header
        lastpos = 0L
        realpos = 0L
        # There are 4 possible sparse structs in the
        # first header.
        for i in xrange(4):
            try:
                offset = nti(buf[pos:pos + 12])
                numbytes = nti(buf[pos + 12:pos + 24])
            except ValueError:
                break
            if offset > lastpos:
                sp.append(_hole(lastpos, offset - lastpos))
            sp.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
            pos += 24

        isextended = ord(buf[482])
        origsize = nti(buf[483:495])

        # If the isextended flag is given,
        # there are extra headers to process.
        while isextended == 1:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            for i in xrange(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset > lastpos:
                    sp.append(_hole(lastpos, offset - lastpos))
                sp.append(_data(offset, numbytes, realpos))
                realpos += numbytes
                lastpos = offset + numbytes
                pos += 24
            isextended = ord(buf[504])

        # Trailing hole up to the original (logical) size.
        if lastpos < origsize:
            sp.append(_hole(lastpos, origsize - lastpos))

        self.sparse = sp

        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize

        return self

    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2001.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))

        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()

        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(r"(\d+) ([^=]+)=", re.U)
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break
            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]

            keyword = keyword.decode("utf8")
            value = value.decode("utf8")

            pax_headers[keyword] = value
            pos += length

        # Fetch the next header.
        next = self.fromtarfile(tarfile)

        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            if next is None:
                raise HeaderError("missing subsequent header")

            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset

            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset

        return next

    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
        pax extended or global header.
        """
        for keyword, value in pax_headers.iteritems():
            if keyword not in PAX_FIELDS:
                continue

            if keyword == "path":
                value = value.rstrip("/")

            if keyword in PAX_NUMBER_FIELDS:
                try:
                    value = PAX_NUMBER_FIELDS[keyword](value)
                except ValueError:
                    value = 0
            else:
                value = uts(value, encoding, errors)

            setattr(self, keyword, value)

        self.pax_headers = pax_headers.copy()

    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
        e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE

    # Member-type predicates.
    def isreg(self):
        return self.type in REGULAR_TYPES
    def isfile(self):
        return self.isreg()
    def isdir(self):
        return self.type == DIRTYPE
    def issym(self):
        return self.type == SYMTYPE
    def islnk(self):
        return self.type == LNKTYPE
    def ischr(self):
        return self.type == CHRTYPE
    def isblk(self):
        return self.type == BLKTYPE
    def isfifo(self):
        return self.type == FIFOTYPE
    def issparse(self):
        return self.type == GNUTYPE_SPARSE
    def isdev(self):
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
class TarFile(object):
"""The TarFile Class provides an interface to tar archives.
"""
debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
dereference = False # If true, add content of linked file to the
# tar file, else the link.
ignore_zeros = False # If true, skips empty or invalid blocks and
# continues processing.
errorlevel = 0 # If 0, fatal errors only appear in debug
# messages (if debug >= 0). If > 0, errors
# are passed to the caller as exceptions.
format = DEFAULT_FORMAT # The format to use when creating an archive.
encoding = ENCODING # Encoding for 8-bit character strings.
errors = None # Error handler for unicode conversion.
tarinfo = TarInfo # The default TarInfo class to use.
fileobject = ExFileObject # The default ExFileObject class to use.
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors=None, pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
        read from an existing archive, 'a' to append data to an existing
        file or 'w' to create a new file overwriting an existing one. `mode'
        defaults to 'r'.
        If `fileobj' is given, it is used for reading or writing data. If it
        can be determined, `mode' is overridden by `fileobj's mode.
        `fileobj' is not closed, when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        # Low-level binary mode for the underlying file object.
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]

        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj

        # Init attributes: explicit arguments override class defaults.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding

        if errors is not None:
            self.errors = errors
        elif mode == "r":
            self.errors = "utf-8"
        else:
            self.errors = "strict"

        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}

        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel

        # Init datastructures.
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added

        if self.mode == "r":
            self.firstmember = None
            self.firstmember = self.next()

        if self.mode == "a":
            # Move to the end of the archive,
            # before the first empty block.
            self.firstmember = None
            while True:
                if self.next() is None:
                    if self.offset > 0:
                        self.fileobj.seek(- BLOCKSIZE, 1)
                    break

        if self.mode in "aw":
            self._loaded = True

            if self.pax_headers:
                # Write the pax global header right away.
                buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                self.fileobj.write(buf)
                self.offset += len(buf)
    def _getposix(self):
        # True when plain POSIX (ustar) headers are being written.
        return self.format == USTAR_FORMAT

    def _setposix(self, value):
        import warnings
        warnings.warn("use the format attribute instead", DeprecationWarning)
        if value:
            self.format = USTAR_FORMAT
        else:
            self.format = GNU_FORMAT

    # Deprecated boolean alias for `format', kept for backwards
    # compatibility.
    posix = property(_getposix, _setposix)
#--------------------------------------------------------------------------
# Below are the classmethods which act as alternate constructors to the
# TarFile class. The open() method is the only one that is needed for
# public use; it is the "super"-constructor and is able to select an
# adequate "sub"-constructor for a particular compression using the mapping
# from OPEN_METH.
#
# This concept allows one to subclass TarFile without losing the comfort of
# the super-constructor. A sub-constructor is registered and made available
# by adding it to the mapping in OPEN_METH.
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
        an appropriate TarFile class.

        mode:
        'r' or 'r:*' open for reading with transparent compression
        'r:'         open for reading exclusively uncompressed
        'r:gz'       open for reading with gzip compression
        'r:bz2'      open for reading with bzip2 compression
        'a' or 'a:'  open for appending, creating the file if necessary
        'w' or 'w:'  open for writing without compression
        'w:gz'       open for writing with gzip compression
        'w:bz2'      open for writing with bzip2 compression

        'r|*'        open a stream of tar blocks with transparent compression
        'r|'         open an uncompressed stream of tar blocks for reading
        'r|gz'       open a gzip compressed stream of tar blocks
        'r|bz2'      open a bzip2 compressed stream of tar blocks
        'w|'         open an uncompressed stream for writing
        'w|gz'       open a gzip compressed stream for writing
        'w|bz2'      open a bzip2 compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError("nothing to open")

        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError), e:
                    # This opener failed; rewind and try the next one.
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")

        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)

        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")

            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize),
                    **kwargs)
            # The _Stream belongs to the TarFile and must be closed
            # together with it.
            t._extfileobj = False
            return t

        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)

        raise ValueError("undiscernible mode")
    @classmethod
    def taropen(cls, name, mode="r", fileobj=None, **kwargs):
        """Open uncompressed tar archive name for reading or writing.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        return cls(name, mode, fileobj, **kwargs)
    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")

        try:
            import gzip
            # Also probe for the GzipFile attribute in case a stripped
            # gzip module is present.
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")

        if fileobj is None:
            fileobj = bltn_open(name, mode + "b")

        try:
            t = cls.taropen(name, mode,
                gzip.GzipFile(name, mode, compresslevel, fileobj),
                **kwargs)
        except IOError:
            raise ReadError("not a gzip file")
        t._extfileobj = False
        return t
@classmethod
def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
    """Open bzip2 compressed tar archive name for reading or writing.
    Appending is not allowed.
    """
    if len(mode) > 1 or mode not in "rw":
        raise ValueError("mode must be 'r' or 'w'.")
    try:
        import bz2
    except ImportError:
        raise CompressionError("bz2 module is not available")
    if fileobj is not None:
        # Caller-supplied object: wrap it, never close it ourselves.
        fileobj = _BZ2Proxy(fileobj, mode)
        extfileobj = True
    else:
        fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
        extfileobj = False
    try:
        t = cls.taropen(name, mode, fileobj, **kwargs)
    except IOError:
        # Close the BZ2File we created; the original code leaked it here.
        if not extfileobj:
            fileobj.close()
        raise ReadError("not a bzip2 file")
    except:
        if not extfileobj:
            fileobj.close()
        raise
    t._extfileobj = False
    return t
# All *open() methods are registered here.
# Maps the compression suffix used in mode strings ("r:gz", "w|bz2", ...)
# to the name of the classmethod handling it; open() dispatches through
# this table via getattr().
OPEN_METH = {
    "tar": "taropen",   # uncompressed tar
    "gz":  "gzopen",    # gzip compressed tar
    "bz2": "bz2open"    # bzip2 compressed tar
}
#--------------------------------------------------------------------------
# The public methods which TarFile provides:
def close(self):
    """Close the TarFile. In write-mode, two finishing zero blocks are
    appended to the archive.
    """
    if self.closed:
        return
    if self.mode in "aw":
        # End-of-archive marker: two consecutive zero-filled blocks.
        self.fileobj.write(NUL * (BLOCKSIZE * 2))
        self.offset += (BLOCKSIZE * 2)
        # fill up the end with zero-blocks
        # (like option -b20 for tar does)
        blocks, remainder = divmod(self.offset, RECORDSIZE)
        if remainder > 0:
            self.fileobj.write(NUL * (RECORDSIZE - remainder))
    # Only close the underlying file if we opened it ourselves.
    if not self._extfileobj:
        self.fileobj.close()
    self.closed = True
def getmember(self, name):
    """Return a TarInfo object for member `name'. If `name' can not be
    found in the archive, KeyError is raised. If a member occurs more
    than once in the archive, its last occurence is assumed to be the
    most up-to-date version.
    """
    member = self._getmember(name)
    if member is not None:
        return member
    raise KeyError("filename %r not found" % name)
def getmembers(self):
    """Return the members of the archive as a list of TarInfo objects. The
    list has the same order as the members in the archive.
    """
    self._check()
    # Obtaining the complete list requires scanning the whole archive
    # once; afterwards the cached list is returned directly.
    if self._loaded:
        return self.members
    self._load()
    return self.members
def getnames(self):
    """Return the members of the archive as a list of their names. It has
    the same order as the list returned by getmembers().
    """
    names = []
    for member in self.getmembers():
        names.append(member.name)
    return names
def gettarinfo(self, name=None, arcname=None, fileobj=None):
    """Create a TarInfo object for either the file `name' or the file
    object `fileobj' (using os.fstat on its file descriptor). You can
    modify some of the TarInfo's attributes before you add it using
    addfile(). If given, `arcname' specifies an alternative name for the
    file in the archive.

    Returns None for file types that cannot be archived (e.g. sockets).
    """
    self._check("aw")
    # When fileobj is given, replace name by
    # fileobj's real name.
    if fileobj is not None:
        name = fileobj.name
    # Building the name of the member in the archive.
    # Backward slashes are converted to forward slashes,
    # Absolute paths are turned to relative paths.
    if arcname is None:
        arcname = name
    arcname = normpath(arcname)
    drv, arcname = os.path.splitdrive(arcname)
    # Strip leading slashes so the stored name is always relative.
    while arcname[0:1] == "/":
        arcname = arcname[1:]
    # Now, fill the TarInfo object with
    # information specific for the file.
    tarinfo = self.tarinfo()
    tarinfo.tarfile = self
    # Use os.stat or os.lstat, depending on platform
    # and if symlinks shall be resolved.
    if fileobj is None:
        if hasattr(os, "lstat") and not self.dereference:
            statres = os.lstat(name)
        else:
            statres = os.stat(name)
    else:
        statres = os.fstat(fileobj.fileno())
    linkname = ""
    stmd = statres.st_mode
    if stat.S_ISREG(stmd):
        inode = (statres.st_ino, statres.st_dev)
        if not self.dereference and statres.st_nlink > 1 and \
                inode in self.inodes and arcname != self.inodes[inode]:
            # Is it a hardlink to an already
            # archived file?
            type = LNKTYPE
            linkname = self.inodes[inode]
        else:
            # The inode is added only if its valid.
            # For win32 it is always 0.
            type = REGTYPE
            if inode[0]:
                self.inodes[inode] = arcname
    elif stat.S_ISDIR(stmd):
        type = DIRTYPE
    elif stat.S_ISFIFO(stmd):
        type = FIFOTYPE
    elif stat.S_ISLNK(stmd):
        type = SYMTYPE
        linkname = os.readlink(name)
    elif stat.S_ISCHR(stmd):
        type = CHRTYPE
    elif stat.S_ISBLK(stmd):
        type = BLKTYPE
    else:
        # Unsupported file type (e.g. socket): caller must skip it.
        return None
    # Fill the TarInfo object with all
    # information we can get.
    tarinfo.name = arcname
    tarinfo.mode = stmd
    tarinfo.uid = statres.st_uid
    tarinfo.gid = statres.st_gid
    if stat.S_ISREG(stmd):
        tarinfo.size = statres.st_size
    else:
        # Non-regular files carry no data payload.
        tarinfo.size = 0L
    tarinfo.mtime = statres.st_mtime
    tarinfo.type = type
    tarinfo.linkname = linkname
    # Resolve numeric ids to symbolic names where the system databases
    # are available; unknown ids simply stay numeric.
    if pwd:
        try:
            tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
        except KeyError:
            pass
    if grp:
        try:
            tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
        except KeyError:
            pass
    if type in (CHRTYPE, BLKTYPE):
        if hasattr(os, "major") and hasattr(os, "minor"):
            tarinfo.devmajor = os.major(statres.st_rdev)
            tarinfo.devminor = os.minor(statres.st_rdev)
    return tarinfo
def list(self, verbose=True):
    """Print a table of contents to sys.stdout. If `verbose' is False, only
    the names of the members are printed. If it is True, an `ls -l'-like
    output is produced.
    """
    self._check()
    for tarinfo in self:
        if verbose:
            # Permissions, owner/group, size (or device numbers), mtime.
            print filemode(tarinfo.mode),
            print "%s/%s" % (tarinfo.uname or tarinfo.uid,
                             tarinfo.gname or tarinfo.gid),
            if tarinfo.ischr() or tarinfo.isblk():
                print "%10s" % ("%d,%d" \
                                % (tarinfo.devmajor, tarinfo.devminor)),
            else:
                print "%10d" % tarinfo.size,
            print "%d-%02d-%02d %02d:%02d:%02d" \
                  % time.localtime(tarinfo.mtime)[:6],
        # Directories get a trailing slash, like ls.
        print tarinfo.name + ("/" if tarinfo.isdir() else ""),
        if verbose:
            if tarinfo.issym():
                print "->", tarinfo.linkname,
            if tarinfo.islnk():
                print "link to", tarinfo.linkname,
            print
def add(self, name, arcname=None, recursive=True, exclude=None):
    """Add the file `name' to the archive. `name' may be any type of file
    (directory, fifo, symbolic link, etc.). If given, `arcname'
    specifies an alternative name for the file in the archive.
    Directories are added recursively by default. This can be avoided by
    setting `recursive' to False. `exclude' is a function that should
    return True for each filename to be excluded.
    """
    self._check("aw")
    if arcname is None:
        arcname = name
    # Exclude pathnames.
    if exclude is not None and exclude(name):
        self._dbg(2, "tarfile: Excluded %r" % name)
        return
    # Skip if somebody tries to archive the archive...
    if self.name is not None and os.path.abspath(name) == self.name:
        self._dbg(2, "tarfile: Skipped %r" % name)
        return
    # Special case: The user wants to add the current
    # working directory.
    if name == ".":
        if recursive:
            if arcname == ".":
                arcname = ""
            for f in os.listdir(name):
                self.add(f, os.path.join(arcname, f), recursive, exclude)
        return
    self._dbg(1, name)
    # Create a TarInfo object from the file.
    tarinfo = self.gettarinfo(name, arcname)
    if tarinfo is None:
        self._dbg(1, "tarfile: Unsupported type %r" % name)
        return
    # Append the tar header and data to the archive.
    if tarinfo.isreg():
        # Regular file: header plus file contents.
        f = bltn_open(name, "rb")
        self.addfile(tarinfo, f)
        f.close()
    elif tarinfo.isdir():
        self.addfile(tarinfo)
        if recursive:
            # Recurse into the directory's entries.
            for f in os.listdir(name):
                self.add(os.path.join(name, f), os.path.join(arcname, f), recursive, exclude)
    else:
        # Links, fifos, devices: header only, no data payload.
        self.addfile(tarinfo)
def addfile(self, tarinfo, fileobj=None):
    """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
    given, tarinfo.size bytes are read from it and added to the archive.
    You can create TarInfo objects using gettarinfo().
    On Windows platforms, `fileobj' should always be opened with mode
    'rb' to avoid irritation about the file size.
    """
    self._check("aw")
    # Work on a copy so the caller's TarInfo is not mutated.
    tarinfo = copy.copy(tarinfo)
    buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
    self.fileobj.write(buf)
    self.offset += len(buf)
    # If there's data to follow, append it.
    if fileobj is not None:
        copyfileobj(fileobj, self.fileobj, tarinfo.size)
        blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
        if remainder > 0:
            # Pad the final partial block up to a full BLOCKSIZE.
            self.fileobj.write(NUL * (BLOCKSIZE - remainder))
            blocks += 1
        self.offset += blocks * BLOCKSIZE
    self.members.append(tarinfo)
def extractall(self, path=".", members=None):
    """Extract all members from the archive to the current working
    directory and set owner, modification time and permissions on
    directories afterwards. `path' specifies a different directory
    to extract to. `members' is optional and must be a subset of the
    list returned by getmembers().

    NOTE(review): member names are used as-is; an archive containing
    "../" components can write outside `path' (CVE-2007-4559). Only
    extract archives from trusted sources.
    """
    directories = []
    if members is None:
        members = self
    for tarinfo in members:
        if tarinfo.isdir():
            # Extract directories with a safe mode.
            directories.append(tarinfo)
            tarinfo = copy.copy(tarinfo)
            tarinfo.mode = 0700
        self.extract(tarinfo, path)
    # Reverse sort directories.
    # Deepest-first, so fixing a parent's mode/mtime happens after all
    # of its children have been written.
    directories.sort(key=operator.attrgetter('name'))
    directories.reverse()
    # Set correct owner, mtime and filemode on directories.
    for tarinfo in directories:
        dirpath = os.path.join(path, tarinfo.name)
        try:
            self.chown(tarinfo, dirpath)
            self.utime(tarinfo, dirpath)
            self.chmod(tarinfo, dirpath)
        except ExtractError, e:
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
def extract(self, member, path=""):
    """Extract a member from the archive to the current working directory,
    using its full name. Its file information is extracted as accurately
    as possible. `member' may be a filename or a TarInfo object. You can
    specify a different directory using `path'.
    """
    self._check("r")
    if isinstance(member, basestring):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member
    # Prepare the link target for makelink().
    if tarinfo.islnk():
        tarinfo._link_target = os.path.join(path, tarinfo.linkname)
    try:
        self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
    except EnvironmentError, e:
        # OS-level failure: honour errorlevel (> 0 raises, else just log).
        if self.errorlevel > 0:
            raise
        else:
            if e.filename is None:
                self._dbg(1, "tarfile: %s" % e.strerror)
            else:
                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
    except ExtractError, e:
        # Non-fatal extraction problem: raised only for errorlevel > 1.
        if self.errorlevel > 1:
            raise
        else:
            self._dbg(1, "tarfile: %s" % e)
def extractfile(self, member):
    """Extract a member from the archive as a file object. `member' may be
    a filename or a TarInfo object. If `member' is a regular file, a
    file-like object is returned. If `member' is a link, a file-like
    object is constructed from the link's target. If `member' is none of
    the above, None is returned.
    The file-like object is read-only and provides the following
    methods: read(), readline(), readlines(), seek() and tell()
    """
    self._check("r")
    if isinstance(member, basestring):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member
    if tarinfo.isreg():
        return self.fileobject(self, tarinfo)
    elif tarinfo.type not in SUPPORTED_TYPES:
        # If a member's type is unknown, it is treated as a
        # regular file.
        return self.fileobject(self, tarinfo)
    elif tarinfo.islnk() or tarinfo.issym():
        if isinstance(self.fileobj, _Stream):
            # A small but ugly workaround for the case that someone tries
            # to extract a (sym)link as a file-object from a non-seekable
            # stream of tar blocks.
            raise StreamError("cannot extract (sym)link as file object")
        else:
            # A (sym)link's file object is its target's file object.
            return self.extractfile(self._getmember(tarinfo.linkname,
                                                    tarinfo))
    else:
        # If there's no data associated with the member (directory, chrdev,
        # blkdev, etc.), return None instead of a file object.
        return None
def _extract_member(self, tarinfo, targetpath):
    """Extract the TarInfo object tarinfo to a physical
    file called targetpath.

    Dispatches on the member type to the make*() methods below, then
    applies ownership, permissions and mtime.
    """
    # Fetch the TarInfo object for the given name
    # and build the destination pathname, replacing
    # forward slashes to platform specific separators.
    if targetpath[-1:] == "/":
        targetpath = targetpath[:-1]
    targetpath = os.path.normpath(targetpath)
    # Create all upper directories.
    upperdirs = os.path.dirname(targetpath)
    if upperdirs and not os.path.exists(upperdirs):
        # Create directories that are not part of the archive with
        # default permissions.
        os.makedirs(upperdirs)
    if tarinfo.islnk() or tarinfo.issym():
        self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
    else:
        self._dbg(1, tarinfo.name)
    if tarinfo.isreg():
        self.makefile(tarinfo, targetpath)
    elif tarinfo.isdir():
        self.makedir(tarinfo, targetpath)
    elif tarinfo.isfifo():
        self.makefifo(tarinfo, targetpath)
    elif tarinfo.ischr() or tarinfo.isblk():
        self.makedev(tarinfo, targetpath)
    elif tarinfo.islnk() or tarinfo.issym():
        self.makelink(tarinfo, targetpath)
    elif tarinfo.type not in SUPPORTED_TYPES:
        self.makeunknown(tarinfo, targetpath)
    else:
        self.makefile(tarinfo, targetpath)
    self.chown(tarinfo, targetpath)
    if not tarinfo.issym():
        # chmod/utime on a symlink would affect its target instead.
        self.chmod(tarinfo, targetpath)
        self.utime(tarinfo, targetpath)
#--------------------------------------------------------------------------
# Below are the different file methods. They are called via
# _extract_member() when extract() is called. They can be replaced in a
# subclass to implement other functionality.
def makedir(self, tarinfo, targetpath):
    """Make a directory called targetpath.
    """
    try:
        # Use a safe mode for the directory, the real mode is set
        # later in _extract_member().
        os.mkdir(targetpath, 0700)
    except EnvironmentError, e:
        # An already existing directory is fine; anything else is fatal.
        if e.errno != errno.EEXIST:
            raise
def makefile(self, tarinfo, targetpath):
    """Make a file called targetpath.

    Copies tarinfo's data from the archive into a newly created file.
    Both file objects are closed even when the copy fails (the original
    code leaked them if copyfileobj() raised, e.g. on a truncated
    archive or a full disk).
    """
    source = self.extractfile(tarinfo)
    try:
        target = bltn_open(targetpath, "wb")
        try:
            copyfileobj(source, target)
        finally:
            target.close()
    finally:
        source.close()
def makeunknown(self, tarinfo, targetpath):
    """Fall back to extracting a member of unknown type as a plain
    regular file at targetpath, and log a warning.
    """
    self.makefile(tarinfo, targetpath)
    msg = "tarfile: Unknown file type %r, extracted as regular file." % tarinfo.type
    self._dbg(1, msg)
def makefifo(self, tarinfo, targetpath):
    """Create a FIFO (named pipe) at targetpath."""
    # Guard-clause form: bail out first on platforms without mkfifo.
    if not hasattr(os, "mkfifo"):
        raise ExtractError("fifo not supported by system")
    os.mkfifo(targetpath)
def makedev(self, tarinfo, targetpath):
    """Make a character or block device called targetpath.

    Requires os.mknod/os.makedev (and typically root privileges).
    """
    if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
        raise ExtractError("special devices not supported by system")
    # Combine the member's permission bits with the device type flag.
    mode = tarinfo.mode
    if tarinfo.isblk():
        mode |= stat.S_IFBLK
    else:
        mode |= stat.S_IFCHR
    os.mknod(targetpath, mode,
             os.makedev(tarinfo.devmajor, tarinfo.devminor))
def makelink(self, tarinfo, targetpath):
    """Make a (symbolic) link called targetpath. If it cannot be created
    (platform limitation), we try to make a copy of the referenced file
    instead of a link.
    """
    linkpath = tarinfo.linkname
    try:
        if tarinfo.issym():
            os.symlink(linkpath, targetpath)
        else:
            # See extract().
            os.link(tarinfo._link_target, targetpath)
    except AttributeError:
        # Platform lacks os.symlink/os.link: fall back to copying the
        # link target's content instead.
        if tarinfo.issym():
            # Symlink targets are relative to the member's directory.
            linkpath = os.path.join(os.path.dirname(tarinfo.name),
                                    linkpath)
            linkpath = normpath(linkpath)
        try:
            # First choice: extract the target directly from the archive.
            self._extract_member(self.getmember(linkpath), targetpath)
        except (EnvironmentError, KeyError), e:
            # Last resort: copy an already extracted file from disk.
            linkpath = os.path.normpath(linkpath)
            try:
                shutil.copy2(linkpath, targetpath)
            except EnvironmentError, e:
                raise IOError("link could not be created")
def chown(self, tarinfo, targetpath):
    """Set owner of targetpath according to tarinfo.

    Only attempted when running as root; prefers symbolic user/group
    names, falling back to numeric ids, then to the current process ids.
    """
    if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
        # We have to be root to do so.
        try:
            g = grp.getgrnam(tarinfo.gname)[2]
        except KeyError:
            try:
                g = grp.getgrgid(tarinfo.gid)[2]
            except KeyError:
                g = os.getgid()
        try:
            u = pwd.getpwnam(tarinfo.uname)[2]
        except KeyError:
            try:
                u = pwd.getpwuid(tarinfo.uid)[2]
            except KeyError:
                u = os.getuid()
        try:
            if tarinfo.issym() and hasattr(os, "lchown"):
                # Change the link itself, not its target.
                os.lchown(targetpath, u, g)
            else:
                if sys.platform != "os2emx":
                    os.chown(targetpath, u, g)
        except EnvironmentError, e:
            raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
    """Set file permissions of targetpath according to tarinfo.
    """
    if hasattr(os, 'chmod'):
        try:
            os.chmod(targetpath, tarinfo.mode)
        except EnvironmentError, e:
            # Translate OS errors into tarfile's own ExtractError.
            raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
    """Set modification time of targetpath according to tarinfo.
    """
    # Silently skip on platforms without os.utime.
    if not hasattr(os, 'utime'):
        return
    try:
        # Access time is set to the archive's mtime as well.
        os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
    except EnvironmentError, e:
        raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
def next(self):
    """Return the next member of the archive as a TarInfo object, when
    TarFile is opened for reading. Return None if there is no more
    available.
    """
    self._check("ra")
    if self.firstmember is not None:
        # A member was already parsed ahead of time (e.g. by open());
        # hand it out exactly once.
        m = self.firstmember
        self.firstmember = None
        return m
    # Read the next block.
    self.fileobj.seek(self.offset)
    while True:
        try:
            tarinfo = self.tarinfo.fromtarfile(self)
            if tarinfo is None:
                return
            self.members.append(tarinfo)
        except HeaderError, e:
            if self.ignore_zeros:
                # Skip the bad block and keep scanning.
                self._dbg(2, "0x%X: %s" % (self.offset, e))
                self.offset += BLOCKSIZE
                continue
            else:
                # A corrupt very first header means this is not a tar
                # archive at all.
                if self.offset == 0:
                    raise ReadError(str(e))
                return None
        break
    return tarinfo
#--------------------------------------------------------------------------
# Little helper methods:
def _getmember(self, name, tarinfo=None):
    """Find an archive member by name from bottom to top.
    If tarinfo is given, it is used as the starting point.

    Returns the newest matching member, or None if there is no match.
    """
    # Ensure that all members have been loaded.
    members = self.getmembers()
    if tarinfo is None:
        end = len(members)
    else:
        end = members.index(tarinfo)
    # Walk backwards so the most recently added member wins.
    for i in xrange(end - 1, -1, -1):
        if name == members[i].name:
            return members[i]
def _load(self):
    """Scan through to the end of the archive so that every readable
    member ends up in self.members, then mark the archive as loaded.
    """
    # next() appends each parsed member to self.members and returns
    # None at end-of-archive.
    while self.next() is not None:
        pass
    self._loaded = True
def _check(self, mode=None):
    """Verify that the archive is still open and, when `mode' is given,
    that the current access mode is one of its characters.
    """
    if self.closed:
        raise IOError("%s is closed" % self.__class__.__name__)
    if mode is None:
        return
    if self.mode not in mode:
        raise IOError("bad operation for mode %r" % self.mode)
def __iter__(self):
    """Provide an iterator object.

    Once the archive has been fully scanned, iterate the cached member
    list; otherwise return a lazy TarIter that reads members on demand.
    """
    if self._loaded:
        return iter(self.members)
    else:
        return TarIter(self)
def _dbg(self, level, msg):
    """Write debugging output to sys.stderr.

    The message is shown only when `level' does not exceed the debug
    verbosity configured on this TarFile.
    """
    if level <= self.debug:
        print >> sys.stderr, msg
# class TarFile
class TarIter:
    """Iterator over a TarFile's members.

        for tarinfo in TarFile(...):
            suite...
    """
    def __init__(self, tarfile):
        """Construct a TarIter object.
        """
        self.tarfile = tarfile
        # Position in tarfile.members, used once the archive is loaded.
        self.index = 0
    def __iter__(self):
        """Return iterator object.
        """
        return self
    def next(self):
        """Return the next item using TarFile's next() method.
        When all members have been read, set TarFile as _loaded.
        """
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if not self.tarfile._loaded:
            # Still scanning: pull members from the archive one by one.
            tarinfo = self.tarfile.next()
            if not tarinfo:
                self.tarfile._loaded = True
                raise StopIteration
        else:
            # Fully loaded: serve from the cached members list.
            try:
                tarinfo = self.tarfile.members[self.index]
            except IndexError:
                raise StopIteration
        self.index += 1
        return tarinfo
# Helper classes for sparse file support
class _section:
    """Base class for _data and _hole.

    Represents the half-open interval [offset, offset + size) inside a
    sparse file; `in' tests membership of an absolute offset.
    """
    def __init__(self, offset, size):
        self.offset = offset
        self.size = size
    def __contains__(self, offset):
        return 0 <= offset - self.offset < self.size
class _data(_section):
    """Represent a data section in a sparse file.

    `realpos' is presumably the position of this section's payload inside
    the archive member -- TODO confirm against the sparse-read code.
    """
    def __init__(self, offset, size, realpos):
        _section.__init__(self, offset, size)
        self.realpos = realpos
class _hole(_section):
    """Represent a hole section in a sparse file.

    No state beyond the inherited offset/size is needed.
    """
    pass
class _ringbuffer(list):
    """Ringbuffer class which increases performance
    over a regular list.

    Remembers the index of the last hit so that (mostly sequential)
    lookups resume near the previous match instead of rescanning from
    the start.
    """
    def __init__(self):
        self.idx = 0
    def find(self, offset):
        pos = self.idx
        while offset not in self[pos]:
            pos += 1
            if pos == len(self):
                pos = 0
            if pos == self.idx:
                # Wrapped all the way around without a hit: End of File.
                return None
        self.idx = pos
        return self[pos]
#---------------------------------------------
# zipfile compatible TarFile class
#---------------------------------------------
# Compression constants mirroring zipfile's, so TarFileCompat can act as a
# drop-in replacement for zipfile.ZipFile.
TAR_PLAIN = 0           # zipfile.ZIP_STORED
TAR_GZIPPED = 8         # zipfile.ZIP_DEFLATED
class TarFileCompat:
    """TarFile class compatible with standard module zipfile's
    ZipFile class.

    Deprecated (removed in Python 3.0); emits a py3k warning on use.
    """
    def __init__(self, file, mode="r", compression=TAR_PLAIN):
        from warnings import warnpy3k
        warnpy3k("the TarFileCompat class has been removed in Python 3.0",
                 stacklevel=2)
        if compression == TAR_PLAIN:
            self.tarfile = TarFile.taropen(file, mode)
        elif compression == TAR_GZIPPED:
            self.tarfile = TarFile.gzopen(file, mode)
        else:
            raise ValueError("unknown compression constant")
        if mode[0:1] == "r":
            # Mirror zipfile's ZipInfo attribute names onto the members.
            members = self.tarfile.getmembers()
            for m in members:
                m.filename = m.name
                m.file_size = m.size
                m.date_time = time.gmtime(m.mtime)[:6]
    def namelist(self):
        return map(lambda m: m.name, self.infolist())
    def infolist(self):
        # Only regular-file-like member types are exposed, as in a zip.
        return filter(lambda m: m.type in REGULAR_TYPES,
                      self.tarfile.getmembers())
    def printdir(self):
        self.tarfile.list()
    def testzip(self):
        # Tar archives carry no per-member CRC; nothing to verify.
        return
    def getinfo(self, name):
        return self.tarfile.getmember(name)
    def read(self, name):
        return self.tarfile.extractfile(self.tarfile.getmember(name)).read()
    def write(self, filename, arcname=None, compress_type=None):
        # compress_type is accepted for API compatibility and ignored.
        self.tarfile.add(filename, arcname)
    def writestr(self, zinfo, bytes):
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        import calendar
        # Build a TarInfo from the zipfile-style ZipInfo object.
        tinfo = TarInfo(zinfo.filename)
        tinfo.size = len(bytes)
        tinfo.mtime = calendar.timegm(zinfo.date_time)
        self.tarfile.addfile(tinfo, StringIO(bytes))
    def close(self):
        self.tarfile.close()
#class TarFileCompat
#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
    are able to handle, else return False.
    """
    try:
        archive = open(name)
        archive.close()
    except TarError:
        return False
    return True
# Preserve the builtin open() before shadowing it below; internal code
# uses bltn_open for ordinary file access.
bltn_open = open
# The module-level open() is TarFile.open, i.e. tarfile.open(...).
open = TarFile.open
# --- dataset artifact ("codeparrot/github-code-clean"): boundary between the vendored tarfile.py above and the segwit P2P test below ---
#!/usr/bin/env python3
# Copyright (c) 2016 The nealcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
from test_framework.mininode import *
from test_framework.test_framework import nealcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, WITNESS_COMMITMENT_HEADER
from test_framework.key import CECKey, CPubKey
import time
import random
from binascii import hexlify
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
VB_PERIOD = 144
VB_ACTIVATION_THRESHOLD = 108
VB_TOP_BITS = 0x20000000
MAX_SIGOP_COST = 80000
# Calculate the virtual size of a witness block:
# (base + witness/4)
def get_virtual_size(witness_block):
    """Return the BIP141 virtual size (vsize) of witness_block.

    vsize = ceil(weight / 4), where weight = 3 * base_size + total_size.
    """
    stripped_size = len(witness_block.serialize())
    full_size = len(witness_block.serialize(with_witness=True))
    # Adding 3 before the division rounds the result up.
    return int((3 * stripped_size + full_size + 3) / 4)
# Note: we can reduce code by using SingleNodeConnCB (in master, not 0.12)
class TestNode(NodeConnCB):
    """P2P test peer: records the last message received of each relevant
    type and provides blocking helpers to synchronize with the node
    under test.
    """
    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1           # nonce used by sync_with_ping
        self.last_pong = msg_pong(0)
        self.sleep_time = 0.05          # polling interval for sync()
        self.getdataset = set()         # inv hashes the node requested
        self.last_reject = None
    def add_connection(self, conn):
        self.connection = conn
    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)
    def on_inv(self, conn, message):
        self.last_inv = message
    def on_block(self, conn, message):
        self.last_block = message.block
        self.last_block.calc_sha256()
    def on_getdata(self, conn, message):
        # Remember every hash the node has ever asked us for.
        for inv in message.inv:
            self.getdataset.add(inv.hash)
        self.last_getdata = message
    def on_getheaders(self, conn, message):
        self.last_getheaders = message
    def on_pong(self, conn, message):
        self.last_pong = message
    def on_reject(self, conn, message):
        self.last_reject = message
        #print (message)
    # Syncing helpers
    def sync(self, test_function, timeout=60):
        """Poll until test_function() is true or timeout seconds expire."""
        while timeout > 0:
            with mininode_lock:
                if test_function():
                    return
            time.sleep(self.sleep_time)
            timeout -= self.sleep_time
        raise AssertionError("Sync failed to complete")
    def sync_with_ping(self, timeout=60):
        # Round-trip a ping to ensure all prior messages were processed.
        self.send_message(msg_ping(nonce=self.ping_counter))
        test_function = lambda: self.last_pong.nonce == self.ping_counter
        self.sync(test_function, timeout)
        self.ping_counter += 1
        return
    def wait_for_block(self, blockhash, timeout=60):
        test_function = lambda: self.last_block != None and self.last_block.sha256 == blockhash
        self.sync(test_function, timeout)
        return
    def wait_for_getdata(self, timeout=60):
        test_function = lambda: self.last_getdata != None
        self.sync(test_function, timeout)
    def wait_for_getheaders(self, timeout=60):
        test_function = lambda: self.last_getheaders != None
        self.sync(test_function, timeout)
    def wait_for_inv(self, expected_inv, timeout=60):
        # NOTE(review): the condition uses '!=', so this returns as soon
        # as the last inv differs from expected_inv -- confirm intended.
        test_function = lambda: self.last_inv != expected_inv
        self.sync(test_function, timeout)
    def announce_tx_and_wait_for_getdata(self, tx, timeout=60):
        # Send an inv for tx and block until the node requests it.
        with mininode_lock:
            self.last_getdata = None
        self.send_message(msg_inv(inv=[CInv(1, tx.sha256)]))
        self.wait_for_getdata(timeout)
        return
    def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
        # Announce via a headers message or an inv, then wait for getdata.
        with mininode_lock:
            self.last_getdata = None
            self.last_getheaders = None
        msg = msg_headers()
        msg.headers = [ CBlockHeader(block) ]
        if use_header:
            self.send_message(msg)
        else:
            # inv path: the node answers with getheaders first.
            self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
            self.wait_for_getheaders()
            self.send_message(msg)
        self.wait_for_getdata()
        return
    def announce_block(self, block, use_header):
        # Fire-and-forget announcement (no synchronization).
        with mininode_lock:
            self.last_getdata = None
        if use_header:
            msg = msg_headers()
            msg.headers = [ CBlockHeader(block) ]
            self.send_message(msg)
        else:
            self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
    def request_block(self, blockhash, inv_type, timeout=60):
        # Ask the node for a block and return the delivered object.
        with mininode_lock:
            self.last_block = None
        self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
        self.wait_for_block(blockhash, timeout)
        return self.last_block
    def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None):
        """Send tx (with or without witness) and assert whether it entered
        the node's mempool; optionally check the rejection reason."""
        tx_message = msg_tx(tx)
        if with_witness:
            tx_message = msg_witness_tx(tx)
        self.send_message(tx_message)
        self.sync_with_ping()
        assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted)
        if (reason != None and not accepted):
            # Check the rejection reason as well.
            with mininode_lock:
                assert_equal(self.last_reject.reason, reason)
    # Test whether a witness block had the correct effect on the tip
    def test_witness_block(self, block, accepted, with_witness=True):
        if with_witness:
            self.send_message(msg_witness_block(block))
        else:
            self.send_message(msg_block(block))
        self.sync_with_ping()
        assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted)
# Used to keep track of anyone-can-spend outputs that we can use in the tests
class UTXO(object):
    """A spendable anyone-can-spend output: txid as sha256 integer,
    output index n, and value in satoshis."""
    def __init__(self, sha256, n, nValue):
        self.sha256 = sha256
        self.n = n
        self.nValue = nValue
# Helper for getting the script associated with a P2PKH
def GetP2PKHScript(pubkeyhash):
    """Return the standard pay-to-pubkey-hash scriptPubKey for
    pubkeyhash: OP_DUP OP_HASH160 &lt;hash&gt; OP_EQUALVERIFY OP_CHECKSIG."""
    ops = [
        CScriptOp(OP_DUP),
        CScriptOp(OP_HASH160),
        pubkeyhash,
        CScriptOp(OP_EQUALVERIFY),
        CScriptOp(OP_CHECKSIG),
    ]
    return CScript(ops)
# Add signature for a P2PK witness program.
def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key):
    """Sign input inIdx of txTo against `script' using the segwit v1
    (BIP143) signature hash and place [signature, script] on that
    input's witness stack. Mutates txTo in place.
    """
    tx_hash = SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, value)
    # DER signature followed by the one-byte sighash type.
    signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1')
    txTo.wit.vtxinwit[inIdx].scriptWitness.stack = [signature, script]
    txTo.rehash()
class SegWitTest(nealcoinTestFramework):
def __init__(self):
    """Three-node test starting from a fresh (clean) chain."""
    super().__init__()
    self.setup_clean_chain = True
    self.num_nodes = 3
def setup_network(self):
    """Start three nodes: node0 (default), node1 (rejects non-standard
    txs, for IsStandard tests) and node2 (segwit BIP9 disabled, to
    simulate upgrading after activation). node0 connects to both.
    """
    self.nodes = []
    self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-whitelist=127.0.0.1"]))
    # Start a node for testing IsStandard rules.
    self.nodes.append(start_node(1, self.options.tmpdir, ["-debug", "-logtimemicros=1", "-whitelist=127.0.0.1", "-acceptnonstdtxn=0"]))
    connect_nodes(self.nodes[0], 1)
    # Disable segwit's bip9 parameter to simulate upgrading after activation.
    self.nodes.append(start_node(2, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1", "-bip9params=segwit:0:0"]))
    connect_nodes(self.nodes[0], 2)
''' Helpers '''
# Build a block on top of node0's tip.
def build_next_block(self, nVersion=4):
    """Build (but do not submit) an empty block on top of node0's tip.

    The block time is median-time-past + 1 so the header is valid.
    """
    tip = self.nodes[0].getbestblockhash()
    height = self.nodes[0].getblockcount() + 1
    block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
    block = create_block(int(tip, 16), create_coinbase(height), block_time)
    block.nVersion = nVersion
    block.rehash()
    return block
# Adds list of transactions to block, adds witness commitment, then solves.
def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
    """Append tx_list to the block, add the witness commitment (with the
    given witness nonce) to the coinbase, then solve the block's PoW.
    """
    block.vtx.extend(tx_list)
    add_witness_commitment(block, nonce)
    block.solve()
    return
''' Individual tests '''
def test_witness_services(self):
    """Check that the node advertises the NODE_WITNESS service bit."""
    print("\tVerifying NODE_WITNESS service bit")
    assert((self.test_node.connection.nServices & NODE_WITNESS) != 0)
# See if sending a regular transaction works, and create a utxo
# to use in later tests.
def test_non_witness_transaction(self):
    """Verify a plain (non-witness) transaction is relayed and accepted,
    and leave one anyone-can-spend UTXO in self.utxo for later tests.
    """
    # Mine a block with an anyone-can-spend coinbase,
    # let it mature, then try to spend it.
    print("\tTesting non-witness transaction")
    block = self.build_next_block(nVersion=1)
    block.solve()
    self.test_node.send_message(msg_block(block))
    self.test_node.sync_with_ping() # make sure the block was processed
    txid = block.vtx[0].sha256
    self.nodes[0].generate(99) # let the block mature
    # Create a transaction that spends the coinbase
    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
    tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
    tx.calc_sha256()
    # Check that serializing it with or without witness is the same
    # This is a sanity check of our testing framework.
    assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize())
    self.test_node.send_message(msg_witness_tx(tx))
    self.test_node.sync_with_ping() # make sure the tx was processed
    assert(tx.hash in self.nodes[0].getrawmempool())
    # Save this transaction for later
    self.utxo.append(UTXO(tx.sha256, 0, 49*100000000))
    self.nodes[0].generate(1)
    # Verify that blocks with witnesses are rejected before activation.
    def test_unnecessary_witness_before_segwit_activation(self):
        """Before segwit activation, witness data must be rejected, but the
        rejection must not be permanent: a block or transaction resent
        without its witness should still be accepted, and witness-bearing
        transactions must not enter the recently-rejected cache (which
        would blind the node to the witness-stripped version).
        """
        print("\tTesting behavior of unnecessary witnesses")
        # For now, rely on earlier tests to have created at least one utxo for
        # us to use
        assert(len(self.utxo) > 0)
        assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active')
        # Spend a known utxo and attach a witness that is not needed.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]
        # Verify the hash with witness differs from the txid
        # (otherwise our testing framework must be broken!)
        tx.rehash()
        assert(tx.sha256 != tx.calc_sha256(with_witness=True))
        # Construct a segwit-signaling block that includes the transaction.
        block = self.build_next_block(nVersion=(VB_TOP_BITS|(1 << VB_WITNESS_BIT)))
        self.update_witness_block_with_transactions(block, [tx])
        # Sending witness data before activation is not allowed (anti-spam
        # rule).
        self.test_node.test_witness_block(block, accepted=False)
        # TODO: fix synchronization so we can test reject reason
        # Right now, nealcoind delays sending reject messages for blocks
        # until the future, making synchronization here difficult.
        #assert_equal(self.test_node.last_reject.reason, "unexpected-witness")
        # But it should not be permanently marked bad...
        # Resend without witness information.
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping()
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
        sync_blocks(self.nodes)
        # Create a p2sh output -- this is so we can pass the standardness
        # rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
        # in P2SH).
        p2sh_program = CScript([OP_TRUE])
        p2sh_pubkey = hash160(p2sh_program)
        scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
        # Now check that unnecessary witnesses can't be used to blind a node
        # to a transaction, eg by violating standardness checks.
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
        tx2.rehash()
        self.test_node.test_transaction_acceptance(tx2, False, True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # We'll add an unnecessary witness to this transaction that would cause
        # it to be non-standard, to test that violating policy with a witness before
        # segwit activation doesn't blind a node to a transaction. Transactions
        # rejected for having a witness before segwit activation shouldn't be added
        # to the rejection cache.
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
        tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptPubKey))
        tx3.wit.vtxinwit.append(CTxInWitness())
        tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a'*400000]
        tx3.rehash()
        # Note that this should be rejected for the premature witness reason,
        # rather than a policy check, since segwit hasn't activated yet.
        self.std_node.test_transaction_acceptance(tx3, True, False, b'no-witness-yet')
        # If we send without witness, it should be accepted.
        self.std_node.test_transaction_acceptance(tx3, False, True)
        # Now create a new anyone-can-spend utxo for the next test.
        tx4 = CTransaction()
        tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), CScript([p2sh_program])))
        tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx4.rehash()
        # tx3 was only accepted by std_node's node so far; feed tx3 then its
        # child tx4 to test_node's node as well.
        self.test_node.test_transaction_acceptance(tx3, False, True)
        self.test_node.test_transaction_acceptance(tx4, False, True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # Update our utxo list; we spent the first entry.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx4.sha256, 0, tx4.vout[0].nValue))
# Mine enough blocks for segwit's vb state to be 'started'.
def advance_to_segwit_started(self):
height = self.nodes[0].getblockcount()
# Will need to rewrite the tests here if we are past the first period
assert(height < VB_PERIOD - 1)
# Genesis block is 'defined'.
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined')
# Advance to end of period, status should now be 'started'
self.nodes[0].generate(VB_PERIOD-height-1)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
# Mine enough blocks to lock in segwit, but don't activate.
# TODO: we could verify that lockin only happens at the right threshold of
# signalling blocks, rather than just at the right period boundary.
def advance_to_segwit_lockin(self):
height = self.nodes[0].getblockcount()
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
# Advance to end of period, and verify lock-in happens at the end
self.nodes[0].generate(VB_PERIOD-1)
height = self.nodes[0].getblockcount()
assert((height % VB_PERIOD) == VB_PERIOD - 2)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
# Mine enough blocks to activate segwit.
# TODO: we could verify that activation only happens at the right threshold
# of signalling blocks, rather than just at the right period boundary.
def advance_to_segwit_active(self):
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
height = self.nodes[0].getblockcount()
self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in')
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active')
    # This test can only be run after segwit has activated
    def test_witness_commitments(self):
        """Exercise the witness commitment rules after activation: a correct
        commitment is accepted, the commitment depends on the coinbase
        witness nonce, a wrong commitment in the right location is rejected,
        the last matching OP_RETURN output is the one that counts, and a
        block with no witness transactions may omit the commitment entirely.
        """
        print("\tTesting witness commitments")
        # First try a correct witness commitment.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()
        # Test the test -- witness serialization should be different
        assert(msg_witness_block(block).serialize() != msg_block(block).serialize())
        # This empty block should be valid.
        self.test_node.test_witness_block(block, accepted=True)
        # Try to tweak the nonce
        block_2 = self.build_next_block()
        add_witness_commitment(block_2, nonce=28)
        block_2.solve()
        # The commitment should have changed!
        assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1])
        # This should also be valid.
        self.test_node.test_witness_block(block_2, accepted=True)
        # Now test commitments with actual transactions
        assert (len(self.utxo) > 0)
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        # Let's construct a witness program
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        # tx2 will spend tx1, and send back to a regular anyone-can-spend address
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
        tx2.rehash()
        # Use a commitment computed with nonce=1 so the later OP_RETURN we
        # append (which a node scanning for the last match will find) is wrong.
        block_3 = self.build_next_block()
        self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1)
        # Add an extra OP_RETURN output that matches the witness commitment template,
        # even though it has extra data after the incorrect commitment.
        # This block should fail.
        block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10])))
        block_3.vtx[0].rehash()
        block_3.hashMerkleRoot = block_3.calc_merkle_root()
        block_3.rehash()
        block_3.solve()
        self.test_node.test_witness_block(block_3, accepted=False)
        # Add a different commitment with different nonce, but in the
        # right location, and with some funds burned(!).
        # This should succeed (nValue shouldn't affect finding the
        # witness commitment).
        add_witness_commitment(block_3, nonce=0)
        block_3.vtx[0].vout[0].nValue -= 1
        block_3.vtx[0].vout[-1].nValue += 1
        block_3.vtx[0].rehash()
        block_3.hashMerkleRoot = block_3.calc_merkle_root()
        block_3.rehash()
        assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns
        block_3.solve()
        self.test_node.test_witness_block(block_3, accepted=True)
        # Finally test that a block with no witness transactions can
        # omit the commitment.
        block_4 = self.build_next_block()
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
        tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program))
        tx3.rehash()
        block_4.vtx.append(tx3)
        block_4.hashMerkleRoot = block_4.calc_merkle_root()
        block_4.solve()
        self.test_node.test_witness_block(block_4, with_witness=False, accepted=True)
        # Update available utxo's for use in later test.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
    def test_block_malleability(self):
        """Verify that witness-only malleation of a block (oversized coinbase
        witness, or a changed witness nonce) causes rejection without the
        block hash being permanently marked invalid, since the witness data
        is not covered by the block hash.
        """
        print("\tTesting witness block malleability")
        # Make sure that a block that has too big a virtual size
        # because of a too-large coinbase witness is not permanently
        # marked bad.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()
        # Inflate the coinbase witness; this does not change the block hash.
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000)
        assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE)
        # We can't send over the p2p network, because this is too big to relay
        # TODO: repeat this test with a block that can be relayed
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() != block.hash)
        # Remove the oversized witness item; the same block hash must now
        # be accepted.
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
        assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE)
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() == block.hash)
        # Now make sure that malleating the witness nonce doesn't
        # result in a block permanently marked bad.
        block = self.build_next_block()
        add_witness_commitment(block)
        block.solve()
        # Change the nonce -- should not cause the block to be permanently
        # failed
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ]
        self.test_node.test_witness_block(block, accepted=False)
        # Changing the witness nonce doesn't change the block hash
        block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ]
        self.test_node.test_witness_block(block, accepted=True)
    def test_witness_block_size(self):
        """Verify the witness block size limit: build a block whose virtual
        size is tuned to exactly MAX_BLOCK_BASE_SIZE + 1 (rejected), then
        shrink one witness byte to reach MAX_BLOCK_BASE_SIZE (accepted).
        """
        print("\tTesting witness block size limit")
        # TODO: Test that non-witness carrying blocks can't exceed 1MB
        # Skipping this test for now; this is covered in p2p-fullblocktest.py
        # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB.
        block = self.build_next_block()
        assert(len(self.utxo) > 0)
        # Create a P2WSH transaction.
        # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE.
        # This should give us plenty of room to tweak the spending tx's
        # virtual size.
        NUM_DROPS = 200 # 201 max ops per script!
        NUM_OUTPUTS = 50
        witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE])
        witness_hash = uint256_from_str(sha256(witness_program))
        scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)])
        prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n)
        value = self.utxo[0].nValue
        # Fan the utxo out into NUM_OUTPUTS equal P2WSH outputs (minus fee
        # taken from the first output).
        parent_tx = CTransaction()
        parent_tx.vin.append(CTxIn(prevout, b""))
        child_value = int(value/NUM_OUTPUTS)
        for i in range(NUM_OUTPUTS):
            parent_tx.vout.append(CTxOut(child_value, scriptPubKey))
        parent_tx.vout[0].nValue -= 50000
        assert(parent_tx.vout[0].nValue > 0)
        parent_tx.rehash()
        # Spend all the outputs in one child tx; each input carries
        # 2*NUM_DROPS stack items (consumed by the OP_2DROPs) plus the program.
        child_tx = CTransaction()
        for i in range(NUM_OUTPUTS):
            child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b""))
        child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))]
        for i in range(NUM_OUTPUTS):
            child_tx.wit.vtxinwit.append(CTxInWitness())
            child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program]
        child_tx.rehash()
        self.update_witness_block_with_transactions(block, [parent_tx, child_tx])
        vsize = get_virtual_size(block)
        additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize)*4
        i = 0
        while additional_bytes > 0:
            # Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1
            extra_bytes = min(additional_bytes+1, 55)
            block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes)
            additional_bytes -= extra_bytes
            i += 1
        block.vtx[0].vout.pop() # Remove old commitment
        add_witness_commitment(block)
        block.solve()
        vsize = get_virtual_size(block)
        assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1)
        # Make sure that our test case would exceed the old max-network-message
        # limit
        assert(len(block.serialize(True)) > 2*1024*1024)
        self.test_node.test_witness_block(block, accepted=False)
        # Now resize the second transaction to make the block fit.
        cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
        block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1)
        block.vtx[0].vout.pop()
        add_witness_commitment(block)
        block.solve()
        assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE)
        self.test_node.test_witness_block(block, accepted=True)
        # Update available utxo's
        self.utxo.pop(0)
        self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
    # submitblock will try to add the nonce automatically, so that mining
    # software doesn't need to worry about doing so itself.
    def test_submit_block(self):
        """Verify submitblock's handling of a missing coinbase witness nonce:
        it fills in the default (0) nonce automatically, so a block committed
        with nonce=0 succeeds while one committed with a non-default nonce
        fails, and it never fills in a missing commitment for a block that
        actually carries witness data.
        """
        block = self.build_next_block()
        # Try using a custom nonce and then don't supply it.
        # This shouldn't possibly work.
        add_witness_commitment(block, nonce=1)
        block.vtx[0].wit = CTxWitness() # drop the nonce
        block.solve()
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert(self.nodes[0].getbestblockhash() != block.hash)
        # Now redo commitment with the standard nonce, but let nealcoind fill it in.
        add_witness_commitment(block, nonce=0)
        block.vtx[0].wit = CTxWitness()
        block.solve()
        self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)
        # This time, add a tx with non-empty witness, but don't supply
        # the commitment.
        block_2 = self.build_next_block()
        add_witness_commitment(block_2)
        block_2.solve()
        # Drop commitment and nonce -- submitblock should not fill in.
        block_2.vtx[0].vout.pop()
        block_2.vtx[0].wit = CTxWitness()
        self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True)))
        # Tip should not advance!
        assert(self.nodes[0].getbestblockhash() != block_2.hash)
    # Consensus tests of extra witness data in a transaction.
    def test_extra_witness_data(self):
        """Consensus checks for extra witness/scriptSig data: witness data on
        a non-witness input is invalid, extra scriptSig data is fine on a
        non-witness input but invalid on a witness input, and extra stack
        items beyond what the witness program consumes are invalid.
        """
        print("\tTesting extra witness data in tx")
        assert(len(self.utxo) > 0)
        block = self.build_next_block()
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # First try extra witness data on a tx that doesn't require a witness
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-2000, scriptPubKey))
        tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])]
        tx.rehash()
        self.update_witness_block_with_transactions(block, [tx])
        # Extra witness data should not be allowed.
        self.test_node.test_witness_block(block, accepted=False)
        # Try extra signature data. Ok if we're not spending a witness output.
        block.vtx[1].wit.vtxinwit = []
        block.vtx[1].vin[0].scriptSig = CScript([OP_0])
        block.vtx[1].rehash()
        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)
        # Now try extra witness/signature data on an input that DOES require a
        # witness
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness
        tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()])
        # One more stack item than OP_DROP consumes -> extra witness data.
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ]
        tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ]
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])
        # This has extra witness data, so it should fail.
        self.test_node.test_witness_block(block, accepted=False)
        # Now get rid of the extra witness, but add extra scriptSig data
        tx2.vin[0].scriptSig = CScript([OP_TRUE])
        tx2.vin[1].scriptSig = CScript([OP_TRUE])
        tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0)
        tx2.wit.vtxinwit[1].scriptWitness.stack = []
        tx2.rehash()
        add_witness_commitment(block)
        block.solve()
        # This has extra signature data for a witness input, so it should fail.
        self.test_node.test_witness_block(block, accepted=False)
        # Now get rid of the extra scriptsig on the witness input, and verify
        # success (even with extra scriptsig data in the non-witness input)
        tx2.vin[0].scriptSig = b""
        tx2.rehash()
        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)
        # Update utxo for later tests
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_max_witness_push_length(self):
        """Verify witness stack elements are limited to 520 bytes: a
        521-byte push makes the block invalid, and trimming it back to
        520 bytes makes the same block valid.
        """
        print("\tTesting maximum witness push size")
        MAX_SCRIPT_ELEMENT_SIZE = 520
        assert(len(self.utxo))
        block = self.build_next_block()
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # tx funds a P2WSH output; tx2 spends it with the oversized push.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.append(CTxInWitness())
        # First try a 521-byte stack element
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ]
        tx2.rehash()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Now reduce the length of the stack element
        tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE)
        add_witness_commitment(block)
        block.solve()
        self.test_node.test_witness_block(block, accepted=True)
        # Update the utxo for later tests
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_max_witness_program_length(self):
        """Verify the 10,000-byte limit on executed witness programs: a
        10,001-byte program makes a spend invalid, while a 10,000-byte
        program is spendable.
        """
        # Can create witness outputs that are long, but can't be greater than
        # 10k bytes to successfully spend
        print("\tTesting maximum witness program length")
        assert(len(self.utxo))
        MAX_PROGRAM_LENGTH = 10000
        # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes.
        long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE])
        assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1)
        long_witness_hash = sha256(long_witness_program)
        long_scriptPubKey = CScript([OP_0, long_witness_hash])
        block = self.build_next_block()
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, long_scriptPubKey))
        tx.rehash()
        # Spend it, supplying enough stack items for the OP_DROPs plus the
        # program itself.
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program]
        tx2.rehash()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Try again with one less byte in the witness program
        witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE])
        assert(len(witness_program) == MAX_PROGRAM_LENGTH)
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey)
        tx.rehash()
        tx2.vin[0].prevout.hash = tx.sha256
        tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program]
        tx2.rehash()
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=True)
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_witness_input_length(self):
        """Verify that a transaction's vtxinwit length must match its vin
        length. Uses a serializer subclass that deliberately skips the
        normalization step, so malformed (too-long / too-short / broken)
        witness vectors actually reach the node over the wire.
        """
        print("\tTesting witness input length")
        assert(len(self.utxo))
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # Create a transaction that splits our utxo into many outputs
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        nValue = self.utxo[0].nValue
        for i in range(10):
            tx.vout.append(CTxOut(int(nValue/10), scriptPubKey))
        tx.vout[0].nValue -= 1000
        assert(tx.vout[0].nValue >= 0)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        # Try various ways to spend tx that should all break.
        # This "broken" transaction serializer will not normalize
        # the length of vtxinwit.
        class BrokenCTransaction(CTransaction):
            def serialize_with_witness(self):
                # Same wire format as the normal serializer, but sends
                # self.wit as-is instead of padding/truncating it to len(vin).
                flags = 0
                if not self.wit.is_null():
                    flags |= 1
                r = b""
                r += struct.pack("<i", self.nVersion)
                if flags:
                    dummy = []
                    r += ser_vector(dummy)
                    r += struct.pack("<B", flags)
                r += ser_vector(self.vin)
                r += ser_vector(self.vout)
                if flags & 1:
                    r += self.wit.serialize()
                r += struct.pack("<I", self.nLockTime)
                return r
        tx2 = BrokenCTransaction()
        for i in range(10):
            tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
        tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE])))
        # First try using a too long vtxinwit
        for i in range(11):
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program]
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Now try using a too short vtxinwit
        tx2.wit.vtxinwit.pop()
        tx2.wit.vtxinwit.pop()
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Now make one of the intermediate witnesses be incorrect
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program]
        tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ]
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Fix the broken witness and the block should be accepted.
        tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program]
        block.vtx = [block.vtx[0]]
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)
        self.utxo.pop()
        self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
    def test_witness_tx_relay_before_segwit_activation(self):
        """Verify witness tx relay behavior before activation: non-witness
        peers get plain-tx getdata, a witness-bearing tx is rejected for
        premature witness (without entering the reject cache), and the
        witness-stripped version is still accepted.
        """
        print("\tTesting relay of witness transactions")
        # Generate a transaction that doesn't require a witness, but send it
        # with a witness. Should be rejected for premature-witness, but should
        # not be added to recently rejected list.
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
        tx.rehash()
        tx_hash = tx.sha256
        tx_value = tx.vout[0].nValue
        # Verify that if a peer doesn't set nServices to include NODE_WITNESS,
        # the getdata is just for the non-witness portion.
        self.old_node.announce_tx_and_wait_for_getdata(tx)
        assert(self.old_node.last_getdata.inv[0].type == 1)
        # Since we haven't delivered the tx yet, inv'ing the same tx from
        # a witness transaction ought not result in a getdata.
        try:
            # The timeout expiring (AssertionError) is the expected outcome.
            self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2)
            print("Error: duplicate tx getdata!")
            assert(False)
        except AssertionError as e:
            pass
        # Delivering this transaction with witness should fail (no matter who
        # its from)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
        # But eliminating the witness should fix it
        self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
        # Cleanup: mine the first transaction and update utxo
        self.nodes[0].generate(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx_hash, 0, tx_value))
    # After segwit activates, verify that mempool:
    # - rejects transactions with unnecessary/extra witnesses
    # - accepts transactions with valid witnesses
    # and that witness transactions are relayed to non-upgraded peers.
    def test_tx_relay_after_segwit_activation(self):
        """Post-activation mempool/relay checks: unnecessary or extra
        witness data is rejected, valid witness txs are accepted and
        announced to non-witness peers, and getrawtransaction reports
        correct witness hash/size/vsize.
        """
        print("\tTesting relay of witness transactions")
        # Generate a transaction that doesn't require a witness, but send it
        # with a witness. Should be rejected because we can't use a witness
        # when spending a non-witness output.
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ]
        tx.rehash()
        tx_hash = tx.sha256
        # Verify that unnecessary witnesses are rejected.
        self.test_node.announce_tx_and_wait_for_getdata(tx)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
        # Verify that removing the witness succeeds.
        self.test_node.announce_tx_and_wait_for_getdata(tx)
        self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
        # Now try to add extra witness data to a valid witness tx.
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
        tx2.rehash()
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
        tx3.wit.vtxinwit.append(CTxInWitness())
        # Add too-large for IsStandard witness and check that it does not enter reject filter
        p2sh_program = CScript([OP_TRUE])
        p2sh_pubkey = hash160(p2sh_program)
        witness_program2 = CScript([b'a'*400000])
        tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])))
        tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2]
        tx3.rehash()
        # Node will not be blinded to the transaction
        self.std_node.announce_tx_and_wait_for_getdata(tx3)
        self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
        # A second announce must still elicit a getdata: the tx-size
        # rejection did not land in the reject filter.
        self.std_node.announce_tx_and_wait_for_getdata(tx3)
        self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
        # Remove witness stuffing, instead add extra witness push on stack
        tx3.vout[0] = CTxOut(tx2.vout[0].nValue-1000, CScript([OP_TRUE]))
        tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ]
        tx3.rehash()
        self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
        # Get rid of the extra witness, and verify acceptance.
        tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
        # Also check that old_node gets a tx announcement, even though this is
        # a witness transaction.
        self.old_node.wait_for_inv(CInv(1, tx2.sha256)) # wait until tx2 was inv'ed
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        self.old_node.wait_for_inv(CInv(1, tx3.sha256))
        # Test that getrawtransaction returns correct witness information
        # hash, size, vsize
        raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
        assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
        assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
        # vsize = ceil(weight / 4), with weight = 3*stripped_size + total_size.
        vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4
        assert_equal(raw_tx["vsize"], vsize)
        assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
        assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii'))
        assert(vsize != raw_tx["size"])
        # Cleanup: mine the transactions and update utxo for next test
        self.nodes[0].generate(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
    # Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG
    # This is true regardless of segwit activation.
    # Also test that we don't ask for blocks from unupgraded peers
    def test_block_relay(self, segwit_activated):
        """Verify block relay semantics around witnesses.

        Checks that getdata requests to a NODE_WITNESS peer always carry
        MSG_WITNESS_FLAG; that before activation witness and non-witness
        block requests return identical data (while after activation they
        differ, with getblock returning witness serialization); and that
        the node does not request blocks from non-witness peers.

        segwit_activated: whether segwit is active, selecting which of
        the two pre/post-activation branches runs.
        """
        print("\tTesting block relay")
        blocktype = 2|MSG_WITNESS_FLAG
        # test_node has set NODE_WITNESS, so all getdata requests should be for
        # witness blocks.
        # Test announcing a block via inv results in a getdata, and that
        # announcing a version 4 or random VB block with a header results in a getdata
        block1 = self.build_next_block()
        block1.solve()
        self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
        assert(self.test_node.last_getdata.inv[0].type == blocktype)
        self.test_node.test_witness_block(block1, True)
        block2 = self.build_next_block(nVersion=4)
        block2.solve()
        self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
        assert(self.test_node.last_getdata.inv[0].type == blocktype)
        self.test_node.test_witness_block(block2, True)
        block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
        block3.solve()
        self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
        assert(self.test_node.last_getdata.inv[0].type == blocktype)
        self.test_node.test_witness_block(block3, True)
        # Check that we can getdata for witness blocks or regular blocks,
        # and the right thing happens.
        if segwit_activated == False:
            # Before activation, we should be able to request old blocks with
            # or without witness, and they should be the same.
            chain_height = self.nodes[0].getblockcount()
            # Pick 10 random blocks on main chain, and verify that getdata's
            # for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
            all_heights = list(range(chain_height+1))
            random.shuffle(all_heights)
            all_heights = all_heights[0:10]
            for height in all_heights:
                block_hash = self.nodes[0].getblockhash(height)
                rpc_block = self.nodes[0].getblock(block_hash, False)
                block_hash = int(block_hash, 16)
                block = self.test_node.request_block(block_hash, 2)
                wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG)
                assert_equal(block.serialize(True), wit_block.serialize(True))
                assert_equal(block.serialize(), hex_str_to_bytes(rpc_block))
        else:
            # After activation, witness blocks and non-witness blocks should
            # be different. Verify rpc getblock() returns witness blocks, while
            # getdata respects the requested type.
            block = self.build_next_block()
            self.update_witness_block_with_transactions(block, [])
            # This gives us a witness commitment.
            assert(len(block.vtx[0].wit.vtxinwit) == 1)
            assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1)
            self.test_node.test_witness_block(block, accepted=True)
            # Now try to retrieve it...
            rpc_block = self.nodes[0].getblock(block.hash, False)
            non_wit_block = self.test_node.request_block(block.sha256, 2)
            wit_block = self.test_node.request_block(block.sha256, 2|MSG_WITNESS_FLAG)
            assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block))
            assert_equal(wit_block.serialize(False), non_wit_block.serialize())
            assert_equal(wit_block.serialize(True), block.serialize(True))
            # Test size, vsize, weight
            rpc_details = self.nodes[0].getblock(block.hash, True)
            assert_equal(rpc_details["size"], len(block.serialize(True)))
            assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
            # weight = 3*stripped_size + total_size (BIP141 definition).
            weight = 3*len(block.serialize(False)) + len(block.serialize(True))
            assert_equal(rpc_details["weight"], weight)
            # Upgraded node should not ask for blocks from unupgraded
            block4 = self.build_next_block(nVersion=4)
            block4.solve()
            self.old_node.getdataset = set()
            # Blocks can be requested via direct-fetch (immediately upon processing the announcement)
            # or via parallel download (with an indeterminate delay from processing the announcement)
            # so to test that a block is NOT requested, we could guess a time period to sleep for,
            # and then check. We can avoid the sleep() by taking advantage of transaction getdata's
            # being processed after block getdata's, and announce a transaction as well,
            # and then check to see if that particular getdata has been received.
            self.old_node.announce_block(block4, use_header=False)
            self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
            assert(block4.sha256 not in self.old_node.getdataset)
    # V0 segwit outputs should be standard after activation, but not before.
    def test_standardness_v0(self, segwit_activated):
        """Check relay standardness of v0 witness outputs on std_node.

        Called twice from run_test: once with segwit_activated=False
        (v0 outputs expected non-standard) and once with True (standard).
        Consumes self.utxo[0] and appends a replacement UTXO at the end.
        """
        print("\tTesting standardness of v0 outputs (%s activation)" % ("after" if segwit_activated else "before"))
        assert(len(self.utxo))
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        p2sh_pubkey = hash160(witness_program)
        p2sh_scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
        # First prepare a p2sh output (so that spending it will pass standardness)
        p2sh_tx = CTransaction()
        p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
        p2sh_tx.vout = [CTxOut(self.utxo[0].nValue-1000, p2sh_scriptPubKey)]
        p2sh_tx.rehash()
        # Mine it on test_node to create the confirmed output.
        self.test_node.test_transaction_acceptance(p2sh_tx, with_witness=True, accepted=True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # Now test standardness of v0 P2WSH outputs.
        # Start by creating a transaction with two outputs.
        tx = CTransaction()
        tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
        tx.vout = [CTxOut(p2sh_tx.vout[0].nValue-10000, scriptPubKey)]
        tx.vout.append(CTxOut(8000, scriptPubKey)) # Might burn this later
        tx.rehash()
        # Creating v0 witness outputs is only standard once segwit activates.
        self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated)
        # Now create something that looks like a P2PKH output. This won't be spendable.
        scriptPubKey = CScript([OP_0, hash160(witness_hash)])
        tx2 = CTransaction()
        if segwit_activated:
            # if tx was accepted, then we spend the second output.
            tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
            tx2.vout = [CTxOut(7000, scriptPubKey)]
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
        else:
            # if tx wasn't accepted, we just re-spend the p2sh output we started with.
            tx2.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
            tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, scriptPubKey)]
        tx2.rehash()
        self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated)
        # Now update self.utxo for later tests.
        tx3 = CTransaction()
        if segwit_activated:
            # tx and tx2 were both accepted. Don't bother trying to reclaim the
            # P2PKH output; just send tx's first output back to an anyone-can-spend.
            sync_mempools([self.nodes[0], self.nodes[1]])
            tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
            tx3.vout = [CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE]))]
            tx3.wit.vtxinwit.append(CTxInWitness())
            tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
            tx3.rehash()
            self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        else:
            # tx and tx2 didn't go anywhere; just clean up the p2sh_tx output.
            tx3.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
            tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, witness_program)]
            tx3.rehash()
            self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
        # Both mempools should be drained after the mined block.
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
    # Verify that future segwit upgraded transactions are non-standard,
    # but valid in blocks. Can run this before and after segwit activation.
    def test_segwit_versions(self):
        """Exercise witness program versions 0 through 16.

        Creating outputs for versions 1-16 should be rejected by policy
        (std_node) but accepted without fRequireStandard (test_node);
        spending a version 1-16 output is rejected by policy on every
        node ("reserved for soft-fork upgrades") yet valid in a block.
        """
        print("\tTesting standardness/consensus for segwit versions (0-16)")
        assert(len(self.utxo))
        NUM_TESTS = 17 # will test OP_0, OP_1, ..., OP_16
        # Top up self.utxo so each version gets its own input to spend.
        if (len(self.utxo) < NUM_TESTS):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
            split_value = (self.utxo[0].nValue - 4000) // NUM_TESTS
            for i in range(NUM_TESTS):
                tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
            tx.rehash()
            block = self.build_next_block()
            self.update_witness_block_with_transactions(block, [tx])
            self.test_node.test_witness_block(block, accepted=True)
            self.utxo.pop(0)
            for i in range(NUM_TESTS):
                self.utxo.append(UTXO(tx.sha256, i, split_value))
        sync_blocks(self.nodes)
        temp_utxo = []
        tx = CTransaction()
        count = 0
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        for version in list(range(OP_1, OP_16+1)) + [OP_0]:
            count += 1
            # First try to spend to a future version segwit scriptPubKey.
            scriptPubKey = CScript([CScriptOp(version), witness_hash])
            tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
            tx.vout = [CTxOut(self.utxo[0].nValue-1000, scriptPubKey)]
            tx.rehash()
            self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
            self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
            self.utxo.pop(0)
            temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
        self.nodes[0].generate(1) # Mine all the transactions
        sync_blocks(self.nodes)
        assert(len(self.nodes[0].getrawmempool()) == 0)
        # Finally, verify that version 0 -> version 1 transactions
        # are non-standard
        scriptPubKey = CScript([CScriptOp(OP_1), witness_hash])
        tx2 = CTransaction()
        # tx still refers to the last (OP_0) transaction from the loop above.
        tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
        tx2.vout = [CTxOut(tx.vout[0].nValue-1000, scriptPubKey)]
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
        tx2.rehash()
        # Gets accepted to test_node, because standardness of outputs isn't
        # checked with fRequireStandard
        self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
        self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False)
        temp_utxo.pop() # last entry in temp_utxo was the output we just spent
        temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
        # Spend everything in temp_utxo back to an OP_TRUE output.
        tx3 = CTransaction()
        total_value = 0
        for i in temp_utxo:
            tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
            tx3.wit.vtxinwit.append(CTxInWitness())
            total_value += i.nValue
            tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
        tx3.vout.append(CTxOut(total_value - 1000, CScript([OP_TRUE])))
        tx3.rehash()
        # Spending a higher version witness output is not allowed by policy,
        # even with fRequireStandard=false.
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
        self.test_node.sync_with_ping()
        with mininode_lock:
            assert(b"reserved for soft-fork upgrades" in self.test_node.last_reject.reason)
        # Building a block with the transaction must be valid, however.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2, tx3])
        self.test_node.test_witness_block(block, accepted=True)
        sync_blocks(self.nodes)
        # Add utxo to our list
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
def test_premature_coinbase_witness_spend(self):
print("\tTesting premature coinbase witness spend")
block = self.build_next_block()
# Change the output of the block to be a witness output.
witness_program = CScript([OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
block.vtx[0].vout[0].scriptPubKey = scriptPubKey
# This next line will rehash the coinbase and update the merkle
# root, and solve.
self.update_witness_block_with_transactions(block, [])
self.test_node.test_witness_block(block, accepted=True)
spend_tx = CTransaction()
spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)]
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
spend_tx.rehash()
# Now test a premature spend.
self.nodes[0].generate(98)
sync_blocks(self.nodes)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
self.test_node.test_witness_block(block2, accepted=False)
# Advancing one more block should allow the spend.
self.nodes[0].generate(1)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
self.test_node.test_witness_block(block2, accepted=True)
sync_blocks(self.nodes)
    def test_signature_version_1(self):
        """Exercise the segwit (version 1) signature hashing scheme.

        Covers: every hashtype with wrong/correct input amounts,
        randomized hashtype combinations over many inputs/outputs
        (including SIGHASH_SINGLE with out-of-bounds input index),
        and v0 P2WPKH spends (signature must be in witness, not
        scriptSig).  Refills self.utxo with anyone-can-spend outputs.
        """
        print("\tTesting segwit signature hash version 1")
        key = CECKey()
        key.set_secretbytes(b"9")
        pubkey = CPubKey(key.get_pubkey())
        witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # First create a witness output for use in the tests.
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
        # Mine this transaction in preparation for following tests.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        sync_blocks(self.nodes)
        self.utxo.pop(0)
        # Test each hashtype
        prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
        for sigflag in [ 0, SIGHASH_ANYONECANPAY ]:
            for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
                hashtype |= sigflag
                block = self.build_next_block()
                tx = CTransaction()
                tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
                tx.vout.append(CTxOut(prev_utxo.nValue - 1000, scriptPubKey))
                tx.wit.vtxinwit.append(CTxInWitness())
                # Too-large input value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key)
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=False)
                # Too-small input value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key)
                block.vtx.pop() # remove last tx
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=False)
                # Now try correct value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
                block.vtx.pop()
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=True)
                # Chain the next iteration off the output we just confirmed.
                prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
        # Test combinations of signature hashes.
        # Split the utxo into a lot of outputs.
        # Randomly choose up to 10 to spend, sign with different hashtypes, and
        # output to a random number of outputs. Repeat NUM_TESTS times.
        # Ensure that we've tested a situation where we use SIGHASH_SINGLE with
        # an input index > number of outputs.
        NUM_TESTS = 500
        temp_utxos = []
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
        split_value = prev_utxo.nValue // NUM_TESTS
        for i in range(NUM_TESTS):
            tx.vout.append(CTxOut(split_value, scriptPubKey))
        tx.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
        for i in range(NUM_TESTS):
            temp_utxos.append(UTXO(tx.sha256, i, split_value))
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        block = self.build_next_block()
        used_sighash_single_out_of_bounds = False
        for i in range(NUM_TESTS):
            # Ping regularly to keep the connection alive
            if (not i % 100):
                self.test_node.sync_with_ping()
            # Choose random number of inputs to use.
            num_inputs = random.randint(1, 10)
            # Create a slight bias for producing more utxos
            num_outputs = random.randint(1, 11)
            random.shuffle(temp_utxos)
            assert(len(temp_utxos) > num_inputs)
            tx = CTransaction()
            total_value = 0
            for i in range(num_inputs):
                tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
                tx.wit.vtxinwit.append(CTxInWitness())
                total_value += temp_utxos[i].nValue
            split_value = total_value // num_outputs
            for i in range(num_outputs):
                tx.vout.append(CTxOut(split_value, scriptPubKey))
            for i in range(num_inputs):
                # Now try to sign each input, using a random hashtype.
                anyonecanpay = 0
                if random.randint(0, 1):
                    anyonecanpay = SIGHASH_ANYONECANPAY
                hashtype = random.randint(1, 3) | anyonecanpay
                sign_P2PK_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key)
                if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
                    used_sighash_single_out_of_bounds = True
            tx.rehash()
            for i in range(num_outputs):
                temp_utxos.append(UTXO(tx.sha256, i, split_value))
            # Drop the inputs we consumed (they're at the front post-shuffle).
            temp_utxos = temp_utxos[num_inputs:]
            block.vtx.append(tx)
            # Test the block periodically, if we're close to maxblocksize
            if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000):
                self.update_witness_block_with_transactions(block, [])
                self.test_node.test_witness_block(block, accepted=True)
                block = self.build_next_block()
        if (not used_sighash_single_out_of_bounds):
            print("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
        # Test the transactions we've added to the block
        if (len(block.vtx) > 1):
            self.update_witness_block_with_transactions(block, [])
            self.test_node.test_witness_block(block, accepted=True)
        # Now test witness version 0 P2PKH transactions
        pubkeyhash = hash160(pubkey)
        scriptPKH = CScript([OP_0, pubkeyhash])
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
        tx.vout.append(CTxOut(temp_utxos[0].nValue, scriptPKH))
        tx.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
        script = GetP2PKHScript(pubkeyhash)
        sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
        signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
        # Check that we can't have a scriptSig
        tx2.vin[0].scriptSig = CScript([signature, pubkey])
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)
        # Move the signature to the witness.
        block.vtx.pop()
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
        tx2.vin[0].scriptSig = b""
        tx2.rehash()
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)
        temp_utxos.pop(0)
        # Update self.utxos for later tests. Just spend everything in
        # temp_utxos to a corresponding entry in self.utxos
        tx = CTransaction()
        index = 0
        for i in temp_utxos:
            # Just spend to our usual anyone-can-spend output
            # Use SIGHASH_SINGLE|SIGHASH_ANYONECANPAY so we can build up
            # the signatures as we go.
            tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
            tx.vout.append(CTxOut(i.nValue, CScript([OP_TRUE])))
            tx.wit.vtxinwit.append(CTxInWitness())
            sign_P2PK_witness_input(witness_program, tx, index, SIGHASH_SINGLE|SIGHASH_ANYONECANPAY, i.nValue, key)
            index += 1
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        for i in range(len(tx.vout)):
            self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
    # Test P2SH wrapped witness programs.
    def test_p2sh_witness(self, segwit_activated):
        """Test spending a P2SH-wrapped P2WSH output before/after activation.

        The witness data must live in the witness, never in the scriptSig;
        mempool acceptance of the witness spend depends on segwit_activated,
        while block validity holds in both phases (witness stripped when
        not activated).  Consumes self.utxo[0], appends a replacement.
        """
        print("\tTesting P2SH witness transactions")
        assert(len(self.utxo))
        # Prepare the p2sh-wrapped witness output
        witness_program = CScript([OP_DROP, OP_TRUE])
        witness_hash = sha256(witness_program)
        p2wsh_pubkey = CScript([OP_0, witness_hash])
        p2sh_witness_hash = hash160(p2wsh_pubkey)
        scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
        scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script
        # Fund the P2SH output
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
        tx.rehash()
        # Verify mempool acceptance and block validity
        self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated)
        sync_blocks(self.nodes)
        # Now test attempts to spend the output.
        spend_tx = CTransaction()
        spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), scriptSig))
        spend_tx.vout.append(CTxOut(tx.vout[0].nValue-1000, CScript([OP_TRUE])))
        spend_tx.rehash()
        # This transaction should not be accepted into the mempool pre- or
        # post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which
        # will require a witness to spend a witness program regardless of
        # segwit activation. Note that older nealcoind's that are not
        # segwit-aware would also reject this for failing CLEANSTACK.
        self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
        # Try to put the witness script in the scriptSig, should also fail.
        spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
        spend_tx.rehash()
        self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
        # Now put the witness script in the witness, should succeed after
        # segwit activates.
        spend_tx.vin[0].scriptSig = scriptSig
        spend_tx.rehash()
        spend_tx.wit.vtxinwit.append(CTxInWitness())
        spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ]
        # Verify mempool acceptance
        self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [spend_tx])
        # If we're before activation, then sending this without witnesses
        # should be valid. If we're after activation, then sending this with
        # witnesses should be valid.
        if segwit_activated:
            self.test_node.test_witness_block(block, accepted=True)
        else:
            self.test_node.test_witness_block(block, accepted=True, with_witness=False)
        # Update self.utxo
        self.utxo.pop(0)
        self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
# Test the behavior of starting up a segwit-aware node after the softfork
# has activated. As segwit requires different block data than pre-segwit
# nodes would have stored, this requires special handling.
# To enable this test, pass --oldbinary=<path-to-pre-segwit-nealcoind> to
# the test.
def test_upgrade_after_activation(self, node, node_id):
print("\tTesting software upgrade after softfork activation")
assert(node_id != 0) # node0 is assumed to be a segwit-active nealcoind
# Make sure the nodes are all up
sync_blocks(self.nodes)
# Restart with the new binary
stop_node(node, node_id)
self.nodes[node_id] = start_node(node_id, self.options.tmpdir, ["-debug"])
connect_nodes(self.nodes[0], node_id)
sync_blocks(self.nodes)
# Make sure that this peer thinks segwit has activated.
assert(get_bip9_status(node, 'segwit')['status'] == "active")
# Make sure this peers blocks match those of node0.
height = node.getblockcount()
while height >= 0:
block_hash = node.getblockhash(height)
assert_equal(block_hash, self.nodes[0].getblockhash(height))
assert_equal(self.nodes[0].getblock(block_hash), node.getblock(block_hash))
height -= 1
    def test_witness_sigops(self):
        '''Ensure sigop counting is correct inside witnesses.

        Builds witness programs whose spends land exactly at, and one
        over, MAX_SIGOP_COST for a block, and checks block acceptance
        flips accordingly (including a legacy-sigop contribution case).
        '''
        print("\tTesting sigops limit")
        assert(len(self.utxo))
        # Keep this under MAX_OPS_PER_SCRIPT (201)
        witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        # CHECKMULTISIG counts as 20 sigops, CHECKSIG as 1.
        sigops_per_script = 20*5 + 193*1
        # We'll produce 2 extra outputs, one with a program that would take us
        # over max sig ops, and one with a program that would exactly reach max
        # sig ops
        outputs = (MAX_SIGOP_COST // sigops_per_script) + 2
        extra_sigops_available = MAX_SIGOP_COST % sigops_per_script
        # We chose the number of checkmultisigs/checksigs to make this work:
        assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT
        # This script, when spent with the first
        # N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction,
        # would push us just over the block sigop limit.
        witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF])
        witness_hash_toomany = sha256(witness_program_toomany)
        scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany])
        # If we spend this script instead, we would exactly reach our sigop
        # limit (for witness sigops).
        witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available) + [OP_ENDIF])
        witness_hash_justright = sha256(witness_program_justright)
        scriptPubKey_justright = CScript([OP_0, witness_hash_justright])
        # First split our available utxo into a bunch of outputs
        split_value = self.utxo[0].nValue // outputs
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        for i in range(outputs):
            tx.vout.append(CTxOut(split_value, scriptPubKey))
        # Last two outputs get the boundary-testing programs.
        tx.vout[-2].scriptPubKey = scriptPubKey_toomany
        tx.vout[-1].scriptPubKey = scriptPubKey_justright
        tx.rehash()
        block_1 = self.build_next_block()
        self.update_witness_block_with_transactions(block_1, [tx])
        self.test_node.test_witness_block(block_1, accepted=True)
        tx2 = CTransaction()
        # If we try to spend the first n-1 outputs from tx, that should be
        # too many sigops.
        total_value = 0
        for i in range(outputs-1):
            tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ]
            total_value += tx.vout[i].nValue
        # Replace the last input's witness with the one-over-limit program.
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ]
        tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE])))
        tx2.rehash()
        block_2 = self.build_next_block()
        self.update_witness_block_with_transactions(block_2, [tx2])
        self.test_node.test_witness_block(block_2, accepted=False)
        # Try dropping the last input in tx2, and add an output that has
        # too many sigops (contributing to legacy sigop count).
        checksig_count = (extra_sigops_available // 4) + 1
        scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count)
        tx2.vout.append(CTxOut(0, scriptPubKey_checksigs))
        tx2.vin.pop()
        tx2.wit.vtxinwit.pop()
        tx2.vout[0].nValue -= tx.vout[-2].nValue
        tx2.rehash()
        block_3 = self.build_next_block()
        self.update_witness_block_with_transactions(block_3, [tx2])
        self.test_node.test_witness_block(block_3, accepted=False)
        # If we drop the last checksig in this output, the tx should succeed.
        block_4 = self.build_next_block()
        tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1))
        tx2.rehash()
        self.update_witness_block_with_transactions(block_4, [tx2])
        self.test_node.test_witness_block(block_4, accepted=True)
        # Reset the tip back down for the next test
        sync_blocks(self.nodes)
        for x in self.nodes:
            x.invalidateblock(block_4.hash)
        # Try replacing the last input of tx2 to be spending the last
        # output of tx
        block_5 = self.build_next_block()
        tx2.vout.pop()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs-1), b""))
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ]
        tx2.rehash()
        self.update_witness_block_with_transactions(block_5, [tx2])
        self.test_node.test_witness_block(block_5, accepted=True)
        # TODO: test p2sh sigop counting
    def test_getblocktemplate_before_lockin(self):
        """Check getblocktemplate's segwit signalling before lockin.

        Without the "segwit" rule, no node should signal the version bit
        or return default_witness_commitment.  With the rule, only the
        segwit-aware node (node0, not node2) signals and returns a
        commitment, which is recomputed and verified here.
        """
        print("\tTesting getblocktemplate setting of segwit versionbit (before lockin)")
        # Node0 is segwit aware, node2 is not.
        for node in [self.nodes[0], self.nodes[2]]:
            gbt_results = node.getblocktemplate()
            block_version = gbt_results['version']
            # If we're not indicating segwit support, we should not be signalling
            # for segwit activation, nor should we get a witness commitment.
            assert_equal(block_version & (1 << VB_WITNESS_BIT), 0)
            assert('default_witness_commitment' not in gbt_results)
        # Workaround:
        # Can either change the tip, or change the mempool and wait 5 seconds
        # to trigger a recomputation of getblocktemplate.
        txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16)
        # Using mocktime lets us avoid sleep()
        sync_mempools(self.nodes)
        self.nodes[0].setmocktime(int(time.time())+10)
        self.nodes[2].setmocktime(int(time.time())+10)
        for node in [self.nodes[0], self.nodes[2]]:
            gbt_results = node.getblocktemplate({"rules" : ["segwit"]})
            block_version = gbt_results['version']
            if node == self.nodes[2]:
                # If this is a non-segwit node, we should still not get a witness
                # commitment, nor a version bit signalling segwit.
                assert_equal(block_version & (1 << VB_WITNESS_BIT), 0)
                assert('default_witness_commitment' not in gbt_results)
            else:
                # For segwit-aware nodes, check the version bit and the witness
                # commitment are correct.
                assert(block_version & (1 << VB_WITNESS_BIT) != 0)
                assert('default_witness_commitment' in gbt_results)
                witness_commitment = gbt_results['default_witness_commitment']
                # TODO: this duplicates some code from blocktools.py, would be nice
                # to refactor.
                # Check that default_witness_commitment is present.
                block = CBlock()
                witness_root = block.get_merkle_root([ser_uint256(0), ser_uint256(txid)])
                check_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(0)))
                from test_framework.blocktools import WITNESS_COMMITMENT_HEADER
                output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(check_commitment)
                script = CScript([OP_RETURN, output_data])
                assert_equal(witness_commitment, bytes_to_hex_str(script))
        # undo mocktime
        self.nodes[0].setmocktime(0)
        self.nodes[2].setmocktime(0)
    # Uncompressed pubkeys are no longer supported in default relay policy,
    # but (for now) are still valid in blocks.
    def test_uncompressed_pubkey(self):
        """Segwit spends with uncompressed pubkeys: non-standard, but
        consensus-valid.

        Walks a chain of spends through P2WPKH, P2WSH and P2SH(P2WSH),
        expecting policy rejection plus block acceptance for each, then
        confirms uncompressed keys still work in a non-segwit spend.
        """
        print("\tTesting uncompressed pubkeys")
        # Segwit transactions using uncompressed pubkeys are not accepted
        # under default policy, but should still pass consensus.
        key = CECKey()
        key.set_secretbytes(b"9")
        key.set_compressed(False)
        pubkey = CPubKey(key.get_pubkey())
        assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey
        assert(len(self.utxo) > 0)
        utxo = self.utxo.pop(0)
        # Test 1: P2WPKH
        # First create a P2WPKH output that uses an uncompressed pubkey
        pubkeyhash = hash160(pubkey)
        scriptPKH = CScript([OP_0, pubkeyhash])
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b""))
        tx.vout.append(CTxOut(utxo.nValue-1000, scriptPKH))
        tx.rehash()
        # Confirm it in a block.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        # Now try to spend it. Send it to a P2WSH output, which we'll
        # use in the next test.
        witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
        witness_hash = sha256(witness_program)
        scriptWSH = CScript([OP_0, witness_hash])
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptWSH))
        script = GetP2PKHScript(pubkeyhash)
        sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
        signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ signature, pubkey ]
        tx2.rehash()
        # Should fail policy test.
        self.test_node.test_transaction_acceptance(tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
        # But passes consensus.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)
        # Test 2: P2WSH
        # Try to spend the P2WSH output created in last test.
        # Send it to a P2SH(P2WSH) output, which we'll use in the next test.
        p2sh_witness_hash = hash160(scriptWSH)
        scriptP2SH = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL])
        scriptSig = CScript([scriptWSH])
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
        tx3.vout.append(CTxOut(tx2.vout[0].nValue-1000, scriptP2SH))
        tx3.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
        # Should fail policy test.
        self.test_node.test_transaction_acceptance(tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
        # But passes consensus.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx3])
        self.test_node.test_witness_block(block, accepted=True)
        # Test 3: P2SH(P2WSH)
        # Try to spend the P2SH output created in the last test.
        # Send it to a P2PKH output, which we'll use in the next test.
        scriptPubKey = GetP2PKHScript(pubkeyhash)
        tx4 = CTransaction()
        tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), scriptSig))
        tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, scriptPubKey))
        tx4.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
        # Should fail policy test.
        self.test_node.test_transaction_acceptance(tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx4])
        self.test_node.test_witness_block(block, accepted=True)
        # Test 4: Uncompressed pubkeys should still be valid in non-segwit
        # transactions.
        tx5 = CTransaction()
        tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b""))
        tx5.vout.append(CTxOut(tx4.vout[0].nValue-1000, CScript([OP_TRUE])))
        (sig_hash, err) = SignatureHash(scriptPubKey, tx5, 0, SIGHASH_ALL)
        signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
        tx5.vin[0].scriptSig = CScript([signature, pubkey])
        tx5.rehash()
        # Should pass policy and consensus.
        self.test_node.test_transaction_acceptance(tx5, True, True)
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx5])
        self.test_node.test_witness_block(block, accepted=True)
        self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
    def test_non_standard_witness(self):
        """Check policy limits on P2WSH witnesses (std_node vs test_node).

        Exercises three standardness limits, for both native P2WSH and
        P2SH-P2WSH: witness stack item count (<= 100 excluding the
        witnessScript), stack element size (<= 80 bytes), and
        witnessScript size (<= 3600 bytes).  Violations are rejected by
        std_node with 'bad-witness-nonstandard' but accepted by test_node.
        """
        print("\tTesting detection of non-standard P2WSH witness")
        pad = chr(1).encode('latin-1')
        # Create scripts for tests
        scripts = []
        scripts.append(CScript([OP_DROP] * 100))
        scripts.append(CScript([OP_DROP] * 99))
        scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60))
        scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61))
        p2wsh_scripts = []
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        # For each script, generate a pair of P2WSH and P2SH-P2WSH output.
        outputvalue = (self.utxo[0].nValue - 1000) // (len(scripts) * 2)
        for i in scripts:
            p2wsh = CScript([OP_0, sha256(i)])
            p2sh = hash160(p2wsh)
            p2wsh_scripts.append(p2wsh)
            tx.vout.append(CTxOut(outputvalue, p2wsh))
            tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL])))
        tx.rehash()
        txid = tx.sha256
        self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        # Creating transactions for tests
        p2wsh_txs = []
        p2sh_txs = []
        for i in range(len(scripts)):
            # Even output indices are native P2WSH, odd are P2SH-P2WSH.
            p2wsh_tx = CTransaction()
            p2wsh_tx.vin.append(CTxIn(COutPoint(txid,i*2)))
            p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
            p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
            p2wsh_tx.rehash()
            p2wsh_txs.append(p2wsh_tx)
            p2sh_tx = CTransaction()
            p2sh_tx.vin.append(CTxIn(COutPoint(txid,i*2+1), CScript([p2wsh_scripts[i]])))
            p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(hex_str_to_bytes(""))])))
            p2sh_tx.wit.vtxinwit.append(CTxInWitness())
            p2sh_tx.rehash()
            p2sh_txs.append(p2sh_tx)
        # Testing native P2WSH
        # Witness stack size, excluding witnessScript, over 100 is non-standard
        p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
        self.std_node.test_transaction_acceptance(p2wsh_txs[0], True, False, b'bad-witness-nonstandard')
        # Non-standard nodes should accept
        self.test_node.test_transaction_acceptance(p2wsh_txs[0], True, True)
        # Stack element size over 80 bytes is non-standard
        p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
        self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, False, b'bad-witness-nonstandard')
        # Non-standard nodes should accept
        self.test_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
        # Standard nodes should accept if element size is not over 80 bytes
        p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
        self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
        # witnessScript size at 3600 bytes is standard
        p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
        self.test_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
        self.std_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
        # witnessScript size at 3601 bytes is non-standard
        p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
        self.std_node.test_transaction_acceptance(p2wsh_txs[3], True, False, b'bad-witness-nonstandard')
        # Non-standard nodes should accept
        self.test_node.test_transaction_acceptance(p2wsh_txs[3], True, True)
        # Repeating the same tests with P2SH-P2WSH
        p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
        self.std_node.test_transaction_acceptance(p2sh_txs[0], True, False, b'bad-witness-nonstandard')
        self.test_node.test_transaction_acceptance(p2sh_txs[0], True, True)
        p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
        self.std_node.test_transaction_acceptance(p2sh_txs[1], True, False, b'bad-witness-nonstandard')
        self.test_node.test_transaction_acceptance(p2sh_txs[1], True, True)
        p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
        self.std_node.test_transaction_acceptance(p2sh_txs[1], True, True)
        p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
        self.test_node.test_transaction_acceptance(p2sh_txs[2], True, True)
        self.std_node.test_transaction_acceptance(p2sh_txs[2], True, True)
        p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
        self.std_node.test_transaction_acceptance(p2sh_txs[3], True, False, b'bad-witness-nonstandard')
        self.test_node.test_transaction_acceptance(p2sh_txs[3], True, True)
        self.nodes[0].generate(1) # Mine and clean up the mempool of non-standard node
        # Valid but non-standard transactions in a block should be accepted by standard node
        sync_blocks(self.nodes)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        self.utxo.pop(0)
def run_test(self):
    """Drive the full segwit P2P test suite in BIP9 deployment order:
    pre-lock-in, post-lock-in/pre-activation, and post-activation.
    """
    # Setup the p2p connections and start up the network thread.
    self.test_node = TestNode()  # sets NODE_WITNESS|NODE_NETWORK
    self.old_node = TestNode()  # only NODE_NETWORK
    self.std_node = TestNode()  # for testing node1 (fRequireStandard=true)
    self.p2p_connections = [self.test_node, self.old_node]
    self.connections = []
    # test_node and old_node peer with node0; std_node peers with the
    # standardness-enforcing node1.
    self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS))
    self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK))
    self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS))
    self.test_node.add_connection(self.connections[0])
    self.old_node.add_connection(self.connections[1])
    self.std_node.add_connection(self.connections[2])
    NetworkThread().start()  # Start up network handling in another thread
    # Keep a place to store utxo's that can be used in later tests
    self.utxo = []
    # Test logic begins here
    self.test_node.wait_for_verack()
    print("\nStarting tests before segwit lock in:")
    self.test_witness_services()  # Verifies NODE_WITNESS
    self.test_non_witness_transaction()  # non-witness tx's are accepted
    self.test_unnecessary_witness_before_segwit_activation()
    self.test_block_relay(segwit_activated=False)
    # Advance to segwit being 'started'
    self.advance_to_segwit_started()
    sync_blocks(self.nodes)
    self.test_getblocktemplate_before_lockin()
    sync_blocks(self.nodes)
    # At lockin, nothing should change.
    print("\nTesting behavior post lockin, pre-activation")
    self.advance_to_segwit_lockin()
    # Retest unnecessary witnesses
    self.test_unnecessary_witness_before_segwit_activation()
    self.test_witness_tx_relay_before_segwit_activation()
    self.test_block_relay(segwit_activated=False)
    self.test_p2sh_witness(segwit_activated=False)
    self.test_standardness_v0(segwit_activated=False)
    sync_blocks(self.nodes)
    # Now activate segwit
    print("\nTesting behavior after segwit activation")
    self.advance_to_segwit_active()
    sync_blocks(self.nodes)
    # Test P2SH witness handling again
    self.test_p2sh_witness(segwit_activated=True)
    self.test_witness_commitments()
    self.test_block_malleability()
    self.test_witness_block_size()
    self.test_submit_block()
    self.test_extra_witness_data()
    self.test_max_witness_push_length()
    self.test_max_witness_program_length()
    self.test_witness_input_length()
    self.test_block_relay(segwit_activated=True)
    self.test_tx_relay_after_segwit_activation()
    self.test_standardness_v0(segwit_activated=True)
    self.test_segwit_versions()
    self.test_premature_coinbase_witness_spend()
    self.test_uncompressed_pubkey()
    self.test_signature_version_1()
    self.test_non_standard_witness()
    sync_blocks(self.nodes)
    # node2 started without segwit support and upgrades after activation.
    self.test_upgrade_after_activation(self.nodes[2], 2)
    self.test_witness_sigops()
# Script entry point: run the whole segwit test suite.
if __name__ == '__main__':
    SegWitTest().main()
| codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
#
# This file is part of the python-chess library.
# Copyright (C) 2015 Jean-Noël Avila <jn.avila@free.fr>
# Copyright (C) 2015-2016 Niklas Fiekas <niklas.fiekas@backscattering.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ctypes
import ctypes.util
import fnmatch
import os
import os.path
import logging
import struct
import chess
try:
import backport_collections as collections
except ImportError:
import collections
LOGGER = logging.getLogger(__name__)

# Sentinels: valid squares are 0..63, valid indices are >= 0.
NOSQUARE = 64
NOINDEX = -1

# Piece encoding: low 3 bits hold the piece type, bits 6/7 select the color.
WHITES = 1 << 6
BLACKS = 1 << 7
NOPIECE = 0
PAWN = 1
KNIGHT = 2
BISHOP = 3
ROOK = 4
QUEEN = 5
KING = 6

# Colored piece codes (e.g. wK = white king, bQ = black queen).
wK = KING | WHITES
wP = PAWN | WHITES
wN = KNIGHT | WHITES
wB = BISHOP | WHITES
wR = ROOK | WHITES
wQ = QUEEN | WHITES
bK = KING | BLACKS
bP = PAWN | BLACKS
bN = KNIGHT | BLACKS
bB = BISHOP | BLACKS
bR = ROOK | BLACKS
bQ = QUEEN | BLACKS

# Sizes of the component index spaces used by the *_pctoindex functions.
MAX_KKINDEX = 462
MAX_PPINDEX = 576
MAX_PpINDEX = 24 * 48
MAX_AAINDEX = (63 - 62) + (62 // 2 * (127 - 62)) - 1 + 1
MAX_AAAINDEX = 64 * 21 * 31
MAX_PPP48_INDEX = 8648
MAX_PP48_INDEX = 1128

# Maximum index (table size) for each endgame material class.
MAX_KXK = MAX_KKINDEX * 64
MAX_kabk = MAX_KKINDEX * 64 * 64
MAX_kakb = MAX_KKINDEX * 64 * 64
MAX_kpk = 24 * 64 * 64
MAX_kakp = 24 * 64 * 64 * 64
MAX_kapk = 24 * 64 * 64 * 64
MAX_kppk = MAX_PPINDEX * 64 * 64
MAX_kpkp = MAX_PpINDEX * 64 * 64
MAX_kaak = MAX_KKINDEX * MAX_AAINDEX
MAX_kabkc = MAX_KKINDEX * 64 * 64 * 64
MAX_kabck = MAX_KKINDEX * 64 * 64 * 64
MAX_kaakb = MAX_KKINDEX * MAX_AAINDEX * 64
MAX_kaabk = MAX_KKINDEX * MAX_AAINDEX * 64
MAX_kabbk = MAX_KKINDEX * MAX_AAINDEX * 64
MAX_kaaak = MAX_KKINDEX * MAX_AAAINDEX
MAX_kapkb = 24 * 64 * 64 * 64 * 64
MAX_kabkp = 24 * 64 * 64 * 64 * 64
MAX_kabpk = 24 * 64 * 64 * 64 * 64
MAX_kppka = MAX_kppk * 64
MAX_kappk = MAX_kppk * 64
MAX_kapkp = MAX_kpkp * 64
MAX_kaapk = 24 * MAX_AAINDEX * 64 * 64
MAX_kaakp = 24 * MAX_AAINDEX * 64 * 64
MAX_kppkp = 24 * MAX_PP48_INDEX * 64 * 64
MAX_kpppk = MAX_PPP48_INDEX * 64 * 64

# Stored entry layout: low 3 bits are info flags, the rest is the ply count.
PLYSHIFT = 3
INFOMASK = 7

# Bit flags describing the board reflections applied by flip_type().
WE_FLAG = 1
NS_FLAG = 2
NW_SE_FLAG = 4

# Pawn counter (0..47) -> board square: kingside files (h..e) first, then
# queenside (d..a), ranks 7 down to 2.
ITOSQ = [
    chess.H7, chess.G7, chess.F7, chess.E7,
    chess.H6, chess.G6, chess.F6, chess.E6,
    chess.H5, chess.G5, chess.F5, chess.E5,
    chess.H4, chess.G4, chess.F4, chess.E4,
    chess.H3, chess.G3, chess.F3, chess.E3,
    chess.H2, chess.G2, chess.F2, chess.E2,
    chess.D7, chess.C7, chess.B7, chess.A7,
    chess.D6, chess.C6, chess.B6, chess.A6,
    chess.D5, chess.C5, chess.B5, chess.A5,
    chess.D4, chess.C4, chess.B4, chess.A4,
    chess.D3, chess.C3, chess.B3, chess.A3,
    chess.D2, chess.C2, chess.B2, chess.A2,
]

# On-disk block geometry of the tablebase files.
ENTRIES_PER_BLOCK = 16 * 1024
EGTB_MAXBLOCKSIZE = 65536
def map24_b(s):
    """Map a black pawn square (queenside files, ranks 2-7) to 0..23."""
    t = s - 8  # drop the first rank
    return (t + (t & 3)) >> 1


def in_queenside(x):
    """True if the square lies on files a-d."""
    return (x & 4) == 0


def flip_we(x):
    """Mirror a square across the vertical (west-east) axis."""
    return x ^ 0x07


def flip_ns(x):
    """Mirror a square across the horizontal (north-south) axis."""
    return x ^ 0x38


def flip_nw_se(x):
    """Mirror a square across the a1-h8 diagonal (swap rank and file)."""
    return ((x & 0x07) << 3) | (x >> 3)


def idx_is_empty(x):
    """True if an index slot still holds the unassigned marker (-1)."""
    return x == -1


def getcol(x):
    """File (0..7) of a square."""
    return x & 0x07


def getrow(x):
    """Rank (0..7) of a square."""
    return x >> 3
def flip_type(x, y):
    """Return the reflections needed to normalize square ``x`` into the
    a1-d1-d4 triangle (``y`` breaks ties when ``x`` lies on the a1-h8
    diagonal), as a bitmask of WE_FLAG | NS_FLAG | NW_SE_FLAG.
    """
    ret = 0
    if getcol(x) > 3:
        # Mirror into the queenside files.
        x = flip_we(x)
        y = flip_we(y)
        ret |= 1
    if getrow(x) > 3:
        # Mirror into the bottom half of the board.
        x = flip_ns(x)
        y = flip_ns(y)
        ret |= 2
    rowx = getrow(x)
    colx = getcol(x)
    if rowx > colx:
        # Mirror below the a1-h8 diagonal.
        x = flip_nw_se(x)
        y = flip_nw_se(y)
        ret |= 4
    rowy = getrow(y)
    coly = getcol(y)
    if rowx == colx and rowy > coly:
        # x sits on the diagonal: use y to pick the canonical side.
        x = flip_nw_se(x)
        y = flip_nw_se(y)
        ret |= 4
    return ret
def init_flipt():
    """Precompute flip_type() for every ordered square pair."""
    return [[flip_type(j, i) for i in range(64)] for j in range(64)]

# FLIPT[black_king_square][white_king_square] -> flip bitmask.
FLIPT = init_flipt()
def init_pp48_idx():
    """Build the symmetric two-pawn index over the 48 pawn squares.

    Returns ``(pp48_idx, pp48_sq_x, pp48_sq_y)``: ``pp48_idx[i][j]``
    maps an unordered pair of WE/NS-flipped, rank-stripped pawn squares
    to a dense index < MAX_PP48_INDEX, and the two ``pp48_sq_*`` lists
    invert that mapping.
    """
    MAX_I = 48
    MAX_J = 48
    pp48_idx = [[-1] * MAX_J for i in range(MAX_I)]
    pp48_sq_x = [NOSQUARE] * MAX_PP48_INDEX
    pp48_sq_y = [NOSQUARE] * MAX_PP48_INDEX
    # Fix: idx was redundantly initialized twice in a row; once suffices.
    idx = 0
    for a in range(chess.H7, chess.A2 - 1, -1):
        for b in range(a - 1, chess.A2 - 1, -1):
            i = flip_we(flip_ns(a)) - 8
            j = flip_we(flip_ns(b)) - 8
            if idx_is_empty(pp48_idx[i][j]):
                # Store the pair symmetrically so lookup order never matters.
                pp48_idx[i][j] = idx
                pp48_idx[j][i] = idx
                pp48_sq_x[idx] = i
                pp48_sq_y[idx] = j
                idx += 1
    return pp48_idx, pp48_sq_x, pp48_sq_y

PP48_IDX, PP48_SQ_X, PP48_SQ_Y = init_pp48_idx()
def init_ppp48_idx():
    """Build the symmetric three-pawn index over the 48 pawn squares.

    Returns ``(ppp48_idx, ppp48_sq_x, ppp48_sq_y, ppp48_sq_z)``:
    ``ppp48_idx[i][j][k]`` maps an unordered triple of rank-stripped
    pawn squares to a dense index < MAX_PPP48_INDEX; the three
    ``ppp48_sq_*`` lists invert the mapping.
    """
    MAX_I = 48
    MAX_J = 48
    MAX_K = 48
    ppp48_idx = [[[-1] * MAX_I for j in range(MAX_J)] for k in range(MAX_K)]
    ppp48_sq_x = [NOSQUARE] * MAX_PPP48_INDEX
    ppp48_sq_y = [NOSQUARE] * MAX_PPP48_INDEX
    ppp48_sq_z = [NOSQUARE] * MAX_PPP48_INDEX
    idx = 0
    for x in range(48):
        for y in range(x + 1, 48):
            for z in range(y + 1, 48):
                # ITOSQ enumerates kingside squares first, so requiring the
                # later two squares to be queenside canonicalizes the triple.
                a = ITOSQ[x]
                b = ITOSQ[y]
                c = ITOSQ[z]
                if not in_queenside(b) or not in_queenside(c):
                    continue
                i = a - 8
                j = b - 8
                k = c - 8
                if idx_is_empty(ppp48_idx[i][j][k]):
                    # Store all six permutations of the unordered triple.
                    ppp48_idx[i][j][k] = idx
                    ppp48_idx[i][k][j] = idx
                    ppp48_idx[j][i][k] = idx
                    ppp48_idx[j][k][i] = idx
                    ppp48_idx[k][i][j] = idx
                    ppp48_idx[k][j][i] = idx
                    ppp48_sq_x[idx] = i
                    ppp48_sq_y[idx] = j
                    ppp48_sq_z[idx] = k
                    idx = idx + 1
    return ppp48_idx, ppp48_sq_x, ppp48_sq_y, ppp48_sq_z

PPP48_IDX, PPP48_SQ_X, PPP48_SQ_Y, PPP48_SQ_Z = init_ppp48_idx()
def init_aaidx():
    """Build the symmetric index for a pair of like pieces.

    Returns ``(aabase, aaidx)``: ``aaidx[x][y]`` maps an unordered pair
    of distinct squares to a dense index < MAX_AAINDEX; ``aabase[idx]``
    records the smaller square of each pair.
    """
    aaidx = [[-1] * 64 for y in range(64)]
    aabase = [0] * MAX_AAINDEX
    idx = 0
    for x in range(64):
        for y in range(x + 1, 64):
            if idx_is_empty(aaidx[x][y]):
                # Still empty.
                aaidx[x][y] = idx
                aaidx[y][x] = idx
                aabase[idx] = x
                idx += 1
    return aabase, aaidx

# AAIDX[sq1][sq2] -> pair index (order-independent); AABASE inverts it.
AABASE, AAIDX = init_aaidx()
def init_aaa():
    """Build the index tables for a triple of like pieces.

    Returns ``(aaa_base, aaa_xyz)``: ``aaa_base[z]`` counts the square
    pairs available below square ``z`` (cumulative C(a, 2)), and
    ``aaa_xyz[idx]`` holds the ascending square triple for each dense
    index < MAX_AAAINDEX.
    """
    # Get aaa_base.
    comb = [a * (a - 1) // 2 for a in range(64)]  # C(a, 2)
    accum = 0
    aaa_base = [0] * 64
    for a in range(64 - 1):
        accum = accum + comb[a]
        aaa_base[a + 1] = accum
    # Get aaa_xyz.
    aaa_xyz = [[-1] * 3 for idx in range(MAX_AAAINDEX)]
    idx = 0
    # Enumerate ascending triples x < y < z in z-major order.
    for z in range(64):
        for y in range(z):
            for x in range(y):
                aaa_xyz[idx][0] = x
                aaa_xyz[idx][1] = y
                aaa_xyz[idx][2] = z
                idx += 1
    return aaa_base, aaa_xyz

AAA_BASE, AAA_XYZ = init_aaa()
def pp_putanchorfirst(a, b):
    """Order a white pawn pair as ``(anchor, loosen)``.

    The anchor is the pawn on the higher rank; on equal ranks the pawn
    whose file is closer to the board center wins, with the raw square
    number as the final tie-break.
    """
    def center_weight(sq):
        # Keep only the higher of the two file bits mirrored around the
        # center: larger means closer to the d/e files.
        col = sq & 7
        mask = (1 << col) | (1 << (col ^ 7))
        return mask & (mask - 1)

    row_a = a & 56
    row_b = b & 56
    if row_b > row_a:
        return b, a
    if row_b < row_a:
        return a, b
    # Same rank: compare centrality, then square number.
    weight_a = center_weight(a)
    weight_b = center_weight(b)
    if weight_b > weight_a:
        return b, a
    if weight_b < weight_a:
        return a, b
    return (a, b) if a < b else (b, a)
def wsq_to_pidx24(pawn):
    """Map a white pawn square (assumed on files a-d, ranks 2-7) to a
    0..23 slice index (NS-flipped, rank-stripped, file folded).
    """
    sq = pawn
    sq = flip_ns(sq)
    sq -= 8  # Down one row.
    idx24 = (sq + (sq & 3)) >> 1
    return idx24

def wsq_to_pidx48(pawn):
    """Map a white pawn square (ranks 2-7) to a 0..47 index
    (NS-flipped, rank-stripped, all eight files kept).
    """
    sq = pawn
    sq = flip_ns(sq)
    sq -= 8  # Down one row.
    idx48 = sq
    return idx48
def init_ppidx():
    """Build the anchored white pawn-pair index.

    Returns ``(ppidx, pp_hi24, pp_lo48)``: ``ppidx[i][j]`` maps an
    (anchor, loose) pawn pair — anchor reduced to 24 squares, loose pawn
    to 48 — onto a dense index < MAX_PPINDEX; ``pp_hi24``/``pp_lo48``
    invert the mapping.
    """
    ppidx = [[-1] * 48 for i in range(24)]
    pp_hi24 = [-1] * MAX_PPINDEX
    pp_lo48 = [-1] * MAX_PPINDEX
    idx = 0
    for a in range(chess.H7, chess.A2 - 1, -1):
        if in_queenside(a):
            continue
        for b in range(a - 1, chess.A2 - 1, -1):
            # Fix: anchor/loosen were pre-initialized to 0 and immediately
            # overwritten; the dead stores have been removed.
            anchor, loosen = pp_putanchorfirst(a, b)
            if (anchor & 7) > 3:
                # Square in the kingside.
                anchor = flip_we(anchor)
                loosen = flip_we(loosen)
            i = wsq_to_pidx24(anchor)
            j = wsq_to_pidx48(loosen)
            if idx_is_empty(ppidx[i][j]):
                ppidx[i][j] = idx
                pp_hi24[idx] = i
                pp_lo48[idx] = j
                idx += 1
    return ppidx, pp_hi24, pp_lo48

PPIDX, PP_HI24, PP_LO48 = init_ppidx()
def bb_isbiton(bb, bit):
    """True if ``bit`` is set in the bitboard ``bb``."""
    return bool((bb >> bit) & 1)


def map88(x):
    """Convert a 0..63 square to its 0x88 board coordinate."""
    rank, column = x >> 3, x & 7
    return 16 * rank + column


def unmap88(x):
    """Convert a 0x88 coordinate back to a 0..63 square."""
    # (x + file) >> 1 == 8 * rank + file for valid 0x88 squares.
    return (x + (x & 7)) >> 1


def mapx88(x):
    """Convert a 0..63 square to 0x88 (bit-shuffling variant)."""
    return ((x >> 3) << 4) | (x & 7)
# Movement deltas on the 0x88 board, each list terminated by 0.
BSTEP = [17, 15, -15, -17, 0]  # bishop diagonals
RSTEP = [1, 16, -1, -16, 0]  # rook files/ranks
NSTEP = [18, 33, 31, 14, -18, -33, -31, -14, 0]  # knight jumps
KSTEP = [1, 17, 16, 15, -1, -17, -16, -15, 0]  # all eight neighbors

# Step table per piece type (indexed PAWN..KING; pawns handled separately).
PSTEPARR = [
    None,  # No piece.
    None,  # Pawn.
    NSTEP,
    BSTEP,
    RSTEP,
    KSTEP,  # Queen.
    KSTEP,  # King.
]

# Whether the piece slides (repeats its step) rather than stepping once.
PSLIDER = [
    False,  # No piece.
    False,  # Pawn.
    False,
    True,
    True,
    True,
    False,
]
def gen_rev(occ, input_piece, sq):
    # Get list of reversible piece moves. Yields squares.
    """Yield the target squares of reversible (non-pawn) moves for
    ``input_piece`` standing on ``sq``; sliding pieces stop before the
    first square occupied in the ``occ`` bitboard.
    """
    from_ = map88(sq)
    # Mask off the color bits, keeping only the piece type (low 3 bits).
    pc = input_piece & (PAWN | KNIGHT | BISHOP | ROOK | QUEEN | KING)
    steparr = PSTEPARR[pc]
    slider = PSLIDER[pc]
    if slider:
        for step in steparr:
            if step == 0:
                break  # end-of-list terminator
            s = from_ + step
            while 0 == (s & 0x88):
                us = unmap88(s)
                if 0 != (0x1 & (occ >> us)):
                    break  # ray blocked by an occupied square
                yield us
                s += step
    else:
        for step in steparr:
            if step == 0:
                break  # end-of-list terminator
            s = from_ + step
            if 0 == (s & 0x88):
                us = unmap88(s)
                if 0 == (0x1 & (occ >> us)):
                    yield us
def reach_init():
    """Precompute empty-board attack bitboards ``reach[piece][square]``
    for KNIGHT..KING; row 1 holds upward (white) pawn attacks and row 0
    downward (black) pawn attacks.
    """
    stp_a = [15, -15]  # pawn capture step, one diagonal
    stp_b = [17, -17]  # pawn capture step, other diagonal
    reach = [[-1] * 64 for _ in range(7)]
    for pc in range(KNIGHT, KING + 1):
        for sq in range(64):
            bb = 0
            for li in gen_rev(0, pc, sq):
                bb |= 1 << li
            reach[pc][sq] = bb
    for side in range(2):
        # side 0 -> row 1 with steps +15/+17 (up); side 1 -> row 0 (down).
        index = 1 ^ side
        step_a = stp_a[side]
        step_b = stp_b[side]
        for sq in range(64):
            sq88 = map88(sq)
            bb = 0
            thelist = []
            s = sq88 + step_a
            if 0 == (s & 0x88):
                us = unmap88(s)
                thelist.append(us)
            s = sq88 + step_b
            if 0 == (s & 0x88):
                us = unmap88(s)
                thelist.append(us)
            for li in thelist:
                bb |= 1 << li
            reach[index][sq] = bb
    return reach

# REACH[piece_type][square] -> empty-board attack bitboard.
REACH = reach_init()
def attack_maps_init():
    """Build the attack lookup tables.

    Returns ``(attmsk, attmap)``: ``attmsk[piece]`` assigns a one-bit
    mask per piece kind, and ``attmap[to][from]`` ORs the masks of every
    piece kind that could attack ``to`` from ``from`` on an empty board.
    """
    attmsk = [0] * 256
    # White and black pawns attack in opposite directions, so they get
    # distinct bits; all other piece kinds share a bit per type.
    attmsk[wP] = 1 << 0
    attmsk[bP] = 1 << 1
    attmsk[KNIGHT] = 1 << 2
    attmsk[wN] = 1 << 2
    attmsk[bN] = 1 << 2
    attmsk[BISHOP] = 1 << 3
    attmsk[wB] = 1 << 3
    attmsk[bB] = 1 << 3
    attmsk[ROOK] = 1 << 4
    attmsk[wR] = 1 << 4
    attmsk[bR] = 1 << 4
    attmsk[QUEEN] = 1 << 5
    attmsk[wQ] = 1 << 5
    attmsk[bQ] = 1 << 5
    attmsk[KING] = 1 << 6
    attmsk[wK] = 1 << 6
    attmsk[bK] = 1 << 6
    attmap = [[0] * 64 for i in range(64)]
    for to_ in range(64):
        for from_ in range(64):
            m = 0
            rook = REACH[ROOK][from_]
            bishop = REACH[BISHOP][from_]
            queen = REACH[QUEEN][from_]
            knight = REACH[KNIGHT][from_]
            king = REACH[KING][from_]
            if bb_isbiton(knight, to_):
                m |= attmsk[wN]
            if bb_isbiton(king, to_):
                m |= attmsk[wK]
            if bb_isbiton(rook, to_):
                m |= attmsk[wR]
            if bb_isbiton(bishop, to_):
                m |= attmsk[wB]
            if bb_isbiton(queen, to_):
                m |= attmsk[wQ]
            # Pawn attacks depend on direction: +15/+17 on the 0x88 board
            # for white, -15/-17 for black.
            to88 = mapx88(to_)
            fr88 = mapx88(from_)
            diff = to88 - fr88
            if diff in [17, 15]:
                m |= attmsk[wP]
            elif diff in [-17, -15]:
                m |= attmsk[bP]
            attmap[to_][from_] = m
    return attmsk, attmap

ATTMSK, ATTMAP = attack_maps_init()

def possible_attack(from_, to_, piece):
    """True if ``piece`` on ``from_`` could attack ``to_`` on an empty
    board.
    """
    return 0 != ATTMAP[to_][from_] & ATTMSK[piece]
def norm_kkindex(x, y):
    """Normalize the king pair: reflect square ``x`` into the a1-d1-d4
    triangle, applying the same flips to ``y``; returns the new pair.
    """
    if getcol(x) > 3:
        # Mirror into the queenside files.
        x = flip_we(x)
        y = flip_we(y)
    if getrow(x) > 3:
        # Mirror into the bottom half of the board.
        x = flip_ns(x)
        y = flip_ns(y)
    rowx = getrow(x)
    colx = getcol(x)
    if (rowx > colx):
        # Mirror below the a1-h8 diagonal.
        x = flip_nw_se(x)
        y = flip_nw_se(y)
    rowy = getrow(y)
    coly = getcol(y)
    if rowx == colx and rowy > coly:
        # x sits on the diagonal: use y to pick the canonical side.
        x = flip_nw_se(x)
        y = flip_nw_se(y)
    return x, y

def init_kkidx():
    """Build the legal king-pair index.

    Returns ``(kkidx, wksq, bksq)``: ``kkidx[bk][wk]`` maps each legal
    (non-adjacent, non-overlapping) king pair to a dense index
    < MAX_KKINDEX; the square lists record the normalized pair.
    """
    kkidx = [[-1] * 64 for x in range(64)]
    bksq = [-1] * MAX_KKINDEX
    wksq = [-1] * MAX_KKINDEX
    idx = 0
    for x in range(64):
        for y in range(64):
            # Check if x to y is legal: kings must not touch or coincide.
            if not possible_attack(x, y, wK) and x != y:
                # Normalize.
                i, j = norm_kkindex(x, y)
                if idx_is_empty(kkidx[i][j]):
                    kkidx[i][j] = idx
                    kkidx[x][y] = idx
                    bksq[idx] = i
                    wksq[idx] = j
                    idx += 1
    return kkidx, wksq, bksq

KKIDX, WKSQ, BKSQ = init_kkidx()
def kxk_pctoindex(c):
    """Index a K+piece vs K position ``c``: normalized king-pair index
    times 64 plus the white piece square; NOINDEX if the king pair is
    illegal.
    """
    BLOCK_Ax = 64
    ft = flip_type(c.black_piece_squares[0], c.white_piece_squares[0])
    ws = c.white_piece_squares
    bs = c.black_piece_squares
    # Apply the normalizing reflections to every piece.
    if (ft & 1) != 0:
        ws = [flip_we(b) for b in ws]
        bs = [flip_we(b) for b in bs]
    if (ft & 2) != 0:
        ws = [flip_ns(b) for b in ws]
        bs = [flip_ns(b) for b in bs]
    if (ft & 4) != 0:
        ws = [flip_nw_se(b) for b in ws]
        bs = [flip_nw_se(b) for b in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    if ki == -1:
        return NOINDEX
    return ki * BLOCK_Ax + ws[1]

def kapkb_pctoindex(c):
    """Index a K+piece+pawn vs K+piece position (white pawn); NOINDEX
    if the pawn is outside ranks 2-7.
    """
    BLOCK_A = 64 * 64 * 64 * 64
    BLOCK_B = 64 * 64 * 64
    BLOCK_C = 64 * 64
    BLOCK_D = 64
    pawn = c.white_piece_squares[2]
    wa = c.white_piece_squares[1]
    wk = c.white_piece_squares[0]
    bk = c.black_piece_squares[0]
    ba = c.black_piece_squares[1]
    if not (chess.A2 <= pawn < chess.A8):
        return NOINDEX
    if (pawn & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
        ba = flip_we(ba)
    sq = pawn
    sq ^= 56  # flip_ns
    sq -= 8  # down one row
    pslice = (sq + (sq & 3)) >> 1  # fold files a-d into a 24-square slice
    return pslice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + wa * BLOCK_D + ba

def kabpk_pctoindex(c):
    """Index a K+2 pieces+pawn vs K position (white pawn)."""
    BLOCK_A = 64 * 64 * 64 * 64
    BLOCK_B = 64 * 64 * 64
    BLOCK_C = 64 * 64
    BLOCK_D = 64
    wk = c.white_piece_squares[0]
    wa = c.white_piece_squares[1]
    wb = c.white_piece_squares[2]
    pawn = c.white_piece_squares[3]
    bk = c.black_piece_squares[0]
    if (pawn & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
        wb = flip_we(wb)
    pslice = wsq_to_pidx24(pawn)
    return pslice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + wa * BLOCK_D + wb

def kabkp_pctoindex(c):
    """Index a K+2 pieces vs K+pawn position (black pawn); NOINDEX if
    the pawn is outside ranks 2-7.
    """
    BLOCK_A = 64 * 64 * 64 * 64
    BLOCK_B = 64 * 64 * 64
    BLOCK_C = 64 * 64
    BLOCK_D = 64
    pawn = c.black_piece_squares[1]
    wa = c.white_piece_squares[1]
    wk = c.white_piece_squares[0]
    bk = c.black_piece_squares[0]
    wb = c.white_piece_squares[2]
    if not (chess.A2 <= pawn < chess.A8):
        return NOINDEX
    if (pawn & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
        wb = flip_we(wb)
    sq = pawn
    sq -= 8  # down one row (black pawn: no NS flip here)
    pslice = (sq + (sq & 3)) >> 1
    return pslice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + wa * BLOCK_D + wb
def kaapk_pctoindex(c):
    """Index a K+2 like pieces+pawn vs K position (white pawn); NOINDEX
    if the like-piece pair has no index.
    """
    BLOCK_C = MAX_AAINDEX
    BLOCK_B = 64 * BLOCK_C
    BLOCK_A = 64 * BLOCK_B
    wk = c.white_piece_squares[0]
    wa = c.white_piece_squares[1]
    wa2 = c.white_piece_squares[2]
    pawn = c.white_piece_squares[3]
    bk = c.black_piece_squares[0]
    if (pawn & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
        wa2 = flip_we(wa2)
    pslice = wsq_to_pidx24(pawn)
    aa_combo = AAIDX[wa][wa2]
    if idx_is_empty(aa_combo):
        return NOINDEX
    return pslice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + aa_combo

def kaakp_pctoindex(c):
    """Index a K+2 like pieces vs K+pawn position (black pawn); NOINDEX
    if the like-piece pair has no index.
    """
    BLOCK_C = MAX_AAINDEX
    BLOCK_B = 64 * BLOCK_C
    BLOCK_A = 64 * BLOCK_B
    wk = c.white_piece_squares[0]
    wa = c.white_piece_squares[1]
    wa2 = c.white_piece_squares[2]
    bk = c.black_piece_squares[0]
    pawn = c.black_piece_squares[1]
    if (pawn & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
        wa2 = flip_we(wa2)
    # Pre-flip so wsq_to_pidx24 (which flips again) nets out to the
    # black pawn's own perspective.
    pawn = flip_ns(pawn)
    pslice = wsq_to_pidx24(pawn)
    aa_combo = AAIDX[wa][wa2]
    if idx_is_empty(aa_combo):
        return NOINDEX
    return pslice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + aa_combo

def kapkp_pctoindex(c):
    """Index a K+piece+pawn vs K+pawn position; the white pawn anchors
    the pawn-pair slice.
    """
    BLOCK_A = 64 * 64 * 64
    BLOCK_B = 64 * 64
    BLOCK_C = 64
    wk = c.white_piece_squares[0]
    wa = c.white_piece_squares[1]
    pawn_a = c.white_piece_squares[2]
    bk = c.black_piece_squares[0]
    pawn_b = c.black_piece_squares[1]
    anchor = pawn_a
    loosen = pawn_b
    if (anchor & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        anchor = flip_we(anchor)
        loosen = flip_we(loosen)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
    m = wsq_to_pidx24(anchor)
    n = loosen - 8
    pp_slice = m * 48 + n
    # Defensive guard retained from the original layout code.
    if idx_is_empty(pp_slice):
        return NOINDEX
    return pp_slice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + wa

def kappk_pctoindex(c):
    """Index a K+piece+2 pawns vs K position; the pawn pair is reduced
    through PPIDX. NOINDEX if the pair has no index.
    """
    BLOCK_A = 64 * 64 * 64
    BLOCK_B = 64 * 64
    BLOCK_C = 64
    wk = c.white_piece_squares[0]
    wa = c.white_piece_squares[1]
    pawn_a = c.white_piece_squares[2]
    pawn_b = c.white_piece_squares[3]
    bk = c.black_piece_squares[0]
    anchor, loosen = pp_putanchorfirst(pawn_a, pawn_b)
    if (anchor & 7) > 3:
        # Column is more than 3, i.e. e, f, g or h.
        anchor = flip_we(anchor)
        loosen = flip_we(loosen)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
    i = wsq_to_pidx24(anchor)
    j = wsq_to_pidx48(loosen)
    pp_slice = PPIDX[i][j]
    if idx_is_empty(pp_slice):
        return NOINDEX
    return pp_slice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + wa
def kppka_pctoindex(c):
    """Index a K+2 pawns vs K+piece position; NOINDEX if the pawn pair
    has no index.
    """
    BLOCK_A = 64 * 64 * 64
    BLOCK_B = 64 * 64
    BLOCK_C = 64
    wk = c.white_piece_squares[0]
    pawn_a = c.white_piece_squares[1]
    pawn_b = c.white_piece_squares[2]
    bk = c.black_piece_squares[0]
    ba = c.black_piece_squares[1]
    anchor, loosen = pp_putanchorfirst(pawn_a, pawn_b)
    if (anchor & 7) > 3:
        # Anchor on the kingside: mirror everything west-east.
        anchor = flip_we(anchor)
        loosen = flip_we(loosen)
        wk = flip_we(wk)
        bk = flip_we(bk)
        ba = flip_we(ba)
    i = wsq_to_pidx24(anchor)
    j = wsq_to_pidx48(loosen)
    pp_slice = PPIDX[i][j]
    if idx_is_empty(pp_slice):
        return NOINDEX
    return pp_slice * BLOCK_A + wk * BLOCK_B + bk * BLOCK_C + ba

def kabck_pctoindex(c):
    """Index a K+3 distinct pieces vs K position; NOINDEX if the king
    pair is illegal.
    """
    N_WHITE = 4
    N_BLACK = 1
    BLOCK_A = 64 * 64 * 64
    BLOCK_B = 64 * 64
    BLOCK_C = 64
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    if (ft & WE_FLAG) != 0:
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if (ft & NS_FLAG) != 0:
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if (ft & NW_SE_FLAG) != 0:
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    if idx_is_empty(ki):
        return NOINDEX
    return ki * BLOCK_A + ws[1] * BLOCK_B + ws[2] * BLOCK_C + ws[3]

def kabbk_pctoindex(c):
    """Index a K+piece+like pair vs K position; NOINDEX if the king
    pair or the like-piece pair has no index.
    """
    N_WHITE = 4
    N_BLACK = 1
    BLOCK_Bx = 64
    BLOCK_Ax = BLOCK_Bx * MAX_AAINDEX
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    if (ft & WE_FLAG) != 0:
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if (ft & NS_FLAG) != 0:
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if (ft & NW_SE_FLAG) != 0:
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    ai = AAIDX[ws[2]][ws[3]]  # the like pair occupies slots 2 and 3
    if idx_is_empty(ki) or idx_is_empty(ai):
        return NOINDEX
    return ki * BLOCK_Ax + ai * BLOCK_Bx + ws[1]

def kaabk_pctoindex(c):
    """Index a K+like pair+piece vs K position; NOINDEX if the king
    pair or the like-piece pair has no index.
    """
    N_WHITE = 4
    N_BLACK = 1
    BLOCK_Bx = 64
    BLOCK_Ax = BLOCK_Bx * MAX_AAINDEX
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    if ((ft & WE_FLAG) != 0):
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if ((ft & NS_FLAG) != 0):
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if ((ft & NW_SE_FLAG) != 0):
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    ai = AAIDX[ws[1]][ws[2]]  # the like pair occupies slots 1 and 2
    if idx_is_empty(ki) or idx_is_empty(ai):
        return NOINDEX
    return ki * BLOCK_Ax + ai * BLOCK_Bx + ws[3]
def aaa_getsubi(x, y, z):
    """Dense index of an ascending square triple ``x < y < z`` in the
    three-like-piece table (AAA_BASE offsets by the largest square).
    """
    bse = AAA_BASE[z]
    calc_idx = x + (y - 1) * y // 2 + bse
    return calc_idx

def kaaak_pctoindex(c):
    """Index a K+3 like pieces vs K position; NOINDEX on an illegal
    king pair or a degenerate (coinciding) triple.
    """
    N_WHITE = 4
    N_BLACK = 1
    BLOCK_Ax = MAX_AAAINDEX
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    if (ft & WE_FLAG) != 0:
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if (ft & NS_FLAG) != 0:
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if (ft & NW_SE_FLAG) != 0:
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    # Sort ws[1..3] ascending with a three-comparison swap network.
    if ws[2] < ws[1]:
        tmp = ws[1]
        ws[1] = ws[2]
        ws[2] = tmp
    if ws[3] < ws[2]:
        tmp = ws[2]
        ws[2] = ws[3]
        ws[3] = tmp
    if ws[2] < ws[1]:
        tmp = ws[1]
        ws[1] = ws[2]
        ws[2] = tmp
    ki = KKIDX[bs[0]][ws[0]]
    if ws[1] == ws[2] or ws[1] == ws[3] or ws[2] == ws[3]:
        return NOINDEX
    ai = aaa_getsubi(ws[1], ws[2], ws[3])
    if idx_is_empty(ki) or idx_is_empty(ai):
        return NOINDEX
    return ki * BLOCK_Ax + ai

def kppkp_pctoindex(c):
    """Index a K+2 pawns vs K+pawn position; the black pawn's file
    chooses the orientation. NOINDEX if the white pair has no index.
    """
    BLOCK_Ax = MAX_PP48_INDEX * 64 * 64
    BLOCK_Bx = 64 * 64
    BLOCK_Cx = 64
    wk = c.white_piece_squares[0]
    pawn_a = c.white_piece_squares[1]
    pawn_b = c.white_piece_squares[2]
    bk = c.black_piece_squares[0]
    pawn_c = c.black_piece_squares[1]
    if (pawn_c & 7) > 3:
        # Black pawn on the kingside: mirror everything west-east.
        wk = flip_we(wk)
        pawn_a = flip_we(pawn_a)
        pawn_b = flip_we(pawn_b)
        bk = flip_we(bk)
        pawn_c = flip_we(pawn_c)
    i = flip_we(flip_ns(pawn_a)) - 8
    j = flip_we(flip_ns(pawn_b)) - 8
    # Black pawn, so low indexes mean more advanced.
    k = map24_b(pawn_c)
    pp48_slice = PP48_IDX[i][j]
    if idx_is_empty(pp48_slice):
        return NOINDEX
    return k * BLOCK_Ax + pp48_slice * BLOCK_Bx + wk * BLOCK_Cx + bk

def kaakb_pctoindex(c):
    """Index a K+like pair vs K+piece position; NOINDEX if the king
    pair or the like-piece pair has no index.
    """
    N_WHITE = 3
    N_BLACK = 2
    BLOCK_Bx = 64
    BLOCK_Ax = BLOCK_Bx * MAX_AAINDEX
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    if (ft & WE_FLAG) != 0:
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if (ft & NS_FLAG) != 0:
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if (ft & NW_SE_FLAG) != 0:
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    ai = AAIDX[ws[1]][ws[2]]
    if idx_is_empty(ki) or idx_is_empty(ai):
        return NOINDEX
    return ki * BLOCK_Ax + ai * BLOCK_Bx + bs[1]
def kabkc_pctoindex(c):
    """Index a K+2 pieces vs K+piece position; NOINDEX if the king
    pair is illegal.
    """
    N_WHITE = 3
    N_BLACK = 2
    BLOCK_Ax = 64 * 64 * 64
    BLOCK_Bx = 64 * 64
    BLOCK_Cx = 64
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    if (ft & WE_FLAG) != 0:
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if (ft & NS_FLAG) != 0:
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if (ft & NW_SE_FLAG) != 0:
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX [black king] [white king]
    if idx_is_empty(ki):
        return NOINDEX
    return ki * BLOCK_Ax + ws[1] * BLOCK_Bx + ws[2] * BLOCK_Cx + bs[1]

def kpkp_pctoindex(c):
    """Index a K+pawn vs K+pawn position; the white pawn anchors the
    pawn-pair slice.
    """
    BLOCK_Ax = 64 * 64
    BLOCK_Bx = 64
    wk = c.white_piece_squares[0]
    bk = c.black_piece_squares[0]
    pawn_a = c.white_piece_squares[1]
    pawn_b = c.black_piece_squares[1]
    anchor = pawn_a
    loosen = pawn_b
    if (anchor & 7) > 3:
        # Anchor on the kingside: mirror everything west-east.
        anchor = flip_we(anchor)
        loosen = flip_we(loosen)
        wk = flip_we(wk)
        bk = flip_we(bk)
    m = wsq_to_pidx24(anchor)
    n = loosen - 8
    pp_slice = m * 48 + n
    # Defensive guard retained from the original layout code.
    if idx_is_empty(pp_slice):
        return NOINDEX
    return pp_slice * BLOCK_Ax + wk * BLOCK_Bx + bk

def kppk_pctoindex(c):
    """Index a K+2 pawns vs K position; NOINDEX if the pawn pair has
    no index.
    """
    BLOCK_Ax = 64 * 64
    BLOCK_Bx = 64
    wk = c.white_piece_squares[0]
    pawn_a = c.white_piece_squares[1]
    pawn_b = c.white_piece_squares[2]
    bk = c.black_piece_squares[0]
    anchor, loosen = pp_putanchorfirst(pawn_a, pawn_b)
    if (anchor & 7) > 3:
        # Anchor on the kingside: mirror everything west-east.
        anchor = flip_we(anchor)
        loosen = flip_we(loosen)
        wk = flip_we(wk)
        bk = flip_we(bk)
    i = wsq_to_pidx24(anchor)
    j = wsq_to_pidx48(loosen)
    pp_slice = PPIDX[i][j]
    if idx_is_empty(pp_slice):
        return NOINDEX
    return pp_slice * BLOCK_Ax + wk * BLOCK_Bx + bk

def kapk_pctoindex(c):
    """Index a K+piece+pawn vs K position (white pawn); NOINDEX if the
    pawn is outside ranks 2-7.
    """
    BLOCK_Ax = 64 * 64 * 64
    BLOCK_Bx = 64 * 64
    BLOCK_Cx = 64
    pawn = c.white_piece_squares[2]
    wa = c.white_piece_squares[1]
    wk = c.white_piece_squares[0]
    bk = c.black_piece_squares[0]
    if not (chess.A2 <= pawn < chess.A8):
        return NOINDEX
    if (pawn & 7) > 3:
        # Pawn on the kingside: mirror everything west-east.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
    sq = pawn
    sq ^= 56  # flip_ns
    sq -= 8  # down one row
    pslice = ((sq + (sq & 3)) >> 1)
    return pslice * BLOCK_Ax + wk * BLOCK_Bx + bk * BLOCK_Cx + wa
def kabk_pctoindex(c):
    """Index a K+2 distinct pieces vs K position; NOINDEX if the king
    pair is illegal.
    """
    BLOCK_Ax = 64 * 64
    BLOCK_Bx = 64
    ft = flip_type(c.black_piece_squares[0], c.white_piece_squares[0])
    ws = c.white_piece_squares
    bs = c.black_piece_squares
    if (ft & 1) != 0:
        ws = [flip_we(b) for b in ws]
        bs = [flip_we(b) for b in bs]
    if (ft & 2) != 0:
        ws = [flip_ns(b) for b in ws]
        bs = [flip_ns(b) for b in bs]
    if (ft & 4) != 0:
        ws = [flip_nw_se(b) for b in ws]
        bs = [flip_nw_se(b) for b in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    if idx_is_empty(ki):
        return NOINDEX
    return ki * BLOCK_Ax + ws[1] * BLOCK_Bx + ws[2]

def kakp_pctoindex(c):
    """Index a K+piece vs K+pawn position (black pawn); NOINDEX if the
    pawn is outside ranks 2-7.
    """
    BLOCK_Ax = 64 * 64 * 64
    BLOCK_Bx = 64 * 64
    BLOCK_Cx = 64
    pawn = c.black_piece_squares[1]
    wa = c.white_piece_squares[1]
    wk = c.white_piece_squares[0]
    bk = c.black_piece_squares[0]
    if not (chess.A2 <= pawn < chess.A8):
        return NOINDEX
    if (pawn & 7) > 3:
        # Pawn on the kingside: mirror everything west-east.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
        wa = flip_we(wa)
    sq = pawn
    sq -= 8  # down one row (black pawn: no NS flip here)
    pslice = (sq + (sq & 3)) >> 1
    return pslice * BLOCK_Ax + wk * BLOCK_Bx + bk * BLOCK_Cx + wa

def kaak_pctoindex(c):
    """Index a K+like pair vs K position; NOINDEX if the king pair or
    the like-piece pair has no index.
    """
    N_WHITE = 3
    N_BLACK = 1
    BLOCK_Ax = MAX_AAINDEX
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    ws = c.white_piece_squares[:N_WHITE]
    bs = c.black_piece_squares[:N_BLACK]
    if (ft & WE_FLAG) != 0:
        ws = [flip_we(i) for i in ws]
        bs = [flip_we(i) for i in bs]
    if (ft & NS_FLAG) != 0:
        ws = [flip_ns(i) for i in ws]
        bs = [flip_ns(i) for i in bs]
    if (ft & NW_SE_FLAG) != 0:
        ws = [flip_nw_se(i) for i in ws]
        bs = [flip_nw_se(i) for i in bs]
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    ai = AAIDX[ws[1]][ws[2]]
    if idx_is_empty(ki) or idx_is_empty(ai):
        return NOINDEX
    return ki * BLOCK_Ax + ai

def kakb_pctoindex(c):
    """Index a K+piece vs K+piece position; NOINDEX if the king pair
    is illegal.
    """
    BLOCK_Ax = 64 * 64
    BLOCK_Bx = 64
    ft = FLIPT[c.black_piece_squares[0]][c.white_piece_squares[0]]
    # Copy before flipping: the caller's lists must not be mutated.
    ws = c.white_piece_squares[:]
    bs = c.black_piece_squares[:]
    if (ft & 1) != 0:
        ws[0] = flip_we(ws[0])
        ws[1] = flip_we(ws[1])
        bs[0] = flip_we(bs[0])
        bs[1] = flip_we(bs[1])
    if (ft & 2) != 0:
        ws[0] = flip_ns(ws[0])
        ws[1] = flip_ns(ws[1])
        bs[0] = flip_ns(bs[0])
        bs[1] = flip_ns(bs[1])
    if (ft & 4) != 0:
        ws[0] = flip_nw_se(ws[0])
        ws[1] = flip_nw_se(ws[1])
        bs[0] = flip_nw_se(bs[0])
        bs[1] = flip_nw_se(bs[1])
    ki = KKIDX[bs[0]][ws[0]]  # KKIDX[black king][white king]
    if idx_is_empty(ki):
        return NOINDEX
    return ki * BLOCK_Ax + ws[1] * BLOCK_Bx + bs[1]

def kpk_pctoindex(c):
    """Index a K+pawn vs K position; NOINDEX if the pawn is outside
    ranks 2-7.
    """
    BLOCK_A = 64 * 64
    BLOCK_B = 64
    pawn = c.white_piece_squares[1]
    wk = c.white_piece_squares[0]
    bk = c.black_piece_squares[0]
    if not (chess.A2 <= pawn < chess.A8):
        return NOINDEX
    if (pawn & 7) > 3:
        # Pawn on the kingside: mirror everything west-east.
        pawn = flip_we(pawn)
        wk = flip_we(wk)
        bk = flip_we(bk)
    sq = pawn
    sq ^= 56  # flip_ns
    sq -= 8  # down one row
    pslice = ((sq + (sq & 3)) >> 1)
    res = pslice * BLOCK_A + wk * BLOCK_B + bk
    return res
def kpppk_pctoindex(c):
    """Index a K+3 pawns vs K position; if the raw triple is not stored
    in PPP48_IDX, retry with a west-east flip. NOINDEX if neither
    orientation is stored.
    """
    BLOCK_A = 64 * 64
    BLOCK_B = 64
    wk = c.white_piece_squares[0]
    pawn_a = c.white_piece_squares[1]
    pawn_b = c.white_piece_squares[2]
    pawn_c = c.white_piece_squares[3]
    bk = c.black_piece_squares[0]
    i = pawn_a - 8
    j = pawn_b - 8
    k = pawn_c - 8
    ppp48_slice = PPP48_IDX[i][j][k]
    if idx_is_empty(ppp48_slice):
        # Not stored in this orientation: mirror everything and retry.
        wk = flip_we(wk)
        pawn_a = flip_we(pawn_a)
        pawn_b = flip_we(pawn_b)
        pawn_c = flip_we(pawn_c)
        bk = flip_we(bk)
        i = pawn_a - 8
        j = pawn_b - 8
        k = pawn_c - 8
        ppp48_slice = PPP48_IDX[i][j][k]
    if idx_is_empty(ppp48_slice):
        return NOINDEX
    return ppp48_slice * BLOCK_A + wk * BLOCK_B + bk

# Table descriptor for one endgame material class: total index count,
# number of file slices, and the position-to-index function.
Endgamekey = collections.namedtuple("Endgamekey", ["maxindex", "slice_n", "pctoi"])
EGKEY = {
"kqk": Endgamekey(MAX_KXK, 1, kxk_pctoindex),
"krk": Endgamekey(MAX_KXK, 1, kxk_pctoindex),
"kbk": Endgamekey(MAX_KXK, 1, kxk_pctoindex),
"knk": Endgamekey(MAX_KXK, 1, kxk_pctoindex),
"kpk": Endgamekey(MAX_kpk, 24, kpk_pctoindex),
"kqkq": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"kqkr": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"kqkb": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"kqkn": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"krkr": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"krkb": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"krkn": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"kbkb": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"kbkn": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"knkn": Endgamekey(MAX_kakb, 1, kakb_pctoindex),
"kqqk": Endgamekey(MAX_kaak, 1, kaak_pctoindex),
"kqrk": Endgamekey(MAX_kabk, 1, kabk_pctoindex),
"kqbk": Endgamekey(MAX_kabk, 1, kabk_pctoindex),
"kqnk": Endgamekey(MAX_kabk, 1, kabk_pctoindex),
"krrk": Endgamekey(MAX_kaak, 1, kaak_pctoindex),
"krbk": Endgamekey(MAX_kabk, 1, kabk_pctoindex),
"krnk": Endgamekey(MAX_kabk, 1, kabk_pctoindex),
"kbbk": Endgamekey(MAX_kaak, 1, kaak_pctoindex),
"kbnk": Endgamekey(MAX_kabk, 1, kabk_pctoindex),
"knnk": Endgamekey(MAX_kaak, 1, kaak_pctoindex),
"kqkp": Endgamekey(MAX_kakp, 24, kakp_pctoindex),
"krkp": Endgamekey(MAX_kakp, 24, kakp_pctoindex),
"kbkp": Endgamekey(MAX_kakp, 24, kakp_pctoindex),
"knkp": Endgamekey(MAX_kakp, 24, kakp_pctoindex),
"kqpk": Endgamekey(MAX_kapk, 24, kapk_pctoindex),
"krpk": Endgamekey(MAX_kapk, 24, kapk_pctoindex),
"kbpk": Endgamekey(MAX_kapk, 24, kapk_pctoindex),
"knpk": Endgamekey(MAX_kapk, 24, kapk_pctoindex),
"kppk": Endgamekey(MAX_kppk, MAX_PPINDEX, kppk_pctoindex),
"kpkp": Endgamekey(MAX_kpkp, MAX_PpINDEX, kpkp_pctoindex),
"kppkp": Endgamekey(MAX_kppkp, 24 * MAX_PP48_INDEX, kppkp_pctoindex),
"kbbkr": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kbbkb": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"knnkb": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"knnkn": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kqqqk": Endgamekey(MAX_kaaak, 1, kaaak_pctoindex),
"kqqrk": Endgamekey(MAX_kaabk, 1, kaabk_pctoindex),
"kqqbk": Endgamekey(MAX_kaabk, 1, kaabk_pctoindex),
"kqqnk": Endgamekey(MAX_kaabk, 1, kaabk_pctoindex),
"kqrrk": Endgamekey(MAX_kabbk, 1, kabbk_pctoindex),
"kqrbk": Endgamekey(MAX_kabck, 1, kabck_pctoindex),
"kqrnk": Endgamekey(MAX_kabck, 1, kabck_pctoindex),
"kqbbk": Endgamekey(MAX_kabbk, 1, kabbk_pctoindex),
"kqbnk": Endgamekey(MAX_kabck, 1, kabck_pctoindex),
"kqnnk": Endgamekey(MAX_kabbk, 1, kabbk_pctoindex),
"krrrk": Endgamekey(MAX_kaaak, 1, kaaak_pctoindex),
"krrbk": Endgamekey(MAX_kaabk, 1, kaabk_pctoindex),
"krrnk": Endgamekey(MAX_kaabk, 1, kaabk_pctoindex),
"krbbk": Endgamekey(MAX_kabbk, 1, kabbk_pctoindex),
"krbnk": Endgamekey(MAX_kabck, 1, kabck_pctoindex),
"krnnk": Endgamekey(MAX_kabbk, 1, kabbk_pctoindex),
"kbbbk": Endgamekey(MAX_kaaak, 1, kaaak_pctoindex),
"kbbnk": Endgamekey(MAX_kaabk, 1, kaabk_pctoindex),
"kbnnk": Endgamekey(MAX_kabbk, 1, kabbk_pctoindex),
"knnnk": Endgamekey(MAX_kaaak, 1, kaaak_pctoindex),
"kqqkq": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kqqkr": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kqqkb": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kqqkn": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kqrkq": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqrkr": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqrkb": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqrkn": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqbkq": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqbkr": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqbkb": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqbkn": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqnkq": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqnkr": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqnkb": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kqnkn": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krrkq": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"krrkr": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"krrkb": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"krrkn": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"krbkq": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krbkr": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krbkb": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krbkn": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krnkq": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krnkr": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krnkb": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"krnkn": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kbbkq": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kbbkn": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kbnkq": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kbnkr": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kbnkb": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"kbnkn": Endgamekey(MAX_kabkc, 1, kabkc_pctoindex),
"knnkq": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"knnkr": Endgamekey(MAX_kaakb, 1, kaakb_pctoindex),
"kqqpk": Endgamekey(MAX_kaapk, 24, kaapk_pctoindex),
"kqrpk": Endgamekey(MAX_kabpk, 24, kabpk_pctoindex),
"kqbpk": Endgamekey(MAX_kabpk, 24, kabpk_pctoindex),
"kqnpk": Endgamekey(MAX_kabpk, 24, kabpk_pctoindex),
"krrpk": Endgamekey(MAX_kaapk, 24, kaapk_pctoindex),
"krbpk": Endgamekey(MAX_kabpk, 24, kabpk_pctoindex),
"krnpk": Endgamekey(MAX_kabpk, 24, kabpk_pctoindex),
"kbbpk": Endgamekey(MAX_kaapk, 24, kaapk_pctoindex),
"kbnpk": Endgamekey(MAX_kabpk, 24, kabpk_pctoindex),
"knnpk": Endgamekey(MAX_kaapk, 24, kaapk_pctoindex),
"kqppk": Endgamekey(MAX_kappk, MAX_PPINDEX, kappk_pctoindex),
"krppk": Endgamekey(MAX_kappk, MAX_PPINDEX, kappk_pctoindex),
"kbppk": Endgamekey(MAX_kappk, MAX_PPINDEX, kappk_pctoindex),
"knppk": Endgamekey(MAX_kappk, MAX_PPINDEX, kappk_pctoindex),
"kqpkq": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kqpkr": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kqpkb": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kqpkn": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"krpkq": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"krpkr": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"krpkb": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"krpkn": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kbpkq": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kbpkr": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kbpkb": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kbpkn": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"knpkq": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"knpkr": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"knpkb": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"knpkn": Endgamekey(MAX_kapkb, 24, kapkb_pctoindex),
"kppkq": Endgamekey(MAX_kppka, MAX_PPINDEX, kppka_pctoindex),
"kppkr": Endgamekey(MAX_kppka, MAX_PPINDEX, kppka_pctoindex),
"kppkb": Endgamekey(MAX_kppka, MAX_PPINDEX, kppka_pctoindex),
"kppkn": Endgamekey(MAX_kppka, MAX_PPINDEX, kppka_pctoindex),
"kqqkp": Endgamekey(MAX_kaakp, 24, kaakp_pctoindex),
"kqrkp": Endgamekey(MAX_kabkp, 24, kabkp_pctoindex),
"kqbkp": Endgamekey(MAX_kabkp, 24, kabkp_pctoindex),
"kqnkp": Endgamekey(MAX_kabkp, 24, kabkp_pctoindex),
"krrkp": Endgamekey(MAX_kaakp, 24, kaakp_pctoindex),
"krbkp": Endgamekey(MAX_kabkp, 24, kabkp_pctoindex),
"krnkp": Endgamekey(MAX_kabkp, 24, kabkp_pctoindex),
"kbbkp": Endgamekey(MAX_kaakp, 24, kaakp_pctoindex),
"kbnkp": Endgamekey(MAX_kabkp, 24, kabkp_pctoindex),
"knnkp": Endgamekey(MAX_kaakp, 24, kaakp_pctoindex),
"kqpkp": Endgamekey(MAX_kapkp, MAX_PpINDEX, kapkp_pctoindex),
"krpkp": Endgamekey(MAX_kapkp, MAX_PpINDEX, kapkp_pctoindex),
"kbpkp": Endgamekey(MAX_kapkp, MAX_PpINDEX, kapkp_pctoindex),
"knpkp": Endgamekey(MAX_kapkp, MAX_PpINDEX, kapkp_pctoindex),
"kpppk": Endgamekey(MAX_kpppk, MAX_PPP48_INDEX, kpppk_pctoindex),
}
def sortlists(ws, wp):
    """Sort squares *ws* and piece types *wp* together, by type descending.

    Returns two new lists (squares, types); the pairing between each
    square and its piece type is preserved, and the sort is stable.
    """
    paired = list(zip(wp, ws))
    paired.sort(key=lambda pair: pair[0], reverse=True)
    types_sorted, squares_sorted = zip(*paired)
    return list(squares_sorted), list(types_sorted)
def egtb_block_unpack(side, n, bp):
    """Unpack the first *n* entries of buffer *bp* into DTM values for *side*.

    Works on buffers whose items are already ints (Python 3 ``bytes``)
    and on buffers of one-character strings (Python 2 ``str``), falling
    back to ``ord()`` for the latter.
    """
    entries = bp[:n]
    try:
        return [dtm_unpack(side, entry) for entry in entries]
    except TypeError:
        # Items are single characters; convert each to its byte value.
        return [dtm_unpack(side, ord(entry)) for entry in entries]
def split_index(i):
    """Split table index *i* into (block number, position within block)."""
    block, position = divmod(i, ENTRIES_PER_BLOCK)
    return block, position
# Tablebase result codes, stored in the low bits of a packed DTM value.
tb_DRAW = 0
tb_WMATE = 1
tb_BMATE = 2
tb_FORBID = 3
tb_UNKNOWN = 7
# Aliases used throughout the probing code.
iDRAW = tb_DRAW
iWMATE = tb_WMATE
iBMATE = tb_BMATE
iFORBID = tb_FORBID
# Variants with bit 2 (iUNKNBIT) set — presumably marking results reached
# through the capture/en-passant adjustment path; TODO confirm vs. libgtb.
iDRAWt = tb_DRAW | 4
iWMATEt = tb_WMATE | 4
iBMATEt = tb_BMATE | 4
iUNKNOWN = tb_UNKNOWN
# Bit flag shared by the "t" variants above.
iUNKNBIT = (1 << 2)
def removepiece(ys, yp, j):
    """Remove the piece at index *j* from the parallel square/type lists in place."""
    ys.pop(j)
    yp.pop(j)
def opp(side):
    """Return the opposing side: white (0) <-> black (1)."""
    if side == 0:
        return 1
    return 0
def adjust_up(dist):
    """Add one ply to packed value *dist* when it encodes a mate result.

    Draw/forbidden/unknown codes are returned unchanged.
    """
    info = dist & INFOMASK
    if info in (iWMATE, iWMATEt, iBMATE, iBMATEt):
        return dist + (1 << PLYSHIFT)
    return dist
def bestx(side, a, b):
    """Pick the preferable of two packed DTM values *a* and *b* for *side*.

    A forbidden value always loses to the other one.  Otherwise a
    selection table indexed by the info bits of both values decides
    whether to take the first, the lowest, the highest, or the second
    candidate; the table is mirrored via XOR when probing for black.
    """
    # Selector codes: 0 = first, 1 = lowest, 2 = highest, 3 = second.
    selection_table = [
        # draw, wmate, bmate, forbid
        [0, 3, 0, 0],  # draw
        [0, 1, 0, 0],  # wmate
        [3, 3, 2, 0],  # bmate
        [3, 3, 3, 0],  # forbid
    ]
    side_mask = [0, 3]

    if a == iFORBID:
        return b
    if b == iFORBID:
        return a

    # candidates[0] = first, [1] = lowest, [2] = highest, [3] = second.
    if b < a:
        candidates = [a, b, a, b]
    else:
        candidates = [a, a, b, b]

    selector = selection_table[a & 3][b & 3] ^ side_mask[side]
    return candidates[selector]
def unpackdist(d):
    """Split packed DTM value *d* into (plies, info-code)."""
    plies = d >> PLYSHIFT
    info = d & INFOMASK
    return plies, info
def dtm_unpack(stm, packed):
    """Expand a packed one-byte table entry into a full DTM value.

    *stm* is the side to move in the stored position (0 = white,
    1 = black).  The low 2 bits of *packed* carry a result code, the
    upper 6 bits a stored move counter.  The return value is re-encoded
    as info-code | (plies << 3), i.e. suitable for unpackdist().
    Draws and forbidden entries carry no counter and pass through
    unchanged.
    """
    p = packed

    if p in [iDRAW, iFORBID]:
        return p

    info = p & 3
    store = p >> 2

    if stm == 0:
        # White to move in the stored position.
        if info == iWMATE:
            # Counter stores full moves minus one; plies count is odd
            # because white delivers the mating move.
            moves = store + 1
            plies = moves * 2 - 1
            prefx = info
        elif info == iBMATE:
            moves = store
            plies = moves * 2
            prefx = info
        elif info == iDRAW:
            # Counter overflow range: a "draw" code here really encodes a
            # white mate with 63 extra moves -- TODO confirm vs. libgtb.
            moves = store + 1 + 63
            plies = moves * 2 - 1
            prefx = iWMATE
        elif info == iFORBID:
            # Overflow range for black mates.
            moves = store + 63
            plies = moves * 2
            prefx = iBMATE
        else:
            plies = 0
            prefx = 0

        ret = prefx | (plies << 3)
    else:
        # Black to move in the stored position (mirror of the branch above).
        if info == iBMATE:
            moves = store + 1
            plies = moves * 2 - 1
            prefx = info
        elif info == iWMATE:
            moves = store
            plies = moves * 2
            prefx = info
        elif info == iDRAW:
            if store == 63:
                # Exception: no position in the 5-man TBs needs to store 63 for
                # iBMATE. It is then just used to indicate iWMATE.
                store += 1

                moves = store + 63
                plies = moves * 2
                prefx = iWMATE
            else:
                moves = store + 1 + 63
                plies = moves * 2 - 1
                prefx = iBMATE
        elif info == iFORBID:
            moves = store + 63
            plies = moves * 2
            prefx = iWMATE
        else:
            plies = 0
            prefx = 0

        ret = prefx | (plies << 3)

    return ret
class TableBlock(object):
    """Cache entry describing one decompressed tablebase block."""

    def __init__(self, egkey, side, offset, age):
        # Cache identity: endgame key, side to move, block offset.
        self.egkey = egkey
        self.side = side
        self.offset = offset
        # LRU age stamp; refreshed by the prober on every cache hit.
        self.age = age
        # Decompressed entries; filled in lazily after the block is read.
        self.pcache = None
class Request(object):
    """A single tablebase probe request.

    Piece lists are sorted together (via sortlists) by piece type,
    descending.  The remaining attributes start out empty and are filled
    in by PythonTablebases._setup_tablebase() once the matching table
    (possibly color-flipped) has been identified.
    """

    def __init__(self, white_squares, white_types, black_squares, black_types, side, epsq):
        self.white_squares, self.white_types = sortlists(white_squares, white_types)
        self.black_squares, self.black_types = sortlists(black_squares, black_types)

        # realside keeps the caller's point of view; side may be flipped
        # later if the position has to be probed color-reversed.
        self.realside = side
        self.side = side

        self.epsq = epsq

        # Filled in by _setup_tablebase().  (The original code assigned
        # white_piece_squares twice; the redundant assignment is removed.)
        self.egkey = None
        self.white_piece_squares = None
        self.white_piece_types = None
        self.black_piece_squares = None
        self.black_piece_types = None
        self.is_reversed = None
# Index data for one compressed table file: a byte-offset correction,
# the number of block-index entries, and the block start offsets.
Zipinfo = collections.namedtuple("Zipinfo", ["extraoffset", "totalblocks", "blockindex"])
class PythonTablebases(object):
    """Provides access to Gaviota tablebases using pure Python code."""

    def __init__(self, directory, lzma):
        # *lzma* is injected so either the stdlib module or backports.lzma
        # can be supplied (see open_tablebases()).
        self.lzma = lzma

        self.available_tables = {}  # egkey -> path of the *.gtb.cp4 file

        self.streams = {}  # egkey -> open table file
        self.zipinfo = {}  # egkey -> Zipinfo with the compressed block offsets

        self.block_cache = {}  # (egkey, offset, side) -> TableBlock (LRU)
        self.block_age = 0  # monotonic counter used as the LRU age stamp

        if directory is not None:
            self.open_directory(directory)

    def open_directory(self, directory):
        """Loads *.gtb.cp4* tables from a directory."""
        directory = os.path.abspath(directory)
        if not os.path.isdir(directory):
            raise IOError("not a tablebase directory: {0}".format(repr(directory)))

        for tbfile in fnmatch.filter(os.listdir(directory), "*.gtb.cp4"):
            self.available_tables[os.path.basename(tbfile).replace(".gtb.cp4", "")] = os.path.join(directory, tbfile)

    def probe_dtm(self, board):
        """
        Probes for depth to mate information.

        Returns ``None`` if the position was not found in any of the tables.

        Otherwise the absolute value is the number of half moves until
        forced mate. The value is positive if the side to move is winning,
        otherwise it is negative.

        In the example position white to move will get mated in 10 half moves:

        >>> with chess.gaviota.open_tablebases("data/gaviota") as tablebases:
        ...     tablebases.probe_dtm(chess.Board("8/8/8/8/8/8/8/K2kr3 w - - 0 1"))
        ...
        -10
        """
        # Can not probe positions with castling rights.
        if board.castling_rights:
            return None

        # Prepare the tablebase request.
        white = [(square, board.piece_type_at(square)) for square in chess.SquareSet(board.occupied_co[chess.WHITE])]
        black = [(square, board.piece_type_at(square)) for square in chess.SquareSet(board.occupied_co[chess.BLACK])]
        white_squares, white_types = zip(*white)
        black_squares, black_types = zip(*black)
        side = 0 if (board.turn == chess.WHITE) else 1
        epsq = board.ep_square if board.ep_square else NOSQUARE
        req = Request(white_squares, white_types, black_squares, black_types, side, epsq)

        # KvK is a draw.
        if len(white_squares) == 1 and len(black_squares) == 1:
            return 0

        # Only up to 5-men tablebases.
        if len(white_squares) + len(black_squares) > 5:
            return None

        # Probe.  An IndexError signals "no table for this material".
        try:
            dtm = self.egtb_get_dtm(req)
        except IndexError:
            return None
        ply, res = unpackdist(dtm)

        if res == iDRAW:
            # Draw.
            return 0
        elif res == iWMATE:
            # White mates in the stored position; translate back to the
            # caller's point of view (realside) and color orientation.
            if req.realside == 1:
                if req.is_reversed:
                    return ply
                else:
                    return -ply
            else:
                if req.is_reversed:
                    return -ply
                else:
                    return ply
        elif res == iBMATE:
            # Black mates in the stored position.
            if req.realside == 0:
                if req.is_reversed:
                    return ply
                else:
                    return -ply
            else:
                if req.is_reversed:
                    return -ply
                else:
                    return ply

    def probe_wdl(self, board):
        """
        Probes for win/draw/loss-information.

        Returns ``None`` if the position was not found in any of the tables.

        Returns ``1`` if the side to move is winning, ``0`` if it is a draw,
        and ``-1`` if the side to move is losing.

        >>> with chess.gaviota.open_tablebases("data/gaviota") as tablebases:
        ...     tablebases.probe_wdl(chess.Board("8/4k3/8/B7/8/8/8/4K3 w - - 0 1"))
        ...
        0
        """
        dtm = self.probe_dtm(board)

        # probe_dtm() returns None for uncovered positions.  Check this
        # explicitly first: the previous comparison chain raised TypeError
        # on Python 3 for None, and on Python 2 silently fell into the
        # "losing" branch (None < 0 is True there).
        if dtm is None:
            return None
        elif dtm == 0:
            # A dtm of 0 means either a draw or an already-checkmated side.
            if board.is_checkmate():
                return -1
            else:
                return 0
        elif dtm > 0:
            return 1
        else:
            return -1

    def _setup_tablebase(self, req):
        """Resolve *req* to an available table, flipping colors if needed.

        Fills in req.egkey, req.is_reversed and the req.*_piece_*
        attributes, and returns the open stream for the chosen table.
        Raises IndexError if no table covers the material.
        """
        white_letters = "".join([chess.PIECE_SYMBOLS[i] for i in req.white_types])
        black_letters = "".join([chess.PIECE_SYMBOLS[i] for i in req.black_types])

        if (white_letters + black_letters) in self.available_tables:
            req.is_reversed = False
            req.egkey = white_letters + black_letters
            req.white_piece_squares = req.white_squares
            req.white_piece_types = req.white_types
            req.black_piece_squares = req.black_squares
            req.black_piece_types = req.black_types
        elif (black_letters + white_letters) in self.available_tables:
            # Only the color-flipped table exists: mirror the board
            # north-south and swap the two sides.
            req.is_reversed = True
            req.egkey = black_letters + white_letters
            req.white_piece_squares = [flip_ns(s) for s in req.black_squares]
            req.white_piece_types = req.black_types
            req.black_piece_squares = [flip_ns(s) for s in req.white_squares]
            req.black_piece_types = req.white_types

            req.side = opp(req.side)
            if req.epsq != NOSQUARE:
                req.epsq = flip_ns(req.epsq)
        else:
            raise IndexError("no tablebase available for: {0} {1}".format(white_letters, black_letters))

        return self._open_tablebase(req)

    def _open_tablebase(self, req):
        """Return a (cached) open stream for the table req.egkey."""
        stream = self.streams.get(req.egkey)

        if stream is None:
            path = self.available_tables[req.egkey]
            # Probing only ever seeks and reads, so open read-only ("rb"
            # instead of the previous "rb+"); this also works for
            # write-protected table files.
            stream = open(path, "rb")
            self.egtb_loadindexes(req.egkey, stream)
            self.streams[req.egkey] = stream

        return stream

    def close(self):
        """Closes all loaded tables."""
        self.available_tables.clear()
        self.zipinfo.clear()

        self.block_age = 0
        self.block_cache.clear()

        while self.streams:
            _, stream = self.streams.popitem()
            stream.close()

    def egtb_get_dtm(self, req):
        """Probe *req*, additionally considering en passant captures."""
        dtm = self._tb_probe(req)

        if req.epsq != NOSQUARE:
            capturer_a = 0
            capturer_b = 0
            xed = 0

            # Flip for move generation.
            if req.side == 0:
                xs = list(req.white_piece_squares)
                xp = list(req.white_piece_types)
                ys = list(req.black_piece_squares)
                yp = list(req.black_piece_types)
            else:
                xs = list(req.black_piece_squares)
                xp = list(req.black_piece_types)
                ys = list(req.white_piece_squares)
                yp = list(req.white_piece_types)

            # Captured pawn trick: from ep square to captured.
            xed = req.epsq ^ (1 << 3)

            # Find captured index (j).
            try:
                j = ys.index(xed)
            except ValueError:
                j = -1

            # Try first possible ep capture.
            if 0 == (0x88 & (map88(xed) + 1)):
                capturer_a = xed + 1

            # Try second possible ep capture.
            if 0 == (0x88 & (map88(xed) - 1)):
                capturer_b = xed - 1

            if (j > -1) and (ys[j] == xed):
                # Find capturers (i).
                for i in range(len(xs)):
                    if xp[i] == PAWN and (xs[i] == capturer_a or xs[i] == capturer_b):
                        epscore = iFORBID

                        # Execute capture.
                        xs[i] = req.epsq
                        removepiece(ys, yp, j)

                        # Flip back.
                        if req.side == 1:
                            xs, ys = ys, xs
                            xp, yp = yp, xp

                        # Make subrequest.
                        subreq = Request(xs, xp, ys, yp, opp(req.side), NOSQUARE)
                        try:
                            epscore = self._tb_probe(subreq)
                            epscore = adjust_up(epscore)

                            # Chooses to ep or not.
                            dtm = bestx(req.side, epscore, dtm)
                        except IndexError:
                            break

        return dtm

    def egtb_block_getnumber(self, req, idx):
        """Map table index *idx* to its absolute block number in the file."""
        maxindex = EGKEY[req.egkey].maxindex

        blocks_per_side = 1 + (maxindex - 1) // ENTRIES_PER_BLOCK
        block_in_side = idx // ENTRIES_PER_BLOCK

        return req.side * blocks_per_side + block_in_side

    def egtb_block_getsize(self, req, idx):
        """Return the number of entries in the block containing *idx*."""
        blocksz = ENTRIES_PER_BLOCK
        maxindex = EGKEY[req.egkey].maxindex
        block = idx // blocksz
        offset = block * blocksz

        if (offset + blocksz) > maxindex:
            return maxindex - offset  # last block size
        else:
            return blocksz  # size of a normal block

    def _tb_probe(self, req):
        """Look up the packed DTM value for *req*, via the LRU block cache."""
        stream = self._setup_tablebase(req)
        idx = EGKEY[req.egkey].pctoi(req)
        offset, remainder = split_index(idx)

        t = self.block_cache.get((req.egkey, offset, req.side))
        if t is None:
            t = TableBlock(req.egkey, req.side, offset, self.block_age)

            block = self.egtb_block_getnumber(req, idx)
            n = self.egtb_block_getsize(req, idx)
            z = self.egtb_block_getsize_zipped(req.egkey, block)

            self.egtb_block_park(req.egkey, block, stream)
            buffer_zipped = stream.read(z)

            if buffer_zipped[0] == 0:
                # If flag is zero, plain LZMA is following.
                buffer_zipped = buffer_zipped[2:]
            else:
                # Else LZMA86. We have to build a fake header.
                DICTIONARY_SIZE = 4096
                POS_STATE_BITS = 2
                NUM_LITERAL_POS_STATE_BITS = 0
                NUM_LITERAL_CONTEXT_BITS = 3
                properties = bytearray(13)
                properties[0] = (POS_STATE_BITS * 5 + NUM_LITERAL_POS_STATE_BITS) * 9 + NUM_LITERAL_CONTEXT_BITS
                for i in range(4):
                    properties[1 + i] = (DICTIONARY_SIZE >> (8 * i)) & 0xFF
                for i in range(8):
                    properties[5 + i] = (n >> (8 * i)) & 0xFF

                # Concatenate the fake header with the true LZMA stream.
                buffer_zipped = properties + buffer_zipped[15:]

            buffer_packed = self.lzma.LZMADecompressor().decompress(buffer_zipped)

            t.pcache = egtb_block_unpack(req.side, n, buffer_packed)

            # Update LRU block cache.
            self.block_cache[(t.egkey, t.offset, t.side)] = t
            if len(self.block_cache) > 128:
                # Evict the least recently used block.
                lru_cache_key, lru_age = None, None
                for cache_key, cache_entry in self.block_cache.items():
                    if lru_age is None or cache_entry.age < lru_age:
                        lru_cache_key = cache_key
                        lru_age = cache_entry.age
                del self.block_cache[lru_cache_key]
        else:
            t.age = self.block_age

        self.block_age += 1
        dtm = t.pcache[remainder]
        return dtm

    def egtb_loadindexes(self, egkey, stream):
        """Read (and cache) the compressed-block index of a table file."""
        zipinfo = self.zipinfo.get(egkey)

        if zipinfo is None:
            # Get reserved bytes, blocksize, offset.
            stream.seek(0)
            HeaderStruct = struct.Struct("<10I")
            header = HeaderStruct.unpack(stream.read(HeaderStruct.size))
            offset = header[8]

            blocks = ((offset - 40) // 4) - 1
            n_idx = blocks + 1

            IndexStruct = struct.Struct("<" + "I" * n_idx)
            p = IndexStruct.unpack(stream.read(IndexStruct.size))

            zipinfo = Zipinfo(extraoffset=0, totalblocks=n_idx, blockindex=p)
            self.zipinfo[egkey] = zipinfo

        return zipinfo

    def egtb_block_getsize_zipped(self, egkey, block):
        """Return the on-disk (compressed) size of *block* in bytes."""
        i = self.zipinfo[egkey].blockindex[block]
        j = self.zipinfo[egkey].blockindex[block + 1]
        return j - i

    def egtb_block_park(self, egkey, block, stream):
        """Seek *stream* to the start of the given compressed block."""
        i = self.zipinfo[egkey].blockindex[block]
        i += self.zipinfo[egkey].extraoffset
        stream.seek(i)
        return i

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()
class NativeTablebases(object):
    """
    Provides access to Gaviota tablebases via the shared library libgtb.

    Has the same interface as :class:`~chess.gaviota.PythonTablebases`.
    """

    def __init__(self, directory, libgtb):
        self.libgtb = libgtb
        # Declare C return types up front: without these, ctypes would
        # treat the returned char pointers as plain ints.
        self.libgtb.tb_init.restype = ctypes.c_char_p
        self.libgtb.tb_restart.restype = ctypes.c_char_p
        self.libgtb.tbpaths_getmain.restype = ctypes.c_char_p
        # Full prototype of tb_probe_hard: side to move, ep square,
        # castling, white/black square arrays, white/black piece arrays,
        # and two out-parameters (result info, plies to mate).
        self.libgtb.tb_probe_hard.argtypes = [
            ctypes.c_uint,
            ctypes.c_uint,
            ctypes.c_uint,
            ctypes.POINTER(ctypes.c_uint),
            ctypes.POINTER(ctypes.c_uint),
            ctypes.POINTER(ctypes.c_ubyte),
            ctypes.POINTER(ctypes.c_ubyte),
            ctypes.POINTER(ctypes.c_uint),
            ctypes.POINTER(ctypes.c_uint)
        ]

        # libgtb keeps global state; a second instance would corrupt it.
        if self.libgtb.tb_is_initialized():
            raise RuntimeError("only one gaviota instance can be initialized at a time")

        self.paths = []
        if directory is not None:
            self.open_directory(directory)

        # 1 MiB probe cache, 50% reserved for WDL information.
        self._tbcache_restart(1024 * 1024, 50)

    def open_directory(self, directory):
        """Adds *directory* to the search paths and re-initializes libgtb."""
        if not os.path.isdir(directory):
            raise IOError("not a tablebase directory: {0}".format(repr(directory)))

        self.paths.append(directory)
        self._tb_restart()

    def _tb_restart(self):
        """Restarts libgtb with the current search paths and logs what
        tablebase sets it found."""
        # ctypes wants an array of encoded C strings.
        self.c_paths = (ctypes.c_char_p * len(self.paths))()
        self.c_paths[:] = [path.encode("utf-8") for path in self.paths]

        verbosity = ctypes.c_int(1)
        # Compression scheme 4 corresponds to the *.gtb.cp4 format.
        compression_scheme = ctypes.c_int(4)

        ret = self.libgtb.tb_restart(verbosity, compression_scheme, self.c_paths)
        if ret:
            LOGGER.debug(ret.decode("utf-8"))

        LOGGER.debug("Main path has been set to %r", self.libgtb.tbpaths_getmain().decode("utf-8"))

        # tb_availability() returns a bitmask of what was found on disk.
        av = self.libgtb.tb_availability()
        if av & 1:
            LOGGER.debug("Some 3 piece tablebases available")
        if av & 2:
            LOGGER.debug("All 3 piece tablebases complete")
        if av & 4:
            LOGGER.debug("Some 4 piece tablebases available")
        if av & 8:
            LOGGER.debug("All 4 piece tablebases complete")
        if av & 16:
            LOGGER.debug("Some 5 piece tablebases available")
        if av & 32:
            LOGGER.debug("All 5 piece tablebases complete")

    def _tbcache_restart(self, cache_mem, wdl_fraction):
        """Re-initializes libgtb's probe cache (*cache_mem* bytes,
        *wdl_fraction* percent reserved for WDL data)."""
        self.libgtb.tbcache_restart(ctypes.c_size_t(cache_mem), ctypes.c_int(wdl_fraction))

    def probe_dtm(self, board):
        """Probes for depth to mate; see PythonTablebases.probe_dtm()."""
        return self._probe_hard(board)

    def probe_wdl(self, board):
        """Probes for win/draw/loss; see PythonTablebases.probe_wdl()."""
        return self._probe_hard(board, wdl_only=True)

    def _probe_hard(self, board, wdl_only=False):
        # Positions libgtb cannot (or need not) answer: trivial draws,
        # more than 5 men, or castling rights still available.
        if board.is_insufficient_material():
            return 0

        if chess.pop_count(board.occupied) > 5:
            return None

        if board.castling_rights:
            return None

        stm = ctypes.c_uint(0 if board.turn == chess.WHITE else 1)
        # 64 is libgtb's sentinel for "no en passant square".
        ep_square = ctypes.c_uint(board.ep_square if board.ep_square else 64)
        castling = ctypes.c_uint(0)

        # Build the 64-terminated square/piece arrays for white ...
        c_ws = (ctypes.c_uint * 17)()
        c_wp = (ctypes.c_ubyte * 17)()

        i = -1  # stays -1 if white has no pieces, so the terminator goes first
        for i, square in enumerate(chess.SquareSet(board.occupied_co[chess.WHITE])):
            c_ws[i] = square
            c_wp[i] = board.piece_type_at(square)

        c_ws[i + 1] = 64
        c_wp[i + 1] = 0

        # ... and for black.
        c_bs = (ctypes.c_uint * 17)()
        c_bp = (ctypes.c_ubyte * 17)()

        i = -1
        for i, square in enumerate(chess.SquareSet(board.occupied_co[chess.BLACK])):
            c_bs[i] = square
            c_bp[i] = board.piece_type_at(square)

        c_bs[i + 1] = 64
        c_bp[i + 1] = 0

        # Do a hard probe.
        info = ctypes.c_uint()
        pliestomate = ctypes.c_uint()
        if not wdl_only:
            ret = self.libgtb.tb_probe_hard(stm, ep_square, castling, c_ws, c_bs, c_wp, c_bp, ctypes.byref(info), ctypes.byref(pliestomate))
            dtm = int(pliestomate.value)
        else:
            ret = self.libgtb.tb_probe_WDL_hard(stm, ep_square, castling, c_ws, c_bs, c_wp, c_bp, ctypes.byref(info))
            dtm = 1  # dummy magnitude; only the sign matters for WDL

        # Probe forbidden.
        if info.value == 3:
            LOGGER.warning("Tablebase for %s marked as forbidden", board.fen())
            return None

        # Probe failed or unknown.
        if not ret or info.value == 7:
            return None

        # Draw.
        if info.value == 0:
            return 0

        # White mates.
        if info.value == 1:
            return dtm if board.turn == chess.WHITE else -dtm

        # Black mates.
        if info.value == 2:
            return dtm if board.turn == chess.BLACK else -dtm

    def close(self):
        """Releases libgtb's cache and global state."""
        self.paths = []

        if self.libgtb.tb_is_initialized():
            self.libgtb.tbcache_done()
            self.libgtb.tb_done()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()
def open_tablebases_native(directory, libgtb=None, LibraryLoader=ctypes.cdll):
    """
    Opens a collection of tablebases for probing using libgtb.

    In most cases :func:`~chess.gaviota.open_tablebases()` should be used.
    Use this function only if you do not want to downgrade to pure Python
    tablebase probing.

    Raises :exc:`RuntimeError` or :exc:`OSError` when libgtb can not be used.
    """
    # Fall back from an explicit name to the system library, then to a
    # conventional soname.
    chosen = libgtb or ctypes.util.find_library("gtb") or "libgtb.so.1.0.1"
    return NativeTablebases(directory, LibraryLoader.LoadLibrary(chosen))
def open_tablebases(directory=None, libgtb=None, LibraryLoader=ctypes.cdll):
    """
    Opens a collection of tablebases for probing.

    First native access via the shared library libgtb is tried. You can
    optionally provide a specific library name or a library loader.
    The shared library has global state and caches, so only one instance can
    be open at a time.

    Second pure Python probing code is tried.
    """
    # Prefer the native backend when a loader is available.
    if LibraryLoader:
        try:
            return open_tablebases_native(directory, libgtb, LibraryLoader)
        except (OSError, RuntimeError) as err:
            LOGGER.info("Falling back to pure Python tablebases: %r", err)

    # Pure Python backend needs an LZMA implementation.
    try:
        import lzma
    except ImportError:
        try:
            from backports import lzma
        except ImportError:
            raise ImportError("chess.gaviota requires backports.lzma or libgtb")

    return PythonTablebases(directory, lzma)
# ---------------------------------------------------------------------------
# NOTE: dataset concatenation boundary — a second, unrelated module follows.
# ---------------------------------------------------------------------------
#Copyright (C) 2002-2015 The Board of Regents of the University of Wisconsin System
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""This module implements a map of keywords that have been applied to multiple Episodes in a Series"""
__author__ = "David K. Woods <dwoods@wcer.wisc.edu>"
DEBUG = False
if DEBUG:
print "LibraryMap DEBUG is ON!!"
# import Python's os and sys modules
import os, sys
# import Python's platform module
import platform
# import Python's string module
import string
# load wxPython for GUI
import wx
# load the GraphicsControl
import GraphicsControlClass
# Load the Printout Class
from KeywordMapPrintoutClass import MyPrintout
# Import Transana's Database Interface
import DBInterface
# Import Transana's Dialogs
import Dialogs
# Import Transana's Filter Dialog
import FilterDialog
# import Transana's Keyword Object
import KeywordObject
# import Transana's Globals
import TransanaGlobal
# Import Transana's Images
import TransanaImages
# import Transana Miscellaneous functions
import Misc
# Declare Control IDs
# (wx.NewId() allocates a process-unique id for each menu/toolbar item.)
# Menu Item and Toolbar Item for File > Filter
M_FILE_FILTER = wx.NewId()
T_FILE_FILTER = wx.NewId()
# Menu Item and Toolbar Item for File > Save As
M_FILE_SAVEAS = wx.NewId()
T_FILE_SAVEAS = wx.NewId()
# Menu Item and Toolbar Item for File > Printer Setup
M_FILE_PRINTSETUP = wx.NewId()
T_FILE_PRINTSETUP = wx.NewId()
# Menu Item and Toolbar Item for File > Print Preview
M_FILE_PRINTPREVIEW = wx.NewId()
T_FILE_PRINTPREVIEW = wx.NewId()
# Menu Item and Toolbar Item for File > Print
M_FILE_PRINT = wx.NewId()
T_FILE_PRINT = wx.NewId()
# Menu Item and Toolbar Item for File > Exit
M_FILE_EXIT = wx.NewId()
T_FILE_EXIT = wx.NewId()
# Menu Item and Toolbar Item for Help > Help
M_HELP_HELP = wx.NewId()
T_HELP_HELP = wx.NewId()
# Series List Combo Box
ID_SERIESLIST = wx.NewId()
# Episode List Combo Box
ID_EPISODELIST = wx.NewId()
class LibraryMap(wx.Frame):
""" This is the main class for the Series Map application. """
    def __init__(self, parent, title, seriesNum, seriesName, reportType, controlObject=None):
        """ Initialize the Series Map frame: remember its context, build the
            toolbar, menu bar and graphic canvas, and draw the initial report. """
        # reportType 1 is the Sequence Mode, showing relative position of keywords in the Episodes
        # reportType 2 is the Bar Graph mode, showing a bar graph of total time for each keyword
        # reportType 3 is the Percentage mode, showing percentage of total Episode length for each keyword

        # Set the Cursor to the Hourglass while the report is assembled
        TransanaGlobal.menuWindow.SetCursor(wx.StockCursor(wx.CURSOR_WAIT))
        # It's always important to remember your ancestors.
        self.parent = parent
        # Remember the title
        self.title = title
        # Initialize the Report Number
        self.reportNumber = 0
        # Remember the Report Type
        self.reportType = reportType
        # Let's remember the Control Object, if one is passed in
        self.ControlObject = controlObject
        # If a Control Object has been passed in ...
        if self.ControlObject != None:
            # ... register this report with the Control Object (which adds it to the Windows Menu)
            self.ControlObject.AddReportWindow(self)
        # Create a connection to the database
        DBConn = DBInterface.get_db()
        # Create a cursor and execute the appropriate query
        self.DBCursor = DBConn.cursor()
        # Determine the screen size for setting the initial dialog size
        rect = wx.Display(TransanaGlobal.configData.primaryScreen).GetClientArea()  # wx.ClientDisplayRect()
        width = rect[2] * .80
        height = rect[3] * .80
        # Create the basic Frame structure with a white background
        self.frame = wx.Frame.__init__(self, parent, -1, title, pos=(10, 10), size=wx.Size(width, height), style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL | wx.NO_FULL_REPAINT_ON_RESIZE)
        self.SetBackgroundColour(wx.WHITE)
        # Set the icon
        transanaIcon = wx.Icon(os.path.join(TransanaGlobal.programDir, "images", "Transana.ico"), wx.BITMAP_TYPE_ICO)
        self.SetIcon(transanaIcon)
        # Initialize Media Length to 0
        self.MediaLength = 0
        # Initialize all the data Lists to empty
        self.episodeList = []
        self.filteredEpisodeList = []
        self.clipList = []
        self.clipFilterList = []
        self.snapshotList = []
        self.snapshotFilterList = []
        self.unfilteredKeywordList = []
        self.filteredKeywordList = []
        # To be able to show only parts of an Episode Time Line, we need variables for the time boundaries.
        self.startTime = 0
        self.endTime = 0
        self.keywordClipList = {}
        self.configName = ''
        # Initialize variables required to avoid crashes when the visualization has been cleared
        self.graphicindent = 0
        self.Bounds = [1, 1, 1, 1]
        # Create a dictionary of the colors for each keyword.
        self.keywordColors = {'lastColor' : -1}
        # Get the Configuration values for the Series Map Options
        self.barHeight = TransanaGlobal.configData.seriesMapBarHeight
        self.whitespaceHeight = TransanaGlobal.configData.seriesMapWhitespace
        self.hGridLines = TransanaGlobal.configData.seriesMapHorizontalGridLines
        self.vGridLines = TransanaGlobal.configData.seriesMapVerticalGridLines
        self.singleLineDisplay = TransanaGlobal.configData.singleLineDisplay
        self.showLegend = TransanaGlobal.configData.showLegend
        # We default to Color Output. When this was configurable, if a new Map was
        # created in B & W, the colors never worked right afterwards.
        self.colorOutput = True
        # Get the number of lines per page for multi-page reports
        self.linesPerPage = 66
        # If we have a Series Keyword Sequence Map in multi-line mode ...
        if (self.reportType == 1) and (not self.singleLineDisplay):
            # ... initialize the Episode Name Keyword Lookup Table here.
            self.epNameKWGKWLookup = {}
        # Initialize the Episode Counter, used for vertical placement.
        self.episodeCount = 0
        # We need to be able to look up Episode Lengths for the Bar Graph. Let's remember them.
        self.episodeLengths = {}
        # Remember the appropriate Episode information
        self.seriesNum = seriesNum
        self.seriesName = seriesName
        # indicate that we're not working from a Clip. (The Series Maps are never Clip-based.)
        self.clipNum = None
        # You can't have a separate menu on the Mac, so we'll use a Toolbar
        self.toolBar = self.CreateToolBar(wx.TB_HORIZONTAL | wx.NO_BORDER | wx.TB_TEXT)
        self.toolBar.AddTool(T_FILE_FILTER, TransanaImages.ArtProv_LISTVIEW.GetBitmap(), shortHelpString=_("Filter"))
        self.toolBar.AddTool(T_FILE_SAVEAS, TransanaImages.SaveJPG16.GetBitmap(), shortHelpString=_('Save As'))
        self.toolBar.AddTool(T_FILE_PRINTSETUP, TransanaImages.PrintSetup.GetBitmap(), shortHelpString=_('Set up Page'))
        # Disable Print Setup for Right-To-Left languages
        # if (TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft):
        #     self.toolBar.EnableTool(T_FILE_PRINTSETUP, False)
        self.toolBar.AddTool(T_FILE_PRINTPREVIEW, TransanaImages.PrintPreview.GetBitmap(), shortHelpString=_('Print Preview'))
        # Disable Print Preview on the PPC Mac and for Right-To-Left languages
        if (platform.processor() == 'powerpc') or (TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft):
            self.toolBar.EnableTool(T_FILE_PRINTPREVIEW, False)
        self.toolBar.AddTool(T_FILE_PRINT, TransanaImages.Print.GetBitmap(), shortHelpString=_('Print'))
        # Disable Print Setup for Right-To-Left languages
        # if (TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft):
        #     self.toolBar.EnableTool(T_FILE_PRINT, False)
        # create a bitmap button for the Move Down button
        self.toolBar.AddTool(T_HELP_HELP, TransanaImages.ArtProv_HELP.GetBitmap(), shortHelpString=_("Help"))
        self.toolBar.AddTool(T_FILE_EXIT, TransanaImages.Exit.GetBitmap(), shortHelpString=_('Exit'))
        self.toolBar.Realize()
        # Let's go ahead and keep the menu for non-Mac platforms
        if not '__WXMAC__' in wx.PlatformInfo:
            # Add a Menu Bar
            menuBar = wx.MenuBar()  # Create the Menu Bar
            self.menuFile = wx.Menu()  # Create the File Menu
            self.menuFile.Append(M_FILE_FILTER, _("&Filter"), _("Filter report contents"))  # Add "Filter" to File Menu
            self.menuFile.Append(M_FILE_SAVEAS, _("Save &As"), _("Save image in JPEG format"))  # Add "Save As" to File Menu
            self.menuFile.Enable(M_FILE_SAVEAS, False)
            self.menuFile.Append(M_FILE_PRINTSETUP, _("Page Setup"), _("Set up Page"))  # Add "Printer Setup" to the File Menu
            # Disable Print Setup for Right-To-Left languages
            # if (TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft):
            #     self.menuFile.Enable(M_FILE_PRINTSETUP, False)
            self.menuFile.Append(M_FILE_PRINTPREVIEW, _("Print Preview"), _("Preview your printed output"))  # Add "Print Preview" to the File Menu
            self.menuFile.Enable(M_FILE_PRINTPREVIEW, False)
            self.menuFile.Append(M_FILE_PRINT, _("&Print"), _("Send your output to the Printer"))  # Add "Print" to the File Menu
            self.menuFile.Enable(M_FILE_PRINT, False)
            self.menuFile.Append(M_FILE_EXIT, _("E&xit"), _("Exit the Series Map program"))  # Add "Exit" to the File Menu
            menuBar.Append(self.menuFile, _('&File'))  # Add the File Menu to the Menu Bar
            self.menuHelp = wx.Menu()
            self.menuHelp.Append(M_HELP_HELP, _("&Help"), _("Help"))
            menuBar.Append(self.menuHelp, _("&Help"))
            self.SetMenuBar(menuBar)  # Connect the Menu Bar to the Frame
        # Link menu items and toolbar buttons to the appropriate methods
        wx.EVT_MENU(self, M_FILE_FILTER, self.OnFilter)  # Attach File > Filter to a method
        wx.EVT_MENU(self, T_FILE_FILTER, self.OnFilter)  # Attach Toolbar Filter to a method
        wx.EVT_MENU(self, M_FILE_SAVEAS, self.OnSaveAs)  # Attach File > Save As to a method
        wx.EVT_MENU(self, T_FILE_SAVEAS, self.OnSaveAs)  # Attach Toolbar Save As to a method
        wx.EVT_MENU(self, M_FILE_PRINTSETUP, self.OnPrintSetup)  # Attach File > Print Setup to a method
        wx.EVT_MENU(self, T_FILE_PRINTSETUP, self.OnPrintSetup)  # Attach Toolbar Print Setup to a method
        wx.EVT_MENU(self, M_FILE_PRINTPREVIEW, self.OnPrintPreview)  # Attach File > Print Preview to a method
        wx.EVT_MENU(self, T_FILE_PRINTPREVIEW, self.OnPrintPreview)  # Attach Toolbar Print Preview to a method
        wx.EVT_MENU(self, M_FILE_PRINT, self.OnPrint)  # Attach File > Print to a method
        wx.EVT_MENU(self, T_FILE_PRINT, self.OnPrint)  # Attach Toolbar Print to a method
        wx.EVT_MENU(self, M_FILE_EXIT, self.CloseWindow)  # Attach CloseWindow to File > Exit
        wx.EVT_MENU(self, T_FILE_EXIT, self.CloseWindow)  # Attach CloseWindow to Toolbar Exit
        wx.EVT_MENU(self, M_HELP_HELP, self.OnHelp)
        wx.EVT_MENU(self, T_HELP_HELP, self.OnHelp)
        # Bind the form's EVT_CLOSE method
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        # Determine the window boundaries
        (w, h) = self.GetClientSizeTuple()
        self.Bounds = (5, 5, w - 10, h - 25)
        # Create the Graphic Area using the GraphicControlClass
        # NOTE: EVT_LEFT_DOWN, EVT_LEFT_UP, and EVT_RIGHT_UP are caught in GraphicsControlClass and are passed to this routine's
        #       OnLeftDown and OnLeftUp (for both left and right) methods because of the "passMouseEvents" parameter
        self.graphic = GraphicsControlClass.GraphicsControl(self, -1, wx.Point(self.Bounds[0], self.Bounds[1]),
                                                            (self.Bounds[2] - self.Bounds[0], self.Bounds[3] - self.Bounds[1]),
                                                            (self.Bounds[2] - self.Bounds[0], self.Bounds[3] - self.Bounds[1]),
                                                            passMouseEvents=True)
        # Add a Status Bar
        self.CreateStatusBar()
        # Attach the Resize Event
        wx.EVT_SIZE(self, self.OnSize)
        # We'll detect mouse movement in the GraphicsControlClass from out here, as
        # the KeywordMap object is the object that knows what the data is on the graphic.
        self.graphic.Bind(wx.EVT_MOTION, self.OnMouseMotion)
        # Prepare objects for use in Printing
        self.printData = wx.PrintData()
        self.printData.SetPaperId(wx.PAPER_LETTER)
        # Center on the screen
        TransanaGlobal.CenterOnPrimary(self)
        # Show the Frame
        self.Show(True)
        # Populate the drawing
        self.ProcessSeries()
        self.DrawGraph()
        # Trigger the load of the Default Filter, if one exists. An event of None signals we're loading the
        # Default config, and the OnFilter method will handle drawing the graph!
        self.OnFilter(None)
        # Restore Cursor to Arrow
        TransanaGlobal.menuWindow.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
    # Define the Method that implements Filter
    def OnFilter(self, event):
        """ Implement the Filter Dialog call for Series Maps.
            An event of None signals that the Default filter configuration should be
            loaded silently (see the call at the end of __init__); any real event
            shows the Filter Dialog modally.  Updates the episode / clip / snapshot /
            keyword lists and display options from the dialog, then redraws the graph. """
        if event == None:
            loadDefault = True
        else:
            loadDefault = False
        # Set the Cursor to the Hourglass while the report is assembled
        TransanaGlobal.menuWindow.SetCursor(wx.StockCursor(wx.CURSOR_WAIT))
        # Set up parameters for creating the Filter Dialog.  Series Map Filter requires Series Number (as episodeNum) for the Config Save.
        title = string.join([self.title, unicode(_("Filter Dialog"), 'utf8')], ' ')
        # See if the Series Map wants the Clip Filter
        clipFilter = (len(self.clipFilterList) > 0)
        # See if there are Snapshots in the Snapshot Filter List
        snapshotFilter = (len(self.snapshotFilterList) > 0)
        # See if there are Keywords in the Filter List
        keywordFilter = (len(self.unfilteredKeywordList) > 0)
        # Series Map wants Keyword Color customization if it has keywords.
        keywordColors = (len(self.unfilteredKeywordList) > 0)
        # We want the Options tab
        options = True
        # reportType=5 indicates it is for a Series Sequence Map.
        # reportType=6 indicates it is for a Series Bar Graph.
        # reportType=7 indicates it is for a Series Percentage Map
        reportType = self.reportType + 4
        # The Series Keyword Sequence Map has all the usual parameters plus Time Range data and the Single Line Display option
        if self.reportType in [1]:
            # Create a Filter Dialog, passing all the necessary parameters.
            dlgFilter = FilterDialog.FilterDialog(self,
                                                  -1,
                                                  title,
                                                  reportType=reportType,
                                                  loadDefault=loadDefault,
                                                  configName=self.configName,
                                                  reportScope=self.seriesNum,
                                                  episodeFilter=True,
                                                  episodeSort=True,
                                                  clipFilter=clipFilter,
                                                  snapshotFilter=snapshotFilter,
                                                  keywordFilter=keywordFilter,
                                                  keywordSort=True,
                                                  keywordColor=keywordColors,
                                                  options=options,
                                                  startTime=self.startTime,
                                                  endTime=self.endTime,
                                                  barHeight=self.barHeight,
                                                  whitespace=self.whitespaceHeight,
                                                  hGridLines=self.hGridLines,
                                                  vGridLines=self.vGridLines,
                                                  singleLineDisplay=self.singleLineDisplay,
                                                  showLegend=self.showLegend,
                                                  colorOutput=self.colorOutput)
        elif self.reportType in [2, 3]:
            # Create a Filter Dialog, passing all the necessary parameters.
            dlgFilter = FilterDialog.FilterDialog(self,
                                                  -1,
                                                  title,
                                                  reportType=reportType,
                                                  loadDefault=loadDefault,
                                                  configName=self.configName,
                                                  reportScope=self.seriesNum,
                                                  episodeFilter=True,
                                                  episodeSort=True,
                                                  clipFilter=clipFilter,
                                                  snapshotFilter=snapshotFilter,
                                                  keywordFilter=keywordFilter,
                                                  keywordSort=True,
                                                  keywordColor=keywordColors,
                                                  options=options,
                                                  barHeight=self.barHeight,
                                                  whitespace=self.whitespaceHeight,
                                                  hGridLines=self.hGridLines,
                                                  vGridLines=self.vGridLines,
                                                  showLegend=self.showLegend,
                                                  colorOutput=self.colorOutput)
        # NOTE(review): if self.reportType is ever outside [1, 2, 3], dlgFilter is never
        # created and the next line raises NameError.  Presumably reportType is always
        # 1-3 for this class -- confirm against the callers / __init__.
        # Sort the Episode List
        self.episodeList.sort()
        # Inform the Filter Dialog of the Episodes
        dlgFilter.SetEpisodes(self.episodeList)
        # If we requested the Clip Filter ...
        if clipFilter:
            # We want the Clips sorted in Clip ID order in the FilterDialog.  We handle that out here, as the Filter Dialog
            # has to deal with manual clip ordering in some instances, though not here, so it can't deal with this.
            self.clipFilterList.sort()
            # Inform the Filter Dialog of the Clips
            dlgFilter.SetClips(self.clipFilterList)
        # if there are Snapshots ...
        if snapshotFilter:
            # ... populate the Filter Dialog with Snapshots
            dlgFilter.SetSnapshots(self.snapshotFilterList)
        # Keyword Colors must be specified before Keywords!  So if we want Keyword Colors, ...
        if keywordColors:
            # If we're in grayscale mode, the colors are probably mangled, so let's fix them before
            # we send them to the Filter dialog.
            if not self.colorOutput:
                # A shallow copy of the dictionary object should get the job done.
                self.keywordColors = self.rememberedKeywordColors.copy()
            # Inform the Filter Dialog of the colors used for each Keyword
            dlgFilter.SetKeywordColors(self.keywordColors)
        if keywordFilter:
            # Inform the Filter Dialog of the Keywords
            dlgFilter.SetKeywords(self.unfilteredKeywordList)
        # Set the Cursor to the Arrow now that the filter dialog is assembled
        TransanaGlobal.menuWindow.SetCursor(wx.StockCursor(wx.CURSOR_ARROW))
        # Create a dummy error message to get our while loop started.
        errorMsg = 'Start Loop'
        # Keep trying as long as there is an error message
        # (the dialog is re-shown until the user enters legal values or cancels).
        while errorMsg != '':
            # Clear the last (or dummy) error message.
            errorMsg = ''
            if loadDefault:
                # ... get the list of existing configuration names.
                profileList = dlgFilter.GetConfigNames()
                # If (translated) "Default" is in the list ...
                # (NOTE that the default config name is stored in English, but gets translated by GetConfigNames!)
                if unicode(_('Default'), 'utf8') in profileList:
                    # ... then signal that we need to load the config.
                    dlgFilter.OnFileOpen(None)
                    # Fake that we asked the user for a filter name and got an OK
                    result = wx.ID_OK
                # If we're loading a Default profile, but there's none in the list, we can skip
                # the rest of the Filter method by pretending we got a Cancel from the user.
                else:
                    result = wx.ID_CANCEL
            # If we're not loading a Default profile ...
            else:
                # ... we need to show the Filter Dialog here.
                result = dlgFilter.ShowModal()
            # Show the Filter Dialog and see if the user clicks OK
            if result == wx.ID_OK:
                # Get the Episode Data from the Filter Dialog
                self.episodeList = dlgFilter.GetEpisodes()
                # If we requested Clip Filtering ...
                if clipFilter:
                    # ... then get the filtered clip data
                    self.clipFilterList = dlgFilter.GetClips()
                if snapshotFilter:
                    self.snapshotFilterList = dlgFilter.GetSnapshots()
                # Get the complete list of keywords from the Filter Dialog.  We'll deduce the filter info in a moment.
                # (This preserves the "check" info for later reuse.)
                self.unfilteredKeywordList = dlgFilter.GetKeywords()
                # If we requested Keyword Color data ...
                if keywordColors:
                    # ... then get the keyword color data from the Filter Dialog
                    self.keywordColors = dlgFilter.GetKeywordColors()
                # Reset the Filtered Keyword List
                self.filteredKeywordList = []
                # Iterate through the entire Keword List ...
                for (kwg, kw, checked) in self.unfilteredKeywordList:
                    # ... and determine which keywords were checked.
                    if checked:
                        # Only the checked ones go into the filtered keyword list.
                        self.filteredKeywordList.append((kwg, kw))
                # If we had an Options Tab, extract that data.
                if options:
                    # Only the Series Keyword Sequence Map needs the Time Range options.
                    if self.reportType in [1]:
                        # Let's get the Time Range data.
                        # Start Time must be 0 or greater.  Otherwise, don't change it!
                        if Misc.time_in_str_to_ms(dlgFilter.GetStartTime()) >= 0:
                            self.startTime = Misc.time_in_str_to_ms(dlgFilter.GetStartTime())
                        else:
                            errorMsg += _("Illegal value for Start Time.\n")
                        # If the Start Time is greater than the media length, reset it to 0.
                        if self.startTime >= self.MediaLength:
                            dlgFilter.startTime.SetValue(Misc.time_in_ms_to_str(0))
                            errorMsg += _("Illegal value for Start Time.\n")
                        # End Time must be at least 0.  Otherwise, don't change it!
                        if (Misc.time_in_str_to_ms(dlgFilter.GetEndTime()) >= 0):
                            self.endTime = Misc.time_in_str_to_ms(dlgFilter.GetEndTime())
                        else:
                            errorMsg += _("Illegal value for End Time.\n")
                        # If the end time is 0 or greater than the media length, set it to the media length.
                        if (self.endTime == 0) or (self.endTime > self.MediaLength):
                            self.endTime = self.MediaLength
                        # Start time cannot equal end time (but this check must come after setting endtime == 0 to MediaLength)
                        if self.startTime == self.endTime:
                            errorMsg += _("Start Time and End Time must be different.")
                            # We need to alter the time values to prevent "division by zero" errors while the Filter Dialog is not modal.
                            self.startTime = 0
                            self.endTime = self.MediaLength
                        # If the Start Time is greater than the End Time, swap them.
                        if (self.endTime < self.startTime):
                            temp = self.startTime
                            self.startTime = self.endTime
                            self.endTime = temp
                    # Get the Bar Height and Whitespace Height for all versions of the Series Map
                    self.barHeight = dlgFilter.GetBarHeight()
                    self.whitespaceHeight = dlgFilter.GetWhitespace()
                    # we need to store the Bar Height and Whitespace values in the Configuration.
                    TransanaGlobal.configData.seriesMapBarHeight = self.barHeight
                    TransanaGlobal.configData.seriesMapWhitespace = self.whitespaceHeight
                    # Get the Grid Line data from the form
                    self.hGridLines = dlgFilter.GetHGridLines()
                    self.vGridLines = dlgFilter.GetVGridLines()
                    # Store the Grid Line data in the Configuration
                    TransanaGlobal.configData.seriesMapHorizontalGridLines = self.hGridLines
                    TransanaGlobal.configData.seriesMapVerticalGridLines = self.vGridLines
                    # Only the Series Keyword Sequence Graph needs the Single Line Display Option data.
                    if self.reportType in [1]:
                        # Get the singleLineDisplay value from the dialog
                        self.singleLineDisplay = dlgFilter.GetSingleLineDisplay()
                        # Remember the value.
                        TransanaGlobal.configData.singleLineDisplay = self.singleLineDisplay
                    # Get the showLegend value from the dialog
                    self.showLegend = dlgFilter.GetShowLegend()
                    # Remember the value.  (This doesn't get saved.)
                    TransanaGlobal.configData.showLegend = self.showLegend
                    # Detect if the colorOutput value is actually changing.
                    if (self.colorOutput != dlgFilter.GetColorOutput()):
                        # If we're going from color to grayscale ...
                        if self.colorOutput:
                            # ... remember what the colors were before they get all screwed up by displaying
                            # the graphic without them.
                            self.rememberedKeywordColors = {}
                            self.rememberedKeywordColors = self.keywordColors.copy()
                    # Get the colorOutput value from the dialog
                    self.colorOutput = dlgFilter.GetColorOutput()
            # If any validation above failed, show the accumulated messages and loop again.
            if errorMsg != '':
                errorDlg = Dialogs.ErrorDialog(self, errorMsg)
                errorDlg.ShowModal()
                errorDlg.Destroy()
        # Remember the configuration name for later reuse
        self.configName = dlgFilter.configName
        # Destroy the Filter Dialog.  We're done with it.
        dlgFilter.Destroy()
        # Now we can draw the graph.
        self.DrawGraph()
# Define the Method that implements Save As
def OnSaveAs(self, event):
self.graphic.SaveAs()
# Define the Method that implements Printer Setup
def OnPrintSetup(self, event):
# Let's use PAGE Setup here ('cause you can do Printer Setup from Page Setup.) It's a better system
# that allows Landscape on Mac.
pageSetupDialogData = wx.PageSetupDialogData(self.printData)
pageSetupDialogData.CalculatePaperSizeFromId()
pageDialog = wx.PageSetupDialog(self, pageSetupDialogData)
pageDialog.ShowModal()
self.printData = wx.PrintData(pageDialog.GetPageSetupData().GetPrintData())
pageDialog.Destroy()
# Define the Method that implements Print Preview
def OnPrintPreview(self, event):
lineHeight = self.CalcY(1) - self.CalcY(0)
printout = MyPrintout(self.title, self.graphic, multiPage=True, lineStart=self.CalcY(0) - int(lineHeight / 2.0), lineHeight=lineHeight)
printout2 = MyPrintout(self.title, self.graphic, multiPage=True, lineStart=self.CalcY(0) - int(lineHeight / 2.0), lineHeight=lineHeight)
self.preview = wx.PrintPreview(printout, printout2, self.printData)
if not self.preview.Ok():
self.SetStatusText(_("Print Preview Problem"))
return
theWidth = max(wx.Display(TransanaGlobal.configData.primaryScreen).GetClientArea()[2] - 180, 760) # wx.ClientDisplayRect()
theHeight = max(wx.Display(TransanaGlobal.configData.primaryScreen).GetClientArea()[3] - 200, 560) # wx.ClientDisplayRect()
frame2 = wx.PreviewFrame(self.preview, self, _("Print Preview"), size=(theWidth, theHeight))
frame2.Centre()
frame2.Initialize()
frame2.Show(True)
# Define the Method that implements Print
def OnPrint(self, event):
pdd = wx.PrintDialogData()
pdd.SetPrintData(self.printData)
printer = wx.Printer(pdd)
lineHeight = self.CalcY(1) - self.CalcY(0)
printout = MyPrintout(self.title, self.graphic, multiPage=True, lineStart=self.CalcY(0) - int(lineHeight / 2.0), lineHeight=lineHeight)
if not printer.Print(self, printout):
dlg = Dialogs.ErrorDialog(None, _("There was a problem printing this report."))
dlg.ShowModal()
dlg.Destroy()
# NO! REMOVED to prevent crash on 2nd print attempt following Filter Config.
# else:
# self.printData = printer.GetPrintDialogData().GetPrintData()
printout.Destroy()
def OnClose(self, event):
""" Handle the Close Event """
# If the report has a defined Control Object ...
if self.ControlObject != None:
# ... remove this report from the Menu Window's Window Menu
self.ControlObject.RemoveReportWindow(self.title, self.reportNumber)
# Inherit the parent Close event so things will, you know, close.
event.Skip()
# Define the Method that closes the Window on File > Exit
def CloseWindow(self, event):
# Close!
self.Close()
def OnHelp(self, event):
""" Implement the Filter Dialog Box's Help function """
# Define the Help Context
HelpContext = "Library Keyword Graphs"
# If a Help Window is defined ...
if TransanaGlobal.menuWindow != None:
# ... call Help!
TransanaGlobal.menuWindow.ControlObject.Help(HelpContext)
def OnSize(self, event):
""" Handle Resize Events by resizing the Graphic Control and redrawing the graphic """
(w, h) = self.GetClientSizeTuple()
if self.Bounds[1] == 5:
self.Bounds = (5, 5, w - 10, h - 25)
else:
self.Bounds = (5, 40, w - 10, h - 30)
self.DrawGraph()
def CalcX(self, XPos):
""" Determine the proper horizontal coordinate for the given time """
# We need to start by defining the legal range for the type of graph we're working with.
# The Sequence Map is tied to the start and end time variables.
if self.reportType == 1:
startVal = self.startTime
endVal = self.endTime
# The Bar Graph stretches from 0 to the time line Maximum variable
elif self.reportType == 2:
startVal = 0.0
if self.timelineMax == 0:
endVal = 1
else:
endVal = self.timelineMax
# The Percentage Graph ranges from 0 to 100!
elif self.reportType == 3:
startVal = 0.0
endVal = 100.0
# Specify a margin width
marginwidth = (0.06 * (self.Bounds[2] - self.Bounds[0]))
# The Horizonal Adjustment is the global graphic indent
hadjust = self.graphicindent
# The Scaling Factor is the active portion of the drawing area width divided by the total media length
# The idea is to leave the left margin, self.graphicindent for Keyword Labels, and the right margin
if self.MediaLength > 0:
scale = (float(self.Bounds[2]) - self.Bounds[0] - hadjust - 2 * marginwidth) / (endVal - startVal)
else:
scale = 0.0
# The horizontal coordinate is the left margin plus the Horizontal Adjustment for Keyword Labels plus
# position times the scaling factor
res = marginwidth + hadjust + ((XPos - startVal) * scale)
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
# ... adjust for a right-to-left graph
return int(self.Bounds[2] - self.Bounds[0] - res)
# If we are in a Left-To-Right language ...
else:
# ... just return the calculated value
return int(res)
def FindTime(self, x):
""" Given a horizontal pixel position, determine the corresponding time value from
the video time line """
# determine the margin width
marginwidth = (0.06 * (self.Bounds[2] - self.Bounds[0]))
# The Horizonal Adjustment is the global graphic indent
hadjust = self.graphicindent
# The Scaling Factor is the active portion of the drawing area width divided by the total media length
# The idea is to leave the left margin, self.graphicindent for Keyword Labels, and the right margin
if self.MediaLength > 0:
scale = (float(self.Bounds[2]) - self.Bounds[0] - hadjust - 2 * marginwidth) / (self.endTime - self.startTime)
else:
scale = 1.0
# The time is calculated by taking the total width, subtracting the margin values and horizontal indent,
# and then dividing the result by the scale factor calculated above
time = int((x - marginwidth - hadjust) / scale) + self.startTime
return time
def CalcY(self, YPos):
""" Determine the vertical position for a given keyword index """
# Spacing is the larger of (12 pixels for label text or the bar height) plus 2 for whitespace
spacing = max(12, self.barHeight) + self.whitespaceHeight
# Top margin is 30 for titles plus 28 for the timeline
topMargin = 30 + (2 * spacing)
return int(spacing * YPos + topMargin)
    def FindKeyword(self, y):
        """ Given a vertical pixel position, determine the corresponding Keyword data.
            Returns an Episode name (single-line display), an
            (Episode Name, Keyword Group, Keyword) tuple (multi-line display),
            or None when nothing matches. """
        # NOTE: This method is only valid if self.reportType == 1, the Sequence Map.
        # Other variations of the Series maps may use different key values for the dictionary.
        if self.reportType != 1:
            return None
        # If the graphic is scrolled, the raw Y value does not point to the correct Keyword.
        # Determine the unscrolled equivalent Y position.
        (modX, modY) = self.graphic.CalcUnscrolledPosition(0, y)
        # Now we need to get the keys for the Lookup Dictionary
        # (Python 2 idiom: keys() returns a list, sorted in place below.)
        keyVals = self.epNameKWGKWLookup.keys()
        # We need the keys to be in order, so we can quit when we've found what we're looking for.
        keyVals.sort()
        # The single-line display and the multi-line display handle the lookup differently, of course.
        # Let's start with the single-line display.
        if self.singleLineDisplay:
            # Initialize the return value to None in case nothing is found.  The single-line version expects an Episode Name.
            returnVal = None
            # We also need a temporary value initialized to None.  Our data structure returns complex data, from which we
            # extract the desired value.
            tempVal = None
            # Iterate through the sorted keys.  The keys are actually y values for the graph!
            for yVal in keyVals:
                # If we find a key value that is smaller than the unscrolled Graphic y position ...
                if yVal <= modY:
                    # ... then we've found a candidate for what we're looking for.  But we keep iterating,
                    # because we want the LARGEST yVal that's smaller than the graphic y value.
                    tempVal = self.epNameKWGKWLookup[yVal]
                # Once our y values are too large ...
                else:
                    # ... we should stop iterating through the (sorted) keys.
                    break
            # If we found a valid data structure ...
            if tempVal != None:
                # ... we can extract the Episode name by looking at the first value of the first value of the first key.
                # (Python 2 only: keys()[0] indexes the keys list; the "first" key is in arbitrary dict order.)
                returnVal = tempVal[tempVal.keys()[0]][0][0]
        # Here, we handle the multi-line display of the Sequence Map.
        else:
            # Initialize the return value to a tuple of three Nones in case nothing is found.
            # The multi-line version expects an Episode Name, Keyword Group, Keyword tuple.
            returnVal = (None, None, None)
            # Iterate through the sorted keys.  The keys are actually y values for the graph!
            for yVal in keyVals:
                # If we find a key value that is smaller than the unscrolled Graphic y position ...
                if yVal <= modY:
                    # ... then we've found a candidate for what we're looking for.  But we keep iterating,
                    # because we want the LARGEST yVal that's smaller than the graphic y value.
                    returnVal = self.epNameKWGKWLookup[yVal]
                # Once our y values are too large ...
                else:
                    # ... we should stop iterating through the (sorted) keys.
                    break
        # Return the value we found, or None
        return returnVal
def GetScaleIncrements(self, MediaLength):
# The general rule is to try to get logical interval sizes with 8 or fewer time increments.
# You always add a bit (20% at the lower levels) of the time interval to the MediaLength
# because the final time is placed elsewhere and we don't want overlap.
# This routine covers from 1 second to 18 hours in length.
# media Length of 9 seconds or less = 1 second intervals
if MediaLength < 9001:
Num = int(round((MediaLength + 200) / 1000.0))
Interval = 1000
# media length of 18 seconds or less = 2 second intervals
elif MediaLength < 18001:
Num = int(round((MediaLength + 400) / 2000.0))
Interval = 2000
# media length of 30 seconds or less = 5 second intervals
elif MediaLength < 30001:
Num = int(round((MediaLength + 2000) / 5000.0))
Interval = 5000
# media length of 50 seconds or less = 5 second intervals
elif MediaLength < 50001:
Num = int(round((MediaLength + 1000) / 5000.0))
Interval = 5000
# media Length of 1:30 or less = 10 second intervals
elif MediaLength < 90001:
Num = int(round((MediaLength + 2000) / 10000.0))
Interval = 10000
# media length of 2:50 or less = 20 second intervals
elif MediaLength < 160001:
Num = int(round((MediaLength + 4000) / 20000.0))
Interval = 20000
# media length of 4:30 or less = 30 second intervals
elif MediaLength < 270001:
Num = int(round((MediaLength + 6000) / 30000.0))
Interval = 30000
# media length of 6:00 or less = 60 second intervals
elif MediaLength < 360001:
Num = int(round((MediaLength + 12000) / 60000.0))
Interval = 60000
# media length of 10:00 or less = 60 second intervals
elif MediaLength < 600001:
Num = int(round((MediaLength + 8000) / 60000.0))
Interval = 60000
# media length of 16:00 or less = 2 minute intervals
elif MediaLength < 960001:
Num = int(round((MediaLength + 24000) / 120000.0))
Interval = 120000
# media length of 40:00 or less = 5 minute intervals
elif MediaLength < 2400001:
Num = int(round((MediaLength + 60000) / 300000.0))
Interval = 300000
# media length if 1:10:00 or less get 10 minute intervals
elif MediaLength < 4200001:
Num = int(round((MediaLength + 80000) / 600000.0))
Interval = 600000
# media length if 3:00:00 or less get 30 minute intervals
elif MediaLength < 10800001:
Num = int(round((MediaLength + 240000) / 1800000.0))
Interval = 1800000
# media length if 4:00:00 or less get 30 minute intervals
elif MediaLength < 14400001:
Num = int(round((MediaLength + 60000) / 1800000.0))
Interval = 1800000
# media length if 9:00:00 or less get 60 minute intervals
elif MediaLength < 32400001:
Num = int(round((MediaLength + 120000) / 3600000.0))
Interval = 3600000
# Longer videos get 2 hour intervals
else:
Num = int(round((MediaLength + 240000) / 7200000.0))
Interval = 7200000
return Num, Interval
    def ProcessSeries(self):
        """ Query the database for the Episodes in this Series (Library), and for each
            Episode the Clip and Snapshot keywords to display, populating
            self.episodeList, self.clipList, self.clipFilterList, self.snapshotList,
            self.snapshotFilterList, and the filtered/unfiltered keyword lists.
            Also determines self.MediaLength (the longest Episode) and self.endTime. """
        # Initialize Media Length to 0
        self.MediaLength = 0
        # Initialize all the data Lists to empty
        self.episodeList = []
        self.filteredEpisodeList = []
        self.clipList = []
        self.clipFilterList = []
        self.snapshotList = []
        self.snapshotFilterList = []
        self.unfilteredKeywordList = []
        self.filteredKeywordList = []
        # NOTE(review): epLengths is initialized here but never referenced again in this
        # method -- possibly a leftover; confirm before removing.
        if self.reportType == 2:
            epLengths = {}
        # Get Series Number, Episode Number, Media File Name, and Length
        SQLText = """SELECT e.EpisodeNum, e.EpisodeID, e.SeriesNum, e.MediaFile, e.EpLength, s.SeriesID
                       FROM Episodes2 e, Series2 s
                       WHERE s.SeriesNum = e.SeriesNum AND
                             s.SeriesNum = %s
                       ORDER BY EpisodeID """
        # Adjust the query for sqlite if needed
        SQLText = DBInterface.FixQuery(SQLText)
        # Execute the query
        self.DBCursor.execute(SQLText, (self.seriesNum, ))
        # For each Episode in the Series ...
        for (EpisodeNum, EpisodeID, SeriesNum, MediaFile, EpisodeLength, SeriesID) in self.DBCursor.fetchall():
            # Decode the database strings to proper unicode
            EpisodeID = DBInterface.ProcessDBDataForUTF8Encoding(EpisodeID)
            SeriesID = DBInterface.ProcessDBDataForUTF8Encoding(SeriesID)
            MediaFile = DBInterface.ProcessDBDataForUTF8Encoding(MediaFile)
            self.episodeList.append((EpisodeID, SeriesID, True))
            # Track the length of the LONGEST Episode, which defines the time line
            if (EpisodeLength > self.MediaLength):
                self.MediaLength = EpisodeLength
                self.endTime = self.MediaLength
            # Remember the Episode's length
            # (self.episodeLengths is presumably initialized in __init__ -- not visible here.)
            self.episodeLengths[(EpisodeID, SeriesID)] = EpisodeLength
            # Get the list of Keywords to be displayed
            SQLText = """SELECT ck.KeywordGroup, ck.Keyword
                           FROM Clips2 cl, ClipKeywords2 ck
                           WHERE cl.EpisodeNum = %s AND
                                 cl.ClipNum = ck.ClipNum
                           GROUP BY ck.keywordgroup, ck.keyword
                           ORDER BY KeywordGroup, Keyword, ClipStart"""
            # Adjust the query for sqlite if needed
            SQLText = DBInterface.FixQuery(SQLText)
            self.DBCursor.execute(SQLText, (EpisodeNum, ))
            for (kwg, kw) in self.DBCursor.fetchall():
                kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
                kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
                # Add each keyword once to each list, preserving "checked" state in the unfiltered list
                if not (kwg, kw) in self.filteredKeywordList:
                    self.filteredKeywordList.append((kwg, kw))
                if not (kwg, kw, True) in self.unfilteredKeywordList:
                    self.unfilteredKeywordList.append((kwg, kw, True))
            # Get the list of WHOLE SNAPSHOT Keywords to be displayed
            SQLText = """SELECT ck.KeywordGroup, ck.Keyword
                           FROM Snapshots2 sn, ClipKeywords2 ck
                           WHERE sn.EpisodeNum = %s AND
                                 sn.SnapshotNum = ck.SnapshotNum
                           GROUP BY ck.keywordgroup, ck.keyword
                           ORDER BY KeywordGroup, Keyword, SnapshotTimeCode"""
            # Adjust the query for sqlite if needed
            SQLText = DBInterface.FixQuery(SQLText)
            self.DBCursor.execute(SQLText, (EpisodeNum, ))
            for (kwg, kw) in self.DBCursor.fetchall():
                kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
                kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
                if not (kwg, kw) in self.filteredKeywordList:
                    self.filteredKeywordList.append((kwg, kw))
                if not (kwg, kw, True) in self.unfilteredKeywordList:
                    self.unfilteredKeywordList.append((kwg, kw, True))
            # Get the list of SNAPSHOT CODING Keywords to be displayed
            SQLText = """SELECT ck.KeywordGroup, ck.Keyword
                           FROM Snapshots2 sn, SnapshotKeywords2 ck
                           WHERE sn.EpisodeNum = %s AND
                                 sn.SnapshotNum = ck.SnapshotNum
                           GROUP BY ck.keywordgroup, ck.keyword
                           ORDER BY KeywordGroup, Keyword, SnapshotTimeCode"""
            # Adjust the query for sqlite if needed
            SQLText = DBInterface.FixQuery(SQLText)
            self.DBCursor.execute(SQLText, (EpisodeNum, ))
            for (kwg, kw) in self.DBCursor.fetchall():
                kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
                kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
                if not (kwg, kw) in self.filteredKeywordList:
                    self.filteredKeywordList.append((kwg, kw))
                if not (kwg, kw, True) in self.unfilteredKeywordList:
                    self.unfilteredKeywordList.append((kwg, kw, True))
            # Sort the Keyword List
            self.unfilteredKeywordList.sort()
            # Create the Keyword Placement lines to be displayed.  We need them to be in ClipStart, ClipNum order so colors will be
            # distributed properly across bands.
            SQLText = """SELECT ck.KeywordGroup, ck.Keyword, cl.ClipStart, cl.ClipStop, cl.ClipNum, cl.ClipID, cl.CollectNum
                           FROM Clips2 cl, ClipKeywords2 ck
                           WHERE cl.EpisodeNum = %s AND
                                 cl.ClipNum = ck.ClipNum
                           ORDER BY ClipStart, cl.ClipNum, KeywordGroup, Keyword"""
            # Adjust the query for sqlite if needed
            SQLText = DBInterface.FixQuery(SQLText)
            self.DBCursor.execute(SQLText, (EpisodeNum, ))
            for (kwg, kw, clipStart, clipStop, clipNum, clipID, collectNum) in self.DBCursor.fetchall():
                kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
                kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
                clipID = DBInterface.ProcessDBDataForUTF8Encoding(clipID)
                # If we're dealing with an Episode, self.clipNum will be None and we want all clips.
                # If we're dealing with a Clip, we only want to deal with THIS clip!
                if (self.clipNum == None) or (clipNum == self.clipNum):
                    self.clipList.append((kwg, kw, clipStart, clipStop, clipNum, clipID, collectNum, EpisodeID, SeriesID))
                    if not ((clipID, collectNum, True) in self.clipFilterList):
                        self.clipFilterList.append((clipID, collectNum, True))
            # Create the WHOLE SNAPSHOT Keyword Placement lines to be displayed.  We need them to be in SnapshotTimeCode, SnapshotNum order so colors will be
            # distributed properly across bands.
            SQLText = """SELECT ck.KeywordGroup, ck.Keyword, sn.SnapshotTimeCode, sn.SnapshotDuration, sn.SnapshotNum, sn.SnapshotID, sn.CollectNum
                           FROM Snapshots2 sn, ClipKeywords2 ck
                           WHERE sn.EpisodeNum = %s AND
                                 sn.SnapshotNum = ck.SnapshotNum
                           ORDER BY SnapshotTimeCode, sn.SnapshotNum, KeywordGroup, Keyword"""
            # Adjust the query for sqlite if needed
            SQLText = DBInterface.FixQuery(SQLText)
            self.DBCursor.execute(SQLText, (EpisodeNum, ))
            for (kwg, kw, SnapshotTimeCode, SnapshotDuration, SnapshotNum, SnapshotID, collectNum) in self.DBCursor.fetchall():
                kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
                kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
                SnapshotID = DBInterface.ProcessDBDataForUTF8Encoding(SnapshotID)
                # If we're dealing with an Episode, self.clipNum will be None and we want all clips.
                # If we're dealing with a Clip, we only want to deal with THIS clip!
                if (self.clipNum == None):
                    self.snapshotList.append((kwg, kw, SnapshotTimeCode, SnapshotTimeCode + SnapshotDuration, SnapshotNum, SnapshotID, collectNum, EpisodeID, SeriesID))
                    if not ((SnapshotID, collectNum, True) in self.snapshotFilterList):
                        self.snapshotFilterList.append((SnapshotID, collectNum, True))
            # Create the SNAPSHOT CODING Keyword Placement lines to be displayed.  We need them to be in SnapshotTimeCode, SnapshotNum order so colors will be
            # distributed properly across bands.
            SQLText = """SELECT ck.KeywordGroup, ck.Keyword, sn.SnapshotTimeCode, sn.SnapshotDuration, sn.SnapshotNum, sn.SnapshotID, sn.CollectNum
                           FROM Snapshots2 sn, SnapshotKeywords2 ck
                           WHERE sn.EpisodeNum = %s AND
                                 sn.SnapshotNum = ck.SnapshotNum
                           ORDER BY SnapshotTimeCode, sn.SnapshotNum, KeywordGroup, Keyword"""
            # Adjust the query for sqlite if needed
            SQLText = DBInterface.FixQuery(SQLText)
            self.DBCursor.execute(SQLText, (EpisodeNum, ))
            for (kwg, kw, SnapshotTimeCode, SnapshotDuration, SnapshotNum, SnapshotID, collectNum) in self.DBCursor.fetchall():
                kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
                kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
                SnapshotID = DBInterface.ProcessDBDataForUTF8Encoding(SnapshotID)
                # If we're dealing with an Episode, self.clipNum will be None and we want all clips.
                # If we're dealing with a Clip, we only want to deal with THIS clip!
                if (self.clipNum == None):
                    self.snapshotList.append((kwg, kw, SnapshotTimeCode, SnapshotTimeCode + SnapshotDuration, SnapshotNum, SnapshotID, collectNum, EpisodeID, SeriesID))
                    if not ((SnapshotID, collectNum, True) in self.snapshotFilterList):
                        self.snapshotFilterList.append((SnapshotID, collectNum, True))
        # Sort the Keyword List
        self.filteredKeywordList.sort()
    def UpdateKeywordVisualization(self):
        """ Update the Keyword Visualization following something that could have changed it.
            The debug print below indicates this method is believed to be dead code for
            this class. """
        # NOTE(review): if this method WERE called, the references to "seriesName" in the
        # loop below would raise NameError -- seriesName is never assigned in this method
        # (unless it exists as a module-level name, which seems unlikely).  Confirm
        # before resurrecting this code.
        print "LibraryMap.UpdateKeywordVisualization(): This should NEVER get called!!"
        # Clear the Clip List
        self.clipList = []
        # Clear the Filtered Clip List
        self.clipFilterList = []
        # Clear the graphic itself
        self.graphic.Clear()
        # Before we start, make a COPY of the keyword list so we can check for keywords that are no longer
        # included on the Map and need to be deleted from the KeywordLists
        delList = self.unfilteredKeywordList[:]
        # Now let's create the SQL to get all relevant Clip and Clip Keyword records
        SQLText = """SELECT ck.KeywordGroup, ck.Keyword, cl.ClipStart, cl.ClipStop, cl.ClipNum, cl.ClipID, cl.CollectNum, ep.EpisodeName
                       FROM Clips2 cl, ClipKeywords2 ck, Episodes2 ep
                       WHERE cl.EpisodeNum = %s AND
                             cl.ClipNum = ck.ClipNum AND
                             ep.EpisodeNum = cl.EpisodeNum
                       ORDER BY ClipStart, cl.ClipNum, KeywordGroup, Keyword"""
        # Adjust the query for sqlite if needed
        SQLText = DBInterface.FixQuery(SQLText)
        # Execute the query
        self.DBCursor.execute(SQLText, (self.episodeNum, ))
        # Iterate through the results ...
        for (kwg, kw, clipStart, clipStop, clipNum, clipID, collectNum, episodeName) in self.DBCursor.fetchall():
            # Decode the database strings to proper unicode
            kwg = DBInterface.ProcessDBDataForUTF8Encoding(kwg)
            kw = DBInterface.ProcessDBDataForUTF8Encoding(kw)
            clipID = DBInterface.ProcessDBDataForUTF8Encoding(clipID)
            episodeName = DBInterface.ProcessDBDataForUTF8Encoding(episodeName)
            # If we're dealing with an Episode, self.clipNum will be None and we want all clips.
            # If we're dealing with a Clip, we only want to deal with THIS clip!
            if (self.clipNum == None) or (clipNum == self.clipNum):
                # If a Clip is not found in the clipList ...
                if not ((kwg, kw, clipStart, clipStop, clipNum, clipID, collectNum, episodeName, seriesName) in self.clipList):
                    # ... add it to the clipList ...
                    self.clipList.append((kwg, kw, clipStart, clipStop, clipNum, clipID, collectNum, episodeName, seriesName))
                    # ... and if it's not in the clipFilter List (which it probably isn't!) ...
                    if not ((clipID, collectNum, True) in self.clipFilterList):
                        # ... add it to the clipFilterList.
                        self.clipFilterList.append((clipID, collectNum, True))
            # If the keyword is not in either of the Keyword Lists, ...
            if not (((kwg, kw) in self.filteredKeywordList) or ((kwg, kw, False) in self.unfilteredKeywordList)):
                # ... add it to both keyword lists.
                self.filteredKeywordList.append((kwg, kw))
                self.unfilteredKeywordList.append((kwg, kw, True))
            # If the keyword is in query results, it should be removed from the list of keywords to be deleted.
            # Check that list for either True or False versions of the keyword!
            if (kwg, kw, True) in delList:
                del(delList[delList.index((kwg, kw, True))])
            if (kwg, kw, False) in delList:
                del(delList[delList.index((kwg, kw, False))])
        # Iterate through ANY keywords left in the list of keywords to be deleted ...
        for element in delList:
            # ... and delete them from the unfiltered Keyword List
            del(self.unfilteredKeywordList[self.unfilteredKeywordList.index(element)])
            # If the keyword is also in the filtered keyword list ...
            if (element[0], element[1]) in self.filteredKeywordList:
                # ... it needs to be deleted from there too!
                del(self.filteredKeywordList[self.filteredKeywordList.index((element[0], element[1]))])
        # Now that the underlying data structures have been corrected, we're ready to redraw the Keyword Visualization
        self.DrawGraph()
def DrawGraph(self):
""" Actually Draw the Series Map """
self.keywordClipList = {}
# Series Keyword Sequence Map, if multi-line display is desired
if (self.reportType == 1) and (not self.singleLineDisplay):
epCount = 0
for (episodeName, seriesName, checked) in self.episodeList:
if checked:
epCount += 1
# Determine the graphic size needed for the number of episodes times the number of keywords plus two lines
# for each episode for the episode title and the blank line!
newheight = max(self.CalcY(epCount * (len(self.filteredKeywordList) + 2)), self.Bounds[3] - self.Bounds[1])
# Series Keyword Sequence Map's single-line display,
# Series Keyword Bar Graph, and Series Keyword Percentage Graph all need the data arranged the same way
else:
# Initialize a dictionary that will hold information about the bars we're drawing.
barData = {}
# We need to know how many Episodes we have on the graph. Initialize a counter
self.episodeCount = 0
for (episodeName, seriesName, checkVal) in self.episodeList:
if checkVal:
# Count all the Episodes that have been "checked" in the Filter.
self.episodeCount += 1
# Now we iterate through the CLIPS.
for (KWG, KW, Start, Stop, ClipNum, ClipName, CollectNum, episodeName, seriesName) in self.clipList:
# We make sure they are selected in the Filter, checking the Episode, Clips and Keyword selections
if ((episodeName, seriesName, True) in self.episodeList) and \
((ClipName, CollectNum, True) in self.clipFilterList) and \
((KWG, KW) in self.filteredKeywordList):
# Now we track the start and end times compared to the current display limits
if Start < self.startTime:
Start = self.startTime
if Start > self.endTime:
Start = self.endTime
if Stop > self.endTime:
Stop = self.endTime
if Stop < self.startTime:
Stop = self.startTime
# Set up the key we use to mark overlaps
overlapKey = (episodeName, KWG, KW)
# If Start and Stop are the same, the Clip is off the graph and should be ignored.
if Start != Stop:
# If the clip is ON the graph, let's check for overlap with other clips with the same keyword at the same spot
if not barData.has_key(overlapKey):
barData[overlapKey] = 0
# Add the bar length to the bar Data dictionary.
barData[overlapKey] += Stop - Start
# For the Series Keyword Bar Graph and the Series Keyword Percentage Graph ...
if self.reportType in [2, 3]:
# Now add the Clip to the keywordClipList. This holds all Keyword/Clip data in memory so it can be searched quickly
# This dictionary object uses the episode name and keyword pair as the key and holds a list of Clip data for all clips with that keyword.
# If the list for a given keyword already exists ...
if self.keywordClipList.has_key(overlapKey):
# Get the list of Clips that contain the current Keyword from the keyword / Clip List dictionary
overlapClips = self.keywordClipList[overlapKey]
# Iterate through the Clip List ...
for (objType, overlapStartTime, overlapEndTime, overlapClipNum, overlapClipName) in overlapClips:
# Let's look for overlap. Overlap artificially inflates the size of the bars, and must be eliminated.
overlapStart = Stop
overlapEnd = Start
if DEBUG and KWG == 'Transana Users' and KW == 'DavidW':
print "Start = %7d, overStart = %7s, Stop = %7s, overEnd = %7s" % (Start, overlapStartTime, Stop, overlapEndTime)
# Look for Start between overlapStartTime and overlapEndTime
if (Start >= overlapStartTime) and (Start < overlapEndTime):
overlapStart = Start
# Look for overlapStartTime between Start and Stop
if (overlapStartTime >= Start) and (overlapStartTime < Stop):
overlapStart = overlapStartTime
# Look for Stop between overlapStartTime and overlapEndTime
if (Stop > overlapStartTime) and (Stop <= overlapEndTime):
overlapEnd = Stop
# Look for overlapEndTime between Start and Stop
if (overlapEndTime > Start) and (overlapEndTime <= Stop):
overlapEnd = overlapEndTime
# If we've found an overlap, it will be indicated by Start being less than End!
if overlapStart < overlapEnd:
# We need to SUBTRACT the overlap time from the barData structure.
barData[overlapKey] -= overlapEnd - overlapStart
if DEBUG:
print "Bar Graph overlap found:", overlapKey, overlapEnd - overlapStart
# ... add the new Clip to the Clip List
self.keywordClipList[overlapKey].append(('Clip', Start, Stop, ClipNum, ClipName))
# If there is no entry for the given keyword ...
else:
# ... create a List object with the first clip's data for this Keyword Pair key
self.keywordClipList[overlapKey] = [('Clip', Start, Stop, ClipNum, ClipName)]
# Now we iterate through the Snapshot List.
for (KWG, KW, Start, Stop, SnapshotNum, SnapshotName, CollectNum, episodeName, seriesName) in self.snapshotList:
# We make sure they are selected in the Filter, checking the Episode, Clips and Keyword selections
if ((episodeName, seriesName, True) in self.episodeList) and \
((SnapshotName, CollectNum, True) in self.snapshotFilterList) and \
((KWG, KW) in self.filteredKeywordList):
# Now we track the start and end times compared to the current display limits
if Start < self.startTime:
Start = self.startTime
if Start > self.endTime:
Start = self.endTime
if Stop > self.endTime:
Stop = self.endTime
if Stop < self.startTime:
Stop = self.startTime
# Set up the key we use to mark overlaps
overlapKey = (episodeName, KWG, KW)
# If Start and Stop are the same, the Clip is off the graph and should be ignored.
if Start != Stop:
# If the snapshot is ON the graph, let's check for overlap with other clips/snapshots with the same keyword at the same spot
if not barData.has_key(overlapKey):
barData[overlapKey] = 0
# Add the bar length to the bar Data dictionary.
barData[overlapKey] += Stop - Start
# For the Series Keyword Bar Graph and the Series Keyword Percentage Graph ...
if self.reportType in [2, 3]:
# Now add the Clip to the keywordClipList. This holds all Keyword/Clip data in memory so it can be searched quickly
# This dictionary object uses the episode name and keyword pair as the key and holds a list of Clip data for all clips with that keyword.
# If the list for a given keyword already exists ...
if self.keywordClipList.has_key(overlapKey):
# Get the list of Clips that contain the current Keyword from the keyword / Clip List dictionary
overlapClips = self.keywordClipList[overlapKey]
# Iterate through the Clip List ...
for (objType, overlapStartTime, overlapEndTime, overlapClipNum, overlapClipName) in overlapClips:
# Let's look for overlap. Overlap artificially inflates the size of the bars, and must be eliminated.
overlapStart = Stop
overlapEnd = Start
if DEBUG and KWG == 'Transana Users' and KW == 'DavidW':
print "Start = %7d, overStart = %7s, Stop = %7s, overEnd = %7s" % (Start, overlapStartTime, Stop, overlapEndTime)
# Look for Start between overlapStartTime and overlapEndTime
if (Start >= overlapStartTime) and (Start < overlapEndTime):
overlapStart = Start
# Look for overlapStartTime between Start and Stop
if (overlapStartTime >= Start) and (overlapStartTime < Stop):
overlapStart = overlapStartTime
# Look for Stop between overlapStartTime and overlapEndTime
if (Stop > overlapStartTime) and (Stop <= overlapEndTime):
overlapEnd = Stop
# Look for overlapEndTime between Start and Stop
if (overlapEndTime > Start) and (overlapEndTime <= Stop):
overlapEnd = overlapEndTime
# If we've found an overlap, it will be indicated by Start being less than End!
if overlapStart < overlapEnd:
# We need to SUBTRACT the overlap time from the barData structure.
barData[overlapKey] -= overlapEnd - overlapStart
if DEBUG:
print "Bar Graph overlap found:", overlapKey, overlapEnd - overlapStart
# ... add the new Snapshot to the Clip List
self.keywordClipList[overlapKey].append(('Snapshot', Start, Stop, SnapshotNum, SnapshotName))
# If there is no entry for the given keyword ...
else:
# ... create a List object with the first Snapshot's data for this Keyword Pair key
self.keywordClipList[overlapKey] = [('Snapshot', Start, Stop, SnapshotNum, SnapshotName)]
# once we're done with checking overlaps here, let's clear out this variable,
# as it may get re-used later for other purposes!
self.keywordClipList = {}
if self.showLegend:
newheight = max(self.CalcY(self.episodeCount + len(self.filteredKeywordList) + 2), self.Bounds[3] - self.Bounds[1])
else:
newheight = max(self.CalcY(self.episodeCount), self.Bounds[3] - self.Bounds[1])
# Now that we have all necessary information, let's create and populate the graphic
# Start by destroying the existing control and creating a new one with the correct Canvas Size
self.graphic.Destroy()
self.graphic = GraphicsControlClass.GraphicsControl(self, -1, wx.Point(self.Bounds[0], self.Bounds[1]),
(self.Bounds[2] - self.Bounds[0], self.Bounds[3] - self.Bounds[1]),
(self.Bounds[2] - self.Bounds[0], newheight + 3),
passMouseEvents=True)
# Put the header information on the graphic.
self.graphic.SetFontColour("BLACK")
if 'wxMac' in wx.PlatformInfo:
self.graphic.SetFontSize(17)
else:
self.graphic.SetFontSize(14)
self.graphic.AddTextCentered("%s" % self.title, (self.Bounds[2] - self.Bounds[0]) / 2, 1)
if 'wxMac' in wx.PlatformInfo:
self.graphic.SetFontSize(13)
else:
self.graphic.SetFontSize(10)
if 'unicode' in wx.PlatformInfo:
# Encode with UTF-8 rather than TransanaGlobal.encoding because this is a prompt, not DB Data.
prompt = unicode(_('Series: %s'), 'utf8')
else:
prompt = _('Series: %s')
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddTextRight(prompt % self.seriesName, self.Bounds[2] - self.Bounds[0] - 2, 2)
else:
self.graphic.AddText(prompt % self.seriesName, 2, 2)
if self.configName != '':
if 'unicode' in wx.PlatformInfo:
# Encode with UTF-8 rather than TransanaGlobal.encoding because this is a prompt, not DB Data.
prompt = unicode(_('Filter Configuration: %s'), 'utf8')
else:
prompt = _('Filter Configuration: %s')
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddTextRight(prompt % self.configName, self.Bounds[2] - self.Bounds[0] - 2, 16)
else:
self.graphic.AddText(prompt % self.configName, 2, 16)
# Initialize a Line Counter, used for vertical positioning
Count = 0
# We'll also need a lookup table for vertical values.
yValLookup = {}
# Initialize the Episode Name / Keyword Lookup table. The multi-line Series Keyword Sequence Map gets a blank first line.
if (self.reportType == 1) and (not self.singleLineDisplay):
self.epNameKWGKWLookup = {0 : ('', '', '')}
else:
self.epNameKWGKWLookup = {}
# Now iterate through the Episode list, adding the Episode Names and (if appropriate) the Keywords as an axis label
for (episodeName, seriesName, episodeShown) in self.episodeList:
if episodeShown:
# Add the Episode Name to the vertical axis
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddTextRight("%s" % episodeName, self.Bounds[2] - self.Bounds[0] - 4, self.CalcY(Count) - 7)
else:
self.graphic.AddText("%s" % episodeName, 4, self.CalcY(Count) - 7)
# if Keyword Series Sequence Map in multi-line mode ...
if (self.reportType == 1) and (not self.singleLineDisplay):
# ... add a blank lookup line for the blank line, as this line gets no data for that report.
self.epNameKWGKWLookup[self.CalcY(Count-1) - int((self.barHeight + self.whitespaceHeight)/2)] = ('', '', '')
# We want Grid Lines in light gray
self.graphic.SetColour('LIGHT GREY')
# if Keyword Series Sequence Map in multi-line mode, we draw Grid Lines and add Keywords to the Vertical Axis.
if (self.reportType == 1) and (not self.singleLineDisplay):
# Draw the top Grid Line, if appropriate
if self.hGridLines:
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddLines([(self.Bounds[2] - self.Bounds[0] - 10, self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2),
self.CalcX(self.endTime), self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2))])
else:
self.graphic.AddLines([(10, self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2), self.CalcX(self.endTime), self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2))])
gridLineCount = Count
Count += 1
# Iterate through the Keyword List from the Filter Dialog ...
for KWG, KW in self.filteredKeywordList:
# ... and add the Keywords to the Vertical Axis.
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddTextRight("%s : %s" % (KWG, KW), self.Bounds[2] - self.Bounds[0] - 10, self.CalcY(Count) - 7)
else:
self.graphic.AddText("%s : %s" % (KWG, KW), 10, self.CalcY(Count) - 7)
# Add this data to the Y Position Lookup dictionary.
yValLookup[(episodeName, KWG, KW)] = Count
# Add a Lookup Line for this episodeName, Keyword Group, Keyword combination
self.epNameKWGKWLookup[self.CalcY(Count) - int((self.barHeight + self.whitespaceHeight)/2)] = (episodeName, KWG, KW)
# Add Horizontal Grid Lines, if appropriate
if self.hGridLines and ((Count - gridLineCount) % 2 == 0):
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddLines([(self.Bounds[2] - self.Bounds[0] - 10, self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2),
self.CalcX(self.endTime), self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2))])
else:
self.graphic.AddLines([(10, self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2), self.CalcX(self.endTime), self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2))])
# Increment the counter for each Keyword
Count = Count + 1
# If it's NOT the multi-line Sequence Map, the Gridline rules are different, but still need to be handled.
else:
# Add Horizontal Grid Lines, if appropriate
if self.hGridLines and (Count % 2 == 1):
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddLines([(self.Bounds[2] - self.Bounds[0] - 4, self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2),
self.CalcX(self.timelineMax), self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2))])
else:
self.graphic.AddLines([(4, self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2), self.CalcX(self.timelineMax), self.CalcY(Count) + 6 + int(self.whitespaceHeight / 2))])
# Add this data to the Y Position Lookup dictionary.
yValLookup[episodeName] = Count
# Increment the counter for each Episode. (This produces a blank line in the Sequence Map, which is OK.)
Count += 1
# If multi-line Sequence Report, we're building the Episode Name / Keyword Lookup table here, otherwise it's later.
if (self.reportType == 1) and (not self.singleLineDisplay):
# Finish with a blank Lookup Line so the bottom of the chart doesn't give false positive information
self.epNameKWGKWLookup[self.CalcY(Count-1) - int((self.barHeight + self.whitespaceHeight)/2)] = ('', '', '')
# Reset the graphic color following drawing the Grid Lines
self.graphic.SetColour("BLACK")
# After we have the axis values specified but before we draw anything else, we determine the amount the
# subsequent graphics must be indented to adjust for the size of the text labels.
self.graphicindent = self.graphic.GetMaxWidth(start=3)
# Draw the Graph Time Line
# For the Sequence Map, the timeline is startTime to endTime.
if (self.reportType == 1):
# If the Media Length is known, display the Time Line
if self.MediaLength > 0:
self.DrawTimeLine(self.startTime, self.endTime)
# For the Sequence Map, we need to know the maximum Episode time, which is already stored under self.endTime.
self.timelineMax = self.endTime
# For the Series Keyword Bar Graph and the Series Keyword Percentage Graph, we need to know the maximum coded
# time and the episode length for each Episode.
# For the Bar Graph, we use the longer of Episode Length or Total Episode Coded Time.
# For the Percentage Graph, we need to know total amount of coded video for each Episode
elif self.reportType in [2, 3]:
# Initialize the time line maximum variable
self.timelineMax = 0
# Create a dictionary to store the episode times.
episodeTimeTotals = {}
# Start by iterating through the Episode List ...
for (episodeName, seriesName, checked) in self.episodeList:
if checked:
# Initialize the Episode's length to 0
episodeTimeTotals[episodeName] = 0
# Iterate through the Keyword List
for (kwg, kw) in self.filteredKeywordList:
# Check to see if we have data for this keyword in this Episode.
if barData.has_key((episodeName, kwg, kw)):
# If so, add the time to the Episode's total time.
episodeTimeTotals[episodeName] += barData[(episodeName, kwg, kw)]
# If this Episode is the longest we've dealt with so far ...
if episodeTimeTotals[episodeName] > self.timelineMax:
# ... note the new time line maximum.
self.timelineMax = episodeTimeTotals[episodeName]
# If we are building the Bar Graph, ...
if self.reportType == 2:
# ... we need to adjust the timelineMax value for the length of the whole Episode, if it's larger.
self.timelineMax = max(self.timelineMax, self.episodeLengths[(episodeName, seriesName)])
# The Series Keyword Bar Graph extends from 0 to the timeLineMax value we just determined.
if self.reportType == 2:
self.DrawTimeLine(0, self.timelineMax)
# The Series Keyword Percentage Graph extends from 0% to 100%
elif (self.reportType == 3):
self.DrawTimeLine(0, 100)
# Add the top Horizontal Grid Line, if appropriate
if self.hGridLines:
# We want Grid Lines in light gray
self.graphic.SetColour('LIGHT GREY')
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddLines([(self.Bounds[2] - self.Bounds[0] - 4, self.CalcY(-1) + 6 + int(self.whitespaceHeight / 2),
self.CalcX(self.timelineMax), self.CalcY(-1) + 6 + int(self.whitespaceHeight / 2))])
else:
self.graphic.AddLines([(4, self.CalcY(-1) + 6 + int(self.whitespaceHeight / 2), self.CalcX(self.timelineMax), self.CalcY(-1) + 6 + int(self.whitespaceHeight / 2))])
# Select the color palate for colors or gray scale as appropriate
if self.colorOutput:
colorSet = TransanaGlobal.keywordMapColourSet
colorLookup = TransanaGlobal.transana_colorLookup
else:
colorSet = TransanaGlobal.keywordMapGraySet
colorLookup = TransanaGlobal.transana_grayLookup
# Set the colourIndex tracker to the last color used.
colourindex = self.keywordColors['lastColor']
# Iterate through the keyword list in order ...
for (KWG, KW) in self.filteredKeywordList:
# ... and assign colors to Keywords
# If we want COLOR output ...
if self.colorOutput:
# If the color is already defined ...
if self.keywordColors.has_key((KWG, KW)):
# ... get the index for the color
colourindex = self.keywordColors[(KWG, KW)]
# If the color has NOT been defined ...
else:
# Load the keyword
tmpKeyword = KeywordObject.Keyword(KWG, KW)
# If the Default Keyword Color is in the set of defined colors ...
if tmpKeyword.lineColorName in colorSet:
# ... define the color for this keyword
self.keywordColors[(KWG, KW)] = colorSet.index(tmpKeyword.lineColorName)
# If the Default Keyword Color is NOT in the defined colors ...
elif tmpKeyword.lineColorName != '':
# ... add the color name to the colorSet List
colorSet.append(tmpKeyword.lineColorName)
# ... add the color's definition to the colorLookup dictionary
colorLookup[tmpKeyword.lineColorName] = (int(tmpKeyword.lineColorDef[1:3], 16), int(tmpKeyword.lineColorDef[3:5], 16), int(tmpKeyword.lineColorDef[5:7], 16))
# ... determine the new color's index
colourindex = colorSet.index(tmpKeyword.lineColorName)
# ... define the new color for this keyword
self.keywordColors[(KWG, KW)] = colourindex
# If there is no Default Keyword Color defined
else:
# ... get the index for the next color in the color list
colourindex = self.keywordColors['lastColor'] + 1
# If we're at the end of the list ...
if colourindex > len(colorSet) - 1:
# ... reset the list to the beginning
colourindex = 0
# ... remember the color index used
self.keywordColors['lastColor'] = colourindex
# ... define the new color for this keyword
self.keywordColors[(KWG, KW)] = colourindex
# If we want Grayscale output ...
else:
# ... get the index for the next color in the color list
colourindex = self.keywordColors['lastColor'] + 1
# If we're at the end of the list ...
if colourindex > len(colorSet) - 1:
# ... reset the list to the beginning
colourindex = 0
# ... remember the color index used
self.keywordColors['lastColor'] = colourindex
# ... define the new color for this keyword
self.keywordColors[(KWG, KW)] = colourindex
# If we're producing a Series Keyword Sequence Map ..
if (self.reportType == 1):
# some clip boundary lines for overlapping clips can get over-written, depeding on the nature of the overlaps.
# Let's create a separate list of these lines, which we'll add to the END of the process so they can't get overwritten.
overlapLines = []
# Iterate through all the Clip/Keyword records in the Clip List ...
for (KWG, KW, Start, Stop, ClipNum, ClipName, CollectNum, episodeName, seriesName) in self.clipList:
# Check the clip against the Episode List, the Clip Filter List, the Snapshot Filter List, and the
# Keyword Filter list to see if it should be included in the report.
if ((episodeName, seriesName, True) in self.episodeList) and \
(((ClipName, CollectNum, True) in self.clipFilterList) or
((ClipName, CollectNum, True) in self.snapshotFilterList))and \
((KWG, KW) in self.filteredKeywordList):
# We compare the Clip's Start Time with the Map's boundaries. We only want the portion of the clip
# that falls within the Map's upper and lower boundaries.
if Start < self.startTime:
Start = self.startTime
if Start > self.endTime:
Start = self.endTime
if Stop > self.endTime:
Stop = self.endTime
if Stop < self.startTime:
Stop = self.startTime
# If Start and Stop match, the clip is off the Map and can be ignored. Otherwise ...
if Start != Stop:
# ... we start drawing the clip's bar by setting the bar thickness.
self.graphic.SetThickness(self.barHeight)
# Initialize a variable for building the line's data record
tempLine = []
# Determine the vertical placement of the line, which requires a different lookup key for the
# single-line report than the multi-line report.
if self.singleLineDisplay:
yPos = self.CalcY(yValLookup[episodeName])
else:
yPos = self.CalcY(yValLookup[(episodeName, KWG, KW)])
# Add the line data
tempLine.append((self.CalcX(Start), yPos, self.CalcX(Stop), yPos))
# Determine the appropriate color for the keyword
colourindex = self.keywordColors[(KWG, KW)]
# Tell the graph to use the selected color, using the appropriate lookup table
self.graphic.SetColour(colorLookup[colorSet[colourindex]])
# Add the line data to the graph
self.graphic.AddLines(tempLine)
# We need to track the bar positions so that the MouseOver can display data correctly. We need to do it
# later for the multi-line report, but here for the single-line report.
if self.singleLineDisplay:
# The first stage of the lookup is the Y-coordinate. If there's not already an
# EpisodeNameKeywordGroupKeywordLookup record for this Y-Coordinate ...
if not self.epNameKWGKWLookup.has_key(self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)):
# ... create an empty dictionary object for the first part of the Lookup Line
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)] = {}
# The second stage of the lookup is the X range in a tuple. If the X range isn't already in the dictionary,
# then add an empty List object for the X range.
if not self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)].has_key((self.CalcX(Start), self.CalcX(Stop))):
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)][(self.CalcX(Start), self.CalcX(Stop))] = []
# Add a Lookup Line for this Y-coordinate and X range containing the Episode Name, the keyword data,
# and the Clip Length.
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)][(self.CalcX(Start), self.CalcX(Stop))].append((episodeName, KWG, KW, Stop - Start))
if DEBUG and KWG == 'Transana Users' and KW == 'DavidW':
print "Looking at %s (%d)" % (ClipName, CollectNum)
# We need to indicate where there is overlap in this map.
# We use a different key to mark overlaps depending on whether we're in singleLineDisplay mode or not.
if self.singleLineDisplay:
overlapKey = (episodeName)
else:
overlapKey = (episodeName, KWG, KW)
# Now add the Clip to the keywordClipList. This holds all Keyword/Clip data in memory so it can be searched quickly
# This dictionary object uses the keyword pair as the key and holds a list of Clip data for all clips with that keyword.
# If the list for a given keyword already exists ...
if self.keywordClipList.has_key(overlapKey):
# Get the list of Clips that contain the current Keyword from the keyword / Clip List dictionary
overlapClips = self.keywordClipList[overlapKey]
# Iterate through the Clip List ...
for (objType, overlapStartTime, overlapEndTime, overlapClipNum, overlapClipName) in overlapClips:
# Let's look for overlap
overlapStart = Stop
overlapEnd = Start
if DEBUG and KWG == 'Transana Users' and KW == 'DavidW':
print "Start = %7d, overStart = %7s, Stop = %7s, overEnd = %7s" % (Start, overlapStartTime, Stop, overlapEndTime)
# Look for Start between overlapStartTime and overlapEndTime
if (Start >= overlapStartTime) and (Start < overlapEndTime):
overlapStart = Start
# Look for overlapStartTime between Start and Stop
if (overlapStartTime >= Start) and (overlapStartTime < Stop):
overlapStart = overlapStartTime
# Look for Stop between overlapStartTime and overlapEndTime
if (Stop > overlapStartTime) and (Stop <= overlapEndTime):
overlapEnd = Stop
# Look for overlapEndTime between Start and Stop
if (overlapEndTime > Start) and (overlapEndTime <= Stop):
overlapEnd = overlapEndTime
# If we've found an overlap, it will be indicated by Start being less than End!
if overlapStart < overlapEnd:
# Draw a multi-colored line to indicate overlap
overlapThickness = int(self.barHeight/ 3) + 1
self.graphic.SetThickness(overlapThickness)
if self.colorOutput:
self.graphic.SetColour("GREEN")
else:
self.graphic.SetColour("WHITE")
tempLine = [(self.CalcX(overlapStart), yPos, self.CalcX(overlapEnd), yPos)]
self.graphic.AddLines(tempLine)
if self.colorOutput:
self.graphic.SetColour("RED")
else:
self.graphic.SetColour("BLACK")
tempLine = [(self.CalcX(overlapStart), yPos - overlapThickness+1, self.CalcX(overlapEnd), yPos - overlapThickness+1)]
self.graphic.AddLines(tempLine)
if self.colorOutput:
self.graphic.SetColour("BLUE")
else:
self.graphic.SetColour("GRAY")
tempLine = [(self.CalcX(overlapStart), yPos + overlapThickness, self.CalcX(overlapEnd), yPos + overlapThickness)]
self.graphic.AddLines(tempLine)
# Let's remember the clip start and stop boundaries, to be drawn at the end so they won't get over-written
overlapLines.append(((self.CalcX(overlapStart), yPos - (self.barHeight / 2), self.CalcX(overlapStart), yPos + (self.barHeight / 2)),))
overlapLines.append(((self.CalcX(overlapEnd), yPos - (self.barHeight / 2), self.CalcX(overlapEnd), yPos + (self.barHeight / 2)),))
# ... add the new Clip to the Clip List
self.keywordClipList[overlapKey].append(('Clip', Start, Stop, ClipNum, ClipName))
# If there is no entry for the given keyword ...
else:
# ... create a List object with the first clip's data for this Keyword Pair key.
self.keywordClipList[overlapKey] = [('Clip', Start, Stop, ClipNum, ClipName)]
# Iterate through all the Snapshot/Keyword records in the Snapshot List ...
for (KWG, KW, Start, Stop, SnapshotNum, SnapshotName, CollectNum, episodeName, seriesName) in self.snapshotList:
# Check the clip against the Episode List, the Snapshot Filter List, and the Keyword Filter list to see if
# it should be included in the report.
if ((episodeName, seriesName, True) in self.episodeList) and \
((SnapshotName, CollectNum, True) in self.snapshotFilterList) and \
((KWG, KW) in self.filteredKeywordList):
# We compare the Snapshot's Start Time with the Map's boundaries. We only want the portion of the clip
# that falls within the Map's upper and lower boundaries.
if Start < self.startTime:
Start = self.startTime
if Start > self.endTime:
Start = self.endTime
if Stop > self.endTime:
Stop = self.endTime
if Stop < self.startTime:
Stop = self.startTime
# If Start and Stop match, the clip is off the Map and can be ignored. Otherwise ...
if Start != Stop:
# ... we start drawing the clip's bar by setting the bar thickness.
self.graphic.SetThickness(self.barHeight)
# Initialize a variable for building the line's data record
tempLine = []
# Determine the vertical placement of the line, which requires a different lookup key for the
# single-line report than the multi-line report.
if self.singleLineDisplay:
yPos = self.CalcY(yValLookup[episodeName])
else:
yPos = self.CalcY(yValLookup[(episodeName, KWG, KW)])
# Add the line data
tempLine.append((self.CalcX(Start), yPos, self.CalcX(Stop), yPos))
# Determine the appropriate color for the keyword
colourindex = self.keywordColors[(KWG, KW)]
# Tell the graph to use the selected color, using the appropriate lookup table
self.graphic.SetColour(colorLookup[colorSet[colourindex]])
# Add the line data to the graph
self.graphic.AddLines(tempLine)
# We need to track the bar positions so that the MouseOver can display data correctly. We need to do it
# later for the multi-line report, but here for the single-line report.
if self.singleLineDisplay:
# The first stage of the lookup is the Y-coordinate. If there's not already an
# EpisodeNameKeywordGroupKeywordLookup record for this Y-Coordinate ...
if not self.epNameKWGKWLookup.has_key(self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)):
# ... create an empty dictionary object for the first part of the Lookup Line
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)] = {}
# The second stage of the lookup is the X range in a tuple. If the X range isn't already in the dictionary,
# then add an empty List object for the X range.
if not self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)].has_key((self.CalcX(Start), self.CalcX(Stop))):
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)][(self.CalcX(Start), self.CalcX(Stop))] = []
# Add a Lookup Line for this Y-coordinate and X range containing the Episode Name, the keyword data,
# and the Clip Length.
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)][(self.CalcX(Start), self.CalcX(Stop))].append((episodeName, KWG, KW, Stop - Start))
if DEBUG and KWG == 'Transana Users' and KW == 'DavidW':
print "Looking at %s (%d)" % (SnapshotName, CollectNum)
# We need to indicate where there is overlap in this map.
# We use a different key to mark overlaps depending on whether we're in singleLineDisplay mode or not.
if self.singleLineDisplay:
overlapKey = (episodeName)
else:
overlapKey = (episodeName, KWG, KW)
# Now add the Clip to the keywordClipList. This holds all Keyword/Clip data in memory so it can be searched quickly
# This dictionary object uses the keyword pair as the key and holds a list of Clip data for all clips with that keyword.
# If the list for a given keyword already exists ...
if self.keywordClipList.has_key(overlapKey):
# Get the list of Clips that contain the current Keyword from the keyword / Clip List dictionary
overlapClips = self.keywordClipList[overlapKey]
# Iterate through the Clip List ...
for (objType, overlapStartTime, overlapEndTime, overlapClipNum, overlapClipName) in overlapClips:
# Let's look for overlap
overlapStart = Stop
overlapEnd = Start
if DEBUG and KWG == 'Transana Users' and KW == 'DavidW':
print "Start = %7d, overStart = %7s, Stop = %7s, overEnd = %7s" % (Start, overlapStartTime, Stop, overlapEndTime)
# Look for Start between overlapStartTime and overlapEndTime
if (Start >= overlapStartTime) and (Start < overlapEndTime):
overlapStart = Start
# Look for overlapStartTime between Start and Stop
if (overlapStartTime >= Start) and (overlapStartTime < Stop):
overlapStart = overlapStartTime
# Look for Stop between overlapStartTime and overlapEndTime
if (Stop > overlapStartTime) and (Stop <= overlapEndTime):
overlapEnd = Stop
# Look for overlapEndTime between Start and Stop
if (overlapEndTime > Start) and (overlapEndTime <= Stop):
overlapEnd = overlapEndTime
# If we've found an overlap, it will be indicated by Start being less than End!
if overlapStart < overlapEnd:
# Draw a multi-colored line to indicate overlap
overlapThickness = int(self.barHeight/ 3) + 1
self.graphic.SetThickness(overlapThickness)
if self.colorOutput:
self.graphic.SetColour("GREEN")
else:
self.graphic.SetColour("WHITE")
tempLine = [(self.CalcX(overlapStart), yPos, self.CalcX(overlapEnd), yPos)]
self.graphic.AddLines(tempLine)
if self.colorOutput:
self.graphic.SetColour("RED")
else:
self.graphic.SetColour("BLACK")
tempLine = [(self.CalcX(overlapStart), yPos - overlapThickness+1, self.CalcX(overlapEnd), yPos - overlapThickness+1)]
self.graphic.AddLines(tempLine)
if self.colorOutput:
self.graphic.SetColour("BLUE")
else:
self.graphic.SetColour("GRAY")
tempLine = [(self.CalcX(overlapStart), yPos + overlapThickness, self.CalcX(overlapEnd), yPos + overlapThickness)]
self.graphic.AddLines(tempLine)
# Let's remember the clip start and stop boundaries, to be drawn at the end so they won't get over-written
overlapLines.append(((self.CalcX(overlapStart), yPos - (self.barHeight / 2), self.CalcX(overlapStart), yPos + (self.barHeight / 2)),))
overlapLines.append(((self.CalcX(overlapEnd), yPos - (self.barHeight / 2), self.CalcX(overlapEnd), yPos + (self.barHeight / 2)),))
# ... add the new Clip to the Clip List
self.keywordClipList[overlapKey].append(('Snapshot', Start, Stop, SnapshotNum, SnapshotName))
# If there is no entry for the given keyword ...
else:
# ... create a List object with the first clip's data for this Keyword Pair key.
self.keywordClipList[overlapKey] = [('Snapshot', Start, Stop, SnapshotNum, SnapshotName)]
# For the single-line display only ...
if self.singleLineDisplay:
# ... finish with a blank Lookup Line so the bottom of the chart doesn't give false positive information
self.epNameKWGKWLookup[self.CalcY(self.episodeCount) - int((self.barHeight + self.whitespaceHeight)/2)] = {}
self.epNameKWGKWLookup[self.CalcY(self.episodeCount) - int((self.barHeight + self.whitespaceHeight)/2)][(0, self.timelineMax)] = [('', '', '', 0)]
# let's add the overlap boundary lines now
self.graphic.SetThickness(1)
self.graphic.SetColour("BLACK")
for tempLine in overlapLines:
self.graphic.AddLines(tempLine)
if not '__WXMAC__' in wx.PlatformInfo:
self.menuFile.Enable(M_FILE_SAVEAS, True)
# We can't enable Print Preview for Right-To-Left languages
if not (TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft):
self.menuFile.Enable(M_FILE_PRINTPREVIEW, True)
self.menuFile.Enable(M_FILE_PRINT, True)
# For the Series Keyword Bar Graph and the Series keyword Percentage Graph, which are VERY similar and therefore use the same
# infrastructure ...
elif self.reportType in [2, 3]:
# ... we first iterate through all the Episodes in the Episode List ...
for (episodeName, seriesName, checked) in self.episodeList:
# .. and check to see if the Episode should be included.
if checked:
# These graphs are cumulative bar charts. We need to track the starting place for the next bar.
barStart = 0
# Create the first part of the Lookup Line, an empty dictionary for the Y coordinate
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)] = {}
# Now we iterate through the Filtered Keyword List. (This gives us the correct presentation ORDER for the bars!)
for (kwg, kw) in self.filteredKeywordList:
# Now we check to see if there's DATA for this Episode / Keyword combination.
if barData.has_key((episodeName, kwg, kw)):
# Start by setting the bar thickness
self.graphic.SetThickness(self.barHeight)
# Initialize a temporary list for accumulating Bar data (not really necessary with this structure, but no harm done.)
tempLine = []
# If we're drawing the Series Keyword Bar Graph ...
if self.reportType == 2:
# ... the bar starts at the unadjusted BarStart position ...
xStart = self.CalcX(barStart)
# ... and ends at the start plus the width of the bar!
xEnd = self.CalcX(barStart + barData[(episodeName, kwg, kw)])
# The mouseover for this report is the unadjusted length of the bar
lookupVal = barData[(episodeName, kwg, kw)]
# If we're drawing the Series Keyword Percentage Graph ...
elif self.reportType == 3:
# This should just be a matter of adjusting barData for episodeTimeTotals[episodeName], which is the total
# coded time for each Episode.
# ... the bar starts at the adjusted BarStart position ...
xStart = self.CalcX(barStart * 100.0 / episodeTimeTotals[episodeName])
# ... and ends at the adjusted (start plus the width of the bar)!
xEnd = self.CalcX((barStart + barData[(episodeName, kwg, kw)]) * 100.0 / episodeTimeTotals[episodeName])
# The mouseover for this report is the adjusted length of the bar, which is the percentage value for the bar!
lookupVal = barData[(episodeName, kwg, kw)] * 100.0 / episodeTimeTotals[episodeName]
# Build the line to be displayed based on these calculated values
tempLine.append((xStart, self.CalcY(yValLookup[episodeName]), xEnd, self.CalcY(yValLookup[episodeName])))
# Determine the index for this Keyword's Color
colourindex = self.keywordColors[(kwg, kw)]
# Tell the graph to use the selected color, using the appropriate lookup table
self.graphic.SetColour(colorLookup[colorSet[colourindex]])
# Actually add the line to the graph's data structure
self.graphic.AddLines(tempLine)
# Add a Lookup Line for this Y-coordinate and X range containing the Episode Name, the keyword data,
# and the Clip 's Lookup Value determined above. Note that this is a bit simpler than for the Sequence Map
# because we don't have to worry about overlaps. Thus, the lookup value can just be a tuple instead of having
# to be a list of tuples to accomodate overlapping clip/keyword values.
if (TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft):
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)][(self.Bounds[2] - self.Bounds[0] - xStart, self.Bounds[2] - self.Bounds[0] - xEnd)] = \
(episodeName, kwg, kw, lookupVal)
else:
self.epNameKWGKWLookup[self.CalcY(yValLookup[episodeName]) - int((self.barHeight + self.whitespaceHeight)/2)][(xStart, xEnd)] = (episodeName, kwg, kw, lookupVal)
# The next bar should start where this bar ends. No need to adjust for the Percentage Graph -- that's handled
# when actually placing the bars.
barStart += barData[(episodeName, kwg, kw)]
# Finish with a blank Lookup Line so the bottom of the chart doesn't give false positive information
self.epNameKWGKWLookup[self.CalcY(self.episodeCount) - int((self.barHeight + self.whitespaceHeight)/2)] = {}
self.epNameKWGKWLookup[self.CalcY(self.episodeCount) - int((self.barHeight + self.whitespaceHeight)/2)][(0, self.timelineMax)] = ('', '', '', 0)
# Enable tracking of mouse movement over the graphic
self.graphic.Bind(wx.EVT_MOTION, self.OnMouseMotion)
# Add Legend. The multi-line Series Keyword Sequence Map doesn't get a legend, nor does any report where the showLegend option
# is turned off.
if (((self.reportType == 1) and self.singleLineDisplay) or (self.reportType in [2, 3])) and self.showLegend:
# Skip two lines from the bottom of the report.
Count +=2
# Let's place the legend at 1/3 of the way across the report horizontally.
startX = int((self.Bounds[2] - self.Bounds[0]) / 3.0)
# Let's place the legend below the report content.
startY = self.CalcY(Count)
# To draw a box around the legend, we'll need to track it's end coordinates too.
endX = startX
endY = startY
# For GetTextExtent to work right, we have to make sure the font is set in the graphic context.
# First, define a font for the current font settings
font = wx.Font(self.graphic.fontsize, self.graphic.fontfamily, self.graphic.fontstyle, self.graphic.fontweight)
# Set the font for the graphics context
self.graphic.SetFont(font)
# Add a label for the legend
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
self.graphic.AddTextRight(_("Legend:"), self.Bounds[2] - self.Bounds[0] - startX, self.CalcY(Count - 1) - 7)
else:
self.graphic.AddText(_("Legend:"), startX, self.CalcY(Count - 1) - 7)
endX = startX + 14 + self.graphic.GetTextExtent(_("Legend:"))[0]
# We'll use a 14 x 12 block to show color. Set the line thickness
self.graphic.SetThickness(12)
# Iterate through teh filtered keyword list (which gives the sorted keyword list) ...
for (kwg, kw) in self.filteredKeywordList:
# Determine the color index for this keyword
colourindex = self.keywordColors[(kwg, kw)]
# Set the color of the line, using the color lookup for the appropriate color set
self.graphic.SetColour(colorLookup[colorSet[colourindex]])
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
# Add the color box to the graphic
self.graphic.AddLines([(self.Bounds[2] - self.Bounds[0] - startX, self.CalcY(Count), self.Bounds[2] - self.Bounds[0] - (startX + 14), self.CalcY(Count) + 14)])
# Add the text associating the keyword with the colored line we just created
self.graphic.AddTextRight("%s : %s" % (kwg, kw), self.Bounds[2] - self.Bounds[0] - startX + 12, self.CalcY(Count) - 7)
else:
# Add the color box to the graphic
self.graphic.AddLines([(startX, self.CalcY(Count), startX + 14, self.CalcY(Count) + 14)])
# Add the text associating the keyword with the colored line we just created
self.graphic.AddText("%s : %s" % (kwg, kw), startX + 18, self.CalcY(Count) - 7)
# If the new text extends past the current right-hand boundary ...
if endX < startX + 14 + self.graphic.GetTextExtent("%s : %s" % (kwg, kw))[0]:
# ... note the new right-hand boundary for the box that outlines the legend
endX = startX + 14 + self.graphic.GetTextExtent("%s : %s" % (kwg, kw))[0]
# Note the new bottom boundary for the box that outlines the legend
endY = self.CalcY(Count) + 14
# Increment the line counter
Count += 1
# Set the line color to black and the line thickness to 1 for the legend bounding box
self.graphic.SetColour("BLACK")
self.graphic.SetThickness(1)
# If we are in a Right-To-Left Language ...
if TransanaGlobal.configData.LayoutDirection == wx.Layout_RightToLeft:
# Draw the legend bounding box, based on the dimensions we've been tracking.
self.graphic.AddLines([(self.Bounds[2] - self.Bounds[0] - (startX - 6), startY - 24, self.Bounds[2] - self.Bounds[0] - (endX + 6), startY - 24),
(self.Bounds[2] - self.Bounds[0] - (endX + 6), startY - 24, self.Bounds[2] - self.Bounds[0] - (endX + 6), endY - 4),
(self.Bounds[2] - self.Bounds[0] - (endX + 6), endY - 4, self.Bounds[2] - self.Bounds[0] - (startX - 6), endY - 4),
(self.Bounds[2] - self.Bounds[0] - (startX - 6), endY - 4, self.Bounds[2] - self.Bounds[0] - (startX - 6), startY - 24)])
else:
# Draw the legend bounding box, based on the dimensions we've been tracking.
self.graphic.AddLines([(startX - 6, startY - 24, endX + 6, startY - 24), (endX + 6, startY - 24, endX + 6, endY - 4),
(endX + 6, endY - 4, startX - 6, endY - 4), (startX - 6, endY - 4, startX - 6, startY - 24)])
def DrawTimeLine(self, startVal, endVal):
    """ Draw the time line on the Series Map graphic.

        Draws the horizontal time axis above the data area (at logical row
        Y = -2), tick marks and labels at the start, end, and intermediate
        scale positions, and optional vertical grid lines down the chart.

        @param startVal: value at the left edge of the time line (ms, or 0
                         for the Percentage Graph)
        @param endVal:   value at the right edge of the time line (ms, or
                         100 for the Percentage Graph)
    """
    # Set the line thickness to 3 for the main axis line
    self.graphic.SetThickness(3)
    # Add a horizontal line from X = start to end at Y = -2, which will be above the data area of the graph
    self.graphic.AddLines([(self.CalcX(startVal), self.CalcY(-2), self.CalcX(endVal), self.CalcY(-2))])
    # Add Time markers (thin lines, small font)
    self.graphic.SetThickness(1)
    if 'wxMac' in wx.PlatformInfo:
        self.graphic.SetFontSize(11)
    else:
        self.graphic.SetFontSize(8)
    # Add the starting point
    X = startVal
    # Add the line indicator (short vertical tick just below the axis)
    self.graphic.AddLines([(self.CalcX(X), self.CalcY(-2) + 1, self.CalcX(X), self.CalcY(-2) + 6)])
    # The Percentage Graph (reportType 3) needs a Percent label.  Otherwise, convert to a time representation.
    if self.reportType == 3:
        XLabel = "%d%%" % X
    else:
        XLabel = Misc.TimeMsToStr(X)
    # Add the time label.
    self.graphic.AddTextCentered(XLabel, self.CalcX(X), self.CalcY(-2) + 5)
    # Add the ending point
    X = endVal
    # Add the line indicator
    self.graphic.AddLines([(self.CalcX(X), self.CalcY(-2) + 1, self.CalcX(X), self.CalcY(-2) + 6)])
    # The Percentage Graph needs a Percent label.  Otherwise, convert to a time representation.
    if self.reportType == 3:
        XLabel = "%d%%" % X
    else:
        XLabel = Misc.TimeMsToStr(X)
    # Add the time label.
    self.graphic.AddTextCentered(XLabel, self.CalcX(X), self.CalcY(-2) + 5)
    # Add the first and last Vertical Grid Lines, if appropriate
    if self.vGridLines:
        # Determine how far down on the graph the vertical axis lines should go.
        # The Sequence Map (reportType 1) reserves space at the bottom; the bar graphs stop at the last Episode row.
        if self.reportType == 1:
            vGridBottom = self.graphic.canvassize[1] - (int(1.75 * max(12, self.barHeight)) + self.whitespaceHeight)
        else:
            vGridBottom = self.CalcY(self.episodeCount - 1) + 7 + int(self.whitespaceHeight / 2)
        # We want Grid Lines in light gray
        self.graphic.SetColour('LIGHT GREY')
        # Add the line for the Start Value
        # NOTE(review): this uses self.startTime while the end line below uses the endVal
        # parameter -- looks inconsistent (startVal would be the symmetric choice).  Confirm
        # whether self.startTime and startVal can ever differ (e.g. on the Percentage Graph).
        self.graphic.AddLines([(self.CalcX(self.startTime), self.CalcY(0) - 6 - int(self.whitespaceHeight / 2), self.CalcX(self.startTime), vGridBottom)])
        # Add the line for the End Value
        self.graphic.AddLines([(self.CalcX(endVal), self.CalcY(0) - 6 - int(self.whitespaceHeight / 2), self.CalcX(endVal), vGridBottom)])
        # Reset the graphic color following drawing the Grid Lines
        self.graphic.SetColour("BLACK")
    # Determine the frequency of scale marks for the time line.
    # If we're showing the Percentage Graph ...
    if self.reportType == 3:
        # We'll use marks at every 20%
        numMarks = 5
        interval = 20.0
    # Otherwise ...
    else:
        # We'll use the same logic as the Visualization's Time Axis
        (numMarks, interval) = self.GetScaleIncrements(endVal - startVal)
    # using the incrementation values we just determined ...
    for loop in range(1, numMarks):
        # ... add the intermediate time marks
        X = int(round(float(loop) * interval) + startVal)
        # Add the line indicator
        self.graphic.AddLines([(self.CalcX(X), self.CalcY(-2) + 1, self.CalcX(X), self.CalcY(-2) + 6)])
        # The Percentage Graph needs a Percent label.  Otherwise, convert to a time representation.
        if self.reportType == 3:
            XLabel = "%d%%" % X
        else:
            XLabel = Misc.TimeMsToStr(X)
        # Add the time label.
        self.graphic.AddTextCentered(XLabel, self.CalcX(X), self.CalcY(-2) + 5)
        # Add Vertical Grid Lines, if appropriate
        # (vGridBottom was computed above under the same self.vGridLines flag, so it is defined here.)
        if self.vGridLines:
            # We want Grid Lines in light gray
            self.graphic.SetColour('LIGHT GREY')
            # Add the Vertical Grid Line
            self.graphic.AddLines([(self.CalcX(X), self.CalcY(0) - 6 - int(self.whitespaceHeight / 2), self.CalcX(X), vGridBottom)])
            # Reset the graphic color following drawing the Grid Lines
            self.graphic.SetColour("BLACK")
def GetKeywordCount(self):
    """ Return a tuple of (number of keywords in the filtered Keyword List,
        pixel height of the resulting image). """
    # Each keyword occupies one bar plus the whitespace below it;
    # 4 extra pixels pad the overall image height.
    kwCount = len(self.filteredKeywordList)
    imageHeight = kwCount * (self.barHeight + self.whitespaceHeight) + 4
    return (kwCount, imageHeight)
def OnMouseMotion(self, event):
    """ Process the movement of the mouse over the Series Map.

        Updates the status bar text and the graphic's tooltip to describe the
        Episode / Keyword / Clip data under the mouse pointer, for all three
        report types.  (Python 2 code: relies on dict.has_key(), unicode(),
        and in-place sorting of dict.keys().)
    """
    # Get the mouse's current position
    x = event.GetX()
    y = event.GetY()
    # For the Series Keyword Sequence Map ...
    if (self.reportType == 1):
        # Based on the mouse position, determine the time in the video timeline
        time = self.FindTime(x)
        # Based on the mouse position, determine what keyword is being pointed to.
        # We use a different key to mark overlaps depending on whether we're in singleLineDisplay mode or not.
        overlapKey = self.FindKeyword(y)
        # First, let's make sure we're actually on the data portion of the graph
        if (time > 0) and (time < self.MediaLength) and (overlapKey != None) and (overlapKey != '') and (overlapKey != ('', '', '')):
            # In single-line mode the key is just the Episode name ...
            if self.singleLineDisplay:
                if 'unicode' in wx.PlatformInfo:
                    # Encode with UTF-8 rather than TransanaGlobal.encoding because this is a prompt, not DB Data.
                    prompt = unicode(_("Episode: %s, Time: %s"), 'utf8')
                else:
                    prompt = _("Episode: %s, Time: %s")
                # Set the Status Text to indicate the current Episode value
                self.SetStatusText(prompt % (overlapKey, Misc.time_in_ms_to_str(time)))
            # ... otherwise it is an (Episode, Keyword Group, Keyword) tuple.
            else:
                if 'unicode' in wx.PlatformInfo:
                    # Encode with UTF-8 rather than TransanaGlobal.encoding because this is a prompt, not DB Data.
                    prompt = unicode(_("Episode: %s, Keyword: %s : %s, Time: %s"), 'utf8')
                else:
                    prompt = _("Episode: %s, Keyword: %s : %s, Time: %s")
                # Set the Status Text to indicate the current Keyword and Time values
                self.SetStatusText(prompt % (overlapKey[0], overlapKey[1], overlapKey[2], Misc.time_in_ms_to_str(time)))
            # Only build a tooltip if we actually have Clip data for this key
            if (self.keywordClipList.has_key(overlapKey)):
                # initialize the string that will hold the names of clips being pointed to
                clipNames = ''
                # Get the list of Clips that contain the current Keyword from the keyword / Clip List dictionary
                clips = self.keywordClipList[overlapKey]
                # For the single-line display ...
                if self.singleLineDisplay:
                    # Initialize a string for the popup to show
                    clipNames = ''
                    currentRow = None
                    # Get a list of the Lookup dictionary keys.  These keys are top Y-coordinate values
                    keyvals = self.epNameKWGKWLookup.keys()
                    # Sort the keys (in-place; keys() returns a list under Python 2)
                    keyvals.sort()
                    # Iterate through the keys
                    for yVal in keyvals:
                        # We need the largest key value that doesn't exceed the Mouse's Y coordinate
                        if yVal < y:
                            currentRow = self.epNameKWGKWLookup[yVal]
                        # Once the key val exceeds the Mouse position, we can stop looking.
                        else:
                            break
                    # Initialize the Episode Name, Keyword Group, and Keyword variables.
                    epName = KWG = KW = ''
                    # If we have a data record to look at ...
                    if currentRow != None:
                        # Iterate through all the second-level lookup keys, the X ranges ...
                        for key in currentRow.keys():
                            # If the horizontal mouse coordinate falls in the X range of a record ...
                            if (x >= key[0]) and (x < key[1]):
                                # ... iterate through the records (a list, because bars can overlap) ...
                                for clipKWRec in currentRow[key]:
                                    # ... extract the Lookup data for the record ...
                                    (epName, KWG, KW, length) = clipKWRec
                                    # ... if it's not the first record in the list, add a comma separator ...
                                    if clipNames != '':
                                        clipNames += ', '
                                    # ... and add the lookup data to the mouseover text string variable
                                    clipNames += "%s : %s (%s)" % (KWG, KW, Misc.time_in_ms_to_str(length))
                # If we have the Series Keyword Sequence Map multi-line display ...
                else:
                    # Iterate through the Clip List ...
                    for (objType, startTime, endTime, clipNum, clipName) in clips:
                        # If the current Time value falls between the Clip's StartTime and EndTime ...
                        if (startTime < time) and (endTime > time):
                            # ... calculate the length of the Clip ...
                            clipLen = endTime - startTime
                            # ... and add the Clip Name and Length to the list of Clips with this Keyword at this Time
                            # First, see if the list is empty.
                            if clipNames == '':
                                # If so, just add the keyword name and time
                                clipNames = "%s (%s)" % (clipName, Misc.time_in_ms_to_str(clipLen))
                            else:
                                # ... add the keyword to the end of the list
                                clipNames += ', ' + "%s (%s)" % (clipName, Misc.time_in_ms_to_str(clipLen))
                # If any clips are found for the current mouse position ...
                if (clipNames != ''):
                    # ... add the Clip Names to the ToolTip so they will show up on screen as a hint
                    self.graphic.SetToolTipString(clipNames)
        else:
            # ... set the status text to a blank
            self.SetStatusText('')
    # The Series Keyword Bar Graph and the Series Keyword Percentage Graph both work the same way
    elif self.reportType in [2, 3]:
        # Initialize the current Row to None, in case we don't find data under the cursor
        currentRow = None
        # Get a list of the Lookup dictionary keys.  These keys are top Y-coordinate values
        keyvals = self.epNameKWGKWLookup.keys()
        # Sort the keys
        keyvals.sort()
        # Iterate through the keys
        for yVal in keyvals:
            # We need the largest key value that doesn't exceed the Mouse's Y coordinate
            if yVal < y:
                currentRow = self.epNameKWGKWLookup[yVal]
            # Once the key val exceeds the Mouse position, we can stop looking.
            else:
                break
        # Initialize the Episode Name, Keyword Group, and Keyword variables.
        epName = KWG = KW = ''
        # If we have a data record to look at ...
        if currentRow != None:
            # Iterate through all the second-level lookup keys, the X ranges ...
            for key in currentRow.keys():
                # If the horizontal mouse coordinate falls in the X range of a record ...
                if (x >= key[0]) and (x < key[1]):
                    # ... extract the Lookup data for the record.  There aren't overlapping records to deal with here.
                    (epName, KWG, KW, length) = currentRow[key]
        # If a data record was found ...
        if KWG != '':
            if 'unicode' in wx.PlatformInfo:
                # Encode with UTF-8 rather than TransanaGlobal.encoding because this is a prompt, not DB Data.
                prompt = unicode(_("Episode: %s, Keyword: %s : %s"), 'utf8')
            else:
                prompt = _("Episode: %s, Keyword: %s : %s")
            # ... set the Status bar text:
            self.SetStatusText(prompt % (epName, KWG, KW))
            # If we have a Series Keyword Bar Graph ...
            if self.reportType == 2:
                # ... report Keyword info and Clip Length.
                self.graphic.SetToolTipString("%s : %s (%s)" % (KWG, KW, Misc.time_in_ms_to_str(length)))
            # If we have a Series Keyword Percentage Graph ...
            elif self.reportType == 3:
                # ... report Keyword and Percentage information (length holds a percentage here)
                self.graphic.SetToolTipString("%s : %s (%3.1f%%)" % (KWG, KW, length))
        # If we've got no data ...
        else:
            # ... reflect that in the Status Text.
            self.SetStatusText('')
def OnLeftDown(self, event):
    """ Handle the Left Mouse Button Down event.

        No local processing is needed; the event is simply allowed to
        propagate to the parent control. """
    event.Skip()
def OnLeftUp(self, event):
    """ Left Mouse Button Up event.  Triggers the load of a Clip.

        Only the Series Keyword Sequence Map (reportType 1) defines click
        behavior.  Collects all Clips under the click position; loads the
        single match directly, or asks the user to choose among several.
        (Python 2 code: dict.has_key(), unicode(), and keys()[0] indexing.)
    """
    # Note if the Control key is pressed
    ctrlPressed = wx.GetKeyState(wx.WXK_CONTROL)
    # Pass the event to the parent
    event.Skip()
    # Get the mouse's current position
    x = event.GetX()
    y = event.GetY()
    # Based on the mouse position, determine the time in the video timeline
    time = self.FindTime(x)
    # Based on the mouse position, determine what keyword is being pointed to
    kw = self.FindKeyword(y)
    # Create an empty Dictionary Object for tracking Clip data (name -> (objType, clipNum))
    clipNames = {}
    # First, let's make sure we're actually on the data portion of the graph
    if (time > 0) and (time < self.MediaLength) and (kw != None) and (self.keywordClipList.has_key(kw)):
        # If we have a Series Keyword Sequence Map ...
        # (The Bar Graph and Percentage Graph do not have defined Click behaviors!)
        if self.reportType == 1:
            if 'unicode' in wx.PlatformInfo:
                # Encode with UTF-8 rather than TransanaGlobal.encoding because this is a prompt, not DB Data.
                prompt = unicode(_("Episode: %s, Keyword: %s : %s, Time: %s"), 'utf8')
            else:
                prompt = _("Episode: %s, Keyword: %s : %s, Time: %s")
            # Set the Status Text to indicate the current Keyword and Time values
            self.SetStatusText(prompt % (kw[0], kw[1], kw[2], Misc.time_in_ms_to_str(time)))
            # Get the list of Clips that contain the current Keyword from the keyword / Clip List dictionary
            clips = self.keywordClipList[kw]
            # Iterate through the Clip List ...
            for (objType, startTime, endTime, clipNum, clipName) in clips:
                # If the current Time value falls between the Clip's StartTime and EndTime ...
                if (startTime <= time) and (endTime >= time):
                    # Check to see if this is a duplicate Clip
                    # NOTE(review): clipNames values are (objType, clipNum) tuples, so the
                    # "!= clipNum" comparison below compares a tuple to an int and looks like
                    # it is always True -- confirm whether that is the intended behavior.
                    if clipNames.has_key(clipName) and (clipNames[clipName] != clipNum):
                        # If so, we need to count the number of duplicates.
                        # NOTE:  This is not perfect.  If the Clip Name is a shorter version of another
                        # Clip Name, the count will be too high.
                        tmpList = clipNames.keys()
                        # Initialize the counter to 1 so our end number will be 1 higher than the number counted
                        cnt = 1
                        # iterate through the list
                        for cl in tmpList:
                            # If we have a match (substring search, hence the imprecision noted above) ...
                            if cl.find(clipName) > -1:
                                # ... increment the counter
                                cnt += 1
                        # Add the clipname and counter to the Clip Names dictionary
                        clipNames["%s (%d)" % (clipName, cnt)] = (objType, clipNum)
                    else:
                        # Add the Clip Name as a Dictionary key pointing to the Clip Number
                        clipNames[clipName] = (objType, clipNum)
            # If only 1 Item is found ...
            if len(clipNames) == 1:
                # ... load that clip by looking up the clip's number (keys()[0] is Python-2-only)
                self.parent.KeywordMapLoadItem(clipNames[clipNames.keys()[0]][0], clipNames[clipNames.keys()[0]][1], ctrlPressed)
                # If left-click, close the Series Map.  If not, don't!
                if event.LeftUp():
                    # Close the Series Map
                    self.CloseWindow(event)
            # If more than one Clips are found ..
            elif len(clipNames) > 1:
                # Use a wx.SingleChoiceDialog to allow the user to make the choice between multiple clips here.
                dlg = wx.SingleChoiceDialog(self, _("Which Clip would you like to load?"), _("Select a Clip"),
                                            clipNames.keys(), wx.CHOICEDLG_STYLE)
                # If the user selects an Item and click OK ...
                if dlg.ShowModal() == wx.ID_OK:
                    # ... load the selected clip
                    self.parent.KeywordMapLoadItem(clipNames[dlg.GetStringSelection()][0], clipNames[dlg.GetStringSelection()][1], ctrlPressed)
                    # Destroy the SingleChoiceDialog
                    dlg.Destroy()
                    # If left-click, close the Series Map.  If not, don't!
                    if event.LeftUp():
                        # Close the Series Map
                        self.CloseWindow(event)
                # If the user selects Cancel ...
                else:
                    # ... destroy the SingleChoiceDialog, but that's all
                    dlg.Destroy()
| codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Bibauthorid Web Interface Logic and URL handler. """
# pylint: disable=W0105
# pylint: disable=C0301
# pylint: disable=W0613
from cgi import escape
from pprint import pformat
from operator import itemgetter
import re
try:
from invenio.jsonutils import json, json_unicode_to_utf8, CFG_JSON_AVAILABLE
except ImportError:
CFG_JSON_AVAILABLE = False
json = None
from invenio.bibauthorid_webapi import add_cname_to_hepname_record
from invenio.config import CFG_SITE_URL, CFG_BASE_URL
from invenio.bibauthorid_config import AID_ENABLED, PERSON_SEARCH_RESULTS_SHOW_PAPERS_PERSON_LIMIT, \
BIBAUTHORID_UI_SKIP_ARXIV_STUB_PAGE, VALID_EXPORT_FILTERS, PERSONS_PER_PAGE, \
MAX_NUM_SHOW_PAPERS
from invenio.config import CFG_SITE_LANG, CFG_SITE_URL, CFG_SITE_NAME, CFG_INSPIRE_SITE, CFG_SITE_SECURE_URL
from invenio.bibauthorid_name_utils import most_relevant_name
from invenio.webpage import page, pageheaderonly, pagefooteronly
from invenio.messages import gettext_set_language # , wash_language
from invenio.template import load
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.session import get_session
from invenio.urlutils import redirect_to_url, get_canonical_and_alternates_urls
from invenio.webuser import (getUid,
page_not_authorized,
collect_user_info,
set_user_preferences,
get_user_preferences,
email_valid_p,
emailUnique,
get_email_from_username,
get_uid_from_email,
isGuestUser)
from invenio.access_control_admin import acc_get_user_roles
from invenio.search_engine import perform_request_search
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibauthorid_config import CREATE_NEW_PERSON
import invenio.webinterface_handler_config as apache
import invenio.webauthorprofile_interface as webauthorapi
import invenio.bibauthorid_webapi as webapi
from invenio.bibauthorid_general_utils import get_title_of_doi, get_title_of_arxiv_pubid, is_valid_orcid
from invenio.bibauthorid_backinterface import update_external_ids_of_authors, get_orcid_id_of_author, \
get_validated_request_tickets_for_author, get_title_of_paper, get_claimed_papers_of_author
from invenio.bibauthorid_dbinterface import defaultdict, remove_arxiv_papers_of_author
from invenio.webauthorprofile_orcidutils import get_dois_from_orcid
from invenio.bibauthorid_webauthorprofileinterface import is_valid_canonical_id, get_person_id_from_canonical_id, \
get_person_redirect_link, author_has_papers
from invenio.bibauthorid_templates import WebProfileMenu, WebProfilePage
# Imports related to hepnames update form
from invenio.bibedit_utils import get_bibrecord
from invenio.bibrecord import record_get_field_value, record_get_field_values, \
record_get_field_instances, field_get_subfield_values
# Module-level template object shared by all page handlers in this module.
TEMPLATE = load('bibauthorid')
class WebInterfaceBibAuthorIDClaimPages(WebInterfaceDirectory):
'''
Handles /author/claim pages and AJAX requests.
Supplies the methods:
/author/claim/<string>
/author/claim/action
/author/claim/claimstub
/author/claim/export
/author/claim/generate_autoclaim_data
/author/claim/merge_profiles_ajax
/author/claim/search_box_ajax
/author/claim/tickets_admin
/author/claim/search
'''
_exports = ['',
'action',
'claimstub',
'export',
'generate_autoclaim_data',
'merge_profiles_ajax',
'search_box_ajax',
'tickets_admin'
]
def _lookup(self, component, path):
    '''
    Resolve dynamic URL components:
    - /author/profile/1332 shows the page of author with id: 1332
    - /author/profile/100:5522,1431 shows the page of the author
    identified by the bibrefrec: '100:5522,1431'
    '''
    # Statically exported names are dispatched through the normal handler
    # machinery; anything else is treated as an author identifier and
    # handed to a fresh handler instance.
    if component not in self._exports:
        return WebInterfaceBibAuthorIDClaimPages(component), path
def _is_profile_owner(self, pid):
    '''Return True when *pid* names the author profile owned by this handler.'''
    # pid may arrive as a string from the session; normalize before comparing.
    return int(pid) == self.person_id
def _is_admin(self, pinfo):
    '''Return True when the session person-info dict carries admin privileges.'''
    return 'admin' == pinfo['ulevel']
def __init__(self, identifier=None):
    '''
    Constructor of the web interface.

    @param identifier: identifier of an author. Can be one of:
        - an author id: e.g. "14"
        - a canonical id: e.g. "J.R.Ellis.1"
        - a bibrefrec: e.g. "100:1442,155"
    @type identifier: str
    '''
    # -1 flags that no valid author has been resolved (a non valid author identifier).
    self.person_id = -1

    # Only string identifiers can possibly name an author.
    if identifier is None or not isinstance(identifier, str):
        return

    # Resolution order: canonical id, then numeric author id, then bibrefrec.
    # The first interpretation that yields a valid author id wins.

    # 1) canonical id: e.g. "J.R.Ellis.1"
    resolved = int(webapi.get_person_id_from_canonical_id(identifier))
    if resolved >= 0:
        self.person_id = resolved
        return

    # 2) plain numeric author id: e.g. "14"
    try:
        numeric_pid = int(identifier)
        # Accept the number only when that author actually has papers.
        if webapi.author_has_papers(numeric_pid):
            self.person_id = numeric_pid
            return
    except ValueError:
        # identifier is not an integer literal; fall through to the next form.
        pass

    # 3) bibrefrec: e.g. "100:1442,155"
    if webapi.is_valid_bibref(identifier):
        resolved = int(webapi.get_person_id_from_paper(identifier))
        if resolved >= 0:
            self.person_id = resolved
            return
def __call__(self, req, form):
    '''
    Serve the main person page.

    Will use the object's person id to get a person's information.

    @param req: apache request object
    @type req: apache request object
    @param form: POST/GET variables of the request
    @type form: dict

    @return: a full page formatted in HTML
    @rtype: str
    '''
    # Make sure the session is initialized and pull the person info from it.
    webapi.session_bareinit(req)
    session = get_session(req)
    pinfo = session['personinfo']
    ulevel = pinfo['ulevel']

    # Wash the request arguments against the expected types/defaults.
    argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
                               'open_claim': (str, None),
                               'ticketid': (int, -1),
                               'verbose': (int, 0)})
    debug = "verbose" in argd and argd["verbose"] > 0
    ln = argd['ln']
    req.argd = argd  # needed for perform_req_search

    # No valid author resolved in __init__: send the user to the search page.
    if self.person_id < 0:
        return redirect_to_url(req, '%s/author/search' % (CFG_SITE_URL))

    # Enforce access permissions for this person page.
    no_access = self._page_access_permission_wall(req, [self.person_id])
    if no_access:
        return no_access

    # Mark the claim as in-process in the session and mirror it in the user info cache.
    pinfo['claim_in_process'] = True
    user_info = collect_user_info(req)
    user_info['precached_viewclaimlink'] = pinfo['claim_in_process']
    session.dirty = True

    # Remember the last person an admin viewed.
    if self.person_id != -1:
        pinfo['claimpaper_admin_last_viewed_pid'] = self.person_id

    # An explicit ticket id in the URL is stashed for the admin ticket UI.
    rt_ticket_id = argd['ticketid']
    if rt_ticket_id != -1:
        pinfo["admin_requested_ticket_id"] = rt_ticket_id
    session.dirty = True

    ## Create menu and page using templates
    cname = webapi.get_canonical_id_from_person_id(self.person_id)
    menu = WebProfileMenu(str(cname), "claim", ln, self._is_profile_owner(pinfo['pid']), self._is_admin(pinfo))
    profile_page = WebProfilePage("claim", webapi.get_longest_name_from_pid(self.person_id))
    profile_page.add_profile_menu(menu)

    # Values bootstrapped into the page's JavaScript environment.
    gboxstatus = self.person_id
    gpid = self.person_id
    gNumOfWorkers = 3  # to do: read it from conf file
    gReqTimeout = 3000
    gPageTimeout = 12000
    profile_page.add_bootstrapped_data(json.dumps({
        "other": "var gBOX_STATUS = '%s';var gPID = '%s'; var gNumOfWorkers= '%s'; var gReqTimeout= '%s'; var gPageTimeout= '%s';" % (gboxstatus, gpid, gNumOfWorkers, gReqTimeout, gPageTimeout),
        "backbone": """
        (function(ticketbox) {
             var app = ticketbox.app;
             app.userops.set(%s);
             app.bodyModel.set({userLevel: "%s", guestPrompt: true});
        })(ticketbox);""" % (WebInterfaceAuthorTicketHandling.bootstrap_status(pinfo, "user"), ulevel)
    }))
    if debug:
        profile_page.add_debug_info(pinfo)

    # content += self._generate_person_info_box(ulevel, ln) #### Name variants

    # metaheaderadd = self._scripts() + '\n <meta name="robots" content="nofollow" />'
    # body = self._generate_optional_menu(ulevel, req, form)

    # Assemble the page body from the tabbed content and the footer.
    content = self._generate_tabs(ulevel, req)
    content += self._generate_footer(ulevel)
    content = content.decode('utf-8', 'strict')

    # Record this visit in the user's history before rendering.
    webapi.history_log_visit(req, 'claim', pid=self.person_id)
    return page(title=self._generate_title(ulevel),
                metaheaderadd=profile_page.get_head().encode('utf-8'),
                body=profile_page.get_wrapped_body(content).encode('utf-8'),
                req=req,
                language=ln,
                show_title_p=False)
    def _page_access_permission_wall(self, req, req_pid=None, req_level=None):
        '''
        Display an error page if user not authorized to use the interface.

        @param req: Apache Request Object for session management
        @type req: Apache Request Object
        @param req_pid: Requested person id
        @type req_pid: int
        @param req_level: Request level required for the page
        @type req_level: string

        @return: the rendered "not authorized" page when access is denied,
            otherwise the empty string
        @rtype: str
        '''
        session = get_session(req)
        uid = getUid(req)
        pinfo = session["personinfo"]
        uinfo = collect_user_info(req)
        if 'ln' in pinfo:
            ln = pinfo["ln"]
        else:
            ln = CFG_SITE_LANG
        _ = gettext_set_language(ln)
        is_authorized = True
        pids_to_check = []
        # The author id module must be enabled system-wide.
        if not AID_ENABLED:
            return page_not_authorized(req, text=_("Fatal: Author ID capabilities are disabled on this system."))
        # When a specific user level is required it must match exactly.
        if req_level and 'ulevel' in pinfo and pinfo["ulevel"] != req_level:
            return page_not_authorized(req, text=_("Fatal: You are not allowed to access this functionality."))
        # Normalize the requested person id(s) into a list.
        if req_pid and not isinstance(req_pid, list):
            pids_to_check = [req_pid]
        elif req_pid and isinstance(req_pid, list):
            pids_to_check = req_pid
        # Non-admins need at least one of the paper claim/attribution rights.
        if (not (uinfo['precached_usepaperclaim']
                 or uinfo['precached_usepaperattribution'])
            and 'ulevel' in pinfo
            and not pinfo["ulevel"] == "admin"):
            is_authorized = False
        if is_authorized and not webapi.user_can_view_CMP(uid):
            is_authorized = False
        # Person ids referenced by pending session tickets are checked too.
        if is_authorized and 'ticket' in pinfo:
            for tic in pinfo["ticket"]:
                if 'pid' in tic:
                    pids_to_check.append(tic['pid'])
        if pids_to_check and is_authorized:
            user_pid = webapi.get_pid_from_uid(uid)
            if not uinfo['precached_usepaperattribution']:
                # Without the attribution right, non-admins may only act on
                # their own person id...
                if (not user_pid in pids_to_check
                    and 'ulevel' in pinfo
                    and not pinfo["ulevel"] == "admin"):
                    is_authorized = False
                # ...and when their own pid is among the requested ones,
                # ticket entries targeting other people are dropped silently
                # (iterate over a copy since the list is mutated).
                elif (user_pid in pids_to_check
                      and 'ulevel' in pinfo
                      and not pinfo["ulevel"] == "admin"):
                    for tic in list(pinfo["ticket"]):
                        if not tic["pid"] == user_pid:
                            pinfo['ticket'].remove(tic)
        if not is_authorized:
            return page_not_authorized(req, text=_("Fatal: You are not allowed to access this functionality."))
        else:
            return ""
def _generate_title(self, ulevel):
'''
Generates the title for the specified user permission level.
@param ulevel: user permission level
@type ulevel: str
@return: title
@rtype: str
'''
def generate_title_guest():
title = 'Assign papers'
if self.person_id:
title = 'Assign papers for: ' + str(webapi.get_person_redirect_link(self.person_id))
return title
def generate_title_user():
title = 'Assign papers'
if self.person_id:
title = 'Assign papers (user interface) for: ' + str(webapi.get_person_redirect_link(self.person_id))
return title
def generate_title_admin():
title = 'Assign papers'
if self.person_id:
title = 'Assign papers (administrator interface) for: ' + str(webapi.get_person_redirect_link(self.person_id))
return title
generate_title = {'guest': generate_title_guest,
'user': generate_title_user,
'admin': generate_title_admin}
return generate_title[ulevel]()
def _generate_optional_menu(self, ulevel, req, form):
'''
Generates the menu for the specified user permission level.
@param ulevel: user permission level
@type ulevel: str
@param req: apache request object
@type req: apache request object
@param form: POST/GET variables of the request
@type form: dict
@return: menu
@rtype: str
'''
def generate_optional_menu_guest(req, form):
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
'verbose': (int, 0)})
menu = TEMPLATE.tmpl_person_menu(self.person_id, argd['ln'])
if "verbose" in argd and argd["verbose"] > 0:
session = get_session(req)
pinfo = session['personinfo']
menu += "\n<pre>" + pformat(pinfo) + "</pre>\n"
return menu
def generate_optional_menu_user(req, form):
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
'verbose': (int, 0)})
menu = TEMPLATE.tmpl_person_menu(self.person_id, argd['ln'])
if "verbose" in argd and argd["verbose"] > 0:
session = get_session(req)
pinfo = session['personinfo']
menu += "\n<pre>" + pformat(pinfo) + "</pre>\n"
return menu
def generate_optional_menu_admin(req, form):
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
'verbose': (int, 0)})
menu = TEMPLATE.tmpl_person_menu_admin(self.person_id, argd['ln'])
if "verbose" in argd and argd["verbose"] > 0:
session = get_session(req)
pinfo = session['personinfo']
menu += "\n<pre>" + pformat(pinfo) + "</pre>\n"
return menu
generate_optional_menu = {'guest': generate_optional_menu_guest,
'user': generate_optional_menu_user,
'admin': generate_optional_menu_admin}
return "<div class=\"clearfix\">" + generate_optional_menu[ulevel](req, form) + "</div>"
def _generate_ticket_box(self, ulevel, req):
'''
Generates the semi-permanent info box for the specified user permission
level.
@param ulevel: user permission level
@type ulevel: str
@param req: apache request object
@type req: apache request object
@return: info box
@rtype: str
'''
def generate_ticket_box_guest(req):
session = get_session(req)
pinfo = session['personinfo']
ticket = pinfo['ticket']
results = list()
pendingt = list()
for t in ticket:
if 'execution_result' in t:
for res in t['execution_result']:
results.append(res)
else:
pendingt.append(t)
box = ""
if pendingt:
box += TEMPLATE.tmpl_ticket_box('in_process', 'transaction', len(pendingt))
if results:
failed = [messages for status, messages in results if not status]
if failed:
box += TEMPLATE.tmpl_transaction_box('failure', failed)
successfull = [messages for status, messages in results if status]
if successfull:
box += TEMPLATE.tmpl_transaction_box('success', successfull)
return box
def generate_ticket_box_user(req):
return generate_ticket_box_guest(req)
def generate_ticket_box_admin(req):
return generate_ticket_box_guest(req)
generate_ticket_box = {'guest': generate_ticket_box_guest,
'user': generate_ticket_box_user,
'admin': generate_ticket_box_admin}
return generate_ticket_box[ulevel](req)
def _generate_person_info_box(self, ulevel, ln):
'''
Generates the name info box for the specified user permission level.
@param ulevel: user permission level
@type ulevel: str
@param ln: page display language
@type ln: str
@return: name info box
@rtype: str
'''
def generate_person_info_box_guest(ln):
names = webapi.get_person_names_from_id(self.person_id)
box = TEMPLATE.tmpl_admin_person_info_box(ln, person_id=self.person_id,
names=names)
return box
def generate_person_info_box_user(ln):
return generate_person_info_box_guest(ln)
def generate_person_info_box_admin(ln):
return generate_person_info_box_guest(ln)
generate_person_info_box = {'guest': generate_person_info_box_guest,
'user': generate_person_info_box_user,
'admin': generate_person_info_box_admin}
return generate_person_info_box[ulevel](ln)
def _generate_tabs(self, ulevel, req):
'''
Generates the tabs content for the specified user permission level.
@param ulevel: user permission level
@type ulevel: str
@param req: apache request object
@type req: apache request object
@return: tabs content
@rtype: str
'''
from invenio.bibauthorid_templates import verbiage_dict as tmpl_verbiage_dict
from invenio.bibauthorid_templates import buttons_verbiage_dict as tmpl_buttons_verbiage_dict
def generate_tabs_guest(req):
links = list() # ['delete', 'commit','del_entry','commit_entry']
tabs = ['records', 'repealed', 'review']
return generate_tabs_admin(req, show_tabs=tabs, ticket_links=links,
open_tickets=list(),
verbiage_dict=tmpl_verbiage_dict['guest'],
buttons_verbiage_dict=tmpl_buttons_verbiage_dict['guest'],
show_reset_button=False)
def generate_tabs_user(req):
links = ['delete', 'del_entry']
tabs = ['records', 'repealed', 'review', 'tickets']
session = get_session(req)
pinfo = session['personinfo']
uid = getUid(req)
user_is_owner = 'not_owner'
if pinfo["claimpaper_admin_last_viewed_pid"] == webapi.get_pid_from_uid(uid):
user_is_owner = 'owner'
open_tickets = webapi.get_person_request_ticket(self.person_id)
tickets = list()
for t in open_tickets:
owns = False
for row in t[0]:
if row[0] == 'uid-ip' and row[1].split('||')[0] == str(uid):
owns = True
if owns:
tickets.append(t)
return generate_tabs_admin(req, show_tabs=tabs, ticket_links=links,
open_tickets=tickets,
verbiage_dict=tmpl_verbiage_dict['user'][user_is_owner],
buttons_verbiage_dict=tmpl_buttons_verbiage_dict['user'][user_is_owner])
def generate_tabs_admin(req, show_tabs=['records', 'repealed', 'review', 'comments', 'tickets', 'data'],
ticket_links=['delete', 'commit', 'del_entry', 'commit_entry'], open_tickets=None,
verbiage_dict=None, buttons_verbiage_dict=None, show_reset_button=True):
session = get_session(req)
personinfo = dict()
try:
personinfo = session["personinfo"]
except KeyError:
return ""
if 'ln' in personinfo:
ln = personinfo["ln"]
else:
ln = CFG_SITE_LANG
all_papers = webapi.get_papers_by_person_id(self.person_id, ext_out=True)
records = [{'recid': paper[0],
'bibref': paper[1],
'flag': paper[2],
'authorname': paper[3],
'authoraffiliation': paper[4],
'paperdate': paper[5],
'rt_status': paper[6],
'paperexperiment': paper[7]} for paper in all_papers]
rejected_papers = [row for row in records if row['flag'] < -1]
rest_of_papers = [row for row in records if row['flag'] >= -1]
review_needed = webapi.get_review_needing_records(self.person_id)
if len(review_needed) < 1:
if 'review' in show_tabs:
show_tabs.remove('review')
if open_tickets == None:
open_tickets = webapi.get_person_request_ticket(self.person_id)
else:
if len(open_tickets) < 1 and 'tickets' in show_tabs:
show_tabs.remove('tickets')
rt_tickets = None
if "admin_requested_ticket_id" in personinfo:
rt_tickets = personinfo["admin_requested_ticket_id"]
if verbiage_dict is None:
verbiage_dict = translate_dict_values(tmpl_verbiage_dict['admin'], ln)
if buttons_verbiage_dict is None:
buttons_verbiage_dict = translate_dict_values(tmpl_buttons_verbiage_dict['admin'], ln)
# send data to the template function
tabs = TEMPLATE.tmpl_admin_tabs(ln, person_id=self.person_id,
rejected_papers=rejected_papers,
rest_of_papers=rest_of_papers,
review_needed=review_needed,
rt_tickets=rt_tickets,
open_rt_tickets=open_tickets,
show_tabs=show_tabs,
ticket_links=ticket_links,
verbiage_dict=verbiage_dict,
buttons_verbiage_dict=buttons_verbiage_dict,
show_reset_button=show_reset_button)
return tabs
def translate_dict_values(dictionary, ln):
def translate_str_values(dictionary, f=lambda x: x):
translated_dict = dict()
for key, value in dictionary.iteritems():
if isinstance(value, str):
translated_dict[key] = f(value)
elif isinstance(value, dict):
translated_dict[key] = translate_str_values(value, f)
else:
raise TypeError("Value should be either string or dictionary.")
return translated_dict
return translate_str_values(dictionary, f=gettext_set_language(ln))
generate_tabs = {'guest': generate_tabs_guest,
'user': generate_tabs_user,
'admin': generate_tabs_admin}
return generate_tabs[ulevel](req)
def _generate_footer(self, ulevel):
'''
Generates the footer for the specified user permission level.
@param ulevel: user permission level
@type ulevel: str
@return: footer
@rtype: str
'''
def generate_footer_guest():
return TEMPLATE.tmpl_invenio_search_box()
def generate_footer_user():
return generate_footer_guest()
def generate_footer_admin():
return generate_footer_guest()
generate_footer = {'guest': generate_footer_guest,
'user': generate_footer_user,
'admin': generate_footer_admin}
return generate_footer[ulevel]()
def _ticket_dispatch_end(self, req):
'''
The ticket dispatch is finished, redirect to the original page of
origin or to the last_viewed_pid or return to the papers autoassigned box to populate its data
'''
session = get_session(req)
pinfo = session["personinfo"]
webapi.session_bareinit(req)
if 'claim_in_process' in pinfo:
pinfo['claim_in_process'] = False
if "merge_ticket" in pinfo and pinfo['merge_ticket']:
pinfo['merge_ticket'] = []
user_info = collect_user_info(req)
user_info['precached_viewclaimlink'] = True
session.dirty = True
if "referer" in pinfo and pinfo["referer"]:
referer = pinfo["referer"]
del(pinfo["referer"])
session.dirty = True
return redirect_to_url(req, referer)
# if we are coming fromt he autoclaim box we should not redirect and just return to the caller function
if 'autoclaim' in pinfo and pinfo['autoclaim']['review_failed'] == False and pinfo['autoclaim']['begin_autoclaim'] == True:
pinfo['autoclaim']['review_failed'] = False
pinfo['autoclaim']['begin_autoclaim'] = False
session.dirty = True
else:
redirect_page = webapi.history_get_last_visited_url(pinfo['visit_diary'], limit_to_page=['manage_profile', 'claim'])
if not redirect_page:
redirect_page = webapi.get_fallback_redirect_link(req)
if 'autoclaim' in pinfo and pinfo['autoclaim']['review_failed'] == True and pinfo['autoclaim']['checkout'] == True:
redirect_page = '%s/author/claim/action?checkout=True' % (CFG_SITE_URL,)
pinfo['autoclaim']['checkout'] = False
session.dirty = True
elif not 'manage_profile' in redirect_page:
pinfo['autoclaim']['review_failed'] = False
pinfo['autoclaim']['begin_autoclaim'] == False
pinfo['autoclaim']['checkout'] = True
session.dirty = True
redirect_page = '%s/author/claim/%s?open_claim=True' % (CFG_SITE_URL, webapi.get_person_redirect_link(pinfo["claimpaper_admin_last_viewed_pid"]))
else:
pinfo['autoclaim']['review_failed'] = False
pinfo['autoclaim']['begin_autoclaim'] == False
pinfo['autoclaim']['checkout'] = True
session.dirty = True
return redirect_to_url(req, redirect_page)
# redirect_link = diary('get_redirect_link', caller='_ticket_dispatch_end', parameters=[('open_claim','True')])
# return redirect_to_url(req, redirect_link)
# need review if should be deleted
def __user_is_authorized(self, req, action):
'''
Determines if a given user is authorized to perform a specified action
@param req: Apache Request Object
@type req: Apache Request Object
@param action: the action the user wants to perform
@type action: string
@return: True if user is allowed to perform the action, False if not
@rtype: boolean
'''
if not req:
return False
if not action:
return False
else:
action = escape(action)
uid = getUid(req)
if not isinstance(uid, int):
return False
if uid == 0:
return False
allowance = [i[1] for i in acc_find_user_role_actions({'uid': uid})
if i[1] == action]
if allowance:
return True
return False
    @staticmethod
    def _scripts(kill_browser_cache=False):
        '''
        Returns html code to be included in the meta header of the html page.
        The actual code is stored in the template.

        @param kill_browser_cache: whether the includes should bust the
            browser cache
        @type kill_browser_cache: bool

        @return: html formatted Javascript and CSS inclusions for the <head>
        @rtype: string
        '''
        return TEMPLATE.tmpl_meta_includes(kill_browser_cache)
def _check_user_fields(self, req, form):
argd = wash_urlargd(
form,
{'ln': (str, CFG_SITE_LANG),
'user_first_name': (str, None),
'user_last_name': (str, None),
'user_email': (str, None),
'user_comments': (str, None)})
session = get_session(req)
pinfo = session["personinfo"]
ulevel = pinfo["ulevel"]
skip_checkout_faulty_fields = False
if ulevel in ['user', 'admin']:
skip_checkout_faulty_fields = True
if not ("user_first_name_sys" in pinfo and pinfo["user_first_name_sys"]):
if "user_first_name" in argd and argd['user_first_name']:
if not argd["user_first_name"] and not skip_checkout_faulty_fields:
pinfo["checkout_faulty_fields"].append("user_first_name")
else:
pinfo["user_first_name"] = escape(argd["user_first_name"])
if not ("user_last_name_sys" in pinfo and pinfo["user_last_name_sys"]):
if "user_last_name" in argd and argd['user_last_name']:
if not argd["user_last_name"] and not skip_checkout_faulty_fields:
pinfo["checkout_faulty_fields"].append("user_last_name")
else:
pinfo["user_last_name"] = escape(argd["user_last_name"])
if not ("user_email_sys" in pinfo and pinfo["user_email_sys"]):
if "user_email" in argd and argd['user_email']:
if not email_valid_p(argd["user_email"]):
pinfo["checkout_faulty_fields"].append("user_email")
else:
pinfo["user_email"] = escape(argd["user_email"])
if (ulevel == "guest"
and emailUnique(argd["user_email"]) > 0):
pinfo["checkout_faulty_fields"].append("user_email_taken")
else:
pinfo["checkout_faulty_fields"].append("user_email")
if "user_comments" in argd:
if argd["user_comments"]:
pinfo["user_ticket_comments"] = escape(argd["user_comments"])
else:
pinfo["user_ticket_comments"] = ""
session.dirty = True
def action(self, req, form):
'''
Initial step in processing of requests: ticket generation/update.
Also acts as action dispatcher for interface mass action requests.
Valid mass actions are:
- add_external_id: add an external identifier to an author
- add_missing_external_ids: add missing external identifiers of an author
- bibref_check_submit:
- cancel: clean the session (erase tickets and so on)
- cancel_rt_ticket:
- cancel_search_ticket:
- cancel_stage:
- checkout:
- checkout_continue_claiming:
- checkout_remove_transaction:
- checkout_submit:
- claim: claim papers for an author
- commit_rt_ticket:
- confirm: confirm assignments to an author
- delete_external_ids: delete external identifiers of an author
- repeal: repeal assignments from an author
- reset: reset assignments of an author
- set_canonical_name: set/swap the canonical name of an author
- to_other_person: assign a document from an author to another author
@param req: apache request object
@type req: apache request object
@param form: parameters sent via GET or POST request
@type form: dict
@return: a full page formatted in HTML
@return: str
'''
webapi.session_bareinit(req)
session = get_session(req)
pinfo = session["personinfo"]
argd = wash_urlargd(form,
{'autoclaim_show_review':(str, None),
'canonical_name': (str, None),
'existing_ext_ids': (list, None),
'ext_id': (str, None),
'uid': (int, None),
'ext_system': (str, None),
'ln': (str, CFG_SITE_LANG),
'pid': (int, -1),
'primary_profile':(str, None),
'search_param': (str, None),
'rt_action': (str, None),
'rt_id': (int, None),
'selection': (list, None),
# permitted actions
'add_external_id': (str, None),
'set_uid': (str, None),
'add_missing_external_ids': (str, None),
'associate_profile': (str, None),
'bibref_check_submit': (str, None),
'cancel': (str, None),
'cancel_merging': (str, None),
'cancel_rt_ticket': (str, None),
'cancel_search_ticket': (str, None),
'cancel_stage': (str, None),
'checkout': (str, None),
'checkout_continue_claiming': (str, None),
'checkout_remove_transaction': (str, None),
'checkout_submit': (str, None),
'assign': (str, None),
'commit_rt_ticket': (str, None),
'confirm': (str, None),
'delete_external_ids': (str, None),
'merge': (str, None),
'reject': (str, None),
'repeal': (str, None),
'reset': (str, None),
'send_message': (str, None),
'set_canonical_name': (str, None),
'to_other_person': (str, None)})
ulevel = pinfo["ulevel"]
ticket = pinfo["ticket"]
uid = getUid(req)
ln = argd['ln']
action = None
permitted_actions = ['add_external_id',
'set_uid',
'add_missing_external_ids',
'associate_profile',
'bibref_check_submit',
'cancel',
'cancel_merging',
'cancel_rt_ticket',
'cancel_search_ticket',
'cancel_stage',
'checkout',
'checkout_continue_claiming',
'checkout_remove_transaction',
'checkout_submit',
'assign',
'commit_rt_ticket',
'confirm',
'delete_external_ids',
'merge',
'reject',
'repeal',
'reset',
'send_message',
'set_canonical_name',
'to_other_person']
for act in permitted_actions:
# one action (the most) is enabled in the form
if argd[act] is not None:
action = act
no_access = self._page_access_permission_wall(req, None)
if no_access and action not in ["assign"]:
return no_access
# incomplete papers (incomplete paper info or other problems) trigger action function without user's interference
# in order to fix those problems and claim papers or remove them from the ticket
if (action is None
and "bibref_check_required" in pinfo
and pinfo["bibref_check_required"]):
if "bibref_check_reviewed_bibrefs" in pinfo:
del(pinfo["bibref_check_reviewed_bibrefs"])
session.dirty = True
def add_external_id():
'''
associates the user with pid to the external id ext_id
'''
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot add external id to unknown person")
if argd['ext_system'] is not None:
ext_sys = argd['ext_system']
else:
return self._error_page(req, ln,
"Fatal: cannot add an external id without specifying the system")
if argd['ext_id'] is not None:
ext_id = argd['ext_id']
else:
return self._error_page(req, ln,
"Fatal: cannot add a custom external id without a suggestion")
userinfo = "%s||%s" % (uid, req.remote_ip)
webapi.add_person_external_id(pid, ext_sys, ext_id, userinfo)
return redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
def set_uid():
'''
associates the user with pid to the external id ext_id
'''
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: current user is unknown")
if argd['uid'] is not None:
dest_uid = int(argd['uid'])
else:
return self._error_page(req, ln,
"Fatal: user id is not valid")
userinfo = "%s||%s" % (uid, req.remote_ip)
webapi.set_person_uid(pid, dest_uid, userinfo)
# remove arxiv pubs of current pid
remove_arxiv_papers_of_author(pid)
dest_uid_pid = webapi.get_pid_from_uid(dest_uid)
if dest_uid_pid > -1:
# move the arxiv pubs of the dest_uid to the current pid
dest_uid_arxiv_papers = webapi.get_arxiv_papers_of_author(dest_uid_pid)
webapi.add_arxiv_papers_to_author(dest_uid_arxiv_papers, pid)
return redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
def add_missing_external_ids():
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot recompute external ids for an unknown person")
update_external_ids_of_authors([pid], overwrite=False)
return redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
def associate_profile():
'''
associates the user with user id to the person profile with pid
'''
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot associate profile without a person id.")
uid = getUid(req)
pid, profile_claimed = webapi.claim_profile(uid, pid)
redirect_pid = pid
if profile_claimed:
pinfo['pid'] = pid
pinfo['should_check_to_autoclaim'] = True
pinfo["login_info_message"] = "confirm_success"
session.dirty = True
redirect_to_url(req, '%s/author/manage_profile/%s' % (CFG_SITE_URL, redirect_pid))
# if someone have already claimed this profile it redirects to choose_profile with an error message
else:
param=''
if 'search_param' in argd and argd['search_param']:
param = '&search_param=' + argd['search_param']
redirect_to_url(req, '%s/author/choose_profile?failed=%s%s' % (CFG_SITE_URL, True, param))
def bibref_check_submit():
pinfo["bibref_check_reviewed_bibrefs"] = list()
add_rev = pinfo["bibref_check_reviewed_bibrefs"].append
if ("bibrefs_auto_assigned" in pinfo
or "bibrefs_to_confirm" in pinfo):
person_reviews = list()
if ("bibrefs_auto_assigned" in pinfo
and pinfo["bibrefs_auto_assigned"]):
person_reviews.append(pinfo["bibrefs_auto_assigned"])
if ("bibrefs_to_confirm" in pinfo
and pinfo["bibrefs_to_confirm"]):
person_reviews.append(pinfo["bibrefs_to_confirm"])
for ref_review in person_reviews:
for person_id in ref_review:
for bibrec in ref_review[person_id]["bibrecs"]:
rec_grp = "bibrecgroup%s" % bibrec
elements = list()
if rec_grp in form:
if isinstance(form[rec_grp], str):
elements.append(form[rec_grp])
elif isinstance(form[rec_grp], list):
elements += form[rec_grp]
else:
continue
for element in elements:
test = element.split("||")
if test and len(test) > 1 and test[1]:
tref = test[1] + "," + str(bibrec)
tpid = webapi.wash_integer_id(test[0])
if (webapi.is_valid_bibref(tref)
and tpid > -1):
add_rev(element + "," + str(bibrec))
session.dirty = True
def cancel():
self.__session_cleanup(req)
return self._ticket_dispatch_end(req)
def cancel_merging():
'''
empties the session out of merge content and redirects to the manage profile page
that the user was viewing before the merge
'''
if argd['primary_profile']:
primary_cname = argd['primary_profile']
else:
return self._error_page(req, ln,
"Fatal: Couldn't redirect to the previous page")
webapi.session_bareinit(req)
session = get_session(req)
pinfo = session['personinfo']
if pinfo['merge_profiles']:
pinfo['merge_profiles'] = list()
session.dirty = True
redirect_url = "%s/author/manage_profile/%s" % (CFG_SITE_URL, primary_cname)
return redirect_to_url(req, redirect_url)
def cancel_rt_ticket():
if argd['selection'] is not None:
bibrefrecs = argd['selection']
else:
return self._error_page(req, ln,
"Fatal: cannot cancel unknown ticket")
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln, "Fatal: cannot cancel unknown ticket")
if argd['rt_id'] is not None and argd['rt_action'] is not None:
rt_id = int(argd['rt_id'])
rt_action = argd['rt_action']
for bibrefrec in bibrefrecs:
webapi.delete_transaction_from_request_ticket(pid, rt_id, rt_action, bibrefrec)
else:
rt_id = int(bibrefrecs[0])
webapi.delete_request_ticket(pid, rt_id)
return redirect_to_url(req, "%s/author/claim/%s" % (CFG_SITE_URL, pid))
def cancel_search_ticket(without_return=False):
if 'search_ticket' in pinfo:
del(pinfo['search_ticket'])
session.dirty = True
if "claimpaper_admin_last_viewed_pid" in pinfo:
pid = pinfo["claimpaper_admin_last_viewed_pid"]
if not without_return:
return redirect_to_url(req, "%s/author/claim/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
if not without_return:
return self.search(req, form)
def cancel_stage():
if 'bibref_check_required' in pinfo:
del(pinfo['bibref_check_required'])
if 'bibrefs_auto_assigned' in pinfo:
del(pinfo['bibrefs_auto_assigned'])
if 'bibrefs_to_confirm' in pinfo:
del(pinfo['bibrefs_to_confirm'])
for tt in [row for row in ticket if 'incomplete' in row]:
ticket.remove(tt)
session.dirty = True
return self._ticket_dispatch_end(req)
def checkout():
pass
# return self._ticket_final_review(req)
def checkout_continue_claiming():
pinfo["checkout_faulty_fields"] = list()
self._check_user_fields(req, form)
return self._ticket_dispatch_end(req)
def checkout_remove_transaction():
bibref = argd['checkout_remove_transaction']
if webapi.is_valid_bibref(bibref):
for rmt in [row for row in ticket if row["bibref"] == bibref]:
ticket.remove(rmt)
pinfo["checkout_confirmed"] = False
session.dirty = True
# return self._ticket_final_review(req)
def checkout_submit():
pinfo["checkout_faulty_fields"] = list()
self._check_user_fields(req, form)
if not ticket:
pinfo["checkout_faulty_fields"].append("tickets")
pinfo["checkout_confirmed"] = True
if pinfo["checkout_faulty_fields"]:
pinfo["checkout_confirmed"] = False
session.dirty = True
# return self._ticket_final_review(req)
def claim():
if argd['selection'] is not None:
bibrefrecs = argd['selection']
else:
return self._error_page(req, ln,
"Fatal: cannot create ticket without any bibrefrec")
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot claim papers to an unknown person")
if action == 'assign':
claimed_recs = [paper[2] for paper in get_claimed_papers_of_author(pid)]
for bibrefrec in list(bibrefrecs):
_, rec = webapi.split_bibrefrec(bibrefrec)
if rec in claimed_recs:
bibrefrecs.remove(bibrefrec)
for bibrefrec in bibrefrecs:
operation_parts = {'pid': pid,
'action': action,
'bibrefrec': bibrefrec}
operation_to_be_added = webapi.construct_operation(operation_parts, pinfo, uid)
if operation_to_be_added is None:
continue
ticket = pinfo['ticket']
webapi.add_operation_to_ticket(operation_to_be_added, ticket)
session.dirty = True
return redirect_to_url(req, "%s/author/claim/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
def claim_to_other_person():
if argd['selection'] is not None:
bibrefrecs = argd['selection']
else:
return self._error_page(req, ln,
"Fatal: cannot create ticket without any bibrefrec")
return self._ticket_open_assign_to_other_person(req, bibrefrecs, form)
def commit_rt_ticket():
if argd['selection'] is not None:
tid = argd['selection'][0]
else:
return self._error_page(req, ln,
"Fatal: cannot cancel unknown ticket")
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot cancel unknown ticket")
return self._commit_rt_ticket(req, tid, pid)
def confirm_repeal_reset():
if argd['pid'] > -1 or int(argd['pid']) == CREATE_NEW_PERSON:
pid = argd['pid']
cancel_search_ticket(without_return = True)
else:
return self._ticket_open_assign_to_other_person(req, argd['selection'], form)
#return self._error_page(req, ln, "Fatal: cannot create ticket without a person id! (crr %s)" %repr(argd))
bibrefrecs = argd['selection']
if argd['confirm']:
action = 'assign'
elif argd['repeal']:
action = 'reject'
elif argd['reset']:
action = 'reset'
else:
return self._error_page(req, ln, "Fatal: not existent action!")
for bibrefrec in bibrefrecs:
form['jsondata'] = json.dumps({'pid': str(pid),
'action': action,
'bibrefrec': bibrefrec,
'on': 'user'})
t = WebInterfaceAuthorTicketHandling()
t.add_operation(req, form)
return redirect_to_url(req, "%s/author/claim/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
def delete_external_ids():
'''
deletes association between the user with pid and the external id ext_id
'''
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot delete external ids from an unknown person")
if argd['existing_ext_ids'] is not None:
existing_ext_ids = argd['existing_ext_ids']
else:
return self._error_page(req, ln,
"Fatal: you must select at least one external id in order to delete it")
userinfo = "%s||%s" % (uid, req.remote_ip)
webapi.delete_person_external_ids(pid, existing_ext_ids, userinfo)
return redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid)))
def none_action():
return self._error_page(req, ln,
"Fatal: cannot create ticket if no action selected.")
def merge():
'''
performs a merge if allowed on the profiles that the user chose
'''
if argd['primary_profile']:
primary_cname = argd['primary_profile']
else:
return self._error_page(req, ln,
"Fatal: cannot perform a merge without a primary profile!")
if argd['selection']:
profiles_to_merge = argd['selection']
else:
return self._error_page(req, ln,
"Fatal: cannot perform a merge without any profiles selected!")
webapi.session_bareinit(req)
session = get_session(req)
pinfo = session['personinfo']
uid = getUid(req)
primary_pid = webapi.get_person_id_from_canonical_id(primary_cname)
pids_to_merge = [webapi.get_person_id_from_canonical_id(cname) for cname in profiles_to_merge]
is_admin = False
if pinfo['ulevel'] == 'admin':
is_admin = True
# checking if there are restrictions regarding this merge
can_perform_merge, preventing_pid = webapi.merge_is_allowed(primary_pid, pids_to_merge, is_admin)
if not can_perform_merge:
# when redirected back to the merge profiles page display an error message about the currently attempted merge
pinfo['merge_info_message'] = ("failure", "confirm_failure")
session.dirty = True
redirect_url = "%s/author/merge_profiles?primary_profile=%s" % (CFG_SITE_URL, primary_cname)
return redirect_to_url(req, redirect_url)
if is_admin:
webapi.merge_profiles(primary_pid, pids_to_merge)
# when redirected back to the manage profile page display a message about the currently attempted merge
pinfo['merge_info_message'] = ("success", "confirm_success")
else:
name = ''
if 'user_last_name' in pinfo:
name = pinfo['user_last_name']
if 'user_first_name' in pinfo:
name += pinfo['user_first_name']
email = ''
if 'user_email' in pinfo:
email = pinfo['user_email']
selection_str = "&selection=".join(profiles_to_merge)
userinfo = {'uid-ip': "userid: %s (from %s)" % (uid, req.remote_ip),
'name': name,
'email': email,
'merge link': "%s/author/merge_profiles?primary_profile=%s&selection=%s" % (CFG_SITE_URL, primary_cname, selection_str)}
# a message is sent to the admin with info regarding the currently attempted merge
webapi.create_request_message(userinfo, subj='Merge profiles request')
# when redirected back to the manage profile page display a message about the merge
pinfo['merge_info_message'] = ("success", "confirm_operation")
pinfo['merge_profiles'] = list()
session.dirty = True
redirect_url = "%s/author/manage_profile/%s" % (CFG_SITE_URL, primary_cname)
return redirect_to_url(req, redirect_url)
def send_message():
'''
sends a message from the user to the admin
'''
webapi.session_bareinit(req)
session = get_session(req)
pinfo = session['personinfo']
#pp = pprint.PrettyPrinter(indent=4)
#session_dump = pp.pprint(pinfo)
session_dump = str(pinfo)
name = ''
name_changed = False
name_given = ''
email = ''
email_changed = False
email_given = ''
comment = ''
last_page_visited = ''
if "user_last_name" in pinfo:
name = pinfo["user_last_name"]
if "user_first_name" in pinfo:
name += pinfo["user_first_name"]
name = name.rstrip()
if "user_email" in pinfo:
email = pinfo["user_email"]
email = email.rstrip()
if 'Name' in form:
if not name:
name = form['Name']
elif name != form['Name']:
name_given = form['Name']
name_changed = True
name = name.rstrip()
if 'E-mail'in form:
if not email:
email = form['E-mail']
elif name != form['E-mail']:
email_given = form['E-mail']
email_changed = True
email = email.rstrip()
if 'Comment' in form:
comment = form['Comment']
comment = comment.rstrip()
if not name or not comment or not email:
redirect_to_url(req, '%s/author/help?incomplete_params=%s' % (CFG_SITE_URL, True))
if 'last_page_visited' in form:
last_page_visited = form['last_page_visited']
uid = getUid(req)
userinfo = {'uid-ip': "userid: %s (from %s)" % (uid, req.remote_ip),
'name': name,
'email': email,
'comment': comment,
'last_page_visited': last_page_visited,
'session_dump': session_dump,
'name_given': name_given,
'email_given': email_given,
'name_changed': name_changed,
'email_changed': email_changed}
webapi.create_request_message(userinfo)
def set_canonical_name():
if argd['pid'] > -1:
pid = argd['pid']
else:
return self._error_page(req, ln,
"Fatal: cannot set canonical name to unknown person")
if argd['canonical_name'] is not None:
cname = argd['canonical_name']
else:
return self._error_page(req, ln,
"Fatal: cannot set a custom canonical name without a suggestion")
userinfo = "%s||%s" % (uid, req.remote_ip)
if webapi.is_valid_canonical_id(cname):
webapi.swap_person_canonical_name(pid, cname, userinfo)
else:
webapi.update_person_canonical_name(pid, cname, userinfo)
return redirect_to_url(req, "%s/author/claim/%s%s" % (CFG_SITE_URL, webapi.get_person_redirect_link(pid), '#tabData'))
        # Dispatch table mapping the requested 'action' name to its handler
        # closure; several action names deliberately share one handler
        # (assign/reject -> claim, confirm/repeal/reset ->
        # confirm_repeal_reset) and None maps to the no-op handler.
        action_functions = {'add_external_id': add_external_id,
                            'set_uid': set_uid,
                            'add_missing_external_ids': add_missing_external_ids,
                            'associate_profile': associate_profile,
                            'bibref_check_submit': bibref_check_submit,
                            'cancel': cancel,
                            'cancel_merging': cancel_merging,
                            'cancel_rt_ticket': cancel_rt_ticket,
                            'cancel_search_ticket': cancel_search_ticket,
                            'cancel_stage': cancel_stage,
                            'checkout': checkout,
                            'checkout_continue_claiming': checkout_continue_claiming,
                            'checkout_remove_transaction': checkout_remove_transaction,
                            'checkout_submit': checkout_submit,
                            'assign': claim,
                            'commit_rt_ticket': commit_rt_ticket,
                            'confirm': confirm_repeal_reset,
                            'delete_external_ids': delete_external_ids,
                            'merge': merge,
                            'reject': claim,
                            'repeal': confirm_repeal_reset,
                            'reset': confirm_repeal_reset,
                            'send_message': send_message,
                            'set_canonical_name': set_canonical_name,
                            'to_other_person': claim_to_other_person,
                            None: none_action}
        # invoke the selected handler and return its page/redirect result
        return action_functions[action]()
    def _ticket_open_claim(self, req, bibrefs, ln):
        '''
        Generate page to let user choose how to proceed
        @param req: Apache Request Object
        @type req: Apache Request Object
        @param bibrefs: list of record IDs to perform an action on
        @type bibrefs: list of int
        @param ln: language to display the page in
        @type ln: string
        @return: the rendered "open claim" page
        @rtype: string
        '''
        session = get_session(req)
        uid = getUid(req)
        uinfo = collect_user_info(req)
        pinfo = session["personinfo"]
        # the language stored in the session overrides the one passed in
        if 'ln' in pinfo:
            ln = pinfo["ln"]
        else:
            ln = CFG_SITE_LANG
        _ = gettext_set_language(ln)
        no_access = self._page_access_permission_wall(req)
        session.dirty = True
        pid = -1
        search_enabled = True
        # pre-select the user's own profile when paper claiming is allowed
        if not no_access and uinfo["precached_usepaperclaim"]:
            tpid = webapi.get_pid_from_uid(uid)
            if tpid > -1:
                pid = tpid
        last_viewed_pid = False
        if (not no_access
            and "claimpaper_admin_last_viewed_pid" in pinfo
            and pinfo["claimpaper_admin_last_viewed_pid"]):
            names = webapi.get_person_names_from_id(pinfo["claimpaper_admin_last_viewed_pid"])
            # sort name variants by frequency, most frequent first
            names = sorted([i for i in names], key=lambda k: k[1], reverse=True)
            if len(names) > 0:
                if len(names[0]) > 0:
                    last_viewed_pid = [pinfo["claimpaper_admin_last_viewed_pid"], names[0][0]]
        if no_access:
            search_enabled = False
        pinfo["referer"] = uinfo["referer"]
        session.dirty = True
        body = TEMPLATE.tmpl_open_claim(bibrefs, pid, last_viewed_pid,
                                        search_enabled=search_enabled)
        body = TEMPLATE.tmpl_person_detail_layout(body)
        title = _('Claim this paper')
        metaheaderadd = WebInterfaceBibAuthorIDClaimPages._scripts(kill_browser_cache=True)
        return page(title=title,
                    metaheaderadd=metaheaderadd,
                    body=body,
                    req=req,
                    language=ln)
def _ticket_open_assign_to_other_person(self, req, bibrefs, form):
'''
Initializes search to find a person to attach the selected records to
@param req: Apache request object
@type req: Apache request object
@param bibrefs: list of record IDs to consider
@type bibrefs: list of int
@param form: GET/POST request parameters
@type form: dict
'''
session = get_session(req)
pinfo = session["personinfo"]
pinfo["search_ticket"] = dict()
search_ticket = pinfo["search_ticket"]
search_ticket['action'] = 'assign'
search_ticket['bibrefs'] = bibrefs
session.dirty = True
return self.search(req, form)
def _cancel_rt_ticket(self, req, tid, pid):
'''
deletes an RT ticket
'''
webapi.delete_request_ticket(pid, tid)
return redirect_to_url(req, "%s/author/claim/%s" %
(CFG_SITE_URL, webapi.get_person_redirect_link(str(pid))))
    def _cancel_transaction_from_rt_ticket(self, tid, pid, action, bibref):
        '''
        deletes a transaction from an rt ticket
        @param tid: id of the request ticket
        @param pid: person id the ticket belongs to
        @param action: the action of the transaction to delete
        @param bibref: the bibref of the transaction to delete
        '''
        # thin wrapper; all the work happens in the webapi layer
        webapi.delete_transaction_from_request_ticket(pid, tid, action, bibref)
    def _commit_rt_ticket(self, req, tid, pid):
        '''
        Commit of an rt ticket: creates a real ticket and commits.
        @param req: Apache Request Object
        @param tid: id of the request ticket to commit
        @param pid: person id the request ticket belongs to
        '''
        session = get_session(req)
        pinfo = session["personinfo"]
        ticket = pinfo["ticket"]
        uid = getUid(req)
        tid = int(tid)
        # exactly one validated request ticket is expected for this tid;
        # take it (IndexError here would mean the ticket vanished)
        rt_ticket = get_validated_request_tickets_for_author(pid, tid)[0]
        # copy every operation of the request ticket onto the user's
        # session ticket
        for action, bibrefrec in rt_ticket['operations']:
            operation_parts = {'pid': pid,
                               'action': action,
                               'bibrefrec': bibrefrec}
            operation_to_be_added = webapi.construct_operation(operation_parts, pinfo, uid)
            webapi.add_operation_to_ticket(operation_to_be_added, ticket)
        session.dirty = True
        # the request ticket's content now lives in the session ticket,
        # so the request ticket itself can be removed
        webapi.delete_request_ticket(pid, tid)
        redirect_to_url(req, "%s/author/claim/%s" % (CFG_SITE_URL, pid))
def _error_page(self, req, ln=CFG_SITE_LANG, message=None, intro=True):
'''
Create a page that contains a message explaining the error.
@param req: Apache Request Object
@type req: Apache Request Object
@param ln: language
@type ln: string
@param message: message to be displayed
@type message: string
'''
body = []
_ = gettext_set_language(ln)
if not message:
message = "No further explanation available. Sorry."
if intro:
body.append(_("<p>We're sorry. An error occurred while "
"handling your request. Please find more information "
"below:</p>"))
body.append("<p><strong>%s</strong></p>" % message)
return page(title=_("Notice"),
body="\n".join(body),
description="%s - Internal Error" % CFG_SITE_NAME,
keywords="%s, Internal Error" % CFG_SITE_NAME,
language=ln,
req=req)
def __session_cleanup(self, req):
'''
Cleans the session from all bibauthorid specific settings and
with that cancels any transaction currently in progress.
@param req: Apache Request Object
@type req: Apache Request Object
'''
session = get_session(req)
try:
pinfo = session["personinfo"]
except KeyError:
return
if "ticket" in pinfo:
pinfo['ticket'] = []
if "search_ticket" in pinfo:
pinfo['search_ticket'] = dict()
# clear up bibref checker if it's done.
if ("bibref_check_required" in pinfo
and not pinfo["bibref_check_required"]):
if 'bibrefs_to_confirm' in pinfo:
del(pinfo['bibrefs_to_confirm'])
if "bibrefs_auto_assigned" in pinfo:
del(pinfo["bibrefs_auto_assigned"])
del(pinfo["bibref_check_required"])
if "checkout_confirmed" in pinfo:
del(pinfo["checkout_confirmed"])
if "checkout_faulty_fields" in pinfo:
del(pinfo["checkout_faulty_fields"])
# pinfo['ulevel'] = ulevel
# pinfo["claimpaper_admin_last_viewed_pid"] = -1
pinfo["admin_requested_ticket_id"] = -1
session.dirty = True
def _generate_search_ticket_box(self, req):
'''
Generate the search ticket to remember a pending search for Person
entities in an attribution process
@param req: Apache request object
@type req: Apache request object
'''
session = get_session(req)
pinfo = session["personinfo"]
search_ticket = None
if 'search_ticket' in pinfo:
search_ticket = pinfo['search_ticket']
if not search_ticket:
return ''
else:
return TEMPLATE.tmpl_search_ticket_box('person_search', 'assign_papers', search_ticket['bibrefs'])
def search_box(self, query, shown_element_functions):
'''
collecting the persons' data that the search function returned
@param req: Apache request object
@type req: Apache request object
@param query: the query string
@type query: string
@param shown_element_functions: contains the functions that will tell to the template which columns to show and what buttons to print
@type shown_element_functions: dict
@return: html body
@rtype: string
'''
pid_list = self._perform_search(query)
search_results = []
for pid in pid_list:
result = defaultdict(list)
result['pid'] = pid
result['canonical_id'] = webapi.get_canonical_id_from_person_id(pid)
result['name_variants'] = webapi.get_person_names_from_id(pid)
result['external_ids'] = webapi.get_external_ids_from_person_id(pid)
# this variable shows if we want to use the following data in the search template
if 'pass_status' in shown_element_functions and shown_element_functions['pass_status']:
result['status'] = webapi.is_profile_available(pid)
search_results.append(result)
body = TEMPLATE.tmpl_author_search(query, search_results, shown_element_functions)
body = TEMPLATE.tmpl_person_detail_layout(body)
return body
def search(self, req, form):
'''
Function used for searching a person based on a name with which the
function is queried.
@param req: Apache Request Object
@type form: dict
@return: a full page formatted in HTML
@rtype: string
'''
webapi.session_bareinit(req)
session = get_session(req)
pinfo = session['personinfo']
ulevel = pinfo['ulevel']
person_id = self.person_id
uid = getUid(req)
argd = wash_urlargd(
form,
{'ln': (str, CFG_SITE_LANG),
'verbose': (int, 0),
'q': (str, None)})
debug = "verbose" in argd and argd["verbose"] > 0
ln = argd['ln']
cname = ''
is_owner = False
last_visited_pid = webapi.history_get_last_visited_pid(session['personinfo']['visit_diary'])
if last_visited_pid is not None:
cname = webapi.get_canonical_id_from_person_id(last_visited_pid)
is_owner = self._is_profile_owner(last_visited_pid)
menu = WebProfileMenu(str(cname), "search", ln, is_owner, self._is_admin(pinfo))
title = "Person search"
# Create Wrapper Page Markup
profile_page = WebProfilePage("search", title, no_cache=True)
profile_page.add_profile_menu(menu)
profile_page.add_bootstrapped_data(json.dumps({
"other": "var gBOX_STATUS = '10';var gPID = '10'; var gNumOfWorkers= '10'; var gReqTimeout= '10'; var gPageTimeout= '10';",
"backbone": """
(function(ticketbox) {
var app = ticketbox.app;
app.userops.set(%s);
app.bodyModel.set({userLevel: "%s"});
})(ticketbox);""" % (WebInterfaceAuthorTicketHandling.bootstrap_status(pinfo, "user"), ulevel)
}))
if debug:
profile_page.add_debug_info(pinfo)
no_access = self._page_access_permission_wall(req)
shown_element_functions = dict()
shown_element_functions['show_search_bar'] = TEMPLATE.tmpl_general_search_bar()
if no_access:
return no_access
search_ticket = None
bibrefs = []
if 'search_ticket' in pinfo:
search_ticket = pinfo['search_ticket']
for r in search_ticket['bibrefs']:
bibrefs.append(r)
if search_ticket and "ulevel" in pinfo:
if pinfo["ulevel"] == "admin":
shown_element_functions['new_person_gen'] = TEMPLATE.tmpl_assigning_search_new_person_generator(bibrefs)
content = ""
if search_ticket:
shown_element_functions['button_gen'] = TEMPLATE.tmpl_assigning_search_button_generator(bibrefs)
content = content + self._generate_search_ticket_box(req)
query = None
if 'q' in argd:
if argd['q']:
query = escape(argd['q'])
content += self.search_box(query, shown_element_functions)
body = profile_page.get_wrapped_body(content)
parameter = None
if query:
parameter = '?search_param=%s' + query
webapi.history_log_visit(req, 'search', params = parameter)
return page(title=title,
metaheaderadd=profile_page.get_head().encode('utf-8'),
body=body.encode('utf-8'),
req=req,
language=ln,
show_title_p=False)
    def merge_profiles(self, req, form):
        '''
        Beginning of the process that performs the merge over multiple
        person profiles.
        @param req: Apache Request Object
        @type req: Apache Request Object
        @param form: GET/POST request parameters
        @type form: dict
        @return: a full page formatted in HTML
        @rtype: string
        '''
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
                                   'primary_profile': (str, None),
                                   'search_param': (str, ''),
                                   'selection': (list, None),
                                   'verbose': (int, 0)})
        ln = argd['ln']
        primary_cname = argd['primary_profile']
        search_param = argd['search_param']
        selection = argd['selection']
        debug = 'verbose' in argd and argd['verbose'] > 0
        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        profiles_to_merge = pinfo['merge_profiles']
        _ = gettext_set_language(ln)
        # a primary profile is mandatory; the page cannot be opened directly
        if not primary_cname:
            return page_not_authorized(req, text=_('This page is not accessible directly.'))
        no_access = self._page_access_permission_wall(req)
        if no_access:
            return no_access
        # add any newly selected profiles (with their availability flag,
        # '1'/'0') to the merge list kept in the session
        if selection is not None:
            profiles_to_merge_session = [cname for cname, is_available in profiles_to_merge]
            for profile in selection:
                if profile not in profiles_to_merge_session:
                    pid = webapi.get_person_id_from_canonical_id(profile)
                    is_available = webapi.is_profile_available(pid)
                    pinfo['merge_profiles'].append([profile, '1' if is_available else '0'])
            session.dirty = True
        primary_pid = webapi.get_person_id_from_canonical_id(primary_cname)
        is_available = webapi.is_profile_available(primary_pid)
        # remember the primary profile in the session on first visit
        if not session['personinfo']['merge_primary_profile']:
            session['personinfo']['merge_primary_profile'] = [primary_cname, '1' if is_available else '0']
            session.dirty = True
        body = ''
        cname = ''
        is_owner = False
        last_visited_pid = webapi.history_get_last_visited_pid(session['personinfo']['visit_diary'])
        if last_visited_pid is not None:
            cname = webapi.get_canonical_id_from_person_id(last_visited_pid)
            is_owner = self._is_profile_owner(last_visited_pid)
        title = 'Merge Profiles'
        menu = WebProfileMenu(str(cname), "manage_profile", ln, is_owner, self._is_admin(pinfo))
        merge_page = WebProfilePage("merge_profile", title, no_cache=True)
        merge_page.add_profile_menu(menu)
        if debug:
            merge_page.add_debug_info(pinfo)
        # display status for any previously attempted merge (one-shot:
        # the message is cleared right after being rendered)
        if pinfo['merge_info_message']:
            teaser_key, message = pinfo['merge_info_message']
            body += TEMPLATE.tmpl_merge_transaction_box(teaser_key, [message])
            pinfo['merge_info_message'] = None
            session.dirty = True
        body += TEMPLATE.tmpl_merge_ticket_box('person_search', 'merge_profiles', primary_cname)
        shown_element_functions = dict()
        shown_element_functions['show_search_bar'] = TEMPLATE.tmpl_merge_profiles_search_bar(primary_cname)
        shown_element_functions['button_gen'] = TEMPLATE.merge_profiles_button_generator()
        shown_element_functions['pass_status'] = 'True'
        merge_page.add_bootstrapped_data(json.dumps({
            "other": "var gMergeProfile = %s; var gMergeList = %s;" % ([primary_cname, '1' if is_available else '0'], profiles_to_merge)
        }))
        body += self.search_box(search_param, shown_element_functions)
        body = merge_page.get_wrapped_body(body)
        return page(title=title,
                    metaheaderadd=merge_page.get_head().encode('utf-8'),
                    body=body.encode('utf-8'),
                    req=req,
                    language=ln,
                    show_title_p=False)
def _perform_search(self, search_param):
'''
calls the search function on the search_param and returns the results
@param search_param: query string
@type search_param: String
@return: list of pids that the search found they match with the search query
@return: list
'''
pid_canditates_list = []
nquery = None
if search_param:
if search_param.count(":"):
try:
left, right = search_param.split(":")
try:
nsearch_param = str(right)
except (ValueError, TypeError):
try:
nsearch_param = str(left)
except (ValueError, TypeError):
nsearch_param = search_param
except ValueError:
nsearch_param = search_param
else:
nsearch_param = search_param
sorted_results = webapi.search_person_ids_by_name(nsearch_param)
for result in sorted_results:
pid_canditates_list.append(result[0])
return pid_canditates_list
    def merge_profiles_ajax(self, req, form):
        '''
        Function used for handling Ajax requests used in order to add/remove profiles
        in/from the merging profiles list, which is saved in the session.
        @param req: Apache Request Object
        @type req: Apache Request Object
        @param form: Parameters sent via Ajax request
        @type form: dict
        @return: json data
        '''
        # Abort if the simplejson module isn't available
        if not CFG_JSON_AVAILABLE:
            print "Json not configurable"
        # If it is an Ajax request, extract any JSON data.
        ajax_request = False
        # REcent papers request
        if form.has_key('jsondata'):
            json_data = json.loads(str(form['jsondata']))
            # Deunicode all strings (Invenio doesn't have unicode
            # support).
            json_data = json_unicode_to_utf8(json_data)
            ajax_request = True
        # default response: resultCode 0 means "nothing done"
        json_response = {'resultCode': 0}
        # Handle request.
        if ajax_request:
            req_type = json_data['requestType']
            if req_type == 'addProfile':
                if json_data.has_key('profile'):
                    profile = json_data['profile']
                    person_id = webapi.get_person_id_from_canonical_id(profile)
                    if person_id != -1:
                        webapi.session_bareinit(req)
                        session = get_session(req)
                        profiles_to_merge = session["personinfo"]["merge_profiles"]
                        # availability is serialized as '1'/'0' for the JS side
                        profile_availability = webapi.is_profile_available(person_id)
                        if profile_availability:
                            profile_availability = "1"
                        else:
                            profile_availability = "0"
                        if profile not in [el[0] for el in profiles_to_merge]:
                            profiles_to_merge.append([profile, profile_availability])
                            session.dirty = True
                            # TODO check access rights and get profile from db
                            json_response.update({'resultCode': 1})
                            json_response.update({'addedPofile': profile})
                            json_response.update({'addedPofileAvailability': profile_availability})
                        else:
                            # NOTE(review): this message and the one in the
                            # branch below look swapped (compare with the
                            # removeProfile branch) — confirm before changing,
                            # the JS client may rely on the current strings.
                            json_response.update({'result': 'Error: Profile does not exist'})
                    else:
                        json_response.update({'result': 'Error: Profile was already in the list'})
                else:
                    json_response.update({'result': 'Error: Missing profile'})
            elif req_type == 'removeProfile':
                if json_data.has_key('profile'):
                    profile = json_data['profile']
                    if webapi.get_person_id_from_canonical_id(profile) != -1:
                        webapi.session_bareinit(req)
                        session = get_session(req)
                        profiles_to_merge = session["personinfo"]["merge_profiles"]
                        # print (str(profiles_to_merge))
                        if profile in [el[0] for el in profiles_to_merge]:
                            # iterate over a copy so removal is safe
                            for prof in list(profiles_to_merge):
                                if prof[0] == profile:
                                    profiles_to_merge.remove(prof)
                            session.dirty = True
                            # TODO check access rights and get profile from db
                            json_response.update({'resultCode': 1})
                            json_response.update({'removedProfile': profile})
                        else:
                            json_response.update({'result': 'Error: Profile was missing already from the list'})
                    else:
                        json_response.update({'result': 'Error: Profile does not exist'})
                else:
                    json_response.update({'result': 'Error: Missing profile'})
            elif req_type == 'setPrimaryProfile':
                if json_data.has_key('profile'):
                    profile = json_data['profile']
                    profile_id = webapi.get_person_id_from_canonical_id(profile)
                    if profile_id != -1:
                        webapi.session_bareinit(req)
                        session = get_session(req)
                        profile_availability = webapi.is_profile_available(profile_id)
                        if profile_availability:
                            profile_availability = "1"
                        else:
                            profile_availability = "0"
                        profiles_to_merge = session["personinfo"]["merge_profiles"]
                        # if the new primary was in the merge list, take it out
                        if profile in [el[0] for el in profiles_to_merge if el and el[0]]:
                            for prof in list(profiles_to_merge):
                                if prof[0] == profile:
                                    profiles_to_merge.remove(prof)
                        # the previous primary goes back into the merge list
                        primary_profile = session["personinfo"]["merge_primary_profile"]
                        if primary_profile and primary_profile not in profiles_to_merge:
                            profiles_to_merge.append(primary_profile)
                        session["personinfo"]["merge_primary_profile"] = [profile, profile_availability]
                        session.dirty = True
                        json_response.update({'resultCode': 1})
                        json_response.update({'primaryProfile': profile})
                        json_response.update({'primaryPofileAvailability': profile_availability})
                    else:
                        # NOTE(review): message looks wrong for a nonexistent
                        # profile — confirm against the other branches.
                        json_response.update({'result': 'Error: Profile was already in the list'})
                else:
                    json_response.update({'result': 'Error: Missing profile'})
            else:
                json_response.update({'result': 'Error: Wrong request type'})
        return json.dumps(json_response)
    def search_box_ajax(self, req, form):
        '''
        Function used for handling Ajax requests used in the search box.
        @param req: Apache Request Object
        @type req: Apache Request Object
        @param form: Parameters sent via Ajax request
        @type form: dict
        @return: json data
        '''
        # Abort if the simplejson module isn't available
        if not CFG_JSON_AVAILABLE:
            print "Json not configurable"
        # If it is an Ajax request, extract any JSON data.
        ajax_request = False
        # REcent papers request
        if form.has_key('jsondata'):
            json_data = json.loads(str(form['jsondata']))
            # Deunicode all strings (Invenio doesn't have unicode
            # support).
            json_data = json_unicode_to_utf8(json_data)
            ajax_request = True
        # default response: resultCode 0 means "nothing done"
        json_response = {'resultCode': 0}
        # Handle request.
        if ajax_request:
            req_type = json_data['requestType']
            if req_type == 'getPapers':
                if json_data.has_key('personId'):
                    pId = json_data['personId']
                    papers = sorted([[p[0]] for p in webapi.get_papers_by_person_id(int(pId), -1)],
                                    key=itemgetter(0))
                    # only the first MAX_NUM_SHOW_PAPERS papers are rendered
                    papers_html = TEMPLATE.tmpl_gen_papers(papers[0:MAX_NUM_SHOW_PAPERS])
                    json_response.update({'result': "\n".join(papers_html)})
                    json_response.update({'totalPapers': len(papers)})
                    json_response.update({'resultCode': 1})
                    json_response.update({'pid': str(pId)})
                else:
                    json_response.update({'result': 'Error: Missing person id'})
            elif req_type == 'getNames':
                if json_data.has_key('personId'):
                    pId = json_data['personId']
                    names = webapi.get_person_names_from_id(int(pId))
                    names_html = TEMPLATE.tmpl_gen_names(names)
                    json_response.update({'result': "\n".join(names_html)})
                    json_response.update({'resultCode': 1})
                    json_response.update({'pid': str(pId)})
            elif req_type == 'getIDs':
                if json_data.has_key('personId'):
                    pId = json_data['personId']
                    ids = webapi.get_external_ids_from_person_id(int(pId))
                    ids_html = TEMPLATE.tmpl_gen_ext_ids(ids)
                    json_response.update({'result': "\n".join(ids_html)})
                    json_response.update({'resultCode': 1})
                    json_response.update({'pid': str(pId)})
            elif req_type == 'isProfileClaimed':
                if json_data.has_key('personId'):
                    pId = json_data['personId']
                    isClaimed = webapi.get_uid_from_personid(pId)
                    # resultCode stays 0 when the profile is unclaimed
                    if isClaimed != -1:
                        json_response.update({'resultCode': 1})
                        json_response.update({'pid': str(pId)})
            else:
                json_response.update({'result': 'Error: Wrong request type'})
        return json.dumps(json_response)
    def choose_profile(self, req, form):
        '''
        Generate SSO landing/choose_profile page
        @param req: Apache request object
        @type req: Apache request object
        @param form: GET/POST request params
        @type form: dict
        '''
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
                                   'search_param': (str, None),
                                   'failed': (str, None),
                                   'verbose': (int, 0)})
        ln = argd['ln']
        debug = "verbose" in argd and argd["verbose"] > 0
        req.argd = argd  # needed for perform_req_search
        search_param = argd['search_param']
        webapi.session_bareinit(req)
        session = get_session(req)
        uid = getUid(req)
        pinfo = session['personinfo']
        # 'failed' flags a previous, unsuccessful profile-claim attempt
        failed = True
        if not argd['failed']:
            failed = False
        _ = gettext_set_language(ln)
        # the page only makes sense on the INSPIRE installation
        if not CFG_INSPIRE_SITE:
            return page_not_authorized(req, text=_("This page is not accessible directly."))
        params = WebInterfaceBibAuthorIDClaimPages.get_params_to_check_login_info(session)
        login_info = webapi.get_login_info(uid, params)
        # the user must have come through the arXiv SSO login
        if 'arXiv' not in login_info['logged_in_to_remote_systems']:
            return page_not_authorized(req, text=_("This page is not accessible directly."))
        pid = webapi.get_user_pid(login_info['uid'])
        # Create Wrapper Page Markup
        is_owner = False
        menu = WebProfileMenu('', "choose_profile", ln, is_owner, self._is_admin(pinfo))
        choose_page = WebProfilePage("choose_profile", "Choose your profile", no_cache=True)
        choose_page.add_profile_menu(menu)
        if debug:
            choose_page.add_debug_info(pinfo)
        content = TEMPLATE.tmpl_choose_profile(failed)
        body = choose_page.get_wrapped_body(content)
        #In any case, when we step by here, an autoclaim should be performed right after!
        pinfo = session["personinfo"]
        pinfo['should_check_to_autoclaim'] = True
        session.dirty = True
        last_visited_pid = webapi.history_get_last_visited_pid(session['personinfo']['visit_diary'])
        # if already logged in then redirect the user to the page he was viewing
        if pid != -1:
            redirect_pid = pid
            if last_visited_pid:
                redirect_pid = last_visited_pid
            redirect_to_url(req, '%s/author/manage_profile/%s' % (CFG_SITE_URL, str(redirect_pid)))
        else:
            # get name strings and email addresses from SSO/Oauth logins: {'system':{'name':[variant1,...,variantn], 'email':'blabla@bla.bla', 'pants_size':20}}
            remote_login_systems_info = webapi.get_remote_login_systems_info(req, login_info['logged_in_to_remote_systems'])
            # get union of recids that are associated to the ids from all the external systems: set(inspire_recids_list)
            recids = webapi.get_remote_login_systems_recids(req, login_info['logged_in_to_remote_systems'])
            # this is the profile with the biggest intersection of papers so it's more probable that this is the profile the user seeks
            probable_pid = webapi.match_profile(req, recids, remote_login_systems_info)
            # if not search_param and probable_pid > -1 and probable_pid == last_visited_pid:
            #     # try to assign the user to the profile he chose. If for some reason the profile is not available we assign him to an empty profile
            #     redirect_pid, profile_claimed = webapi.claim_profile(login_info['uid'], probable_pid)
            #     if profile_claimed:
            #         redirect_to_url(req, '%s/author/claim/action?associate_profile=True&redirect_pid=%s' % (CFG_SITE_URL, str(redirect_pid)))
            probable_profile_suggestion_info = None
            last_viewed_profile_suggestion_info = None
            if last_visited_pid > -1 and webapi.is_profile_available(last_visited_pid):
                # get information about the most probable profile and show it to the user
                last_viewed_profile_suggestion_info = webapi.get_profile_suggestion_info(req, last_visited_pid, recids)
            if probable_pid > -1 and webapi.is_profile_available(probable_pid):
                # get information about the most probable profile and show it to the user
                probable_profile_suggestion_info = webapi.get_profile_suggestion_info(req, probable_pid, recids )
            if not search_param:
                # we prefil the search with most relevant among the names that we get from external systems
                name_variants = webapi.get_name_variants_list_from_remote_systems_names(remote_login_systems_info)
                search_param = most_relevant_name(name_variants)
            body = body + TEMPLATE.tmpl_probable_profile_suggestion(probable_profile_suggestion_info, last_viewed_profile_suggestion_info, search_param)
            shown_element_functions = dict()
            shown_element_functions['button_gen'] = TEMPLATE.tmpl_choose_profile_search_button_generator()
            shown_element_functions['new_person_gen'] = TEMPLATE.tmpl_choose_profile_search_new_person_generator()
            shown_element_functions['show_search_bar'] = TEMPLATE.tmpl_choose_profile_search_bar()
            # show in the templates the column status (if profile is bound to a user or not)
            shown_element_functions['show_status'] = True
            # pass in the templates the data of the column status (if profile is bound to a user or not)
            # we might need the data without having to show them in the columne (fi merge_profiles
            shown_element_functions['pass_status'] = True
            # show search results to the user
            body = body + self.search_box(search_param, shown_element_functions)
            body = body + TEMPLATE.tmpl_choose_profile_footer()
        title = _(' ')
        return page(title=title,
                    metaheaderadd=choose_page.get_head().encode('utf-8'),
                    body=body,
                    req=req,
                    language=ln)
@staticmethod
def _arxiv_box(req, login_info, person_id, user_pid):
'''
Proccess and collect data for arXiv box
@param req: Apache request object
@type req: Apache request object
@param login_info: status of login in the following format: {'logged_in': True, 'uid': 2, 'logged_in_to_remote_systems':['Arxiv', ...]}
@type login_info: dict
@param login_info: person id of the current page's profile
@type login_info: int
@param login_info: person id of the user
@type login_info: int
@return: data required to built the arXiv box
@rtype: dict
'''
session = get_session(req)
pinfo = session["personinfo"]
arxiv_data = dict()
arxiv_data['view_own_profile'] = person_id == user_pid
# if the user is not a guest and he is connected through arXiv
arxiv_data['login'] = login_info['logged_in']
arxiv_data['user_pid'] = user_pid
arxiv_data['user_has_pid'] = user_pid != -1
# if the profile the use is logged in is the same with the profile of the page that the user views
arxiv_data['view_own_profile'] = user_pid == person_id
return arxiv_data
@staticmethod
def _orcid_box(arxiv_logged_in, person_id, user_pid, ulevel):
'''
Proccess and collect data for orcid box
@param req: Apache request object
@type req: Apache request object
@param arxiv_logged_in: shows if the user is logged in through arXiv or not
@type arxiv_logged_in: boolean
@param person_id: person id of the current page's profile
@type person_id: int
@param user_pid: person id of the user
@type user_pid: int
@param ulevel: user's level
@type ulevel: string
@return: data required to built the orcid box
@rtype: dict
'''
orcid_data = dict()
orcid_data['arxiv_login'] = arxiv_logged_in
orcid_data['orcids'] = None
orcid_data['add_power'] = False
orcid_data['own_profile'] = False
orcid_data['pid'] = person_id
# if the profile the use is logged in is the same with the profile of the page that the user views
if person_id == user_pid:
orcid_data['own_profile'] = True
# if the user is an admin then he can add an existing orcid to the profile
if ulevel == "admin":
orcid_data['add_power'] = True
orcids = webapi.get_orcids_by_pid(person_id)
if orcids:
orcid_data['orcids'] = orcids
return orcid_data
@staticmethod
def _autoclaim_papers_box(req, person_id, user_pid, remote_logged_in_systems):
'''
Proccess and collect data for orcid box
@param req: Apache request object
@type req: Apache request object
@param person_id: person id of the current page's profile
@type person_id: int
@param user_pid: person id of the user
@type user_pid: int
@param remote_logged_in_systems: the remote logged in systems
@type remote_logged_in_systems: list
@return: data required to built the autoclaim box
@rtype: dict
'''
autoclaim_data = dict()
# if no autoclaim should occur or had occured and results should be shown then the box should remain hidden
autoclaim_data['hidden'] = True
autoclaim_data['person_id'] = person_id
# if the profile the use is logged in is the same with the profile of the page that the user views
if person_id == user_pid:
recids_to_autoclaim = webapi.get_remote_login_systems_recids(req, remote_logged_in_systems)
autoclaim_data['hidden'] = False
autoclaim_data['num_of_claims'] = len(recids_to_autoclaim)
return autoclaim_data
############################################
# New autoclaim functions #
############################################
def generate_autoclaim_data(self, req, form):
    '''
    Ajax handler: autoclaim for the given author every paper found in the
    remote systems he is logged in to, and return (or write) the rendered
    autoclaim box as a JSON response.

    @param req: Apache request object
    @type req: Apache request object
    @param form: POST request params; must carry a 'jsondata' field with a
        'personId' key
    @type form: dict
    '''
    # Abort if the simplejson module isn't available
    assert CFG_JSON_AVAILABLE, "Json not available"
    # Fail if no json data exists in the Ajax request
    if not form.has_key('jsondata'):
        return self._fail(req, apache.HTTP_NOT_FOUND)

    json_data = json.loads(str(form['jsondata']))
    json_data = json_unicode_to_utf8(json_data)
    try:
        pid = int(json_data['personId'])
    except (KeyError, TypeError, ValueError):
        # narrowed from a bare except: only parameter-shape errors expected
        raise NotImplementedError("Some error with the parameter from the Ajax request occured.")

    webapi.session_bareinit(req)
    session = get_session(req)
    pinfo = session['personinfo']

    # If autoclaim was done already and no new remote systems exist
    # in order to autoclaim new papers send the cached result
    if not pinfo['orcid']['import_pubs'] and pinfo['autoclaim']['res'] is not None:
        autoclaim_data = pinfo['autoclaim']['res']
        json_response = {'resultCode': 1, 'result': TEMPLATE.tmpl_autoclaim_box(autoclaim_data, CFG_SITE_LANG, add_box=False, loading=False)}
        return json.dumps(json_response)

    external_pubs_association = pinfo['autoclaim']['external_pubs_association']
    autoclaim_ticket = pinfo['autoclaim']['ticket']
    ulevel = pinfo['ulevel']
    uid = getUid(req)

    params = WebInterfaceBibAuthorIDClaimPages.get_params_to_check_login_info(session)
    login_status = webapi.get_login_info(uid, params)
    remote_systems = login_status['logged_in_to_remote_systems']

    papers_to_autoclaim = set(webapi.get_papers_from_remote_systems(remote_systems, params, external_pubs_association))
    # Fetch the author's confirmed papers once (previously queried twice).
    claimed_recids = set(rec for _, _, rec in get_claimed_papers_of_author(pid))
    already_claimed_recids = claimed_recids & papers_to_autoclaim
    papers_to_autoclaim = papers_to_autoclaim - claimed_recids

    for paper in papers_to_autoclaim:
        operation_parts = {'pid': pid,
                           'action': 'assign',
                           'bibrefrec': str(paper)}
        operation_to_be_added = webapi.construct_operation(operation_parts, pinfo, uid)
        if operation_to_be_added is None:
            # In case the operation could not be created (because of an
            # erroneous bibrefrec) ignore it and continue with the rest
            continue
        webapi.add_operation_to_ticket(operation_to_be_added, autoclaim_ticket)

    additional_info = {'first_name': '', 'last_name': '', 'email': '',
                       'comments': 'Assigned automatically when autoclaim was triggered.'}
    userinfo = webapi.fill_out_userinfo(additional_info, uid, req.remote_ip, ulevel, strict_check=False)
    webapi.commit_operations_from_ticket(autoclaim_ticket, userinfo, uid, ulevel)

    autoclaim_data = dict()
    autoclaim_data['hidden'] = False
    autoclaim_data['person_id'] = pid
    # Operations that carry an 'execution_result' were committed successfully;
    # union with the papers that had been claimed before autoclaim ran.
    autoclaim_data['successfull_recids'] = set(op['rec'] for op in webapi.get_ticket_status(autoclaim_ticket) if 'execution_result' in op) | already_claimed_recids
    # clean_ticket drops the executed operations, so whatever remains on the
    # ticket afterwards is the set of failed claims.
    webapi.clean_ticket(autoclaim_ticket)
    autoclaim_data['unsuccessfull_recids'] = [op['rec'] for op in webapi.get_ticket_status(autoclaim_ticket)]
    autoclaim_data['num_of_unsuccessfull_recids'] = len(autoclaim_data['unsuccessfull_recids'])

    # Map each record id to its title (key name kept for template
    # compatibility). .items() instead of the py2-only .iteritems().
    autoclaim_data['recids_to_external_ids'] = dict()
    for (ext_system, ext_id), rec in external_pubs_association.items():
        autoclaim_data['recids_to_external_ids'][rec] = get_title_of_paper(rec)

    # cache the result in the session
    pinfo['autoclaim']['res'] = autoclaim_data
    if pinfo['orcid']['import_pubs']:
        pinfo['orcid']['import_pubs'] = False
    session.dirty = True

    json_response = {'resultCode': 1, 'result': TEMPLATE.tmpl_autoclaim_box(autoclaim_data, CFG_SITE_LANG, add_box=False, loading=False)}
    req.write(json.dumps(json_response))
@staticmethod
def get_params_to_check_login_info(session):
    '''
    Collect, per remote system, the session data needed to determine the
    user's login state on that system.

    @param session: the user's session
    @type session: dict-like session object
    @return: mapping of remote system name ('arXiv', 'orcid') to the
        parameters its login check requires (or None when unavailable)
    @rtype: dict
    '''
    def get_params_to_check_login_info_of_arxiv(session):
        # 'user_info' only exists once the user has logged in via arXiv SSO.
        try:
            return session['user_info']
        except KeyError:
            return None

    def get_params_to_check_login_info_of_orcid(session):
        pinfo = session['personinfo']
        try:
            pinfo['orcid']['has_orcid_id'] = bool(get_orcid_id_of_author(pinfo['pid'])[0][0] and pinfo['orcid']['import_pubs'])
        except Exception:
            # narrowed from a bare except: any lookup failure simply means
            # "no usable orcid id for this author"
            pinfo['orcid']['has_orcid_id'] = False
        session.dirty = True
        return pinfo['orcid']

    get_params_for_remote_system = {'arXiv': get_params_to_check_login_info_of_arxiv,
                                    'orcid': get_params_to_check_login_info_of_orcid}

    params = dict()
    # .items() instead of the py2-only .iteritems() (works on both versions)
    for system, get_params in get_params_for_remote_system.items():
        params[system] = get_params(session)
    return params
@staticmethod
def _claim_paper_box(person_id):
    '''
    Collect the data needed to render the claim-paper box.

    @param person_id: person id of the current page's profile
    @type person_id: int
    @return: data required to build the claim paper box
    @rtype: dict
    '''
    canonical = webapi.get_canonical_id_from_person_id(person_id)
    return {'canonical_id': str(canonical)}
@staticmethod
def _support_box():
'''
Proccess and collect data for support box
@return: data required to built the support box
@rtype: dict
'''
support_data = dict()
return support_data
@staticmethod
def _merge_box(person_id):
    '''
    Collect the data needed to render the merge box.

    @param person_id: person id of the current page's profile
    @type person_id: int
    @return: data required to build the merge box ('search_param' and
        'canonical_id')
    @rtype: dict
    '''
    merge_data = dict()
    # Fetch the canonical id once (it was previously queried twice).
    canonical_id = webapi.get_canonical_id_from_person_id(person_id)
    # Default search parameter is the canonical id; prefer the surname of
    # the most relevant name variant when one exists.
    search_param = canonical_id
    name_variants = [element[0] for element in webapi.get_person_names_from_id(person_id)]
    relevant_name = most_relevant_name(name_variants)
    if relevant_name:
        search_param = relevant_name.split(",")[0]

    merge_data['search_param'] = search_param
    merge_data['canonical_id'] = canonical_id
    return merge_data
@staticmethod
def _internal_ids_box(person_id, user_pid, ulevel):
    '''
    Collect the data needed to render the internal ids box.

    (Docstring fixed: it previously described the external_ids box and
    documented a nonexistent parameter.)

    @param person_id: person id of the current page's profile
    @type person_id: int
    @param user_pid: person id of the user
    @type user_pid: int
    @param ulevel: user's level
    @type ulevel: string
    @return: data required to build the internal ids box
    @rtype: dict
    '''
    internal_ids_data = dict()
    internal_ids_data['uid'], internal_ids_data['old_uids'] = webapi.get_internal_user_id_from_person_id(person_id)
    internal_ids_data['person_id'] = person_id
    internal_ids_data['user_pid'] = user_pid
    internal_ids_data['ulevel'] = ulevel
    return internal_ids_data
@staticmethod
def _external_ids_box(person_id, user_pid, ulevel):
    '''
    Collect the data needed to render the external ids box.

    (Local dict renamed from the misleading 'internal_ids_data'; docstring
    fixed to document the actual parameters.)

    @param person_id: person id of the current page's profile
    @type person_id: int
    @param user_pid: person id of the user
    @type user_pid: int
    @param ulevel: user's level
    @type ulevel: string
    @return: data required to build the external ids box
    @rtype: dict
    '''
    external_ids_data = dict()
    external_ids_data['ext_ids'] = webapi.get_external_ids_from_person_id(person_id)
    external_ids_data['person_id'] = person_id
    external_ids_data['user_pid'] = user_pid
    external_ids_data['ulevel'] = ulevel
    return external_ids_data
@staticmethod
def _hepnames_box(person_id):
    '''
    Fetch the HepNames data used to render the hepnames box.

    @param person_id: person id of the current page's profile
    @type person_id: int
    @return: whatever webapi.get_hepnames returns for this author
    '''
    hepnames_data = webapi.get_hepnames(person_id)
    return hepnames_data
def tickets_admin(self, req, form):
    '''
    Render the admin page listing all persons with open RT tickets.

    @param req: Apache request object
    @type req: Apache request object
    @param form: GET/POST request params
    @type form: dict
    '''
    argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})
    ln = argd['ln']
    webapi.session_bareinit(req)
    no_access = self._page_access_permission_wall(req, req_level='admin')
    if no_access:
        return no_access

    session = get_session(req)
    pinfo = session['personinfo']
    cname = ''
    is_owner = False
    last_visited_pid = webapi.history_get_last_visited_pid(pinfo['visit_diary'])
    if last_visited_pid is not None:
        cname = webapi.get_canonical_id_from_person_id(last_visited_pid)
        is_owner = self._is_profile_owner(last_visited_pid)

    menu = WebProfileMenu(str(cname), "open_tickets", ln, is_owner, self._is_admin(pinfo))
    title = "Open RT tickets"
    profile_page = WebProfilePage("help", title, no_cache=True)
    profile_page.add_profile_menu(menu)

    # Decorate each (pid, ticket) pair with display name and redirect link.
    # (Replaces the former remove-from-copy/append loop, which was
    # accidentally O(n^2) due to list.remove on every element.)
    tickets = [[webapi.get_most_frequent_name_from_pid(int(t[0])),
                webapi.get_person_redirect_link(t[0]), t[0], t[1]]
               for t in webapi.get_persons_with_open_tickets_list()]

    content = TEMPLATE.tmpl_tickets_admin(tickets)
    content = TEMPLATE.tmpl_person_detail_layout(content)
    body = profile_page.get_wrapped_body(content)

    return page(title=title,
                metaheaderadd=profile_page.get_head().encode('utf-8'),
                body=body.encode('utf-8'),
                req=req,
                language=ln,
                show_title_p=False)
def help(self, req, form):
    '''
    Render the static help page of the author module.

    @param req: Apache request object
    @type req: Apache request object
    @param form: GET/POST request params
    @type form: dict
    '''
    ln = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})['ln']
    _ = gettext_set_language(ln)
    if not CFG_INSPIRE_SITE:
        return page_not_authorized(req, text=_("This page is not accessible directly."))

    webapi.session_bareinit(req)
    pinfo = get_session(req)['personinfo']

    # Resolve the last visited profile so the menu can link back to it.
    canonical_name = ''
    owns_profile = False
    last_pid = webapi.history_get_last_visited_pid(pinfo['visit_diary'])
    if last_pid is not None:
        canonical_name = webapi.get_canonical_id_from_person_id(last_pid)
        owns_profile = self._is_profile_owner(last_pid)

    title = "Help page"
    profile_page = WebProfilePage("help", title, no_cache=True)
    profile_page.add_profile_menu(
        WebProfileMenu(str(canonical_name), "help", ln, owns_profile, self._is_admin(pinfo)))

    body = profile_page.get_wrapped_body(TEMPLATE.tmpl_help_page())
    return page(title=title,
                metaheaderadd=profile_page.get_head().encode('utf-8'),
                body=body.encode('utf-8'),
                req=req,
                language=ln,
                show_title_p=False)
def export(self, req, form):
    '''
    Generate JSONized export of Person data.

    @param req: Apache request object
    @type req: Apache request object
    @param form: GET/POST request params ('userid' and a 'request' filter)
    @type form: dict
    @return: JSON string with 'nickname', 'claims' and 'digest', or a short
        error-token string on failure
    @rtype: str
    '''
    argd = wash_urlargd(
        form,
        {'ln': (str, CFG_SITE_LANG),
         'request': (str, None),
         'userid': (str, None)})

    if not CFG_JSON_AVAILABLE:
        return "500_json_not_found__install_package"

    userid = argd['userid']
    if not userid:
        return "404_user_not_found"
    request = argd['request'] or None

    # find user from ID
    user_email = get_email_from_username(userid)
    if user_email == userid:
        # get_email_from_username echoes its input when no user matches
        return "404_user_not_found"

    uid = get_uid_from_email(user_email)
    uinfo = collect_user_info(uid)
    # find person by uid
    pid = webapi.get_pid_from_uid(uid)
    # find papers by pid that are confirmed through a human.
    papers = webapi.get_papers_by_person_id(pid, 2)
    # filter by request param, e.g. arxiv
    if not request:
        return "404__no_filter_selected"
    if request not in VALID_EXPORT_FILTERS:
        return "500_filter_invalid"

    # NOTE(review): only the 'arxiv' filter is implemented; any other valid
    # filter silently returns None (same as the original behaviour).
    if request == "arxiv":
        query = "(recid:"
        query += " OR recid:".join(papers)
        query += ") AND 037:arxiv"
        db_docs = perform_request_search(p=query, rg=0)
        nickmail = ""
        nickname = ""
        db_arxiv_ids = []
        try:
            nickname = uinfo["nickname"]
        except KeyError:
            pass
        if not nickname:
            try:
                nickmail = uinfo["email"]
            except KeyError:
                nickmail = user_email
            nickname = nickmail
        db_arxiv_ids = get_fieldvalues(db_docs, "037__a")

        construct = {"nickname": nickname,
                     "claims": ";".join(db_arxiv_ids)}
        jsondmp = json.dumps(construct)
        # Sign the payload so the consumer can verify its authenticity.
        signature = webapi.sign_assertion("arXiv", jsondmp)
        construct["digest"] = signature
        return json.dumps(construct)
# The bare /author/claim URL ("index") is served by __call__ as well.
index = __call__
class WebInterfaceBibAuthorIDManageProfilePages(WebInterfaceDirectory):
    '''
    Web interface for an author's profile-management pages: the landing
    page, ORCID publication import, HepNames association and ORCID
    suggestion (plus their Ajax variants).
    '''
    _exports = ['',
                'import_orcid_pubs',
                'connect_author_with_hepname',
                'connect_author_with_hepname_ajax',
                'suggest_orcid',
                'suggest_orcid_ajax']

    def _lookup(self, component, path):
        '''
        This handler parses dynamic URLs:
        - /author/profile/1332 shows the page of author with id: 1332
        - /author/profile/100:5522,1431 shows the page of the author
        identified by the bibrefrec: '100:5522,1431'
        '''
        if not component in self._exports:
            return WebInterfaceBibAuthorIDManageProfilePages(component), path

    def _is_profile_owner(self, pid):
        # True when the resolved profile belongs to the given person id.
        return self.person_id == int(pid)

    def _is_admin(self, pinfo):
        # True when the session user has admin rights in the author module.
        return pinfo['ulevel'] == 'admin'

    def __init__(self, identifier=None):
        '''
        Constructor of the web interface.

        @param identifier: identifier of an author. Can be one of:
            - an author id: e.g. "14"
            - a canonical id: e.g. "J.R.Ellis.1"
            - a bibrefrec: e.g. "100:1442,155"
        @type identifier: str
        '''
        self.person_id = -1   # -1 is a non valid author identifier

        if identifier is None or not isinstance(identifier, str):
            self.original_identifier = " "
            return

        self.original_identifier = identifier

        # check if it's a canonical id: e.g. "J.R.Ellis.1"
        try:
            pid = int(identifier)
        except ValueError:
            pid = int(webapi.get_person_id_from_canonical_id(identifier))
        if pid >= 0:
            self.person_id = pid
            return

        # check if it's an author id: e.g. "14"
        try:
            pid = int(identifier)
            if webapi.author_has_papers(pid):
                self.person_id = pid
                return
        except ValueError:
            pass

        # check if it's a bibrefrec: e.g. "100:1442,155"
        if webapi.is_valid_bibref(identifier):
            pid = int(webapi.get_person_id_from_paper(identifier))
            if pid >= 0:
                self.person_id = pid
                return

    def __call__(self, req, form):
        '''
        Generate SSO landing/author management page

        @param req: Apache request object
        @type req: Apache request object
        @param form: GET/POST request params
        @type form: dict
        '''
        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        ulevel = pinfo['ulevel']
        person_id = self.person_id
        uid = getUid(req)
        pinfo['claim_in_process'] = True

        argd = wash_urlargd(form, {
            'ln': (str, CFG_SITE_LANG),
            'verbose': (int, 0)})

        debug = "verbose" in argd and argd["verbose"] > 0
        ln = argd['ln']
        _ = gettext_set_language(ln)

        if not CFG_INSPIRE_SITE or self.person_id is None:
            return page_not_authorized(req, text=_("This page is not accessible directly."))

        if person_id < 0:
            return self._error_page(req, message=("Identifier %s is not a valid person identifier or does not exist anymore!" % self.original_identifier))

        # log the visit
        webapi.history_log_visit(req, 'manage_profile', pid=person_id)

        # store the arxiv papers the user owns
        if uid > 0 and not pinfo['arxiv_status']:
            uinfo = collect_user_info(req)
            arxiv_papers = list()
            if 'external_arxivids' in uinfo and uinfo['external_arxivids']:
                arxiv_papers = uinfo['external_arxivids'].split(';')
            if arxiv_papers:
                webapi.add_arxiv_papers_to_author(arxiv_papers, person_id)
            pinfo['arxiv_status'] = True

        params = WebInterfaceBibAuthorIDClaimPages.get_params_to_check_login_info(session)
        login_info = webapi.get_login_info(uid, params)

        title_message = _('Profile management')

        # NOTE(review): ssl_param is computed but never used below — confirm
        # whether it can be dropped.
        ssl_param = 0
        if req.is_https():
            ssl_param = 1

        # Create Wrapper Page Markup
        cname = webapi.get_canonical_id_from_person_id(self.person_id)
        # presumably get_canonical_id_from_person_id echoes the pid back when
        # no canonical id exists — TODO confirm
        if cname == self.person_id:
            return page_not_authorized(req, text=_("This page is not accessible directly."))

        menu = WebProfileMenu(cname, "manage_profile", ln, self._is_profile_owner(pinfo['pid']), self._is_admin(pinfo))
        profile_page = WebProfilePage("manage_profile", webapi.get_longest_name_from_pid(self.person_id), no_cache=True)
        profile_page.add_profile_menu(menu)

        # Globals consumed by the client-side box-loading machinery.
        gboxstatus = self.person_id
        gpid = self.person_id
        gNumOfWorkers = 3   # to do: read it from conf file
        gReqTimeout = 3000
        gPageTimeout = 12000

        profile_page.add_bootstrapped_data(json.dumps({
            "other": "var gBOX_STATUS = '%s';var gPID = '%s'; var gNumOfWorkers= '%s'; var gReqTimeout= '%s'; var gPageTimeout= '%s';" % (gboxstatus, gpid, gNumOfWorkers, gReqTimeout, gPageTimeout),
            "backbone": """
            (function(ticketbox) {
                var app = ticketbox.app;
                app.userops.set(%s);
                app.bodyModel.set({userLevel: "%s"});
            })(ticketbox);""" % (WebInterfaceAuthorTicketHandling.bootstrap_status(pinfo, "user"), ulevel)
        }))

        if debug:
            profile_page.add_debug_info(pinfo)

        user_pid = webapi.get_user_pid(login_info['uid'])
        person_data = webapi.get_person_info_by_pid(person_id)

        # proccess and collect data for every box [LEGACY]
        arxiv_data = WebInterfaceBibAuthorIDClaimPages._arxiv_box(req, login_info, person_id, user_pid)
        orcid_data = WebInterfaceBibAuthorIDClaimPages._orcid_box(arxiv_data['login'], person_id, user_pid, ulevel)
        claim_paper_data = WebInterfaceBibAuthorIDClaimPages._claim_paper_box(person_id)
        support_data = WebInterfaceBibAuthorIDClaimPages._support_box()
        # The id boxes are admin-only.
        ext_ids_data = None
        int_ids_data = None
        if ulevel == 'admin':
            ext_ids_data = WebInterfaceBibAuthorIDClaimPages._external_ids_box(person_id, user_pid, ulevel)
            int_ids_data = WebInterfaceBibAuthorIDClaimPages._internal_ids_box(person_id, user_pid, ulevel)
        autoclaim_data = WebInterfaceBibAuthorIDClaimPages._autoclaim_papers_box(req, person_id, user_pid, login_info['logged_in_to_remote_systems'])
        merge_data = WebInterfaceBibAuthorIDClaimPages._merge_box(person_id)
        hepnames_data = WebInterfaceBibAuthorIDClaimPages._hepnames_box(person_id)

        content = ''
        # display status for any previously attempted merge
        if pinfo['merge_info_message']:
            teaser_key, message = pinfo['merge_info_message']
            content += TEMPLATE.tmpl_merge_transaction_box(teaser_key, [message])
            # Message is shown only once; clear it from the session.
            pinfo['merge_info_message'] = None
            session.dirty = True

        content += TEMPLATE.tmpl_profile_management(ln, person_data, arxiv_data,
                                                    orcid_data, claim_paper_data,
                                                    int_ids_data, ext_ids_data,
                                                    autoclaim_data, support_data,
                                                    merge_data, hepnames_data)
        body = profile_page.get_wrapped_body(content)

        return page(title=title_message,
                    metaheaderadd=profile_page.get_head().encode('utf-8'),
                    body=body.encode('utf-8'),
                    req=req,
                    language=ln,
                    show_title_p=False)

    def import_orcid_pubs(self, req, form):
        '''
        Fetch the author's DOIs from ORCID, flag them for import in the
        session and redirect back to the profile management page.

        @param req: Apache request object
        @type req: Apache request object
        @param form: GET/POST request params (unused)
        @type form: dict
        '''
        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        orcid_info = pinfo['orcid']

        # author should have already an orcid if this method was triggered
        try:
            orcid_id = get_orcid_id_of_author(pinfo['pid'])[0][0]
        except IndexError:
            # weird, no orcid id in the database? Let's not do anything...
            orcid_id = None

        orcid_dois = get_dois_from_orcid(orcid_id)

        # TODO: what to do in case some ORCID server error occurs?
        if orcid_id is None or orcid_dois is None:
            # redirect_to_url terminates the request handling here
            redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_SECURE_URL, pinfo['pid']))

        # TODO: it would be smarter if:
        # 1. we save in the db the orcid_dois
        # 2. to expire only the external pubs box in the profile page
        webauthorapi.expire_all_cache_for_personid(pinfo['pid'])

        orcid_info['imported_pubs'] = orcid_dois
        orcid_info['import_pubs'] = True
        session.dirty = True

        redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_SECURE_URL, pinfo['pid']))

    def connect_author_with_hepname(self, req, form):
        '''
        Associate the given canonical name with a HepNames record and
        redirect back to the page the user came from.

        @param req: Apache request object
        @type req: Apache request object
        @param form: GET/POST request params ('cname' and 'hepname')
        @type form: dict
        '''
        argd = wash_urlargd(form, {'cname': (str, None),
                                   'hepname': (str, None),
                                   'ln': (str, CFG_SITE_LANG)})
        ln = argd['ln']
        if argd['cname'] is not None:
            cname = argd['cname']
        else:
            return self._error_page(req, ln, "Fatal: cannot associate a hepname without a person id.")
        if argd['hepname'] is not None:
            hepname = argd['hepname']
        else:
            return self._error_page(req, ln, "Fatal: cannot associate an author with a non valid hepname.")

        webapi.connect_author_with_hepname(cname, hepname)
        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        # Return the user to whichever author page he was on before.
        last_visited_page = webapi.history_get_last_visited_url(pinfo['visit_diary'], just_page=True)
        redirect_to_url(req, "%s/author/%s/%s" % (CFG_SITE_URL, last_visited_page, cname))

    def connect_author_with_hepname_ajax(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            cname = json_data['cname']
            hepname = json_data['hepname']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        # NOTE(review): non-admins connect directly while admins go through
        # add_cname_to_hepname_record — confirm the condition isn't inverted.
        if not self._is_admin(pinfo):
            webapi.connect_author_with_hepname(cname, hepname)
        else:
            uid = getUid(req)
            add_cname_to_hepname_record(cname, hepname, uid)

    def suggest_orcid(self, req, form):
        '''
        Associate a valid ORCID with the given author and redirect to the
        profile management page.

        @param req: Apache request object
        @type req: Apache request object
        @param form: GET/POST request params ('orcid' and 'pid')
        @type form: dict
        '''
        argd = wash_urlargd(form, {'orcid': (str, None),
                                   'pid': (int, -1),
                                   'ln': (str, CFG_SITE_LANG)})
        ln = argd['ln']
        if argd['pid'] > -1:
            pid = argd['pid']
        else:
            return self._error_page(req, ln, "Fatal: cannot associate an orcid without a person id.")
        if argd['orcid'] is not None and is_valid_orcid(argd['orcid']):
            orcid = argd['orcid']
        else:
            return self._error_page(req, ln, "Fatal: cannot associate an author with a non valid ORCiD.")

        webapi.connect_author_with_orcid(webapi.get_canonical_id_from_person_id(pid), orcid)

        redirect_to_url(req, "%s/author/manage_profile/%s" % (CFG_SITE_URL, pid))

    def suggest_orcid_ajax(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            orcid = json_data['orcid']
            pid = json_data['pid']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        if not is_valid_orcid(orcid):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.connect_author_with_orcid(webapi.get_canonical_id_from_person_id(pid), orcid)

    def _fail(self, req, code):
        # Set the HTTP status code and produce an empty response body.
        req.status = code
        return

    def _error_page(self, req, ln=CFG_SITE_LANG, message=None, intro=True):
        '''
        Create a page that contains a message explaining the error.

        @param req: Apache Request Object
        @type req: Apache Request Object
        @param ln: language
        @type ln: string
        @param message: message to be displayed
        @type message: string
        '''
        body = []

        _ = gettext_set_language(ln)

        if not message:
            message = "No further explanation available. Sorry."

        if intro:
            body.append(_("<p>We're sorry. An error occurred while "
                          "handling your request. Please find more information "
                          "below:</p>"))
        body.append("<p><strong>%s</strong></p>" % message)

        return page(title=_("Notice"),
                    body="\n".join(body),
                    description="%s - Internal Error" % CFG_SITE_NAME,
                    keywords="%s, Internal Error" % CFG_SITE_NAME,
                    language=ln,
                    req=req)

    # The bare manage_profile URL ("index") is served by __call__ as well.
    index = __call__
class WebInterfaceAuthorTicketHandling(WebInterfaceDirectory):
    '''
    Ajax endpoints that manipulate a claim ticket stored in the user's
    session: inspect its status, add/modify/remove operations, and commit
    or abort the whole ticket.
    '''
    _exports = ['get_status',
                'update_status',
                'add_operation',
                'modify_operation',
                'remove_operation',
                'commit',
                'abort']

    @staticmethod
    def bootstrap_status(pinfo, on_ticket):
        '''
        Function used for generating get_status json bootstrapping.

        @param pinfo: person_info
        @type req: dict
        @param on_ticket: ticket target
        @type on_ticket: str

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"

        author_ticketing = WebInterfaceAuthorTicketHandling()

        ticket = author_ticketing._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            # No such ticket: bootstrap with an empty JSON object.
            return "{}"

        ticket_status = webapi.get_ticket_status(ticket)

        return json.dumps(ticket_status)

    def get_status(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        ticket_status = webapi.get_ticket_status(ticket)

        session.dirty = True

        req.content_type = 'application/json'
        req.write(json.dumps(ticket_status))

    def update_status(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.update_ticket_status(ticket)

        session.dirty = True

    def add_operation(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            operation_parts = {'pid': int(json_data['pid']),
                               'action': json_data['action'],
                               'bibrefrec': json_data['bibrefrec']}
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        uid = getUid(req)

        operation_to_be_added = webapi.construct_operation(operation_parts, pinfo, uid)
        if operation_to_be_added is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.add_operation_to_ticket(operation_to_be_added, ticket)

        session.dirty = True

    def modify_operation(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            operation_parts = {'pid': int(json_data['pid']),
                               'action': json_data['action'],
                               'bibrefrec': json_data['bibrefrec']}
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        uid = getUid(req)

        operation_to_be_modified = webapi.construct_operation(operation_parts, pinfo, uid, should_have_bibref=False)
        if operation_to_be_modified is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        operation_is_modified = webapi.modify_operation_from_ticket(operation_to_be_modified, ticket)
        if not operation_is_modified:
            # Operation couldn't be modified because it doesn't exist in the
            # ticket. Wrong parameters were given hence we should fail!
            return self._fail(req, apache.HTTP_NOT_FOUND)

        session.dirty = True

    def remove_operation(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            operation_parts = {'pid': int(json_data['pid']),
                               'action': json_data['action'],
                               'bibrefrec': json_data['bibrefrec']}
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        uid = getUid(req)

        operation_to_be_removed = webapi.construct_operation(operation_parts, pinfo, uid)
        if operation_to_be_removed is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        operation_is_removed = webapi.remove_operation_from_ticket(operation_to_be_removed, ticket)
        if not operation_is_removed:
            # Operation couldn't be removed because it doesn't exist in the
            # ticket. Wrong parameters were given hence we should fail!
            return self._fail(req, apache.HTTP_NOT_FOUND)

        session.dirty = True

    def commit(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            additional_info = {'first_name': json_data.get('first_name', "Default"),
                               'last_name': json_data.get('last_name', "Default"),
                               'email': json_data.get('email', "Default"),
                               'comments': json_data['comments']}
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']
        ulevel = pinfo['ulevel']
        uid = getUid(req)
        user_is_guest = isGuestUser(uid)

        # For logged-in users prefer the personal data from the SSO session
        # over whatever came in the Ajax payload.
        if not user_is_guest:
            try:
                additional_info['first_name'] = session['user_info']['external_firstname']
                additional_info['last_name'] = session['user_info']['external_familyname']
                additional_info['email'] = session['user_info']['email']
            except KeyError:
                additional_info['first_name'] = additional_info['last_name'] = additional_info['email'] = str(uid)

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        # When a guest is claiming we should not commit if he
        # doesn't provide us his full personal information
        strict_check = user_is_guest
        userinfo = webapi.fill_out_userinfo(additional_info, uid, req.remote_ip, ulevel, strict_check=strict_check)
        if userinfo is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.commit_operations_from_ticket(ticket, userinfo, uid, ulevel)

        session.dirty = True

    def abort(self, req, form):
        '''
        Function used for handling Ajax requests.

        @param req: apache request object
        @type req: apache request object
        @param form: parameters sent via Ajax request
        @type form: dict

        @return:
        @rtype: json data
        '''
        # Abort if the simplejson module isn't available
        assert CFG_JSON_AVAILABLE, "Json not available"
        # Fail if no json data exists in the Ajax request
        if not form.has_key('jsondata'):
            return self._fail(req, apache.HTTP_NOT_FOUND)

        json_data = json.loads(str(form['jsondata']))
        json_data = json_unicode_to_utf8(json_data)
        try:
            on_ticket = json_data['on']
        except:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        webapi.session_bareinit(req)
        session = get_session(req)
        pinfo = session['personinfo']

        ticket = self._get_according_ticket(on_ticket, pinfo)
        if ticket is None:
            return self._fail(req, apache.HTTP_NOT_FOUND)

        # When a user is claiming we should completely delete his ticket if he
        # aborts the claiming procedure
        delete_ticket = (on_ticket == 'user')
        webapi.abort_ticket(ticket, delete_ticket=delete_ticket)

        session.dirty = True

    def _get_according_ticket(self, on_ticket, pinfo):
        # Map the Ajax 'on' parameter to the corresponding session ticket;
        # returns None for unknown targets.
        ticket = None
        if on_ticket == 'user':
            ticket = pinfo['ticket']
        elif on_ticket == 'autoclaim':
            ticket = pinfo['autoclaim']['ticket']
        return ticket

    def _fail(self, req, code):
        # Set the HTTP status code and produce an empty response body.
        req.status = code
        return
class WebAuthorSearch(WebInterfaceDirectory):
    """
    Provides an interface to profile search using AJAX queries.
    """
    _exports = ['list',
                'details']

    # This class requires JSON libraries: fails at import time otherwise,
    # since every endpoint below emits JSON.
    # NOTE(review): assert is stripped under python -O -- confirm acceptable.
    assert CFG_JSON_AVAILABLE, "[WebAuthorSearch] JSON must be enabled."

    class QueryPerson(WebInterfaceDirectory):
        # Only the bare path is exported; any other URL component becomes a
        # fresh query via _lookup below.
        _exports = ['']

        # Queries shorter than this many characters are rejected.
        MIN_QUERY_LENGTH = 2
        # Whitelist of allowed query characters: word chars, whitespace,
        # dot, dash, comma and at-sign.
        QUERY_REGEX = re.compile(r"[\w\s\.\-,@]+$", re.UNICODE)

        def __init__(self, query=None):
            # query: search string taken from the URL path component
            self.query = query

        def _lookup(self, component, path):
            # Treat any unexported component as a query string,
            # e.g. /list/J.Ellis -> QueryPerson("J.Ellis")
            if component not in self._exports:
                return WebAuthorSearch.QueryPerson(component), path

        def __call__(self, req, form):
            """Return a JSON list of {"pid": <id>} for people matching the
            query; HTTP 400 when the query is too short or has illegal
            characters."""
            if self.query is None or len(self.query) < self.MIN_QUERY_LENGTH:
                req.status = apache.HTTP_BAD_REQUEST
                return "Query too short"
            if not self.QUERY_REGEX.match(self.query):
                req.status = apache.HTTP_BAD_REQUEST
                return "Invalid query."
            pid_results = [{"pid": pid[0]} for pid in webapi.search_person_ids_by_name(self.query)]
            req.content_type = 'application/json'
            return json.dumps(pid_results)

        # Request for index handled by __call__
        index = __call__

    def _JSON_received(self, form):
        # True when the request carries a 'jsondata' field; TypeError covers
        # form objects that do not support membership tests.
        try:
            return "jsondata" in form
        except TypeError:
            return False

    def _extract_JSON(self, form):
        # Decode the 'jsondata' payload; None on malformed JSON.
        try:
            json_data = json.loads(str(form['jsondata']))
            json_data = json_unicode_to_utf8(json_data)
            return json_data
        except ValueError:
            return None

    def _get_pid_details(self, pid):
        # Aggregate base info, name variants (with paper counts) and external
        # identifiers for one person id.
        details = webapi.get_person_info_by_pid(pid)
        details.update({
            "names": [{"name": x, "paperCount": y} for x, y in webapi.get_person_names_from_id(pid)],
            "externalIds": [{x: y} for x, y in webapi.get_external_ids_from_person_id(pid).items()]
        })
        # Expose the canonical name under the shorter 'cname' key.
        details['cname'] = details.pop("canonical_name", None)
        return details

    def details(self, req, form):
        """Return JSON details for each person id in the request's 'pids'
        list; HTTP 400 on missing or malformed input."""
        if self._JSON_received(form):
            try:
                json_data = self._extract_JSON(form)
                # json_data may be None here; subscripting then raises
                # TypeError, handled below.
                pids = json_data['pids']
                req.content_type = 'application/json'
                details = [self._get_pid_details(pid) for pid in pids]
                return json.dumps(details)
            except (TypeError, KeyError):
                req.status = apache.HTTP_BAD_REQUEST
                return "Invalid query."
        else:
            req.status = apache.HTTP_BAD_REQUEST
            return "Incorrect query format."

    # /.../list/<query> endpoint
    list = QueryPerson()
class WebInterfaceAuthor(WebInterfaceDirectory):
    '''
    Handles /author/* pages.

    Supplies the methods:
        /author/choose_profile
        /author/claim/
        /author/help
        /author/manage_profile
        /author/merge_profiles
        /author/profile/
        /author/search
        /author/ticket/
    '''
    _exports = ['',
                'choose_profile',
                'claim',
                'help',
                'manage_profile',
                'merge_profiles',
                'profile',
                'search',
                'search_ajax',
                'ticket']

    # NOTE(review): imported in the class body rather than at module level --
    # presumably to dodge a circular import; confirm before moving.
    from invenio.webauthorprofile_webinterface import WebAuthorPages

    # Sub-handler instances; several endpoints delegate to the claim pages.
    claim = WebInterfaceBibAuthorIDClaimPages()
    profile = WebAuthorPages()
    choose_profile = claim.choose_profile
    help = claim.help
    manage_profile = WebInterfaceBibAuthorIDManageProfilePages()
    merge_profiles = claim.merge_profiles
    search = claim.search
    search_ajax = WebAuthorSearch()
    ticket = WebInterfaceAuthorTicketHandling()

    def _lookup(self, component, path):
        # Any unexported component is treated as an author identifier and
        # resolved by __call__ below.
        if component not in self._exports:
            return WebInterfaceAuthor(component), path

    def __init__(self, component=None):
        # component: canonical id or numeric person id from the URL path
        self.path = component

    def __call__(self, req, form):
        """Redirect /author/<id> to the matching profile or to the search
        page when the id cannot be resolved."""
        if self.path is None or len(self.path) < 1:
            redirect_to_url(req, "%s/author/search" % CFG_BASE_URL)

        # Check if canonical id: e.g. "J.R.Ellis.1"
        pid = get_person_id_from_canonical_id(self.path)
        if pid >= 0:
            url = "%s/author/profile/%s" % (CFG_BASE_URL, get_person_redirect_link(pid))
            redirect_to_url(req, url, redirection_type=apache.HTTP_MOVED_PERMANENTLY)
            return
        else:
            # Not canonical; try a plain numeric person id.
            try:
                pid = int(self.path)
            except ValueError:
                # Neither id form: fall back to a name search.
                redirect_to_url(req, "%s/author/search?q=%s" % (CFG_BASE_URL, self.path))
                return
            else:
                if author_has_papers(pid):
                    # Prefer the canonical id in the redirect when valid.
                    cid = get_person_redirect_link(pid)
                    if is_valid_canonical_id(cid):
                        redirect_id = cid
                    else:
                        redirect_id = pid
                    url = "%s/author/profile/%s" % (CFG_BASE_URL, redirect_id)
                    redirect_to_url(req, url, redirection_type=apache.HTTP_MOVED_PERMANENTLY)
                    return

        redirect_to_url(req, "%s/author/search" % CFG_BASE_URL)
        return

    index = __call__
class WebInterfacePerson(WebInterfaceDirectory):
    '''
    Handles /person/* pages.

    Supplies the methods:
        /person/welcome
    '''
    _exports = ['welcome','update', 'you']

    def welcome(self, req, form):
        # Legacy entry point: forward to the author profile chooser.
        redirect_to_url(req, "%s/author/choose_profile" % CFG_SITE_SECURE_URL)

    def you(self, req, form):
        # Alias of /person/welcome.
        redirect_to_url(req, "%s/author/choose_profile" % CFG_SITE_SECURE_URL)

    def update(self, req, form):
        """
        Generate hepnames update form

        Looks up a HepNames record by email (MARC 371__m) or by record id
        (IRN) and renders a pre-filled update form for it.
        """
        argd = wash_urlargd(form,
                            {'ln': (str, CFG_SITE_LANG),
                             'email': (str, ''),
                             'IRN': (str, ''),
                             })
        # Retrieve info for HEP name based on email or IRN
        recids = []
        if argd['email']:
            recids = perform_request_search(p="371__m:%s" % argd['email'], cc="HepNames")
        elif argd['IRN']:
            recids = perform_request_search(p="001:%s" % argd['IRN'], cc="HepNames")
        else:
            # Neither filter supplied: bounce to the collection page.
            # NOTE(review): presumably redirect_to_url raises and stops the
            # request here; otherwise the 'if not recids' branch below
            # redirects a second time -- confirm.
            redirect_to_url(req, "%s/collection/HepNames" % (CFG_SITE_URL))
        if not recids:
            redirect_to_url(req, "%s/collection/HepNames" % (CFG_SITE_URL))
        else:
            hepname_bibrec = get_bibrecord(recids[0])
            # Extract all info from recid that should be included in the form
            full_name = record_get_field_value(hepname_bibrec, tag="100", ind1="", ind2="", code="a")
            display_name = record_get_field_value(hepname_bibrec, tag="880", ind1="", ind2="", code="a")
            email = record_get_field_value(hepname_bibrec, tag="371", ind1="", ind2="", code="m")
            status = record_get_field_value(hepname_bibrec, tag="100", ind1="", ind2="", code="g")
            keynumber = record_get_field_value(hepname_bibrec, tag="970", ind1="", ind2="", code="a")
            try:
                # Key numbers look like "PREFIX-NNN"; keep only the tail.
                keynumber = keynumber.split('-')[1]
            except IndexError:
                pass
            research_field_list = record_get_field_values(hepname_bibrec, tag="650", ind1="1", ind2="7", code="a")
            institution_list = []
            for instance in record_get_field_instances(hepname_bibrec, tag="371", ind1="", ind2=""):
                # 371 fields carrying an "m" subfield are emails, not
                # affiliations -- skip them.
                if not instance or field_get_subfield_values(instance, "m"):
                    continue
                # One slot per subfield a/r/s/t/z -- meanings inferred from
                # the subfield codes; confirm against the template.
                institution_info = ["", "", "", "", ""]
                if field_get_subfield_values(instance, "a"):
                    institution_info[0] = field_get_subfield_values(instance, "a")[0]
                if field_get_subfield_values(instance, "r"):
                    institution_info[1] = field_get_subfield_values(instance, "r")[0]
                if field_get_subfield_values(instance, "s"):
                    institution_info[2] = field_get_subfield_values(instance, "s")[0]
                if field_get_subfield_values(instance, "t"):
                    institution_info[3] = field_get_subfield_values(instance, "t")[0]
                if field_get_subfield_values(instance, "z"):
                    institution_info[4] = field_get_subfield_values(instance, "z")[0]
                institution_list.append(institution_info)
            phd_advisor_list = record_get_field_values(hepname_bibrec, tag="701", ind1="", ind2="", code="a")
            experiment_list = record_get_field_values(hepname_bibrec, tag="693", ind1="", ind2="", code="e")
            web_page = record_get_field_value(hepname_bibrec, tag="856", ind1="1", ind2="", code="u")

            # Create form and pass as parameters all the content from the record
            body = TEMPLATE.tmpl_update_hep_name(full_name, display_name, email,
                                                 status, research_field_list,
                                                 institution_list, phd_advisor_list,
                                                 experiment_list, web_page)
            title = "HEPNames"
            return page(title=title,
                        metaheaderadd = TEMPLATE.tmpl_update_hep_name_headers(),
                        body=body,
                        req=req,
                        )
# pylint: enable=C0301
# pylint: enable=W0613
# --- stray dataset artifact converted to a comment; marks the boundary with an unrelated file (Indico Collaboration base) ---
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico; if not, see <http://www.gnu.org/licenses/>.
from indico.util.contextManager import ContextManager
import time
import pkg_resources
from persistent import Persistent
from hashlib import md5
from MaKaC.common.Counter import Counter
from MaKaC.common.utils import formatDateTime, parseDateTime
from MaKaC.common.timezoneUtils import getAdjustedDate, setAdjustedDate,\
datetimeToUnixTimeInt
from MaKaC.webinterface import wcomponents
from MaKaC.plugins import PluginsHolder
from MaKaC.errors import MaKaCError, NoReportError
from MaKaC.services.interface.rpc.common import ServiceError
from MaKaC.common.timezoneUtils import nowutc
from indico.core.logger import Logger
from MaKaC.common.indexes import IndexesHolder
from MaKaC.plugins.Collaboration.collaborationTools import CollaborationTools,\
MailTools
from MaKaC.plugins.Collaboration.urlHandlers import UHConfModifCollaboration
from indico.core.index import Catalog
from MaKaC.conference import Observer
from MaKaC.webinterface.common.tools import hasTags
from MaKaC.plugins.Collaboration import mail
from MaKaC.common.mail import GenericMailer
import os, inspect
from indico.modules.scheduler.client import Client
from indico.modules.scheduler.tasks import HTTPTask
from indico.util import json
from indico.util.date_time import now_utc
from MaKaC.common.fossilize import Fossilizable, fossilizes
from MaKaC.common.externalOperationsManager import ExternalOperationsManager
from BTrees.OOBTree import OOBTree
from MaKaC.plugins.Collaboration.fossils import ICSErrorBaseFossil, ICSSanitizationErrorFossil,\
ICSBookingBaseConfModifFossil, ICSBookingBaseIndexingFossil,\
ISpeakerWrapperBaseFossil
from MaKaC.conference import Contribution
class CSBookingManager(Persistent, Observer):
    """ Class for managing the bookings of a meeting.
        It will store the list of bookings. Adding / removing / editing bookings should be through this class.
    """

    # Observer opt-in flags: which event change notifications this manager
    # wants to receive from its owning conference.
    _shouldBeTitleNotified = True
    _shouldBeDateChangeNotified = True
    _shouldBeLocationChangeNotified = True
    _shouldBeDeletionNotified = True
def __init__(self, conf):
    """ Constructor for the CSBookingManager class.
        conf: a Conference object. The meeting that owns this CSBookingManager.
    """
    self._conf = conf
    # monotonically increasing id source for new bookings
    self._counter = Counter(1)
    # a dict where the bookings will be stored. The key will be the booking id, the value a CSBookingBase object.
    self._bookings = {}

    # an index of bookings by type. The key will be a booking type (string), the value a list of booking id
    self._bookingsByType = {}

    # an index of bookings to video services by event.uniqueId : video.uniqueId pairing.
    self._bookingsToVideoServices = OOBTree()

    # a list of ids with hidden bookings
    self._hiddenBookings = set()

    # an index of video services managers for each plugin. key: plugin name, value: list of users
    self._managers = {}

    # list of speaker wrapper for a conference
    self._speakerWrapperList = []
    self.updateSpeakerWrapperList()

    # Send email to managers when Electronic Agreement accepted
    self._notifyElectronicAgreementAnswer = True
def getOwner(self):
    """Return the Conference (meeting) that owns this booking manager."""
    return self._conf
def isCSAllowed(self, user=None):
    """Whether the event should show a Video Services tab.

    True when at least one plugin configured for this event type is active
    and, if *user* is given, manageable by that user (admin-only plugins
    are excluded for plain event managers).
    """
    plugins_by_type = CollaborationTools.getCollaborationPluginType().getOption("pluginsPerEventType").getValue()
    if not plugins_by_type:
        return False
    candidates = plugins_by_type[self._conf.getType()]
    return any(
        plugin.isActive() and (user is None or CollaborationTools.canUserManagePlugin(self._conf, plugin, user))
        for plugin in candidates
    )
def getAllowedPlugins(self):
    """Return the active plugins (Plugin objects) allowed for this event's
    type.

    NOTE(review): implicitly returns None -- not [] -- when the
    "pluginsPerEventType" option is unset; callers must cope with that.
    """
    plugins_by_type = CollaborationTools.getCollaborationPluginType().getOption("pluginsPerEventType").getValue()
    if plugins_by_type is None:
        return None
    allowed_here = plugins_by_type[self._conf.getType()]
    return [plugin for plugin in allowed_here if plugin.isActive()]
def getBookingList(self, sorted = False, filterByType = None, notify = False, onlyPublic = False):
    """ Returns a list of all the bookings.
        If sorted = True, the list of bookings will be sorted by id.
        If filterByType = None, all bookings are returned.
        Otherwise, just those of the type "filterByType" if filterByType is a string,
        or if it is a list of strings, those who have a type included in filterByType.
        If onlyPublic = True, hidden bookings are excluded.
        If notify = True, each returned booking that asks for view
        notifications gets _notifyOnView() called (errors logged, not raised).
    """
    # migration shim for objects persisted before the per-type index existed
    if not hasattr(self, "_bookingsByType"): #TODO: remove when safe
        self._bookingsByType = {}

    if filterByType is not None:
        if isinstance(filterByType, basestring):
            # single plugin name
            keys = self._bookingsByType.get(filterByType, [])
        elif isinstance(filterByType, list):
            # union of several plugin names
            keys = []
            for pluginName in filterByType:
                keys.extend(self._bookingsByType.get(pluginName, []))
        else:
            raise ValueError('Unexpected filterByType type: {}'.format(type(filterByType)))
    else:
        keys = self._bookings.keys()

    if onlyPublic and self.getHiddenBookings():
        # drop hidden bookings from the result
        keys = set(keys)
        keys = keys.difference(self.getHiddenBookings())
        keys = list(keys)

    if sorted:
        # booking ids are numeric strings; sort them numerically
        keys.sort(key = lambda k: int(k))

    # skip bookings whose linked session/contribution no longer exists
    bookingList = [self._bookings[k] for k in keys if not self._bookings[k].hasSessionOrContributionLink() or self._bookings[k].getLinkObject()]

    #we notify all the bookings that they have been viewed. If a booking doesn't need to be viewed, nothing will happen
    if notify:
        for booking in bookingList:
            if booking.needsToBeNotifiedOnView():
                try:
                    booking._notifyOnView()
                except Exception, e:
                    Logger.get('VideoServ').error("Exception while notifying to a booking that it is being viewed. Exception: " + str(e))

    return bookingList
def getBooking(self, id):
    """Return the booking with the given id, or None when unknown."""
    return self._bookings.get(id, None)
def getSingleBooking(self, type, notify = False):
    """ Returns the single booking of a plugin who only allows one booking.
        type: a string with the name of the plugin
        If the plugin actually allows multiple bookings, an exception will be thrown
        If the plugin has no booking, None will be returned.
        Otherwise the booking will be returned
        If notify = True, the booking's _notifyOnView() hook is invoked
        (failures are logged, never raised).
    """
    if CollaborationTools.getCSBookingClass(type)._allowMultiple:
        raise CollaborationException("Plugin type " + str(type) + " is not a single-booking plugin")
    blist = self._bookingsByType.get(type,[])
    if blist:
        # single-booking plugin: the first (only) id is the one
        booking = self._bookings[blist[0]]
        if notify:
            try:
                booking._notifyOnView()
            except Exception, e:
                Logger.get('VideoServ').error("Exception while notifying to a booking that it is being viewed. Exception: " + str(e))
        return booking
    else:
        return None
def getHiddenBookings(self):
    """Return the set of hidden booking ids, lazily created for objects
    persisted before the attribute existed."""
    try:
        return self._hiddenBookings
    except AttributeError:
        self._hiddenBookings = set()
        return self._hiddenBookings
def hasBookings(self):
    """True when at least one booking is registered."""
    return bool(self._bookings)
def canCreateBooking(self, type):
    """Whether a new booking of plugin *type* may be created: always for
    multi-booking plugins, otherwise only when none exists yet."""
    if CollaborationTools.getCSBookingClass(type)._allowMultiple:
        return True
    return not self.getBookingList(filterByType=type)
def checkVideoLink(self, bookingParams):
    """Resolve the unique id of the object a booking links to.

    Based on bookingParams['videoLinkType'] ('session', 'contribution', or
    anything else -> the whole event), validates the target and returns its
    unique id. Raises CollaborationException when the target is missing.
    """
    if bookingParams.get('videoLinkType',"") == "session":
        # session link ids arrive encoded as "s<sessionId>l<slotId>"
        sessSlotId = bookingParams.get("videoLinkSession","")
        import re
        # NOTE(review): '[0-9a]' admits a literal 'a' inside session ids --
        # cannot tell from here whether that is intentional; confirm.
        regExp = re.match(r"""(s[0-9a]*)(l[0-9]*)""", sessSlotId)
        if not regExp:
            raise CollaborationException(_('No session has been passed when the type is session.'))
        sessionId = regExp.group(1)[1:]
        slotId = regExp.group(2)[1:]
        session = self._conf.getSessionById(sessionId)
        if session is None:
            raise CollaborationException(_('The session does not exist.'))
        slot = session.getSlotById(slotId)
        if slot is None:
            # NOTE(review): this is the missing-*slot* case but reuses the
            # session message -- looks like a copy-paste; kept as-is.
            raise CollaborationException(_('The session does not exist.'))
        return slot.getUniqueId()
    elif bookingParams.get('videoLinkType',"") == "contribution":
        contId = bookingParams.get("videoLinkContribution","")
        if contId == "":
            raise CollaborationException(_('No contribution has been passed when the type is contribution.'))
        cont = self._conf.getContributionById(contId)
        if cont is None:
            raise CollaborationException(_('The contribution does not exist.'))
        return cont.getUniqueId()

    # default: the booking links to the event itself
    return self._conf.getUniqueId()
def addBooking(self, booking):
    """ Adds an existing booking to the list of bookings.

        booking: The existing booking to be added.
        NOTE(review): reads 'clone.unique_id_map' from the ContextManager,
        so this appears to run during event cloning -- confirm with callers.
    """
    booking.setId( self._getNewBookingId())
    self._bookings[booking.getId()] = booking
    self._bookingsByType.setdefault(booking.getType(),[]).append(booking.getId())
    if booking.isHidden():
        self.getHiddenBookings().add(booking.getId())
    self._indexBooking(booking)
    booking.index_instances()
    self._notifyModification()

    # the unique id can be diferent for the new conference
    booking.setLinkType({booking.getLinkType():ContextManager.get('clone.unique_id_map').get(booking.getLinkId(),"")})
    if booking.hasSessionOrContributionLink():
        # refresh the stored session/contribution id so the booking params
        # point at the new (cloned) event's objects
        linkObject = booking.getLinkObject()
        bp=booking.getBookingParams()
        if isinstance(linkObject, Contribution):
            bp["videoLinkContribution"] = linkObject.getId()
        else: #session
            bp["videoLinkSession"] = linkObject.getId()
        booking.setBookingParams(bp)
        self.addVideoService(booking.getLinkId(), booking)
def _createBooking(self, bookingType, bookingParams=None, operation="_create"):
    """ Create (or attach, per *operation*) a booking of *bookingType*.

        bookingType: plugin name string, e.g. "EVO".
        bookingParams: dict of parameters for the new booking. Default is
            None instead of a mutable {}: params may be mutated downstream
            (e.g. startDate/endDate are removed), which would corrupt a
            shared default dict.
        operation: name of the bound method called on the new booking,
            "_create" or "_attach".
        Returns the new booking, or a CSErrorBase describing the failure.
        Raises CollaborationServiceException on plugin errors or when a
        second booking is requested for a single-booking plugin.
    """
    if bookingParams is None:
        bookingParams = {}
    if self.canCreateBooking(bookingType):
        uniqueId = self.checkVideoLink(bookingParams)

        # Restriction: 1 video service per session or contribution.
        # ('in' replaces the deprecated dict.has_key throughout)
        if (self.hasVideoService(uniqueId) and "videoLinkType" in bookingParams and bookingParams.get("videoLinkType", "") != "event"):
            raise NoReportError(_('Only one video service per contribution or session is allowed.'))

        newBooking = CollaborationTools.getCSBookingClass(bookingType)(bookingType, self._conf)
        if "videoLinkType" in bookingParams:
            newBooking.setLinkType({bookingParams["videoLinkType"]: uniqueId})

        error = newBooking.setBookingParams(bookingParams)

        if isinstance(error, CSErrorBase):
            return error
        elif error:
            raise CollaborationServiceException("Problem while creating a booking of type " + bookingType)
        else:
            newId = self._getNewBookingId()
            newBooking.setId(newId)
            createResult = getattr(newBooking, operation)()
            if isinstance(createResult, CSErrorBase):
                return createResult
            else:
                self._bookings[newId] = newBooking
                self._bookingsByType.setdefault(bookingType, []).append(newId)
                if newBooking.isHidden():
                    self.getHiddenBookings().add(newId)
                newBooking.index_instances()
                self._indexBooking(newBooking)
                self._notifyModification()

                # if we're here and uniqueId has a value, register the video service
                if uniqueId is not None:
                    self.addVideoService(uniqueId, newBooking)

                if MailTools.needToSendEmails(bookingType):
                    newBooking._sendNotifications('new')

                return newBooking
    else:
        # we raise an exception because the web interface should take care
        # of this never actually happening
        raise CollaborationServiceException(bookingType + " only allows to create 1 booking per event")
def createBooking(self, bookingType, bookingParams=None):
    """ Adds a new booking to the list of bookings.
        The id of the new booking is auto-generated incrementally.
        After generating the booking, its "performBooking" method will be called.
        bookingType: a String with the booking's plugin. Example: "DummyPlugin", "EVO"
        bookingParams: a dictionary with the parameters necessary to create the booking.
        "create the booking" usually means Indico deciding if the booking can take place.
        if "startDate" and "endDate" are among the keys, they will be taken out of the dictionary.
    """
    # None instead of a mutable {} default: the params dict is mutated
    # downstream (keys are removed), which would corrupt a shared default.
    if bookingParams is None:
        bookingParams = {}
    return self._createBooking(bookingType, bookingParams)
def attachBooking(self, bookingType, bookingParams=None):
    """ Attach an existing booking to the list of bookings.
        The checking and the params are the same as create the booking.

        bookingType: plugin name string.
        bookingParams: dict used to locate/attach the booking (mutable
            default replaced with None to avoid shared-state bugs).
    """
    if bookingParams is None:
        bookingParams = {}
    # Bug fix: the original passed the *builtin function* `sorted` as the
    # positional `sorted` flag -- truthy, so it behaved like True; make the
    # intent explicit.
    for booking in self.getBookingList(sorted=True, filterByType=bookingType):
        result = booking.checkAttachParams(bookingParams)
        if isinstance(result, CSErrorBase):
            return result
    return self._createBooking(bookingType, bookingParams, "_attach")
def searchBookings(self, bookingType, user, query, offset=0, limit=None):
    """ Search remote bookings of the given plugin type.

        bookingType: a String with the booking's plugin. Example: "DummyPlugin", "EVO"
        user: the user on whose behalf the search runs.
        query: free-text search string handed to the plugin's _search.
        offset, limit: paging window applied by the plugin.
        Returns {'results': ..., 'offset': ...}; raises CollaborationException
        when the plugin does not enable the "searchAllow" option.
    """
    if CollaborationTools.hasOption(bookingType, "searchAllow") \
            and CollaborationTools.getOptionValue(bookingType, "searchAllow"):
        res = CollaborationTools.getCSBookingClass(bookingType)._search(user, query, offset, limit)
        return {'results': res[0],
                'offset': res[1]}
    else:
        raise CollaborationException("Plugin type " + str(bookingType) + " does not allow search.")
def _indexBooking(self, booking, index_names=None):
    """Add *booking* to its relevant collaboration indexes.

    When *index_names* is given, only the intersection of those named
    indexes with the booking's own index list is used. Bookings whose
    shouldBeIndexed() is False are never indexed.
    """
    targets = self._getIndexList(booking)
    if index_names is not None:
        holder = IndexesHolder().getById('collaboration')
        requested = [holder.getIndex(name) for name in index_names]
        targets = [ix for ix in requested if ix in targets]

    if not booking.shouldBeIndexed():
        return
    for ix in targets:
        ix.indexBooking(booking)
def changeBooking(self, bookingId, bookingParams):
    """
    Changes the bookingParams of a CSBookingBase object.
    After updating the booking, its 'performBooking' method will be called.
    bookingId: the id of the CSBookingBase object to change
    bookingParams: a dictionary with the new parameters that will modify the booking
    'modify the booking' can mean that maybe the booking will be rejected with the new parameters.
    if 'startDate' and 'endDate' are among the keys, they will be taken out of the dictionary.
    Returns the modified booking, or a CSSanitizationError / CSErrorBase on
    failure (state is rolled back in that case).
    """

    booking = self.getBooking(bookingId)

    # snapshot state so a failed plugin call can be rolled back
    oldStartDate = booking.getStartDate()
    oldModificationDate = booking.getModificationDate()
    oldBookingParams = booking.getBookingParams() #this is a copy so it's ok

    booking.unindex_instances()

    error = booking.setBookingParams(bookingParams)
    if isinstance(error, CSSanitizationError):
        return error
    elif error:
        CSBookingManager._rollbackChanges(booking, oldBookingParams, oldModificationDate)
        if isinstance(error, CSErrorBase):
            return error
        raise CollaborationServiceException("Problem while modifying a booking of type " + booking.getType())
    else:
        modifyResult = booking._modify(oldBookingParams)
        if isinstance(modifyResult, CSErrorBase):
            CSBookingManager._rollbackChanges(booking, oldBookingParams, oldModificationDate)
            return modifyResult
        else:
            modificationDate = now_utc()
            booking.setModificationDate(modificationDate)

            # keep the hidden-bookings set in sync with the new visibility
            if booking.isHidden():
                self.getHiddenBookings().add(booking.getId())
            elif booking.getId() in self.getHiddenBookings():
                self.getHiddenBookings().remove(booking.getId())

            eventLinkUpdated = False
            newLinkId = self.checkVideoLink(bookingParams)

            if bookingParams.has_key("videoLinkType"):
                oldLinkData = booking.getLinkIdDict()
                oldLinkId = oldLinkData.values()[0]

                # Details changed, we need to remove the association and re-create it
                if not (oldLinkData.has_key(bookingParams.get('videoLinkType','')) and oldLinkId == newLinkId):
                    self.removeVideoSingleService(booking.getLinkId(), booking)
                    eventLinkUpdated = True

            if eventLinkUpdated or (bookingParams.has_key("videoLinkType") and bookingParams.get("videoLinkType","") != "event"):
                if self.hasVideoService(newLinkId, booking):
                    pass # No change in the event linking
                elif newLinkId is not None:
                    if (self.hasVideoService(newLinkId) and bookingParams.has_key("videoLinkType") and bookingParams.get("videoLinkType","") != "event"): # Restriction: 1 video service per session or contribution.
                        raise NoReportError(_('Only one video service per contribution or session is allowed.'))
                    else:
                        self.addVideoService(newLinkId, booking)
                        if bookingParams.has_key("videoLinkType"):
                            booking.setLinkType({bookingParams['videoLinkType']: newLinkId})
                else: # If it's still None, event linking has been completely removed.
                    booking.resetLinkParams()

            self._changeStartDateInIndex(booking, oldStartDate, booking.getStartDate())
            self._changeModificationDateInIndex(booking, oldModificationDate, modificationDate)
            booking.index_instances()

            if booking.hasAcceptReject():
                # any prior accept/reject decision is voided by this edit;
                # the booking goes back to the pending queue
                if booking.getAcceptRejectStatus() is not None:
                    booking.clearAcceptRejectStatus()
                    self._addToPendingIndex(booking)

            self._notifyModification()

            if MailTools.needToSendEmails(booking.getType()):
                booking._sendNotifications('modify')

            return booking
@classmethod
def _rollbackChanges(cls, booking, oldBookingParams, oldModificationDate):
    """Restore a booking's parameters and modification date after a
    rejected or failed edit."""
    booking.setBookingParams(oldBookingParams)
    booking.setModificationDate(oldModificationDate)
def _changeConfTitleInIndex(self, booking, oldTitle, newTitle):
    """Propagate an event title change to every index holding *booking*."""
    if not booking.shouldBeIndexed():
        return
    for index in self._getIndexList(booking):
        index.changeEventTitle(booking, oldTitle, newTitle)
def _changeStartDateInIndex(self, booking, oldStartDate, newStartDate):
    """Propagate a booking start-date change to its indexes (no-op for
    bookings without a start date or not indexed)."""
    if not (booking.shouldBeIndexed() and booking.hasStartDate()):
        return
    for index in self._getIndexList(booking):
        index.changeStartDate(booking, oldStartDate, newStartDate)
def _changeModificationDateInIndex(self, booking, oldModificationDate, newModificationDate):
    """Propagate a booking modification-date change to its indexes."""
    if not booking.shouldBeIndexed():
        return
    for index in self._getIndexList(booking):
        index.changeModificationDate(booking, oldModificationDate, newModificationDate)
def _changeConfStartDateInIndex(self, booking, oldConfStartDate, newConfStartDate):
    """Propagate an event start-date change to the booking's indexes;
    skipped when either date is missing or the booking is unindexed."""
    if oldConfStartDate is None or newConfStartDate is None:
        return
    if not booking.shouldBeIndexed():
        return
    for index in self._getIndexList(booking):
        index.changeConfStartDate(booking, oldConfStartDate, newConfStartDate)
def removeBooking(self, id):
    """ Removes a booking given its id.
        Returns the removed booking, or a CSErrorBase when the plugin's
        remote deletion failed (in which case nothing is removed locally).
    """
    booking = self.getBooking(id)
    bookingType = booking.getType()
    bookingLinkId = booking.getLinkId()

    removeResult = booking._delete()
    if isinstance(removeResult, CSErrorBase):
        return removeResult
    else:
        del self._bookings[id]
        self._bookingsByType[bookingType].remove(id)
        if not self._bookingsByType[bookingType]:
            # drop the now-empty per-type bucket
            del self._bookingsByType[bookingType]
        if id in self.getHiddenBookings():
            self.getHiddenBookings().remove(id)

        # If there is an association to a session or contribution, remove it
        if bookingLinkId is not None:
            self.removeVideoSingleService(bookingLinkId, booking)

        booking.unindex_instances()

        self._unindexBooking(booking)

        self._notifyModification()

        if MailTools.needToSendEmails(booking.getType()):
            booking._sendNotifications('remove')

        return booking
def _unindexBooking(self, booking):
    """Remove *booking* from all its indexes, unless it is flagged to be
    kept forever or is not indexed at all."""
    if not booking.shouldBeIndexed() or booking.keepForever():
        return
    for index in self._getIndexList(booking):
        index.unindexBooking(booking)
def startBooking(self, id):
    """Start the booking with the given id, when its plugin allows it."""
    booking = self._bookings[id]
    if not booking.canBeStarted():
        raise CollaborationException(_("Tried to start booking ") + str(id) + _(" of meeting ") + str(self._conf.getId()) + _(" but this booking cannot be started."))
    booking._start()
    return booking
def stopBooking(self, id):
    """Stop the booking with the given id, when its plugin allows it."""
    booking = self._bookings[id]
    if not booking.canBeStopped():
        raise CollaborationException(_("Tried to stop booking ") + str(id) + _(" of meeting ") + str(self._conf.getId()) + _(" but this booking cannot be stopped."))
    booking._stop()
    return booking
def checkBookingStatus(self, id):
    """Ask the plugin to refresh the booking's status.

    Returns the booking on success, or the CSErrorBase produced by the
    remote check; raises ServiceError for plugins without status checks.
    """
    booking = self._bookings[id]
    if not booking.hasCheckStatus():
        raise ServiceError(message=_("Tried to check status of booking ") + str(id) + _(" of meeting ") + str(self._conf.getId()) + _(" but this booking does not support the check status service."))
    result = booking._checkStatus()
    if isinstance(result, CSErrorBase):
        return result
    return booking
def acceptBooking(self, id, user=None):
    """Accept an accept/reject-capable booking on behalf of *user*."""
    booking = self._bookings[id]
    if not booking.hasAcceptReject():
        raise ServiceError(message=_("Tried to accept booking ") + str(id) + _(" of meeting ") + str(self._conf.getId()) + _(" but this booking cannot be accepted."))
    if booking.getAcceptRejectStatus() is None:
        # still pending: pull it out of the pending indexes first
        self._removeFromPendingIndex(booking)
    booking.accept(user)
    return booking
def rejectBooking(self, id, reason):
    """Reject an accept/reject-capable booking, recording *reason*."""
    booking = self._bookings[id]
    if not booking.hasAcceptReject():
        raise ServiceError("ERR-COLL10", _("Tried to reject booking ") + str(id) + _(" of meeting ") + str(self._conf.getId()) + _(" but this booking cannot be rejected."))
    if booking.getAcceptRejectStatus() is None:
        # still pending: pull it out of the pending indexes first
        self._removeFromPendingIndex(booking)
    booking.reject(reason)
    return booking
def makeMeModeratorBooking(self, id, user):
    """Re-submit the booking with *user* installed as its owner."""
    params = self._bookings[id].getBookingParams()
    params["owner"] = user
    return self.changeBooking(id, params)
def _addToPendingIndex(self, booking):
    """Register *booking* in every relevant pending index."""
    if not booking.shouldBeIndexed():
        return
    for index in self._getPendingIndexList(booking):
        index.indexBooking(booking)
def _removeFromPendingIndex(self, booking):
    """Drop *booking* from every relevant pending index."""
    if not booking.shouldBeIndexed():
        return
    for index in self._getPendingIndexList(booking):
        index.unindexBooking(booking)
def _getNewBookingId(self):
    """Hand out the next booking id from the persistent counter."""
    return self._counter.newCount()
def _getIndexList(self, booking):
    """ Returns a list of BookingsIndex objects where the booking should be indexed.
        This list includes:
        -an index of all bookings
        -an index of bookings of the given type
        -the booking type's declared common indexes, when any
        For accept/reject bookings that are still pending, the matching
        pending indexes are appended as well (see _getPendingIndexList).
    """
    holder = IndexesHolder().getById("collaboration")
    result = [holder.getAllBookingsIndex(), holder.getIndex(booking.getType())]
    result.extend(holder.getIndex(name) for name in booking.getCommonIndexes())

    if booking.hasAcceptReject() and booking.getAcceptRejectStatus() is None:
        result.extend(self._getPendingIndexList(booking))

    return result
def _getPendingIndexList(self, booking):
    """Return the "pending" variants of the indexes relevant to *booking*:
    all-pending, type-pending, and each declared common index's pending
    counterpart, in that order."""
    holder = IndexesHolder().getById("collaboration")
    names = ["all_pending", booking.getType() + "_pending"]
    names.extend(common + "_pending" for common in booking.getCommonIndexes())
    return [holder.getIndex(name) for name in names]
def getManagers(self):
    """Return the plugin-name -> managers dict, lazily created for objects
    persisted before the attribute existed."""
    try:
        return self._managers
    except AttributeError:
        self._managers = {}
        return self._managers
def addPluginManager(self, plugin, user):
    """Grant *user* the manager role for *plugin* (no deduplication)."""
    # TODO: switch to user.linkTo once per-user role lists are needed
    self.getManagers().setdefault(plugin, []).append(user)
    self._notifyModification()
def removePluginManager(self, plugin, user):
    """Revoke *user*'s manager role for *plugin*, when present."""
    # TODO: switch to user.unlinkTo once per-user role lists are needed
    current = self.getManagers().setdefault(plugin, [])
    if user in current:
        current.remove(user)
        self._notifyModification()
def getVideoServicesManagers(self):
    """Return the managers of all video services ('all' pseudo-plugin)."""
    return self.getPluginManagers('all')
def isVideoServicesManager(self, user):
    """True when *user* manages all video services for this event."""
    return user in self.getPluginManagers('all')
def getPluginManagers(self, plugin):
    """Return (creating an empty list if needed) the managers of *plugin*."""
    managers_by_plugin = self.getManagers()
    return managers_by_plugin.setdefault(plugin, [])
def isPluginManager(self, plugin, user):
    """True when *user* is among the managers of *plugin*."""
    return user in self.getPluginManagers(plugin)
def getAllManagers(self):
    """ Returns a list with all the managers, no matter their type
        The returned list is not ordered.
    """
    unique = set()
    for users in self.getManagers().itervalues():
        unique.update(users)
    return list(unique)
def isPluginManagerOfAnyPlugin(self, user):
    """True when *user* manages the video services globally or any single
    plugin.

    TODO: not optimal -- a user->plugins index would avoid scanning every
    plugin; .linkTo can't express per-plugin roles today.
    """
    if self.isVideoServicesManager(user):
        return True
    return any(self.isPluginManager(plugin, user)
               for plugin in self.getManagers().iterkeys())
def notifyTitleChange(self, oldTitle, newTitle):
    """ Notifies the CSBookingManager that the title of the event (meeting) it's attached to has changed.
        The CSBookingManager will reindex all its bookings in the event title index.
        This method will be called by the event (meeting) object
    """
    for booking in self.getBookingList():
        try:
            self._changeConfTitleInIndex(booking, oldTitle, newTitle)
        except Exception, e:
            # Best-effort: failing to reindex one booking must not abort the rename.
            Logger.get('VideoServ').exception("Exception while reindexing a booking in the event title index because its event's title changed: " + str(e))
def notifyInfoChange(self):
    # Event info changed (e.g. contributions/speakers): rebuild the speaker
    # wrapper list so it reflects the current state.
    self.updateSpeakerWrapperList()
def notifyEventDateChanges(self, oldStartDate = None, newStartDate = None, oldEndDate = None, newEndDate = None):
    """ Notifies the CSBookingManager that the start and / or end dates of the event it's attached to have changed.
        The CSBookingManager will change the dates of all the bookings that want to be updated.
        If there are problems (such as a booking not being able to be modified)
        it will write a list of strings describing the problems in the 'dateChangeNotificationProblems' context variable.
        (each string is produced by the _booking2NotifyProblem method).
        This method will be called by the event (meeting) object.
    """
    # A date only counts as "changed" when both old and new values are given.
    startDateChanged = oldStartDate is not None and newStartDate is not None and not oldStartDate == newStartDate
    endDateChanged = oldEndDate is not None and newEndDate is not None and not oldEndDate == newEndDate
    someDateChanged = startDateChanged or endDateChanged
    Logger.get("VideoServ").info("""CSBookingManager: starting notifyEventDateChanges. Arguments: confId=%s, oldStartDate=%s, newStartDate=%s, oldEndDate=%s, newEndDate=%s""" %
                                 (str(self._conf.getId()), str(oldStartDate), str(newStartDate), str(oldEndDate), str(newEndDate)))
    if someDateChanged:
        problems = []
        for booking in self.getBookingList():
            # booking "instances" provide higher granularity in search
            booking.unindex_instances()
            booking.index_instances()
            if startDateChanged:
                try:
                    self._changeConfStartDateInIndex(booking, oldStartDate, newStartDate)
                except Exception, e:
                    Logger.get('VideoServ').error("Exception while reindexing a booking in the event start date index because its event's start date changed: " + str(e))
            if booking.hasStartDate():
                if booking.needsToBeNotifiedOfDateChanges():
                    Logger.get("VideoServ").info("""CSBookingManager: notifying date changes to booking %s of event %s""" %
                                                 (str(booking.getId()), str(self._conf.getId())))
                    # Remember the old values so the booking can be rolled back
                    # if the remote modification fails.
                    oldBookingStartDate = booking.getStartDate()
                    oldBookingEndDate = booking.getEndDate()
                    oldBookingParams = booking.getBookingParams() #this is a copy so it's ok
                    # Shift the booking by the same delta as the event.
                    if startDateChanged:
                        booking.setStartDate(oldBookingStartDate + (newStartDate - oldStartDate) )
                    if endDateChanged:
                        booking.setEndDate(oldBookingEndDate + (newEndDate - oldEndDate) )
                    rollback = False
                    modifyResult = None
                    try:
                        modifyResult = booking._modify(oldBookingParams)
                        if isinstance(modifyResult, CSErrorBase):
                            Logger.get('VideoServ').warning("""Error while changing the dates of booking %s of event %s after event dates changed: %s""" %
                                                            (str(booking.getId()), str(self._conf.getId()), modifyResult.getLogMessage()))
                            rollback = True
                    except Exception, e:
                        Logger.get('VideoServ').error("""Exception while changing the dates of booking %s of event %s after event dates changed: %s""" %
                                                      (str(booking.getId()), str(self._conf.getId()), str(e)))
                        rollback = True
                    if rollback:
                        # Restore the previous dates and record the problem for the user.
                        booking.setStartDate(oldBookingStartDate)
                        booking.setEndDate(oldBookingEndDate)
                        problems.append(CSBookingManager._booking2NotifyProblem(booking, modifyResult))
                    elif startDateChanged:
                        self._changeStartDateInIndex(booking, oldBookingStartDate, booking.getStartDate())
            # Let plugin bookings react to the event date change themselves.
            if hasattr(booking, "notifyEventDateChanges"):
                try:
                    booking.notifyEventDateChanges(oldStartDate, newStartDate, oldEndDate, newEndDate)
                except Exception, e:
                    Logger.get('VideoServ').exception("Exception while notifying a plugin of an event date changed: " + str(e))
        if problems:
            # Surface the accumulated problems to the request context so the UI can show them.
            ContextManager.get('dateChangeNotificationProblems')['Collaboration'] = [
                'Some Video Services bookings could not be moved:',
                problems,
                'Go to [[' + str(UHConfModifCollaboration.getURL(self.getOwner(), secure = ContextManager.get('currentRH').use_https())) + ' the Video Services section]] to modify them yourself.'
            ]
def notifyTimezoneChange(self, oldTimezone, newTimezone):
    """ Notifies the CSBookingManager that the timezone of the event it's attached to has changed.
        The CSBookingManager will change the dates of all the bookings that want to be updated.
        This method will be called by the event (Conference) object
    """
    # NOTE(review): despite the docstring above, this is currently a no-op —
    # no booking is updated; an empty problem list is returned.
    return []
def notifyLocationChange(self):
    """ Forwards a location change of the event to every booking that
        implements notifyLocationChange; failures are logged, not raised.
    """
    for booking in self.getBookingList():
        if hasattr(booking, "notifyLocationChange"):
            try:
                booking.notifyLocationChange()
            except Exception, e:
                Logger.get('VideoServ').exception("Exception while notifying a plugin of a location change: " + str(e))
@classmethod
def _booking2NotifyProblem(cls, booking, modifyError):
    """ Turns a booking into a string used to tell the user
        why a date change of a booking triggered by the event's start or end date change
        went bad.
    """
    parts = ["The dates of the ", booking.getType(), " booking"]
    if booking.hasTitle():
        parts += [': "', booking._getTitle(), '" (', booking.getStartDateAsString(),
                  ' - ', booking.getEndDateAsString(), ')']
    else:
        parts += [' ongoing from ', booking.getStartDateAsString(), ' to ',
                  booking.getEndDateAsString(), '']
    parts.append(' could not be changed.')
    if modifyError and modifyError.getUserMessage():
        parts += [' Reason: ', modifyError.getUserMessage()]
    return "".join(parts)
def notifyDeletion(self):
    """ Notifies the CSBookingManager that the Conference object it is attached to has been deleted.
        Every event-level booking is deleted remotely (best effort) and removed
        from the indexes. This method will be called by the event (Conference) object.
    """
    for booking in self.getBookingList():
        try:
            # We will delete the bookings connected to the event, not Contribution or
            # Session-linked ones.
            if booking.getLinkType() and booking.getLinkType() != "event":
                continue
            removeResult = booking._delete()
            if isinstance(removeResult, CSErrorBase):
                # Remote deletion failed: log it, but still unindex the booking below.
                Logger.get('VideoServ').warning("Error while deleting a booking of type %s after deleting an event: %s"%(booking.getType(), removeResult.getLogMessage() ))
            booking.unindex_instances()
            self._unindexBooking(booking)
        except Exception, e:
            Logger.get('VideoServ').exception("Exception while deleting a booking of type %s after deleting an event: %s" % (booking.getType(), str(e)))
def getEventDisplayPlugins(self, sorted = False):
    """ Returns a list of names (strings) of plugins which have been configured
        as showing bookings in the event display page, and which have bookings
        already (or previously) created in the event.
        (does not check if the bookings are hidden or not)
    """
    displayable = CollaborationTools.pluginsWithEventDisplay()
    names = [name for name in self._bookingsByType if name in displayable]
    if sorted:
        names.sort()
    return names
def createTestBooking(self, bookingParams = None):
    """ Function that creates a 'test' booking for performance test.
        Avoids to use any of the plugins except DummyPlugin
        bookingParams: optional dict of booking parameters (defaults to empty).
        Returns the new booking, or a CSErrorBase if remote creation failed.
    """
    from MaKaC.plugins.Collaboration.DummyPlugin.collaboration import CSBooking as DummyBooking
    # Fix: the default used to be a mutable {} shared across calls; use the
    # None sentinel and build a fresh dict per call instead.
    if bookingParams is None:
        bookingParams = {}
    bookingType = 'DummyPlugin'
    newBooking = DummyBooking(bookingType, self._conf)
    error = newBooking.setBookingParams(bookingParams)
    if error:
        raise CollaborationServiceException("Problem while creating a test booking")
    newId = self._getNewBookingId()
    newBooking.setId(newId)
    createResult = newBooking._create()
    if isinstance(createResult, CSErrorBase):
        return createResult
    self._bookings[newId] = newBooking
    self._bookingsByType.setdefault(bookingType,[]).append(newId)
    if newBooking.isHidden():
        self.getHiddenBookings().add(newId)
    self._indexBooking(newBooking)
    self._notifyModification()
    return newBooking
def _notifyModification(self):
    # Mark this persistent object as dirty so ZODB writes it on commit.
    self._p_changed = 1
def getSortedContributionSpeaker(self, exclusive):
    ''' This method will create a dictionary by sorting the contribution/speakers
        that they are in recording, webcast or in both.
        bool: exclusive - if True, every dicts (recording, webcast, both) will
                          have different speaker list (no repetition allowed)
                          if an element is present in 'both', it will be deleted from
                          'recording and 'webcast'
        returns d = { 'recording': {}, 'webcast' : {}, 'both': {} }
    '''
    d = {}
    recordingBooking = self.getSingleBooking("RecordingRequest")
    webcastBooking = self.getSingleBooking("WebcastRequest")
    # Each entry maps contribution -> list of speakers; empty when no booking exists.
    d["recording"] = recordingBooking.getContributionSpeakerSingleBooking() if recordingBooking else {}
    d["webcast"] = webcastBooking.getContributionSpeakerSingleBooking() if webcastBooking else {}
    contributions = {}
    ''' Look for speaker intersections between 'recording' and 'webcast' dicts
        and put them in 'both' dict. Additionally, if any intersection has been found,
        we exclude them from the original dictionary.
    '''
    # Iterate over copies: the dicts may be mutated (del) inside the loop.
    for cont in d["recording"].copy():
        if cont in d["webcast"].copy():
            # Check if same contribution/speaker in 'recording' and 'webcast'
            intersection = set(d['recording'][cont]) & set(d['webcast'][cont])
            if intersection:
                contributions[cont] = list(intersection)
                # if exclusive is True, and as we found same contribution/speaker,
                # we delete them from 'recording' and 'webcast' dicts
                if exclusive:
                    # Symmetric difference: the speakers left after removing the
                    # ones moved into 'both'.
                    exclusion = set(d['recording'][cont]) ^ set(contributions[cont])
                    if not exclusion:
                        del d["recording"][cont]
                    else:
                        d["recording"][cont] = list(exclusion)
                    exclusion = set(d['webcast'][cont]) ^ set(contributions[cont])
                    if not exclusion:
                        del d["webcast"][cont]
                    else:
                        d["webcast"][cont] = list(exclusion)
    d["both"] = contributions
    return d
def getContributionSpeakerByType(self, requestType):
    ''' Return a plain dict of contribution/speaker according to the requestType
        if the request type is 'both', we need to merge the lists
    '''
    d = self.getSortedContributionSpeaker(False) # We want non exclusive dict
    if requestType == "recording":
        return d['recording']
    elif requestType == "webcast":
        return d['webcast']
    elif requestType == "both":
        # We merge 'recording' and 'webcast'
        # Start from 'recording', unioning each speaker list with the matching
        # 'webcast' one...
        m = dict(((cont, list(set(spks) | \
                              set(d['webcast'].get(cont, [])))) for cont, spks in d['recording'].iteritems()))
        # ...then add the 'webcast'-only contributions.
        m.update(dict((cont, spks) for cont, spks in d['webcast'].iteritems() if cont not in m))
        return m
    else:
        # Unknown request type: nothing to return.
        return {}
def updateSpeakerWrapperList(self, newList = False):
    """
    if newList arg is True, don't check if there is an existing speakerWrapperList
    and create a new one straight forward. (Done to avoid loops)
    """
    SWList = []
    contributions = self.getSortedContributionSpeaker(True)
    requestType = ['recording', 'webcast', 'both']
    for type in requestType:
        for cont in contributions[type]:
            for spk in contributions[type][cont]:
                # Reuse the existing wrapper (keyed "<contId>.<speakerId>")
                # unless a fresh list was requested.
                if newList:
                    sw = None
                else:
                    sw = self.getSpeakerWrapperByUniqueId("%s.%s"%(cont, spk.getId()))
                if sw:
                    if not sw.getObject().getEmail():
                        # Speaker lost their email: flag it, unless the agreement is
                        # already settled (signed / uploaded / refused).
                        if sw.getStatus() not in [SpeakerStatusEnum.SIGNED,
                                                  SpeakerStatusEnum.FROMFILE,
                                                  SpeakerStatusEnum.REFUSED]:
                            sw.setStatus(SpeakerStatusEnum.NOEMAIL)
                    elif sw.getStatus() == SpeakerStatusEnum.NOEMAIL:
                        # Email is back: return to the default "not signed" state.
                        sw.setStatus(SpeakerStatusEnum.NOTSIGNED)
                    sw.setRequestType(type)
                    SWList.append(sw)
                else:
                    newSw = SpeakerWrapper(spk, cont, type)
                    if not newSw.getObject().getEmail():
                        newSw.setStatus(SpeakerStatusEnum.NOEMAIL)
                    SWList.append(newSw)
    self._speakerWrapperList = SWList
def getSpeakerWrapperList(self):
    """ Returns the list of SpeakerWrapper objects, building it lazily
        for objects persisted before the attribute existed.
    """
    if not hasattr(self, "_speakerWrapperList"):#TODO: remove when safe
        self.updateSpeakerWrapperList(True)
    return self._speakerWrapperList
def getSpeakerWrapperByUniqueId(self, id):
    """ Returns the speaker wrapper whose unique id equals *id*, or None. """
    if not hasattr(self, "_speakerWrapperList"):#TODO: remove when safe
        self.updateSpeakerWrapperList(True)
    return next((wrapper for wrapper in self._speakerWrapperList
                 if wrapper.getUniqueId() == id), None)
def areSignatureCompleted(self):
    """ True when every speaker wrapper is settled (SIGNED or FROMFILE). """
    settled = (SpeakerStatusEnum.FROMFILE, SpeakerStatusEnum.SIGNED)
    return all(spkWrap.getStatus() in settled
               for spkWrap in self._speakerWrapperList)
def getSpeakerWrapperListByStatus(self, status):
    '''Return a list of SpeakerWrapper matching the status.
    '''
    return [spkWrap for spkWrap in self._speakerWrapperList
            if spkWrap.getStatus() == status]
def getSpeakerEmailByUniqueId(self, id, user):
    ''' Return the email of a speaker according to the uniqueId.
        id: uniqueId of the speaker wrapper.
        user: user object of the sender of the emails, in order to check the rights.
    '''
    canManageRequest = CollaborationTools.getRequestTypeUserCanManage(self._conf, user)
    # Map the user's management scope to the request types they may contact.
    requestTypeAccepted = {
        "recording": ["recording"],
        "webcast": ["webcast"],
        "both": ["recording", "webcast", "both"],
    }.get(canManageRequest, "")
    settled = [SpeakerStatusEnum.SIGNED, SpeakerStatusEnum.FROMFILE]
    return [spkWrap.getObject().getEmail()
            for spkWrap in self._speakerWrapperList
            if spkWrap.getUniqueId() == id
            and spkWrap.hasEmail()
            and spkWrap.getStatus() not in settled
            and spkWrap.getRequestType() in requestTypeAccepted]
def addVideoService(self, uniqueId, videoService):
    """ Adds a video service to Contribution / Session link in the tracking
        dictionary in order {uniqueId : videoService}
    """
    services = self.getVideoServices()
    if uniqueId in services:
        services[uniqueId].append(videoService)
    else:
        services[uniqueId] = [videoService]
def removeVideoAllServices(self, uniqueId):
    """ Removes all associations of Contributions / Sessions with video
        services from the dictionary, key included.
        No-op (returns None) when the id has no associated services.
    """
    if self.hasVideoService(uniqueId):
        del self.getVideoServices()[uniqueId]
def removeVideoSingleService(self, uniqueId, videoService):
    """ Removes a specific video service from a specific contribution,
        deleting the whole dictionary entry if it becomes empty.
        No-op (returns None) when the id has no associated services.
    """
    if not self.hasVideoService(uniqueId):
        return None
    services = self.getVideoServicesById(uniqueId)
    if videoService in services:
        services.remove(videoService)
    # There are no more entries, therefore remove the dictionary entry too.
    if not services:
        self.removeVideoAllServices(uniqueId)
def getVideoServices(self):
    """ Returns the OOBTree associating event unique IDs with the List
        of video services associated.
    """
    # Lazily created for objects persisted before this attribute existed.
    if not hasattr(self, "_bookingsToVideoServices"):
        self._bookingsToVideoServices = OOBTree()
    return self._bookingsToVideoServices
def getVideoServicesById(self, uniqueId):
    """ Returns a list of video services associated with the uniqueId
        for printing in event timetable. Returns None if no video services
        are found.
    """
    return self.getVideoServices().get(uniqueId, None)
def hasVideoService(self, uniqueId, service=None):
    """ Returns True if the uniqueId of the Contribution or Session provided
        has an entry in the self._bookingsToVideoServices dictionary, thusly
        denoting the presence of linked bookings. Second parameter is for more
        specific matching, i.e. returns True if unique ID is associated with
        specific service.
    """
    services = self.getVideoServices()
    if service is None:
        return services.has_key(uniqueId)
    # Fix: the original loop ended in an ambiguous for/else fall-through that
    # could yield True (or None) when the id existed but the service was not
    # linked to it; return an explicit boolean membership test instead.
    return service in services.get(uniqueId, [])
def isAnyRequestAccepted(self):
    '''
    Return True if at least one between recording and webcast request
    has been accepted, False otherwise.
    '''
    value = False
    rr = self.getSingleBooking("RecordingRequest")
    wr = self.getSingleBooking("WebcastRequest")
    # getAcceptRejectStatus() returns None while pending; coerce to bool so the
    # method always returns True/False as documented (it could return None before).
    if rr:
        value = bool(rr.getAcceptRejectStatus())
    if wr:
        value = value or bool(wr.getAcceptRejectStatus())
    return value
def isContributionReadyToBePublished(self, contId):
    """ True when at least one speaker wrapper exists for *contId* and every
        one of them has a settled agreement (SIGNED or FROMFILE).
    """
    if not hasattr(self, "_speakerWrapperList"):#TODO: remove when safe
        self.updateSpeakerWrapperList(True)
    found = False
    for spkWrap in self._speakerWrapperList:
        if spkWrap.getContId() != contId:
            continue
        found = True
        if spkWrap.getStatus() not in (SpeakerStatusEnum.SIGNED,
                                       SpeakerStatusEnum.FROMFILE):
            return False
    # The list has to have at least one wrapper with the given contId.
    return found
def notifyElectronicAgreementAnswer(self):
    """ Returns whether answers to the electronic agreement should trigger a
        notification. Defaults to True for legacy persisted objects.
    """
    if not hasattr(self, "_notifyElectronicAgreementAnswer"):
        self._notifyElectronicAgreementAnswer = True
    return self._notifyElectronicAgreementAnswer
def setNotifyElectronicAgreementAnswer(self, notifyElectronicAgreementAnswer):
    # Enables/disables notifications for electronic agreement answers.
    self._notifyElectronicAgreementAnswer = notifyElectronicAgreementAnswer
class CSBookingBase(Persistent, Fossilizable):
fossilizes(ICSBookingBaseConfModifFossil, ICSBookingBaseIndexingFossil)
""" Base class that represents a Collaboration Systems booking.
Every Collaboration plugin will have to implement this class.
In the base class are gathered all the functionalities / elements that are common for all plugins.
A booking is Persistent (DateChangeObserver inherits from Persistent) so it will be stored in the database.
Also, every CSBookingBase object in the server will be mirrored by a Javascript object in the client, through "Pickling".
Every class that implements the CSBookingBase has to declare the following class attributes:
_hasStart : True if the plugin has a "start" concept. Otherwise, the "start" button will not appear, etc.
_hasStop : True if the plugin has a "stop" concept. Otherwise, the "stop" button will not appear, etc.
_hasConnect : True if the plugin has a "connect" concept. Otherwise, the "connect" button will not appear, etc.
_hasCheckStatus: True if the plugin has a "check status" concept. Otherwise, the "check status" button will not appear, etc.
_hasAcceptReject: True if the plugin has a "accept or reject" concept. Otherwise, the "accept" and "reject" buttons will not appear, etc.
_requiresServerCallForStart : True if we should notify the server when the user presses the "start" button.
_requiresServerCallForStop : True if we should notify the server when the user presses the "stop" button.
_requiresClientCallForStart : True if the browser should execute some JS action when the user presses the "start" button.
_requiresClientCallForStop : True if the browser should execute some JS action when the user presses the "stop" button.
_needsBookingParamsCheck : True if the booking parameters should be checked after the booking is added / edited.
If True, the _checkBookingParams method will be called by the setBookingParams method.
_needsToBeNotifiedOnView: True if the booking object needs to be notified (through the "notifyOnView" method)
when the user "sees" the booking, for example when returning the list of bookings.
_canBeNotifiedOfEventDateChanges: True if bookings of this type should be able to be notified
of their owner Event changing start date, end date or timezone.
_allowMultiple: True if this booking type allows more than 1 booking per event.
_keepForever: True if this booking has to be in the Video Services Overview indexes forever
"""
_hasStart = False
_hasStop = False
_hasCheckStatus = False
_hasAcceptReject = False
_hasStartStopAll = False
_requiresServerCallForStart = False
_requiresServerCallForStop = False
_requiresClientCallForStart = False
_requiresClientCallForStop = False
_needsBookingParamsCheck = False
_needsToBeNotifiedOnView = False
_canBeNotifiedOfEventDateChanges = True
_allowMultiple = True
_shouldBeIndexed = True
_commonIndexes = []
_hasStartDate = True
_hasEventDisplay = False
_hasTitle = False
_adminOnly = False
_complexParameters = []
_linkVideoType = None
_linkVideoId = None
_keepForever = False
def __init__(self, bookingType, conf):
    """ Constructor for the CSBookingBase class.
        id: a string with the id of the booking
        bookingType: a string with the type of the booking. Example: "DummyPlugin", "EVO"
        conf: a Conference object to which this booking belongs (through the CSBookingManager object). The meeting of this booking.
        startTime: TODO
        endTime: TODO
        Other attributes initialized by this constructor:
        -_bookingParams: the parameters necessary to perform the booking.
                         The plugins will decide if the booking gets authorized or not depending on this.
                         Needs to be defined by the implementing class, as keys with empty values.
        -_startingParams: the parameters necessary to start the booking.
                          They will be used on the client for the local start action.
                          Needs to be defined by the implementing class, as keys with empty values.
        -_warning: A warning is a plugin-defined object, with information to show to the user when
                   the operation went well but we still have to show some info to the user.
        -_permissionToStart : Even if the "start" button for a booking is able to be pushed, there may be cases where the booking should
                   not start. For example, if it's not the correct time yet.
                   In that case "permissionToStart" should be set to false so that the booking doesn't start.
        -_permissionToStop: Same as permissionToStart. Sometimes the booking should not be allowed to stop even if the "stop" button is available.
    """
    self._id = None
    self._type = bookingType
    self._plugin = CollaborationTools.getPlugin(self._type)
    self._conf = conf
    self._warning = None
    self._creationDate = nowutc()
    self._modificationDate = nowutc()
    # Cached Unix timestamps, kept in sync by the corresponding setters.
    self._creationDateTimestamp = int(datetimeToUnixTimeInt(self._creationDate))
    self._modificationDateTimestamp = int(datetimeToUnixTimeInt(self._modificationDate))
    self._startDate = None
    self._endDate = None
    self._startDateTimestamp = None
    self._endDateTimestamp = None
    self._acceptRejectStatus = None #None = not yet accepted / rejected; True = accepted; False = rejected
    self._rejectReason = ""
    self._bookingParams = {}
    self._canBeDeleted = True
    self._permissionToStart = False
    self._permissionToStop = False
    self._needsToBeNotifiedOfDateChanges = self._canBeNotifiedOfEventDateChanges
    self._hidden = False
    self._play_status = None
    # Expose the plugin's options under a per-type attribute,
    # e.g. self._EVOOptions for an "EVO" booking.
    setattr(self, "_" + bookingType + "Options", CollaborationTools.getPlugin(bookingType).getOptions())
    #NOTE: Should maybe notify the creation of a new booking, specially if it's a single booking
    # like that can update requestType of the speaker wrapper...
def getId(self):
    """ Returns the internal, per-conference id of the booking.
        This attribute will be available in Javascript with the "id" identifier.
    """
    return self._id
def setId(self, id):
    """ Sets the internal, per-conference id of the booking
    """
    self._id = id
def getUniqueId(self):
    """ Returns an unique Id that identifies this booking server-wide.
        Useful for ExternalOperationsManager
    """
    # Format: "<conference unique id>csbook<booking id>".
    return "%scsbook%s" % (self.getConference().getUniqueId(), self.getId())
def getType(self):
    """ Returns the type of the booking, as a string: "EVO", "DummyPlugin"
        This attribute will be available in Javascript with the "type" identifier.
    """
    return self._type
def getConference(self):
    """ Returns the owner of this CSBookingBase object, which is a Conference object representing the meeting.
    """
    return self._conf
def setConference(self, conf):
    """ Sets the owner of this CSBookingBase object, which is a Conference object representing the meeting.
    """
    self._conf = conf
def getWarning(self):
    """ Returns a warning attached to this booking.
        A warning is a plugin-defined object, with information to show to the user when
        the operation went well but we still have to show some info to the user.
        To be overloaded by plugins.
    """
    # Lazily default to None for legacy persisted objects.
    if not hasattr(self, '_warning'):
        self._warning = None
    return self._warning
def setWarning(self, warning):
    """ Sets a warning attached to this booking.
        A warning is a plugin-defined object, with information to show to the user when
        the operation went well but we still have to show some info to the user.
        To be overloaded by plugins.
    """
    self._warning = warning
def getCreationDate(self):
    """ Returns the date this booking was created, as a timezone localized datetime object
    """
    if not hasattr(self, "_creationDate"): #TODO: remove when safe
        self._creationDate = nowutc()
    return self._creationDate
def getAdjustedCreationDate(self, tz=None):
    """ Returns the booking creation date, adjusted to a given timezone.
        If no timezone is provided, the event's timezone is used
    """
    return getAdjustedDate(self.getCreationDate(), self.getConference(), tz)
def getCreationDateTimestamp(self):
    """ Returns the booking creation date as a Unix timestamp (int),
        computing and caching it lazily for legacy persisted objects.
    """
    # Fix: the guard tested hasattr(object, ...) — the *builtin* object — which
    # is always False, so the timestamp was recomputed on every call.
    if not hasattr(self, "_creationDateTimestamp"): #TODO: remove when safe
        self._creationDateTimestamp = int(datetimeToUnixTimeInt(self._creationDate))
    return self._creationDateTimestamp
def getModificationDate(self):
    """ Returns the date this booking was modified last
    """
    if not hasattr(self, "_modificationDate"): #TODO: remove when safe
        self._modificationDate = nowutc()
    return self._modificationDate
def getAdjustedModificationDate(self, tz=None):
    """ Returns the booking last modification date, adjusted to a given timezone.
        If no timezone is provided, the event's timezone is used
    """
    return getAdjustedDate(self.getModificationDate(), self.getConference(), tz)
def getModificationDateTimestamp(self):
    """ Returns the booking last-modification date as a Unix timestamp (int),
        computing and caching it lazily for legacy persisted objects.
    """
    # Fix: the guard tested hasattr(object, ...) — the *builtin* object — which
    # is always False, so the timestamp was recomputed on every call.
    if not hasattr(self, "_modificationDateTimestamp"): #TODO: remove when safe
        self._modificationDateTimestamp = int(datetimeToUnixTimeInt(self._modificationDate))
    return self._modificationDateTimestamp
def setModificationDate(self, date):
    """ Sets the date this booking was modified last, keeping the cached
        Unix timestamp in sync (None clears it).
    """
    self._modificationDate = date
    self._modificationDateTimestamp = int(datetimeToUnixTimeInt(date)) if date else None
def getBookingsOfSameType(self, sorted = False):
    """ Returns a list of the bookings of the same type as this one (including this one)
        sorted: if true, bookings will be sorted by id
    """
    # Looks the manager up through the conference-wide catalog index.
    return Catalog.getIdx("cs_bookingmanager_conference").get(self._conf.getId()).getBookingList(sorted, self._type)
def getPlugin(self):
    """ Returns the Plugin object associated to this booking.
    """
    return self._plugin
def setPlugin(self, plugin):
    """ Sets the Plugin object associated to this booking.
    """
    self._plugin = plugin
def getPluginOptions(self):
    """ Utility method that returns the plugin options for this booking's type of plugin
    """
    return self._plugin.getOptions()
def getPluginOptionByName(self, optionName):
    """ Utility method that returns a plugin option, given its name, for this booking's type of plugin
        Raises KeyError if the option does not exist.
    """
    return self.getPluginOptions()[optionName]
def getStartDate(self):
    """ Returns the start date as an datetime object with timezone information (adjusted to the meeting's timezone)
    """
    return self._startDate
def getAdjustedStartDate(self, tz=None):
    """ Returns the booking start date, adjusted to a given timezone
        (the event's timezone when none is given), or None if unset.
    """
    start = self.getStartDate()
    if not start:
        return None
    return getAdjustedDate(start, self.getConference(), tz)
def getStartDateTimestamp(self):
    """ Returns the booking start date as a Unix timestamp (int),
        computing and caching it lazily for legacy persisted objects.
    """
    # Fix: the guard tested hasattr(object, ...) — the *builtin* object — which
    # is always False, so the timestamp was recomputed on every call.
    if not hasattr(self, "_startDateTimestamp"): #TODO: remove when safe
        self._startDateTimestamp = int(datetimeToUnixTimeInt(self._startDate))
    return self._startDateTimestamp
def setStartDateTimestamp(self, startDateTimestamp):
    # Directly overrides the cached start-date Unix timestamp.
    self._startDateTimestamp = startDateTimestamp
def getStartDateAsString(self):
    """ Returns the start date as a string, expressed in the meeting's timezone
        ("" when no start date is set).
    """
    if self.getStartDate() is None:
        return ""
    return formatDateTime(self.getAdjustedStartDate(), locale='en_US')
def setStartDate(self, startDate):
    """ Sets the start date (tz-aware datetime), keeping the cached
        Unix timestamp in sync (None clears it).
    """
    self._startDate = startDate
    self._startDateTimestamp = int(datetimeToUnixTimeInt(startDate)) if startDate else None
def setStartDateFromString(self, startDateString):
    """ Sets the start date from a string. It is assumed that the date is expressed in the meeting's timezone
    """
    if startDateString == "":
        self.setStartDate(None)
        return
    try:
        self.setStartDate(setAdjustedDate(parseDateTime(startDateString), self._conf))
    except ValueError:
        raise CollaborationServiceException("startDate parameter (" + startDateString +" ) is in an incorrect format for booking with id: " + str(self._id))
def getEndDate(self):
    """ Returns the end date as an datetime object with timezone information (adjusted to the meeting's timezone)
    """
    return self._endDate
def isHappeningNow(self):
    """ True when the current UTC time falls strictly between start and end. """
    now = nowutc()
    return self.getStartDate() < now < self.getEndDate()
def hasHappened(self):
    """ True when the booking's end date is already in the past (UTC). """
    return self.getEndDate() < nowutc()
def getAdjustedEndDate(self, tz=None):
    """ Returns the booking end date, adjusted to a given timezone.
        If no timezone is provided, the event's timezone is used
    """
    return getAdjustedDate(self.getEndDate(), self.getConference(), tz)
def getEndDateTimestamp(self):
    """ Returns the booking end date as a Unix timestamp (int),
        computing and caching it lazily for legacy persisted objects.
    """
    # Fix: the guard tested hasattr(object, ...) — the *builtin* object — which
    # is always False, so the timestamp was recomputed on every call.
    if not hasattr(self, "_endDateTimestamp"): #TODO: remove when safe
        self._endDateTimestamp = int(datetimeToUnixTimeInt(self._endDate))
    return self._endDateTimestamp
def setEndDateTimestamp(self, endDateTimestamp):
    # Directly overrides the cached end-date Unix timestamp.
    self._endDateTimestamp = endDateTimestamp
def getEndDateAsString(self):
    """ Returns the end date as a string, expressed in the meeting's timezone
        ("" when no end date is set).
    """
    if self.getEndDate() is None:
        return ""
    return formatDateTime(self.getAdjustedEndDate(), locale='en_US')
def setEndDate(self, endDate):
    """ Sets the end date (tz-aware datetime), keeping the cached
        Unix timestamp in sync (None clears it).
    """
    self._endDate = endDate
    self._endDateTimestamp = int(datetimeToUnixTimeInt(endDate)) if endDate else None
def setEndDateFromString(self, endDateString):
    """ Sets the end date from a string. It is assumed that the date is expressed in the meeting's timezone
    """
    if endDateString == "":
        self.setEndDate(None)
        return
    try:
        self.setEndDate(setAdjustedDate(parseDateTime(endDateString), self._conf))
    except ValueError:
        raise CollaborationServiceException("endDate parameter (" + endDateString +" ) is in an incorrect format for booking with id: " + str(self._id))
def getStatusMessage(self):
    """ Returns the status message as a string.
        This attribute will be available in Javascript with the "statusMessage"
    """
    status = self.getPlayStatus()
    if status is None:
        # Not started/stopped yet: describe where we are relative to the dates.
        if self.isHappeningNow():
            return _("Ready to start!")
        if self.hasHappened():
            return _("Already took place")
        return _("Booking created")
    return _("Conference started") if status else _("Conference stopped")
def getStatusClass(self):
    """ Returns the status message CSS class as a string.
        This attribute will be available in Javascript with the "statusClass"
    """
    inactive = self.getPlayStatus() is None or self.hasHappened()
    return "statusMessageOther" if inactive else "statusMessageOK"
def accept(self, user = None):
    """ Sets this booking as accepted and delegates plugin-specific work
        to the _accept hook.
    """
    self._acceptRejectStatus = True
    self._accept(user)
def reject(self, reason):
    """ Sets this booking as rejected, stores the reason, and delegates
        plugin-specific work to the _reject hook.
    """
    self._acceptRejectStatus = False
    self._rejectReason = reason
    self._reject()
def clearAcceptRejectStatus(self):
    """ Sets back the accept / reject status to None
    """
    self._acceptRejectStatus = None
def getAcceptRejectStatus(self):
    """ Returns the Accept/Reject status of the booking
        This attribute will be available in Javascript with the "acceptRejectStatus"
        Its value will be:
        -None if the booking has not been accepted or rejected yet,
        -True if it has been accepted,
        -False if it has been rejected
    """
    # Lazily default for legacy persisted objects.
    if not hasattr(self, "_acceptRejectStatus"):
        self._acceptRejectStatus = None
    return self._acceptRejectStatus
def getRejectReason(self):
    """ Returns the rejection reason.
        This attribute will be available in Javascript with the "rejectReason"
    """
    # Lazily default for legacy persisted objects.
    if not hasattr(self, "_rejectReason"):
        self._rejectReason = ""
    return self._rejectReason
## methods relating to the linking of CSBooking objects to Contributions & Sessions
def hasSessionOrContributionLink(self):
    """ True when this booking is linked to a session or a contribution. """
    return self.isLinkedToSession() or self.isLinkedToContribution()
def isLinkedToSession(self):
    # True when the booking's link target is a session.
    return (self._linkVideoType == "session")
def isLinkedToContribution(self):
return (self._linkVideoType == "contribution")
    def getLinkId(self):
        """ Returns the unique ID of the Contribution or Session which this
            object is associated with, completely agnostic of the link type.
            Returns None if no association (default) found.
        """
        return self._linkVideoId
def getLinkIdDict(self):
""" Returns a dictionary of structure linkType (session | contribution)
: unique ID of referenced object.
Returns None if no association is found.
"""
linkId = self.getLinkId()
if linkId == None:
return linkId
return {self._linkVideoType : linkId}
    def getLinkType(self):
        """ Returns a string denoting the link type, that is whether linked
            to a session or contribution.  None when not linked.
        """
        return self._linkVideoType
def setLinkType(self, linkDict):
""" Accepts a dictionary of linkType: linkId """
# case of non-linked bookings
if linkDict is None:
return
self._linkVideoType = linkDict.keys()[0]
self._linkVideoId = linkDict.values()[0]
def resetLinkParams(self):
""" Removes all association with a Session or Contribution from this
CSBooking only.
"""
self._linkVideoType = self._linkVideoId = None
def getLocation(self):
return self._conf.getLocation().getName() if self._conf.getLocation() else ""
def getRoom(self):
return self._conf.getRoom().getName() if self._conf.getRoom() else ""
    def getBookingParams(self):
        """ Returns a dictionary with the booking params.
            This attribute will be available in Javascript with the "bookingParams".
            Simple parameters come from self._bookingParams (falling back to the
            declared default); "complex" parameters declared in the class-level
            _complexParameters list are fetched by calling the corresponding
            getXXX method (e.g. "communityName" -> getCommunityName), which the
            plugin must implement, otherwise CollaborationServiceException is raised.
            Start/end dates, the notify-on-date-change flag and the hidden flag
            are always appended.
        """
        bookingParams = {}
        for k, v in self.__class__._simpleParameters.iteritems():
            if k in self._bookingParams:
                value = self._bookingParams[k]
            else:
                value = v[1] #we use the default value
            if v[0] is bool and value is True: #we assume it will be used in a single checkbox
                value = ["yes"]
            if value is not False: #we do not include False, it means the single checkbox is not checked
                bookingParams[k] = value
        if hasattr(self.__class__, "_complexParameters") and len(self.__class__._complexParameters) > 0:
            # Collect all bound get* methods once, then dispatch by name.
            getterMethods = dict(inspect.getmembers(self, lambda m: inspect.ismethod(m) and m.__name__.startswith('get')))
            for paramName in self.__class__._complexParameters:
                getMethodName = 'get' + paramName[0].upper() + paramName[1:]
                if getMethodName in getterMethods:
                    bookingParams[paramName] = getterMethods[getMethodName]()
                else:
                    raise CollaborationServiceException("Tried to retrieve complex parameter " + str(paramName) + " but the corresponding getter method " + getMethodName + " is not implemented")
        bookingParams["startDate"] = self.getStartDateAsString()
        bookingParams["endDate"] = self.getEndDateAsString()
        if self.needsToBeNotifiedOfDateChanges():
            bookingParams["notifyOnDateChanges"] = ["yes"]
        if self.isHidden():
            bookingParams["hidden"] = ["yes"]
        return bookingParams
    def getBookingParamByName(self, paramName):
        """ Returns the value of a single booking parameter.
            Simple parameters are read from self._bookingParams (lazily seeded
            with the declared default); complex parameters are resolved through
            the matching getXXX method.  Raises CollaborationServiceException
            for unknown parameters or missing getters.
        """
        if paramName in self.__class__._simpleParameters:
            if not paramName in self._bookingParams:
                # Seed the stored params with the declared default value.
                self._bookingParams[paramName] = self.__class__._simpleParameters[paramName][1]
            return self._bookingParams[paramName]
        elif hasattr(self.__class__, "_complexParameters") and paramName in self.__class__._complexParameters:
            getterMethods = dict(inspect.getmembers(self, lambda m: inspect.ismethod(m) and m.__name__.startswith('get')))
            getMethodName = 'get' + paramName[0].upper() + paramName[1:]
            if getMethodName in getterMethods:
                return getterMethods[getMethodName]()
            else:
                raise CollaborationServiceException("Tried to retrieve complex parameter " + str(paramName) + " but the corresponding getter method " + getMethodName + " is not implemented")
        else:
            raise CollaborationServiceException("Tried to retrieve parameter " + str(paramName) + " but this parameter does not exist")
    def getContributionSpeakerSingleBooking(self):
        ''' Return a dictionary with the contributions and their speakers that need to be recorded
            e.g: {contId:[Spk1Object, Spk2Object, Spk3Object], cont2:[Spk1Object]}...
            For lectures ("simple_event") the event chairs are returned under the
            conference id instead, since lectures have no contributions.
        '''
        request = {}
        recordingTalksChoice = self.getBookingParams()["talks"] #either "all", "choose" or ""
        listTalksToRecord = self.getBookingParams()["talkSelection"]
        if self._conf.getType() == "simple_event":
            request[self._conf.getId()] = []
            for chair in self._conf.getChairList():
                request[self._conf.getId()].append(chair)
        else:
            for cont in self._conf.getContributionList():
                ''' We select the contributions that respect the following conditions:
                    - They have Speakers assigned.
                    - They are scheduled. (to discuss...)
                    - They have been chosen for the recording request.
                '''
                if recordingTalksChoice != "choose" or cont.getId() in listTalksToRecord:
                    if cont.isScheduled():
                        request[cont.getId()] = []
                        for spk in cont.getSpeakerList():
                            request[cont.getId()].append(spk)
        return request
    def setBookingParams(self, params):
        """ Sets new booking parameters.
            params: a dict with key/value pairs with the new values for the booking parameters.
            If the plugin's _needsBookingParamsCheck is True, the _checkBookingParams() method will be called.
            This function will return False if all the checks were OK or if there were no checks, and otherwise will throw
            an exception or return a CSReturnedErrorBase error.

            Simple parameters are coerced through the declared type and stored in
            self._bookingParams; "complex" parameter names declared in the
            class-level _complexParameters list are delegated to the matching
            setXXX method (e.g. "communityName" -> setCommunityName).
            Unknown parameter names raise CollaborationServiceException.
        """
        sanitizeResult = self.sanitizeParams(params)
        if sanitizeResult:
            return sanitizeResult

        # "hidden" / "notifyOnDateChanges" arrive as ["yes"] from single checkboxes.
        self.setHidden(params.pop("hidden", False) == ["yes"])
        self.setNeedsToBeNotifiedOfDateChanges(params.pop("notifyOnDateChanges", False) == ["yes"])

        startDate = params.pop("startDate", None)
        if startDate is not None:
            self.setStartDateFromString(startDate)
        endDate = params.pop("endDate", None)
        if endDate is not None:
            self.setEndDateFromString(endDate)

        for k,v in params.iteritems():
            if k in self.__class__._simpleParameters:
                if self.__class__._simpleParameters[k][0]:
                    try:
                        v = self.__class__._simpleParameters[k][0](v)
                    except ValueError:
                        raise CollaborationServiceException("Tried to set value of parameter with name " + str(k) + ", recognized as a simple parameter of type" + str(self._simpleParameters[k]) + ", but the conversion failed")
                self._bookingParams[k] = v
            elif k in self.__class__._complexParameters:
                setterMethods = dict(inspect.getmembers(self, lambda m: inspect.ismethod(m) and m.__name__.startswith('set')))
                setMethodName = 'set' + k[0].upper() + k[1:]
                if setMethodName in setterMethods:
                    setterMethods[setMethodName](v)
                else:
                    raise CollaborationServiceException("Tried to set value of parameter with name " + str(k) + ", recognized as a complex parameter, but the corresponding setter method " + setMethodName + " is not implemented")
            else:
                raise CollaborationServiceException("Tried to set the value of a parameter with name " + str(k) + " that was not declared")

        # Backfill any simple parameter not supplied with its declared default.
        for k, v in self.__class__._simpleParameters.iteritems():
            if not k in self._bookingParams:
                self._bookingParams[k] = self.__class__._simpleParameters[k][1]

        if self.needsBookingParamsCheck():
            return self._checkBookingParams()

        return False
def sanitizeParams(self, params):
""" Checks if the fields introduced into the booking / request form
have any kind of HTML or script tag.
"""
if not isinstance(params, dict):
raise CollaborationServiceException("Booking parameters are not a dictionary")
invalidFields = []
for k, v in params.iteritems():
if isinstance(v, basestring) and hasTags(v):
invalidFields.append(k)
if invalidFields:
return CSSanitizationError(invalidFields)
else:
return None
    def _getTypeDisplayName(self):
        # Display name of this booking's plugin type, from its XML generator.
        return CollaborationTools.getXMLGenerator(self._type).getDisplayName()
    def _getFirstLineInfo(self, tz):
        # First summary line for this booking, rendered in timezone tz.
        return CollaborationTools.getXMLGenerator(self._type).getFirstLineInfo(self, tz)
    def _getTitle(self):
        # Plugins with an event-display presence must override this; for
        # plugins without one the method silently returns None.
        if self.hasEventDisplay():
            raise CollaborationException("Method _getTitle was not overriden for the plugin type " + str(self._type))
def _getInformationDisplay(self, tz):
templateClass = CollaborationTools.getTemplateClass(self.getType(), "WInformationDisplay")
if templateClass:
return templateClass(self, tz).getHTML()
else:
return None
def _getLaunchDisplayInfo(self):
""" To be overloaded by plugins
"""
return None
    def _checkBookingParams(self):
        """ To be overriden by inheriting classes.
            Verifies that the booking parameters are correct. For example, that a numeric field is actually a number.
            Otherwise, an exception should be thrown.
            If there are no errors, the method should just return.
        """
        # Only plugins that declared a params check are required to override.
        if self.needsBookingParamsCheck():
            raise CollaborationServiceException("Method _checkBookingParams was not overriden for the plugin type " + str(self._type))
    def hasStart(self):
        """ Whether this booking's plugin has a "start" concept.
            Exposed to Javascript as "hasStart".
        """
        return self._hasStart
    def hasStartStopAll(self):
        """ Whether this plugin has a "start" concept AND all of its bookings
            for a conference can be started simultaneously.
            NOTE(review): the original note said this is exposed to Javascript
            as "hasStart" — likely a copy-paste slip; confirm the JS name.
        """
        return self._hasStartStopAll
    def hasStop(self):
        """ Whether this booking's plugin has a "stop" concept.
            Exposed to Javascript as "hasStop".
        """
        return self._hasStop
def hasConnect(self):
""" Returns if this booking belongs to a plugin who has a "connect" concept.
This attribute will be available in Javascript with the "hasConnect" attribute
"""
if not hasattr(self, '_hasConnect'):
self._hasConnect = False
return self._hasConnect
def hasDisconnect(self):
""" Returns if this booking belongs to a plugin who has a "connect" concept.
This attribute will be available in Javascript with the "hasConnect" attribute
"""
if not hasattr(self, '_hasDisconnect'):
self._hasDisconnect = False
return self._hasDisconnect
    def hasCheckStatus(self):
        """ Whether this booking's plugin has a "check status" concept.
            Exposed to Javascript as "hasCheckStatus".
        """
        return self._hasCheckStatus
    def isLinkedToEquippedRoom(self):
        # Base implementation knows nothing about room equipment; plugins
        # that care are expected to override.
        return None
    def hasAcceptReject(self):
        """ Whether this booking's plugin has an "accept or reject" concept.
            Exposed to Javascript as "hasAcceptReject".
        """
        return self._hasAcceptReject
    def requiresServerCallForStart(self):
        """ Whether pressing "Start" requires a server call for this plugin.
            Exposed to Javascript as "requiresServerCallForStart".
        """
        return self._requiresServerCallForStart
    def requiresServerCallForStop(self):
        """ Whether pressing "Stop" requires a server call for this plugin.
            Exposed to Javascript as "requiresServerCallForStop".
        """
        return self._requiresServerCallForStop
    def requiresClientCallForStart(self):
        """ Whether pressing "Start" requires a client-side call for this plugin.
            Exposed to Javascript as "requiresClientCallForStart".
        """
        return self._requiresClientCallForStart
    def requiresClientCallForStop(self):
        """ Whether pressing "Stop" requires a client-side call for this plugin.
            Exposed to Javascript as "requiresClientCallForStop".
        """
        return self._requiresClientCallForStop
def requiresClientCallForConnect(self):
""" Returns if this booking belongs to a plugin who requires a client call when the connect button is pressed.
This attribute will be available in Javascript with the "requiresClientCallForConnect" attribute
"""
if not hasattr(self, '_requiresClientCallForConnect'):
self._requiresClientCallForConnect = False
return self._requiresClientCallForConnect
def requiresClientCallForDisconnect(self):
""" Returns if this booking belongs to a plugin who requires a client call when the connect button is pressed.
This attribute will be available in Javascript with the "requiresClientCallForDisconnect" attribute
"""
if not hasattr(self, '_requiresClientCallForDisconnect'):
self._requiresClientCallForDisconnect = False
return self._requiresClientCallForDisconnect
    def canBeDeleted(self):
        """ Whether the "Remove" button is active for this booking.
            Exposed to Javascript as "canBeDeleted".
        """
        return self._canBeDeleted
    def setCanBeDeleted(self, canBeDeleted):
        """ Sets whether the "Remove" button is active for this booking.
            canBeDeleted: a boolean.
        """
        self._canBeDeleted = canBeDeleted
    def canBeStarted(self):
        """ Whether the "Start" button is active for this booking.
            Exposed to Javascript as "canBeStarted".
        """
        # By default a booking can only be started while its event is ongoing.
        return self.isHappeningNow()
    def canBeStopped(self):
        """ Whether the "Stop" button is active for this booking.
            Exposed to Javascript as "canBeStopped".
        """
        return self.isHappeningNow()
    def isPermittedToStart(self):
        """ Whether this booking is allowed to actually start when "Start"
            is pressed (e.g. not before a given time, even if the button is
            active).  Exposed to Javascript as "isPermittedToStart".
        """
        return self._permissionToStart
    def isPermittedToStop(self):
        """ Whether this booking is allowed to actually stop when "Stop"
            is pressed.  Exposed to Javascript as "isPermittedToStop".
        """
        return self._permissionToStop
    def needsBookingParamsCheck(self):
        """ Whether this plugin wants _checkBookingParams() run after
            setBookingParams().
        """
        return self._needsBookingParamsCheck
    def needsToBeNotifiedOnView(self):
        """ Whether this booking wants _notifyOnView() called when someone
            views it (e.g. when the list of bookings is returned).
        """
        return self._needsToBeNotifiedOnView
    def canBeNotifiedOfEventDateChanges(self):
        """ Whether bookings of this type CAN subscribe to their owner Event's
            start date / end date / timezone changes.  Off by default;
            plugins opt in by overriding.
        """
        return False
    def needsToBeNotifiedOfDateChanges(self):
        """ Whether THIS booking wants to be notified of its owner Event
            changing start date, end date or timezone.
        """
        return self._needsToBeNotifiedOfDateChanges
    def setNeedsToBeNotifiedOfDateChanges(self, needsToBeNotifiedOfDateChanges):
        """ Sets whether this booking wants to be notified of its owner Event
            changing start date, end date or timezone.
        """
        self._needsToBeNotifiedOfDateChanges = needsToBeNotifiedOfDateChanges
def isHidden(self):
""" Return if this booking is "hidden"
A hidden booking will not appear in display pages
"""
if not hasattr(self, '_hidden'):
self._hidden = False
return self._hidden
    def setHidden(self, hidden):
        """ Sets whether this booking is "hidden" (not shown in display pages).
            hidden: a Boolean
        """
        self._hidden = hidden
    def isAllowMultiple(self):
        """ Whether this booking's type allows multiple bookings per event. """
        return self._allowMultiple
    def shouldBeIndexed(self):
        """ Whether bookings of this type should be indexed. """
        return self._shouldBeIndexed
    def getCommonIndexes(self):
        """ Returns a list of strings with the names of the common (shared)
            indexes that bookings of this type want to be included in.
        """
        return self._commonIndexes
    def index_instances(self):
        """ To be overloaded: add this booking's instances to the indexes.
            The base implementation does nothing.
        """
        return
    def unindex_instances(self):
        """ To be overloaded: remove this booking's instances from the indexes.
            The base implementation does nothing.
        """
        return
    def index_talk(self, talk):
        """ To be overloaded: index the given talk for this booking.
            The base implementation does nothing.
        """
        return
    def unindex_talk(self, talk):
        """ To be overloaded: unindex the given talk for this booking.
            The base implementation does nothing.
        """
        return
    def getModificationURL(self):
        # URL of the collaboration modification page for this booking's
        # conference, on the tab of this booking's plugin; https is kept in
        # sync with the current request handler.
        return UHConfModifCollaboration.getURL(self.getConference(),
                                               secure = ContextManager.get('currentRH').use_https(),
                                               tab = CollaborationTools.getPluginTab(self.getPlugin()))
    def hasStartDate(self):
        """ Whether bookings of this type have a start date
            (they may only have creation / modification date).
        """
        return self._hasStartDate
    def hasTitle(self):
        """ Whether bookings of this type have a title. """
        return self._hasTitle
    def hasEventDisplay(self):
        """ Whether this booking's type displays something on an event
            display page.
        """
        return self._hasEventDisplay
    def keepForever(self):
        """ Whether this booking stays in the Video Services Overview
            indexes forever.
        """
        return self._keepForever
    def canBeDisplayed(self):
        """ Whether this booking can be displayed in the event page.
            By default True (shown as "Active"); plugins may override.
        """
        return True
    def isAdminOnly(self):
        """ Whether this booking / this booking's plugin pages are only shown
            to Server Admins, Video Service Admins, or the plugin's admins.
        """
        return self._adminOnly
    def _create(self):
        """ To be overriden by inheriting classes.
            This method is called when a booking is created, after setting the booking parameters.
            The plugin should decide if the booking is accepted or not.
            Often this will involve communication with another entity, like an MCU for the multi-point H.323 plugin,
            or a EVO HTTP server in the EVO case.
        """
        raise CollaborationException("Method _create was not overriden for the plugin type " + str(self._type))
    def _attach(self):
        """ To be overriden by inheriting classes.
            This method is called when a booking is attached, after setting the booking parameters.
            The plugin should decide if the booking is accepted or not.
            Often this will involve communication with another entity, like an MCU for the multi-point H.323 plugin,
            or a EVO HTTP server in the EVO case.
        """
        raise CollaborationException("Method _attach was not overriden for the plugin type " + str(self._type))
    def _modify(self, oldBookingParams):
        """ To be overriden by inheriting classes.
            This method is called when a booking is modified, after setting the booking parameters.
            The plugin should decide if the booking is accepted or not.
            Often this will involve communication with another entity, like an MCU for the multi-point H.323 plugin
            or a EVO HTTP server in the EVO case.
            oldBookingParams: the dict obtained from self.getBookingParams()
            BEFORE the new user-supplied params were applied.
        """
        raise CollaborationException("Method _modify was not overriden for the plugin type " + str(self._type))
def _start(self):
""" To be overriden by inheriting classes
This method is called when the user presses the "Start" button in a plugin who has a "Start" concept
and whose flag _requiresServerCallForStart is True.
Often this will involve communication with another entity.
"""
if self.hasStart():
raise CollaborationException("Method _start was not overriden for the plugin type " + str(self._type))
else:
pass
def _stop(self):
""" To be overriden by inheriting classes
This method is called when the user presses the "Stop" button in a plugin who has a "Stop" concept
and whose flag _requiresServerCallForStop is True.
Often this will involve communication with another entity.
"""
if self.hasStop():
raise CollaborationException("Method _stop was not overriden for the plugin type " + str(self._type))
else:
pass
def _checkStatus(self):
""" To be overriden by inheriting classes
This method is called when the user presses the "Check Status" button in a plugin who has a "check status" concept.
Often this will involve communication with another entity.
"""
if self.hasCheckStatus():
raise CollaborationException("Method _checkStatus was not overriden for the plugin type " + str(self._type))
else:
pass
def _accept(self, user = None):
""" To be overriden by inheriting classes
This method is called when a user with privileges presses the "Accept" button
in a plugin who has a "accept or reject" concept.
Often this will involve communication with another entity.
"""
if self.hasAcceptReject():
raise CollaborationException("Method _accept was not overriden for the plugin type " + str(self._type))
else:
pass
def _reject(self):
""" To be overriden by inheriting classes
This method is called when a user with privileges presses the "Reject" button
in a plugin who has a "accept or reject" concept.
Often this will involve communication with another entity.
"""
if self.hasAcceptReject():
raise CollaborationException("Method _reject was not overriden for the plugin type " + str(self._type))
else:
pass
def _notifyOnView(self):
""" To be overriden by inheriting classes
This method is called when a user "sees" a booking, for example when the list of bookings is displayed.
Maybe in this moment the booking wants to update its status.
"""
if self.needsToBeNotifiedOnView():
raise CollaborationException("Method _notifyOnView was not overriden for the plugin type " + str(self._type))
else:
pass
    def _delete(self):
        """ To be overriden by inheriting classes.
            Called when the user removes a booking; the plugin may need to
            free resources allocated to it.
            This method does NOT unregister the booking from the list of
            date-change observers of the meeting.
        """
        raise CollaborationException("Method _delete was not overriden for the plugin type " + str(self._type))
    def _sendNotifications(self, operation):
        """ Sends a mail for the given operation ('new' / 'modify' / 'remove'),
            wrapped with ExternalOperationsManager so it is executed at most
            once per request.
        """
        ExternalOperationsManager.execute(self, "sendMail_" + operation, self._sendMail, operation)
def _sendMail(self, operation):
if operation == 'new':
try:
notification = mail.NewBookingNotification(self)
GenericMailer.sendAndLog(notification, self._conf,
self.getPlugin().getName())
except Exception, e:
Logger.get('VideoServ').error(
"""Could not send NewBookingNotification for booking with id %s of event with id %s, exception: %s""" %
(self.getId(), self._conf.getId(), str(e)))
raise
elif operation == 'modify':
try:
notification = mail.BookingModifiedNotification(self)
GenericMailer.sendAndLog(notification, self._conf,
self.getPlugin().getName())
except Exception, e:
Logger.get('VideoServ').error(
"""Could not send BookingModifiedNotification for booking with id %s of event with id %s, exception: %s""" %
(self.getId(), self._conf.getId(), str(e)))
raise
elif operation == 'remove':
try:
notification = mail.BookingDeletedNotification(self)
GenericMailer.sendAndLog(notification, self._conf,
self.getPlugin().getName())
except Exception, e:
Logger.get('VideoServ').error(
"""Could not send BookingDeletedNotification for booking with id %s of event with id %s, exception: %s""" %
(self.getId(), self._conf.getId(), str(e)))
raise
def getPlayStatus(self):
if not hasattr(self, '_play_status'):
self._play_status = None
return self._play_status
""" Methods relating to the certain plugin architectures whereby talk
selection is appropriate through the inheriting class' attributes.
"""
def hasTalkSelection(self):
""" Some plugin types select individual contributions stored as a list
of IDs in this parameter, returns param if this instance is one of them.
"""
return self._bookingParams.has_key('talkSelection')
def _getTalkSelection(self):
""" Returns the attribute if it is defined, None on error. """
if self.hasTalkSelection():
return self._bookingParams.get('talkSelection')
return None
def _hasTalkSelectionContent(self):
""" If the talkSelection attribute is present and it has a quantity of
items in its list greater than 0, individual talks have been chosen.
"""
ts = self._getTalkSelection()
if ts is None:
return False
return len(ts) > 0
def getTalkSelectionList(self):
""" Returns the resultant list if it is present and populated. None if
neither are true.
"""
if not self._hasTalkSelectionContent():
return None
return self._getTalkSelection()
def _hasTalks(self):
""" Returns the attribute if it is defined, None on error. """
return self._bookingParams.has_key('talks')
def isChooseTalkSelected(self):
""" Returns if the talks are choosen"""
if self._hasTalks():
return self._bookingParams.get('talks') == "choose"
else:
return False
    def __cmp__(self, booking):
        # Python 2 ordering: compare by unique id; any booking sorts after
        # a falsy/None other.
        return cmp(self.getUniqueId(), booking.getUniqueId()) if booking else 1
    def checkAttachParams(self, bookingParams):
        # Base implementation accepts any attach params; plugins may override
        # and return an error object instead.
        return None
    def notifyDeletion(self, obj):
        """ To be overriden by inheriting classes.
            Called when the parent object (obj) has been deleted and some
            cleanup actions are needed.  The base implementation does nothing.
        """
        pass
class WCSTemplateBase(wcomponents.WTemplated):
    """ Base class for Collaboration templates.
        It stores the following attributes:
            _plugin / _pluginId : the corresponding plugin ("EVO", "DummyPlugin", etc.).
            _ph : the PluginsHolder singleton.
            _XXXOptions: a dictionary whose values are the options of the plugin called pluginName.
                         So, for example, if an EVO template inherits from this class, an attribute self._EVOOptions will be available.
        This class also overloads the _setTPLFile method so that Indico knows where each plugin's *.tpl files are.
    """

    def __init__(self, pluginId):
        """ Constructor for the WCSTemplateBase class.
            pluginId: id of the corresponding plugin
        """
        self._plugin = CollaborationTools.getPlugin(pluginId)
        self._pluginId = self._plugin.getId()
        self._ph = PluginsHolder()

        # Expose the plugin options under a per-plugin attribute name,
        # e.g. self._EVOOptions for the "EVO" plugin.
        setattr(self, "_" + self._pluginId + "Options", self._plugin.getOptions())

    def _setTPLFile(self, extension='tpl'):
        # Resolve the template file inside the plugin package's "tpls" dir,
        # and the context-help (.wohl) file under tpls/<plugin>/chelp.
        tplDir = pkg_resources.resource_filename(self._plugin.getModule().__name__, "tpls")

        fname = "%s.%s" % (self.tplId, extension)
        self.tplFile = os.path.join(tplDir, fname)

        hfile = self._getSpecificTPL(os.path.join(tplDir,self._pluginId,'chelp'), self.tplId,extension='wohl')
        self.helpFile = os.path.join(tplDir,'chelp',hfile)
class WCSPageTemplateBase(WCSTemplateBase):
    """ Base class for Collaboration templates for the create / modify booking form.
        Adds the Conference object and the current user to the base template.
    """

    def __init__(self, conf, pluginId, user):
        WCSTemplateBase.__init__(self, pluginId)
        self._conf = conf   # the Conference this form belongs to
        self._user = user   # the user filling in the form
class WJSBase(WCSTemplateBase):
    """ Base class for Collaboration templates for Javascript code template.
        It overloads _setTPLFile so that Indico can find the Main.js, Extra.js
        and Indexing.js files (same lookup as .tpl, but with a .js extension
        and no context-help file).
    """
    def __init__(self, conf, plugin, user):
        WCSTemplateBase.__init__(self, plugin)
        self._conf = conf
        self._user = user

    def _setTPLFile(self):
        WCSTemplateBase._setTPLFile(self, extension='js')
        # JS templates have no context help.
        self.helpFile = ''
class WCSCSSBase(WCSTemplateBase):
    """ Base class for Collaboration templates for CSS code template.
        It overloads _setTPLFile so that Indico can find the style.css files
        (looked up at the plugin package root, not in "tpls").
    """

    def _setTPLFile(self):
        tplDir = pkg_resources.resource_filename(self._plugin.getModule().__name__, "")
        fname = "%s.css" % self.tplId
        self.tplFile = os.path.join(tplDir, fname)
        # CSS templates have no context help.
        self.helpFile = ''
class CSErrorBase(Fossilizable):
    fossilizes(ICSErrorBaseFossil)

    """ When _create, _modify or _remove want to return an error,
        they should return an error that inherits from this class
    """

    def __init__(self):
        pass

    def getUserMessage(self):
        """ To be overloaded.
            Returns the string that will be shown to the user when this error happens.
        """
        raise CollaborationException("Method getUserMessage was not overriden for the a CSErrorBase object of class " + self.__class__.__name__)

    def getLogMessage(self):
        """ To be overloaded.
            Returns the string that will be printed in Indico's log when this error happens.
        """
        raise CollaborationException("Method getLogMessage was not overriden for the a CSErrorBase object of class " + self.__class__.__name__)
class CSSanitizationError(CSErrorBase): #already Fossilizable
    fossilizes(ICSSanitizationErrorFossil)

    """ Class used to return which fields have a sanitization error (invalid html / script tags)
    """

    def __init__(self, invalidFields):
        # List of parameter names whose values contained forbidden tags.
        self._invalidFields = invalidFields

    def invalidFields(self):
        """ Returns the list of offending field names. """
        return self._invalidFields
class CollaborationException(MaKaCError):
    """ Error for the Collaboration System "core". Each plugin should declare their own EVOError, etc.
    """
    def __init__(self, msg, area = 'Collaboration', inner = None):
        # inner: the originating exception (or any context), kept for logging.
        MaKaCError.__init__(self, msg, area)
        self._inner = inner

    def getInner(self):
        """ Returns the wrapped inner exception / context, if any. """
        return self._inner

    def __str__(self):
        return MaKaCError.__str__(self) + '. Inner: ' + str(self._inner)
class CollaborationServiceException(ServiceError):
    """ Error for the Collaboration System "core", for Service calls.
        Always reported under the "ERR-COLL" error code.
    """
    def __init__(self, message, inner = None):
        ServiceError.__init__(self, "ERR-COLL", message, inner)
class SpeakerStatusEnum:
    # Agreement status values for a speaker.  NOTE: NOEMAIL == 0 is falsy,
    # which makes "x and NOEMAIL or y" style idioms on these values unsafe.
    (NOEMAIL, NOTSIGNED, SIGNED, FROMFILE, PENDING, REFUSED) = xrange(6)
class SpeakerWrapper(Persistent, Fossilizable):
fossilizes(ISpeakerWrapperBaseFossil)
def __init__(self, speaker, contId, requestType):
self.status = not speaker.getEmail() and SpeakerStatusEnum.NOEMAIL or SpeakerStatusEnum.NOTSIGNED
self.speaker = speaker
self.contId = contId
self.requestType = requestType
self.reason = ""
self.localFile = None
self.dateAgreement = 0
self.ipSignature = None
self.modificationDate = nowutc()
self.uniqueIdHash = md5("%s.%s"%(time.time(), self.getUniqueId())).hexdigest()
def getUniqueId(self):
return "%s.%s"%(self.contId, self.speaker.getId())
def getUniqueIdHash(self):
# to remove once saved
if not hasattr(self, "uniqueIdHash"):#TODO: remove when safe
return md5(self.getUniqueId()).hexdigest()
else:
return self.uniqueIdHash
    def getStatus(self):
        # Current SpeakerStatusEnum value for this speaker's agreement.
        return self.status
    def setStatus(self, newStatus, ip=None):
        """ Updates the agreement status; on SIGNED / FROMFILE also stamps the
            agreement date, and on SIGNED records the signer's IP.
            Any error is swallowed and only logged.
            NOTE(review): this uses now_utc() while __init__ uses nowutc() —
            one of the two spellings is likely wrong; confirm which helper the
            module actually imports.
        """
        try:
            self.status = newStatus
            if newStatus == SpeakerStatusEnum.SIGNED or newStatus == SpeakerStatusEnum.FROMFILE:
                self.dateAgreement = now_utc()
                if newStatus == SpeakerStatusEnum.SIGNED:
                    self.ipSignature = ip
        except Exception, e:
            Logger.get('VideoServ').error("Exception while changing the speaker status. Exception: " + str(e))
def getDateAgreementSigned(self):
if hasattr(self, "dateAgreement"):#TODO: remove when safe
return self.dateAgreement
return 0
def getIpAddressWhenSigned(self):
if hasattr(self, "ipSignature"):#TODO: remove when safe
return self.ipSignature
return None
def getRejectReason(self):
if hasattr(self, "reason"):#TODO: remove when safe
if self.status == SpeakerStatusEnum.REFUSED and hasattr(self, "reason"):
return self.reason
else:
return "This speaker has not refused the agreement."
else:
return "Information not available."
    def setRejectReason(self, reason):
        # Stores the refusal reason.  NOTE(review): silently does nothing for
        # legacy objects that lack the attribute — confirm that is intended.
        if hasattr(self, "reason"):#TODO: remove when safe
            self.reason = reason
    def getObject(self):
        # The wrapped speaker object itself.
        return self.speaker
    def getContId(self):
        # Id of the contribution this speaker belongs to.
        return self.contId
def getRequestType(self):
if hasattr(self, "requestType"):#TODO: remove when safe
return self.requestType
return "NA"
    def setRequestType(self, type):
        # type: 'recording', 'webcast' or 'both'.
        self.requestType = type
def getSpeakerId(self):
return self.speaker.getId()
def getLocalFile(self):
'''
If exists, return path to paper agreement
'''
if hasattr(self, "localFile"):#TODO: remove when safe
return self.localFile
def setLocalFile(self, localFile):
'''
Set localFile of paper agreement
'''
if hasattr(self, "localFile"):#TODO: remove when safe
self.localFile = localFile
def hasEmail(self):
if self.speaker.getEmail():
return True
return False
def getCategory(self):
return None
def getConference(self):
return self.speaker.getConference()
def getContribution(self):
# if the conference is a lecture, the getContribution will fail.
if self.getConference().getType() == "simple_event":
return None
else:
return self.speaker.getContribution()
def getSession(self):
return None
def getSubContribution(self):
return None
def getModificationDate(self):
if hasattr(self, "modificationDate"): # TODO: remove when safe
return self.modificationDate
return None
def setModificationDate(self):
if hasattr(self, "modificationDate"): # TODO: remove when safe
self.modificationDate = now_utc()
def getLocator(self):
return self.getContribution().getLocator()
def triggerNotification(self):
if self.getRequestType() in ('recording', 'webcast'):
self._triggerNotification(self.getRequestType())
elif self.getRequestType() == 'both':
self._triggerNotification('recording')
self._triggerNotification('webcast')
    def _triggerNotification(self, type):
        """POST this speaker's agreement status to the configured notification URL.

        type -- 'recording' or 'webcast'; selects which plugin option holds the
        target URL.  No-op when no URL is configured for that request type.
        """
        url = None
        if type == 'recording':
            url = CollaborationTools.getOptionValue('RecordingRequest', 'AgreementNotificationURL')
        elif type == 'webcast':
            url = CollaborationTools.getOptionValue('WebcastRequest', 'AgreementNotificationURL')
        if not url:
            return
        # signed: True when agreed (electronically or on paper), False when
        # refused, None while the agreement is still pending.
        signed = None
        if self.getStatus() in (SpeakerStatusEnum.FROMFILE, SpeakerStatusEnum.SIGNED):
            signed = True
        elif self.getStatus() == SpeakerStatusEnum.REFUSED:
            signed = False
        spk = self.getObject()
        payload = {
            'confId': self.getConference().getId(),
            'contrib': self.getContId(),
            'type': type,
            'status': self.getStatus(),
            'signed': signed,
            'speaker': {
                'id': spk.getId(),
                'name': spk.getFullName(),
                'email': spk.getEmail()
            }
        }
        # Delivered asynchronously via the task-queue client rather than a
        # blocking HTTP call in the request cycle.
        cl = Client()
        cl.enqueue(HTTPTask(url, {'data': json.dumps(payload)}))
| codeparrot/github-code-clean |
"""
Instructor Dashboard API views
JSON views which the instructor dashboard requests.
Many of these GETs may become PUTs in the future.
"""
import csv
import json
import logging
import random
import re
import string
import six
import unicodecsv
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist, PermissionDenied, ValidationError
from django.core.validators import validate_email
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseNotFound
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.html import strip_tags
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_control
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_http_methods, require_POST
from edx_rest_framework_extensions.auth.jwt.authentication import JwtAuthentication
from edx_rest_framework_extensions.auth.session.authentication import SessionAuthenticationAllowInactiveUser
from edx_when.api import get_date_for_block
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from rest_framework import status
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView
from six import text_type
from six.moves import map, range
from submissions import api as sub_api # installed from the edx-submissions repository
import instructor_analytics.basic
import instructor_analytics.csvs
import instructor_analytics.distributions
from bulk_email.api import is_bulk_email_feature_enabled
from bulk_email.models import CourseEmail
from course_modes.models import CourseMode
from lms.djangoapps.certificates import api as certs_api
from lms.djangoapps.certificates.models import (
CertificateInvalidation,
CertificateStatuses,
CertificateWhitelist,
GeneratedCertificate
)
from lms.djangoapps.courseware.access import has_access
from lms.djangoapps.courseware.courses import get_course_by_id, get_course_with_access
from lms.djangoapps.courseware.models import StudentModule
from lms.djangoapps.discussion.django_comment_client.utils import (
get_course_discussion_settings,
get_group_id_for_user,
get_group_name,
has_forum_access
)
from lms.djangoapps.instructor import enrollment
from lms.djangoapps.instructor.access import ROLES, allow_access, list_with_level, revoke_access, update_forum_role
from lms.djangoapps.instructor.enrollment import (
enroll_email,
get_email_params,
get_user_email_language,
send_beta_role_email,
send_mail_to_student,
unenroll_email
)
from lms.djangoapps.instructor.views import INVOICE_KEY
from lms.djangoapps.instructor.views.instructor_task_helpers import extract_email_features, extract_task_features
from lms.djangoapps.instructor_task import api as task_api
from lms.djangoapps.instructor_task.api_helper import AlreadyRunningError, QueueConnectionError
from lms.djangoapps.instructor_task.models import ReportStore
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from openedx.core.djangoapps.django_comment_common.models import (
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_MODERATOR,
Role
)
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.user_api.preferences.api import get_user_preference
from openedx.core.djangolib.markup import HTML, Text
from openedx.core.lib.api.authentication import BearerAuthenticationAllowInactiveUser
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin
from student import auth
from student.models import (
ALLOWEDTOENROLL_TO_ENROLLED,
ALLOWEDTOENROLL_TO_UNENROLLED,
DEFAULT_TRANSITION_STATE,
ENROLLED_TO_ENROLLED,
ENROLLED_TO_UNENROLLED,
UNENROLLED_TO_ALLOWEDTOENROLL,
UNENROLLED_TO_ENROLLED,
UNENROLLED_TO_UNENROLLED,
CourseEnrollment,
CourseEnrollmentAllowed,
EntranceExamConfiguration,
ManualEnrollmentAudit,
Registration,
UserProfile,
anonymous_id_for_user,
get_user_by_username_or_email,
is_email_retired,
unique_id_for_user
)
from student.roles import CourseFinanceAdminRole, CourseSalesAdminRole
from util.file import (
FileValidationException,
UniversalNewlineIterator,
course_and_time_based_filename_generator,
store_uploaded_file
)
from util.json_request import JsonResponse, JsonResponseBadRequest
from util.views import require_global_staff
from xmodule.modulestore.django import modulestore
from .. import permissions
from .tools import (
dump_module_extensions,
dump_student_extensions,
find_unit,
get_student_from_identifier,
handle_dashboard_error,
parse_datetime,
require_student_from_identifier,
set_due_date_extension,
strip_if_string
)
# Module-level logger, named after this module per logging convention.
log = logging.getLogger(__name__)

# Status string returned when an instructor task was successfully submitted.
TASK_SUBMISSION_OK = 'created'

# NOTE(review): evaluated eagerly at import time with ugettext; a lazy
# translation would pick up the per-request language -- confirm intent.
SUCCESS_MESSAGE_TEMPLATE = _(u"The {report_type} report is being created. "
                             "To view the status of the report, see Pending Tasks below.")
def common_exceptions_400(func):
    """
    Catches common exceptions and renders matching 400 errors.
    (decorator without arguments)

    ``User.DoesNotExist``, ``MultipleObjectsReturned``, task-queue errors and
    ``AttributeError`` become 400 responses, rendered as JSON when the client
    asked for it (AJAX or an application/json Accept header) and plain text
    otherwise.
    """
    from functools import wraps

    @wraps(func)  # preserve the view's __name__/__doc__ for debugging/introspection
    def wrapped(request, *args, **kwargs):
        use_json = (request.is_ajax() or
                    request.META.get("HTTP_ACCEPT", "").startswith("application/json"))
        try:
            return func(request, *args, **kwargs)
        except User.DoesNotExist:
            message = _('User does not exist.')
        except MultipleObjectsReturned:
            message = _('Found a conflict with given identifier. Please try an alternative identifier')
        except (AlreadyRunningError, QueueConnectionError, AttributeError) as err:
            message = six.text_type(err)
        if use_json:
            return JsonResponseBadRequest(message)
        else:
            return HttpResponseBadRequest(message)
    return wrapped
def require_post_params(*args, **kwargs):
    """
    Decorator factory enforcing the presence of POST parameters.

    Positional ``args`` name required parameters; ``kwargs`` map required
    parameter names to human-readable explanations.  When any are missing the
    wrapped view is not called and a 400 JSON response lists them instead.
    """
    required_params = [(name, None) for name in args]
    required_params.extend(kwargs.items())

    def decorator(func):
        def wrapped(*f_args, **f_kwargs):
            request = f_args[0]
            # Sentinel distinguishes "absent" from any real POST value.
            _missing = object()
            absent = [
                (name, info) for (name, info) in required_params
                if request.POST.get(name, _missing) is _missing
            ]
            if absent:
                return JsonResponse({
                    'error': 'Missing required query parameter(s)',
                    'parameters': [name for name, __ in absent],
                    'info': dict(absent),
                }, status=400)
            return func(*f_args, **f_kwargs)
        return wrapped
    return decorator
def require_course_permission(permission):
    """
    Decorator factory gating a view on a course-level permission.

    Looks up the course from ``kwargs['course_id']`` (request assumed to be
    ``args[0]``) and returns 403 when the requesting user lacks ``permission``
    on it.
    """
    def decorator(func):
        def wrapped(*args, **kwargs):
            request = args[0]
            course = get_course_by_id(CourseKey.from_string(kwargs['course_id']))
            # Guard clause: reject early when the permission check fails.
            if not request.user.has_perm(permission, course):
                return HttpResponseForbidden()
            return func(*args, **kwargs)
        return wrapped
    return decorator
def require_sales_admin(func):
    """
    Restrict an HTTP endpoint to course sales administrators.

    Expects the view to take ``(request, course_id)``.  Returns 404 when the
    course id cannot be parsed and 403 when the requesting user lacks the
    sales-admin role for the course.
    """
    def wrapped(request, course_id):
        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            log.error(u"Unable to find course with course key %s", course_id)
            return HttpResponseNotFound()
        # Guard clause: reject early when the role check fails.
        if not auth.user_has_role(request.user, CourseSalesAdminRole(course_key)):
            return HttpResponseForbidden()
        return func(request, course_id)
    return wrapped
def require_finance_admin(func):
    """
    Restrict an HTTP endpoint to course finance administrators.

    Expects the view to take ``(request, course_id)``.  Returns 404 when the
    course id cannot be parsed and 403 when the requesting user lacks the
    finance-admin role for the course.
    """
    def wrapped(request, course_id):
        try:
            course_key = CourseKey.from_string(course_id)
        except InvalidKeyError:
            log.error(u"Unable to find course with course key %s", course_id)
            return HttpResponseNotFound()
        # Guard clause: reject early when the role check fails.
        if not auth.user_has_role(request.user, CourseFinanceAdminRole(course_key)):
            return HttpResponseForbidden()
        return func(request, course_id)
    return wrapped
# Column positions expected in the bulk-registration CSV upload
# (see register_and_enroll_students below).
EMAIL_INDEX = 0
USERNAME_INDEX = 1
NAME_INDEX = 2
COUNTRY_INDEX = 3
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
def register_and_enroll_students(request, course_id):  # pylint: disable=too-many-statements
    """
    Create new account and Enroll students in this course.
    Passing a csv file that contains a list of students.
    Order in csv should be the following email = 0; username = 1; name = 2; country = 3.
    Requires staff access.
    -If the email address and username already exists and the user is enrolled in the course,
    do nothing (including no email gets sent out)
    -If the email address already exists, but the username is different,
    match on the email address only and continue to enroll the user in the course using the email address
    as the matching criteria. Note the change of username as a warning message (but not a failure).
    Send a standard enrollment email which is the same as the existing manual enrollment
    -If the username already exists (but not the email), assume it is a different user and fail
    to create the new account.
    The failure will be messaged in a response in the browser.
    """
    # Feature-gated: site configuration overrides the platform FEATURES flag.
    if not configuration_helpers.get_value(
        'ALLOW_AUTOMATED_SIGNUPS',
        settings.FEATURES.get('ALLOW_AUTOMATED_SIGNUPS', False),
    ):
        return HttpResponseForbidden()
    course_id = CourseKey.from_string(course_id)
    warnings = []
    row_errors = []
    general_errors = []
    # for white labels we use 'shopping cart' which uses CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG as
    # course mode for creating course enrollments.
    if CourseMode.is_white_label(course_id):
        course_mode = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
    else:
        course_mode = None
    if 'students_list' in request.FILES:
        students = []
        try:
            upload_file = request.FILES.get('students_list')
            if upload_file.name.endswith('.csv'):
                students = [row for row in csv.reader(upload_file.read().decode('utf-8').splitlines())]
                # NOTE(review): 'course' is only bound on this branch; later
                # use at get_email_params is safe only because 'students' stays
                # empty on the other branches -- confirm.
                course = get_course_by_id(course_id)
            else:
                general_errors.append({
                    'username': '', 'email': '',
                    'response': _(
                        'Make sure that the file you upload is in CSV format with no extraneous characters or rows.')
                })
        except Exception:  # pylint: disable=broad-except
            general_errors.append({
                'username': '', 'email': '', 'response': _('Could not read uploaded file.')
            })
        finally:
            # NOTE(review): if request.FILES.get itself raised, upload_file
            # would be unbound here -- confirm that cannot happen.
            upload_file.close()
        generated_passwords = []
        row_num = 0
        for student in students:
            row_num = row_num + 1
            # verify that we have exactly four columns in every row but allow for blank lines
            if len(student) != 4:
                if student:
                    error = _(u'Data in row #{row_num} must have exactly four columns: '
                              'email, username, full name, and country').format(row_num=row_num)
                    general_errors.append({
                        'username': '',
                        'email': '',
                        'response': error
                    })
                continue
            # Iterate each student in the uploaded csv file.
            email = student[EMAIL_INDEX]
            username = student[USERNAME_INDEX]
            name = student[NAME_INDEX]
            country = student[COUNTRY_INDEX][:2]
            email_params = get_email_params(course, True, secure=request.is_secure())
            try:
                validate_email(email)  # Raises ValidationError if invalid
            except ValidationError:
                row_errors.append({
                    'username': username,
                    'email': email,
                    'response': _(u'Invalid email {email_address}.').format(email_address=email)
                })
            else:
                if User.objects.filter(email=email).exists():
                    # Email address already exists. assume it is the correct user
                    # and just register the user in the course and send an enrollment email.
                    user = User.objects.get(email=email)
                    # see if it is an exact match with email and username
                    # if it's not an exact match then just display a warning message, but continue onwards
                    if not User.objects.filter(email=email, username=username).exists():
                        warning_message = _(
                            u'An account with email {email} exists but the provided username {username} '
                            u'is different. Enrolling anyway with {email}.'
                        ).format(email=email, username=username)
                        warnings.append({
                            'username': username, 'email': email, 'response': warning_message
                        })
                        log.warning(u'email %s already exist', email)
                    else:
                        log.info(
                            u"user already exists with username '%s' and email '%s'",
                            username,
                            email
                        )
                    # enroll a user if it is not already enrolled.
                    if not CourseEnrollment.is_enrolled(user, course_id):
                        # Enroll user to the course and add manual enrollment audit trail
                        create_manual_course_enrollment(
                            user=user,
                            course_id=course_id,
                            mode=course_mode,
                            enrolled_by=request.user,
                            reason='Enrolling via csv upload',
                            state_transition=UNENROLLED_TO_ENROLLED,
                        )
                        enroll_email(course_id=course_id,
                                     student_email=email,
                                     auto_enroll=True,
                                     email_students=True,
                                     email_params=email_params)
                elif is_email_retired(email):
                    # We are either attempting to enroll a retired user or create a new user with an email which is
                    # already associated with a retired account. Simply block these attempts.
                    row_errors.append({
                        'username': username,
                        'email': email,
                        'response': _(u'Invalid email {email_address}.').format(email_address=email),
                    })
                    log.warning(u'Email address %s is associated with a retired user, so course enrollment was ' +
                                u'blocked.', email)
                else:
                    # This email does not yet exist, so we need to create a new account
                    # If username already exists in the database, then create_and_enroll_user
                    # will raise an IntegrityError exception.
                    password = generate_unique_password(generated_passwords)
                    errors = create_and_enroll_user(
                        email, username, name, country, password, course_id, course_mode, request.user, email_params
                    )
                    row_errors.extend(errors)
    else:
        general_errors.append({
            'username': '', 'email': '', 'response': _('File is not attached.')
        })
    results = {
        'row_errors': row_errors,
        'general_errors': general_errors,
        'warnings': warnings
    }
    return JsonResponse(results)
def generate_random_string(length):
    """
    Create a string of random characters of specified length.

    Visually ambiguous characters (vowels, '1', 'l') are excluded so the
    result is readable; output feeds generated account passwords, so draw
    from the OS entropy source rather than the default Mersenne Twister.
    """
    chars = [
        char for char in string.ascii_uppercase + string.digits + string.ascii_lowercase
        if char not in 'aAeEiIoOuU1l'
    ]
    # SECURITY: random.choice (PRNG) is predictable; SystemRandom is
    # cryptographically secure and needs no new import.
    rng = random.SystemRandom()
    return ''.join(rng.choice(chars) for __ in range(length))
def generate_unique_password(generated_passwords, password_length=12):
    """
    Generate a password not already present in ``generated_passwords``.

    The new password is appended to the list so subsequent calls stay unique.
    """
    while True:
        candidate = generate_random_string(password_length)
        if candidate not in generated_passwords:
            break
    generated_passwords.append(candidate)
    return candidate
def create_user_and_user_profile(email, username, name, country, password):
    """
    Create an account, a Registration for identity verification, and a profile.

    :param email: user's email address
    :param username: user's username
    :param name: user's name
    :param country: user's country
    :param password: user's password
    :return: User instance of the new user.
    """
    new_user = User.objects.create_user(username, email, password)
    registration = Registration()
    registration.register(new_user)
    user_profile = UserProfile(user=new_user)
    user_profile.name = name
    user_profile.country = country
    user_profile.save()
    return new_user
def create_manual_course_enrollment(user, course_id, mode, enrolled_by, reason, state_transition):
    """
    Enroll ``user`` in ``course_id`` and leave a manual-enrollment audit trail.

    :param user: User who is to enroll in course
    :param course_id: course identifier of the course in which to enroll the user.
    :param mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
    :param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
    :param reason: Reason behind manual enrollment
    :param state_transition: state transition denoting whether student enrolled from un-enrolled,
    un-enrolled from enrolled etc.
    :return CourseEnrollment instance.
    """
    enrollment = CourseEnrollment.enroll(user, course_id, mode=mode)
    ManualEnrollmentAudit.create_manual_enrollment_audit(
        enrolled_by, user.email, state_transition, reason, enrollment
    )
    log.info(u'user %s enrolled in the course %s', user.username, course_id)
    return enrollment
def create_and_enroll_user(email, username, name, country, password, course_id, course_mode, enrolled_by, email_params):
    """
    Create a new user and enroll him/her to the given course, return list of errors in the following format
    Error format:
    each error is key-value pair dict with following key-value pairs.
    1. username: username of the user to enroll
    1. email: email of the user to enroll
    1. response: readable error message
    :param email: user's email address
    :param username: user's username
    :param name: user's name
    :param country: user's country
    :param password: user's password
    :param course_id: course identifier of the course in which to enroll the user.
    :param course_mode: mode for user enrollment, e.g. 'honor', 'audit' etc.
    :param enrolled_by: User who made the manual enrollment entry (usually instructor or support)
    :param email_params: information to send to the user via email
    :return: list of errors
    """
    errors = list()
    try:
        # Atomic: if enrollment fails, the freshly created account rolls back too.
        with transaction.atomic():
            # Create a new user
            user = create_user_and_user_profile(email, username, name, country, password)
            # Enroll user to the course and add manual enrollment audit trail
            create_manual_course_enrollment(
                user=user,
                course_id=course_id,
                mode=course_mode,
                enrolled_by=enrolled_by,
                reason='Enrolling via csv upload',
                state_transition=UNENROLLED_TO_ENROLLED,
            )
    except IntegrityError:
        # Raised when the username is already taken (email was checked by caller).
        errors.append({
            'username': username,
            'email': email,
            'response': _(u'Username {user} already exists.').format(user=username)
        })
    except Exception as ex:  # pylint: disable=broad-except
        log.exception(type(ex).__name__)
        errors.append({
            'username': username, 'email': email, 'response': type(ex).__name__,
        })
    else:
        try:
            # It's a new user, an email will be sent to each newly created user.
            email_params.update({
                'message_type': 'account_creation_and_enrollment',
                'email_address': email,
                'password': password,
                'platform_name': configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME),
            })
            send_mail_to_student(email, email_params)
        except Exception as ex:  # pylint: disable=broad-except
            # Account creation succeeded but the welcome email failed; report it
            # as a row error because the student has no other way to learn the password.
            log.exception(
                u"Exception '{exception}' raised while sending email to new user.".format(exception=type(ex).__name__)
            )
            errors.append({
                'username': username,
                'email': email,
                'response':
                    _(u"Error '{error}' while sending email to new user (user email={email}). "
                      u"Without the email student would not be able to login. "
                      u"Please contact support for further information.").format(error=type(ex).__name__, email=email),
            })
        else:
            log.info(u'email sent to new created user at %s', email)
    return errors
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
@require_post_params(action="enroll or unenroll", identifiers="stringified list of emails and/or usernames")
def students_update_enrollment(request, course_id):
    """
    Enroll or unenroll students by email.
    Requires staff access.
    Query Parameters:
    - action in ['enroll', 'unenroll']
    - identifiers is string containing a list of emails and/or usernames separated by anything split_input_list can handle.
    - auto_enroll is a boolean (defaults to false)
        If auto_enroll is false, students will be allowed to enroll.
        If auto_enroll is true, students will be enrolled as soon as they register.
    - email_students is a boolean (defaults to false)
        If email_students is true, students will be sent email notification
        If email_students is false, students will not be sent email notification
    Returns an analog to this JSON structure: {
        "action": "enroll",
        "auto_enroll": false,
        "results": [
            {
                "email": "testemail@test.org",
                "before": {
                    "enrollment": false,
                    "auto_enroll": false,
                    "user": true,
                    "allowed": false
                },
                "after": {
                    "enrollment": true,
                    "auto_enroll": false,
                    "user": true,
                    "allowed": false
                }
            }
        ]
    }
    """
    course_id = CourseKey.from_string(course_id)
    action = request.POST.get('action')
    identifiers_raw = request.POST.get('identifiers')
    identifiers = _split_input_list(identifiers_raw)
    auto_enroll = _get_boolean_param(request, 'auto_enroll')
    email_students = _get_boolean_param(request, 'email_students')
    reason = request.POST.get('reason')
    role = request.POST.get('role')
    allowed_role_choices = configuration_helpers.get_value('MANUAL_ENROLLMENT_ROLE_CHOICES',
                                                           settings.MANUAL_ENROLLMENT_ROLE_CHOICES)
    if role and role not in allowed_role_choices:
        return JsonResponse(
            {
                'action': action,
                'results': [{'error': True, 'message': 'Not a valid role choice'}],
                'auto_enroll': auto_enroll,
            }, status=400)
    enrollment_obj = None
    state_transition = DEFAULT_TRANSITION_STATE
    email_params = {}
    if email_students:
        course = get_course_by_id(course_id)
        email_params = get_email_params(course, auto_enroll, secure=request.is_secure())
    results = []
    for identifier in identifiers:
        # First try to get a user object from the identifer
        user = None
        email = None
        language = None
        try:
            user = get_student_from_identifier(identifier)
        except User.DoesNotExist:
            # Not a known user: treat the identifier itself as an email address.
            email = identifier
        else:
            email = user.email
            language = get_user_email_language(user)
        try:
            # Use django.core.validators.validate_email to check email address
            # validity (obviously, cannot check if email actually /exists/,
            # simply that it is plausibly valid)
            validate_email(email)  # Raises ValidationError if invalid
            if action == 'enroll':
                before, after, enrollment_obj = enroll_email(
                    course_id, email, auto_enroll, email_students, email_params, language=language
                )
                before_enrollment = before.to_dict()['enrollment']
                before_user_registered = before.to_dict()['user']
                before_allowed = before.to_dict()['allowed']
                after_enrollment = after.to_dict()['enrollment']
                after_allowed = after.to_dict()['allowed']
                # Derive the audit-trail transition from the before/after snapshots.
                if before_user_registered:
                    if after_enrollment:
                        if before_enrollment:
                            state_transition = ENROLLED_TO_ENROLLED
                        else:
                            if before_allowed:
                                state_transition = ALLOWEDTOENROLL_TO_ENROLLED
                            else:
                                state_transition = UNENROLLED_TO_ENROLLED
                else:
                    if after_allowed:
                        state_transition = UNENROLLED_TO_ALLOWEDTOENROLL
            elif action == 'unenroll':
                before, after = unenroll_email(
                    course_id, email, email_students, email_params, language=language
                )
                before_enrollment = before.to_dict()['enrollment']
                before_allowed = before.to_dict()['allowed']
                enrollment_obj = CourseEnrollment.get_enrollment(user, course_id) if user else None
                if before_enrollment:
                    state_transition = ENROLLED_TO_UNENROLLED
                else:
                    if before_allowed:
                        state_transition = ALLOWEDTOENROLL_TO_UNENROLLED
                    else:
                        state_transition = UNENROLLED_TO_UNENROLLED
            else:
                return HttpResponseBadRequest(strip_tags(
                    u"Unrecognized action '{}'".format(action)
                ))
        except ValidationError:
            # Flag this email as an error if invalid, but continue checking
            # the remaining in the list
            results.append({
                'identifier': identifier,
                'invalidIdentifier': True,
            })
        except Exception as exc:  # pylint: disable=broad-except
            # catch and log any exceptions
            # so that one error doesn't cause a 500.
            # BUGFIX: the old message (u"Error while #{}ing student") never
            # interpolated the action; log the action and identifier properly.
            # log.exception records the traceback of `exc` automatically.
            log.exception(u"Error while %sing student %s", action, identifier)
            results.append({
                'identifier': identifier,
                'error': True,
            })
        else:
            ManualEnrollmentAudit.create_manual_enrollment_audit(
                request.user, email, state_transition, reason, enrollment_obj, role
            )
            results.append({
                'identifier': identifier,
                'before': before.to_dict(),
                'after': after.to_dict(),
            })
    response_payload = {
        'action': action,
        'results': results,
        'auto_enroll': auto_enroll,
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_BETATEST)
@common_exceptions_400
@require_post_params(
    identifiers="stringified list of emails and/or usernames",
    action="add or remove",
)
def bulk_beta_modify_access(request, course_id):
    """
    Enroll or unenroll users in beta testing program.
    Query parameters:
    - identifiers is string containing a list of emails and/or usernames separated by
      anything split_input_list can handle.
    - action is one of ['add', 'remove']
    """
    course_id = CourseKey.from_string(course_id)
    action = request.POST.get('action')
    identifiers_raw = request.POST.get('identifiers')
    identifiers = _split_input_list(identifiers_raw)
    email_students = _get_boolean_param(request, 'email_students')
    auto_enroll = _get_boolean_param(request, 'auto_enroll')
    results = []
    rolename = 'beta'
    course = get_course_by_id(course_id)
    email_params = {}
    if email_students:
        secure = request.is_secure()
        email_params = get_email_params(course, auto_enroll=auto_enroll, secure=secure)
    for identifier in identifiers:
        try:
            error = False
            user_does_not_exist = False
            user = get_student_from_identifier(identifier)
            user_active = user.is_active
            if action == 'add':
                allow_access(course, user, rolename)
            elif action == 'remove':
                revoke_access(course, user, rolename)
            else:
                return HttpResponseBadRequest(strip_tags(
                    u"Unrecognized action '{}'".format(action)
                ))
        except User.DoesNotExist:
            error = True
            user_does_not_exist = True
            user_active = None
        # catch and log any unexpected exceptions
        # so that one error doesn't cause a 500.
        except Exception as exc:  # pylint: disable=broad-except
            # BUGFIX: the old message (u"Error while #{}ing student") never
            # interpolated the action; log the action and identifier properly.
            # log.exception records the traceback of `exc` automatically.
            log.exception(u"Error while %sing student %s", action, identifier)
            error = True
        else:
            # If no exception thrown, see if we should send an email
            if email_students:
                send_beta_role_email(action, user, email_params)
            # See if we should autoenroll the student
            if auto_enroll:
                # Check if student is already enrolled
                if not CourseEnrollment.is_enrolled(user, course_id):
                    CourseEnrollment.enroll(user, course_id)
        finally:
            # Tabulate the action result of this email address
            results.append({
                'identifier': identifier,
                'error': error,
                'userDoesNotExist': user_does_not_exist,
                'is_active': user_active
            })
    response_payload = {
        'action': action,
        'results': results,
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EDIT_COURSE_ACCESS)
@require_post_params(
    unique_student_identifier="email or username of user to change access",
    rolename="'instructor', 'staff', 'beta', or 'ccx_coach'",
    action="'allow' or 'revoke'"
)
@common_exceptions_400
def modify_access(request, course_id):
    """
    Modify staff/instructor access of other user.
    Requires instructor access.
    NOTE: instructors cannot remove their own instructor access.
    Query parameters:
    unique_student_identifer is the target user's username or email
    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
    action is one of ['allow', 'revoke']
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )
    try:
        user = get_student_from_identifier(request.POST.get('unique_student_identifier'))
    except User.DoesNotExist:
        # Unknown target user: report it in the payload rather than erroring.
        response_payload = {
            'unique_student_identifier': request.POST.get('unique_student_identifier'),
            'userDoesNotExist': True,
        }
        return JsonResponse(response_payload)
    # Check that user is active, because add_users
    # in common/djangoapps/student/roles.py fails
    # silently when we try to add an inactive user.
    if not user.is_active:
        response_payload = {
            'unique_student_identifier': user.username,
            'inactiveUser': True,
        }
        return JsonResponse(response_payload)
    rolename = request.POST.get('rolename')
    action = request.POST.get('action')
    if rolename not in ROLES:
        error = strip_tags(u"unknown rolename '{}'".format(rolename))
        log.error(error)
        return HttpResponseBadRequest(error)
    # disallow instructors from removing their own instructor access.
    if rolename == 'instructor' and user == request.user and action != 'allow':
        response_payload = {
            'unique_student_identifier': user.username,
            'rolename': rolename,
            'action': action,
            'removingSelfAsInstructor': True,
        }
        return JsonResponse(response_payload)
    if action == 'allow':
        allow_access(course, user, rolename)
    elif action == 'revoke':
        revoke_access(course, user, rolename)
    else:
        return HttpResponseBadRequest(strip_tags(
            u"unrecognized action u'{}'".format(action)
        ))
    response_payload = {
        'unique_student_identifier': user.username,
        'rolename': rolename,
        'action': action,
        'success': 'yes',
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EDIT_COURSE_ACCESS)
@require_post_params(rolename="'instructor', 'staff', or 'beta'")
def list_course_role_members(request, course_id):
    """
    List instructors and staff.
    Requires instructor access.
    rolename is one of ['instructor', 'staff', 'beta', 'ccx_coach']
    Returns JSON of the form {
        "course_id": "some/course/id",
        "staff": [
            {
                "username": "staff1",
                "email": "staff1@example.org",
                "first_name": "Joe",
                "last_name": "Shmoe",
            }
        ]
    }
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_with_access(
        request.user, 'instructor', course_id, depth=None
    )
    rolename = request.POST.get('rolename')
    # Any rolename outside the known set is a client error.
    if rolename not in ROLES:
        return HttpResponseBadRequest()

    def extract_user_info(user):
        """ convert user into dicts for json view """
        return {
            'username': user.username,
            'email': user.email,
            'first_name': user.first_name,
            'last_name': user.last_name,
        }
    # The response key is the rolename itself (e.g. "staff": [...]).
    response_payload = {
        'course_id': text_type(course_id),
        rolename: list(map(extract_user_info, list_with_level(
            course, rolename
        ))),
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def get_problem_responses(request, course_id):
    """
    Initiate generation of a CSV file containing all student answers
    to a given problem.

    Responds with JSON
        {"status": "... status message ...", "task_id": created_task_UUID}
    if initiation is successful (or generation task is already running).

    Responds with BadRequest if problem location is faulty.
    """
    course_key = CourseKey.from_string(course_id)
    problem_location = request.POST.get('problem_location', '')
    report_type = _('problem responses')

    try:
        problem_key = UsageKey.from_string(problem_location)
        # Are we dealing with an "old-style" problem location?
        if not problem_key.run:
            problem_key = UsageKey.from_string(problem_location).map_into_course(course_key)
        # The problem must belong to the course being queried.
        if problem_key.course_key != course_key:
            raise InvalidKeyError(type(problem_key), problem_key)
    except InvalidKeyError:
        return JsonResponseBadRequest(_("Could not find problem with this location."))

    task = task_api.submit_calculate_problem_responses_csv(
        request, course_key, problem_location
    )
    return JsonResponse({
        "status": SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type),
        "task_id": task.task_id,
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
def get_grading_config(request, course_id):
    """
    Respond with json which contains a html formatted grade summary.

    Returns JSON of the form:
        {"course_id": ..., "grading_config_summary": ...}
    """
    course_key = CourseKey.from_string(course_id)
    # Access control is handled by the decorators; no extra course-access
    # check is needed here (removed stale commented-out code).
    course = get_course_by_id(course_key)
    grading_config_summary = instructor_analytics.basic.dump_grading_context(course)

    response_payload = {
        'course_id': text_type(course_key),
        'grading_config_summary': grading_config_summary,
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.VIEW_ISSUED_CERTIFICATES)
def get_issued_certificates(request, course_id):
    """
    Responds with JSON if CSV is not required. contains a list of issued certificates.

    Arguments:
        course_id
    Returns:
        {"certificates": [{course_id: xyz, mode: 'honor'}, ...]}
    """
    course_key = CourseKey.from_string(course_id)
    csv_required = request.GET.get('csv', 'false')

    query_features = ['course_id', 'mode', 'total_issued_certificate', 'report_run_date']
    query_features_names = [
        ('course_id', _('CourseID')),
        ('mode', _('Certificate Type')),
        ('total_issued_certificate', _('Total Certificates Issued')),
        ('report_run_date', _('Date Report Run'))
    ]
    certificates_data = instructor_analytics.basic.issued_certificates(course_key, query_features)

    if csv_required.lower() != 'true':
        # JSON response: raw rows plus metadata needed to render a table.
        return JsonResponse({
            'certificates': certificates_data,
            'queried_features': query_features,
            'feature_names': dict(query_features_names)
        })

    # CSV response: translated column headers plus formatted data rows.
    __, data_rows = instructor_analytics.csvs.format_dictlist(certificates_data, query_features)
    return instructor_analytics.csvs.create_csv_response(
        'issued_certificates.csv',
        [col_header for __, col_header in query_features_names],
        data_rows
    )
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def get_students_features(request, course_id, csv=False):  # pylint: disable=redefined-outer-name
    """
    Respond with json which contains a summary of all enrolled students profile information.

    Responds with JSON
        {"students": [{-student-info-}, ...]}

    When ``csv`` is truthy, a CSV-generation task is submitted instead and a
    JSON status message is returned.

    TO DO accept requests for different attribute sets.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)
    report_type = _('enrolled learner profile')
    available_features = instructor_analytics.basic.AVAILABLE_FEATURES

    # Allow for sites to be able to define additional columns.
    # Note that adding additional columns has the potential to break
    # the student profile report due to a character limit on the
    # asynchronous job input which in this case is a JSON string
    # containing the list of columns to include in the report.
    # TODO: Refactor the student profile report code to remove the list of columns
    # that should be included in the report from the asynchronous job input.
    # We need to clone the list because we modify it below
    query_features = list(configuration_helpers.get_value('student_profile_download_fields', []))

    if not query_features:
        # Default column set when the site does not configure its own.
        query_features = [
            'id', 'username', 'name', 'email', 'language', 'location',
            'year_of_birth', 'gender', 'level_of_education', 'mailing_address',
            'goals', 'enrollment_mode', 'verification_status',
            'last_login', 'date_joined',
        ]

    # Provide human-friendly and translatable names for these features. These names
    # will be displayed in the table generated in data_download.js. It is not (yet)
    # used as the header row in the CSV, but could be in the future.
    query_features_names = {
        'id': _('User ID'),
        'username': _('Username'),
        'name': _('Name'),
        'email': _('Email'),
        'language': _('Language'),
        'location': _('Location'),
        'year_of_birth': _('Birth Year'),
        'gender': _('Gender'),
        'level_of_education': _('Level of Education'),
        'mailing_address': _('Mailing Address'),
        'goals': _('Goals'),
        'enrollment_mode': _('Enrollment Mode'),
        'verification_status': _('Verification Status'),
        'last_login': _('Last Login'),
        'date_joined': _('Date Joined'),
    }

    if is_course_cohorted(course.id):
        # Translators: 'Cohort' refers to a group of students within a course.
        query_features.append('cohort')
        query_features_names['cohort'] = _('Cohort')

    if course.teams_enabled:
        query_features.append('team')
        query_features_names['team'] = _('Team')

    # For compatibility reasons, city and country should always appear last.
    query_features.append('city')
    query_features_names['city'] = _('City')
    query_features.append('country')
    query_features_names['country'] = _('Country')

    if not csv:
        # Synchronous path: query enrolled students and return the data inline.
        student_data = instructor_analytics.basic.enrolled_students_features(course_key, query_features)
        response_payload = {
            'course_id': six.text_type(course_key),
            'students': student_data,
            'students_count': len(student_data),
            'queried_features': query_features,
            'feature_names': query_features_names,
            'available_features': available_features,
        }
        return JsonResponse(response_payload)
    else:
        # Asynchronous path: hand the column list to a celery task that builds the CSV.
        task_api.submit_calculate_students_features_csv(
            request,
            course_key,
            query_features
        )
        success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
        return JsonResponse({"status": success_status})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def get_students_who_may_enroll(request, course_id):
    """
    Initiate generation of a CSV file containing information about
    students who may enroll in a course.

    Responds with JSON
        {"status": "... status message ..."}
    """
    course_key = CourseKey.from_string(course_id)
    report_type = _('enrollment')
    # Only the email column is needed for this report.
    task_api.submit_calculate_may_enroll_csv(request, course_key, ['email'])
    return JsonResponse({
        "status": SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type),
    })
def _cohorts_csv_validator(file_storage, file_to_validate):
    """
    Verifies that the expected columns are present in the CSV used to add users to cohorts.
    """
    with file_storage.open(file_to_validate) as f:
        # Build a unicode-aware CSV reader appropriate for the Python version.
        if six.PY2:
            reader = unicodecsv.reader(UniversalNewlineIterator(f), encoding='utf-8')
        else:
            reader = csv.reader(f.read().decode('utf-8').splitlines())

        # An empty file yields no header row at all.
        header = []
        try:
            header = next(reader)
        except StopIteration:
            pass

        if "cohort" not in header:
            error = _("The file must contain a 'cohort' column containing cohort names.")
        elif "email" not in header and "username" not in header:
            error = _("The file must contain a 'username' column, an 'email' column, or both.")
        else:
            error = None

        if error is not None:
            raise FileValidationException(error)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_POST
@require_course_permission(permissions.ASSIGN_TO_COHORTS)
@common_exceptions_400
def add_users_to_cohorts(request, course_id):
    """
    View method that accepts an uploaded file (using key "uploaded-file")
    containing cohort assignments for users. This method spawns a celery task
    to do the assignments, and a CSV file with results is provided via data downloads.
    """
    course_key = CourseKey.from_string(course_id)

    try:
        # Validate and persist the upload; the stored name is what the task reads.
        __, stored_name = store_uploaded_file(
            request, 'uploaded-file', ['.csv'],
            course_and_time_based_filename_generator(course_key, "cohorts"),
            max_file_size=2000000,  # limit to 2 MB
            validator=_cohorts_csv_validator
        )
        # The task will assume the default file storage.
        task_api.submit_cohort_students(request, course_key, stored_name)
    except (FileValidationException, PermissionDenied) as err:
        return JsonResponse({"error": six.text_type(err)}, status=400)

    return JsonResponse()
# The non-atomic decorator is required because this view calls a celery
# task which uses the 'outer_atomic' context manager.
@method_decorator(transaction.non_atomic_requests, name='dispatch')
class CohortCSV(DeveloperErrorViewMixin, APIView):
    """
    **Use Cases**
        Submit a CSV file to assign users to cohorts

    **Example Requests**:
        POST /api/cohorts/v1/courses/{course_id}/users/

    **Response Values**
        * Empty as this is executed asynchronously.
    """
    authentication_classes = (
        JwtAuthentication,
        BearerAuthenticationAllowInactiveUser,
        SessionAuthenticationAllowInactiveUser,
    )
    # IsAdminUser (DRF) requires request.user.is_staff, so only global staff
    # can use this endpoint.
    permission_classes = (IsAuthenticated, IsAdminUser)

    def post(self, request, course_key_string):
        """
        View method that accepts an uploaded file (using key "uploaded-file")
        containing cohort assignments for users. This method spawns a celery task
        to do the assignments, and a CSV file with results is provided via data downloads.
        """
        course_key = CourseKey.from_string(course_key_string)
        try:
            # Validate and persist the upload; the stored name is handed to the task.
            __, file_name = store_uploaded_file(
                request, 'uploaded-file', ['.csv'],
                course_and_time_based_filename_generator(course_key, 'cohorts'),
                max_file_size=2000000,  # limit to 2 MB
                validator=_cohorts_csv_validator
            )
            task_api.submit_cohort_students(request, course_key, file_name)
        except (FileValidationException, ValueError) as e:
            # Surface upload/validation problems as a structured 400 error.
            raise self.api_error(status.HTTP_400_BAD_REQUEST, str(e), 'failed-validation')
        return Response(status=status.HTTP_204_NO_CONTENT)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.ENROLLMENT_REPORT)
@common_exceptions_400
def get_course_survey_results(request, course_id):
    """
    get the survey results report for the particular course.
    """
    course_key = CourseKey.from_string(course_id)
    report_type = _('survey')
    # Report generation happens in a background celery task.
    task_api.submit_course_survey_report(request, course_key)
    return JsonResponse({
        "status": SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type),
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EXAM_RESULTS)
@common_exceptions_400
def get_proctored_exam_results(request, course_id):
    """
    Get the proctored exam results report for the particular course.

    Submits a background celery task to generate the report and responds
    with a JSON status message.
    """
    course_key = CourseKey.from_string(course_id)
    report_type = _('proctored exam results')
    task_api.submit_proctored_exam_results_report(request, course_key)
    success_status = SUCCESS_MESSAGE_TEMPLATE.format(report_type=report_type)
    return JsonResponse({"status": success_status})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
def get_anon_ids(request, course_id):
    """
    Respond with 2-column CSV output of user-id, anonymized-user-id
    """
    # TODO: the User.objects query and CSV generation here could be
    # centralized into instructor_analytics. Currently instructor_analytics
    # has similar functionality but not quite what's needed.
    course_id = CourseKey.from_string(course_id)

    def csv_response(filename, header, rows):
        """Returns a CSV http response for the given header and rows (excel/utf-8)."""
        response = HttpResponse(content_type='text/csv')
        # Python 2 needs the filename as encoded bytes; Python 3 takes text.
        response['Content-Disposition'] = u'attachment; filename={0}'.format(
            text_type(filename).encode('utf-8') if six.PY2 else text_type(filename)
        )
        writer = csv.writer(response, dialect='excel', quotechar='"', quoting=csv.QUOTE_ALL)
        # In practice, there should not be non-ascii data in this query,
        # but trying to do the right thing anyway.
        encoded = [text_type(s) for s in header]
        writer.writerow(encoded)
        for row in rows:
            encoded = [text_type(s) for s in row]
            writer.writerow(encoded)
        return response

    # All users enrolled in this course, in a stable (id) order.
    students = User.objects.filter(
        courseenrollment__course_id=course_id,
    ).order_by('id')
    header = ['User ID', 'Anonymized User ID', 'Course Specific Anonymized User ID']
    # save=False: presumably avoids persisting newly-generated anonymous ids
    # as a side effect of this report — TODO confirm against the helpers.
    rows = [[s.id, unique_id_for_user(s, save=False), anonymous_id_for_user(s, course_id, save=False)]
            for s in students]
    return csv_response(text_type(course_id).replace('/', '-') + '-anon-ids.csv', header, rows)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
@require_post_params(
    unique_student_identifier="email or username of student for whom to get enrollment status"
)
def get_student_enrollment_status(request, course_id):
    """
    Get the enrollment status of a student.
    Limited to staff access.

    Takes query parameter unique_student_identifier

    Returns JSON with 'course_id', 'error' and a human-readable
    'enrollment_status' message (active / inactive / pending / never enrolled).
    """
    # NOTE(review): `error` is never set below; it is kept only so the
    # response shape stays stable for existing consumers.
    error = ''
    user = None
    mode = None
    is_active = None

    course_id = CourseKey.from_string(course_id)
    unique_student_identifier = request.POST.get('unique_student_identifier')

    try:
        user = get_student_from_identifier(unique_student_identifier)
        mode, is_active = CourseEnrollment.enrollment_mode_for_user(user, course_id)
    except User.DoesNotExist:
        # The student could have been invited to enroll without having
        # registered. We'll also look at CourseEnrollmentAllowed
        # records, so let the lack of a User slide.
        pass

    enrollment_status = _(u'Enrollment status for {student}: unknown').format(student=unique_student_identifier)

    if user and mode:
        # An enrollment record exists; report whether it is currently active.
        if is_active:
            enrollment_status = _(u'Enrollment status for {student}: active').format(student=user)
        else:
            enrollment_status = _(u'Enrollment status for {student}: inactive').format(student=user)
    else:
        # No enrollment: check for a pending invitation via CourseEnrollmentAllowed.
        email = user.email if user else unique_student_identifier
        allowed = CourseEnrollmentAllowed.may_enroll_and_unenrolled(course_id)
        if allowed and email in [cea.email for cea in allowed]:
            enrollment_status = _(u'Enrollment status for {student}: pending').format(student=email)
        else:
            enrollment_status = _(u'Enrollment status for {student}: never enrolled').format(student=email)

    response_payload = {
        'course_id': text_type(course_id),
        'error': error,
        'enrollment_status': enrollment_status
    }

    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.ENROLLMENT_REPORT)
@require_post_params(
    unique_student_identifier="email or username of student for whom to get progress url"
)
@common_exceptions_400
def get_student_progress_url(request, course_id):
    """
    Get the progress url of a student.
    Limited to staff access.

    Takes query parameter unique_student_identifier and if the student exists
    returns e.g. {
        'progress_url': '/../...'
    }
    """
    # Fix: @common_exceptions_400 was previously applied twice (once above
    # @require_course_permission and once here); the redundant outer
    # application has been removed. The remaining one still converts
    # exceptions from the view body (e.g. unknown student) into 400s.
    course_key = CourseKey.from_string(course_id)
    user = get_student_from_identifier(request.POST.get('unique_student_identifier'))

    progress_url = reverse('student_progress', kwargs={'course_id': text_type(course_key), 'student_id': user.id})

    response_payload = {
        'course_id': text_type(course_key),
        'progress_url': progress_url,
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params(
    problem_to_reset="problem urlname to reset"
)
@common_exceptions_400
def reset_student_attempts(request, course_id):
    """
    Resets a students attempts counter or starts a task to reset all students
    attempts counters. Optionally deletes student state for a problem. Limited
    to staff access. Some sub-methods limited to instructor access.

    Takes some of the following query parameters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with delete_module
        - delete_module is a boolean
            requires instructor access
            mutually exclusive with all_students
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )
    # Resetting attempts for every student requires full instructor access.
    all_students = _get_boolean_param(request, 'all_students')
    if all_students and not has_access(request.user, 'instructor', course):
        return HttpResponseForbidden("Requires instructor access.")

    problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
    student_identifier = request.POST.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    delete_module = _get_boolean_param(request, 'delete_module')

    # parameter combinations
    if all_students and student:
        return HttpResponseBadRequest(
            "all_students and unique_student_identifier are mutually exclusive."
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            "all_students and delete_module are mutually exclusive."
        )

    try:
        module_state_key = UsageKey.from_string(problem_to_reset).map_into_course(course_id)
    except InvalidKeyError:
        return HttpResponseBadRequest()

    response_payload = {}
    response_payload['problem_to_reset'] = problem_to_reset

    if student:
        # Single student: reset (and optionally delete state) synchronously.
        try:
            enrollment.reset_student_attempts(
                course_id,
                student,
                module_state_key,
                requesting_user=request.user,
                delete_module=delete_module
            )
        except StudentModule.DoesNotExist:
            return HttpResponseBadRequest(_("Module does not exist."))
        except sub_api.SubmissionError:
            # Trust the submissions API to log the error
            error_msg = _("An error occurred while deleting the score.")
            return HttpResponse(error_msg, status=500)
        response_payload['student'] = student_identifier
    elif all_students:
        # All students: hand the work off to a background task.
        task_api.submit_reset_problem_attempts_for_all_students(request, module_state_key)
        response_payload['task'] = TASK_SUBMISSION_OK
        response_payload['student'] = 'All Students'
    else:
        return HttpResponseBadRequest()

    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@common_exceptions_400
def reset_student_attempts_for_entrance_exam(request, course_id):
    """
    Resets a students attempts counter or starts a task to reset all students
    attempts counters for entrance exam. Optionally deletes student state for
    entrance exam. Limited to staff access. Some sub-methods limited to instructor access.

    Following are possible query parameters
        - unique_student_identifier is an email or username
        - all_students is a boolean
            requires instructor access
            mutually exclusive with delete_module
        - delete_module is a boolean
            requires instructor access
            mutually exclusive with all_students
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )
    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )

    student_identifier = request.POST.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)
    all_students = _get_boolean_param(request, 'all_students')
    delete_module = _get_boolean_param(request, 'delete_module')

    # parameter combinations
    if all_students and student:
        return HttpResponseBadRequest(
            _("all_students and unique_student_identifier are mutually exclusive.")
        )
    if all_students and delete_module:
        return HttpResponseBadRequest(
            _("all_students and delete_module are mutually exclusive.")
        )

    # instructor authorization
    if all_students or delete_module:
        if not has_access(request.user, 'instructor', course):
            return HttpResponseForbidden(_("Requires instructor access."))

    try:
        entrance_exam_key = UsageKey.from_string(course.entrance_exam_id).map_into_course(course_id)
        if delete_module:
            # Delete the student's entrance-exam state via a background task.
            task_api.submit_delete_entrance_exam_state_for_student(
                request,
                entrance_exam_key,
                student
            )
        else:
            # Reset attempt counters (one student, or all) via a background task.
            task_api.submit_reset_problem_attempts_in_entrance_exam(
                request,
                entrance_exam_key,
                student
            )
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    response_payload = {'student': student_identifier or _('All Students'), 'task': TASK_SUBMISSION_OK}
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.OVERRIDE_GRADES)
@require_post_params(problem_to_reset="problem urlname to reset")
@common_exceptions_400
def rescore_problem(request, course_id):
    """
    Starts a background task to rescore a problem for one student or for
    all students. Rescore for all students is limited to instructor access.

    Takes either of the following query paremeters
        - problem_to_reset is a urlname of a problem
        - unique_student_identifier is an email or username
        - all_students is a boolean

    all_students and unique_student_identifier cannot both be present.
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_with_access(request.user, 'staff', course_id)

    # Rescoring for every student requires full instructor access.
    all_students = _get_boolean_param(request, 'all_students')
    if all_students and not has_access(request.user, 'instructor', course):
        return HttpResponseForbidden("Requires instructor access.")

    only_if_higher = _get_boolean_param(request, 'only_if_higher')
    problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
    student_identifier = request.POST.get('unique_student_identifier', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)

    # Validate the parameter combination before doing any work.
    if not (problem_to_reset and (all_students or student)):
        return HttpResponseBadRequest("Missing query parameters.")

    if all_students and student:
        return HttpResponseBadRequest(
            "Cannot rescore with all_students and unique_student_identifier."
        )

    try:
        module_state_key = UsageKey.from_string(problem_to_reset).map_into_course(course_id)
    except InvalidKeyError:
        return HttpResponseBadRequest("Unable to parse problem id")

    response_payload = {'problem_to_reset': problem_to_reset}

    if student:
        # Rescore a single student's submission via a background task.
        response_payload['student'] = student_identifier
        try:
            task_api.submit_rescore_problem_for_student(
                request,
                module_state_key,
                student,
                only_if_higher,
            )
        except NotImplementedError as exc:
            # Raised when the block does not support rescoring.
            return HttpResponseBadRequest(text_type(exc))
    elif all_students:
        try:
            task_api.submit_rescore_problem_for_all_students(
                request,
                module_state_key,
                only_if_higher,
            )
        except NotImplementedError as exc:
            # Raised when the block does not support rescoring.
            return HttpResponseBadRequest(text_type(exc))
    else:
        return HttpResponseBadRequest()

    response_payload['task'] = TASK_SUBMISSION_OK
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.OVERRIDE_GRADES)
@require_post_params(problem_to_reset="problem urlname to reset", score='overriding score')
@common_exceptions_400
def override_problem_score(request, course_id):
    """
    Start a background task that overrides a student's score for a problem.

    Takes the following POST parameters:
        - problem_to_reset: urlname of the problem
        - score: the overriding score
        - unique_student_identifier: email or username of the student

    Responds with JSON {'problem_to_reset', 'student', 'task'} on success,
    or an error response otherwise.
    """
    course_key = CourseKey.from_string(course_id)
    score = strip_if_string(request.POST.get('score'))
    problem_to_reset = strip_if_string(request.POST.get('problem_to_reset'))
    student_identifier = request.POST.get('unique_student_identifier', None)

    if not problem_to_reset:
        return HttpResponseBadRequest("Missing query parameter problem_to_reset.")
    if not student_identifier:
        return HttpResponseBadRequest("Missing query parameter student_identifier.")
    # Guard above guarantees a non-empty identifier here (the previous
    # `if ... is not None / else` branch was unreachable and has been removed).
    student = get_student_from_identifier(student_identifier)

    try:
        usage_key = UsageKey.from_string(problem_to_reset).map_into_course(course_key)
    except InvalidKeyError:
        return _create_error_response(request, u"Unable to parse problem id {}.".format(problem_to_reset))

    # check the user's access to this specific problem
    if not has_access(request.user, "staff", modulestore().get_item(usage_key)):
        # Bug fix: this error response was previously built but never
        # returned, so unauthorized users fell through and the override
        # was submitted anyway.
        return _create_error_response(
            request,
            u"User {} does not have permission to override scores for problem {}.".format(
                request.user.id,
                problem_to_reset
            )
        )

    response_payload = {
        'problem_to_reset': problem_to_reset,
        'student': student_identifier
    }
    try:
        task_api.submit_override_score(
            request,
            usage_key,
            student,
            score,
        )
    except NotImplementedError as exc:  # if we try to override the score of a non-scorable block, catch it here
        return _create_error_response(request, text_type(exc))
    except ValueError as exc:
        # e.g. the task layer rejects the supplied score value.
        return _create_error_response(request, text_type(exc))

    response_payload['task'] = TASK_SUBMISSION_OK
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.RESCORE_EXAMS)
@common_exceptions_400
def rescore_entrance_exam(request, course_id):
    """
    Starts a background task to rescore the entrance exam for one student
    or for all students. Limited to instructor access.

    Takes either of the following query parameters
        - unique_student_identifier is an email or username
        - all_students is a boolean

    all_students and unique_student_identifier cannot both be present.
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_with_access(
        request.user, 'staff', course_id, depth=None
    )

    student_identifier = request.POST.get('unique_student_identifier', None)
    only_if_higher = request.POST.get('only_if_higher', None)
    student = None
    if student_identifier is not None:
        student = get_student_from_identifier(student_identifier)

    all_students = _get_boolean_param(request, 'all_students')

    if not course.entrance_exam_id:
        return HttpResponseBadRequest(
            _("Course has no entrance exam section.")
        )

    if all_students and student:
        return HttpResponseBadRequest(
            _("Cannot rescore with all_students and unique_student_identifier.")
        )

    try:
        entrance_exam_key = UsageKey.from_string(course.entrance_exam_id).map_into_course(course_id)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    response_payload = {}
    if student:
        response_payload['student'] = student_identifier
    else:
        response_payload['student'] = _("All Students")

    # student=None means the task rescored the exam for every student.
    task_api.submit_rescore_entrance_exam_for_student(
        request, entrance_exam_key, student, only_if_higher,
    )
    response_payload['task'] = TASK_SUBMISSION_OK
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EMAIL)
def list_background_email_tasks(request, course_id):
    """
    List background email tasks.
    """
    course_key = CourseKey.from_string(course_id)
    # Restrict the task-history query to bulk-email tasks only.
    email_tasks = task_api.get_instructor_task_history(
        course_key,
        task_type='bulk_course_email'
    )
    return JsonResponse({
        'tasks': [extract_task_features(task) for task in email_tasks],
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EMAIL)
def list_email_content(request, course_id):
    """
    List the content of bulk emails sent
    """
    course_key = CourseKey.from_string(course_id)
    # The bulk-email task history holds the content of each sent email.
    sent_emails = task_api.get_instructor_task_history(course_key, task_type='bulk_course_email')
    return JsonResponse({
        'emails': [extract_email_features(email) for email in sent_emails],
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.SHOW_TASKS)
def list_instructor_tasks(request, course_id):
    """
    List instructor tasks.

    Takes optional query parameters.
        - With no arguments, lists running tasks.
        - `problem_location_str` lists task history for problem
        - `problem_location_str` and `unique_student_identifier` lists task
            history for problem AND student (intersection)
    """
    course_id = CourseKey.from_string(course_id)
    problem_location_str = strip_if_string(request.POST.get('problem_location_str', False))
    student = request.POST.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)

    # A student filter only makes sense when scoped to a specific problem.
    if student and not problem_location_str:
        return HttpResponseBadRequest(
            "unique_student_identifier must accompany problem_location_str"
        )

    if problem_location_str:
        try:
            module_state_key = UsageKey.from_string(problem_location_str).map_into_course(course_id)
        except InvalidKeyError:
            return HttpResponseBadRequest()
        if student:
            # Specifying for a single student's history on this problem
            tasks = task_api.get_instructor_task_history(course_id, module_state_key, student)
        else:
            # Specifying for single problem's history
            tasks = task_api.get_instructor_task_history(course_id, module_state_key)
    else:
        # If no problem or student, just get currently running tasks
        tasks = task_api.get_running_instructor_tasks(course_id)

    response_payload = {
        'tasks': list(map(extract_task_features, tasks)),
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.SHOW_TASKS)
def list_entrance_exam_instructor_tasks(request, course_id):
    """
    List entrance exam related instructor tasks.

    Takes either of the following query parameters
        - unique_student_identifier is an email or username
        - all_students is a boolean
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)
    student = request.POST.get('unique_student_identifier', None)
    if student is not None:
        student = get_student_from_identifier(student)

    try:
        entrance_exam_key = UsageKey.from_string(course.entrance_exam_id).map_into_course(course_key)
    except InvalidKeyError:
        return HttpResponseBadRequest(_("Course has no valid entrance exam section."))

    # Narrow the history to a single student when an identifier was supplied;
    # otherwise return every student's entrance-exam task history.
    history_args = [course_key, entrance_exam_key]
    if student:
        history_args.append(student)
    tasks = task_api.get_entrance_exam_instructor_task_history(*history_args)

    return JsonResponse({
        'tasks': [extract_task_features(task) for task in tasks],
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
def list_report_downloads(request, course_id):
    """
    List grade CSV files that are available for download for this course.

    Takes the following query parameters:
    - (optional) report_name - name of the report
    """
    course_key = CourseKey.from_string(course_id)
    report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
    requested_report = request.POST.get("report_name", None)

    downloads = []
    for name, url in report_store.links_for(course_key):
        # When a specific report was requested, include only that report.
        if requested_report is not None and name != requested_report:
            continue
        downloads.append(dict(
            name=name,
            url=url,
            link=HTML(u'<a href="{}">{}</a>').format(HTML(url), Text(name)),
        ))

    return JsonResponse({'downloads': downloads})
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@require_finance_admin
def list_financial_report_downloads(_request, course_id):
    """
    List grade CSV files that are available for download for this course.
    """
    course_key = CourseKey.from_string(course_id)
    report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')

    downloads = []
    for report_name, report_url in report_store.links_for(course_key):
        downloads.append(dict(
            name=report_name,
            url=report_url,
            link=HTML(u'<a href="{}">{}</a>').format(HTML(report_url), Text(report_name)),
        ))

    return JsonResponse({'downloads': downloads})
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def export_ora2_data(request, course_id):
    """
    Pushes a Celery task which will aggregate ora2 responses for a course into a .csv
    """
    course_key = CourseKey.from_string(course_id)
    task_api.submit_export_ora2_data(request, course_key)
    # AlreadyRunningError from the task API is converted to a 400 by the decorator.
    return JsonResponse({
        "status": SUCCESS_MESSAGE_TEMPLATE.format(report_type=_('ORA data'))
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def calculate_grades_csv(request, course_id):
    """
    AlreadyRunningError is raised if the course's grades are already being updated.
    """
    course_key = CourseKey.from_string(course_id)
    task_api.submit_calculate_grades_csv(request, course_key)
    return JsonResponse({
        "status": SUCCESS_MESSAGE_TEMPLATE.format(report_type=_('grade'))
    })
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_RESEARCH)
@common_exceptions_400
def problem_grade_report(request, course_id):
    """
    Request a CSV showing students' grades for all problems in the
    course.

    AlreadyRunningError is raised if the course's grades are already being
    updated.
    """
    course_key = CourseKey.from_string(course_id)
    task_api.submit_problem_grade_report(request, course_key)
    return JsonResponse({
        "status": SUCCESS_MESSAGE_TEMPLATE.format(report_type=_('problem grade'))
    })
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.CAN_ENROLL)
@require_post_params('rolename')
def list_forum_members(request, course_id):
    """
    Lists forum members of a certain rolename.
    Limited to staff access.

    The requesting user must be at least staff.
    Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
    which is limited to instructors.

    Takes query parameter `rolename`.

    Returns a JsonResponse mapping the rolename to a list of user dicts
    (username, email, first/last name, discussion group name).
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_by_id(course_id)
    has_instructor_access = has_access(request.user, 'instructor', course)
    has_forum_admin = has_forum_access(
        request.user, course_id, FORUM_ROLE_ADMINISTRATOR
    )
    rolename = request.POST.get('rolename')
    # default roles require either (staff & forum admin) or (instructor)
    if not (has_forum_admin or has_instructor_access):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
    if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
        return HttpResponseBadRequest("Operation requires instructor access.")
    # filter out unsupported for roles
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_GROUP_MODERATOR,
                        FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            u"Unrecognized rolename '{}'.".format(rolename)
        ))
    # A missing Role row simply means nobody holds this role yet — not an error.
    try:
        role = Role.objects.get(name=rolename, course_id=course_id)
        users = role.users.all().order_by('username')
    except Role.DoesNotExist:
        users = []
    course_discussion_settings = get_course_discussion_settings(course_id)

    def extract_user_info(user):
        """ Convert user to dict for json rendering. """
        # Group name comes from the course's discussion division scheme
        # (closure over course_discussion_settings above).
        group_id = get_group_id_for_user(user, course_discussion_settings)
        group_name = get_group_name(group_id, course_discussion_settings)
        return {
            'username': user.username,
            'email': user.email,
            'first_name': user.first_name,
            'last_name': user.last_name,
            'group_name': group_name,
        }

    response_payload = {
        'course_id': text_type(course_id),
        rolename: list(map(extract_user_info, users)),
        'division_scheme': course_discussion_settings.division_scheme,
    }
    return JsonResponse(response_payload)
@transaction.non_atomic_requests
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EMAIL)
@require_post_params(send_to="sending to whom", subject="subject line", message="message text")
@common_exceptions_400
def send_email(request, course_id):
    """
    Send an email to self, staff, cohorts, or everyone involved in a course.

    Query Parameters:
    - 'send_to' specifies what group the email should be sent to
       Options are defined by the CourseEmail model in
       lms/djangoapps/bulk_email/models.py
    - 'subject' specifies email's subject
    - 'message' specifies email's content

    Returns a JsonResponse with the course id and success=True, a 403 if bulk
    email is disabled for the course, or a 400 for invalid targets.
    """
    course_id = CourseKey.from_string(course_id)
    # Bulk email must be explicitly enabled for this course; otherwise refuse.
    if not is_bulk_email_feature_enabled(course_id):
        log.warning(u'Email is not enabled for course %s', course_id)
        return HttpResponseForbidden("Email is not enabled for this course.")
    # 'send_to' arrives as a JSON-encoded list of target descriptors.
    targets = json.loads(request.POST.get("send_to"))
    subject = request.POST.get("subject")
    message = request.POST.get("message")
    # allow two branding points to come from Site Configuration: which CourseEmailTemplate should be used
    # and what the 'from' field in the email should be
    #
    # If these are None (there is no site configuration enabled for the current site) then
    # the system will use normal system defaults
    course_overview = CourseOverview.get_from_id(course_id)
    from_addr = configuration_helpers.get_value('course_email_from_addr')
    if isinstance(from_addr, dict):
        # If course_email_from_addr is a dict, we are customizing
        # the email template for each organization that has courses
        # on the site. The dict maps from addresses by org allowing
        # us to find the correct from address to use here.
        from_addr = from_addr.get(course_overview.display_org_with_default)
    template_name = configuration_helpers.get_value('course_email_template_name')
    if isinstance(template_name, dict):
        # If course_email_template_name is a dict, we are customizing
        # the email template for each organization that has courses
        # on the site. The dict maps template names by org allowing
        # us to find the correct template to use here.
        template_name = template_name.get(course_overview.display_org_with_default)
    # Create the CourseEmail object. This is saved immediately, so that
    # any transaction that has been pending up to this point will also be
    # committed.
    try:
        email = CourseEmail.create(
            course_id,
            request.user,
            targets,
            subject, message,
            template_name=template_name,
            from_addr=from_addr
        )
    except ValueError as err:
        # CourseEmail.create raises ValueError for invalid targets; report as 400.
        log.exception(u'Cannot create course email for course %s requested by user %s for targets %s',
                      course_id, request.user, targets)
        return HttpResponseBadRequest(repr(err))
    # Submit the task, so that the correct InstructorTask object gets created (for monitoring purposes)
    task_api.submit_bulk_course_email(request, course_id, email.id)
    response_payload = {
        'course_id': text_type(course_id),
        'success': True,
    }
    return JsonResponse(response_payload)
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.EDIT_FORUM_ROLES)
@require_post_params(
    unique_student_identifier="email or username of user to change access",
    rolename="the forum role",
    action="'allow' or 'revoke'",
)
@common_exceptions_400
def update_forum_role_membership(request, course_id):
    """
    Modify user's forum role.

    The requesting user must be at least staff.
    Staff forum admins can access all roles EXCEPT for FORUM_ROLE_ADMINISTRATOR
    which is limited to instructors.
    No one can revoke an instructors FORUM_ROLE_ADMINISTRATOR status.

    Query parameters:
    - `email` is the target users email
    - `rolename` is one of [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_GROUP_MODERATOR,
        FORUM_ROLE_MODERATOR, FORUM_ROLE_COMMUNITY_TA]
    - `action` is one of ['allow', 'revoke']

    Returns a JsonResponse echoing the course id and the action taken,
    or a 400 for permission/validation failures.
    """
    course_id = CourseKey.from_string(course_id)
    course = get_course_by_id(course_id)
    has_instructor_access = has_access(request.user, 'instructor', course)
    has_forum_admin = has_forum_access(
        request.user, course_id, FORUM_ROLE_ADMINISTRATOR
    )
    unique_student_identifier = request.POST.get('unique_student_identifier')
    rolename = request.POST.get('rolename')
    action = request.POST.get('action')
    # default roles require either (staff & forum admin) or (instructor)
    if not (has_forum_admin or has_instructor_access):
        return HttpResponseBadRequest(
            "Operation requires staff & forum admin or instructor access"
        )
    # EXCEPT FORUM_ROLE_ADMINISTRATOR requires (instructor)
    if rolename == FORUM_ROLE_ADMINISTRATOR and not has_instructor_access:
        return HttpResponseBadRequest("Operation requires instructor access.")
    # Reject role names outside the supported set before touching the DB.
    if rolename not in [FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_GROUP_MODERATOR,
                        FORUM_ROLE_COMMUNITY_TA]:
        return HttpResponseBadRequest(strip_tags(
            u"Unrecognized rolename '{}'.".format(rolename)
        ))
    user = get_student_from_identifier(unique_student_identifier)
    try:
        update_forum_role(course_id, user, rolename, action)
    except Role.DoesNotExist:
        return HttpResponseBadRequest("Role does not exist.")
    response_payload = {
        'course_id': text_type(course_id),
        'action': action,
    }
    return JsonResponse(response_payload)
@require_POST
def get_user_invoice_preference(request, course_id):
    """
    Gets invoice copy user's preferences.
    """
    stored_value = get_user_preference(request.user, INVOICE_KEY)
    # Default to sending an invoice copy unless the stored preference says otherwise.
    wants_copy = True if stored_value is None else stored_value == 'True'
    return JsonResponse({
        'invoice_copy': wants_copy
    })
def _display_unit(unit):
    """
    Gets string for displaying unit to user.
    """
    location_text = text_type(unit.location)
    name = getattr(unit, 'display_name', None)
    # Fall back to the bare location when the unit has no display name.
    if not name:
        return location_text
    return u'{0} ({1})'.format(name, location_text)
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('student', 'url', 'due_datetime')
def change_due_date(request, course_id):
    """
    Grants a due date extension to a student for a particular unit.
    """
    post = request.POST
    course = get_course_by_id(CourseKey.from_string(course_id))
    student = require_student_from_identifier(post.get('student'))
    unit = find_unit(course, post.get('url'))
    due_date = parse_datetime(post.get('due_datetime'))
    reason = strip_tags(post.get('reason', ''))

    set_due_date_extension(course, unit, student, due_date, request.user, reason=reason)

    success_text = _(
        u'Successfully changed due date for student {0} for {1} '
        u'to {2}').format(student.profile.name, _display_unit(unit),
                          due_date.strftime(u'%Y-%m-%d %H:%M'))
    return JsonResponse(success_text)
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('student', 'url')
def reset_due_date(request, course_id):
    """
    Rescinds a due date extension for a student on a particular unit.
    """
    course = get_course_by_id(CourseKey.from_string(course_id))
    student = require_student_from_identifier(request.POST.get('student'))
    unit = find_unit(course, request.POST.get('url'))
    reason = strip_tags(request.POST.get('reason', ''))
    # Read the unit's normal due date BEFORE clearing the extension, so the
    # success message can report what the date reverts to.
    # NOTE(review): `course_id` here is still the raw URL string (unlike `course`
    # above, which was built from a CourseKey) — presumably get_date_for_block
    # accepts a string id; confirm against its API.
    original_due_date = get_date_for_block(course_id, unit.location)
    # Passing None as the date removes this student's extension for the unit.
    set_due_date_extension(course, unit, student, None, request.user, reason=reason)
    if not original_due_date:
        # It's possible the normal due date was deleted after an extension was granted:
        return JsonResponse(
            _("Successfully removed invalid due date extension (unit has no due date).")
        )
    original_due_date_str = original_due_date.strftime(u'%Y-%m-%d %H:%M')
    return JsonResponse(_(
        u'Successfully reset due date for student {0} for {1} '
        u'to {2}').format(student.profile.name, _display_unit(unit),
                          original_due_date_str))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('url')
def show_unit_extensions(request, course_id):
    """
    Shows all of the students which have due date extensions for the given unit.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)
    unit = find_unit(course, request.POST.get('url'))
    return JsonResponse(dump_module_extensions(course, unit))
@handle_dashboard_error
@require_POST
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GIVE_STUDENT_EXTENSION)
@require_post_params('student')
def show_student_extensions(request, course_id):
    """
    Shows all of the due date extensions granted to a particular student in a
    particular course.
    """
    student = require_student_from_identifier(request.POST.get('student'))
    course_key = CourseKey.from_string(course_id)
    course = get_course_by_id(course_key)
    return JsonResponse(dump_student_extensions(course, student))
def _split_input_list(str_list):
"""
Separate out individual student email from the comma, or space separated string.
e.g.
in: "Lorem@ipsum.dolor, sit@amet.consectetur\nadipiscing@elit.Aenean\r convallis@at.lacus\r, ut@lacinia.Sed"
out: ['Lorem@ipsum.dolor', 'sit@amet.consectetur', 'adipiscing@elit.Aenean', 'convallis@at.lacus', 'ut@lacinia.Sed']
`str_list` is a string coming from an input text area
returns a list of separated values
"""
new_list = re.split(r'[\n\r\s,]', str_list)
new_list = [s.strip() for s in new_list]
new_list = [s for s in new_list if s != '']
return new_list
def _instructor_dash_url(course_key, section=None):
    """Return the URL for a section in the instructor dashboard.

    Arguments:
        course_key (CourseKey)

    Keyword Arguments:
        section (str): The name of the section to load.

    Returns:
        unicode: The URL of a section in the instructor dashboard.
    """
    dashboard_url = reverse('instructor_dashboard', kwargs={'course_id': six.text_type(course_key)})
    if section is None:
        return dashboard_url
    # Sections are addressed with a fragment identifier on the dashboard page.
    return dashboard_url + u'#view-{section}'.format(section=section)
@require_global_staff
@require_POST
def generate_example_certificates(request, course_id=None):
    """Start generating a set of example certificates.

    Example certificates are used to verify that certificates have
    been configured correctly for the course.

    Redirects back to the instructor dashboard once certificate
    generation has begun.
    """
    course_key = CourseKey.from_string(course_id)
    certs_api.generate_example_certificates(course_key)
    # Send the user straight back to the certificates section of the dashboard.
    return redirect(_instructor_dash_url(course_key, section='certificates'))
@require_course_permission(permissions.ENABLE_CERTIFICATE_GENERATION)
@require_POST
def enable_certificate_generation(request, course_id=None):
    """Enable/disable self-generated certificates for a course.

    Once self-generated certificates have been enabled, students
    who have passed the course will be able to generate certificates.

    Redirects back to the instructor dashboard once the
    setting has been updated.
    """
    course_key = CourseKey.from_string(course_id)
    # Anything other than the literal string 'true' disables generation.
    enable = request.POST.get('certificates-enabled', 'false') == 'true'
    certs_api.set_cert_generation_enabled(course_key, enable)
    return redirect(_instructor_dash_url(course_key, section='certificates'))
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.ALLOW_STUDENT_TO_BYPASS_ENTRANCE_EXAM)
@require_POST
def mark_student_can_skip_entrance_exam(request, course_id):
    """
    Mark a student to skip entrance exam.
    Takes `unique_student_identifier` as required POST parameter.
    """
    course_key = CourseKey.from_string(course_id)
    student_identifier = request.POST.get('unique_student_identifier')
    student = get_student_from_identifier(student_identifier)
    # get_or_create tells us whether the skip permission already existed.
    __, created = EntranceExamConfiguration.objects.get_or_create(user=student, course_id=course_key)
    message_template = (
        _(u'This student (%s) will skip the entrance exam.')
        if created
        else _(u'This student (%s) is already allowed to skip the entrance exam.')
    )
    return JsonResponse({
        'message': message_template % student_identifier,
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
@common_exceptions_400
def start_certificate_generation(request, course_id):
    """
    Start generating certificates for all students enrolled in given course.
    """
    course_key = CourseKey.from_string(course_id)
    task = task_api.generate_certificates_for_students(request, course_key)
    return JsonResponse({
        'message': _('Certificate generation task for all students of this course has been started. '
                     'You can view the status of the generation task in the "Pending Tasks" section.'),
        'task_id': task.task_id
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_POST
@common_exceptions_400
def start_certificate_regeneration(request, course_id):
    """
    Start regenerating certificates for students whose certificate statuses lie with in 'certificate_statuses'
    entry in POST data.
    """
    course_key = CourseKey.from_string(course_id)
    requested_statuses = request.POST.getlist('certificate_statuses', [])
    if not requested_statuses:
        return JsonResponse(
            {'message': _('Please select one or more certificate statuses that require certificate regeneration.')},
            status=400
        )

    # Only these statuses may be targeted for regeneration.
    allowed_statuses = {
        CertificateStatuses.downloadable,
        CertificateStatuses.error,
        CertificateStatuses.notpassing,
        CertificateStatuses.audit_passing,
        CertificateStatuses.audit_notpassing,
    }
    if any(status not in allowed_statuses for status in requested_statuses):
        return JsonResponse(
            {'message': _('Please select certificate statuses from the list only.')},
            status=400
        )

    task_api.regenerate_certificates(request, course_key, requested_statuses)
    return JsonResponse({
        'message': _('Certificate regeneration task has been started. '
                     'You can view the status of the generation task in the "Pending Tasks" section.'),
        'success': True
    })
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_exception_view(request, course_id):
    """
    Add/Remove students to/from certificate white list.

    POST adds the student named in the request body to the white list;
    DELETE removes them. Both share the same request-body validation.

    :param request: HttpRequest object
    :param course_id: course identifier of the course for whom to add/remove certificates exception.
    :return: JsonResponse object with success/error message or certificate exception data.
    """
    course_key = CourseKey.from_string(course_id)
    # Validate request data and return error response in case of invalid data
    try:
        certificate_exception, student = parse_request_data_and_get_user(request, course_key)
    except ValueError as error:
        return JsonResponse({'success': False, 'message': text_type(error)}, status=400)
    # Add new Certificate Exception for the student passed in request data
    if request.method == 'POST':
        try:
            exception = add_certificate_exception(course_key, student, certificate_exception)
        except ValueError as error:
            return JsonResponse({'success': False, 'message': text_type(error)}, status=400)
        return JsonResponse(exception)
    # Remove Certificate Exception for the student passed in request data
    elif request.method == 'DELETE':
        try:
            remove_certificate_exception(course_key, student)
        except ValueError as error:
            return JsonResponse({'success': False, 'message': text_type(error)}, status=400)
        # 204: removed successfully, nothing to return.
        return JsonResponse({}, status=204)
def add_certificate_exception(course_key, student, certificate_exception):
    """
    Put a student on the certificate white list for a course.

    Raises ValueError in case Student is already white listed.

    :param course_key: identifier of the course whose certificate exception will be added.
    :param student: User object whose certificate exception will be added.
    :param certificate_exception: A dict object containing certificate exception info.
    :return: CertificateWhitelist item in dict format containing certificate exception info.
    """
    # Refuse duplicates: a student may appear on the white list only once.
    if CertificateWhitelist.get_certificate_white_list(course_key, student):
        raise ValueError(
            _(u"Student (username/email={user}) already in certificate exception list.").format(user=student.username)
        )

    whitelist_entry, __ = CertificateWhitelist.objects.get_or_create(
        user=student,
        course_id=course_key,
        defaults={
            'whitelist': True,
            'notes': certificate_exception.get('notes', '')
        }
    )
    log.info(u'%s has been added to the whitelist in course %s', student.username, course_key)

    # Look up the most recent downloadable certificate, if one was already issued.
    generated_certificate = GeneratedCertificate.eligible_certificates.filter(
        user=student,
        course_id=course_key,
        status=CertificateStatuses.downloadable,
    ).first()

    return {
        'id': whitelist_entry.id,
        'user_email': student.email,
        'user_name': student.username,
        'user_id': student.id,
        'certificate_generated': generated_certificate and generated_certificate.created_date.strftime(u"%B %d, %Y"),
        'created': whitelist_entry.created.strftime(u"%A, %B %d, %Y"),
    }
def remove_certificate_exception(course_key, student):
    """
    Remove certificate exception for given course and student from CertificateWhitelist table and
    invalidate its GeneratedCertificate if present.

    Raises ValueError in case no exception exists for the student in the given course.

    :param course_key: identifier of the course whose certificate exception needs to be removed.
    :param student: User object whose certificate exception needs to be removed.
    :return:
    """
    try:
        certificate_exception = CertificateWhitelist.objects.get(user=student, course_id=course_key)
    except ObjectDoesNotExist:
        raise ValueError(
            _(u'Certificate exception (user={user}) does not exist in certificate white list. '
              'Please refresh the page and try again.').format(user=student.username)
        )

    # Invalidate any certificate that was generated while the exception was in
    # force, BEFORE deleting the exception row itself.
    try:
        generated_certificate = GeneratedCertificate.objects.get(
            user=student,
            course_id=course_key
        )
        generated_certificate.invalidate()
        log.info(
            u'Certificate invalidated for %s in course %s when removed from certificate exception list',
            student.username,
            course_key
        )
    except ObjectDoesNotExist:
        # Certificate has not been generated yet, so just remove the certificate exception from white list
        pass
    log.info(u'%s has been removed from the whitelist in course %s', student.username, course_key)
    certificate_exception.delete()
def parse_request_data_and_get_user(request, course_key):
    """
    Parse request data into Certificate Exception and User object.

    Certificate Exception is the dict object containing information about certificate exception.

    :param request:
    :param course_key: Course Identifier of the course for whom to process certificate exception
    :return: key-value pairs containing certificate exception data and User object
    """
    certificate_exception = parse_request_data(request)

    # Either field may carry the identifier; username wins when both are present.
    identifier = certificate_exception.get('user_name', '') or certificate_exception.get('user_email', '')
    if not identifier:
        raise ValueError(_('Student username/email field is required and can not be empty. '
                           'Kindly fill in username/email and then press "Add to Exception List" button.'))

    return certificate_exception, get_student(identifier, course_key)
def parse_request_data(request):
    """
    Parse and return request data, raise ValueError in case of invalid JSON data.

    :param request: HttpRequest request object.
    :return: dict object containing parsed json data.
    """
    # decode stays inside the try: UnicodeDecodeError is a ValueError subclass,
    # so a malformed body is reported the same way as malformed JSON.
    try:
        return json.loads(request.body.decode('utf8') or u'{}')
    except ValueError:
        raise ValueError(_('The record is not in the correct format. Please add a valid username or email address.'))
def get_student(username_or_email, course_key):
    """
    Retrieve and return User object from db, raise ValueError
    if user is does not exists or is not enrolled in the given course.

    :param username_or_email: String containing either user name or email of the student.
    :param course_key: CourseKey object identifying the current course.
    :return: User object
    """
    try:
        student = get_user_by_username_or_email(username_or_email)
    except ObjectDoesNotExist:
        raise ValueError(_(u"{user} does not exist in the LMS. Please check your spelling and retry.").format(
            user=username_or_email
        ))

    # Make Sure the given student is enrolled in the course
    if CourseEnrollment.is_enrolled(student, course_key):
        return student
    raise ValueError(_(u"{user} is not enrolled in this course. Please check your spelling and retry.")
                     .format(user=username_or_email))
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GENERATE_CERTIFICATE_EXCEPTIONS)
@require_POST
@common_exceptions_400
def generate_certificate_exceptions(request, course_id, generate_for=None):
    """
    Generate Certificate for students in the Certificate White List.

    :param request: HttpRequest object,
    :param course_id: course identifier of the course for whom to generate certificates
    :param generate_for: string to identify whether to generate certificates for 'all' or 'new'
            additions to the certificate white-list
    :return: JsonResponse object containing success/failure message and certificate exception data
    """
    course_key = CourseKey.from_string(course_id)

    # Map the URL mode onto the task API's student-set name; anything else is invalid.
    student_set_by_mode = {
        'all': 'all_whitelisted',
        'new': 'whitelisted_not_generated',
    }
    students = student_set_by_mode.get(generate_for)
    if students is None:
        return JsonResponse(
            {
                'success': False,
                'message': _('Invalid data, generate_for must be "new" or "all".'),
            },
            status=400
        )

    task_api.generate_certificates_for_students(request, course_key, student_set=students)
    return JsonResponse({
        'success': True,
        'message': _('Certificate generation started for white listed students.'),
    })
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_course_permission(permissions.GENERATE_BULK_CERTIFICATE_EXCEPTIONS)
@require_POST
def generate_bulk_certificate_exceptions(request, course_id):
    """
    Add Students to certificate white list from the uploaded csv file.

    Expects a 'students_list' CSV upload where each row is
    (username-or-email, notes).

    :return response in dict format.
    {
        general_errors: [errors related to csv file e.g. csv uploading, csv attachment, content reading etc. ],
        row_errors: {
            data_format_error:              [users/data in csv file that are not well formatted],
            user_not_exist:                 [csv with none exiting users in LMS system],
            user_already_white_listed:      [users that are already white listed],
            user_not_enrolled:              [rows with not enrolled users in the given course]
        },
        success: [list of successfully added users to the certificate white list model]
    }
    """
    # Column positions within each CSV row.
    user_index = 0
    notes_index = 1
    row_errors_key = ['data_format_error', 'user_not_exist', 'user_already_white_listed', 'user_not_enrolled']
    course_key = CourseKey.from_string(course_id)
    students, general_errors, success = [], [], []
    row_errors = {key: [] for key in row_errors_key}

    def build_row_errors(key, _user, row_count):
        """
        inner method to build dict of csv data as row errors.
        """
        row_errors[key].append(_(u'user "{user}" in row# {row}').format(user=_user, row=row_count))

    if 'students_list' in request.FILES:
        try:
            upload_file = request.FILES.get('students_list')
            if upload_file.name.endswith('.csv'):
                students = [row for row in csv.reader(upload_file.read().decode('utf-8').splitlines())]
            else:
                general_errors.append(_('Make sure that the file you upload is in CSV format with no '
                                        'extraneous characters or rows.'))
        except Exception:  # pylint: disable=broad-except
            # Best-effort: any read/decode failure is reported as a general error.
            general_errors.append(_('Could not read uploaded file.'))
        finally:
            upload_file.close()

        row_num = 0
        for student in students:
            row_num += 1
            # verify that we have exactly two column in every row either email or username and notes but allow for
            # blank lines
            if len(student) != 2:
                if student:
                    build_row_errors('data_format_error', student[user_index], row_num)
                    log.info(u'invalid data/format in csv row# %s', row_num)
                continue

            user = student[user_index]
            try:
                user = get_user_by_username_or_email(user)
            except ObjectDoesNotExist:
                build_row_errors('user_not_exist', user, row_num)
                log.info(u'student %s does not exist', user)
            else:
                if CertificateWhitelist.get_certificate_white_list(course_key, user):
                    build_row_errors('user_already_white_listed', user, row_num)
                    log.warning(u'student %s already exist.', user.username)
                # make sure user is enrolled in course
                elif not CourseEnrollment.is_enrolled(user, course_key):
                    build_row_errors('user_not_enrolled', user, row_num)
                    log.warning(u'student %s is not enrolled in course.', user.username)
                else:
                    CertificateWhitelist.objects.create(
                        user=user,
                        course_id=course_key,
                        whitelist=True,
                        notes=student[notes_index]
                    )
                    success.append(_(u'user "{username}" in row# {row}').format(username=user.username, row=row_num))
    else:
        general_errors.append(_('File is not attached.'))

    results = {
        'general_errors': general_errors,
        'row_errors': row_errors,
        'success': success
    }
    return JsonResponse(results)
@transaction.non_atomic_requests
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_global_staff
@require_http_methods(['POST', 'DELETE'])
def certificate_invalidation_view(request, course_id):
    """
    Invalidate/Re-Validate students to/from certificate.

    POST invalidates the certificate named in the request body;
    DELETE re-validates it. Both share the same request-body validation.

    :param request: HttpRequest object
    :param course_id: course identifier of the course for whom to add/remove certificates exception.
    :return: JsonResponse object with success/error message or certificate invalidation data.
    """
    course_key = CourseKey.from_string(course_id)
    # Validate request data and return error response in case of invalid data
    try:
        certificate_invalidation_data = parse_request_data(request)
        certificate = validate_request_data_and_get_certificate(certificate_invalidation_data, course_key)
    except ValueError as error:
        return JsonResponse({'message': text_type(error)}, status=400)
    # Invalidate certificate of the given student for the course course
    if request.method == 'POST':
        try:
            certificate_invalidation = invalidate_certificate(request, certificate, certificate_invalidation_data)
        except ValueError as error:
            return JsonResponse({'message': text_type(error)}, status=400)
        return JsonResponse(certificate_invalidation)
    # Re-Validate student certificate for the course course
    elif request.method == 'DELETE':
        try:
            re_validate_certificate(request, course_key, certificate)
        except ValueError as error:
            return JsonResponse({'message': text_type(error)}, status=400)
        # 204: re-validated successfully, nothing to return.
        return JsonResponse({}, status=204)
def invalidate_certificate(request, generated_certificate, certificate_invalidation_data):
    """
    Invalidate the given GeneratedCertificate and record a CertificateInvalidation
    entry for future reference or re-validation.

    :param request: HttpRequest object
    :param generated_certificate: GeneratedCertificate object, the certificate we want to invalidate
    :param certificate_invalidation_data: dict object containing data for CertificateInvalidation.
    :return: dict object containing updated certificate invalidation data.
    :raises ValueError: if the certificate is already invalidated, or is already invalid.
    """
    # Refuse a double invalidation.
    if CertificateInvalidation.get_certificate_invalidations(
            generated_certificate.course_id,
            generated_certificate.user,
    ):
        raise ValueError(
            _(u"Certificate of {user} has already been invalidated. Please check your spelling and retry.").format(
                user=generated_certificate.user.username,
            )
        )

    # The certificate must currently be valid for invalidation to make sense.
    if not generated_certificate.is_valid():
        raise ValueError(
            _(u"Certificate for student {user} is already invalid, kindly verify that certificate was generated "
              "for this student and then proceed.").format(user=generated_certificate.user.username)
        )

    # Record (or refresh) the invalidation so it can be looked up or undone later.
    invalidation, __ = CertificateInvalidation.objects.update_or_create(
        generated_certificate=generated_certificate,
        defaults={
            'invalidated_by': request.user,
            'notes': certificate_invalidation_data.get("notes", ""),
            'active': True,
        }
    )

    # Flip the GeneratedCertificate itself into the invalidated state.
    generated_certificate.invalidate()

    return {
        'id': invalidation.id,
        'user': invalidation.generated_certificate.user.username,
        'invalidated_by': invalidation.invalidated_by.username,
        'created': invalidation.created.strftime(u"%B %d, %Y"),
        'notes': invalidation.notes,
    }
@common_exceptions_400
def re_validate_certificate(request, course_key, generated_certificate):
    """
    Deactivate the certificate invalidation record and kick off certificate
    regeneration for the affected student.

    :param request: HttpRequest object
    :param course_key: CourseKey object identifying the current course.
    :param generated_certificate: GeneratedCertificate object of the student for the given course
    :raises ValueError: if no CertificateInvalidation exists for the certificate.
    """
    try:
        invalidation = CertificateInvalidation.objects.get(generated_certificate=generated_certificate)
    except ObjectDoesNotExist:
        raise ValueError(_("Certificate Invalidation does not exist, Please refresh the page and try again."))

    # Deactivate the invalidation, then regenerate the certificate for just
    # this one student.
    invalidation.deactivate()
    student = invalidation.generated_certificate.user
    task_api.generate_certificates_for_students(
        request, course_key, student_set="specific_student", specific_student_id=student.id
    )
def validate_request_data_and_get_certificate(certificate_invalidation, course_key):
    """
    Fetch and return the GeneratedCertificate of the student named in the
    request data for the given course.

    :param certificate_invalidation: dict containing certificate invalidation data
    :param course_key: CourseKey object identifying the current course.
    :return: GeneratedCertificate object of the student for the given course
    :raises ValueError: if the student username/email is missing, or the
        student has no certificate for the course.
    """
    user_identifier = certificate_invalidation.get("user")
    if not user_identifier:
        raise ValueError(
            _('Student username/email field is required and can not be empty. '
              'Kindly fill in username/email and then press "Invalidate Certificate" button.')
        )

    student = get_student(user_identifier, course_key)
    certificate = GeneratedCertificate.certificate_for_student(student, course_key)
    if not certificate:
        raise ValueError(_(
            u"The student {student} does not have certificate for the course {course}. Kindly verify student "
            "username/email and the selected course are correct and try again."
        ).format(student=student.username, course=course_key.course))
    return certificate
def _get_boolean_param(request, param_name):
"""
Returns the value of the boolean parameter with the given
name in the POST request. Handles translation from string
values to boolean values.
"""
return request.POST.get(param_name, False) in ['true', 'True', True]
def _create_error_response(request, msg):
    """
    Build the JSON error response (HTTP 400) for the current request.
    """
    payload = {"error": msg}
    return JsonResponse(payload, 400)
| codeparrot/github-code-clean |
""" openconfig_mpls
This module provides data definitions for configuration of
Multiprotocol Label Switching (MPLS) and associated protocols for
signaling and traffic engineering.
RFC 3031\: Multiprotocol Label Switching Architecture
The MPLS / TE data model consists of several modules and
submodules as shown below. The top\-level MPLS module describes
the overall framework. Three types of LSPs are supported\:
i) traffic\-engineered (or constrained\-path)
ii) IGP\-congruent (LSPs that follow the IGP path)
iii) static LSPs which are not signaled
The structure of each of these LSP configurations is defined in
corresponding submodules. Companion modules define the relevant
configuration and operational data specific to key signaling
protocols used in operational practice.
+\-\-\-\-\-\-\-+
+\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\->\| MPLS \|<\-\-\-\-\-\-\-\-\-\-\-\-\-\-+
\| +\-\-\-\-\-\-\-+ \|
\| ^ \|
\| \| \|
+\-\-\-\-+\-\-\-\-\-+ +\-\-\-\-\-\-\-\-+\-\-\-\-\-\-\-+ +\-\-\-\-\-+\-\-\-\-\-+
\| TE LSPs \| \| IGP\-based LSPs \| \|static LSPs\|
\| \| \| \| \| \|
+\-\-\-\-\-\-\-\-\-\-+ +\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-+ +\-\-\-\-\-\-\-\-\-\-\-+
^ ^ ^ ^
\| +\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-+ \| +\-\-\-\-\-\-\-\-+
\| \| \| \|
\| +\-\-\-\-\-\-+ +\-+\-\-\-+\-+ +\-\-+\-\-+
+\-\-\-+ RSVP \| \|SEGMENT\| \| LDP \|
+\-\-\-\-\-\-+ \|ROUTING\| +\-\-\-\-\-+
+\-\-\-\-\-\-\-+
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
from ydk.models.openconfig.openconfig_mpls_types import LspOperStatusIdentity
from ydk.models.openconfig.openconfig_mpls_types import LspRoleIdentity
from ydk.models.openconfig.openconfig_mpls_types import MplsLabelEnum
from ydk.models.openconfig.openconfig_mpls_types import NullLabelTypeIdentity
from ydk.models.openconfig.openconfig_mpls_types import ProtectionTypeIdentity
from ydk.models.openconfig.openconfig_mpls_types import TunnelAdminStatusIdentity
from ydk.models.openconfig.openconfig_mpls_types import TunnelTypeEnum
from ydk.models.openconfig.openconfig_mpls_types import TunnelTypeIdentity
class CspfTieBreakingEnum(Enum):
    """
    Tie-breaking policy applied by the CSPF computation when several
    equal-cost paths to the destination are available.

    .. data:: RANDOM = 0

    	pick one of the equal-cost paths at random

    .. data:: LEAST_FILL = 1

    	prefer the path with the greatest available bandwidth

    .. data:: MOST_FILL = 2

    	prefer the path with the least available bandwidth

    """

    RANDOM = 0
    LEAST_FILL = 1
    MOST_FILL = 2

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the meta module.
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['CspfTieBreakingEnum']
class MplsHopTypeEnum(Enum):
    """
    Whether a hop in an explicit path is loose or strict.

    .. data:: LOOSE = 0

    	loose hop in an explicit path

    .. data:: STRICT = 1

    	strict hop in an explicit path

    """

    LOOSE = 0
    STRICT = 1

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the meta module.
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['MplsHopTypeEnum']
class MplsSrlgFloodingTypeEnum(Enum):
    """
    Enumerated type specifying how the SRLG is flooded.

    .. data:: FLOODED_SRLG = 0

    	SRLG is flooded in the IGP

    .. data:: STATIC_SRLG = 1

    	SRLG is not flooded; the members are statically configured

    """

    FLOODED_SRLG = 0
    STATIC_SRLG = 1

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the meta module.
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['MplsSrlgFloodingTypeEnum']
class TeBandwidthTypeEnum(Enum):
    """
    Whether TE bandwidth is explicitly specified or automatically computed.

    .. data:: SPECIFIED = 0

    	Bandwidth is explicitly specified

    .. data:: AUTO = 1

    	Bandwidth is automatically computed

    """

    SPECIFIED = 0
    AUTO = 1

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the meta module.
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['TeBandwidthTypeEnum']
class TeMetricTypeEnum(Enum):
    """
    Union-type member for the LSP TE metric: either a static value (set
    elsewhere) or this enum, which tracks the IGP metric.

    .. data:: IGP = 0

    	set the LSP metric to track the underlying IGP metric

    """

    IGP = 0

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the meta module.
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['TeMetricTypeEnum']
class PathComputationMethodIdentity(object):
    """
    Base identity for the supported path computation mechanisms.
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Identity classes carry no instance state.
        pass

    @staticmethod
    def _meta_info():
        # Imported lazily to avoid a circular import with the meta module.
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['PathComputationMethodIdentity']['meta_info']
class Mpls(object):
"""
Anchor point for mpls configuration and operational
data
.. attribute:: global_
general mpls configuration applicable to any type of LSP and signaling protocol \- label ranges, entropy label supportmay be added here
**type**\: :py:class:`Global <ydk.models.openconfig.openconfig_mpls.Mpls.Global>`
.. attribute:: lsps
LSP definitions and configuration
**type**\: :py:class:`Lsps <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps>`
.. attribute:: signaling_protocols
top\-level signaling protocol configuration
**type**\: :py:class:`SignalingProtocols <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols>`
.. attribute:: te_global_attributes
traffic\-engineering global attributes
**type**\: :py:class:`TeGlobalAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes>`
.. attribute:: te_interface_attributes
traffic engineering attributes specific for interfaces
**type**\: :py:class:`TeInterfaceAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes>`
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    # Mpls is modeled as a presence container.
    self._is_presence = True
    self.global_ = Mpls.Global()
    self.lsps = Mpls.Lsps()
    self.signaling_protocols = Mpls.SignalingProtocols()
    self.te_global_attributes = Mpls.TeGlobalAttributes()
    self.te_interface_attributes = Mpls.TeInterfaceAttributes()
    # Wire every child container back to this object.
    for child in (self.global_, self.lsps, self.signaling_protocols,
                  self.te_global_attributes, self.te_interface_attributes):
        child.parent = self
class Global(object):
    """
    General MPLS configuration applicable to any type of LSP and signaling
    protocol; label ranges, entropy label support and similar global knobs
    belong here.

    .. attribute:: config

    	Top level global MPLS configuration

    .. attribute:: mpls_interface_attributes

    	Parameters related to MPLS interfaces

    .. attribute:: state

    	Top level global MPLS state

    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None
        self.config = Mpls.Global.Config()
        self.config.parent = self
        self.mpls_interface_attributes = Mpls.Global.MplsInterfaceAttributes()
        self.mpls_interface_attributes.parent = self
        self.state = Mpls.Global.State()
        self.state.parent = self

    class Config(object):
        """
        Top level global MPLS configuration.

        .. attribute:: null_label

        	The null-label type used, implicit or explicit

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.null_label = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:global/openconfig-mpls:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.null_label is not None

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.Global.Config']['meta_info']

    class State(object):
        """
        Top level global MPLS operational state.

        .. attribute:: null_label

        	The null-label type used, implicit or explicit

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.null_label = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:global/openconfig-mpls:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # State nodes report is_config() False, so this always yields False.
            if not self.is_config():
                return False
            return self.null_label is not None

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.Global.State']['meta_info']

    class MplsInterfaceAttributes(object):
        """
        Parameters related to MPLS interfaces.

        .. attribute:: interface

        	List of TE interfaces

        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.interface = YList()
            self.interface.parent = self
            self.interface.name = 'interface'

        class Interface(object):
            """
            A single TE interface entry, keyed by interface name.

            .. attribute:: name  <key>

            	The interface name

            .. attribute:: config

            	Configuration parameters related to MPLS interfaces

            .. attribute:: state

            	State parameters related to TE interfaces

            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                self.parent = None
                self.name = None
                self.config = Mpls.Global.MplsInterfaceAttributes.Interface.Config()
                self.config.parent = self
                self.state = Mpls.Global.MplsInterfaceAttributes.Interface.State()
                self.state.parent = self

            class Config(object):
                """
                Configuration parameters related to MPLS interfaces.

                .. attribute:: mpls_enabled

                	Enable MPLS forwarding on this interface (bool)

                .. attribute:: name

                	Reference to the interface name

                """

                _prefix = 'mpls'
                _revision = '2015-11-05'

                def __init__(self):
                    self.parent = None
                    self.mpls_enabled = None
                    self.name = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path + '/openconfig-mpls:config'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return True

                def _has_data(self):
                    if not self.is_config():
                        return False
                    return self.mpls_enabled is not None or self.name is not None

                @staticmethod
                def _meta_info():
                    from ydk.models.openconfig._meta import _openconfig_mpls as meta
                    return meta._meta_table['Mpls.Global.MplsInterfaceAttributes.Interface.Config']['meta_info']

            class State(object):
                """
                Operational state parameters related to TE interfaces.

                .. attribute:: mpls_enabled

                	Enable MPLS forwarding on this interface (bool)

                .. attribute:: name

                	Reference to the interface name

                """

                _prefix = 'mpls'
                _revision = '2015-11-05'

                def __init__(self):
                    self.parent = None
                    self.mpls_enabled = None
                    self.name = None

                @property
                def _common_path(self):
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    return self.parent._common_path + '/openconfig-mpls:state'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return False

                def _has_data(self):
                    if not self.is_config():
                        return False
                    return self.mpls_enabled is not None or self.name is not None

                @staticmethod
                def _meta_info():
                    from ydk.models.openconfig._meta import _openconfig_mpls as meta
                    return meta._meta_table['Mpls.Global.MplsInterfaceAttributes.Interface.State']['meta_info']

            @property
            def _common_path(self):
                if self.name is None:
                    raise YPYModelError('Key property name is None')
                return '/openconfig-mpls:mpls/openconfig-mpls:global/openconfig-mpls:mpls-interface-attributes/openconfig-mpls:interface[openconfig-mpls:name = ' + str(self.name) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.name is not None:
                    return True
                return ((self.config is not None and self.config._has_data()) or
                        (self.state is not None and self.state._has_data()))

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.Global.MplsInterfaceAttributes.Interface']['meta_info']

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:global/openconfig-mpls:mpls-interface-attributes'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            return self.interface is not None and any(child._has_data() for child in self.interface)

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.Global.MplsInterfaceAttributes']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:global'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        return ((self.config is not None and self.config._has_data()) or
                (self.mpls_interface_attributes is not None and self.mpls_interface_attributes._has_data()) or
                (self.state is not None and self.state._has_data()))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.Global']['meta_info']
class TeGlobalAttributes(object):
"""
traffic\-engineering global attributes
.. attribute:: igp_flooding_bandwidth
Interface bandwidth change percentages that trigger update events into the IGP traffic engineering database (TED)
**type**\: :py:class:`IgpFloodingBandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth>`
.. attribute:: mpls_admin_groups
Top\-level container for admin\-groups configuration and state
**type**\: :py:class:`MplsAdminGroups <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups>`
.. attribute:: srlg
Shared risk link groups attributes
**type**\: :py:class:`Srlg <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg>`
.. attribute:: te_lsp_timers
Definition for delays associated with setup and cleanup of TE LSPs
**type**\: :py:class:`TeLspTimers <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.TeLspTimers>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    self.parent = None
    self.igp_flooding_bandwidth = Mpls.TeGlobalAttributes.IgpFloodingBandwidth()
    self.mpls_admin_groups = Mpls.TeGlobalAttributes.MplsAdminGroups()
    self.srlg = Mpls.TeGlobalAttributes.Srlg()
    self.te_lsp_timers = Mpls.TeGlobalAttributes.TeLspTimers()
    # Wire every child container back to this object.
    for child in (self.igp_flooding_bandwidth, self.mpls_admin_groups,
                  self.srlg, self.te_lsp_timers):
        child.parent = self
class Srlg(object):
"""
Shared risk link groups attributes
.. attribute:: srlg
List of shared risk link groups
**type**\: list of :py:class:`Srlg <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.srlg = YList()
self.srlg.parent = self
self.srlg.name = 'srlg'
class Srlg(object):
"""
List of shared risk link groups
.. attribute:: name <key>
The SRLG group identifier
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.Config>`
.. attribute:: config
Configuration parameters related to the SRLG
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.Config>`
.. attribute:: state
State parameters related to the SRLG
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.State>`
.. attribute:: static_srlg_members
SRLG members for static (not flooded) SRLGs
**type**\: :py:class:`StaticSrlgMembers <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.name = None
self.config = Mpls.TeGlobalAttributes.Srlg.Srlg.Config()
self.config.parent = self
self.state = Mpls.TeGlobalAttributes.Srlg.Srlg.State()
self.state.parent = self
self.static_srlg_members = Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers()
self.static_srlg_members.parent = self
class Config(object):
"""
Configuration parameters related to the SRLG
.. attribute:: cost
The cost of the SRLG to the computation algorithm
**type**\: int
**range:** 0..4294967295
.. attribute:: flooding_type
The type of SRLG, either flooded in the IGP or statically configured
**type**\: :py:class:`MplsSrlgFloodingTypeEnum <ydk.models.openconfig.openconfig_mpls.MplsSrlgFloodingTypeEnum>`
.. attribute:: name
SRLG group identifier
**type**\: str
.. attribute:: value
group ID for the SRLG
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.cost = None
self.flooding_type = None
self.name = None
self.value = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-mpls:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.cost is not None:
return True
if self.flooding_type is not None:
return True
if self.name is not None:
return True
if self.value is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_mpls as meta
return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg.Config']['meta_info']
class State(object):
"""
State parameters related to the SRLG
.. attribute:: cost
The cost of the SRLG to the computation algorithm
**type**\: int
**range:** 0..4294967295
.. attribute:: flooding_type
The type of SRLG, either flooded in the IGP or statically configured
**type**\: :py:class:`MplsSrlgFloodingTypeEnum <ydk.models.openconfig.openconfig_mpls.MplsSrlgFloodingTypeEnum>`
.. attribute:: name
SRLG group identifier
**type**\: str
.. attribute:: value
group ID for the SRLG
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.cost = None
self.flooding_type = None
self.name = None
self.value = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-mpls:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.cost is not None:
return True
if self.flooding_type is not None:
return True
if self.name is not None:
return True
if self.value is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_mpls as meta
return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg.State']['meta_info']
class StaticSrlgMembers(object):
"""
SRLG members for static (not flooded) SRLGs
.. attribute:: members_list
List of SRLG members, which are expressed as IP address endpoints of links contained in the SRLG
**type**\: list of :py:class:`MembersList <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.members_list = YList()
self.members_list.parent = self
self.members_list.name = 'members_list'
class MembersList(object):
"""
List of SRLG members, which are expressed
as IP address endpoints of links contained in the
SRLG
.. attribute:: from_address <key>
The from address of the link in the SRLG
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: config
Configuration parameters relating to the SRLG members
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList.Config>`
.. attribute:: state
State parameters relating to the SRLG members
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.from_address = None
self.config = Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList.Config()
self.config.parent = self
self.state = Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList.State()
self.state.parent = self
class Config(object):
"""
Configuration parameters relating to the
SRLG members
.. attribute:: from_address
IP address of the a\-side of the SRLG link
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: to_address
IP address of the z\-side of the SRLG link
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.from_address = None
self.to_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-mpls:config'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.from_address is not None:
return True
if self.to_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_mpls as meta
return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList.Config']['meta_info']
class State(object):
"""
State parameters relating to the SRLG
members
.. attribute:: from_address
IP address of the a\-side of the SRLG link
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: to_address
IP address of the z\-side of the SRLG link
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
self.parent = None
self.from_address = None
self.to_address = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-mpls:state'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.from_address is not None:
return True
if self.to_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_mpls as meta
return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList.State']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
if self.from_address is None:
raise YPYModelError('Key property from_address is None')
return self.parent._common_path +'/openconfig-mpls:members-list[openconfig-mpls:from-address = ' + str(self.from_address) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.from_address is not None:
return True
if self.config is not None and self.config._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.openconfig._meta import _openconfig_mpls as meta
return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers.MembersList']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/openconfig-mpls:static-srlg-members'
def is_config(self):
    """Return True: this container represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.members_list is not None:
for child_ref in self.members_list:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import between the generated
    # bindings and their meta tables.
    from ydk.models.openconfig._meta import _openconfig_mpls as meta
    return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg.StaticSrlgMembers']['meta_info']
@property
def _common_path(self):
if self.name is None:
raise YPYModelError('Key property name is None')
return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:srlg/openconfig-mpls:srlg[openconfig-mpls:name = ' + str(self.name) + ']'
def is_config(self):
    """Return True: this list entry represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
if self.config is not None and self.config._has_data():
return True
if self.state is not None and self.state._has_data():
return True
if self.static_srlg_members is not None and self.static_srlg_members._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import between the generated
    # bindings and their meta tables.
    from ydk.models.openconfig._meta import _openconfig_mpls as meta
    return meta._meta_table['Mpls.TeGlobalAttributes.Srlg.Srlg']['meta_info']
@property
def _common_path(self):
return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:srlg'
def is_config(self):
    """Return True: this container represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.srlg is not None:
for child_ref in self.srlg:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import between the generated
    # bindings and their meta tables.
    from ydk.models.openconfig._meta import _openconfig_mpls as meta
    return meta._meta_table['Mpls.TeGlobalAttributes.Srlg']['meta_info']
class IgpFloodingBandwidth(object):
    """
    Interface bandwidth change percentages
    that trigger update events into the IGP traffic
    engineering database (TED)
    .. attribute:: config
    Configuration parameters for TED update threshold
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config>`
    .. attribute:: state
    State parameters for TED update threshold
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Child containers are created eagerly and back-linked via ``parent``
        # so _common_path can be derived by walking up the tree.
        self.parent = None
        self.config = Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config()
        self.config.parent = self
        self.state = Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration parameters for TED
        update threshold
        .. attribute:: delta_percentage
        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded
        **type**\: int
        **range:** 0..100
        .. attribute:: down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: threshold_specification
        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified
        **type**\: :py:class:`ThresholdSpecificationEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config.ThresholdSpecificationEnum>`
        .. attribute:: threshold_type
        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values
        **type**\: :py:class:`ThresholdTypeEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config.ThresholdTypeEnum>`
        .. attribute:: up_down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: up_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing
        **type**\: list of int
        **range:** 0..100
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.delta_percentage = None
            # YLeafList couples leaf-list values with their YANG leaf name.
            self.down_thresholds = YLeafList()
            self.down_thresholds.parent = self
            self.down_thresholds.name = 'down_thresholds'
            self.threshold_specification = None
            self.threshold_type = None
            self.up_down_thresholds = YLeafList()
            self.up_down_thresholds.parent = self
            self.up_down_thresholds.name = 'up_down_thresholds'
            self.up_thresholds = YLeafList()
            self.up_thresholds.parent = self
            self.up_thresholds.name = 'up_thresholds'

        class ThresholdSpecificationEnum(Enum):
            """
            ThresholdSpecificationEnum
            This value specifies whether a single set of threshold
            values should be used for both increasing and decreasing
            bandwidth when determining whether to trigger updated
            bandwidth values to be flooded in the IGP TE extensions.
            MIRRORED\-UP\-DOWN indicates that a single value (or set of
            values) should be used for both increasing and decreasing
            values, where SEPARATE\-UP\-DOWN specifies that the increasing
            and decreasing values will be separately specified
            .. data:: MIRRORED_UP_DOWN = 0
            MIRRORED-UP-DOWN indicates that a single set of
            threshold values should be used for both increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            .. data:: SEPARATE_UP_DOWN = 1
            SEPARATE-UP-DOWN indicates that a separate
            threshold values should be used for the increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            """

            MIRRORED_UP_DOWN = 0
            SEPARATE_UP_DOWN = 1

            @staticmethod
            def _meta_info():
                # Lazy import avoids a circular dependency with the meta tables.
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config.ThresholdSpecificationEnum']

        class ThresholdTypeEnum(Enum):
            """
            ThresholdTypeEnum
            The type of threshold that should be used to specify the
            values at which bandwidth is flooded. DELTA indicates that
            the local system should flood IGP updates when a change in
            reserved bandwidth >= the specified delta occurs on the
            interface. Where THRESHOLD\-CROSSED is specified, the local
            system should trigger an update (and hence flood) the
            reserved bandwidth when the reserved bandwidth changes such
            that it crosses, or becomes equal to one of the threshold
            values
            .. data:: DELTA = 0
            DELTA indicates that the local
            system should flood IGP updates when a
            change in reserved bandwidth >= the specified
            delta occurs on the interface.
            .. data:: THRESHOLD_CROSSED = 1
            THRESHOLD-CROSSED indicates that
            the local system should trigger an update (and
            hence flood) the reserved bandwidth when the
            reserved bandwidth changes such that it crosses,
            or becomes equal to one of the threshold values.
            """

            DELTA = 0
            THRESHOLD_CROSSED = 1

            @staticmethod
            def _meta_info():
                # Lazy import avoids a circular dependency with the meta tables.
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config.ThresholdTypeEnum']

        @property
        def _common_path(self):
            # Absolute path: this container hangs off fixed (keyless) parents.
            return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:igp-flooding-bandwidth/openconfig-mpls:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # A leaf-list counts as data as soon as it holds one non-None entry.
            if not self.is_config():
                return False
            if self.delta_percentage is not None:
                return True
            if self.down_thresholds is not None:
                for child in self.down_thresholds:
                    if child is not None:
                        return True
            if self.threshold_specification is not None:
                return True
            if self.threshold_type is not None:
                return True
            if self.up_down_thresholds is not None:
                for child in self.up_down_thresholds:
                    if child is not None:
                        return True
            if self.up_thresholds is not None:
                for child in self.up_thresholds:
                    if child is not None:
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth.Config']['meta_info']

    class State(object):
        """
        State parameters for TED update threshold
        .. attribute:: delta_percentage
        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded
        **type**\: int
        **range:** 0..100
        .. attribute:: down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: threshold_specification
        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified
        **type**\: :py:class:`ThresholdSpecificationEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State.ThresholdSpecificationEnum>`
        .. attribute:: threshold_type
        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values
        **type**\: :py:class:`ThresholdTypeEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State.ThresholdTypeEnum>`
        .. attribute:: up_down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: up_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing
        **type**\: list of int
        **range:** 0..100
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.delta_percentage = None
            # YLeafList couples leaf-list values with their YANG leaf name.
            self.down_thresholds = YLeafList()
            self.down_thresholds.parent = self
            self.down_thresholds.name = 'down_thresholds'
            self.threshold_specification = None
            self.threshold_type = None
            self.up_down_thresholds = YLeafList()
            self.up_down_thresholds.parent = self
            self.up_down_thresholds.name = 'up_down_thresholds'
            self.up_thresholds = YLeafList()
            self.up_thresholds.parent = self
            self.up_thresholds.name = 'up_thresholds'

        class ThresholdSpecificationEnum(Enum):
            """
            ThresholdSpecificationEnum
            This value specifies whether a single set of threshold
            values should be used for both increasing and decreasing
            bandwidth when determining whether to trigger updated
            bandwidth values to be flooded in the IGP TE extensions.
            MIRRORED\-UP\-DOWN indicates that a single value (or set of
            values) should be used for both increasing and decreasing
            values, where SEPARATE\-UP\-DOWN specifies that the increasing
            and decreasing values will be separately specified
            .. data:: MIRRORED_UP_DOWN = 0
            MIRRORED-UP-DOWN indicates that a single set of
            threshold values should be used for both increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            .. data:: SEPARATE_UP_DOWN = 1
            SEPARATE-UP-DOWN indicates that a separate
            threshold values should be used for the increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            """

            MIRRORED_UP_DOWN = 0
            SEPARATE_UP_DOWN = 1

            @staticmethod
            def _meta_info():
                # Lazy import avoids a circular dependency with the meta tables.
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State.ThresholdSpecificationEnum']

        class ThresholdTypeEnum(Enum):
            """
            ThresholdTypeEnum
            The type of threshold that should be used to specify the
            values at which bandwidth is flooded. DELTA indicates that
            the local system should flood IGP updates when a change in
            reserved bandwidth >= the specified delta occurs on the
            interface. Where THRESHOLD\-CROSSED is specified, the local
            system should trigger an update (and hence flood) the
            reserved bandwidth when the reserved bandwidth changes such
            that it crosses, or becomes equal to one of the threshold
            values
            .. data:: DELTA = 0
            DELTA indicates that the local
            system should flood IGP updates when a
            change in reserved bandwidth >= the specified
            delta occurs on the interface.
            .. data:: THRESHOLD_CROSSED = 1
            THRESHOLD-CROSSED indicates that
            the local system should trigger an update (and
            hence flood) the reserved bandwidth when the
            reserved bandwidth changes such that it crosses,
            or becomes equal to one of the threshold values.
            """

            DELTA = 0
            THRESHOLD_CROSSED = 1

            @staticmethod
            def _meta_info():
                # Lazy import avoids a circular dependency with the meta tables.
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State.ThresholdTypeEnum']

        @property
        def _common_path(self):
            # Absolute path: this container hangs off fixed (keyless) parents.
            return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:igp-flooding-bandwidth/openconfig-mpls:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # NOTE(review): because is_config() is False here, this method
            # always returns False -- matches the generator's pattern for
            # state containers.
            if not self.is_config():
                return False
            if self.delta_percentage is not None:
                return True
            if self.down_thresholds is not None:
                for child in self.down_thresholds:
                    if child is not None:
                        return True
            if self.threshold_specification is not None:
                return True
            if self.threshold_type is not None:
                return True
            if self.up_down_thresholds is not None:
                for child in self.up_down_thresholds:
                    if child is not None:
                        return True
            if self.up_thresholds is not None:
                for child in self.up_thresholds:
                    if child is not None:
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:igp-flooding-bandwidth'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.TeGlobalAttributes.IgpFloodingBandwidth']['meta_info']
class MplsAdminGroups(object):
    """
    Top\-level container for admin\-groups configuration
    and state
    .. attribute:: admin_group
    configuration of value to name mapping for mpls affinities/admin\-groups
    **type**\: list of :py:class:`AdminGroup <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None
        # YList of AdminGroup entries; name/parent wiring lets YDK resolve
        # the list's YANG path segment.
        self.admin_group = YList()
        self.admin_group.parent = self
        self.admin_group.name = 'admin_group'

    class AdminGroup(object):
        """
        configuration of value to name mapping
        for mpls affinities/admin\-groups
        .. attribute:: admin_group_name <key>
        name for mpls admin\-group
        **type**\: str
        **refers to**\: :py:class:`admin_group_name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config>`
        .. attribute:: config
        Configurable items for admin\-groups
        **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config>`
        .. attribute:: state
        Operational state for admin\-groups
        **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            # List key; must be set before _common_path can be derived.
            self.admin_group_name = None
            self.config = Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config()
            self.config.parent = self
            self.state = Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State()
            self.state.parent = self

        class Config(object):
            """
            Configurable items for admin\-groups
            .. attribute:: admin_group_name
            name for mpls admin\-group
            **type**\: str
            .. attribute:: bit_position
            bit\-position value for mpls admin\-group. The value for the admin group is an integer that represents one of the bit positions in the admin\-group bitmask. Values between 0 and 31 are interpreted as the original limit of 32 admin groups. Values >=32 are interpreted as extended admin group values as per RFC7308
            **type**\: int
            **range:** 0..4294967295
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                self.parent = None
                self.admin_group_name = None
                self.bit_position = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/openconfig-mpls:config'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.admin_group_name is not None:
                    return True
                if self.bit_position is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.Config']['meta_info']

        class State(object):
            """
            Operational state for admin\-groups
            .. attribute:: admin_group_name
            name for mpls admin\-group
            **type**\: str
            .. attribute:: bit_position
            bit\-position value for mpls admin\-group. The value for the admin group is an integer that represents one of the bit positions in the admin\-group bitmask. Values between 0 and 31 are interpreted as the original limit of 32 admin groups. Values >=32 are interpreted as extended admin group values as per RFC7308
            **type**\: int
            **range:** 0..4294967295
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                self.parent = None
                self.admin_group_name = None
                self.bit_position = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/openconfig-mpls:state'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                # NOTE(review): returns True although this is the operational
                # state container; sibling State classes in this module return
                # False. Preserved as generated -- confirm against the YDK
                # generator output for openconfig-mpls 2015-11-05.
                return True

            def _has_data(self):
                if not self.is_config():
                    return False
                if self.admin_group_name is not None:
                    return True
                if self.bit_position is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup.State']['meta_info']

        @property
        def _common_path(self):
            # Keyed list path; requires the admin_group_name key to be set.
            if self.admin_group_name is None:
                raise YPYModelError('Key property admin_group_name is None')
            return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:mpls-admin-groups/openconfig-mpls:admin-group[openconfig-mpls:admin-group-name = ' + str(self.admin_group_name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.admin_group_name is not None:
                return True
            if self.config is not None and self.config._has_data():
                return True
            if self.state is not None and self.state._has_data():
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeGlobalAttributes.MplsAdminGroups.AdminGroup']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:mpls-admin-groups'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.admin_group is not None:
            for child_ref in self.admin_group:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.TeGlobalAttributes.MplsAdminGroups']['meta_info']
class TeLspTimers(object):
    """
    Definition for delays associated with setup
    and cleanup of TE LSPs
    .. attribute:: config
    Configuration parameters related to timers for TE LSPs
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.TeLspTimers.Config>`
    .. attribute:: state
    State related to timers for TE LSPs
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.TeLspTimers.State>`
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Child containers are created eagerly and back-linked via ``parent``.
        self.parent = None
        self.config = Mpls.TeGlobalAttributes.TeLspTimers.Config()
        self.config.parent = self
        self.state = Mpls.TeGlobalAttributes.TeLspTimers.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration parameters related
        to timers for TE LSPs
        .. attribute:: cleanup_delay
        delay the removal of old te lsp for a specified amount of time
        **type**\: int
        **range:** 0..65535
        .. attribute:: install_delay
        delay the use of newly installed te lsp for a specified amount of time
        **type**\: int
        **range:** 0..3600
        .. attribute:: reoptimize_timer
        frequency of reoptimization of a traffic engineered LSP
        **type**\: int
        **range:** 0..65535
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.cleanup_delay = None
            self.install_delay = None
            self.reoptimize_timer = None

        @property
        def _common_path(self):
            # Absolute path: this container hangs off fixed (keyless) parents.
            return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:te-lsp-timers/openconfig-mpls:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            if not self.is_config():
                return False
            if self.cleanup_delay is not None:
                return True
            if self.install_delay is not None:
                return True
            if self.reoptimize_timer is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeGlobalAttributes.TeLspTimers.Config']['meta_info']

    class State(object):
        """
        State related to timers for TE LSPs
        .. attribute:: cleanup_delay
        delay the removal of old te lsp for a specified amount of time
        **type**\: int
        **range:** 0..65535
        .. attribute:: install_delay
        delay the use of newly installed te lsp for a specified amount of time
        **type**\: int
        **range:** 0..3600
        .. attribute:: reoptimize_timer
        frequency of reoptimization of a traffic engineered LSP
        **type**\: int
        **range:** 0..65535
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.cleanup_delay = None
            self.install_delay = None
            self.reoptimize_timer = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:te-lsp-timers/openconfig-mpls:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # NOTE(review): is_config() is False here, so this always returns
            # False -- matches the generator's pattern for state containers.
            if not self.is_config():
                return False
            if self.cleanup_delay is not None:
                return True
            if self.install_delay is not None:
                return True
            if self.reoptimize_timer is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeGlobalAttributes.TeLspTimers.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes/openconfig-mpls:te-lsp-timers'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        if not self.is_config():
            return False
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.TeGlobalAttributes.TeLspTimers']['meta_info']
@property
def _common_path(self):
return '/openconfig-mpls:mpls/openconfig-mpls:te-global-attributes'
def is_config(self):
    """Return True: this container represents configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.igp_flooding_bandwidth is not None and self.igp_flooding_bandwidth._has_data():
return True
if self.mpls_admin_groups is not None and self.mpls_admin_groups._has_data():
return True
if self.srlg is not None and self.srlg._has_data():
return True
if self.te_lsp_timers is not None and self.te_lsp_timers._has_data():
return True
return False
@staticmethod
def _meta_info():
    # Imported lazily to avoid a circular import between the generated
    # bindings and their meta tables.
    from ydk.models.openconfig._meta import _openconfig_mpls as meta
    return meta._meta_table['Mpls.TeGlobalAttributes']['meta_info']
class TeInterfaceAttributes(object):
"""
traffic engineering attributes specific
for interfaces
.. attribute:: interface
List of TE interfaces
**type**\: list of :py:class:`Interface <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    # ``parent`` is assigned by the enclosing Mpls container.
    self.parent = None
    # YList of TE Interface entries; name/parent wiring lets YDK resolve
    # the list's YANG path segment.
    self.interface = YList()
    self.interface.parent = self
    self.interface.name = 'interface'
class Interface(object):
"""
List of TE interfaces
.. attribute:: name <key>
The interface name
**type**\: str
**refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.Config>`
.. attribute:: config
Configuration parameters related to TE interfaces\:
**type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.Config>`
.. attribute:: igp_flooding_bandwidth
Interface bandwidth change percentages that trigger update events into the IGP traffic engineering database (TED)
**type**\: :py:class:`IgpFloodingBandwidth <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth>`
.. attribute:: state
State parameters related to TE interfaces
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    self.parent = None
    # List key; must be set before the keyed path can be derived.
    self.name = None
    # Child containers are created eagerly and back-linked via ``parent``.
    self.config = Mpls.TeInterfaceAttributes.Interface.Config()
    self.config.parent = self
    self.igp_flooding_bandwidth = Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth()
    self.igp_flooding_bandwidth.parent = self
    self.state = Mpls.TeInterfaceAttributes.Interface.State()
    self.state.parent = self
class Config(object):
    """
    Configuration parameters related to TE interfaces\:
    .. attribute:: admin_group
    list of admin groups (by name) on the interface
    **type**\: list of str
    .. attribute:: name
    reference to interface name
    **type**\: str
    **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
    .. attribute:: srlg_membership
    list of references to named shared risk link groups that the interface belongs to
    **type**\: list of str
    **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg>`
    .. attribute:: te_metric
    TE specific metric for the link
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None
        # YLeafList couples leaf-list values with their YANG leaf name.
        self.admin_group = YLeafList()
        self.admin_group.parent = self
        self.admin_group.name = 'admin_group'
        self.name = None
        self.srlg_membership = YLeafList()
        self.srlg_membership.parent = self
        self.srlg_membership.name = 'srlg_membership'
        self.te_metric = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-mpls:config'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # A leaf-list counts as data as soon as it holds one non-None entry.
        if not self.is_config():
            return False
        if self.admin_group is not None:
            for child in self.admin_group:
                if child is not None:
                    return True
        if self.name is not None:
            return True
        if self.srlg_membership is not None:
            for child in self.srlg_membership:
                if child is not None:
                    return True
        if self.te_metric is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.Config']['meta_info']
class State(object):
    """
    State parameters related to TE interfaces
    .. attribute:: admin_group
    list of admin groups (by name) on the interface
    **type**\: list of str
    .. attribute:: name
    reference to interface name
    **type**\: str
    **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_interfaces.Interfaces.Interface>`
    .. attribute:: srlg_membership
    list of references to named shared risk link groups that the interface belongs to
    **type**\: list of str
    **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.TeGlobalAttributes.Srlg.Srlg>`
    .. attribute:: te_metric
    TE specific metric for the link
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None
        # YLeafList couples leaf-list values with their YANG leaf name.
        self.admin_group = YLeafList()
        self.admin_group.parent = self
        self.admin_group.name = 'admin_group'
        self.name = None
        self.srlg_membership = YLeafList()
        self.srlg_membership.parent = self
        self.srlg_membership.name = 'srlg_membership'
        self.te_metric = None

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-mpls:state'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # NOTE(review): is_config() is False here, so this always returns
        # False -- matches the generator's pattern for state containers.
        if not self.is_config():
            return False
        if self.admin_group is not None:
            for child in self.admin_group:
                if child is not None:
                    return True
        if self.name is not None:
            return True
        if self.srlg_membership is not None:
            for child in self.srlg_membership:
                if child is not None:
                    return True
        if self.te_metric is not None:
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.State']['meta_info']
class IgpFloodingBandwidth(object):
    """
    Interface bandwidth change percentages
    that trigger update events into the IGP traffic
    engineering database (TED)
    .. attribute:: config
    Configuration parameters for TED update threshold
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config>`
    .. attribute:: state
    State parameters for TED update threshold
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State>`
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Child containers are created eagerly and linked back to this
        # node so their _common_path can be derived from the tree.
        self.parent = None
        self.config = Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config()
        self.config.parent = self
        self.state = Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration parameters for TED
        update threshold
        .. attribute:: delta_percentage
        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded
        **type**\: int
        **range:** 0..100
        .. attribute:: down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: threshold_specification
        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified
        **type**\: :py:class:`ThresholdSpecificationEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config.ThresholdSpecificationEnum>`
        .. attribute:: threshold_type
        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values
        **type**\: :py:class:`ThresholdTypeEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config.ThresholdTypeEnum>`
        .. attribute:: up_down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: up_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing
        **type**\: list of int
        **range:** 0..100
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            # Scalar leaves default to None; leaf-lists are parent-linked
            # YLeafList containers named after their YANG leaf-list.
            self.delta_percentage = None
            self.down_thresholds = YLeafList()
            self.down_thresholds.parent = self
            self.down_thresholds.name = 'down_thresholds'
            self.threshold_specification = None
            self.threshold_type = None
            self.up_down_thresholds = YLeafList()
            self.up_down_thresholds.parent = self
            self.up_down_thresholds.name = 'up_down_thresholds'
            self.up_thresholds = YLeafList()
            self.up_thresholds.parent = self
            self.up_thresholds.name = 'up_thresholds'

        class ThresholdSpecificationEnum(Enum):
            """
            ThresholdSpecificationEnum
            This value specifies whether a single set of threshold
            values should be used for both increasing and decreasing
            bandwidth when determining whether to trigger updated
            bandwidth values to be flooded in the IGP TE extensions.
            MIRRORED\-UP\-DOWN indicates that a single value (or set of
            values) should be used for both increasing and decreasing
            values, where SEPARATE\-UP\-DOWN specifies that the increasing
            and decreasing values will be separately specified
            .. data:: MIRRORED_UP_DOWN = 0
            MIRRORED-UP-DOWN indicates that a single set of
            threshold values should be used for both increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            .. data:: SEPARATE_UP_DOWN = 1
            SEPARATE-UP-DOWN indicates that a separate
            threshold values should be used for the increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            """

            MIRRORED_UP_DOWN = 0
            SEPARATE_UP_DOWN = 1

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config.ThresholdSpecificationEnum']

        class ThresholdTypeEnum(Enum):
            """
            ThresholdTypeEnum
            The type of threshold that should be used to specify the
            values at which bandwidth is flooded. DELTA indicates that
            the local system should flood IGP updates when a change in
            reserved bandwidth >= the specified delta occurs on the
            interface. Where THRESHOLD\-CROSSED is specified, the local
            system should trigger an update (and hence flood) the
            reserved bandwidth when the reserved bandwidth changes such
            that it crosses, or becomes equal to one of the threshold
            values
            .. data:: DELTA = 0
            DELTA indicates that the local
            system should flood IGP updates when a
            change in reserved bandwidth >= the specified
            delta occurs on the interface.
            .. data:: THRESHOLD_CROSSED = 1
            THRESHOLD-CROSSED indicates that
            the local system should trigger an update (and
            hence flood) the reserved bandwidth when the
            reserved bandwidth changes such that it crosses,
            or becomes equal to one of the threshold values.
            """

            DELTA = 0
            THRESHOLD_CROSSED = 1

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config.ThresholdTypeEnum']

        @property
        def _common_path(self):
            # Path is anchored on the parent; a detached node has no path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/openconfig-mpls:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # True when any leaf is set or any leaf-list has a non-None entry.
            if not self.is_config():
                return False
            if self.delta_percentage is not None:
                return True
            if self.down_thresholds is not None:
                for child in self.down_thresholds:
                    if child is not None:
                        return True
            if self.threshold_specification is not None:
                return True
            if self.threshold_type is not None:
                return True
            if self.up_down_thresholds is not None:
                for child in self.up_down_thresholds:
                    if child is not None:
                        return True
            if self.up_thresholds is not None:
                for child in self.up_thresholds:
                    if child is not None:
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.Config']['meta_info']

    class State(object):
        """
        State parameters for TED update threshold
        .. attribute:: delta_percentage
        The percentage of the maximum\-reservable\-bandwidth considered as the delta that results in an IGP update being flooded
        **type**\: int
        **range:** 0..100
        .. attribute:: down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: threshold_specification
        This value specifies whether a single set of threshold values should be used for both increasing and decreasing bandwidth when determining whether to trigger updated bandwidth values to be flooded in the IGP TE extensions. MIRRORED\-UP\-DOWN indicates that a single value (or set of values) should be used for both increasing and decreasing values, where SEPARATE\-UP\-DOWN specifies that the increasing and decreasing values will be separately specified
        **type**\: :py:class:`ThresholdSpecificationEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State.ThresholdSpecificationEnum>`
        .. attribute:: threshold_type
        The type of threshold that should be used to specify the values at which bandwidth is flooded. DELTA indicates that the local system should flood IGP updates when a change in reserved bandwidth >= the specified delta occurs on the interface. Where THRESHOLD\-CROSSED is specified, the local system should trigger an update (and hence flood) the reserved bandwidth when the reserved bandwidth changes such that it crosses, or becomes equal to one of the threshold values
        **type**\: :py:class:`ThresholdTypeEnum <ydk.models.openconfig.openconfig_mpls.Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State.ThresholdTypeEnum>`
        .. attribute:: up_down_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth of the interface) at which bandwidth updates are flooded \- used both when the bandwidth is increasing and decreasing
        **type**\: list of int
        **range:** 0..100
        .. attribute:: up_thresholds
        The thresholds (expressed as a percentage of the maximum reservable bandwidth) at which bandwidth updates are to be triggered when the bandwidth is increasing
        **type**\: list of int
        **range:** 0..100
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            # Same leaf layout as Config; this container holds the
            # operational (read-only) view of the thresholds.
            self.delta_percentage = None
            self.down_thresholds = YLeafList()
            self.down_thresholds.parent = self
            self.down_thresholds.name = 'down_thresholds'
            self.threshold_specification = None
            self.threshold_type = None
            self.up_down_thresholds = YLeafList()
            self.up_down_thresholds.parent = self
            self.up_down_thresholds.name = 'up_down_thresholds'
            self.up_thresholds = YLeafList()
            self.up_thresholds.parent = self
            self.up_thresholds.name = 'up_thresholds'

        class ThresholdSpecificationEnum(Enum):
            """
            ThresholdSpecificationEnum
            This value specifies whether a single set of threshold
            values should be used for both increasing and decreasing
            bandwidth when determining whether to trigger updated
            bandwidth values to be flooded in the IGP TE extensions.
            MIRRORED\-UP\-DOWN indicates that a single value (or set of
            values) should be used for both increasing and decreasing
            values, where SEPARATE\-UP\-DOWN specifies that the increasing
            and decreasing values will be separately specified
            .. data:: MIRRORED_UP_DOWN = 0
            MIRRORED-UP-DOWN indicates that a single set of
            threshold values should be used for both increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            .. data:: SEPARATE_UP_DOWN = 1
            SEPARATE-UP-DOWN indicates that a separate
            threshold values should be used for the increasing
            and decreasing bandwidth when determining whether
            to trigger updated bandwidth values to be flooded
            in the IGP TE extensions.
            """

            MIRRORED_UP_DOWN = 0
            SEPARATE_UP_DOWN = 1

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State.ThresholdSpecificationEnum']

        class ThresholdTypeEnum(Enum):
            """
            ThresholdTypeEnum
            The type of threshold that should be used to specify the
            values at which bandwidth is flooded. DELTA indicates that
            the local system should flood IGP updates when a change in
            reserved bandwidth >= the specified delta occurs on the
            interface. Where THRESHOLD\-CROSSED is specified, the local
            system should trigger an update (and hence flood) the
            reserved bandwidth when the reserved bandwidth changes such
            that it crosses, or becomes equal to one of the threshold
            values
            .. data:: DELTA = 0
            DELTA indicates that the local
            system should flood IGP updates when a
            change in reserved bandwidth >= the specified
            delta occurs on the interface.
            .. data:: THRESHOLD_CROSSED = 1
            THRESHOLD-CROSSED indicates that
            the local system should trigger an update (and
            hence flood) the reserved bandwidth when the
            reserved bandwidth changes such that it crosses,
            or becomes equal to one of the threshold values.
            """

            DELTA = 0
            THRESHOLD_CROSSED = 1

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State.ThresholdTypeEnum']

        @property
        def _common_path(self):
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/openconfig-mpls:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # is_config() is False for this state node, so the guard below
            # always returns False here.
            if not self.is_config():
                return False
            if self.delta_percentage is not None:
                return True
            if self.down_thresholds is not None:
                for child in self.down_thresholds:
                    if child is not None:
                        return True
            if self.threshold_specification is not None:
                return True
            if self.threshold_type is not None:
                return True
            if self.up_down_thresholds is not None:
                for child in self.up_down_thresholds:
                    if child is not None:
                        return True
            if self.up_thresholds is not None:
                for child in self.up_thresholds:
                    if child is not None:
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth.State']['meta_info']

    @property
    def _common_path(self):
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/openconfig-mpls:igp-flooding-bandwidth'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Container has data when either child container has data.
        if not self.is_config():
            return False
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.TeInterfaceAttributes.Interface.IgpFloodingBandwidth']['meta_info']
@property
def _common_path(self):
if self.name is None:
raise YPYModelError('Key property name is None')
return '/openconfig-mpls:mpls/openconfig-mpls:te-interface-attributes/openconfig-mpls:interface[openconfig-mpls:name = ' + str(self.name) + ']'
def is_config(self):
    """Return True: this node carries configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.name is not None:
return True
if self.config is not None and self.config._has_data():
return True
if self.igp_flooding_bandwidth is not None and self.igp_flooding_bandwidth._has_data():
return True
if self.state is not None and self.state._has_data():
return True
return False
@staticmethod
def _meta_info():
    """Return the generated metadata entry for this class."""
    from ydk.models.openconfig._meta import _openconfig_mpls as meta_module
    entry = meta_module._meta_table['Mpls.TeInterfaceAttributes.Interface']
    return entry['meta_info']
@property
def _common_path(self):
return '/openconfig-mpls:mpls/openconfig-mpls:te-interface-attributes'
def is_config(self):
    """Return True: this node carries configuration data."""
    return True
def _has_data(self):
if not self.is_config():
return False
if self.interface is not None:
for child_ref in self.interface:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
    """Return the generated metadata entry for this class."""
    from ydk.models.openconfig._meta import _openconfig_mpls as meta_module
    entry = meta_module._meta_table['Mpls.TeInterfaceAttributes']
    return entry['meta_info']
class SignalingProtocols(object):
"""
top\-level signaling protocol configuration
.. attribute:: ldp
LDP global signaling configuration
**type**\: :py:class:`Ldp <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.Ldp>`
.. attribute:: rsvp_te
RSVP\-TE global signaling protocol configuration
**type**\: :py:class:`RsvpTe <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe>`
.. attribute:: segment_routing
SR global signaling config
**type**\: :py:class:`SegmentRouting <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.SegmentRouting>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Create the per-protocol child containers and link each back to self."""
    self.parent = None
    for attr_name, child_cls in (
            ('ldp', Mpls.SignalingProtocols.Ldp),
            ('rsvp_te', Mpls.SignalingProtocols.RsvpTe),
            ('segment_routing', Mpls.SignalingProtocols.SegmentRouting),
    ):
        child = child_cls()
        child.parent = self
        setattr(self, attr_name, child)
class RsvpTe(object):
"""
RSVP\-TE global signaling protocol configuration
.. attribute:: global_
Platform wide RSVP configuration and state
**type**\: :py:class:`Global <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global>`
.. attribute:: interface_attributes
Attributes relating to RSVP\-TE enabled interfaces
**type**\: :py:class:`InterfaceAttributes <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes>`
.. attribute:: neighbors
Configuration and state for RSVP neighbors connecting to the device
**type**\: :py:class:`Neighbors <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors>`
.. attribute:: sessions
Configuration and state of RSVP sessions
**type**\: :py:class:`Sessions <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Create the RSVP-TE child containers and link each back to self."""
    self.parent = None
    for attr_name, child_cls in (
            ('global_', Mpls.SignalingProtocols.RsvpTe.Global),
            ('interface_attributes', Mpls.SignalingProtocols.RsvpTe.InterfaceAttributes),
            ('neighbors', Mpls.SignalingProtocols.RsvpTe.Neighbors),
            ('sessions', Mpls.SignalingProtocols.RsvpTe.Sessions),
    ):
        child = child_cls()
        child.parent = self
        setattr(self, attr_name, child)
class Sessions(object):
    """
    Configuration and state of RSVP sessions
    .. attribute:: config
    Configuration of RSVP sessions on the device
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.Config>`
    .. attribute:: state
    State information relating to RSVP sessions on the device
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State>`
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Child containers are created eagerly and parent-linked so
        # their _common_path can be derived from the tree.
        self.parent = None
        self.config = Mpls.SignalingProtocols.RsvpTe.Sessions.Config()
        self.config.parent = self
        self.state = Mpls.SignalingProtocols.RsvpTe.Sessions.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration of RSVP sessions on the device
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            # This container has no leaves of its own in the model.
            self.parent = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:sessions/openconfig-mpls:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # No leaves are modeled here, so there is never data to report.
            if not self.is_config():
                return False
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Sessions.Config']['meta_info']

    class State(object):
        """
        State information relating to RSVP sessions
        on the device
        .. attribute:: session
        List of RSVP sessions
        **type**\: list of :py:class:`Session <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            # YList of Session entries, parent-linked for path derivation.
            self.session = YList()
            self.session.parent = self
            self.session.name = 'session'

        class Session(object):
            """
            List of RSVP sessions
            .. attribute:: destination_address <key>
            Destination address of RSVP session
            **type**\: one of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            ----
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
            ----
            .. attribute:: destination_port <key>
            RSVP destination port
            **type**\: int
            **range:** 0..65535
            .. attribute:: source_address <key>
            Origin address of RSVP session
            **type**\: one of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            ----
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
            ----
            .. attribute:: source_port <key>
            RSVP source port
            **type**\: int
            **range:** 0..65535
            .. attribute:: associated_lsps
            List of label switched paths associated with this RSVP session
            **type**\: list of str
            **refers to**\: :py:class:`name <ydk.models.openconfig.openconfig_mpls.Mpls.Lsps.ConstrainedPath.Tunnel.Config>`
            .. attribute:: label_in
            Incoming MPLS label associated with this RSVP session
            **type**\: one of the below types:
            **type**\: int
            **range:** 16..1048575
            ----
            **type**\: :py:class:`MplsLabelEnum <ydk.models.openconfig.openconfig_mpls_types.MplsLabelEnum>`
            ----
            .. attribute:: label_out
            Outgoing MPLS label associated with this RSVP session
            **type**\: one of the below types:
            **type**\: int
            **range:** 16..1048575
            ----
            **type**\: :py:class:`MplsLabelEnum <ydk.models.openconfig.openconfig_mpls_types.MplsLabelEnum>`
            ----
            .. attribute:: status
            Enumeration of RSVP session states
            **type**\: :py:class:`StatusEnum <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session.StatusEnum>`
            .. attribute:: tunnel_id
            Unique identifier of RSVP session
            **type**\: int
            **range:** 0..65535
            .. attribute:: type
            Enumeration of possible RSVP session types
            **type**\: :py:class:`TypeEnum <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session.TypeEnum>`
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                self.parent = None
                # The four list keys: addresses + ports identify a session.
                self.destination_address = None
                self.destination_port = None
                self.source_address = None
                self.source_port = None
                self.associated_lsps = YLeafList()
                self.associated_lsps.parent = self
                self.associated_lsps.name = 'associated_lsps'
                self.label_in = None
                self.label_out = None
                self.status = None
                self.tunnel_id = None
                self.type = None

            class StatusEnum(Enum):
                """
                StatusEnum
                Enumeration of RSVP session states
                .. data:: UP = 0
                RSVP session is up
                .. data:: DOWN = 1
                RSVP session is down
                """

                UP = 0
                DOWN = 1

                @staticmethod
                def _meta_info():
                    from ydk.models.openconfig._meta import _openconfig_mpls as meta
                    return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session.StatusEnum']

            class TypeEnum(Enum):
                """
                TypeEnum
                Enumeration of possible RSVP session types
                .. data:: SOURCE = 0
                RSVP session originates on this device
                .. data:: TRANSIT = 1
                RSVP session transits this device only
                .. data:: DESTINATION = 2
                RSVP session terminates on this device
                """

                SOURCE = 0
                TRANSIT = 1
                DESTINATION = 2

                @staticmethod
                def _meta_info():
                    from ydk.models.openconfig._meta import _openconfig_mpls as meta
                    return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session.TypeEnum']

            @property
            def _common_path(self):
                # All four list keys must be present to build the entry path.
                if self.destination_address is None:
                    raise YPYModelError('Key property destination_address is None')
                if self.destination_port is None:
                    raise YPYModelError('Key property destination_port is None')
                if self.source_address is None:
                    raise YPYModelError('Key property source_address is None')
                if self.source_port is None:
                    raise YPYModelError('Key property source_port is None')
                return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:sessions/openconfig-mpls:state/openconfig-mpls:session[openconfig-mpls:destination-address = ' + str(self.destination_address) + '][openconfig-mpls:destination-port = ' + str(self.destination_port) + '][openconfig-mpls:source-address = ' + str(self.source_address) + '][openconfig-mpls:source-port = ' + str(self.source_port) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                # is_config() is False for this state node, so the guard
                # below always returns False here.
                if not self.is_config():
                    return False
                if self.destination_address is not None:
                    return True
                if self.destination_port is not None:
                    return True
                if self.source_address is not None:
                    return True
                if self.source_port is not None:
                    return True
                if self.associated_lsps is not None:
                    for child in self.associated_lsps:
                        if child is not None:
                            return True
                if self.label_in is not None:
                    return True
                if self.label_out is not None:
                    return True
                if self.status is not None:
                    return True
                if self.tunnel_id is not None:
                    return True
                if self.type is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Sessions.State.Session']['meta_info']

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:sessions/openconfig-mpls:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # is_config() is False for this state node, so the guard below
            # always returns False here.
            if not self.is_config():
                return False
            if self.session is not None:
                for child_ref in self.session:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Sessions.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:sessions'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Container has data when either child container has data.
        if not self.is_config():
            return False
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Sessions']['meta_info']
class Neighbors(object):
    """
    Configuration and state for RSVP neighbors connecting
    to the device
    .. attribute:: config
    Configuration of RSVP neighbor information
    **type**\: :py:class:`Config <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.Config>`
    .. attribute:: state
    State information relating to RSVP neighbors
    **type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.State>`
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        # Child containers are created eagerly and parent-linked so
        # their _common_path can be derived from the tree.
        self.parent = None
        self.config = Mpls.SignalingProtocols.RsvpTe.Neighbors.Config()
        self.config.parent = self
        self.state = Mpls.SignalingProtocols.RsvpTe.Neighbors.State()
        self.state.parent = self

    class Config(object):
        """
        Configuration of RSVP neighbor information
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            # This container has no leaves of its own in the model.
            self.parent = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:neighbors/openconfig-mpls:config'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return True

        def _has_data(self):
            # No leaves are modeled here, so there is never data to report.
            if not self.is_config():
                return False
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Neighbors.Config']['meta_info']

    class State(object):
        """
        State information relating to RSVP neighbors
        .. attribute:: neighbor
        List of RSVP neighbors connecting to the device, keyed by neighbor address
        **type**\: list of :py:class:`Neighbor <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor>`
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            # YList of Neighbor entries, parent-linked for path derivation.
            self.neighbor = YList()
            self.neighbor.parent = self
            self.neighbor.name = 'neighbor'

        class Neighbor(object):
            """
            List of RSVP neighbors connecting to the device,
            keyed by neighbor address
            .. attribute:: address <key>
            Address of RSVP neighbor
            **type**\: one of the below types:
            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
            ----
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
            ----
            .. attribute:: detected_interface
            Interface where RSVP neighbor was detected
            **type**\: str
            .. attribute:: neighbor_status
            Enumeration of possible RSVP neighbor states
            **type**\: :py:class:`NeighborStatusEnum <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor.NeighborStatusEnum>`
            .. attribute:: refresh_reduction
            Support of neighbor for RSVP refresh reduction
            **type**\: bool
            """

            _prefix = 'mpls'
            _revision = '2015-11-05'

            def __init__(self):
                self.parent = None
                # 'address' is the list key; remaining leaves are state data.
                self.address = None
                self.detected_interface = None
                self.neighbor_status = None
                self.refresh_reduction = None

            class NeighborStatusEnum(Enum):
                """
                NeighborStatusEnum
                Enumeration of possible RSVP neighbor states
                .. data:: UP = 0
                RSVP hello messages are detected from the neighbor
                .. data:: DOWN = 1
                RSVP neighbor not detected as up, due to a
                communication failure or IGP notification
                the neighbor is unavailable
                """

                UP = 0
                DOWN = 1

                @staticmethod
                def _meta_info():
                    from ydk.models.openconfig._meta import _openconfig_mpls as meta
                    return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor.NeighborStatusEnum']

            @property
            def _common_path(self):
                # The list key must be present to build the entry path.
                if self.address is None:
                    raise YPYModelError('Key property address is None')
                return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:neighbors/openconfig-mpls:state/openconfig-mpls:neighbor[openconfig-mpls:address = ' + str(self.address) + ']'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                # is_config() is False for this state node, so the guard
                # below always returns False here.
                if not self.is_config():
                    return False
                if self.address is not None:
                    return True
                if self.detected_interface is not None:
                    return True
                if self.neighbor_status is not None:
                    return True
                if self.refresh_reduction is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.openconfig._meta import _openconfig_mpls as meta
                return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Neighbors.State.Neighbor']['meta_info']

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:neighbors/openconfig-mpls:state'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # is_config() is False for this state node, so the guard below
            # always returns False here.
            if not self.is_config():
                return False
            if self.neighbor is not None:
                for child_ref in self.neighbor:
                    if child_ref._has_data():
                        return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Neighbors.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:neighbors'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return True

    def _has_data(self):
        # Container has data when either child container has data.
        if not self.is_config():
            return False
        if self.config is not None and self.config._has_data():
            return True
        if self.state is not None and self.state._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Neighbors']['meta_info']
class Global(object):
"""
Platform wide RSVP configuration and state
.. attribute:: graceful_restart
Operational state and configuration parameters relating to graceful\-restart for RSVP
**type**\: :py:class:`GracefulRestart <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart>`
.. attribute:: hellos
Top level container for RSVP hello parameters
**type**\: :py:class:`Hellos <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.Hellos>`
.. attribute:: soft_preemption
Protocol options relating to RSVP soft preemption
**type**\: :py:class:`SoftPreemption <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption>`
.. attribute:: state
Platform wide RSVP state, including counters
**type**\: :py:class:`State <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.State>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Create all child containers and give each a back-reference to this node."""
    self.parent = None  # set by the enclosing RsvpTe container
    self.graceful_restart = Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart()
    self.graceful_restart.parent = self
    self.hellos = Mpls.SignalingProtocols.RsvpTe.Global.Hellos()
    self.hellos.parent = self
    self.soft_preemption = Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption()
    self.soft_preemption.parent = self
    self.state = Mpls.SignalingProtocols.RsvpTe.Global.State()
    self.state.parent = self
class GracefulRestart(object):
    """RSVP graceful-restart: intended configuration plus applied state.

    Children:
        config: ``Config`` container (intended values).
        state: ``State`` container (operational values).
    """

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None  # set by the enclosing Global container
        self.config = Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config()
        self.config.parent = self
        self.state = Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State()
        self.state.parent = self

    class Config(object):
        """Configured graceful-restart parameters.

        Leaves:
            enable: bool — turn graceful restart on for this node.
            recovery_time: int (0..4294967295) — RSVP state recovery time.
            restart_time: int (0..4294967295) — graceful restart time, seconds.
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.enable = None
            self.recovery_time = None
            self.restart_time = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:graceful-restart/openconfig-mpls:config'

        def is_config(self):
            """Config container: always ``True``."""
            return True

        def _has_data(self):
            """True when any leaf is populated."""
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in
                       (self.enable, self.recovery_time, self.restart_time))

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.Config']['meta_info']

    class State(object):
        """Operational graceful-restart values (same leaves as ``Config``)."""

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.enable = None
            self.recovery_time = None
            self.restart_time = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:graceful-restart/openconfig-mpls:state'

        def is_config(self):
            """State container: always ``False``."""
            return False

        def _has_data(self):
            """Per generated-code convention, short-circuits on non-config containers."""
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in
                       (self.enable, self.recovery_time, self.restart_time))

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:graceful-restart'

    def is_config(self):
        """Mixed config/state container: treated as config, ``True``."""
        return True

    def _has_data(self):
        """True when either child container holds data."""
        if not self.is_config():
            return False
        return any(child is not None and child._has_data()
                   for child in (self.config, self.state))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.GracefulRestart']['meta_info']
class SoftPreemption(object):
    """RSVP soft-preemption options: intended configuration plus applied state."""

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None  # set by the enclosing Global container
        self.config = Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config()
        self.config.parent = self
        self.state = Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State()
        self.state.parent = self

    class Config(object):
        """Configured soft-preemption parameters.

        Leaves:
            enable: bool — turn soft preemption on for this node.
            soft_preemption_timeout: int (0..65535) — seconds before
                reverting to hard preemption.
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.enable = None
            self.soft_preemption_timeout = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:soft-preemption/openconfig-mpls:config'

        def is_config(self):
            """Config container: always ``True``."""
            return True

        def _has_data(self):
            """True when any leaf is populated."""
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in
                       (self.enable, self.soft_preemption_timeout))

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.Config']['meta_info']

    class State(object):
        """Operational soft-preemption values (same leaves as ``Config``)."""

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.enable = None
            self.soft_preemption_timeout = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:soft-preemption/openconfig-mpls:state'

        def is_config(self):
            """State container: always ``False``."""
            return False

        def _has_data(self):
            """Per generated-code convention, short-circuits on non-config containers."""
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in
                       (self.enable, self.soft_preemption_timeout))

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:soft-preemption'

    def is_config(self):
        """Mixed config/state container: treated as config, ``True``."""
        return True

    def _has_data(self):
        """True when either child container holds data."""
        if not self.is_config():
            return False
        return any(child is not None and child._has_data()
                   for child in (self.config, self.state))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.SoftPreemption']['meta_info']
class Hellos(object):
    """Top-level container for RSVP hello parameters (config plus state)."""

    _prefix = 'mpls'
    _revision = '2015-11-05'

    def __init__(self):
        self.parent = None  # set by the enclosing Global container
        self.config = Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config()
        self.config.parent = self
        self.state = Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State()
        self.state.parent = self

    class Config(object):
        """Configured RSVP hello parameters.

        Leaves:
            hello_interval: int (1000..60000) — ms between RSVP hello messages.
            refresh_reduction: bool — enable refresh-reduction bundling,
                message IDs, reliable delivery and summary refresh.
        """

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.hello_interval = None
            self.refresh_reduction = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:hellos/openconfig-mpls:config'

        def is_config(self):
            """Config container: always ``True``."""
            return True

        def _has_data(self):
            """True when any leaf is populated."""
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in
                       (self.hello_interval, self.refresh_reduction))

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.Hellos.Config']['meta_info']

    class State(object):
        """Operational RSVP hello values (same leaves as ``Config``)."""

        _prefix = 'mpls'
        _revision = '2015-11-05'

        def __init__(self):
            self.parent = None
            self.hello_interval = None
            self.refresh_reduction = None

        @property
        def _common_path(self):
            return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:hellos/openconfig-mpls:state'

        def is_config(self):
            """State container: always ``False``."""
            return False

        def _has_data(self):
            """Per generated-code convention, short-circuits on non-config containers."""
            if not self.is_config():
                return False
            return any(leaf is not None for leaf in
                       (self.hello_interval, self.refresh_reduction))

        @staticmethod
        def _meta_info():
            from ydk.models.openconfig._meta import _openconfig_mpls as meta
            return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.Hellos.State']['meta_info']

    @property
    def _common_path(self):
        return '/openconfig-mpls:mpls/openconfig-mpls:signaling-protocols/openconfig-mpls:rsvp-te/openconfig-mpls:global/openconfig-mpls:hellos'

    def is_config(self):
        """Mixed config/state container: treated as config, ``True``."""
        return True

    def _has_data(self):
        """True when either child container holds data."""
        if not self.is_config():
            return False
        return any(child is not None and child._has_data()
                   for child in (self.config, self.state))

    @staticmethod
    def _meta_info():
        from ydk.models.openconfig._meta import _openconfig_mpls as meta
        return meta._meta_table['Mpls.SignalingProtocols.RsvpTe.Global.Hellos']['meta_info']
class State(object):
"""
Platform wide RSVP state, including counters
.. attribute:: counters
Platform wide RSVP statistics and counters
**type**\: :py:class:`Counters <ydk.models.openconfig.openconfig_mpls.Mpls.SignalingProtocols.RsvpTe.Global.State.Counters>`
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Create the ``counters`` child and give it a back-reference to this node."""
    self.parent = None  # set by the enclosing Global container
    self.counters = Mpls.SignalingProtocols.RsvpTe.Global.State.Counters()
    self.counters.parent = self
class Counters(object):
"""
Platform wide RSVP statistics and counters
.. attribute:: in_ack_messages
Number of received RSVP refresh reduction ack messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_hello_messages
Number of received RSVP hello messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_path_error_messages
Number of received RSVP Path Error messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_path_messages
Number of received RSVP Path messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_path_tear_messages
Number of received RSVP Path Tear messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_reservation_error_messages
Number of received RSVP Resv Error messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_reservation_messages
Number of received RSVP Resv messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_reservation_tear_messages
Number of received RSVP Resv Tear messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: in_srefresh_messages
Number of received RSVP summary refresh messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_ack_messages
Number of sent RSVP refresh reduction ack messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_hello_messages
Number of sent RSVP hello messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_path_error_messages
Number of sent RSVP Path Error messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_path_messages
Number of sent RSVP PATH messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_path_tear_messages
Number of sent RSVP Path Tear messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_reservation_error_messages
Number of sent RSVP Resv Error messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_reservation_messages
Number of sent RSVP Resv messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_reservation_tear_messages
Number of sent RSVP Resv Tear messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: out_srefresh_messages
Number of sent RSVP summary refresh messages
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: path_timeouts
TODO
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: rate_limited_messages
RSVP messages dropped due to rate limiting
**type**\: long
**range:** 0..18446744073709551615
.. attribute:: reservation_timeouts
TODO
**type**\: long
**range:** 0..18446744073709551615
"""
_prefix = 'mpls'
_revision = '2015-11-05'
def __init__(self):
    """Initialize every RSVP counter leaf to None (meaning 'not set')."""
    self.parent = None  # set by the enclosing State container
    # All leaves are uint64 counters in the YANG model; start unset.
    for leaf in ('in_ack_messages', 'in_hello_messages',
                 'in_path_error_messages', 'in_path_messages',
                 'in_path_tear_messages', 'in_reservation_error_messages',
                 'in_reservation_messages', 'in_reservation_tear_messages',
                 'in_srefresh_messages', 'out_ack_messages',
                 'out_hello_messages', 'out_path_error_messages',
                 'out_path_messages', 'out_path_tear_messages',
                 'out_reservation_error_messages', 'out_reservation_messages',
                 'out_reservation_tear_messages', 'out_srefresh_messages',
                 'path_timeouts', 'rate_limited_messages',
                 'reservation_timeouts'):
        setattr(self, leaf, None)
@property
| codeparrot/github-code-clean |
# -*- coding: utf-8; -*-
import sys
from Tkinter import*
import os
import symbols
import draft_gui
import calc
import get_conf
import get_object
import param_edit
import select_clone
import trace
import trace_object
import save_file
import undo_redo
import to_dxf
import to_svg
import from_dxf
import from_svg
import copy_prop
import trim_extend
import trim_dim
import fillet
import edit
import offset
import scale_object
import rotate_object
import mirror_object
import move_object
import copy_object
import grab_object
import print_ps
import line
import dimension
import text_line
import circle
import arc
import math
import time
import tkFileDialog
import tkMessageBox
from random import randint
import codecs
import copy
from shutil import copyfile
#font = 'Arial'
# Zoom-step factors: one wheel step out multiplies by 0.8; one step in
# multiplies by its exact inverse, so out+in restores the original scale.
zoomm = 0.8        # zoom-out multiplier
zoomp = 1.0/0.8    # zoom-in multiplier (inverse of zoomm)
class Graphics:
def __init__(self):
    """Central application state for the SAMoCAD drawing program: current
    drawing attributes, mode flags, selection sets, per-type object counters
    and the master object table (``ALLOBJECT``)."""
    self.appPath = os.getcwd()
    # --- drawing variables ---
    self.zoomOLDres = 0
    self.ex = 0.0
    self.ey = 0.0
    self.ex2 = 0.0
    self.ey2 = 0.0
    self.ex3 = 0.0
    self.ey3 = 0.0
    self.min_e = 0.00001  # minimum drawing extent
    # --- display variables ---
    self.zoomOLD = 0
    self.sloy = '1'  # current layer
    self.color = 'white'  # current color
    self.width = 2  # current line width
    self.stipple = None
    self.stipple_size = 200  # dash-pattern scale
    self.select_color = 'green'  # color of selected objects
    self.priv_color = 'red'  # snap-marker color
    self.fon_color = 'black'
    self.left_color = 'light blue'
    self.right_color = 'red'
    self.size_t=-500  # current text font size (5 mm)
    self.size_f=-350  # current dimension font size (3.5 mm)
    self.size_simbol_p = 10  # snap-symbol size
    self.anchor = 'sw'  # current text anchor
    self.font = 'Architectural'
    self.s_s = 1.2  # letter spacing
    self.w_text = 1  # letter width
    self.s = 50  # controls the proportions of dimension elements
    self.arrow_s = 200
    self.vr_s = 200
    self.vv_s = 200
    self.type_arrow = 'Arch'
    self.s_s_dim = 1.3
    self.w_text_dim = 1
    self.font_dim = 'Architectural'
    self.snap_s = 10  # snap pickup range (distance in screen pixels)
    self.angle_s = 15.0
    self.auto_save_step = 30  # number of actions between autosaves
    # Line (dash) types
    self.stipples = {
        '_____________':None,
        '_ _ _ _ _ _ _':(1,1),
        '____ _ ____ _':(4,1,1,1),
        '____ _ _ ____':(4,1,1,1,1,1),
    }
    self.old_func = 'self.copyEvent()'
    self.prog_version = 'SAMoCAD - v0.0.8.5 alpha'
    self.old_text = self.prog_version
    self.old_offset = 0
    self.old_fillet_R = 0
    self.old_scale = 1
    self.old_print_scale = 100.0
    self.Old_sel = None
    self.ortoFlag=False  # True -> ortho mode is on
    self.trace_on = False
    self.trace_obj_on = False
    self.tracingFlag = True
    self.tracing_obj_Flag = True
    self.snap_near = True
    self.lappingFlag = False  # True -> the selection rectangle is active
    self.resFlag = False  # True -> a draw operation is in progress
    self.anchorFlag = False  # True -> text-anchor picking mode
    self.saveFlag = False
    self.changeFlag = False
    self.current_file = 'New draft'
    self.s_dxf = False
    self.curent_class = None
    self.unpriv = False
    self.edit_clone = False
    self.move_clone = False
    self.mirror_clone = False
    self.rotate_clone = False
    self.edit_dim_clone = False
    self.copy_clone = False
    self.line_clone = False
    self.circle_clone = False
    self.arc_clone = False
    self.dim_clone = False
    self.dimR_clone = False
    self.edit_dim_text_clone = False
    self.trim_dim_clone = False
    self.enumerator = 0
    self.com=None  # current command
    self.colorC = None  # remembers an object's color while the cursor hovers over it
    self.rect = None  # selection rectangle
    self.priv_coord = (0,0)  # current snap point
    self.x_priv = 0  # snap coordinates
    self.y_priv = 0
    self.tip_p = ''  # snap type
    self.Ndimd = 0  # number of dimensions
    self.Nlined = 0  # number of lines
    self.Ncircled = 2  # number of circles
    self.Ntextd = 0  # number of text strings
    self.Narcd = 0  # number of arcs
    self.Ncloned = 0
    self.Ndimrd = 0
    self.Ndim = ''
    self.Nline = ''
    self.Ntext = ''
    self.Ncircle = ''
    self.Narc = ''
    self.Ndimr = ''
    self.Nclone = ''
    self.func_collection = []  # collection objects that have already been acted upon
    self.collection = []  # currently selected objects
    self.find_privs = []  # snap-candidate objects
    self.collectionBack = []  # previously dropped selection set (for Ctrl+RMB restore)
    self.temp_collection = []
    self.temp_lines_list = []
    self.ALLOBJECT = {}  # ALL objects: {object: {parameter: value}}
    self.all_clone = {}
    self.history_undo = []  # undo event list
    #self.history_redo = []  # redo event list (feature disabled)
def initial(self, master1):  # builds the GUI
    """Build the GUI via draft_gui, cache widget references, create the
    coordinate-origin marker, register all mouse/keyboard bindings and
    normalize the initial zoom level."""
    draft_gui.gui = draft_gui.Gui(master1, graf)
    self.master1 = draft_gui.gui.master1
    self.dialog = draft_gui.gui.dialog
    self.command = draft_gui.gui.command
    self.info = draft_gui.gui.info
    self.button_orto = draft_gui.gui.button_orto
    self.button_trace = draft_gui.gui.button_trace
    self.button_trace_obj = draft_gui.gui.button_trace_obj
    self.button_snap_N = draft_gui.gui.button_snap_N
    self.frame1 = draft_gui.gui.frame1
    self.c = draft_gui.gui.canvas
    # Coordinate-origin marker (hidden line item used as a position reference)
    self.nachCoordy = self.c.create_line(10,10,100,10,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
    #self.c.create_line(100,10,80,5,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
    #self.c.create_line(100,10,80,15,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
    #self.nachCoordx = self.c.create_line(10,10,10,100,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
    #self.c.create_line(10,100,5,80,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
    #self.c.create_line(10,100,15,80,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
    # Intercept window close so unsaved work can be handled
    self.col = 0
    self.master1.protocol('WM_DELETE_WINDOW', self.exitMethod)
    # --- event bindings ---
    self.master1.bind_class(self.c,"<MouseWheel>", self.Mzoommer)#Windows OS
    self.master1.bind_class(self.c,'<Button-4>', self.Mzoommer)#Linux OS
    self.master1.bind_class(self.c,'<Button-5>', self.Mzoommer)#Linux OS
    self.c.bind_class(self.master1,"<B2-Motion>", self.mouseMove)
    self.c.bind_class(self.master1,"<2>", self.OnMouseMove)
    self.c.bind_class(self.c,"<Motion>", self.gpriv)
    #self.c.tag_bind('t_LOD', '<Button-3>', self.editText)
    #self.c.tag_bind('dim_text_priv', '<Button-3>', self.editDimTextPlace)
    self.c.bind_class(self.master1, "<Control-Button-3>", self.BackCol)
    self.c.bind('<Button-3>', self.edit_butt_3)
    self.c.bind('<Button-1>', self.lapping_sel)
    self.c.bind('<Shift-Button-1>', self.lapping_desel)
    self.c.bind_class(self.master1, "<Delete>", self.delete)
    self.c.bind_class(self.master1, "<Escape>", self.kill)
    self.c.bind_class(self.master1, "<Return>", self.old_function)
    # --- hotkeys ---
    self.c.bind_class(self.master1, "<Control-KeyPress-x>", self.mirrorEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-z>", self.copyEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-a>", self.moveEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-s>", self.rotateEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-l>", self.ort)
    self.c.bind_class(self.master1, "<Control-KeyPress-e>", self.tt)
    self.c.bind_class(self.master1, "<Control-KeyPress-d>", self.copy_prop)
    self.c.bind_class(self.master1, "<Control-KeyPress-q>", self.trimEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-w>", self.extendEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-r>", self.scaleEvent)
    self.c.bind_class(self.master1, "<Control-KeyPress-p>", self.print_postScript)
    self.c.bind_class(self.master1, "<Control-KeyPress-o>", self.fileOpen)
    self.c.bind_class(self.master1, "<Control-KeyPress-n>", self.new)
    self.c.bind_class(self.master1, "<Control-KeyPress-m>", self.trim_dim)
    self.c.bind_class(self.master1, "<F1>", draft_gui.gui.obj_prop)
    self.set_coord()
    j = 0  # normalize the initial zoom (20 zoom-out steps)
    while j < 20:
        self.zoommerM()
        j+=1
def tt(self, event):
    """Debug helper (bound to Ctrl+E): dump every object in ALLOBJECT and
    its property dictionary to stdout, separated by divider lines.

    The previously accumulated commented-out debug prints were removed;
    this is the only behavior the method ever had at runtime.
    """
    for name in self.ALLOBJECT:
        print ('______________________________')
        print (name, self.ALLOBJECT[name])
def undo(self, event = None):
    """Cancel the current tool, then undo the most recent recorded action
    (delegated to the undo_redo module) if any history exists."""
    self.kill()
    if self.history_undo:
        undo_redo.undo(self.history_undo[-1], graf)
# Disabled redo implementation, kept for reference:
'''
def redo(self, event = None):
    self.kill()
    if self.history_redo:
        undo_redo.redo(self.history_redo[-1], graf)
'''
# --- OBJECT EDITING ---
def old_function(self, event):  # Enter outside drawing mode repeats the last invoked command
    """Re-run the last command stored as source text in ``self.old_func``.

    NOTE(review): uses exec() on an internally-built string; this is safe
    only as long as old_func is assigned exclusively by this class.
    """
    exec(self.old_func)
# --- CONTINUATION OF A DIMENSION LINE ---
def trim_dim(self, event = None):
    # ``trim_dim`` here resolves to the imported module, not this method.
    self.curent_class = trim_dim.Trim_dim(graf)
# --- COPYING PROPERTIES ---
def copy_prop(self, event = None):
    self.curent_class = copy_prop.Copy_prop(graf)
# --- TRIM / EXTEND LINES ---
def trimEvent(self, event = None):
    self.trim_extend = 'Trim'
    self.curent_class = trim_extend.Trim_extent(graf)
    self.old_func = 'self.trimEvent()'
def extendEvent(self, event = None):
    self.trim_extend = 'Extend'
    self.curent_class = trim_extend.Trim_extent(graf)
    self.old_func = 'self.extendEvent()'
# --- RE-APPLY CHANGED SETTINGS TO THE SELECTED OBJECTS ---
def param_edit(self, params):
    param_edit.Param_edit(graf, params)
# --- RIGHT-MOUSE-BUTTON EVENTS ---
def edit_butt_3(self, event):
    """RMB click: pick the nearest dimension or text object under the
    cursor and start the matching edit mode."""
    el = get_object.get_obj(event.x, event.y, graf, ('dim', 'text'))
    if el:
        self.kill()
        # Take coordinates from the snap-point list (computed by gpriv)
        self.ex = self.priv_coord[0]
        self.ey = self.priv_coord[1]
        if el[0] == 'd':  # object ids are type-prefixed: 'd' = dimension
            self.editDimTextPlace(el)
        elif el[0] == 't':  # 't' = text
            self.editText(el)
# --- RELOCATING DIMENSION TEXT ---
def editDimTextPlace(self, el):
    """Enter interactive relocation mode for a dimension's text: rebind the
    mouse so the next left click commits via editDimTextPlace2."""
    if self.tip_p == 'c':
        self.ex = self.priv_coord[0]  # take coordinates from the snap list (computed by gpriv)
        self.ey = self.priv_coord[1]
    self.ex3,self.ey3 = self.ex,self.ey
    self.dialog.config(text = 'Move dim text - new point:')
    self.info.config(text = 'Escape - stop')
    self.resFlag = True
    self.c.bind_class(self.master1,"<Return>", self.kill)
    self.c.bind('<Button-1>', self.editDimTextPlace2)
    self.c.unbind('<Button-3>')
    self.c.unbind('<Shift-Button-1>')
    self.set_coord()
    self.collection.append(el,)
    select_clone.Select_clone([el,], graf)
    self.Old_sel = None
    self.edit_dim_text_clone = True
def editDimTextPlace2(self, event = None):
    """Second stage of dimension-text relocation.

    Recomputes the text placement from the current snap point; if the new
    point lies close to the dimension line the text snaps back onto it
    ('online3'/'online3_m_l'), otherwise it is freely placed ('changed').
    With a real click event the dimension is rebuilt in place; without one
    (mouse motion) only a temporary preview is drawn.
    """
    x1, y1, x2, y2, x3, y3, ort, size, fill, text, sloy, text_change, text_place, s, vr_s, vv_s, arrow_s, type_arrow, s_s_dim, w_text_dim, font_dim = get_conf.get_dim_conf(self.collection[0], graf)
    self.ex2 = self.priv_coord[0]  # take coordinates from the snap list (computed by gpriv)
    self.ey2 = self.priv_coord[1]
    self.ex,self.ey = self.coordinator(self.ex,self.ey)
    s2 = self.coordinator2(s)
    line3 = self.c.coords(self.get_snap_line(self.collection[0])[2])
    # Within 2*s2 of the dimension line -> snap text back onto the line.
    if ort == 'vertical' and abs(self.ey2-y3) <= s2*2.0:
        self.ey2 = y3-s
        text_change = 'online3'
        if x1<self.ex2<x2 or x2<self.ex2<x1:
            text_change = 'online3_m_l'
    elif ort == 'horizontal' and abs(self.ex2-x3) <= s2*2.0:
        self.ex2 = x3+s
        text_change = 'online3'
        if y1<self.ey2<y2 or y2<self.ey2<y1:
            text_change = 'online3_m_l'
    else:
        text_change = 'changed'  # free placement away from the line
    text_place = [self.ex2, self.ey2]
    if event:  # real click: commit — replace the old dimension in place
        self.c.delete(self.collection[0])
        dimension.c_dim(graf, x1, y1, x2, y2, x3, y3, text, sloy,
                        fill,
                        size,
                        ort,
                        text_change,
                        text_place,
                        s,
                        vv_s,
                        vr_s,
                        arrow_s,
                        type_arrow,
                        s_s_dim,
                        w_text_dim,
                        font_dim,
                        ID = self.collection[0])
        self.changeFlag = True
        self.kill()
    else:  # motion: draw a temporary preview only
        self.set_coord()
        dimension.c_dim(graf, x1, y1, x2, y2, x3, y3, text, sloy,
                        fill,
                        size,
                        ort,
                        text_change,
                        text_place,
                        s,
                        vv_s,
                        vr_s,
                        arrow_s,
                        type_arrow,
                        s_s_dim,
                        w_text_dim,
                        font_dim,
                        temp = 'Yes')
        self.ex3 = self.ex2
        self.ey3 = self.ey2
def dim_text_place(self, Num):  # takes a dimension object; returns its text primitives, the text anchor line, and the anchor point
    """Collect a dimension's text primitives and its text-anchor line.

    Returns (text_lines, priv_line, text_place) where text_place is
    [x, y, 'vert'|'hor'], derived from the anchor line's midpoint.
    """
    objs = self.ALLOBJECT[Num]['id']
    text_lines = []
    for i in objs:
        tag = self.ALLOBJECT[Num]['id'][i]
        if 'dim_text' in tag:
            text_lines.append(i)
        if 'dim_text_priv' in tag:
            priv_line = i  # NOTE(review): unbound if no primitive carries this tag — assumed always present
    text_p = self.c.coords(priv_line)
    text_place = []
    text_place1 = (text_p[0] + text_p[2]) / 2.0
    text_place2 = (text_p[1] + text_p[3]) / 2.0
    if text_place1 == text_p[0]:  # no horizontal extent -> vertical anchor line
        text_place.append(text_p[0])
        text_place.append(text_place2)
        text_place.append('vert')
    else:
        text_place.append(text_place1)
        text_place.append(text_p[1])
        text_place.append('hor')
    return text_lines, priv_line, text_place
# --- TEXT EDITING ---
def editText(self, Num):
    """Load a text object's content into the command entry for in-place
    editing; Enter commits via editText2."""
    self.dialog.config(text = 'Edit text:')
    self.info.config(text = 'Enter - apply. Escape - stop')
    self.command.delete(0, END)
    self.collection.append(Num)
    select_clone.Select_clone([Num,], graf)
    text = self.ALLOBJECT[self.collection[0]]['text']
    self.command.insert(0, text)
    self.c.bind_class(self.master1, "<Return>", self.editText2)
    self.command.focus_set()
    self.Old_sel = None
def editText2(self, event):
    """Commit the edited text: delete the old object and recreate it with
    the entry widget's content, preserving all other text attributes."""
    fill, text, sloy, angle, anchor, size, line, coord, s_s, w_text, font = self.get_text_conf(self.collection[0])
    text = self.command.get()
    self.delete(elements = (self.collection[0],))
    text_line.c_text(graf, coord[0], coord[1], text = text, size = size, anchor = anchor, sloy = sloy, fill = fill, angle = angle, s_s = s_s, w_text = w_text, font = font)
    self.collection = []
    self.changeFlag = True
    self.enumerator_p()
    self.kill()
# --- OBJECT PARAMETER GETTERS (thin delegates to the get_conf module) ---
def get_conf(self, obj):
    """Generic parameter dump for any object type."""
    return get_conf.get_conf(obj, graf)
def get_circle_conf(self, obj):
    return get_conf.get_circle_conf(obj, graf)
def get_arc_conf(self, obj):
    return get_conf.get_arc_conf(obj, graf)
def get_line_conf(self, obj):
    return get_conf.get_line_conf(obj, graf)
def get_line_coord(self, obj):
    return get_conf.get_line_coord(obj, graf)
def get_text_conf(self, obj):
    return get_conf.get_text_conf(obj, graf)
def get_dim_conf(self, obj):
    return get_conf.get_dim_conf(obj, graf)
def get_dimR_conf(self, obj):
    return get_conf.get_dimR_conf(obj, graf)
# --- NODE EDITING ---
def editEvent(self, event):
    self.curent_class = edit.Edit_node(graf)
# --- FILLET ---
def filletEvent(self, event=None):
    self.curent_class = fillet.Fillet(graf)
# --- GET THE PRIMITIVE NEAREST TO A POINT ---
def get_obj(self, x, y, t_obj = 'line'):
    return get_object.get_obj(x, y, graf, t_obj)
# --- OFFSET ---
def offsetEvent(self, event=None):
    self.curent_class = offset.Offset(graf)
# --- OBJECT SCALING ---
def scaleEvent(self, event=None):
    self.curent_class = scale_object.Scale_object(graf)
# --- ROTATION ---
def rotateEvent(self, event=None):
    self.curent_class = rotate_object.Rotate_object(graf)
# --- MIRROR (not applicable to complex objects containing text) ---
def mirrorEvent(self, event=None):
    self.curent_class = mirror_object.Mirror_object(graf)
# --- MOVE ---
def moveEvent(self, event=None):
    self.curent_class = move_object.Move_object(graf)
# --- COPY ---
def copyEvent(self,event=None):
    self.curent_class = copy_object.Copy_object(graf)
# --- SELECTION ---
def lapping_sel(self,event):
    """LMB: add objects under/inside the pick to the selection."""
    grab_object.lapping2(graf, select = 'select')
# --- DESELECTION ---
def lapping_desel(self, event):
    """Shift+LMB: remove objects under/inside the pick from the selection."""
    grab_object.lapping2(graf, select = 'deselect')
def resRect(self, event):
    """Stretch the rubber-band selection rectangle as the mouse moves,
    recoloring it by drag direction (left vs. right)."""
    self.rectx2=event.x
    self.recty2=event.y
    self.priv_coord = (self.rectx2, self.recty2)
    self.rectx,self.recty = self.coordinator(self.rectx,self.recty)
    self.set_coord()
    if self.rectx2<self.rectx:  # color depends on the x drag direction
        color = self.left_color
    else:
        color = self.right_color
    if self.rect:
        self.c.coords(self.rect, self.rectx, self.recty, self.rectx2, self.recty2)
        self.c.itemconfig(self.rect, outline = color)
    else:
        self.rect=self.c.create_rectangle(self.rectx, self.recty, self.rectx2, self.recty2, fill=None,outline=color, tags=['line', 'obj', 'rect'])  # draw anew at the new coordinates
def set_coord(self):
    """Snapshot the current origin position and zoom level.

    Used later by coordinator()/m_coordinator() to translate coordinates
    recorded now into coordinates after pan/zoom changes.
    """
    self.xynachres=self.c.coords(self.nachCoordy)
    self.zoomOLDres = self.zoomOLD
def delete(self, event=None, elements = None, add_history = None): #Delete objects
    """Delete objects from the canvas and from ALLOBJECT.

    With elements=None the current selection is deleted and the operation
    is recorded in the undo history; otherwise the given element ids are
    deleted silently (no history, no selection reset).
    """
    def dele(i, h = None):
        # Remove object *i* from the canvas and the object registry;
        # h is truthy when the deletion must be recorded for undo.
        if h:
            e = self.get_conf(i)
            self.e_list.append(e)
        self.c.delete(i)
        del self.ALLOBJECT[i]
        if ('c_', i) in self.history_undo:
            self.history_undo.remove(('c_', i))
    t1 = time.time()
    if elements == None:#No explicit element list: delete the selection
        self.set_coord()
        self.e_list = []
        # BUGFIX: was map(lambda x: dele(x, h='add'), self.collection) with
        # the result discarded - map() is lazy under Python 3, so the
        # callbacks never ran.  Use an explicit loop instead.
        for x in self.collection:
            dele(x, h = 'add')
        self.collection = []
        self.history_undo.append(('delete', (self.e_list, self.xynachres, self.zoomOLDres)))
        self.changeFlag = True
        self.enumerator_p()
        self.kill()
    else:#Explicit element list supplied
        # BUGFIX: same lazy-map issue as above.
        for x in elements:
            dele(x)
    t2 = time.time()
    print ('delete', t2-t1)
def sbros(self):
    """Reset the selection.

    The currently selected ids are stashed in collectionBack (so BackCol
    can restore them), all highlight clones are removed from the canvas,
    and the live selection list is emptied.
    """
    started = time.time()
    self.collectionBack = self.collection
    self.c.delete('clone')
    self.collection = []
    print ('sbros', time.time() - started)
def BackCol(self, event):#core-feature!!! - restore the previous selection set
    """Restore the previous selection (Ctrl+RMB).

    Only runs when nothing is being drawn and the selection is empty;
    objects deleted since the stash are skipped.
    """
    if self.resFlag == False and (not self.collection):
        def BC(i):
            # Re-select *i* only if it still exists.
            if i in self.ALLOBJECT:
                self.collection.append(i)
                print (111)
        # BUGFIX: was map(BC, self.collectionBack) with the result
        # discarded - lazy under Python 3, so nothing was restored.
        for i in self.collectionBack:
            BC(i)
        select_clone.Select_clone(self.collection, graf)
        self.colObj()#Refresh the selected-objects counter
        draft_gui.gui.update_prop()
def colObj(self):
    """Show the number of selected objects in the info bar (blank when none)."""
    message = ('Selected %s objects') % len(self.collection) if self.collection else ''
    self.info.config(text = message)
def back_color(self, color, obj):
    """Restore *color* on object *obj* after a highlight.

    Circles ('c') and arcs ('a') are composite, so each of their canvas
    primitives is recoloured according to its role; any other object is
    recoloured as a whole via its tag.
    """
    if obj[0] not in ('c', 'a'):
        self.c.itemconfig(obj, fill = color)
        return
    # Composite object: walk its primitive -> tags mapping.
    for item in self.ALLOBJECT[obj]['id']:
        item_tags = self.ALLOBJECT[obj]['id'][item]
        if 'line' in item_tags:
            self.c.itemconfig(item, fill = color)
        if 'cir' in item_tags or 'a' in item_tags:
            self.c.itemconfig(item, outline = color)
def collektor_sel(self, event):
    """Click handler: add the object under the cursor to the selection."""
    x = event.x
    y = event.y
    self.collektor(x, y, select = 'select')
def collektor_desel(self, event):
    """Shift-click handler: remove the object under the cursor from the selection."""
    x = event.x
    y = event.y
    self.collektor(x, y, select = 'deselect')
def collektor(self, x, y, select):
    """Add to or remove from the selection the object whose primitives are
    active (under the cursor) at canvas point (x, y).

    select == 'select' adds; anything else (Shift held) removes.
    """
    #Resolve the object id owning the active primitive
    Num = get_object.get_obj(x, y, graf, 'all')
    #Plain click (Shift not held)
    if select == 'select':
        #If the object is known and not yet selected - add it and highlight it
        if Num not in self.collection and Num in self.ALLOBJECT:
            self.collection.append(Num)
            select_clone.Select_clone((Num,), graf)
            self.Old_sel = None
    #Shift held
    else:
        #If the object is selected - remove it and drop its highlight clone
        if Num in self.collection:
            self.collection.remove(Num)
            self.c.delete('C'+Num)
            draft_gui.gui.update_prop()
    #Refresh the selected-objects counter
    self.colObj()
def mass_collektor(self, mass, select):
    """Add to ('select') or remove from the selection every object that owns
    a primitive in *mass*.  Highlight-clone primitives (ids starting with
    'C') are ignored.  Refreshes the property panel when the selection
    actually changed.
    """
    t1 = time.time()
    # BUGFIX: was `old_col = self.collection`, which aliases the list -
    # extend()/remove() below mutate the same object, so the inequality
    # check at the end was always False and update_prop() never fired.
    old_col = list(self.collection)
    if select == 'select':#Adding
        append_list = []
        gettags = self.c.gettags     # hoist attribute lookups out of the loop
        append = append_list.append
        for content in mass:
            Num = gettags(content)[1]#Object id owning this primitive
            if Num not in self.collection and Num not in append_list and Num[0] != 'C':
                append(Num)
        select_clone.Select_clone(append_list, graf)
        self.collection.extend(append_list)
    else: #Removing
        delete_list = []
        for content in mass:
            Num = self.c.gettags(content)[1]#Object id owning this primitive
            if Num in self.collection and Num not in delete_list and Num[0] != 'C':
                delete_list.append(Num)
        #Remove collected ids from the selection and drop their clones
        for i in delete_list:
            self.collection.remove(i)
            self.c.delete('C'+i)
    if old_col != self.collection:
        draft_gui.gui.update_prop()
    t2 = time.time()
    print ('mass_collektor', t2-t1)
def edit_collektor(self, edit_mass):
    """Build the selection for node editing from the primitives in *edit_mass*.

    Objects snapped at their own centre/base point are excluded (they would
    be moved, not edited).  Returns 'line_c' when exactly one line is
    selected via a midpoint snap (caller switches to Move mode), otherwise
    'another' (caller switches to Edit mode).
    """
    prov = True #Stays True while no dimension has been seen
    append_list = []
    for content in edit_mass:
        non_ap = False
        Num = self.c.gettags(content)[1]#Object id owning this primitive
        if Num not in append_list and Num[0] != 'C':
            if Num[0] in ('d', 'r'):
                prov = False
            if Num[0] == 'r':
                # Radial dimension: skip it when snapped at its base point.
                line1 = self.get_snap_line(Num)[0]
                c = self.c.coords(line1) #get_conf.get_line_coord(line1, graf)#
                xc = c[0]
                yc = c[1]
                if (xc, yc) == (self.ex, self.ey):
                    non_ap = True
            elif Num[0] == 'c':
                # Circle: skip it when snapped at its centre.
                x0, y0, R, fill, width, sloy = get_conf.get_circle_conf(Num, graf)
                if (x0, y0) == (self.ex, self.ey):
                    non_ap = True
            elif Num[0] == 'a':
                # Arc: skip it when snapped at its centre.
                xc, yc, dx1, dy1, dx2, dy2, fill, width, sloy = get_conf.get_arc_conf(Num, graf)
                if (xc, yc) == (self.ex, self.ey):
                    non_ap = True
            if non_ap == False:
                append_list.append(Num)
    select_clone.Select_clone(append_list, graf)
    if self.Old_sel in append_list:
        self.Old_sel = None
    self.collection.extend(append_list)
    if self.tip_p == 'c' and prov == True and len(self.collection)==1:#Single line, midpoint snap
        return 'line_c'#Caller enables Move mode
    else:
        return 'another'#Caller enables Edit mode
def edit_c(self, edit_mass):
    """Filter the selection for editing.

    If the selection holds only dimensions, and they all share one
    orientation and lie on one dimension line, the selection is collapsed
    to that dimension chain (so the whole chain can be dragged at once).
    Otherwise only objects with a primitive under the current snap point
    stay selected.
    """
    delete_list = []#Selected objects with no snap under the cursor
    dim_list = []#Dimensions found in the selection
    line_dim_edit = True#Stays True while only dimensions are seen
    for content in edit_mass:
        if content[0] == 'd':#Dimension
            dim_list.append(content)
        else:
            line_dim_edit = False#A non-dimension was seen
            undel_obj = False#False means: drop the object from the selection
            find = self.ALLOBJECT[content]['id']#self.c.find_withtag(content)#Primitives of the object
            for i in find:
                if i in self.find_privs2:#Primitive is among the snap candidates
                    undel_obj = True
            if undel_obj == False:
                delete_list.append(content)
                self.c.delete('C'+content)
    # BUGFIX: was map(lambda i: self.collection.remove(i), delete_list)
    # with the result discarded - map() is lazy under Python 3, so the
    # removals never happened.  Use an explicit loop.
    for i in delete_list:
        self.collection.remove(i)
    #core-feature!!! - check whether all dimensions lie on one line; if so
    #the whole dimension chain can be moved together
    if line_dim_edit == True:#Only dimensions were selected
        if len(dim_list) > 1:
            line3_list = []#First coordinates of each dimension line
            ort1 = None#Orientation of the first dimension
            ort2 = None#Orientation of the others
            bFlag = False#Becomes True as soon as the chain test fails
            for i in dim_list:
                if dim_list.index(i) == 0:
                    ort1 = self.ALLOBJECT[i]['ort']
                else:
                    ort2 = self.ALLOBJECT[i]['ort']
                    if ort1 != ort2:#Mixed orientations - bail out
                        bFlag = True
                        break
                line3 = self.get_snap_line(i)[2]#The dimension line itself
                coord = self.c.coords(line3)
                line3_list.append(coord[0:2])#Keep only its first point
            if bFlag == False:
                for ind, i in enumerate(line3_list):
                    if ort1 == 'vertical':
                        if i == line3_list[-1]:#Last element compares to the previous one
                            ii = -1
                        else:
                            ii = 1
                        if i[1] != line3_list[ind + ii][1]:#y coords differ - bail out
                            bFlag = True
                            break
                    else:
                        if i == line3_list[-1]:
                            ii = -1
                        else:
                            ii = 1
                        if i[0] != line3_list[ind + ii][0]:#x coords differ - bail out
                            bFlag = True
                            break
            if bFlag == False:#All checks passed: select the whole chain
                self.collection = dim_list
                for i in self.collection:#Re-highlight the dimensions
                    self.c.delete('C'+i)
                select_clone.Select_clone(self.collection, graf)
def colorer(self, event):
    """Cursor entered a primitive: highlight its owner object.

    While hovering, the rubber-band selection bindings are suspended so a
    click acts on the hovered object instead.
    """
    Num = self.get_obj(event.x, event.y, 'all')
    #Highlight only unselected, known objects (never the trace helpers)
    if Num not in self.collection and Num in self.ALLOBJECT and Num not in ('trace', 'trace_o'):
        select_clone.Select_clone((Num,), graf)
    if self.resFlag == False:#Nothing is being drawn - disable lapping actions
        self.c.unbind('<Button-1>')
        self.c.unbind('<Shift-Button-1>')
def colorerL(self, event=None):
    """Cursor left a primitive: drop the hover highlight and restore the
    rubber-band selection bindings."""
    obj = self.get_obj(event.x, event.y, 'all')
    # Only unselected, still-existing objects carry a hover clone.
    if obj in self.ALLOBJECT and obj not in self.collection:
        clone_id = self.all_clone.pop('C'+obj)
        self.c.delete(clone_id)
    if self.resFlag == False:
        self.c.bind('<Button-1>', self.lapping_sel)
        self.c.bind('<Shift-Button-1>', self.lapping_desel)
def m_coordinator(self, arg, zoomOLDres):
    """Rescale distance *arg*, recorded at zoom level *zoomOLDres*, into a
    distance at the current zoom level (self.zoomOLD)."""
    delta = zoomOLDres - self.zoomOLD
    if delta != 0:
        if delta < 0:
            # Current zoom is deeper than the recorded one: shrink.
            arg = arg * zoomm ** delta
        else:
            arg = arg * zoomp ** (-delta)
    return arg
def n_coordinator(self, arg):
    """Convert an on-screen distance *arg* into real (model) units at the
    current zoom level."""
    level = self.zoomOLD
    if level > 0:
        return arg * zoomm ** level
    return arg * zoomp ** (-level)
def coordinator(self,x,y,zoomOLDres = None, xynachres = None):
    """Translate point (x, y), recorded at zoom level *zoomOLDres* with the
    origin at *xynachres*, into current screen coordinates after any pan or
    zoom.  With zoomOLDres=None the snapshot taken by set_coord() is used.
    """
    xynach=self.c.coords(self.nachCoordy)#Current origin position
    if zoomOLDres == None:
        zoomOLDres = self.zoomOLDres
        xynachres = self.xynachres
    if self.zoomOLD == zoomOLDres:
        #Zoom unchanged: only the pan offset applies
        dx=xynach[0]-xynachres[0]
        dy=xynach[1]-xynachres[1]
    else:
        r=-self.zoomOLD+zoomOLDres
        if self.zoomOLD>zoomOLDres:
            #Zoomed out since the snapshot
            x *= zoomm**r
            y *= zoomm**r
            dx = xynach[0] - xynachres[0] * zoomm**r
            dy = xynach[1] - xynachres[1] * zoomm**r
        else:
            #Zoomed in since the snapshot
            x *= zoomp**(-r)
            y *= zoomp**(-r)
            dx = xynach[0] - xynachres[0] * zoomp**(-r)
            dy = xynach[1] - xynachres[1] * zoomp**(-r)
    x = dx + x
    y = dy + y
    return x,y
def coordinator2(self,arg):
    """Convert a real (model) distance *arg* into an on-screen distance at
    the current zoom level."""
    level = self.zoomOLD
    if level > 0:
        arg *= zoomp**level
    else:
        arg /= zoomp**(-level)
    return arg
def standart_unbind(self):
    """Enter drawing mode: suspend the selection/editing bindings and make
    Return abort the current command (kill)."""
    self.resFlag = True
    self.c.bind_class(self.master1,"<Return>", self.kill)
    self.c.unbind('<Button-3>')
    self.c.unbind_class(self.master1, "<Control-Button-3>")
    self.c.unbind('<Button-1>')
    self.c.unbind('<Shift-Button-1>')
    self.c.unbind_class(self.master1, "<Delete>")
def kill(self, event=None):
    """Abort the current command and return the editor to its idle state.

    Removes every transient canvas item (rubber band, snap marker, clones,
    traces), drops the active command object, clears all drawing-mode
    flags, restores the default event bindings and resets the UI widgets.
    """
    if self.rect:#Drop the rubber-band rectangle
        self.c.delete(self.rect)
        self.rect = None
    if self.col:#Drop the highlight clone of the hovered object
        #fill = self.ALLOBJECT[self.col]['fill']
        #self.back_color(fill, self.col)
        self.c.delete('C'+self.col)
        self.col = None
    if self.curent_class:#Drop the active command object
        del self.curent_class
        self.curent_class = None
    t=self.c.find_withtag('c1')#Snap-point marker
    if t:
        self.c.delete('c1')
    if 'trace' in self.ALLOBJECT:#Tracing helper lines
        self.c.delete('trace')
        del self.ALLOBJECT['trace']
    if 'trace_o' in self.ALLOBJECT:
        self.c.delete('trace_o')
        del self.ALLOBJECT['trace_o']
    self.c.delete('clone')
    self.c.delete('temp')
    #Reset every drawing/preview mode flag
    self.unpriv = False
    self.edit_clone = False
    self.move_clone = False
    self.mirror_clone = False
    self.rotate_clone = False
    self.edit_dim_clone = False
    self.copy_clone = False
    self.line_clone = False
    self.circle_clone = False
    self.arc_clone = False
    self.dim_clone = False
    self.dimR_clone = False
    self.trim_dim_clone = False
    self.edit_dim_text_clone = False
    #Restore the idle-mode bindings
    self.c.bind_class(self.c,"<Motion>", self.gpriv)
    self.c.bind_class(self.master1, "<Control-Button-3>", self.BackCol)
    self.c.bind('<Button-1>', self.lapping_sel)
    self.c.bind('<Shift-Button-1>', self.lapping_desel)
    self.c.bind('<Button-3>', self.edit_butt_3)
    self.c.bind_class(self.master1, "<Return>", self.old_function)
    self.c.bind_class(self.master1, "<Delete>", self.delete)
    self.c.unbind_class(self.c,"<Shift-1>")
    self.c.unbind_class(self.master1, "<Motion>")
    self.c.unbind_class(self.c, "<End>")
    #Reset the UI widgets and state flags
    self.dialog.config(text = 'Command:')
    self.info.config(text = '')
    self.resFlag = False
    self.lappingFlag = False
    self.anchorFlag = False
    self.trace_on = False
    self.trace_obj_on = False
    self.command.delete(0,END)
    self.com = None
    self.sbros()
    self.func_collection = []
    self.temp_collection = []
    self.c.config(cursor = 'crosshair')
    draft_gui.gui.update_prop()
def comY_N(self, default):
    """Read a Y/N answer from the command line.

    Returns 'Y' or 'N' when the user typed one, the caller-supplied
    *default* when the field is empty, and 'unknow' for anything else.
    The field is cleared whenever something was typed.
    """
    answer = self.command.get()
    if not answer:
        # Nothing typed: keep the default.
        return default
    answer = answer.upper()
    self.command.delete(0,END)
    if answer in ('N', 'Y'):
        return answer
    return 'unknow'  # NOTE: historical spelling kept - callers may compare against it
def comOrKill(self, event=None):
    """Parse the command-line entry as a float into self.com.

    On failure self.com becomes None and 'Unknow command' is shown in the
    info bar (message spelling kept as-is).
    """
    raw = self.command.get()
    try:
        self.com = float(raw)
    except ValueError:
        self.info.config(text = 'Unknow command')
        self.com = None
def commer(self,x1,y1,x2,y2): #Recompute the end point when a length was typed
    """If the user typed a length into the command line, move the end point
    (x2, y2) so that the segment from (x1, y1) has that length, preserving
    its direction (or snapping to an axis when ortho mode is on).
    Returns the (possibly adjusted) end point.
    """
    self.comOrKill()
    if self.com:
        self.com = self.coordinator2(self.com)#Typed length -> screen units
        dx=x1-x2
        dy=y1-y2
        if self.ortoFlag == False and x1 != x2 and y1 != y2:
            #Free direction: scale the (dx, dy) vector to length self.com
            dx0=math.sqrt((self.com*self.com * dx*dx)/(dy*dy + dx*dx))
            dy0=dx0*dy/dx
            i=1
            if x1<x2:
                i=-1
            x2=x1-i*dx0
            y2=y1-i*dy0
        else:
            #Ortho: snap to the nearest axis, then apply the typed length
            x2,y2=self.orto(x1,y1,x2,y2)
            x2,y2=self.ortoRes(x1,y1,x2,y2)
    return x2,y2
def gpriv(self,event=None, x=None, y = None, f = None):
    """Mouse-motion handler: compute the snap point for the cursor, draw the
    snap marker, manage hover highlighting, and redraw the preview clone of
    whatever command is currently active.

    Marker shapes: circle = endpoint ('r'), triangle = midpoint ('c'),
    X = intersection ('X'), boxed X = nearest point on a primitive ('N').
    """
    t=self.c.find_withtag('c1')#Existing snap marker
    if t:
        self.c.delete('c1')#Remove it before recomputing
        if self.resFlag == False:#Not drawing: restore rubber-band bindings
            self.c.bind('<Button-1>', self.lapping_sel)
            self.c.bind('<Shift-Button-1>', self.lapping_desel)
        self.find_privs = ['t'] #Reset the snap-candidate list
    #if event:
    self.find_privs = ['t']#Snap candidates (with the 't' cycle marker)
    #self.c.unbind_class(self.master1, "<End>")
    x=event.x#Cursor position
    y=event.y
    if not self.unpriv:
        self.x_priv, self.y_priv, self.tip_p = self.priv(x,y)#Try to snap
        p = self.tip_p #Snap type
        self.priv_coord = (self.x_priv, self.y_priv)
        # NOTE(review): `p != self.tip_p` is always False here since p was
        # just copied from self.tip_p - only the coordinate test matters.
        if x!=self.x_priv or y!=self.y_priv or p != self.tip_p: #Snap found
            self.tip_p = p
            x1 = self.x_priv
            y1 = self.y_priv
            r=self.size_simbol_p
            if p == 'r':#Endpoint snap: circle marker
                self.c.create_oval(x1-r,y1-r,x1+r,y1+r, outline = self.priv_color,width = 3, fill = None, tags = 'c1')
            elif p == 'c':#Midpoint snap: triangle marker
                self.c.create_line(x1-r,y1-r,x1+r,y1-r,fill=self.priv_color,width=3,tags='c1')
                self.c.create_line(x1-r,y1-r,x1,y1+r,fill=self.priv_color,width=3,tags='c1')
                self.c.create_line(x1,y1+r,x1+r,y1-r,fill=self.priv_color,width=3,tags='c1')
            elif p == 'X': #Intersection snap: X marker
                self.c.create_line(x1-r,y1-r,x1+r,y1+r,fill=self.priv_color,width=3,tags='c1')
                self.c.create_line(x1+r,y1-r,x1-r,y1+r,fill=self.priv_color,width=3,tags='c1')
            elif p == 'N': #Nearest-point snap: boxed X marker
                self.c.create_line(x1-r,y1-r,x1+r,y1+r,fill=self.priv_color,width=3,tags='c1')
                self.c.create_line(x1+r,y1-r,x1-r,y1+r,fill=self.priv_color,width=3,tags='c1')
                self.c.create_line(x1-r,y1-r,x1-r,y1+r,fill=self.priv_color,width=3,tags='c1')
                self.c.create_line(x1+r,y1-r,x1+r,y1+r,fill=self.priv_color,width=3,tags='c1')
            if self.resFlag == False:#Not drawing: a click edits the node
                #self.c.tag_unbind('sel', "<Button-1>")
                self.c.bind('<Button-1>', self.editEvent)
        else:#No snap: handle hover highlighting instead
            if not self.rect:
                el = get_object.get_obj(x, y, graf, 'all')
                if el and el not in ['trace', 'trace_o']:
                    if el == self.Old_sel:
                        pass
                    elif el != self.Old_sel and self.resFlag == False:
                        if self.Old_sel:#Un-highlight the previous hover target
                            self.c.delete('C'+self.Old_sel)
                            self.Old_sel = None
                        if el not in self.collection and el in self.ALLOBJECT:#Highlight the new one
                            select_clone.Select_clone((el,), graf)
                            self.Old_sel = el
                            #if self.resFlag == False:
                            self.c.bind('<Button-1>', self.collektor_sel)
                            self.c.bind('<Shift-Button-1>', self.collektor_desel)
                else:
                    if self.Old_sel:#Cursor over empty space: clear the hover highlight
                        self.c.delete('C'+self.Old_sel)
                        self.Old_sel = None
                    if self.resFlag == False:
                        self.c.bind('<Button-1>', self.lapping_sel)
                        self.c.bind('<Shift-Button-1>', self.lapping_desel)
        #Redraw the live preview of whichever command is active
        if any((self.edit_clone, self.move_clone, self.copy_clone, self.mirror_clone, self.rotate_clone, self.edit_dim_clone, self.line_clone, self.circle_clone, self.arc_clone, self.dim_clone, self.edit_dim_text_clone, self.dimR_clone, self.trim_dim_clone)):
            if len(self.collection) < 100:#Skip clearing for huge selections (perf)
                self.c.delete('temp')
            if self.edit_clone:
                self.curent_class.editEvent2()
            elif self.move_clone:
                self.curent_class.moveEvent3()
            elif self.copy_clone:
                self.curent_class.copyEvent3()
            elif self.mirror_clone:
                self.curent_class.mirrorEvent4()
            elif self.rotate_clone:
                self.curent_class.rotateEvent5()
            elif self.line_clone:
                self.curent_class.line2()
            elif self.circle_clone:
                self.curent_class.circle2()
            elif self.arc_clone:
                self.curent_class.arc3()
            elif self.dim_clone:
                self.curent_class.risDim4()
            elif self.edit_dim_text_clone:
                self.editDimTextPlace2()
            elif self.dimR_clone:
                self.curent_class.risDimR3()
            elif self.trim_dim_clone:
                self.curent_class.dim_conf()
        if self.trace_on:#Angle tracing helper
            trace.tracer(graf, self.trace_x1, self.trace_y1, self.trace_x2, self.trace_y2, self.snap_s, self.angle_s)
        if self.trace_obj_on:#Object tracing helper
            trace_object.tracer_obj(graf, self.priv_coord[0], self.priv_coord[1], self.snap_s)
    else:#Snapping disabled: pass the raw cursor position through
        self.x_priv, self.y_priv = x, y
        self.priv_coord = (self.x_priv, self.y_priv)
def priv(self, x, y, f = None):
    """Compute the snap point for cursor position (x, y).

    With f=None every primitive inside the snap square around (x, y) is
    examined; otherwise only primitive *f* is.  Returns (xi, yi, tip_p)
    where tip_p is 'r' (endpoint), 'c' (midpoint), 'X' (intersection),
    'N' (nearest point) or None (no snap - coordinates returned unchanged).
    """
    if f == None:#No explicit primitive: search the snap square
        find = list(self.c.find_overlapping(x-self.snap_s,y-self.snap_s,x+self.snap_s,y+self.snap_s))
        if self.rect:
            try:
                find.remove(self.rect)#Never snap to the rubber band itself
            except ValueError:
                pass
    else:
        find = [f]
    tip_p = None
    stopFlag = False#Becomes True once a snap point has been fixed
    xi=x#Default: return the incoming coordinates unchanged
    yi=y
    priv_coord_list = [] #Coordinates of snappable primitives seen so far
    ### Snapping to a single primitive ###
    for i in find:
        obj_tags = self.c.gettags(i)
        t = obj_tags[1]#Owning object id
        if t[0] == 'C' or 'temp' in obj_tags or 'text' in obj_tags:
            continue#Skip highlight clones, temp items and text
        tags = self.ALLOBJECT[t]['id'][i]#Role tags of this primitive
        if 'priv' in tags and 'line' in tags:#Snappable line
            xy = self.c.coords(i)
            priv_coord_list.append((xy,'line'))
            #Distances from the cursor to each endpoint
            ay1 = abs(y-xy[1])
            ay2 = abs(y-xy[3])
            ax1 = abs(x-xy[0])
            ax2 = abs(x-xy[2])
            if ax1<=ax2 and ax1<=self.snap_s and ay1<=self.snap_s: #or ay2<=self.snap_s):#First endpoint is closer and in range
                if ay1<=ay2 and ay1<self.snap_s:
                    yt=xy[1]
                    xt=xy[0]
                    tip_p = 'r'#Endpoint snap
                    self.find_privs.append(i)
                    if stopFlag == False:#Keep the first snap found
                        xi = xt
                        yi = yt
                        stopFlag = True
            elif ax1>=ax2 and ax2<=self.snap_s and ay2<=self.snap_s:#(ay1<=self.snap_s or ay2<=self.snap_s):#Second endpoint is closer and in range
                if ay1>=ay2 and ay2<=self.snap_s:
                    yt=xy[3]
                    xt=xy[2]
                    tip_p = 'r'
                    self.find_privs.append(i)
                    if stopFlag == False:
                        xi = xt
                        yi = yt
                        stopFlag = True
            else:#Neither endpoint matched - try the midpoint
                y0=xy[1]-((xy[1]-xy[3])/2.0)
                x0=xy[0]-((xy[0]-xy[2])/2.0)
                if abs(x-x0)<=self.snap_s and abs(y-y0)<=self.snap_s:
                    yt=y0
                    xt=x0
                    tip_p = 'c'#Midpoint snap
                    self.find_privs.append(i)
                    if stopFlag == False:
                        xi = xt
                        yi = yt
                        stopFlag = True
            if 'temp' in tags or 'cir_centr' in tags or 'a_centr' in tags:
                #Centre markers and temp items cancel any snap found above
                tip_p = None
                stopFlag = False
                xi=x
                yi=y
        elif 'priv' in tags and 'cir' in tags:#Snappable circle: quadrant points
            xy = self.c.coords(i)
            priv_coord_list.append((xy,'cir'))
            xc,yc,R = self.coord_circle(xy[0],xy[1],xy[2],xy[3])
            if abs(x - xc)<=self.snap_s:#Top/bottom quadrant points
                if abs(yc-R - y) <= self.snap_s:
                    xi = xc
                    yi = yc-R
                    tip_p = 'r'
                    stopFlag = True
                    self.find_privs.append(i)
                elif abs(yc+R - y) <= self.snap_s:
                    xi = xc
                    yi = yc+R
                    tip_p = 'r'
                    stopFlag = True
                    self.find_privs.append(i)
            elif abs(y - yc)<=self.snap_s:#Left/right quadrant points
                if abs(xc-R - x) <= self.snap_s:
                    xi = xc-R
                    yi = yc
                    tip_p = 'r'
                    stopFlag = True
                    self.find_privs.append(i)
                elif abs(xc+R - x) <= self.snap_s:
                    xi = xc+R
                    yi = yc
                    tip_p = 'r'
                    stopFlag = True
                    self.find_privs.append(i)
        elif 'priv' in tags and 'a' in tags:#Snappable arc: its two endpoints
            xy = self.c.coords(i)
            start = float(self.c.itemcget(i, 'start'))
            extent = float(self.c.itemcget(i, 'extent'))
            priv_coord_list.append((xy,'a'))
            xc, yc, dx1, dy1, dx2, dy2 = get_conf.get_arc_coord(xy[0],xy[1],xy[2],xy[3], start, extent)
            R = (xy[2]-xy[0])/2.0
            if abs(x - dx1)<=self.snap_s:#First arc endpoint
                if abs(y - dy1)<=self.snap_s:
                    xi = dx1
                    yi = dy1
                    tip_p = 'r'
                    stopFlag = True
                    self.find_privs.append(i)
            elif abs(x - dx2)<=self.snap_s:#Second arc endpoint
                if abs(y - dy2)<=self.snap_s:
                    xi = dx2
                    yi = dy2
                    tip_p = 'r'
                    stopFlag = True
                    self.find_privs.append(i)
    #Nearest-point snap - only while drawing and when nothing else matched
    if stopFlag == False and self.snap_near == True and self.resFlag == True and priv_coord_list:
        for i in priv_coord_list:
            xy = priv_coord_list[priv_coord_list.index(i)][0]
            if i[1] == 'line':
                xt, yt = calc.min_distanse(xy[0],xy[1],xy[2],xy[3], x,y)
                if xt:
                    xi = xt
                    yi = yt
                    tip_p = 'N'
                    break
            else:
                xc,yc,R = self.coord_circle(xy[0],xy[1],xy[2],xy[3])
                if i[1] == 'a':
                    xt,yt, d = calc.min_distanse_cir(xc, yc, R, x, y)
                    if d<=self.snap_s:
                        xi = xt#Use the nearest point on the arc
                        yi = yt
                        tip_p = 'N'
                        break
                elif i[1] == 'cir':
                    xt,yt, d = calc.min_distanse_cir(xc, yc, R, x, y)
                    if d<=self.snap_s:
                        xi = xt#Use the nearest point on the circle
                        yi = yt
                        tip_p = 'N'
                        break
    ### Snapping to the intersection of two primitives ###
    if len(priv_coord_list) > 1 and stopFlag == False:
        for i in priv_coord_list:
            ind = priv_coord_list.index(i)
            if ind == 0:#First element pairs with its successor
                ii = 1
            else:#Others pair with their predecessor
                ii = -1
            r = priv_coord_list[ind+ii]
            if i[1] == 'line' and r[1] == 'line':
                xt,yt = calc.intersection_l_l(i[0][0],i[0][1],i[0][2],i[0][3],r[0][0],r[0][1],r[0][2],r[0][3])
                if xt != None:#Intersection exists
                    if (abs(y-yt)<=self.snap_s) and (abs(x-xt)<=self.snap_s):#In snap range
                        if (xt != i[0][0] or yt != i[0][1]) and (xt != i[0][2] or yt != i[0][3]):#Not an endpoint of either line
                            if (xt != r[0][0] or yt != r[0][1]) and (xt != r[0][2] or yt != r[0][3]):
                                xi = xt
                                yi = yt
                                tip_p = 'X'#Intersection snap
                                break
            elif (i[1] == 'line' and r[1] in ['cir', 'a']) or (i[1] in ['cir', 'a'] and r[1] == 'line'):
                #Line with circle/arc intersection
                if i[1] == 'line':
                    line = i
                    circle = r
                else:
                    line = r
                    circle = i
                xc,yc,R = self.coord_circle(circle[0][0],circle[0][1],circle[0][2],circle[0][3])
                xt,yt = calc.intersection_l_c(xc, yc, R, line[0][0], line[0][1], line[0][2], line[0][3], x, y)
                if xt != None:
                    if (abs(y-yt)<=self.snap_s) and (abs(x-xt)<=self.snap_s):
                        xi = xt
                        yi = yt
                        tip_p = 'X'
                        break
            elif i[1] in ['cir', 'a'] and r[1] in ['cir', 'a']:
                #Circle/arc with circle/arc intersection
                xc1,yc1,R1 = self.coord_circle(i[0][0],i[0][1],i[0][2],i[0][3])
                xc2,yc2,R2 = self.coord_circle(r[0][0],r[0][1],r[0][2],r[0][3])
                xt, yt = calc.intersection_c_c(xc1, yc1, R1, xc2, yc2, R2, x, y)
                if xt != None:
                    if (abs(y-yt)<=self.snap_s) and (abs(x-xt)<=self.snap_s):
                        xi = xt
                        yi = yt
                        tip_p = 'X'
                        break
    if f == None: #Normal call: enable End-key cycling through candidates
        self.perebor_priv()
    return xi,yi,tip_p
def coord_circle(self, x1,y1,x2,y2):
    """Return (centre_x, centre_y, radius) of the circle whose bounding box
    is (x1, y1)-(x2, y2)."""
    centre_x = (x1 + x2) / 2.0
    centre_y = (y1 + y2) / 2.0
    radius = (x2 - x1) / 2.0
    return centre_x, centre_y, radius
def perebor_priv(self):
    """Enable End-key cycling when more than one snap candidate was found.

    find_privs holds the 't' cycle marker plus candidate primitive ids, so
    > 2 entries means at least two real candidates.
    """
    if len(self.find_privs)>2:
        self.c.bind_class(self.master1, "<End>", self.end_priv)
def end_priv(self, event):
    """Cycle the active snap among the candidate primitives (End key).

    The 't' marker in find_privs denotes the currently active candidate;
    it is moved one position forward, wrapping around at the end.
    """
    t_index = self.find_privs.index('t')
    if len(self.find_privs) == t_index+1:
        #Marker at the end: wrap around to the first candidate
        self.gpriv(x = self.x_priv, y = self.y_priv, f = self.find_privs[0])
        self.find_privs.remove('t')
        self.find_privs.insert(1, 't')
    else:
        self.gpriv(x = self.x_priv, y = self.y_priv, f = self.find_privs[t_index+1])
        self.find_privs.remove('t')
        self.find_privs.insert(t_index+1, 't')
def ort(self, event=None, color_only = None):
    """Toggle ortho mode, or with color_only set just repaint the toolbar
    button to match the current state (blue/red = on, white/black = off)."""
    if not color_only:
        self.ortoFlag = not self.ortoFlag
    if self.ortoFlag:
        self.button_orto.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
    else:
        self.button_orto.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
def trac(self, event=None, color_only = None):
    """Toggle angle tracing, or with color_only set just repaint the toolbar
    button (blue/red = on, white/black = off).  Any existing trace helper
    items are removed first in both cases.
    """
    if 'trace' in self.ALLOBJECT:
        self.c.delete('trace')
        del self.ALLOBJECT['trace']
    if 'trace_o' in self.ALLOBJECT:
        self.c.delete('trace_o')
        del self.ALLOBJECT['trace_o']
    if not color_only:
        if self.tracingFlag == True:
            self.tracingFlag = False
            self.trace_on = False
            self.button_trace.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
        else:
            self.tracingFlag=True
            self.button_trace.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
    else:
        if self.tracingFlag == False:
            self.button_trace.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
        else:
            self.button_trace.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
def trac_obj(self, event=None, color_only = None):
    """Toggle object tracing, or with color_only set just repaint the toolbar
    button (blue/red = on, white/black = off).  Any existing object-trace
    helper items are removed first in both cases.
    """
    if 'trace_o' in self.ALLOBJECT:
        self.c.delete('trace_o')
        del self.ALLOBJECT['trace_o']
    if not color_only:
        if self.tracing_obj_Flag == True:
            self.tracing_obj_Flag = False
            self.trace_obj_on = False
            self.button_trace_obj.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
        else:
            self.tracing_obj_Flag=True
            self.button_trace_obj.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
    else:
        if self.tracing_obj_Flag == False:
            self.button_trace_obj.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
        else:
            self.button_trace_obj.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
def snap_n(self, event = None, color_only = None):
    """Toggle nearest-point snapping, or with color_only set just repaint
    the toolbar button (blue/red = on, white/black = off)."""
    if not color_only:
        self.snap_near = not self.snap_near
    if self.snap_near:
        self.button_snap_N.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
    else:
        self.button_snap_N.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
def orto(self,x1,y1,x2,y2):
    """Snap segment (x1, y1)-(x2, y2) to the nearest axis: keep the larger
    of the horizontal/vertical extents and collapse the other."""
    if abs(x2 - x1) > abs(y2 - y1):
        # Mostly horizontal: flatten onto y1.
        return x2, y1
    # Mostly vertical (or tie): flatten onto x1.
    return x1, y2
def ortoRes(self,x1,y1,x2,y2):
    """Place the second point at distance self.com from (x1, y1) along the
    ortho axis, preserving the drag direction.  Assumes the segment is
    already axis-aligned (x2 == x1 means vertical)."""
    step = self.com
    if x2 == x1:
        # Vertical segment: move along y.
        if y1 > y2:
            step = -step
        y2 = y1 + step
    else:
        # Horizontal segment: move along x.
        if x1 > x2:
            step = -step
        x2 = x1 + step
    return x2,y2
#editor exit
def exitMethod(self):
    """Save changes, then either close the main window or (when donate()
    rolls a 3) leave it open while the donation dialog handles closing."""
    self.save_change()
    e = self.donate()
    if e != 3:
        self.master1.destroy()
#please, donate!
def d(self):
    """Open the 'Donate adress' window with selectable (read-only) payment
    details: bank card, Bitcoin address and PayPal account."""
    eroot = Toplevel()
    eroot.title('Donate adress')
    self.don = PhotoImage(file = os.path.join(self.appPath, 'res', 'don.gif'))
    val = '5213 2437 3660 6532'
    val2 = '1Kgect6s92fhRftHeuLVqgPJ1FYt7Lhee9'
    val3 = 'simonovsen@gmail.com'
    # Text widgets (not Labels) so the user can select and copy the values.
    l_donate = Text(eroot, relief = 'flat', height = 1, width = len(val), bg = 'light gray')
    l_donate11 = Label(eroot, text = 'Bank card:')
    l_donate12 = Label(eroot, text = 'Bitcoin adress:')
    l_donate13 = Label(eroot, text = 'PayPal account:')
    l_donate2 = Text(eroot, relief = 'flat', height = 1, width = len(val2), bg = 'light gray')
    l_donate3 = Text(eroot, relief = 'flat', height = 1, width = len(val3), bg = 'light gray')
    l_donate.insert(END, val)
    l_donate2.insert(END, val2)
    l_donate3.insert(END, val3)
    # Disable editing after inserting the values.
    l_donate.configure(state = DISABLED)
    l_donate2.configure(state = DISABLED)
    l_donate3.configure(state = DISABLED)
    l_donate11.grid(row=0, column = 0, sticky = 'w', padx = 3, pady = 3)
    l_donate.grid(row=0, column = 1, sticky = 'w', padx = 3, pady = 3)
    l_donate12.grid(row=1, column = 0, sticky = 'w', padx = 2, pady = 3)
    l_donate2.grid(row=1, column = 1, sticky = 'w', padx = 3, pady = 3)
    l_donate13.grid(row=2, column = 0, sticky = 'w', padx = 2, pady = 3)
    l_donate3.grid(row=2, column = 1, sticky = 'w', padx = 3, pady = 3)
    but = Button(eroot, text = 'OK', command = eroot.destroy)
    but.grid(row=3, column = 1, sticky = 'e', padx = 10, pady = 10)
def donate(self):
    """Occasionally (1-in-4 roll of randint(2, 5) == 3) show a donation
    dialog localized via the system locale; return the rolled value so the
    caller knows whether the dialog was shown."""
    e = randint(2, 5)
    if e == 3:
        eroot = Toplevel()
        eroot.title('Please, donate!')
        self.don = PhotoImage(file = os.path.join(self.appPath, 'res', 'don.gif'))
        eroot.tk.call('wm', 'iconphoto', eroot._w, self.don)
        eroot.resizable(width=FALSE, height=FALSE)
        from locale import getdefaultlocale
        lang = getdefaultlocale()
        # Pick the message language from the system locale (Russian vs rest).
        if lang[0][0:2] != 'ru':
            donate_text = '''
            SAMoCAD - open sours program,
            so developers want to eat.
            You can help the project.
            '''
            feed = 'Feed :-)'
            away = 'Get away from me!'
        else:
            donate_text = '''
            SAMoCAD - бесплатная програма,
            поэтому разработчики хотят кушать.
            Вы можете помочь проекту.
            '''
            feed = 'Накормить'
            away = 'Отстаньте от меня!'
        l_donate = Label(eroot, justify = LEFT, text = donate_text)
        self.imag = PhotoImage(file = os.path.join(self.appPath, 'res', 'icon3.gif'))
        but = Button(eroot, text = feed, command = self.d)
        but2 = Button(eroot, text = away, command = self.master1.destroy)
        ca = Canvas(eroot, width = 100, height = 100)
        ca.create_image(0,0,anchor=NW,image = self.imag)
        ca.grid(row=0, column = 0, rowspan = 2, padx = 5, pady = 5)
        l_donate.grid(row=0, column = 1,columnspan = 2, padx = 10, pady = 10)
        but.grid(row=1, column = 1, padx = 10, pady = 10)
        but2.grid(row=1, column = 2, padx = 10, pady = 10)
    return e
#OBJECT DRAWING - EVENTS
#TEMPORARY CONSTRUCTION LINES
def temp_lines(self, event):
    """First pick for a temporary cross marker: remember the picked line and
    prompt for the second one in the info bar."""
    self.oldinfo = self.info.cget('text')
    self.info.config(text = (self.oldinfo + ' Create temp lines - line 2'))
    Num = self.c.gettags(self.c.find_withtag('current'))[1]
    self.temp_collection.append(Num)
    #self.c.tag_bind('Line', '<Button-3>', self.temp_lines2)
def temp_lines2(self, event):
self.info.config(text = self.oldinfo)
Num = self.c.gettags(self.c.find_withtag('current'))[1]
self.temp_collection.append(Num)
stopFlag = False
if len(self.temp_collection) > 1:
for i in self.temp_collection:
if i not in self.ALLOBJECT:
stopFlag = True
if stopFlag == False:
c = map(lambda i: self.c.coords(self.c.find_withtag(i)[0]), self.temp_collection)
x, y = calc.intersection_stright(c[0][0],c[0][1],c[0][2],c[0][3],c[1][0],c[1][1],c[1][2],c[1][3])
if x != None:
self.c_line(x-5,y-5,x+5,y+5,fill='gray',width=1,sloy = 'temp', tip = 'temp')
self.temp_lines_list.append(self.Nline)
self.c_line(x+5,y-5,x-5,y+5,fill='gray',width=1,sloy = 'temp', tip = 'temp')
self.temp_lines_list.append(self.Nline)
self.c.bind_class(self.master1, "<Control-KeyPress-j>", self.del_temp_lines)
self.temp_collection = []
def del_temp_lines(self, event=None):
find = self.c.find_withtag('temp')
del_list = []
for i in find:
Num = self.c.gettags(i)[1]
del_list.append(Num)
if del_list:
self.delete(elements = del_list)
self.c.unbind_class(self.master1, "<Control-KeyPress-j>")
    # LINE
    def risLine(self):
        """Activate the interactive line-drawing tool."""
        self.curent_class = line.Line(graf)
    # DIMENSION
    def risDim(self):
        """Activate the interactive linear-dimension tool."""
        self.curent_class = dimension.Dimension(graf)
    def risDimR(self):
        """Activate the interactive radial-dimension tool."""
        self.curent_class = dimension.Dimension_R(graf)
    # TEXT
    def risText(self, event = None):
        """Activate the interactive text-placement tool."""
        self.curent_class = text_line.Text(graf)
    # OBJECT DRAWING METHODS
    # CIRCLE
    def risCircle(self):
        """Activate the interactive circle-drawing tool."""
        self.curent_class = circle.Circle(graf)
    # ARC
    def risArc(self):
        """Activate the interactive arc-drawing tool."""
        self.curent_class = arc.Arc(graf)
    # LINE
    def c_line(self, x1, y1, x2, y2, width = None, sloy = None, fill = None, stipple = None, factor_stip = None, tip = 'norm'):
        """Draw a line segment (x1, y1)-(x2, y2) programmatically (no mouse interaction).

        sloy is the layer name, tip the line type tag; stipple/factor_stip
        control dashing.  Delegates to line.c_line.
        """
        self.curent_class = line.c_line(graf, x1, y1, x2, y2, width, sloy, fill, stipple, factor_stip, tip)
def copy_line(self, content):
self.Nlined += 1
self.Nline = 'L' + str(self.Nlined)
self.ALLOBJECT[self.Nline] = self.ALLOBJECT[content].copy()
return self.Nline
    # LINEAR DIMENSION
    def dim(self,x1,y1,x2,y2,x3,y3,text=None, sloy = None,
                fill = None,
                size = None,
                ort = None,
                text_change = 'unchange',
                text_place = None,
                s=None,
                vv_s=None,
                vr_s = None,
                arrow_s = None,
                type_arrow = None,
                s_s = None,
                w_text = None,
                font = None):
        """Draw a linear dimension programmatically.

        (x1, y1)/(x2, y2) are the measured points, (x3, y3) places the
        dimension line; ort forces orientation, text overrides the measured
        value, the remaining parameters are style settings (arrow/extension
        sizes, fonts, ...).  All arguments are forwarded unchanged to
        dimension.c_dim, which performs the actual drawing.
        """
        self.curent_class = dimension.c_dim(self,x1,y1,x2,y2,x3,y3,text, sloy,
                fill,
                size,
                ort,
                text_change,
                text_place,
                s,
                vv_s,
                vr_s,
                arrow_s,
                type_arrow,
                s_s,
                w_text,
                font)
def get_snap_line(self, cont):#Находит в сложном объекте линии привязки
lines = []
if cont[0] in ('d', 'r'):
for i in self.ALLOBJECT[cont]['id']:
tag = self.ALLOBJECT[cont]['id'][i]
if 'priv' in tag:
if 'dim_text' not in tag and 'dimr_text' not in tag:
lines.append(i)
else:
for i in self.ALLOBJECT[cont]['id']:
tag = self.ALLOBJECT[cont]['id'][i]
if 'priv' in tag:
lines.append(i)
return lines
    # RADIAL DIMENSION
    def dimR(self,x1,y1,x2,y2, text=None, sloy = None,
                fill = None,
                size = None,
                s=None,
                vr_s = None,
                arrow_s = None,
                type_arrow = None,
                s_s = None,
                w_text = None,
                font = None,
                Rn = None):
        """Draw a radial dimension programmatically.

        (x1, y1) is the center, (x2, y2) the point on the curve; Rn is the
        radius value, text an optional label override, the rest are style
        settings.  All arguments are forwarded unchanged to dimension.c_dimR.
        """
        self.curent_class = dimension.c_dimR(self,x1,y1,x2,y2, text, sloy,
                fill,
                size,
                s,
                vr_s,
                arrow_s,
                type_arrow,
                s_s,
                w_text,
                font,
                Rn)
    # CIRCLE
    def c_circle(self,x0,y0,xr = None, yr = None, width = None, sloy = None, fill = None, R = None):
        """Draw a circle centered at (x0, y0) programmatically.

        The radius comes either from the rim point (xr, yr) or directly
        from R.  Delegates to circle.c_circle.
        """
        self.curent_class = circle.c_circle(graf, x0, y0, xr, yr, width, sloy, fill, R)
    # ARC
    def c_arc(self,x0,y0,xr1=None, yr1=None, xr2=None, yr2=None, width = None, sloy = None, fill = None, R = None, start = None, extent = None):
        """Draw an arc centered at (x0, y0) programmatically.

        Either two rim points (xr1, yr1)/(xr2, yr2) or R with start/extent
        angles define the arc.  Delegates to arc.c_arc.
        """
        self.curent_class = arc.c_arc(graf, x0,y0,xr1, yr1, xr2, yr2, width, sloy, fill, R, start, extent)
    # TEXT
    def c_text(self, x, y, text, anchor = 'sw', sloy = None, fill = None, angle = 0, size = None, s_s = None, w_text = None, font = None): # Text - rendering
        """Draw a text item at (x, y) programmatically; delegates to text_line.c_text."""
        self.curent_class = text_line.c_text(graf, x, y, text, anchor, sloy, fill, angle, size, s_s, w_text, font)
    # Print the picture to PostScript
    def print_postScript(self, event = None):
        """Export the canvas as PostScript; delegates to print_ps.Print_PS."""
        self.curent_class = print_ps.Print_PS(graf)
def enumerator_p(self):
self.enumerator +=1
if self.enumerator == self.auto_save_step:
self.enumerator = 0
self.fileCurSave()
def exportDXF(self):
self.s_dxf = True
self.fileSave()
self.s_dxf = False
    def fileSave(self):
        """'Save as' dialog: write the drawing to SVG/TXT, or DXF when s_dxf is set.

        Before serializing, the accumulated zoom is undone and the drawing
        is panned so the origin marker sits at (10, 10) - coordinates are
        saved at 1:1 scale.  The transform is restored afterwards.
        """
        opt = options = {}  # two aliases for the same dict
        if self.s_dxf == False:
            options['defaultextension'] = '.svg'
            options['filetypes'] = [('SVG files', '.svg'),
                                    ('text files', '.txt'),
                                    ('all files', '.*')]
        else:
            options['defaultextension'] = '.dxf'
            options['filetypes'] = [('text files', '.dxf'),
                                    ('all files', '.*')]
        options['initialdir'] = self.appPath
        options['initialfile'] = 'draft_1'
        options['parent'] = self.master1
        options['title'] = 'Save file'
        f = tkFileDialog.asksaveasfile(mode='w', **opt)
        if f:
            # Undo the zoom: zoomm/zoomp are the module-level zoom factors.
            if self.zoomOLD != 0:
                if self.zoomOLD>0:
                    self.c.scale('obj',0,0,zoomm**self.zoomOLD,zoomm**self.zoomOLD)
                else:
                    zoomOLDx=self.zoomOLD*(-1)
                    self.c.scale('obj',0,0,zoomp**zoomOLDx,zoomp**zoomOLDx)
            # Pan so the origin marker (nachCoordy) lands at (10, 10).
            xynach=self.c.coords(self.nachCoordy)
            dx=-xynach[0]
            dy=-xynach[1]
            self.c.move('obj',dx+10,dy+10)
            if self.s_dxf == False:
                #save = save_file.saver(graf)
                save = to_svg.Svger(graf)
                for i in save.write_list:
                    #if i[:8] == 'self.c_t' or i[:8] == 'self.dim':
                        #f.write(codecs.BOM_UTF8)
                    f.writelines("%s\n" % i)#.encode("utf8"))
                f.close()
                self.saveFlag = True
                self.changeFlag = False
                self.current_file = f.name
                self.master1.title(self.prog_version + ' - ' + self.current_file)
            else:
                # NOTE(review): the DXF branch never calls f.close() and does
                # not update saveFlag/current_file - confirm whether intended.
                save = to_dxf.Dxfer(graf)
                for i in save.write_list:
                    f.writelines("%s\n" % i)
            # Restore pan and zoom.
            # NOTE(review): the pan-back is guarded by zoomOLD != 0 while the
            # pan-out above is unconditional - when zoomOLD == 0 the view may
            # stay shifted; verify against actual usage.
            if self.zoomOLD != 0:
                self.c.move('obj',-dx-10,-dy-10)
                if self.zoomOLD>0:
                    self.c.scale('obj',0,0,zoomp**self.zoomOLD,zoomp**self.zoomOLD)
                else:
                    zoomOLDx=self.zoomOLD*(-1)
                    self.c.scale('obj',0,0,zoomm**zoomOLDx,zoomm**zoomOLDx)
    def fileCurSave(self):
        """Save to the current file (making a .bak copy first); 'Save as' if never saved.

        Like fileSave(), temporarily resets zoom/pan so coordinates are
        written at 1:1 scale, then restores the view.
        """
        if self.saveFlag == False:
            self.fileSave()
        else:
            # Keep a backup of the previous version next to the file.
            back_file = self.current_file[0:-4]+'.bak'
            try:
                copyfile(self.current_file, back_file)
            except IOError:
                print ('Error Back file')
            f = open(self.current_file, 'w')
            # Undo accumulated zoom (zoomm/zoomp are module-level factors).
            if self.zoomOLD != 0:
                if self.zoomOLD>0:
                    self.c.scale('obj',0,0,zoomm**self.zoomOLD,zoomm**self.zoomOLD)
                else:
                    zoomOLDx=self.zoomOLD*(-1)
                    self.c.scale('obj',0,0,zoomp**zoomOLDx,zoomp**zoomOLDx)
            # Pan so the origin marker lands at (10, 10).
            xynach=self.c.coords(self.nachCoordy)
            dx=-xynach[0]
            dy=-xynach[1]
            self.c.move('obj',dx+10,dy+10)
            fileName, fileExt = os.path.splitext(f.name)
            if fileExt == '.svg':
                save = to_svg.Svger(graf)
                for i in save.write_list:
                    f.writelines("%s\n" % i)
            elif fileExt == '.txt':
                # NOTE(review): BOM-per-line plus .encode('utf8') into a text
                # handle is Python-2-specific; under Python 3 this writes the
                # bytes repr - confirm target interpreter.
                save = save_file.saver(graf)
                for i in save.write_list:
                    if i[:8] == 'self.c_t' or i[:8] == 'self.dim':
                        f.write(codecs.BOM_UTF8)
                    f.writelines("%s\n" % i.encode("utf8"))
            '''
            save = save_file.saver(graf)
            for i in save.write_list:
                if i[:8] == 'self.c_t' or i[:8] == 'self.dim':
                    f.write(codecs.BOM_UTF8)
                f.writelines("%s\n" % i.encode("utf8"))
            '''
            f.close()
            self.changeFlag = False
            # Restore pan and zoom (only when a zoom was active).
            if self.zoomOLD != 0:
                self.c.move('obj',-dx-10,-dy-10)
                if self.zoomOLD>0:
                    self.c.scale('obj',0,0,zoomp**self.zoomOLD,zoomp**self.zoomOLD)
                else:
                    zoomOLDx=self.zoomOLD*(-1)
                    self.c.scale('obj',0,0,zoomm**zoomOLDx,zoomm**zoomOLDx)
def new(self, event = None):
self.save_change()
self.saveFlag = False
self.changeFlag = False
self.current_file = 'New draft'
self.master1.title(self.prog_version + ' - ' + self.current_file)
self.delete(elements = self.ALLOBJECT.keys())
self.sbros_all()
def sbros_all(self):
self.collection = []
self.collectionBack = []
self.history_undo = []
def save_change(self):
if self.ALLOBJECT and self.changeFlag == True:
save_yes_no = tkMessageBox.askyesno('Save draft?', 'Save drawing?')
if save_yes_no == True:
self.fileCurSave()
def importDXF(self):
self.s_dxf = True
self.fileOpen()
self.s_dxf = False
    def fileOpen(self, event = None):
        """Open a drawing (SVG/TXT, or DXF when s_dxf is set) and rebuild the canvas.

        The file is converted into a list of drawing commands which are
        executed to recreate the objects.  The current view transform is
        reset to 1:1 while loading and restored afterwards.
        """
        self.save_change()
        opt = options = {}  # two aliases for the same dict
        if self.s_dxf == False:
            options['defaultextension'] = '.svg'
            options['filetypes'] = [('SVG files', '.svg'),
                                    ('text files', '.txt'),
                                    ('all files', '.*')]
            options['title'] = 'Open file'
        else:
            options['defaultextension'] = '.dxf'
            options['filetypes'] = [('DXF files', '.dxf'),
                                    ('all files', '.*')]
            options['title'] = 'Import from DXF'
        options['initialdir'] = self.appPath
        options['parent'] = self.master1
        f = tkFileDialog.askopenfile(**opt)
        if f:
            # Clear the current drawing.
            # NOTE(review): passing a live keys view to delete() can raise
            # RuntimeError on Python 3 if delete() mutates ALLOBJECT -
            # consider list(...) here as elsewhere.
            if self.ALLOBJECT:
                self.delete(elements = self.ALLOBJECT.keys())
            # Undo the zoom so loaded coordinates land at 1:1 scale.
            if self.zoomOLD != 0:
                if self.zoomOLD>0:
                    self.c.scale('obj',0,0,zoomm**self.zoomOLD,zoomm**self.zoomOLD)
                else:
                    zoomOLDx=self.zoomOLD*(-1)
                    self.c.scale('obj',0,0,zoomp**zoomOLDx,zoomp**zoomOLDx)
            # Remember and neutralize the zoom level while commands execute.
            zoomOLD = self.zoomOLD
            self.zoomOLD = 0
            # Pan so the origin marker sits at (10, 10).
            xynach=self.c.coords(self.nachCoordy)
            dx=-xynach[0]
            dy=-xynach[1]
            self.c.move('obj',dx+10,dy+10)
            if self.s_dxf == False:
                fileName, fileExt = os.path.splitext(f.name)
                if fileExt == '.svg':
                    text = f.read()
                    SVG = from_svg.SVGopener(text, graf)
                    list_command = SVG.command_list
                elif fileExt == '.txt':
                    # .txt files already contain python drawing commands.
                    list_command = f.readlines()
            else:
                text = f.read()
                DXF = from_dxf.DXFopener(text)
                list_command = DXF.command_list
            if list_command:
                errors = ''
                # NOTE(review): exec of file-supplied text runs arbitrary
                # code - opening an untrusted drawing executes it.
                for i in list_command:
                    try:
                        exec(i)
                    except:
                        # Collect failing commands; report them all at the end.
                        errors += (i+'\n')
                        continue
                if errors:
                    print ('Errors in opened file!')
                    print ('___________________________')
                    print (errors)
                    print ('___________________________')
            f.close()
            if self.s_dxf == False:
                # Native file: remember it as the current save target.
                self.saveFlag = True
                self.changeFlag = False
                self.current_file = f.name
                self.master1.title(self.prog_version + ' - ' + self.current_file)
            else:
                # Imported DXF: treat as a new, unsaved draft.
                self.saveFlag = False
                self.changeFlag = True
                self.current_file = 'New draft'
                self.master1.title(self.prog_version + ' - ' + self.current_file)
            self.sbros_all()
            # Restore pan and the remembered zoom level.
            self.c.move('obj',-dx-10,-dy-10)
            self.zoomOLD = zoomOLD
            if zoomOLD != 0:
                if zoomOLD>0:
                    self.c.scale('obj',0,0,zoomp**zoomOLD,zoomp**zoomOLD)
                else:
                    zoomOLDx=zoomOLD*(-1)
                    self.c.scale('obj',0,0,zoomm**zoomOLDx,zoomm**zoomOLDx)
    def zoomP(self,x,y):
        """Zoom in one step around canvas point (x, y); zoomp is the module-level factor."""
        self.c.scale('obj',x,y,zoomp,zoomp)
    def zoomM(self,x,y):
        """Zoom out one step around canvas point (x, y); zoomm is the module-level factor."""
        self.c.scale('obj',x,y,zoomm,zoomm)
    def zoommerP(self):
        """Zoom in one step around the viewport center, tracking the zoom level.

        Crossing level -19 on the way up re-enables level-of-detail text
        ('t_LOD') and restores the snap-text stipple.
        """
        x=self.frame1.winfo_width()/2.0
        y=self.frame1.winfo_height()/2.0
        self.zoomOLD += 1
        if self.zoomOLD == -19:
            self.c.itemconfig('t_LOD', state = 'normal')
            self.c.itemconfig('snap_text', stipple = ('@'+os.path.join(self.appPath, 'res', '00.xbm')))
        self.c.scale('obj',x,y,zoomp,zoomp)
    def zoommerM(self):
        """Zoom out one step around the viewport center, tracking the zoom level.

        Reaching level -20 hides level-of-detail text ('t_LOD') and clears
        the snap-text stipple.
        """
        x=self.frame1.winfo_width()/2.0
        y=self.frame1.winfo_height()/2.0
        self.zoomOLD -= 1
        if self.zoomOLD ==-20:
            self.c.itemconfig('t_LOD', state = 'hidden')
            self.c.itemconfig('snap_text', stipple = '')
        self.c.scale('obj',x,y,zoomm,zoomm)
    def Mzoommer(self,event):
        """Mouse-wheel zoom handler, centered on the current snap point.

        event.delta > 0 (Windows) or event.num == 4 (X11 Button-4) means
        zoom in; anything else zooms out.  Duplicates the level-of-detail
        logic of zoommerP/zoommerM but around self.priv_coord rather than
        the viewport center.
        """
        x = self.priv_coord[0]#event.x
        y = self.priv_coord[1]#event.y
        #if x<0:
            #x = -x
        #if y<0:
        #    y = -y
        if event.delta > 0 or event.num == 4:
            self.zoomOLD += 1
            # Crossing -19 upward re-enables LOD text and the snap stipple.
            if self.zoomOLD == -19:
                self.c.itemconfig('t_LOD', state = 'normal')
                self.c.itemconfig('snap_text', stipple = ('@'+os.path.join(self.appPath, 'res', '00.xbm')))
            self.c.scale('obj',x,y,zoomp,zoomp)
            #self.zoomP(x,y)
        else:
            self.zoomOLD -= 1
            # Reaching -20 hides LOD text and clears the stipple.
            if self.zoomOLD ==-20:
                self.c.itemconfig('t_LOD', state = 'hidden')
                self.c.itemconfig('snap_text', stipple = '')
            self.c.scale('obj',x,y,zoomm,zoomm)
            #self.zoomM(x,y)
def mouseMove(self,event):
global x1,y1
self.c.move('obj', event.x - x1, event.y - y1)
x1 = event.x
y1 = event.y
def OnMouseMove(self,event):
global x1,y1
x1 = event.x
y1 = event.y
# Application entry point: build the Tk root window, create the drawing
# application object and enter the event loop.
# NOTE(review): this runs at import time; consider guarding with
# `if __name__ == '__main__':`.
root = Tk()
graf=Graphics()
graf.initial(root)
root.mainloop()
| codeparrot/github-code-clean |
import collections
from sympy import (
Abs, E, Float, I, Integer, Max, Min, N, Poly, Pow, PurePoly, Rational,
S, Symbol, cos, exp, oo, pi, signsimp, simplify, sin, sqrt, symbols,
sympify, trigsimp, sstr)
from sympy.matrices.matrices import (ShapeError, MatrixError,
NonSquareMatrixError, DeferredVector)
from sympy.matrices import (
GramSchmidt, ImmutableMatrix, ImmutableSparseMatrix, Matrix,
SparseMatrix, casoratian, diag, eye, hessian,
matrix_multiply_elementwise, ones, randMatrix, rot_axis1, rot_axis2,
rot_axis3, wronskian, zeros)
from sympy.core.compatibility import long, iterable, u, range
from sympy.utilities.iterables import flatten, capture
from sympy.utilities.pytest import raises, XFAIL, slow, skip
from sympy.abc import x, y, z
# don't re-order this list
classes = (Matrix, SparseMatrix, ImmutableMatrix, ImmutableSparseMatrix)
def test_args():
    """Every matrix class reports shape/rows/cols as plain ints and uses its expected storage."""
    for c, cls in enumerate(classes):
        m = cls.zeros(3, 2)
        # all should give back the same type of arguments, e.g. ints for shape
        assert m.shape == (3, 2) and all(type(i) is int for i in m.shape)
        assert m.rows == 3 and type(m.rows) is int
        assert m.cols == 2 and type(m.cols) is int
        if not c % 2:
            # dense classes (even positions in `classes`) store a flat list
            assert type(m._mat) is list
        else:
            # sparse classes store a dict of nonzero entries
            assert type(m._smat) is dict
def test_division():
    """Dividing a matrix by a scalar divides every entry."""
    m = Matrix(1, 2, [x, y])
    expected = Matrix(1, 2, [x/z, y/z])
    assert m.__div__(z) == expected
    assert m.__truediv__(z) == expected
    assert m/z == expected
def test_sum():
    """Matrix addition is element-wise; mismatched shapes raise ShapeError."""
    m = Matrix([[1, 2, 3], [x, y, x], [2*y, -50, z*x]])
    assert m + m == Matrix([[2, 4, 6], [2*x, 2*y, 2*x], [4*y, -100, 2*z*x]])
    mismatched = Matrix(1, 2, [1, 2])
    raises(ShapeError, lambda: m + mismatched)
def test_addition():
    """`+` and .add() agree and add element-wise."""
    a = Matrix((
        (1, 2),
        (3, 1),
    ))
    b = Matrix((
        (1, 2),
        (3, 0),
    ))
    assert a + b == a.add(b) == Matrix([[2, 4], [6, 1]])
def test_fancy_index_matrix():
    """Fancy indexing (lists of row/col indices, slices, extract) on dense and sparse matrices."""
    for M in (Matrix, SparseMatrix):
        a = M(3, 3, range(9))
        assert a == a[:, :]
        assert a[1, :] == Matrix(1, 3, [3, 4, 5])
        assert a[:, 1] == Matrix([1, 4, 7])
        assert a[[0, 1], :] == Matrix([[0, 1, 2], [3, 4, 5]])
        assert a[[0, 1], 2] == a[[0, 1], [2]]
        assert a[2, [0, 1]] == a[[2], [0, 1]]
        assert a[:, [0, 1]] == Matrix([[0, 1], [3, 4], [6, 7]])
        assert a[0, 0] == 0
        assert a[0:2, :] == Matrix([[0, 1, 2], [3, 4, 5]])
        assert a[:, 0:2] == Matrix([[0, 1], [3, 4], [6, 7]])
        assert a[::2, 1] == a[[0, 2], 1]
        assert a[1, ::2] == a[1, [0, 2]]
        a = M(3, 3, range(9))
        # index lists may repeat and reorder rows/columns
        assert a[[0, 2, 1, 2, 1], :] == Matrix([
            [0, 1, 2],
            [6, 7, 8],
            [3, 4, 5],
            [6, 7, 8],
            [3, 4, 5]])
        assert a[:, [0,2,1,2,1]] == Matrix([
            [0, 2, 1, 2, 1],
            [3, 5, 4, 5, 4],
            [6, 8, 7, 8, 7]])
    # extract() on a sparse matrix with explicit assignments
    a = SparseMatrix.zeros(3)
    a[1, 2] = 2
    a[0, 1] = 3
    a[2, 0] = 4
    assert a.extract([1, 1], [2]) == Matrix([
        [2],
        [2]])
    assert a.extract([1, 0], [2, 2, 2]) == Matrix([
        [2, 2, 2],
        [0, 0, 0]])
    assert a.extract([1, 0, 1, 2], [2, 0, 1, 0]) == Matrix([
        [2, 0, 0, 0],
        [0, 0, 3, 0],
        [2, 0, 0, 0],
        [0, 4, 0, 4]])
def test_multiplication():
    """Matrix product, element-wise product, and scalar multiplication (both sides)."""
    a = Matrix((
        (1, 2),
        (3, 1),
        (0, 6),
    ))
    b = Matrix((
        (1, 2),
        (3, 0),
    ))
    c = a*b
    assert c[0, 0] == 7
    assert c[0, 1] == 2
    assert c[1, 0] == 6
    assert c[1, 1] == 6
    assert c[2, 0] == 18
    assert c[2, 1] == 0
    # element-wise (Hadamard) product - shapes must match exactly
    h = matrix_multiply_elementwise(a, c)
    assert h == a.multiply_elementwise(c)
    assert h[0, 0] == 7
    assert h[0, 1] == 4
    assert h[1, 0] == 18
    assert h[1, 1] == 6
    assert h[2, 0] == 0
    assert h[2, 1] == 0
    raises(ShapeError, lambda: matrix_multiply_elementwise(a, b))
    # scalar multiplication, symbolic and numeric, left and right
    c = b * Symbol("x")
    assert isinstance(c, Matrix)
    assert c[0, 0] == x
    assert c[0, 1] == 2*x
    assert c[1, 0] == 3*x
    assert c[1, 1] == 0
    c2 = x * b
    assert c == c2
    c = 5 * b
    assert isinstance(c, Matrix)
    assert c[0, 0] == 5
    assert c[0, 1] == 2*5
    assert c[1, 0] == 3*5
    assert c[1, 1] == 0
def test_power():
    """Integer, negative and fractional matrix powers; non-square matrices raise."""
    raises(NonSquareMatrixError, lambda: Matrix((1, 2))**2)
    R = Rational
    A = Matrix([[2, 3], [4, 5]])
    assert (A**-3)[:] == [R(-269)/8, R(153)/8, R(51)/2, R(-29)/2]
    assert (A**5)[:] == [6140, 8097, 10796, 14237]
    A = Matrix([[2, 1, 3], [4, 2, 4], [6, 12, 1]])
    assert (A**3)[:] == [290, 262, 251, 448, 440, 368, 702, 954, 433]
    assert A**0 == eye(3)
    assert A**1 == A
    assert (Matrix([[2]]) ** 100)[0, 0] == 2**100
    assert eye(2)**10000000 == eye(2)
    assert Matrix([[1, 2], [3, 4]])**Integer(2) == Matrix([[7, 10], [15, 22]])
    # fractional powers: matrix square roots
    A = Matrix([[33, 24], [48, 57]])
    assert (A**(S(1)/2))[:] == [5, 2, 4, 7]
    A = Matrix([[0, 4], [-1, 5]])
    assert (A**(S(1)/2))**2 == A
def test_creation():
    """Matrix constructors: validation errors, empty matrices, nesting and conversions."""
    raises(ValueError, lambda: Matrix(5, 5, range(20)))
    raises(IndexError, lambda: Matrix((1, 2))[2])
    with raises(IndexError):
        Matrix((1, 2))[1:2] = 5
    with raises(IndexError):
        Matrix((1, 2))[3] = 5
    assert Matrix() == Matrix([]) == Matrix([[]]) == Matrix(0, 0, [])
    a = Matrix([[x, 0], [0, 0]])
    m = a
    assert m.cols == m.rows
    assert m.cols == 2
    assert m[:] == [x, 0, 0, 0]
    b = Matrix(2, 2, [x, 0, 0, 0])
    m = b
    assert m.cols == m.rows
    assert m.cols == 2
    assert m[:] == [x, 0, 0, 0]
    assert a == b
    assert Matrix(b) == b
    # a Matrix may be built from a mix of matrices and row tuples
    c = Matrix((
        Matrix((
            (1, 2, 3),
            (4, 5, 6)
        )),
        (7, 8, 9)
    ))
    assert c.cols == 3
    assert c.rows == 3
    assert c[:] == [1, 2, 3, 4, 5, 6, 7, 8, 9]
    # conversions between mutable and immutable classes
    assert Matrix(eye(2)) == eye(2)
    assert ImmutableMatrix(ImmutableMatrix(eye(2))) == ImmutableMatrix(eye(2))
    assert ImmutableMatrix(c) == c.as_immutable()
    assert Matrix(ImmutableMatrix(c)) == ImmutableMatrix(c).as_mutable()
    assert c is not Matrix(c)
def test_tolist():
    """tolist() round-trips the nested list used to build the matrix."""
    lst = [[S.One, S.Half, x*y, S.Zero], [x, y, z, x**2], [y, -S.One, z*x, 3]]
    m = Matrix(lst)
    assert m.tolist() == lst
def test_as_mutable():
    """as_mutable/as_immutable convert between matrix classes, even for 0-row shapes."""
    assert zeros(0, 3).as_mutable() == zeros(0, 3)
    assert zeros(0, 3).as_immutable() == ImmutableMatrix(zeros(0, 3))
def test_determinant():
    """Determinants via the 'bareis' and 'berkowitz' methods agree on a spread of matrices."""
    # empty and 1x1 matrices: every method returns 1
    for M in [Matrix(), Matrix([[1]])]:
        assert (
            M.det() ==
            M.det_bareis() ==
            M.berkowitz_det() ==
            M.det_LU_decomposition() ==
            1)
    M = Matrix(( (-3, 2),
                 ( 8, -5) ))
    assert M.det(method="bareis") == -1
    assert M.det(method="berkowitz") == -1
    M = Matrix(( (x, 1),
                 (y, 2*y) ))
    assert M.det(method="bareis") == 2*x*y - y
    assert M.det(method="berkowitz") == 2*x*y - y
    M = Matrix(( (1, 1, 1),
                 (1, 2, 3),
                 (1, 3, 6) ))
    assert M.det(method="bareis") == 1
    assert M.det(method="berkowitz") == 1
    M = Matrix(( ( 3, -2, 0, 5),
                 (-2, 1, -2, 2),
                 ( 0, -2, 5, 0),
                 ( 5, 0, 3, 4) ))
    assert M.det(method="bareis") == -289
    assert M.det(method="berkowitz") == -289
    # singular matrix: determinant 0
    M = Matrix(( ( 1, 2, 3, 4),
                 ( 5, 6, 7, 8),
                 ( 9, 10, 11, 12),
                 (13, 14, 15, 16) ))
    assert M.det(method="bareis") == 0
    assert M.det(method="berkowitz") == 0
    M = Matrix(( (3, 2, 0, 0, 0),
                 (0, 3, 2, 0, 0),
                 (0, 0, 3, 2, 0),
                 (0, 0, 0, 3, 2),
                 (2, 0, 0, 0, 3) ))
    assert M.det(method="bareis") == 275
    assert M.det(method="berkowitz") == 275
    M = Matrix(( (1, 0, 1, 2, 12),
                 (2, 0, 1, 1, 4),
                 (2, 1, 1, -1, 3),
                 (3, 2, -1, 1, 8),
                 (1, 1, 1, 0, 6) ))
    assert M.det(method="bareis") == -55
    assert M.det(method="berkowitz") == -55
    M = Matrix(( (-5, 2, 3, 4, 5),
                 ( 1, -4, 3, 4, 5),
                 ( 1, 2, -3, 4, 5),
                 ( 1, 2, 3, -2, 5),
                 ( 1, 2, 3, 4, -1) ))
    assert M.det(method="bareis") == 11664
    assert M.det(method="berkowitz") == 11664
    M = Matrix(( ( 2, 7, -1, 3, 2),
                 ( 0, 0, 1, 0, 1),
                 (-2, 0, 7, 0, 2),
                 (-3, -2, 4, 5, 3),
                 ( 1, 0, 0, 0, 1) ))
    assert M.det(method="bareis") == 123
    assert M.det(method="berkowitz") == 123
    # symbolic entries
    M = Matrix(( (x, y, z),
                 (1, 0, 0),
                 (y, z, x) ))
    assert M.det(method="bareis") == z**2 - x*y
    assert M.det(method="berkowitz") == z**2 - x*y
def test_det_LU_decomposition():
    """The 'det_LU' method gives the same determinants as test_determinant's cases."""
    for M in [Matrix(), Matrix([[1]])]:
        assert M.det(method="det_LU") == 1
    M = Matrix(( (-3, 2),
                 ( 8, -5) ))
    assert M.det(method="det_LU") == -1
    M = Matrix(( (x, 1),
                 (y, 2*y) ))
    assert M.det(method="det_LU") == 2*x*y - y
    M = Matrix(( (1, 1, 1),
                 (1, 2, 3),
                 (1, 3, 6) ))
    assert M.det(method="det_LU") == 1
    M = Matrix(( ( 3, -2, 0, 5),
                 (-2, 1, -2, 2),
                 ( 0, -2, 5, 0),
                 ( 5, 0, 3, 4) ))
    assert M.det(method="det_LU") == -289
    M = Matrix(( (3, 2, 0, 0, 0),
                 (0, 3, 2, 0, 0),
                 (0, 0, 3, 2, 0),
                 (0, 0, 0, 3, 2),
                 (2, 0, 0, 0, 3) ))
    assert M.det(method="det_LU") == 275
    M = Matrix(( (1, 0, 1, 2, 12),
                 (2, 0, 1, 1, 4),
                 (2, 1, 1, -1, 3),
                 (3, 2, -1, 1, 8),
                 (1, 1, 1, 0, 6) ))
    assert M.det(method="det_LU") == -55
    M = Matrix(( (-5, 2, 3, 4, 5),
                 ( 1, -4, 3, 4, 5),
                 ( 1, 2, -3, 4, 5),
                 ( 1, 2, 3, -2, 5),
                 ( 1, 2, 3, 4, -1) ))
    assert M.det(method="det_LU") == 11664
    M = Matrix(( ( 2, 7, -1, 3, 2),
                 ( 0, 0, 1, 0, 1),
                 (-2, 0, 7, 0, 2),
                 (-3, -2, 4, 5, 3),
                 ( 1, 0, 0, 0, 1) ))
    assert M.det(method="det_LU") == 123
    M = Matrix(( (x, y, z),
                 (1, 0, 0),
                 (y, z, x) ))
    assert M.det(method="det_LU") == z**2 - x*y
def test_berkowitz_minors():
    """berkowitz_minors() returns the sequence of leading principal minors."""
    B = Matrix(2, 2, [1, 2, 2, 1])
    assert B.berkowitz_minors() == (1, -3)
def test_slicing():
    """2-D slicing, including negative indices/slices, returns sub-matrices."""
    m0 = eye(4)
    assert m0[:3, :3] == eye(3)
    assert m0[2:4, 0:2] == zeros(2)
    m1 = Matrix(3, 3, lambda i, j: i + j)
    assert m1[0, :] == Matrix(1, 3, (0, 1, 2))
    assert m1[1:3, 1] == Matrix(2, 1, (2, 3))
    m2 = Matrix([[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]])
    assert m2[:, -1] == Matrix(4, 1, [3, 7, 11, 15])
    assert m2[-2:, :] == Matrix([[8, 9, 10, 11], [12, 13, 14, 15]])
def test_submatrix_assignment():
    """Assigning matrices/lists into slices updates exactly the targeted region."""
    m = zeros(4)
    m[2:4, 2:4] = eye(2)
    assert m == Matrix(((0, 0, 0, 0),
                        (0, 0, 0, 0),
                        (0, 0, 1, 0),
                        (0, 0, 0, 1)))
    m[:2, :2] = eye(2)
    assert m == eye(4)
    m[:, 0] = Matrix(4, 1, (1, 2, 3, 4))
    assert m == Matrix(((1, 0, 0, 0),
                        (2, 1, 0, 0),
                        (3, 0, 1, 0),
                        (4, 0, 0, 1)))
    m[:, :] = zeros(4)
    assert m == zeros(4)
    # plain nested sequences are accepted, too
    m[:, :] = [(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16)]
    assert m == Matrix(((1, 2, 3, 4),
                        (5, 6, 7, 8),
                        (9, 10, 11, 12),
                        (13, 14, 15, 16)))
    m[:2, 0] = [0, 0]
    assert m == Matrix(((0, 2, 3, 4),
                        (0, 6, 7, 8),
                        (9, 10, 11, 12),
                        (13, 14, 15, 16)))
def test_extract():
    """extract() picks (possibly repeated) rows/columns; out-of-range indices raise."""
    m = Matrix(4, 3, lambda i, j: i*3 + j)
    assert m.extract([0, 1, 3], [0, 1]) == Matrix(3, 2, [0, 1, 3, 4, 9, 10])
    assert m.extract([0, 3], [0, 0, 2]) == Matrix(2, 3, [0, 0, 2, 9, 9, 11])
    assert m.extract(range(4), range(3)) == m
    raises(IndexError, lambda: m.extract([4], [0]))
    raises(IndexError, lambda: m.extract([0], [3]))
def test_reshape():
    """reshape() preserves row-major element order while changing dimensions."""
    m0 = eye(3)
    assert m0.reshape(1, 9) == Matrix(1, 9, (1, 0, 0, 0, 1, 0, 0, 0, 1))
    m1 = Matrix(3, 4, lambda i, j: i + j)
    assert m1.reshape(
        4, 3) == Matrix(((0, 1, 2), (3, 1, 2), (3, 4, 2), (3, 4, 5)))
    assert m1.reshape(2, 6) == Matrix(((0, 1, 2, 3, 1, 2), (3, 4, 2, 3, 4, 5)))
def test_applyfunc():
    """applyfunc() maps a callable over every entry."""
    m0 = eye(3)
    assert m0.applyfunc(lambda x: 2*x) == eye(3)*2
    assert m0.applyfunc(lambda x: 0) == zeros(3)
def test_expand():
    """expand() distributes products entry-wise; also checks complex expand and exp()."""
    m0 = Matrix([[x*(x + y), 2], [((x + y)*y)*x, x*(y + x*(x + y))]])
    # Test if expand() returns a matrix
    m1 = m0.expand()
    assert m1 == Matrix(
        [[x*y + x**2, 2], [x*y**2 + y*x**2, x*y + y*x**2 + x**3]])
    a = Symbol('a', real=True)
    assert Matrix([exp(I*a)]).expand(complex=True) == \
        Matrix([cos(a) + I*sin(a)])
    # matrix exponential of a nilpotent matrix is a finite polynomial
    assert Matrix([[0, 1, 2], [0, 0, -1], [0, 0, 0]]).exp() == Matrix([
        [1, 1, Rational(3, 2)],
        [0, 1, -1],
        [0, 0, 1]]
    )
def test_random():
    """randMatrix() smoke test: seeds, bounds, symmetry; simplify() on numeric entries."""
    M = randMatrix(3, 3)
    M = randMatrix(3, 3, seed=3)
    M = randMatrix(3, 4, 0, 150)
    M = randMatrix(3, symmetric=True)
    S = M.copy()
    S.simplify()
    assert S == M  # doesn't fail when elements are Numbers, not int
def test_LUdecomp():
    """LU decomposition (with pivoting) reconstructs the input; also the fraction-free variant."""
    testmat = Matrix([[0, 2, 5, 3],
                      [3, 3, 7, 4],
                      [8, 4, 0, 2],
                      [-2, 6, 3, 4]])
    L, U, p = testmat.LUdecomposition()
    assert L.is_lower
    assert U.is_upper
    # undoing the row permutation must reproduce the original matrix
    assert (L*U).permuteBkwd(p) - testmat == zeros(4)
    testmat = Matrix([[6, -2, 7, 4],
                      [0, 3, 6, 7],
                      [1, -2, 7, 4],
                      [-9, 2, 6, 3]])
    L, U, p = testmat.LUdecomposition()
    assert L.is_lower
    assert U.is_upper
    assert (L*U).permuteBkwd(p) - testmat == zeros(4)
    # symbolic entries
    M = Matrix(((1, x, 1), (2, y, 0), (y, 0, z)))
    L, U, p = M.LUdecomposition()
    assert L.is_lower
    assert U.is_upper
    assert (L*U).permuteBkwd(p) - M == zeros(3)
    # triangularity predicates on non-square matrices
    mL = Matrix((
        (1, 0, 0),
        (2, 3, 0),
    ))
    assert mL.is_lower is True
    assert mL.is_upper is False
    mU = Matrix((
        (1, 2, 3),
        (0, 4, 5),
    ))
    assert mU.is_lower is False
    assert mU.is_upper is True
    # test FF LUdecomp
    M = Matrix([[1, 3, 3],
                [3, 2, 6],
                [3, 2, 2]])
    P, L, Dee, U = M.LUdecompositionFF()
    assert P*M == L*Dee.inv()*U
    M = Matrix([[1, 2, 3, 4],
                [3, -1, 2, 3],
                [3, 1, 3, -2],
                [6, -1, 0, 2]])
    P, L, Dee, U = M.LUdecompositionFF()
    assert P*M == L*Dee.inv()*U
    M = Matrix([[0, 0, 1],
                [2, 3, 0],
                [3, 1, 4]])
    P, L, Dee, U = M.LUdecompositionFF()
    assert P*M == L*Dee.inv()*U
def test_LUsolve():
    """LUsolve recovers x from b = A*x."""
    A = Matrix([[2, 3, 5],
                [3, 6, 2],
                [8, 3, 6]])
    x = Matrix(3, 1, [3, 7, 5])
    b = A*x
    soln = A.LUsolve(b)
    assert soln == x
    A = Matrix([[0, -1, 2],
                [5, 10, 7],
                [8, 3, 4]])
    x = Matrix(3, 1, [-1, 2, 5])
    b = A*x
    soln = A.LUsolve(b)
    assert soln == x
def test_QRsolve():
    """QRsolve recovers x from b = A*x, for vector and multi-column right-hand sides."""
    A = Matrix([[2, 3, 5],
                [3, 6, 2],
                [8, 3, 6]])
    x = Matrix(3, 1, [3, 7, 5])
    b = A*x
    soln = A.QRsolve(b)
    assert soln == x
    x = Matrix([[1, 2], [3, 4], [5, 6]])
    b = A*x
    soln = A.QRsolve(b)
    assert soln == x
    A = Matrix([[0, -1, 2],
                [5, 10, 7],
                [8, 3, 4]])
    x = Matrix(3, 1, [-1, 2, 5])
    b = A*x
    soln = A.QRsolve(b)
    assert soln == x
    x = Matrix([[7, 8], [9, 10], [11, 12]])
    b = A*x
    soln = A.QRsolve(b)
    assert soln == x
def test_inverse():
    """inv() and its GE/ADJ/LU/CH/LDL methods agree; immutable classes keep their type."""
    A = eye(4)
    assert A.inv() == eye(4)
    assert A.inv(method="LU") == eye(4)
    assert A.inv(method="ADJ") == eye(4)
    A = Matrix([[2, 3, 5],
                [3, 6, 2],
                [8, 3, 6]])
    Ainv = A.inv()
    assert A*Ainv == eye(3)
    assert A.inv(method="LU") == Ainv
    assert A.inv(method="ADJ") == Ainv
    # test that immutability is not a problem
    cls = ImmutableMatrix
    m = cls([[48, 49, 31],
             [ 9, 71, 94],
             [59, 28, 65]])
    assert all(type(m.inv(s)) is cls for s in 'GE ADJ LU'.split())
    cls = ImmutableSparseMatrix
    m = cls([[48, 49, 31],
             [ 9, 71, 94],
             [59, 28, 65]])
    assert all(type(m.inv(s)) is cls for s in 'CH LDL'.split())
def test_matrix_inverse_mod():
    """inv_mod(m): modular inverse; non-square raises, singular-mod-m raises ValueError."""
    A = Matrix(2, 1, [1, 0])
    raises(NonSquareMatrixError, lambda: A.inv_mod(2))
    A = Matrix(2, 2, [1, 0, 0, 0])
    raises(ValueError, lambda: A.inv_mod(2))
    A = Matrix(2, 2, [1, 2, 3, 4])
    Ai = Matrix(2, 2, [1, 1, 0, 1])
    assert A.inv_mod(3) == Ai
    A = Matrix(2, 2, [1, 0, 0, 1])
    assert A.inv_mod(2) == A
def test_util():
    """Assorted helpers: norm, projection, zeros/ones, copy and cofactor matrices."""
    R = Rational
    v1 = Matrix(1, 3, [1, 2, 3])
    v2 = Matrix(1, 3, [3, 4, 5])
    assert v1.norm() == sqrt(14)
    assert v1.project(v2) == Matrix(1, 3, [R(39)/25, R(52)/25, R(13)/5])
    assert Matrix.zeros(1, 2) == Matrix(1, 2, [0, 0])
    assert ones(1, 2) == Matrix(1, 2, [1, 1])
    assert v1.copy() == v1
    # cofactor
    assert eye(3) == eye(3).cofactorMatrix()
    test = Matrix([[1, 3, 2], [2, 6, 3], [2, 3, 6]])
    assert test.cofactorMatrix() == \
        Matrix([[27, -6, -6], [-12, 2, 3], [-3, 1, 0]])
    test = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    assert test.cofactorMatrix() == \
        Matrix([[-3, 6, -3], [6, -12, 6], [-3, 6, -3]])
def test_jacobian_hessian():
    """jacobian() of vector functions and hessian() of scalars (with optional constraints)."""
    L = Matrix(1, 2, [x**2*y, 2*y**2 + x*y])
    syms = [x, y]
    assert L.jacobian(syms) == Matrix([[2*x*y, x**2], [y, 4*y + x]])
    L = Matrix(1, 2, [x, x**2*y**3])
    assert L.jacobian(syms) == Matrix([[1, 0], [2*x*y**3, x**2*3*y**2]])
    f = x**2*y
    syms = [x, y]
    assert hessian(f, syms) == Matrix([[2*y, 2*x], [2*x, 0]])
    f = x**2*y**3
    assert hessian(f, syms) == \
        Matrix([[2*y**3, 6*x*y**2], [6*x*y**2, 6*x**2*y]])
    f = z + x*y**2
    g = x**2 + 2*y**3
    ans = Matrix([[0, 2*y],
                  [2*y, 2*x]])
    # the variable list may be a column or a row vector
    assert ans == hessian(f, Matrix([x, y]))
    assert ans == hessian(f, Matrix([x, y]).T)
    # bordered hessian with constraint g
    assert hessian(f, (y, x), [g]) == Matrix([
        [     0, 6*y**2, 2*x],
        [6*y**2,    2*x, 2*y],
        [   2*x,    2*y,   0]])
def test_QR():
    """QRdecomposition: Q orthonormal, R upper-triangular, product reconstructs A."""
    A = Matrix([[1, 2], [2, 3]])
    Q, S = A.QRdecomposition()
    R = Rational
    assert Q == Matrix([
        [  5**R(-1, 2), (R(2)/5)*(R(1)/5)**R(-1, 2)],
        [2*5**R(-1, 2), (-R(1)/5)*(R(1)/5)**R(-1, 2)]])
    assert S == Matrix([[5**R(1, 2), 8*5**R(-1, 2)], [0, (R(1)/5)**R(1, 2)]])
    assert Q*S == A
    assert Q.T * Q == eye(2)
    A = Matrix([[1, 1, 1], [1, 1, 3], [2, 3, 4]])
    Q, R = A.QRdecomposition()
    assert Q.T * Q == eye(Q.cols)
    assert R.is_upper
    assert A == Q*R
def test_QR_non_square():
    """QRdecomposition also works for tall (non-square) matrices."""
    A = Matrix([[9, 0, 26], [12, 0, -7], [0, 4, 4], [0, -3, -3]])
    Q, R = A.QRdecomposition()
    assert Q.T * Q == eye(Q.cols)
    assert R.is_upper
    assert A == Q*R
    A = Matrix([[1, -1, 4], [1, 4, -2], [1, 4, 2], [1, -1, 0]])
    Q, R = A.QRdecomposition()
    assert Q.T * Q == eye(Q.cols)
    assert R.is_upper
    assert A == Q*R
def test_nullspace():
    """rref() reduced forms and nullspace() basis vectors, including rows > cols."""
    # first test reduced row-ech form
    R = Rational
    M = Matrix([[5, 7, 2, 1],
                [1, 6, 2, -1]])
    out, tmp = M.rref()
    assert out == Matrix([[1, 0, -R(2)/23, R(13)/23],
                          [0, 1, R(8)/23, R(-6)/23]])
    M = Matrix([[-5, -1, 4, -3, -1],
                [ 1, -1, -1, 1, 0],
                [-1, 0, 0, 0, 0],
                [ 4, 1, -4, 3, 1],
                [-2, 0, 2, -2, -1]])
    # nullspace vectors are annihilated by M
    assert M*M.nullspace()[0] == Matrix(5, 1, [0]*5)
    M = Matrix([[ 1, 3, 0, 2, 6, 3, 1],
                [-2, -6, 0, -2, -8, 3, 1],
                [ 3, 9, 0, 0, 6, 6, 2],
                [-1, -3, 0, 1, 0, 9, 3]])
    out, tmp = M.rref()
    assert out == Matrix([[1, 3, 0, 0, 2, 0, 0],
                          [0, 0, 0, 1, 2, 0, 0],
                          [0, 0, 0, 0, 0, 1, R(1)/3],
                          [0, 0, 0, 0, 0, 0, 0]])
    # now check the vectors
    basis = M.nullspace()
    assert basis[0] == Matrix([-3, 1, 0, 0, 0, 0, 0])
    assert basis[1] == Matrix([0, 0, 1, 0, 0, 0, 0])
    assert basis[2] == Matrix([-2, 0, 0, -2, 1, 0, 0])
    assert basis[3] == Matrix([0, 0, 0, 0, 0, R(-1)/3, 1])
    # issue 4797; just see that we can do it when rows > cols
    M = Matrix([[1, 2], [2, 4], [3, 6]])
    assert M.nullspace()
def test_wronskian():
    """wronskian() of function lists, via both default and 'berkowitz' determinants."""
    assert wronskian([cos(x), sin(x)], x) == cos(x)**2 + sin(x)**2
    assert wronskian([exp(x), exp(2*x)], x) == exp(3*x)
    assert wronskian([exp(x), x], x) == exp(x) - x*exp(x)
    assert wronskian([1, x, x**2], x) == 2
    w1 = -6*exp(x)*sin(x)*x + 6*cos(x)*exp(x)*x**2 - 6*exp(x)*cos(x)*x - \
        exp(x)*cos(x)*x**3 + exp(x)*sin(x)*x**3
    assert wronskian([exp(x), cos(x), x**3], x).expand() == w1
    assert wronskian([exp(x), cos(x), x**3], x, method='berkowitz').expand() \
        == w1
    w2 = -x**3*cos(x)**2 - x**3*sin(x)**2 - 6*x*cos(x)**2 - 6*x*sin(x)**2
    assert wronskian([sin(x), cos(x), x**3], x).expand() == w2
    assert wronskian([sin(x), cos(x), x**3], x, method='berkowitz').expand() \
        == w2
    # empty function list: wronskian is 1 by convention
    assert wronskian([], x) == 1
def test_eigen():
    """charpoly, eigenvals and eigenvects: symbolic, numeric, simplify flag and floats."""
    R = Rational
    assert eye(3).charpoly(x) == Poly((x - 1)**3, x)
    assert eye(3).charpoly(y) == Poly((y - 1)**3, y)
    M = Matrix([[1, 0, 0],
                [0, 1, 0],
                [0, 0, 1]])
    assert M.eigenvals(multiple=False) == {S.One: 3}
    assert M.eigenvects() == (
        [(1, 3, [Matrix([1, 0, 0]),
                 Matrix([0, 1, 0]),
                 Matrix([0, 0, 1])])])
    M = Matrix([[0, 1, 1],
                [1, 0, 0],
                [1, 1, 1]])
    assert M.eigenvals() == {2*S.One: 1, -S.One: 1, S.Zero: 1}
    assert M.eigenvects() == (
        [
            (-1, 1, [Matrix([-1, 1, 0])]),
            ( 0, 1, [Matrix([0, -1, 1])]),
            ( 2, 1, [Matrix([R(2, 3), R(1, 3), 1])])
        ])
    # symbolic eigenvalues
    a = Symbol('a')
    M = Matrix([[a, 0],
                [0, 1]])
    assert M.eigenvals() == {a: 1, S.One: 1}
    # defective matrix: one eigenvalue of multiplicity 2, single eigenvector
    M = Matrix([[1, -1],
                [1, 3]])
    assert M.eigenvects() == ([(2, 2, [Matrix(2, 1, [-1, 1])])])
    M = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    a = R(15, 2)
    b = 3*33**R(1, 2)
    c = R(13, 2)
    d = (R(33, 8) + 3*b/8)
    e = (R(33, 8) - 3*b/8)
    def NS(e, n):
        return str(N(e, n))
    # compare numerically: exact forms of the eigensystem differ in shape
    r = [
        (a - b/2, 1, [Matrix([(12 + 24/(c - b/2))/((c - b/2)*e) + 3/(c - b/2),
                              (6 + 12/(c - b/2))/e, 1])]),
        (       0, 1, [Matrix([1, -2, 1])]),
        (a + b/2, 1, [Matrix([(12 + 24/(c + b/2))/((c + b/2)*d) + 3/(c + b/2),
                              (6 + 12/(c + b/2))/d, 1])]),
    ]
    r1 = [(NS(r[i][0], 2), NS(r[i][1], 2),
           [NS(j, 2) for j in r[i][2][0]]) for i in range(len(r))]
    r = M.eigenvects()
    r2 = [(NS(r[i][0], 2), NS(r[i][1], 2),
           [NS(j, 2) for j in r[i][2][0]]) for i in range(len(r))]
    assert sorted(r1) == sorted(r2)
    # Hermitian matrix with symbolic entries
    eps = Symbol('eps', real=True)
    M = Matrix([[abs(eps), I*eps    ],
                [-I*eps,   abs(eps) ]])
    assert M.eigenvects() == (
        [
            ( 0, 1, [Matrix([[-I*eps/abs(eps)], [1]])]),
            ( 2*abs(eps), 1, [ Matrix([[I*eps/abs(eps)], [1]]) ] ),
        ])
    # simplify flag controls whether eigenvector entries are normalized
    M = Matrix(3, 3, [1, 2, 0, 0, 3, 0, 2, -4, 2])
    M._eigenvects = M.eigenvects(simplify=False)
    assert max(i.q for i in M._eigenvects[0][2][0]) > 1
    M._eigenvects = M.eigenvects(simplify=True)
    assert max(i.q for i in M._eigenvects[0][2][0]) == 1
    M = Matrix([[S(1)/4, 1], [1, 1]])
    assert M.eigenvects(simplify=True) == [
        (S(5)/8 + sqrt(73)/8, 1, [Matrix([[8/(3 + sqrt(73))], [1]])]),
        (-sqrt(73)/8 + S(5)/8, 1, [Matrix([[8/(-sqrt(73) + 3)], [1]])])]
    assert M.eigenvects(simplify=False) == [
        (Rational(5, 8) + sqrt(73)/8, 1,
         [Matrix([[-1/(-sqrt(73)/8 + Rational(-3, 8))], [1]])]),
        (-sqrt(73)/8 + Rational(5, 8), 1,
         [Matrix([[-1/(Rational(-3, 8) + sqrt(73)/8)], [1]])]),
    ]
    # float entries: rational=False keeps numeric eigenvalues
    m = Matrix([[1, .6, .6], [.6, .9, .9], [.9, .6, .6]])
    evals = {-sqrt(385)/20 + S(5)/4: 1, sqrt(385)/20 + S(5)/4: 1, S.Zero: 1}
    assert m.eigenvals() == evals
    nevals = list(sorted(m.eigenvals(rational=False).keys()))
    sevals = list(sorted(evals.keys()))
    assert all(abs(nevals[i] - sevals[i]) < 1e-9 for i in range(len(nevals)))
def test_subs():
    """subs() accepts single pairs, lists of pairs, dicts and simultaneous substitution."""
    assert Matrix([[1, x], [x, 4]]).subs(x, 5) == Matrix([[1, 5], [5, 4]])
    assert Matrix([[x, 2], [x + y, 4]]).subs([[x, -1], [y, -2]]) == \
        Matrix([[-1, 2], [-3, 4]])
    assert Matrix([[x, 2], [x + y, 4]]).subs([(x, -1), (y, -2)]) == \
        Matrix([[-1, 2], [-3, 4]])
    assert Matrix([[x, 2], [x + y, 4]]).subs({x: -1, y: -2}) == \
        Matrix([[-1, 2], [-3, 4]])
    # simultaneous swap of x and y
    assert Matrix([x*y]).subs({x: y - 1, y: x - 1}, simultaneous=True) == \
        Matrix([(x - 1)*(y - 1)])
    for cls in classes:
        assert Matrix([[2, 0], [0, 2]]) == cls.eye(2).subs(1, 2)
def test_simplify():
    """Matrix.simplify (in place) and rref(simplify=...) behave as expected."""
    f, n = symbols('f, n')
    m = Matrix([[1, x], [x + 1/x, x - 1]])
    m = m.row_join(eye(m.cols))
    raw = m.rref(simplify=lambda x: x)[0]
    assert raw != m.rref(simplify=True)[0]
    M = Matrix([[            1/x + 1/y,  (x + x*y) / x               ],
                [ (f(x) + y*f(x))/f(x), 2 * (1/n - cos(n * pi)/n) / pi ]])
    M.simplify()
    assert M == Matrix([[ (x + y)/(x * y),                        1 + y ],
                        [           1 + y, 2*((1 - 1*cos(pi*n))/(pi*n)) ]])
    eq = (1 + x)**2
    M = Matrix([[eq]])
    M.simplify()
    assert M == Matrix([[eq]])
    # Bug fix: the original read `M.simplify(ratio=oo) == M` - a dead
    # comparison whose result was discarded (simplify() works in place and
    # returns None, so the expression was always False anyway).  Perform the
    # in-place call, then check the result against scalar simplify.
    M.simplify(ratio=oo)
    assert M == Matrix([[eq.simplify(ratio=oo)]])
def test_transpose():
    """.T swaps rows and columns, is an involution (M.T.T == M), and is the
    same as the transpose() method."""
    M = Matrix([[1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
                [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]])
    assert M.T == Matrix( [ [1, 1],
                            [2, 2],
                            [3, 3],
                            [4, 4],
                            [5, 5],
                            [6, 6],
                            [7, 7],
                            [8, 8],
                            [9, 9],
                            [0, 0] ])
    assert M.T.T == M
    assert M.T == M.transpose()
def test_conjugate():
    """.C is the elementwise complex conjugate and .H the conjugate
    transpose (Hermitian adjoint), i.e. M.H == M.T.C."""
    M = Matrix([[0, I, 5],
                [1, 2, 0]])

    assert M.T == Matrix([[0, 1],
                          [I, 2],
                          [5, 0]])

    assert M.C == Matrix([[0, -I, 5],
                          [1, 2, 0]])
    assert M.C == M.conjugate()

    assert M.H == M.T.C
    assert M.H == Matrix([[ 0, 1],
                          [-I, 2],
                          [ 5, 0]])
def test_conj_dirac():
    """.D — presumably the Dirac conjugate (gamma^0 M^H gamma^0); it raises
    AttributeError for matrices that are not 4x4 (TODO confirm the exact
    precondition against the Matrix.D implementation)."""
    raises(AttributeError, lambda: eye(3).D)

    M = Matrix([[1, I, I, I],
                [0, 1, I, I],
                [0, 0, 1, I],
                [0, 0, 0, 1]])

    assert M.D == Matrix([[ 1, 0, 0, 0],
                          [-I, 1, 0, 0],
                          [-I, -I, -1, 0],
                          [-I, -I, I, -1]])
def test_trace():
    """trace() returns the sum of the diagonal entries of a square matrix."""
    m = Matrix([[1, 0, 0], [0, 5, 0], [0, 0, 8]])
    assert m.trace() == 1 + 5 + 8
def test_shape():
    """shape is the (rows, cols) pair."""
    m = Matrix([[x, 0, 0], [0, y, 0]])
    assert m.shape == (2, 3)
def test_col_row_op():
    """row_op/col_op apply f(entry, index) in place; row()/col() and slices
    return copies, so writing to them must not change the original."""
    M = Matrix([[x, 0, 0],
                [0, y, 0]])
    M.row_op(1, lambda r, j: r + j + 1)
    assert M == Matrix([[x, 0, 0],
                        [1, y + 2, 3]])

    M.col_op(0, lambda c, j: c + y**j)
    assert M == Matrix([[x + 1, 0, 0],
                        [1 + y, y + 2, 3]])

    # neither row nor slice give copies that allow the original matrix to
    # be changed
    assert M.row(0) == Matrix([[x + 1, 0, 0]])
    r1 = M.row(0)
    r1[0] = 42
    assert M[0, 0] == x + 1
    r1 = M[0, :-1]  # also testing negative slice
    r1[0] = 42
    assert M[0, 0] == x + 1
    c1 = M.col(0)
    assert c1 == Matrix([x + 1, 1 + y])
    c1[0] = 0
    assert M[0, 0] == x + 1
    c1 = M[:, 0]
    c1[0] = 42
    assert M[0, 0] == x + 1
def test_zip_row_op():
    """zip_row_op(i, k, f) replaces row i entrywise with f(row_i, row_k),
    in place, for the mutable matrix classes."""
    for cls in classes[:2]:  # XXX: immutable matrices don't support row ops
        M = cls.eye(3)
        M.zip_row_op(1, 0, lambda v, u: v + 2*u)
        assert M == cls([[1, 0, 0],
                         [2, 1, 0],
                         [0, 0, 1]])

        M = cls.eye(3)*2
        M[0, 1] = -1
        # BUG FIX: the original line ended in a stray `; M` — a no-op
        # expression statement left over from an interactive session.
        M.zip_row_op(1, 0, lambda v, u: v + 2*u)
        assert M == cls([[2, -1, 0],
                         [4, 0, 0],
                         [0, 0, 2]])
def test_issue_3950():
    """Equality/containment: a Matrix must not compare equal to scalars and
    `in` tests against lists must use matrix equality (issue 3950)."""
    m = Matrix([1, 2, 3])
    a = Matrix([1, 2, 3])
    b = Matrix([2, 2, 3])
    assert not (m in [])
    assert not (m in [1])
    assert m != 1
    assert m == a
    assert m != b
def test_issue_3981():
    """Indexing must accept any object implementing __index__, for both
    reads and writes, in 1-D and 2-D form (issue 3981)."""
    class Index1(object):
        def __index__(self):
            return 1

    class Index2(object):
        def __index__(self):
            return 2
    index1 = Index1()
    index2 = Index2()

    m = Matrix([1, 2, 3])

    assert m[index2] == 3

    m[index2] = 5
    assert m[2] == 5

    m = Matrix([[1, 2, 3], [4, 5, 6]])
    assert m[index1, index2] == 6
    assert m[1, index2] == 6
    assert m[index1, 2] == 6

    m[index1, index2] = 4
    assert m[1, 2] == 4
    m[1, index2] = 6
    assert m[1, 2] == 6
    m[index1, 2] = 8
    assert m[1, 2] == 8
def test_evalf():
    """evalf()/n() apply elementwise and honor the precision argument."""
    a = Matrix([sqrt(5), 6])
    assert all(a.evalf()[i] == a[i].evalf() for i in range(2))
    assert all(a.evalf(2)[i] == a[i].evalf(2) for i in range(2))
    assert all(a.n(2)[i] == a[i].n(2) for i in range(2))
def test_is_symbolic():
    """is_symbolic() is True iff at least one entry contains a free symbol."""
    a = Matrix([[x, x], [x, x]])
    assert a.is_symbolic() is True
    a = Matrix([[1, 2, 3, 4], [5, 6, 7, 8]])
    assert a.is_symbolic() is False
    a = Matrix([[1, 2, 3, 4], [5, 6, x, 8]])
    assert a.is_symbolic() is True
    a = Matrix([[1, x, 3]])
    assert a.is_symbolic() is True
    a = Matrix([[1, 2, 3]])
    assert a.is_symbolic() is False
    a = Matrix([[1], [x], [3]])
    assert a.is_symbolic() is True
    a = Matrix([[1], [2], [3]])
    assert a.is_symbolic() is False
def test_is_upper():
    """A row vector is trivially upper triangular; a column vector with
    nonzero entries below the diagonal is not."""
    row = Matrix([[1, 2, 3]])
    assert row.is_upper is True
    col = Matrix([[1], [2], [3]])
    assert col.is_upper is False
def test_is_lower():
    """A row vector with entries above the diagonal is not lower
    triangular; a column vector trivially is."""
    row = Matrix([[1, 2, 3]])
    assert row.is_lower is False
    col = Matrix([[1], [2], [3]])
    assert col.is_lower is True
def test_is_nilpotent():
    """is_nilpotent(): strictly upper triangular matrices are nilpotent,
    the identity is not."""
    a = Matrix(4, 4, [0, 2, 1, 6, 0, 0, 1, 2, 0, 0, 0, 3, 0, 0, 0, 0])
    assert a.is_nilpotent()
    a = Matrix([[1, 0], [0, 1]])
    assert not a.is_nilpotent()
def test_zeros_ones_fill():
    """zeros/ones constructors and in-place fill(); a single size argument
    produces a square matrix."""
    n, m = 3, 5

    a = zeros(n, m)
    a.fill( 5 )
    b = 5 * ones(n, m)

    assert a == b
    assert a.rows == b.rows == 3
    assert a.cols == b.cols == 5
    assert a.shape == b.shape == (3, 5)
    assert zeros(2) == zeros(2, 2)
    assert ones(2) == ones(2, 2)
    assert zeros(2, 3) == Matrix(2, 3, [0]*6)
    assert ones(2, 3) == Matrix(2, 3, [1]*6)
def test_empty_zeros():
    """Zero-dimensional matrices keep their (possibly nonzero) other
    dimension."""
    a = zeros(0)
    assert a == Matrix()
    a = zeros(0, 2)
    assert a.rows == 0
    assert a.cols == 2
    a = zeros(2, 0)
    assert a.rows == 2
    assert a.cols == 0
def test_issue_3749():
    """Elementwise calculus on matrices: diff, limit (including limits that
    diverge to oo), and integrate (issue 3749)."""
    a = Matrix([[x**2, x*y], [x*sin(y), x*cos(y)]])
    assert a.diff(x) == Matrix([[2*x, y], [sin(y), cos(y)]])
    assert Matrix([
        [x, -x, x**2],
        [exp(x), 1/x - exp(-x), x + 1/x]]).limit(x, oo) == \
        Matrix([[oo, -oo, oo], [oo, 0, oo]])
    assert Matrix([
        [(exp(x) - 1)/x, 2*x + y*x, x**x ],
        [1/x, abs(x), abs(sin(x + 1))]]).limit(x, 0) == \
        Matrix([[1, 0, 1], [oo, 0, sin(1)]])
    assert a.integrate(x) == Matrix([
        [Rational(1, 3)*x**3, y*x**2/2],
        [x**2*sin(y)/2, x**2*cos(y)/2]])
def test_inv_iszerofunc():
    """inv() with a custom iszerofunc must agree with the adjugate method
    for GE and LU on a permuted identity."""
    A = eye(4)
    A.col_swap(0, 1)
    for method in "GE", "LU":
        assert A.inv(method=method, iszerofunc=lambda x: x == 0) == \
            A.inv(method="ADJ")
def test_jacobian_metrics():
    """Jacobian of polar coordinates; accepts row or column vectors for
    either argument, and J.T*J gives the polar metric diag(1, rho**2)."""
    rho, phi = symbols("rho,phi")
    X = Matrix([rho*cos(phi), rho*sin(phi)])
    Y = Matrix([rho, phi])
    J = X.jacobian(Y)
    assert J == X.jacobian(Y.T)
    assert J == (X.T).jacobian(Y)
    assert J == (X.T).jacobian(Y.T)
    g = J.T*eye(J.shape[0])*J
    g = g.applyfunc(trigsimp)
    assert g == Matrix([[1, 0], [0, rho**2]])
def test_jacobian2():
    """Non-square Jacobian: 3 functions of 2 variables gives a 3x2 matrix."""
    rho, phi = symbols("rho,phi")
    X = Matrix([rho*cos(phi), rho*sin(phi), rho**2])
    Y = Matrix([rho, phi])
    J = Matrix([
        [cos(phi), -rho*sin(phi)],
        [sin(phi), rho*cos(phi)],
        [ 2*rho, 0],
    ])
    assert X.jacobian(Y) == J
def test_issue_4564():
    """Jacobian of sliced vectors has the sliced dimensions; every column
    equals the function slice because d/dv exp(x+y+z) = exp(x+y+z)
    (issue 4564)."""
    X = Matrix([exp(x + y + z), exp(x + y + z), exp(x + y + z)])
    Y = Matrix([x, y, z])
    for i in range(1, 3):
        for j in range(1, 3):
            X_slice = X[:i, :]
            Y_slice = Y[:j, :]
            J = X_slice.jacobian(Y_slice)
            assert J.rows == i
            assert J.cols == j
            for k in range(j):
                assert J[:, k] == X_slice
def test_nonvectorJacobian():
    """jacobian() rejects non-vector (genuinely 2-D) arguments on either
    side with TypeError."""
    X = Matrix([[exp(x + y + z), exp(x + y + z)],
                [exp(x + y + z), exp(x + y + z)]])
    raises(TypeError, lambda: X.jacobian(Matrix([x, y, z])))
    X = X[0, :]
    Y = Matrix([[x, y], [x, z]])
    raises(TypeError, lambda: X.jacobian(Y))
    raises(TypeError, lambda: X.jacobian(Matrix([ [x, y], [x, z] ])))
def test_vec():
    """vec() stacks the columns into a single column (column-major order)."""
    m = Matrix([[1, 3], [2, 4]])
    m_vec = m.vec()
    assert m_vec.cols == 1
    for i in range(4):
        assert m_vec[i] == i + 1
def test_vech():
    """vech() half-vectorizes a symmetric matrix; diagonal=False drops the
    diagonal, and check_symmetry=False skips the symmetry verification."""
    m = Matrix([[1, 2], [2, 3]])
    m_vech = m.vech()
    assert m_vech.cols == 1
    for i in range(3):
        assert m_vech[i] == i + 1
    m_vech = m.vech(diagonal=False)
    assert m_vech[0] == 2

    # symmetric only after expansion; vech still recognizes it
    m = Matrix([[1, x*(x + y)], [y*x + x**2, 1]])
    m_vech = m.vech(diagonal=False)
    assert m_vech[0] == x*(x + y)

    # not symmetric at all -- only allowed with check_symmetry=False
    m = Matrix([[1, x*(x + y)], [y*x, 1]])
    m_vech = m.vech(diagonal=False, check_symmetry=False)
    assert m_vech[0] == y*x
def test_vech_errors():
    """vech() raises ShapeError for non-square input and ValueError for a
    square but non-symmetric matrix."""
    m = Matrix([[1, 3]])
    raises(ShapeError, lambda: m.vech())
    m = Matrix([[1, 3], [2, 4]])
    raises(ValueError, lambda: m.vech())
    raises(ShapeError, lambda: Matrix([ [1, 3] ]).vech())
    raises(ValueError, lambda: Matrix([ [1, 3], [2, 4] ]).vech())
def test_diag():
    """diag() builds a block-diagonal matrix from matrices, scalars, lists
    (treated as columns) and nested lists (treated as rows)."""
    a = Matrix([[1, 2], [2, 3]])
    b = Matrix([[3, x], [y, 3]])
    c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
    assert diag(a, b, b) == Matrix([
        [1, 2, 0, 0, 0, 0],
        [2, 3, 0, 0, 0, 0],
        [0, 0, 3, x, 0, 0],
        [0, 0, y, 3, 0, 0],
        [0, 0, 0, 0, 3, x],
        [0, 0, 0, 0, y, 3],
    ])
    assert diag(a, b, c) == Matrix([
        [1, 2, 0, 0, 0, 0, 0],
        [2, 3, 0, 0, 0, 0, 0],
        [0, 0, 3, x, 0, 0, 0],
        [0, 0, y, 3, 0, 0, 0],
        [0, 0, 0, 0, 3, x, 3],
        [0, 0, 0, 0, y, 3, z],
        [0, 0, 0, 0, x, y, z],
    ])
    assert diag(a, c, b) == Matrix([
        [1, 2, 0, 0, 0, 0, 0],
        [2, 3, 0, 0, 0, 0, 0],
        [0, 0, 3, x, 3, 0, 0],
        [0, 0, y, 3, z, 0, 0],
        [0, 0, x, y, z, 0, 0],
        [0, 0, 0, 0, 0, 3, x],
        [0, 0, 0, 0, 0, y, 3],
    ])
    # mixing scalars and matrices of different shapes
    a = Matrix([x, y, z])
    b = Matrix([[1, 2], [3, 4]])
    c = Matrix([[5, 6]])
    assert diag(a, 7, b, c) == Matrix([
        [x, 0, 0, 0, 0, 0],
        [y, 0, 0, 0, 0, 0],
        [z, 0, 0, 0, 0, 0],
        [0, 7, 0, 0, 0, 0],
        [0, 0, 1, 2, 0, 0],
        [0, 0, 3, 4, 0, 0],
        [0, 0, 0, 0, 5, 6],
    ])
    # a flat list is a column block; a nested list is a row block
    assert diag(1, [2, 3], [[4, 5]]) == Matrix([
        [1, 0, 0, 0],
        [0, 2, 0, 0],
        [0, 3, 0, 0],
        [0, 0, 4, 5]])
def test_get_diag_blocks1():
    """A matrix that is a single block returns itself as the only block."""
    a = Matrix([[1, 2], [2, 3]])
    b = Matrix([[3, x], [y, 3]])
    c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
    assert a.get_diag_blocks() == [a]
    assert b.get_diag_blocks() == [b]
    assert c.get_diag_blocks() == [c]
def test_get_diag_blocks2():
    """get_diag_blocks() recovers the original blocks of a diag()
    composition, in order."""
    a = Matrix([[1, 2], [2, 3]])
    b = Matrix([[3, x], [y, 3]])
    c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
    assert diag(a, b, b).get_diag_blocks() == [a, b, b]
    assert diag(a, b, c).get_diag_blocks() == [a, b, c]
    assert diag(a, c, b).get_diag_blocks() == [a, c, b]
    assert diag(c, c, b).get_diag_blocks() == [c, c, b]
def test_inv_block():
    """inv(try_block_diag=True) inverts block-diagonal matrices blockwise,
    for the default and the ADJ method."""
    a = Matrix([[1, 2], [2, 3]])
    b = Matrix([[3, x], [y, 3]])
    c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
    A = diag(a, b, b)
    assert A.inv(try_block_diag=True) == diag(a.inv(), b.inv(), b.inv())
    A = diag(a, b, c)
    assert A.inv(try_block_diag=True) == diag(a.inv(), b.inv(), c.inv())
    A = diag(a, c, b)
    assert A.inv(try_block_diag=True) == diag(a.inv(), c.inv(), b.inv())
    A = diag(a, a, b, a, c, a)
    assert A.inv(try_block_diag=True) == diag(
        a.inv(), a.inv(), b.inv(), a.inv(), c.inv(), a.inv())
    assert A.inv(try_block_diag=True, method="ADJ") == diag(
        a.inv(method="ADJ"), a.inv(method="ADJ"), b.inv(method="ADJ"),
        a.inv(method="ADJ"), c.inv(method="ADJ"), a.inv(method="ADJ"))
def test_creation_args():
    """
    Check that matrix dimensions can be specified using any reasonable type
    (see issue 4614).
    """
    # NOTE(review): `long` here is presumably the py2/py3 shim from sympy's
    # compatibility module, not a builtin -- confirm the file's imports.
    raises(ValueError, lambda: zeros(3, -1))
    raises(TypeError, lambda: zeros(1, 2, 3, 4))
    assert zeros(long(3)) == zeros(3)
    assert zeros(Integer(3)) == zeros(3)
    assert zeros(3.) == zeros(3)
    assert eye(long(3)) == eye(3)
    assert eye(Integer(3)) == eye(3)
    assert eye(3.) == eye(3)
    assert ones(long(3), Integer(4)) == ones(3, 4)
    raises(TypeError, lambda: Matrix(5))
    raises(TypeError, lambda: Matrix(1, 2))
def test_diagonal_symmetrical():
    """is_diagonal()/is_symmetric(): non-square matrices can be diagonal but
    never symmetric; symmetry detection may require simplification."""
    m = Matrix(2, 2, [0, 1, 1, 0])
    assert not m.is_diagonal()
    assert m.is_symmetric()
    assert m.is_symmetric(simplify=False)

    m = Matrix(2, 2, [1, 0, 0, 1])
    assert m.is_diagonal()

    m = diag(1, 2, 3)
    assert m.is_diagonal()
    assert m.is_symmetric()

    m = Matrix(3, 3, [1, 0, 0, 0, 2, 0, 0, 0, 3])
    assert m == diag(1, 2, 3)

    m = Matrix(2, 3, zeros(2, 3))
    assert not m.is_symmetric()
    assert m.is_diagonal()

    m = Matrix(((5, 0), (0, 6), (0, 0)))
    assert m.is_diagonal()

    m = Matrix(((5, 0, 0), (0, 6, 0)))
    assert m.is_diagonal()

    # symmetric only after expanding (x + 1)**2 == x**2 + 2*x + 1
    m = Matrix(3, 3, [1, x**2 + 2*x + 1, y, (x + 1)**2, 2, 0, y, 0, 3])
    assert m.is_symmetric()
    assert not m.is_symmetric(simplify=False)
    assert m.expand().is_symmetric(simplify=False)
def test_diagonalization():
    """diagonalize() returns (P, D) with P.inv()*m*P == D; covers diagonal,
    symmetric, defective, complex-only, and symbolic matrices."""
    m = Matrix(3, 2, [-3, 1, -3, 20, 3, 10])
    assert not m.is_diagonalizable()
    assert not m.is_symmetric()
    raises(NonSquareMatrixError, lambda: m.diagonalize())

    # diagonalizable
    m = diag(1, 2, 3)
    (P, D) = m.diagonalize()
    assert P == eye(3)
    assert D == m

    m = Matrix(2, 2, [0, 1, 1, 0])
    assert m.is_symmetric()
    assert m.is_diagonalizable()
    (P, D) = m.diagonalize()
    assert P.inv() * m * P == D

    m = Matrix(2, 2, [1, 0, 0, 3])
    assert m.is_symmetric()
    assert m.is_diagonalizable()
    (P, D) = m.diagonalize()
    assert P.inv() * m * P == D
    assert P == eye(2)
    assert D == m

    m = Matrix(2, 2, [1, 1, 0, 0])
    assert m.is_diagonalizable()
    (P, D) = m.diagonalize()
    assert P.inv() * m * P == D

    m = Matrix(3, 3, [1, 2, 0, 0, 3, 0, 2, -4, 2])
    assert m.is_diagonalizable()
    (P, D) = m.diagonalize()
    assert P.inv() * m * P == D
    # eigenvectors should come out with integer (denominator-free) entries
    for i in P:
        assert i.as_numer_denom()[1] == 1

    m = Matrix(2, 2, [1, 0, 0, 0])
    assert m.is_diagonal()
    assert m.is_diagonalizable()
    (P, D) = m.diagonalize()
    assert P.inv() * m * P == D
    assert P == Matrix([[0, 1], [1, 0]])

    # diagonalizable, complex only
    m = Matrix(2, 2, [0, 1, -1, 0])
    assert not m.is_diagonalizable(True)
    raises(MatrixError, lambda: m.diagonalize(True))
    assert m.is_diagonalizable()
    (P, D) = m.diagonalize()
    assert P.inv() * m * P == D

    # not diagonalizable
    m = Matrix(2, 2, [0, 1, 0, 0])
    assert not m.is_diagonalizable()
    raises(MatrixError, lambda: m.diagonalize())

    m = Matrix(3, 3, [-3, 1, -3, 20, 3, 10, 2, -2, 4])
    assert not m.is_diagonalizable()
    raises(MatrixError, lambda: m.diagonalize())

    # symbolic
    a, b, c, d = symbols('a b c d')
    m = Matrix(2, 2, [a, c, c, b])
    assert m.is_symmetric()
    assert m.is_diagonalizable()
@XFAIL
def test_eigen_vects():
    """XFAIL: real-only diagonalizability check currently raises
    NotImplementedError for this complex matrix (issue 5292)."""
    m = Matrix(2, 2, [1, 0, 0, I])
    raises(NotImplementedError, lambda: m.is_diagonalizable(True))
    # !!! bug because of eigenvects() or roots(x**2 + (-1 - I)*x + I, x)
    # see issue 5292
    assert not m.is_diagonalizable(True)
    raises(MatrixError, lambda: m.diagonalize(True))
    (P, D) = m.diagonalize(True)
def test_jordan_form():
    """jordan_form() returns (P, J); covers diagonalizable matrices,
    repeated/zero eigenvalues, multiple Jordan blocks, and matrices already
    in Jordan canonical form (where block structure cannot be deduced from
    multiplicities alone)."""
    m = Matrix(3, 2, [-3, 1, -3, 20, 3, 10])
    raises(NonSquareMatrixError, lambda: m.jordan_form())

    # diagonalizable
    m = Matrix(3, 3, [7, -12, 6, 10, -19, 10, 12, -24, 13])
    Jmust = Matrix(3, 3, [-1, 0, 0, 0, 1, 0, 0, 0, 1])
    P, J = m.jordan_form()
    assert Jmust == J
    assert Jmust == m.diagonalize()[1]

    # m = Matrix(3, 3, [0, 6, 3, 1, 3, 1, -2, 2, 1])
    # m.jordan_form()  # very long
    # m.jordan_form()  #

    # diagonalizable, complex only

    # Jordan cells
    # complexity: one of eigenvalues is zero
    m = Matrix(3, 3, [0, 1, 0, -4, 4, 0, -2, 1, 2])
    # The blocks are ordered according to the value of their eigenvalues,
    # in order to make the matrix compatible with .diagonalize()
    Jmust = Matrix(3, 3, [2, 1, 0, 0, 2, 0, 0, 0, 2])
    P, J = m.jordan_form()
    assert Jmust == J
    P, Jcells = m.jordan_cells()
    # same here see 1456ff
    assert Jcells[1] == Matrix(1, 1, [2])
    assert Jcells[0] == Matrix(2, 2, [2, 1, 0, 2])

    # complexity: all of eigenvalues are equal
    m = Matrix(3, 3, [2, 6, -15, 1, 1, -5, 1, 2, -6])
    # Jmust = Matrix(3, 3, [-1, 0, 0, 0, -1, 1, 0, 0, -1])
    # same here see 1456ff
    Jmust = Matrix(3, 3, [-1, 1, 0, 0, -1, 0, 0, 0, -1])
    P, J = m.jordan_form()
    assert Jmust == J

    # complexity: two of eigenvalues are zero
    m = Matrix(3, 3, [4, -5, 2, 5, -7, 3, 6, -9, 4])
    Jmust = Matrix(3, 3, [0, 1, 0, 0, 0, 0, 0, 0, 1])
    P, J = m.jordan_form()
    assert Jmust == J

    m = Matrix(4, 4, [6, 5, -2, -3, -3, -1, 3, 3, 2, 1, -2, -3, -1, 1, 5, 5])
    Jmust = Matrix(4, 4, [2, 1, 0, 0,
                          0, 2, 0, 0,
              0, 0, 2, 1,
              0, 0, 0, 2]
              )
    P, J = m.jordan_form()
    assert Jmust == J

    m = Matrix(4, 4, [6, 2, -8, -6, -3, 2, 9, 6, 2, -2, -8, -6, -1, 0, 3, 4])
    # Jmust = Matrix(4, 4, [2, 0, 0, 0, 0, 2, 1, 0, 0, 0, 2, 0, 0, 0, 0, -2])
    # same here see 1456ff
    Jmust = Matrix(4, 4, [-2, 0, 0, 0,
                           0, 2, 1, 0,
                           0, 0, 2, 0,
                           0, 0, 0, 2])
    P, J = m.jordan_form()
    assert Jmust == J

    m = Matrix(4, 4, [5, 4, 2, 1, 0, 1, -1, -1, -1, -1, 3, 0, 1, 1, -1, 2])
    assert not m.is_diagonalizable()
    Jmust = Matrix(4, 4, [1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 4, 1, 0, 0, 0, 4])
    P, J = m.jordan_form()
    assert Jmust == J

    # the following tests are new and include (some) test the cases where the old
    # algorithm failed due to the fact that the block structure can
    # *NOT* be determined  from algebraic and geometric multiplicity alone
    # This can  be seen most easily when one lets compute the J.c.f. of a matrix that
    # is in J.c.f already.
    m = Matrix(4, 4, [2, 1, 0, 0,
                    0, 2, 1, 0,
                    0, 0, 2, 0,
                    0, 0, 0, 2
    ])
    P, J = m.jordan_form()
    assert m == J

    m = Matrix(4, 4, [2, 1, 0, 0,
                    0, 2, 0, 0,
                    0, 0, 2, 1,
                    0, 0, 0, 2
    ])
    P, J = m.jordan_form()
    assert m == J
def test_Matrix_berkowitz_charpoly():
    """Characteristic polynomial via the Berkowitz algorithm; the result is
    a PurePoly, and charpoly() works with or without an explicit variable."""
    UA, K_i, K_w = symbols('UA K_i K_w')

    A = Matrix([[-K_i - UA + K_i**2/(K_i + K_w),       K_i*K_w/(K_i + K_w)],
                [           K_i*K_w/(K_i + K_w), -K_w + K_w**2/(K_i + K_w)]])

    charpoly = A.berkowitz_charpoly(x)

    assert charpoly == \
        Poly(x**2 + (K_i*UA + K_w*UA + 2*K_i*K_w)/(K_i + K_w)*x +
        K_i*K_w*UA/(K_i + K_w), x, domain='ZZ(K_i,K_w,UA)')

    assert type(charpoly) is PurePoly

    A = Matrix([[1, 3], [2, 0]])

    assert A.charpoly() == A.charpoly(x) == PurePoly(x**2 - x - 6)
def test_exp():
    """Matrix exponential: Matrix.exp() and the functional exp(M) agree."""
    m = Matrix([[3, 4], [0, -2]])
    m_exp = Matrix([[exp(3), -4*exp(-2)/5 + 4*exp(3)/5], [0, exp(-2)]])
    assert m.exp() == m_exp
    assert exp(m) == m_exp

    m = Matrix([[1, 0], [0, 1]])
    assert m.exp() == Matrix([[E, 0], [0, E]])
    assert exp(m) == Matrix([[E, 0], [0, E]])
def test_has():
    """has() checks entries for a symbol or a type (e.g. Symbol)."""
    A = Matrix(((x, y), (2, 3)))
    assert A.has(x)
    assert not A.has(z)
    assert A.has(Symbol)

    A = A.subs(x, 2)
    assert not A.has(x)
def test_errors():
    """Smoke-test the exception types raised for malformed construction,
    bad indexing, shape mismatches, and operations that require a square
    or invertible matrix."""
    raises(ValueError, lambda: Matrix([[1, 2], [1]]))
    raises(IndexError, lambda: Matrix([[1, 2]])[1.2, 5])
    raises(IndexError, lambda: Matrix([[1, 2]])[1, 5.2])
    raises(ValueError, lambda: randMatrix(3, c=4, symmetric=True))
    raises(ValueError, lambda: Matrix([1, 2]).reshape(4, 6))
    raises(ShapeError,
        lambda: Matrix([[1, 2], [3, 4]]).copyin_matrix([1, 0], Matrix([1, 2])))
    raises(TypeError, lambda: Matrix([[1, 2], [3, 4]]).copyin_list([0,
           1], set([])))
    raises(NonSquareMatrixError, lambda: Matrix([[1, 2, 3], [2, 3, 0]]).inv())
    raises(ShapeError,
        lambda: Matrix(1, 2, [1, 2]).row_join(Matrix([[1, 2], [3, 4]])))
    raises(
        ShapeError, lambda: Matrix([1, 2]).col_join(Matrix([[1, 2], [3, 4]])))
    raises(ShapeError, lambda: Matrix([1]).row_insert(1, Matrix([[1,
           2], [3, 4]])))
    raises(ShapeError, lambda: Matrix([1]).col_insert(1, Matrix([[1,
           2], [3, 4]])))
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).trace())
    raises(TypeError, lambda: Matrix([1]).applyfunc(1))
    raises(ShapeError, lambda: Matrix([1]).LUsolve(Matrix([[1, 2], [3, 4]])))
    raises(MatrixError, lambda: Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]
           ]).QRdecomposition())
    raises(MatrixError, lambda: Matrix(1, 2, [1, 2]).QRdecomposition())
    raises(
        NonSquareMatrixError, lambda: Matrix([1, 2]).LUdecomposition_Simple())
    raises(ValueError, lambda: Matrix([[1, 2], [3, 4]]).minorEntry(4, 5))
    raises(ValueError, lambda: Matrix([[1, 2], [3, 4]]).minorMatrix(4, 5))
    raises(TypeError, lambda: Matrix([1, 2, 3]).cross(1))
    raises(TypeError, lambda: Matrix([1, 2, 3]).dot(1))
    raises(ShapeError, lambda: Matrix([1, 2, 3]).dot(Matrix([1, 2])))
    raises(ShapeError, lambda: Matrix([1, 2]).dot([]))
    raises(TypeError, lambda: Matrix([1, 2]).dot('a'))
    raises(NonSquareMatrixError, lambda: Matrix([1, 2, 3]).exp())
    raises(ShapeError, lambda: Matrix([[1, 2], [3, 4]]).normalized())
    raises(ValueError, lambda: Matrix([1, 2]).inv(method='not a method'))
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).inverse_GE())
    raises(ValueError, lambda: Matrix([[1, 2], [1, 2]]).inverse_GE())
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).inverse_ADJ())
    raises(ValueError, lambda: Matrix([[1, 2], [1, 2]]).inverse_ADJ())
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).inverse_LU())
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).is_nilpotent())
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).det())
    raises(ValueError,
        lambda: Matrix([[1, 2], [3, 4]]).det(method='Not a real method'))
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).det_bareis())
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).berkowitz())
    raises(NonSquareMatrixError, lambda: Matrix([1, 2]).berkowitz_det())
    raises(ValueError,
        lambda: hessian(Matrix([[1, 2], [3, 4]]), Matrix([[1, 2], [2, 1]])))
    raises(ValueError, lambda: hessian(Matrix([[1, 2], [3, 4]]), []))
    raises(ValueError, lambda: hessian(Symbol('x')**2, 'a'))
    raises(ValueError,
        lambda: Matrix([[5, 10, 7], [0, -1, 2], [8, 3, 4]]
        ).LUdecomposition_Simple(iszerofunc=lambda x: abs(x) <= 4))
    raises(NotImplementedError, lambda: Matrix([[1, 0], [1, 1]])**(S(1)/2))
    raises(NotImplementedError,
        lambda: Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])**(0.5))
    raises(IndexError, lambda: eye(3)[5, 2])
    raises(IndexError, lambda: eye(3)[2, 5])
    M = Matrix(((1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16)))
    raises(ValueError, lambda: M.det('method=LU_decomposition()'))
def test_len():
    """len() counts entries (rows*cols); empty matrices are falsy."""
    assert len(Matrix()) == 0
    assert len(Matrix([[1, 2]])) == len(Matrix([[1], [2]])) == 2
    assert len(Matrix(0, 2, lambda i, j: 0)) == \
        len(Matrix(2, 0, lambda i, j: 0)) == 0
    assert len(Matrix([[0, 1, 2], [3, 4, 5]])) == 6
    assert Matrix([1]) == Matrix([[1]])
    assert not Matrix()
    assert Matrix() == Matrix([])
def test_integrate():
    """Integration applies entrywise with respect to the given variable."""
    A = Matrix(((1, 4, x), (y, 2, 4), (10, 5, x**2)))
    assert A.integrate(x) == \
        Matrix(((x, 4*x, x**2/2), (x*y, 2*x, 4*x), (10*x, 5*x, x**3/3)))
    assert A.integrate(y) == \
        Matrix(((y, 4*y, x*y), (y**2/2, 2*y, 4*y), (10*y, 5*y, y*x**2)))
def test_limit():
    """limit applies entrywise; sin(x)/x -> 1 as x -> 0."""
    mat = Matrix(((1, 4, sin(x)/x), (y, 2, 4), (10, 5, x**2 + 1)))
    expected = Matrix(((1, 4, 1), (y, 2, 4), (10, 5, 1)))
    assert mat.limit(x, 0) == expected
def test_diff():
    """Differentiation applies entrywise for each variable."""
    mat = Matrix(((1, 4, x), (y, 2, 4), (10, 5, x**2 + 1)))
    d_dx = Matrix(((0, 0, 1), (0, 0, 0), (0, 0, 2*x)))
    d_dy = Matrix(((0, 0, 0), (1, 0, 0), (0, 0, 0)))
    assert mat.diff(x) == d_dx
    assert mat.diff(y) == d_dy
def test_getattr():
    """Unknown attributes raise AttributeError; real methods are reachable
    via getattr."""
    A = Matrix(((1, 4, x), (y, 2, 4), (10, 5, x**2 + 1)))
    raises(AttributeError, lambda: A.nonexistantattribute)
    assert getattr(A, 'diff')(x) == Matrix(((0, 0, 1), (0, 0, 0), (0, 0, 2*x)))
def test_hessenberg():
    """is_upper_hessenberg / is_lower_hessenberg, including the effect of
    perturbing a corner entry."""
    A = Matrix([[3, 4, 1], [2, 4, 5], [0, 1, 2]])
    assert A.is_upper_hessenberg
    A = A.T
    assert A.is_lower_hessenberg
    # a nonzero top-right corner breaks the lower-Hessenberg property
    A[0, -1] = 1
    assert A.is_lower_hessenberg is False

    A = Matrix([[3, 4, 1], [2, 4, 5], [3, 1, 2]])
    assert not A.is_upper_hessenberg
def test_cholesky():
    """cholesky() requires a square symmetric positive-definite matrix and
    returns the lower-triangular factor L with L*L.T == A."""
    raises(NonSquareMatrixError, lambda: Matrix((1, 2)).cholesky())
    raises(ValueError, lambda: Matrix(((1, 2), (3, 4))).cholesky())
    A = Matrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
    assert A.cholesky() * A.cholesky().T == A
    assert A.cholesky().is_lower
    assert A.cholesky() == Matrix([[5, 0, 0], [3, 3, 0], [-1, 1, 3]])
def test_LDLdecomposition():
    """LDLdecomposition() returns unit lower-triangular L and diagonal D
    with L*D*L.T == A; same preconditions as cholesky()."""
    raises(NonSquareMatrixError, lambda: Matrix((1, 2)).LDLdecomposition())
    raises(ValueError, lambda: Matrix(((1, 2), (3, 4))).LDLdecomposition())
    A = Matrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
    L, D = A.LDLdecomposition()
    assert L * D * L.T == A
    assert L.is_lower
    assert L == Matrix([[1, 0, 0], [ S(3)/5, 1, 0], [S(-1)/5, S(1)/3, 1]])
    assert D.is_diagonal()
    assert D == Matrix([[25, 0, 0], [0, 9, 0], [0, 0, 9]])
def test_cholesky_solve():
    """cholesky_solve(b) recovers x from b = A*x for known solutions."""
    A = Matrix([[2, 3, 5],
                [3, 6, 2],
                [8, 3, 6]])
    x = Matrix(3, 1, [3, 7, 5])
    b = A*x
    soln = A.cholesky_solve(b)
    assert soln == x
    A = Matrix([[0, -1, 2],
                [5, 10, 7],
                [8, 3, 4]])
    x = Matrix(3, 1, [-1, 2, 5])
    b = A*x
    soln = A.cholesky_solve(b)
    assert soln == x
def test_LDLsolve():
    """LDLsolve(b) recovers x from b = A*x for known solutions."""
    A = Matrix([[2, 3, 5],
                [3, 6, 2],
                [8, 3, 6]])
    x = Matrix(3, 1, [3, 7, 5])
    b = A*x
    soln = A.LDLsolve(b)
    assert soln == x
    A = Matrix([[0, -1, 2],
                [5, 10, 7],
                [8, 3, 4]])
    x = Matrix(3, 1, [-1, 2, 5])
    b = A*x
    soln = A.LDLsolve(b)
    assert soln == x
def test_lower_triangular_solve():
    """lower_triangular_solve: non-square -> NonSquareMatrixError, shape
    mismatch -> ShapeError, non-triangular self -> ValueError; solving
    against the identity returns the right-hand side unchanged."""
    raises(NonSquareMatrixError,
        lambda: Matrix([1, 0]).lower_triangular_solve(Matrix([0, 1])))
    raises(ShapeError,
        lambda: Matrix([[1, 0], [0, 1]]).lower_triangular_solve(Matrix([1])))
    raises(ValueError,
        lambda: Matrix([[2, 1], [1, 2]]).lower_triangular_solve(
            Matrix([[1, 0], [0, 1]])))

    A = Matrix([[1, 0], [0, 1]])
    B = Matrix([[x, y], [y, x]])
    C = Matrix([[4, 8], [2, 9]])

    assert A.lower_triangular_solve(B) == B
    assert A.lower_triangular_solve(C) == C
def test_upper_triangular_solve():
    """upper_triangular_solve mirror of the lower-triangular test.

    NOTE(review): the shape-mismatch and non-triangular cases expect
    TypeError here, while the lower-triangular test expects ShapeError /
    ValueError for the analogous inputs -- confirm this asymmetry matches
    the implementations rather than being an oversight."""
    raises(NonSquareMatrixError,
        lambda: Matrix([1, 0]).upper_triangular_solve(Matrix([0, 1])))
    raises(TypeError,
        lambda: Matrix([[1, 0], [0, 1]]).upper_triangular_solve(Matrix([1])))
    raises(TypeError,
        lambda: Matrix([[2, 1], [1, 2]]).upper_triangular_solve(
            Matrix([[1, 0], [0, 1]])))

    A = Matrix([[1, 0], [0, 1]])
    B = Matrix([[x, y], [y, x]])
    C = Matrix([[2, 4], [3, 8]])

    assert A.upper_triangular_solve(B) == B
    assert A.upper_triangular_solve(C) == C
def test_diagonal_solve():
    """diagonal_solve divides by the diagonal; non-diagonal self raises
    TypeError."""
    raises(TypeError, lambda: Matrix([1, 1]).diagonal_solve(Matrix([1])))
    A = Matrix([[1, 0], [0, 1]])*2
    B = Matrix([[x, y], [y, x]])
    assert A.diagonal_solve(B) == B/2
def test_matrix_norm():
    """Vector and matrix norms: explicit values plus the defining norm
    properties (zero vector, triangle inequality, absolute homogeneity)."""
    # Vector Tests
    # Test columns and symbols
    x = Symbol('x', real=True)
    v = Matrix([cos(x), sin(x)])
    assert trigsimp(v.norm(2)) == 1
    assert v.norm(10) == Pow(cos(x)**10 + sin(x)**10, S(1)/10)

    # Test Rows
    A = Matrix([[5, Rational(3, 2)]])
    assert A.norm() == Pow(25 + Rational(9, 4), S(1)/2)
    assert A.norm(oo) == max(A._mat)
    assert A.norm(-oo) == min(A._mat)

    # Matrix Tests
    # Intuitive test
    A = Matrix([[1, 1], [1, 1]])
    assert A.norm(2) == 2
    assert A.norm(-2) == 0
    assert A.norm('frobenius') == 2
    assert eye(10).norm(2) == eye(10).norm(-2) == 1

    # Test with Symbols and more complex entries
    A = Matrix([[3, y, y], [x, S(1)/2, -pi]])
    assert (A.norm('fro')
            == sqrt(S(37)/4 + 2*abs(y)**2 + pi**2 + x**2))

    # Check non-square
    A = Matrix([[1, 2, -3], [4, 5, Rational(13, 2)]])
    assert A.norm(2) == sqrt(S(389)/8 + sqrt(78665)/8)
    assert A.norm(-2) == S(0)
    assert A.norm('frobenius') == sqrt(389)/2

    # Test properties of matrix norms
    # http://en.wikipedia.org/wiki/Matrix_norm#Definition
    # Two matrices
    A = Matrix([[1, 2], [3, 4]])
    B = Matrix([[5, 5], [-2, 2]])
    C = Matrix([[0, -I], [I, 0]])
    D = Matrix([[1, 0], [0, -1]])
    L = [A, B, C, D]
    alpha = Symbol('alpha', real=True)

    for order in ['fro', 2, -2]:
        # Zero Check
        assert zeros(3).norm(order) == S(0)
        # Check Triangle Inequality for all Pairs of Matrices
        for X in L:
            for Y in L:
                assert simplify(X.norm(order) + Y.norm(order) >=
                                (X + Y).norm(order))
        # Scalar multiplication linearity
        for M in [A, B, C, D]:
            if order in [2, -2]:
                # Abs is causing tests to fail when Abs(alpha) is inside a Max
                # or Min. The tests produce mathematically true statements that
                # are too complex to be simplified well.
                continue
            try:
                assert ((alpha*M).norm(order) ==
                        abs(alpha) * M.norm(order))
            except NotImplementedError:
                pass  # Some Norms fail on symbolic matrices due to Max issue

    # Test Properties of Vector Norms
    # http://en.wikipedia.org/wiki/Vector_norm
    # Two column vectors
    a = Matrix([1, 1 - 1*I, -3])
    b = Matrix([S(1)/2, 1*I, 1])
    c = Matrix([-1, -1, -1])
    d = Matrix([3, 2, I])
    e = Matrix([Integer(1e2), Rational(1, 1e2), 1])
    L = [a, b, c, d, e]
    alpha = Symbol('alpha', real=True)

    for order in [1, 2, -1, -2, S.Infinity, S.NegativeInfinity, pi]:
        # Zero Check
        if order > 0:
            assert Matrix([0, 0, 0]).norm(order) == S(0)
        # Triangle inequality on all pairs
        if order >= 1:  # Triangle InEq holds only for these norms
            for v in L:
                for w in L:
                    assert simplify(v.norm(order) + w.norm(order) >=
                                    (v + w).norm(order))
        # Linear to scalar multiplication
        if order in [1, 2, -1, -2, S.Infinity, S.NegativeInfinity]:
            for vec in L:
                try:
                    # BUG FIX: the original asserted on `v` -- the stale
                    # variable left over from the triangle-inequality loop --
                    # instead of the loop variable `vec`, so homogeneity was
                    # only ever checked against one vector.
                    assert simplify((alpha*vec).norm(order) -
                                    (abs(alpha) * vec.norm(order))) == 0
                except NotImplementedError:
                    pass  # Some Norms fail on symbolics due to Max issue
def test_singular_values():
    """singular_values() for complex, symbolic, and rotation matrices (a
    rotation has all singular values equal to 1)."""
    x = Symbol('x', real=True)

    A = Matrix([[0, 1*I], [2, 0]])
    assert A.singular_values() == [2, 1]

    A = eye(3)
    A[1, 1] = x
    A[2, 2] = 5
    vals = A.singular_values()
    assert 1 in vals and 5 in vals and abs(x) in vals

    A = Matrix([[sin(x), cos(x)], [-cos(x), sin(x)]])
    vals = [sv.trigsimp() for sv in A.singular_values()]
    assert vals == [S(1), S(1)]
def test_condition_number():
    """condition_number() = max/min singular value; orthogonal (rotation)
    matrices have condition number 1 for any angle."""
    x = Symbol('x', real=True)
    A = eye(3)
    A[0, 0] = 10
    A[2, 2] = S(1)/10
    assert A.condition_number() == 100

    A[1, 1] = x
    assert A.condition_number() == Max(10, Abs(x)) / Min(S(1)/10, Abs(x))

    M = Matrix([[cos(x), sin(x)], [-sin(x), cos(x)]])
    Mc = M.condition_number()
    assert all(Float(1.).epsilon_eq(Mc.subs(x, val).evalf()) for val in
        [Rational(1, 5), Rational(1, 2), Rational(1, 10), pi/2, pi, 7*pi/4 ])
def test_equality():
    """__eq__/__ne__: slices compare equal to the original, scalars never
    do, and SparseMatrix can equal a dense Matrix."""
    A = Matrix(((1, 2, 3), (4, 5, 6), (7, 8, 9)))
    B = Matrix(((9, 8, 7), (6, 5, 4), (3, 2, 1)))
    assert A == A[:, :]
    assert not A != A[:, :]
    assert not A == B
    assert A != B
    assert A != 10
    assert not A == 10

    # A SparseMatrix can be equal to a Matrix
    C = SparseMatrix(((1, 0, 0), (0, 1, 0), (0, 0, 1)))
    D = Matrix(((1, 0, 0), (0, 1, 0), (0, 0, 1)))
    assert C == D
    assert not C != D
def test_col_join():
    """col_join stacks the argument's rows beneath the receiver."""
    expected = Matrix([[1, 0, 0],
                       [0, 1, 0],
                       [0, 0, 1],
                       [7, 7, 7]])
    assert eye(3).col_join(Matrix([[7, 7, 7]])) == expected
def test_row_insert():
    """row_insert accepts any index in [-4, 4]; negative and out-of-range
    indices behave like list.insert."""
    r4 = Matrix([[4, 4, 4]])
    for i in range(-4, 5):
        l = [1, 0, 0]
        l.insert(i, 4)
        assert flatten(eye(3).row_insert(i, r4).col(0).tolist()) == l
def test_col_insert():
    """col_insert mirrors row_insert with list.insert index semantics."""
    c4 = Matrix([4, 4, 4])
    for i in range(-4, 5):
        l = [0, 0, 0]
        l.insert(i, 4)
        assert flatten(zeros(3).col_insert(i, c4).row(0).tolist()) == l
def test_normalized():
    """normalized() rescales a vector to unit length."""
    unit = Matrix([3, 4]).normalized()
    assert unit == Matrix([Rational(3, 5), Rational(4, 5)])
def test_print_nonzero():
    """print_nonzero() draws the sparsity pattern, with a configurable
    marker character."""
    assert capture(lambda: eye(3).print_nonzero()) == \
        '[X  ]\n[ X ]\n[  X]\n'
    assert capture(lambda: eye(3).print_nonzero('.')) == \
        '[.  ]\n[ . ]\n[  .]\n'
def test_zeros_eye():
    """Class-level eye/zeros constructors and the cls= keyword produce the
    requested matrix class."""
    assert Matrix.eye(3) == eye(3)
    assert Matrix.zeros(3) == zeros(3)
    assert ones(3, 4) == Matrix(3, 4, [1]*12)

    i = Matrix([[1, 0], [0, 1]])
    z = Matrix([[0, 0], [0, 0]])
    for cls in classes:
        m = cls.eye(2)
        assert i == m  # but m == i will fail if m is immutable
        assert i == eye(2, cls=cls)
        assert type(m) == cls
        m = cls.zeros(2)
        assert z == m
        assert z == zeros(2, cls=cls)
        assert type(m) == cls
def test_is_zero():
    """is_zero is True/False when decidable and None when the answer
    depends on free symbols (e.g. an unconstrained x on the diagonal)."""
    assert Matrix().is_zero
    assert Matrix([[0, 0], [0, 0]]).is_zero
    assert zeros(3, 4).is_zero
    assert not eye(3).is_zero
    # IDIOM FIX: compare against the singletons None/False with `is`, not
    # `==` (PEP 8); behavior is unchanged since only None equals None.
    assert Matrix([[x, 0], [0, 0]]).is_zero is None
    assert SparseMatrix([[x, 0], [0, 0]]).is_zero is None
    assert ImmutableMatrix([[x, 0], [0, 0]]).is_zero is None
    assert ImmutableSparseMatrix([[x, 0], [0, 0]]).is_zero is None
    assert Matrix([[x, 1], [0, 0]]).is_zero is False
    a = Symbol('a', nonzero=True)
    assert Matrix([[a, 0], [0, 0]]).is_zero is False
def test_rotation_matrices():
    # This tests the rotation matrices by rotating about an axis and back.
    """rot_axis1/2/3: inverse rotations compose to the identity, the trace
    is 1 + 2*cos(theta), and a zero angle is a no-op."""
    theta = pi/3
    r3_plus = rot_axis3(theta)
    r3_minus = rot_axis3(-theta)
    r2_plus = rot_axis2(theta)
    r2_minus = rot_axis2(-theta)
    r1_plus = rot_axis1(theta)
    r1_minus = rot_axis1(-theta)
    assert r3_minus*r3_plus*eye(3) == eye(3)
    assert r2_minus*r2_plus*eye(3) == eye(3)
    assert r1_minus*r1_plus*eye(3) == eye(3)

    # Check the correctness of the trace of the rotation matrix
    assert r1_plus.trace() == 1 + 2*cos(theta)
    assert r2_plus.trace() == 1 + 2*cos(theta)
    assert r3_plus.trace() == 1 + 2*cos(theta)

    # Check that a rotation with zero angle doesn't change anything.
    assert rot_axis1(0) == eye(3)
    assert rot_axis2(0) == eye(3)
    assert rot_axis3(0) == eye(3)
def test_DeferredVector():
    """DeferredVector supports symbolic indexing and sympify round-trip."""
    assert str(DeferredVector("vector")[4]) == "vector[4]"
    assert sympify(DeferredVector("d")) == DeferredVector("d")
def test_DeferredVector_not_iterable():
    """A DeferredVector has no fixed length, so it must not be iterable."""
    assert not iterable(DeferredVector('X'))
def test_DeferredVector_Matrix():
    """Matrix cannot be constructed from a DeferredVector (no length)."""
    raises(TypeError, lambda: Matrix(DeferredVector("V")))
def test_GramSchmidt():
    """GramSchmidt orthogonalizes row or column vectors; the second
    argument requests normalization."""
    R = Rational
    m1 = Matrix(1, 2, [1, 2])
    m2 = Matrix(1, 2, [2, 3])
    assert GramSchmidt([m1, m2]) == \
        [Matrix(1, 2, [1, 2]), Matrix(1, 2, [R(2)/5, R(-1)/5])]
    assert GramSchmidt([m1.T, m2.T]) == \
        [Matrix(2, 1, [1, 2]), Matrix(2, 1, [R(2)/5, R(-1)/5])]
    # from wikipedia
    assert GramSchmidt([Matrix([3, 1]), Matrix([2, 2])], True) == [
        Matrix([3*sqrt(10)/10, sqrt(10)/10]),
        Matrix([-sqrt(10)/10, 3*sqrt(10)/10])]
def test_casoratian():
    """The Casoratian of a linearly dependent sequence set vanishes,
    whether or not zero-checking is enabled."""
    seqs = [1, 2, 3, 4]
    assert casoratian(seqs, 1) == 0
    assert casoratian(seqs, 1, zero=False) == 0
def test_zero_dimension_multiply():
    """Multiplication with zero-dimension operands keeps the outer shape
    (an (m,0)*(0,n) product is the m x n zero matrix)."""
    assert (Matrix()*zeros(0, 3)).shape == (0, 3)
    assert zeros(3, 0)*zeros(0, 3) == zeros(3, 3)
    assert zeros(0, 3)*zeros(3, 0) == Matrix()
def test_slice_issue_2884():
    """Row/column slicing with negative indices; out-of-range indices raise
    IndexError (issue 2884)."""
    m = Matrix(2, 2, range(4))
    assert m[1, :] == Matrix([[2, 3]])
    assert m[-1, :] == Matrix([[2, 3]])
    assert m[:, 1] == Matrix([[1, 3]]).T
    assert m[:, -1] == Matrix([[1, 3]]).T
    raises(IndexError, lambda: m[2, :])
    raises(IndexError, lambda: m[2, 2])
def test_slice_issue_3401():
    """Slicing zero-dimension matrices keeps sensible shapes (issue 3401)."""
    assert zeros(0, 3)[:, -1].shape == (0, 1)
    assert zeros(3, 0)[0, :] == Matrix(1, 0, [])
def test_copyin():
    """In-place assignment: flat index, 2-D index, slice targets, and
    assigning a list / Matrix / SparseMatrix into a column slice."""
    s = zeros(3, 3)
    # flat index 3 is row 1, col 0 (row-major)
    s[3] = 1
    assert s[:, 0] == Matrix([0, 1, 0])
    assert s[3] == 1
    assert s[3: 4] == [1]
    s[1, 1] = 42
    assert s[1, 1] == 42
    assert s[1, 1:] == Matrix([[42, 0]])
    s[1, 1:] = Matrix([[5, 6]])
    assert s[1, :] == Matrix([[1, 5, 6]])
    s[1, 1:] = [[42, 43]]
    assert s[1, :] == Matrix([[1, 42, 43]])
    s[0, 0] = 17
    assert s[:, :1] == Matrix([17, 1, 0])
    # assigning a 3-vector to a single cell fills the whole column
    s[0, 0] = [1, 1, 1]
    assert s[:, 0] == Matrix([1, 1, 1])
    s[0, 0] = Matrix([1, 1, 1])
    assert s[:, 0] == Matrix([1, 1, 1])
    s[0, 0] = SparseMatrix([1, 1, 1])
    assert s[:, 0] == Matrix([1, 1, 1])
def test_invertible_check():
    """rref() pivot count is not a reliable invertibility test without
    simplification; inv() must still raise for singular input."""
    # sometimes a singular matrix will have a pivot vector shorter than
    # the number of rows in a matrix...
    assert Matrix([[1, 2], [1, 2]]).rref() == (Matrix([[1, 2], [0, 0]]), [0])
    raises(ValueError, lambda: Matrix([[1, 2], [1, 2]]).inv())
    # ... but sometimes it won't, so that is an insufficient test of
    # whether something is invertible.
    m = Matrix([
        [-1, -1,  0],
        [ x,  1,  1],
        [ 1,  x, -1],
    ])
    assert len(m.rref()[1]) == m.rows
    # in addition, unless simplify=True in the call to rref, the identity
    # matrix will be returned even though m is not invertible
    assert m.rref()[0] == eye(3)
    assert m.rref(simplify=signsimp)[0] != eye(3)
    raises(ValueError, lambda: m.inv(method="ADJ"))
    raises(ValueError, lambda: m.inv(method="GE"))
    raises(ValueError, lambda: m.inv(method="LU"))
@XFAIL
def test_issue_3959():
    """Substituting a Matrix for a Symbol inside a product (known failure)."""
    x, y = symbols('x, y')
    product = x*y
    assert product.subs(x, Matrix([3, 5, 3])) == Matrix([3, 5, 3])*y
def test_issue_5964():
    """str() of a Matrix uses the Matrix([...]) constructor form."""
    m = Matrix([[1, 2], [3, 4]])
    assert str(m) == 'Matrix([[1, 2], [3, 4]])'
def test_issue_7604():
    """sstr() pretty-prints symbolic matrices across multiple lines."""
    x, y = symbols(u("x y"))
    m = Matrix([[x, 2*y], [y**2, x + 3]])
    assert sstr(m) == 'Matrix([\n[   x,   2*y],\n[y**2, x + 3]])'
def test_is_Identity():
    """is_Identity holds only for square identity matrices, dense or sparse."""
    assert eye(3).is_Identity
    assert eye(3).as_immutable().is_Identity
    assert not zeros(3).is_Identity
    assert not ones(3).is_Identity
    # issue 6242: non-square matrices are never the identity
    assert not Matrix([[1, 0, 0]]).is_Identity
    # issue 8854: sparse matrices
    assert SparseMatrix(3, 3, {(0, 0): 1, (1, 1): 1, (2, 2): 1}).is_Identity
    assert not SparseMatrix(2, 3, range(6)).is_Identity
    assert not SparseMatrix(3, 3, {(0, 0): 1, (1, 1): 1}).is_Identity
    assert not SparseMatrix(3, 3, {(0, 0): 1, (1, 1): 1, (2, 2): 1,
                                   (0, 1): 2, (0, 2): 3}).is_Identity
def test_dot():
    """dot() accepts both matrix operands and plain lists."""
    row = ones(1, 3)
    assert row.dot(ones(3, 1)) == 3
    assert row.dot([1, 1, 1]) == 3
def test_dual():
    """dual() of the electromagnetic field tensor swaps E and B components."""
    B_x, B_y, B_z, E_x, E_y, E_z = symbols(
        'B_x B_y B_z E_x E_y E_z', real=True)
    field = Matrix((
        (0, E_x, E_y, E_z),
        (-E_x, 0, B_z, -B_y),
        (-E_y, -B_z, 0, B_x),
        (-E_z, B_y, -B_x, 0)))
    field_dual = Matrix((
        (0, -B_x, -B_y, -B_z),
        (B_x, 0, E_z, -E_y),
        (B_y, -E_z, 0, E_x),
        (B_z, E_y, -E_x, 0)))
    assert field.dual().equals(field_dual)
    assert eye(3).dual().equals(zeros(3))
    # The dual is an involution up to sign.
    assert field.dual().dual().equals(-field)
def test_anti_symmetric():
    """is_anti_symmetric, with and without simplification of the entries."""
    assert Matrix([1, 2]).is_anti_symmetric() is False
    m = Matrix(3, 3,
               [0, x**2 + 2*x + 1, y, -(x + 1)**2, 0, x*y, -y, -x*y, 0])
    assert m.is_anti_symmetric() is True
    # The off-diagonal pairs only match after simplification.
    assert m.is_anti_symmetric(simplify=False) is False
    assert m.is_anti_symmetric(simplify=lambda x: x) is False
    m[2, 1] = -m[2, 1]  # break the anti-symmetry...
    assert m.is_anti_symmetric() is False
    m[2, 1] = -m[2, 1]  # ...and restore it
    m = m.expand()
    assert m.is_anti_symmetric(simplify=False) is True
    m[0, 0] = 1
    assert m.is_anti_symmetric() is False
def test_normalize_sort_diogonalization():
    """diagonalize(normalize=True) yields an orthogonal transformation."""
    A = Matrix(((1, 2), (2, 1)))
    for sort in (False, True):
        P, Q = A.diagonalize(normalize=True, sort=sort)
        assert P*P.T == P.T*P == eye(P.cols)
    # The last (sorted) pair reconstructs A.
    assert P*Q*P.inv() == A
def test_issue_5321():
    """Mixing a zero-size matrix into a row list raises ValueError."""
    raises(ValueError, lambda: Matrix([[1, 2, 3], Matrix(0, 1, [])]))
def test_issue_5320():
    """hstack/vstack of identity blocks, for dense and sparse matrices."""
    hstacked = Matrix([[1, 0, 2, 0],
                       [0, 1, 0, 2]])
    assert Matrix.hstack(eye(2), 2*eye(2)) == hstacked
    assert Matrix.vstack(eye(2), 2*eye(2)) == Matrix([[1, 0],
                                                      [0, 1],
                                                      [2, 0],
                                                      [0, 2]])
    cls = SparseMatrix
    assert cls.hstack(cls(eye(2)), cls(2*eye(2))) == hstacked
def test_cross():
    """cross() works for row and column vectors across all matrix classes."""
    u = [1, 2, 3]
    v = [3, 4, 5]
    col = Matrix([-2, 4, -2])
    row = col.T

    def check(result, expected):
        # The result must match in both value and concrete class.
        assert result == expected
        assert type(result) == cls

    for cls in classes:
        A = cls(u)
        B = cls(v)
        check(A.cross(B), col)
        check(A.cross(B.T), col)
        check(A.T.cross(B.T), row)
        check(A.T.cross(B), row)
    raises(ShapeError,
           lambda: Matrix(1, 2, [1, 1]).cross(Matrix(1, 2, [1, 1])))
def test_hash():
    """Immutable matrices hash consistently; mutable ones are unhashable."""
    for cls in classes[-2:]:
        s = {cls.eye(1), cls.eye(1)}
        assert len(s) == 1 and s.pop() == cls.eye(1)
    # issue 3979
    for cls in classes[:2]:
        assert not isinstance(cls.eye(1), collections.Hashable)
@XFAIL
def test_issue_3979():
    """Hashing a mutable matrix should raise (known failure).

    When this passes, delete it and change the [1:2] to [:2] in
    test_hash above for issue 3979.
    """
    cls = classes[0]
    raises(AttributeError, lambda: hash(cls.eye(1)))
def test_adjoint():
    """adjoint() is the conjugate transpose for every matrix class."""
    data = [[0, I], [1, 0]]
    expected = Matrix([[0, 1], [-I, 0]])
    for cls in classes:
        assert cls(data).adjoint() == expected
def test_simplify_immutable():
    """simplify() works on an ImmutableMatrix and returns one."""
    from sympy import simplify, sin, cos
    m = ImmutableMatrix([[sin(x)**2 + cos(x)**2]])
    assert simplify(m) == ImmutableMatrix([[1]])
def test_rank():
    """rank() on full-rank, rank-deficient and zero matrices."""
    from sympy.abc import x
    assert Matrix([[1, 2], [x, 1 - 1/x]]).rank() == 2
    assert Matrix(3, 3, range(1, 10)).rank() == 2
    assert zeros(3).rank() == 0
def test_replace():
    """replace() swaps one Function for another elementwise."""
    from sympy import symbols, Function, Matrix
    F, G = symbols('F, G', cls=Function)
    expected = Matrix(2, 2, lambda i, j: G(i + j))
    M = Matrix(2, 2, lambda i, j: F(i + j))
    assert M.replace(F, G) == expected
def test_replace_map():
    """replace(..., True) also returns the substitution map per entry."""
    from sympy import symbols, Function, Matrix
    F, G = symbols('F, G', cls=Function)
    expected = Matrix(2, 2, [(G(0), {F(0): G(0)}),
                             (G(1), {F(1): G(1)}),
                             (G(1), {F(1): G(1)}),
                             (G(2), {F(2): G(2)})])
    M = Matrix(2, 2, lambda i, j: F(i + j))
    assert M.replace(F, G, True) == expected
def test_atoms():
    """atoms() collects numeric atoms and symbols."""
    from sympy.abc import x
    m = Matrix([[1, 2], [x, 1 - 1/x]])
    assert m.atoms() == {S(1), S(2), S(-1), x}
    assert m.atoms(Symbol) == {x}
@slow
def test_pinv():
    """Pseudoinverse: equals the inverse when invertible, and satisfies
    the four Penrose conditions for rectangular/symbolic matrices."""
    from sympy.abc import a, b, c, d
    A1 = Matrix([[a, b], [c, d]])
    assert simplify(A1.pinv()) == simplify(A1.inv())
    matrices = [Matrix([[13, 104], [2212, 3], [-3, 5]]),
                Matrix([[1, 7, 9], [11, 17, 19]]),
                Matrix([a, b])]
    for A in matrices:
        A_pinv = A.pinv()
        AAp = A * A_pinv
        ApA = A_pinv * A
        assert simplify(AAp * A) == A
        assert simplify(ApA * A_pinv) == A_pinv
        assert AAp.H == AAp
        assert ApA.H == ApA
def test_pinv_solve():
    """pinv_solve agrees with exact solvers and gives least-squares answers."""
    # Fully determined system (unique result, identical to other solvers).
    A = Matrix([[1, 5], [7, 9]])
    B = Matrix([12, 13])
    assert A.pinv_solve(B) == A.cholesky_solve(B)
    assert A.pinv_solve(B) == A.LDLsolve(B)
    assert A.pinv_solve(B) == Matrix([sympify('-43/26'), sympify('71/26')])
    assert A * A.pinv() * B == B
    # Fully determined, with two-dimensional B matrix.
    B = Matrix([[12, 13, 14], [15, 16, 17]])
    assert A.pinv_solve(B) == A.cholesky_solve(B)
    assert A.pinv_solve(B) == A.LDLsolve(B)
    assert A.pinv_solve(B) == Matrix([[-33, -37, -41], [69, 75, 81]]) / 26
    assert A * A.pinv() * B == B
    # Underdetermined system (infinite results).
    A = Matrix([[1, 0, 1], [0, 1, 1]])
    B = Matrix([5, 7])
    solution = A.pinv_solve(B)
    # Look up the dummy symbols used in the solution by name.
    w = {s.name: s for s in solution.atoms(Symbol)}
    assert solution == Matrix([[w['w0_0']/3 + w['w1_0']/3 - w['w2_0']/3 + 1],
                               [w['w0_0']/3 + w['w1_0']/3 - w['w2_0']/3 + 3],
                               [-w['w0_0']/3 - w['w1_0']/3 + w['w2_0']/3 + 4]])
    assert A * A.pinv() * B == B
    # Overdetermined system (least squares results).
    A = Matrix([[1, 0], [0, 0], [0, 1]])
    B = Matrix([3, 2, 1])
    assert A.pinv_solve(B) == Matrix([3, 1])
    # Proof the solution is not exact.
    assert A * A.pinv() * B != B
@XFAIL
def test_pinv_rank_deficient():
    """Pseudoinverse of rank-deficient matrices (known failure)."""
    # The four Penrose conditions for various rank-deficient matrices.
    for A in (Matrix([[1, 1, 1], [2, 2, 2]]),
              Matrix([[1, 0], [0, 0]])):
        A_pinv = A.pinv()
        AAp = A * A_pinv
        ApA = A_pinv * A
        assert simplify(AAp * A) == A
        assert simplify(ApA * A_pinv) == A_pinv
        assert AAp.H == AAp
        assert ApA.H == ApA
    # Solving with rank-deficient matrices.
    A = Matrix([[1, 0], [0, 0]])
    # Exact, non-unique solution.
    B = Matrix([3, 0])
    solution = A.pinv_solve(B)
    w1 = solution.atoms(Symbol).pop()
    assert w1.name == 'w1_0'
    assert solution == Matrix([3, w1])
    assert A * A.pinv() * B == B
    # Least squares, non-unique solution.
    B = Matrix([3, 1])
    solution = A.pinv_solve(B)
    w1 = solution.atoms(Symbol).pop()
    assert w1.name == 'w1_0'
    assert solution == Matrix([3, w1])
    assert A * A.pinv() * B != B
def test_issue_7201():
    """Adding empty matrices preserves their (degenerate) shape."""
    assert ones(0, 1) + ones(0, 1) == Matrix(0, 1, [])
    assert ones(1, 0) + ones(1, 0) == Matrix(1, 0, [])
def test_free_symbols():
    """free_symbols ignores zero entries for every matrix class."""
    for M in ImmutableMatrix, ImmutableSparseMatrix, Matrix, SparseMatrix:
        assert M([[x], [0]]).free_symbols == {x}
def test_from_ndarray():
    """Matrix construction from numpy ndarrays (issue 7465)."""
    try:
        from numpy import array
    except ImportError:
        skip('NumPy must be available to test creating matrices from ndarrays')
    assert Matrix(array([1, 2, 3])) == Matrix([1, 2, 3])
    assert Matrix(array([[1, 2, 3]])) == Matrix([[1, 2, 3]])
    assert Matrix(array([[1, 2, 3], [4, 5, 6]])) == Matrix([[1, 2, 3],
                                                            [4, 5, 6]])
    assert Matrix(array([x, y, z])) == Matrix([x, y, z])
    # Arrays of rank > 2 are not supported.
    raises(NotImplementedError,
           lambda: Matrix(array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]])))
def test_hermitian():
    """is_hermitian tracks entry mutations, returning None when unknown."""
    m = Matrix([[1, I], [-I, 1]])
    assert m.is_hermitian
    m[0, 0] = 2*I
    assert m.is_hermitian is False
    m[0, 0] = x
    # Symbolic diagonal: cannot decide.
    assert m.is_hermitian is None
    m[0, 1] = m[1, 0]*I
    assert m.is_hermitian is False
| codeparrot/github-code-clean |
"""
Contains wrapper classes for datasets.
"""
import json
import os
import math
import random
import logging
import tempfile
import time
import shutil
import multiprocessing
from multiprocessing.dummy import Pool
from ast import literal_eval as make_tuple
from typing import Any, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Union
import numpy as np
from numpy.typing import ArrayLike
import pandas as pd
import deepchem as dc
from deepchem.utils.typing import OneOrMany, Shape
from deepchem.utils.data_utils import save_to_disk, load_from_disk, load_image_files
# A single minibatch: the (X, y, w, ids) numpy arrays for a batch of samples.
Batch = Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
# Module-level logger (standard `logging.getLogger(__name__)` pattern).
logger = logging.getLogger(__name__)
def sparsify_features(X: np.ndarray) -> np.ndarray:
    """Extracts a sparse feature representation from dense feature array.

    Parameters
    ----------
    X: np.ndarray
      A numpy array of shape `(n_samples, ...)`.

    Returns
    -------
    X_sparse: np.ndarray
      A numpy array with `dtype=object` where `X_sparse[i]` is a tuple
      of `(nonzero_inds, nonzero_vals)` with nonzero indices and values
      in the i-th sample of `X`.
    """
    pairs = []
    for sample in X:
        nonzero_inds = np.nonzero(sample)[0]
        pairs.append((nonzero_inds, sample[nonzero_inds]))
    return np.array(pairs, dtype=object)
def densify_features(X_sparse: np.ndarray, num_features: int) -> np.ndarray:
    """Expands sparse feature representation to dense feature array.

    Assumes the sparse representation was constructed from an array of
    original shape `(n_samples, num_features)`, so multidimensional
    dense arrays cannot be reconstructed.

    Parameters
    ----------
    X_sparse: np.ndarray
      Must have `dtype=object`.  `X_sparse[i]` must be a tuple of
      nonzero indices and values.
    num_features: int
      Number of features in dense array.

    Returns
    -------
    X: np.ndarray
      A numpy array of shape `(n_samples, num_features)`.
    """
    dense = np.zeros((len(X_sparse), num_features))
    # Each `row` is a view into `dense`, so assigning into it fills the output.
    for row, (nonzero_inds, nonzero_vals) in zip(dense, X_sparse):
        row[nonzero_inds.astype(int)] = nonzero_vals
    return dense
def pad_features(batch_size: int, X_b: np.ndarray) -> np.ndarray:
    """Pads a batch of features to have precisely batch_size elements.

    Given an array of features with length less than or equal to
    batch-size, pads it to `batch_size` length by repeating the
    original features in tiled fashion. For illustration, suppose that
    `len(X_b) == 3` and `batch_size == 10`.

    >>> X_b = np.arange(3)
    >>> X_b
    array([0, 1, 2])
    >>> batch_size = 10
    >>> X_manual = np.array([0, 1, 2, 0, 1, 2, 0, 1, 2, 0])
    >>> X_out = pad_features(batch_size, X_b)
    >>> assert (X_manual == X_out).all()

    This function is similar to `pad_batch` but doesn't handle labels
    `y` or weights `w` and is intended to be used for inference-time
    query processing.

    Parameters
    ----------
    batch_size: int
      The number of datapoints in a batch
    X_b: np.ndarray
      Must be such that `len(X_b) <= batch_size`

    Returns
    -------
    X_out: np.ndarray
      A numpy array with `len(X_out) == batch_size`.
    """
    num_samples = len(X_b)
    if num_samples > batch_size:
        raise ValueError("Cannot pad an array longer than `batch_size`")
    if num_samples == batch_size:
        # Already the right size; return the input unchanged (no copy).
        return X_b
    # Tile the original samples cyclically until the batch is full.
    # Integer-array indexing preserves dtype and feature shape.
    tiled_indices = np.arange(batch_size) % num_samples
    return X_b[tiled_indices]
def pad_batch(batch_size: int, X_b: np.ndarray, y_b: np.ndarray,
              w_b: np.ndarray, ids_b: np.ndarray) -> Batch:
    """Pads batch to have size precisely batch_size elements.

    Given arrays of features `X_b`, labels `y_b`, weights `w_b`, and
    identifiers `ids_b` all with length less than or equal to
    batch-size, pads them to `batch_size` length. It does this by
    repeating the original entries in tiled fashion. Note that `X_b,
    y_b, w_b, ids_b` must all have the same length.

    Parameters
    ----------
    batch_size: int
      The number of datapoints in a batch
    X_b: np.ndarray
      Must be such that `len(X_b) <= batch_size`
    y_b: np.ndarray
      Must be such that `len(y_b) <= batch_size`.  May be None.
    w_b: np.ndarray
      Must be such that `len(w_b) <= batch_size`.  May be None.
    ids_b: np.ndarray
      Must be such that `len(ids_b) <= batch_size`

    Returns
    -------
    Batch
      The batch is a tuple of `(X_out, y_out, w_out, ids_out)`,
      all numpy arrays with length `batch_size`.

    Raises
    ------
    ValueError
      If `len(X_b) > batch_size` (consistent with `pad_features`).
    """
    num_samples = len(X_b)
    if num_samples > batch_size:
        # Previously an oversized batch either failed with an opaque numpy
        # broadcast error or was silently truncated; fail fast instead,
        # matching the behavior of `pad_features`.
        raise ValueError("Cannot pad an array longer than `batch_size`")
    if num_samples == batch_size:
        return (X_b, y_b, w_b, ids_b)
    # By invariant of when this is called, can assume num_samples > 0
    # and num_samples < batch_size.
    if len(X_b.shape) > 1:
        feature_shape = X_b.shape[1:]
        X_out = np.zeros((batch_size,) + feature_shape, dtype=X_b.dtype)
    else:
        X_out = np.zeros((batch_size,), dtype=X_b.dtype)
    if y_b is None:
        y_out = None
    elif len(y_b.shape) < 2:
        y_out = np.zeros(batch_size, dtype=y_b.dtype)
    else:
        y_out = np.zeros((batch_size,) + y_b.shape[1:], dtype=y_b.dtype)
    if w_b is None:
        w_out = None
    elif len(w_b.shape) < 2:
        w_out = np.zeros(batch_size, dtype=w_b.dtype)
    else:
        w_out = np.zeros((batch_size,) + w_b.shape[1:], dtype=w_b.dtype)
    ids_out = np.zeros((batch_size,), dtype=ids_b.dtype)
    # Fill in batch arrays.
    start = 0
    # Only the first copy of each sample keeps its weight; the padded
    # duplicates get weight zero so they don't contribute to training loss.
    if w_out is not None:
        w_out[start:start + num_samples] = w_b[:]
    while start < batch_size:
        num_left = batch_size - start
        increment = min(num_left, num_samples)
        X_out[start:start + increment] = X_b[:increment]
        if y_out is not None:
            y_out[start:start + increment] = y_b[:increment]
        ids_out[start:start + increment] = ids_b[:increment]
        start += increment
    return (X_out, y_out, w_out, ids_out)
class Dataset(object):
    """Abstract base class for datasets defined by X, y, w elements.

    `Dataset` objects are used to store representations of a dataset as
    used in a machine learning task. Datasets contain features `X`,
    labels `y`, weights `w` and identifiers `ids`. Different subclasses
    of `Dataset` may choose to hold `X, y, w, ids` in memory or on disk.

    The `Dataset` class attempts to provide for strong interoperability
    with other machine learning representations for datasets.
    Interconversion methods allow for `Dataset` objects to be converted
    to and from numpy arrays, pandas dataframes, tensorflow datasets,
    and pytorch datasets (only to and not from for pytorch at present).

    Note that you can never instantiate a `Dataset` object directly.
    Instead you will need to instantiate one of the concrete subclasses.
    """

    def __init__(self) -> None:
        # Abstract: concrete subclasses provide their own constructors.
        raise NotImplementedError()

    def __len__(self) -> int:
        """Get the number of elements in the dataset.

        Returns
        -------
        int
          The number of elements in the dataset.
        """
        raise NotImplementedError()

    def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]:
        """Get the shape of the dataset.

        Returns
        -------
        Tuple
          Four tuples, giving the shapes of the X, y, w, and ids arrays.
        """
        raise NotImplementedError()

    def get_task_names(self) -> np.ndarray:
        """Get the names of the tasks associated with this dataset."""
        raise NotImplementedError()

    @property
    def X(self) -> np.ndarray:
        """Get the X vector for this dataset as a single numpy array.

        Note
        ----
        If data is stored on disk, accessing this field may involve loading
        data from disk and could potentially be slow. Using
        `iterbatches()` or `itersamples()` may be more efficient for
        larger datasets.
        """
        raise NotImplementedError()

    @property
    def y(self) -> np.ndarray:
        """Get the y vector for this dataset as a single numpy array.

        Note
        ----
        If data is stored on disk, accessing this field may involve loading
        data from disk and could potentially be slow. Using
        `iterbatches()` or `itersamples()` may be more efficient for
        larger datasets.
        """
        raise NotImplementedError()

    @property
    def ids(self) -> np.ndarray:
        """Get the ids vector for this dataset as a single numpy array.

        Note
        ----
        If data is stored on disk, accessing this field may involve loading
        data from disk and could potentially be slow. Using
        `iterbatches()` or `itersamples()` may be more efficient for
        larger datasets.
        """
        raise NotImplementedError()

    @property
    def w(self) -> np.ndarray:
        """Get the weight vector for this dataset as a single numpy array.

        Note
        ----
        If data is stored on disk, accessing this field may involve loading
        data from disk and could potentially be slow. Using
        `iterbatches()` or `itersamples()` may be more efficient for
        larger datasets.
        """
        raise NotImplementedError()

    def __repr__(self) -> str:
        """Convert self to REPL print representation."""
        threshold = dc.utils.get_print_threshold()
        task_str = np.array2string(
            np.array(self.get_task_names()), threshold=threshold)
        X_shape, y_shape, w_shape, _ = self.get_shape()
        if self.__len__() < dc.utils.get_max_print_size():
            id_str = np.array2string(self.ids, threshold=threshold)
            return "<%s X.shape: %s, y.shape: %s, w.shape: %s, ids: %s, task_names: %s>" % (
                self.__class__.__name__, str(X_shape), str(y_shape),
                str(w_shape), id_str, task_str)
        else:
            # Dataset too large: omit the ids to keep the repr readable.
            return "<%s X.shape: %s, y.shape: %s, w.shape: %s, task_names: %s>" % (
                self.__class__.__name__, str(X_shape), str(y_shape),
                str(w_shape), task_str)

    def __str__(self) -> str:
        """Convert self to str representation."""
        return self.__repr__()

    def iterbatches(self,
                    batch_size: Optional[int] = None,
                    epochs: int = 1,
                    deterministic: bool = False,
                    pad_batches: bool = False) -> Iterator[Batch]:
        """Get an object that iterates over minibatches from the dataset.

        Each minibatch is returned as a tuple of four numpy arrays:
        `(X, y, w, ids)`.

        Parameters
        ----------
        batch_size: int, optional (default None)
          Number of elements in each batch.
        epochs: int, optional (default 1)
          Number of epochs to walk over dataset.
        deterministic: bool, optional (default False)
          If True, follow deterministic order.
        pad_batches: bool, optional (default False)
          If True, pad each batch to `batch_size`.

        Returns
        -------
        Iterator[Batch]
          Generator which yields tuples of four numpy arrays `(X, y, w, ids)`.
        """
        raise NotImplementedError()

    def itersamples(self) -> Iterator[Batch]:
        """Get an object that iterates over the samples in the dataset.

        Examples
        --------
        >>> dataset = NumpyDataset(np.ones((2,2)))
        >>> for x, y, w, id in dataset.itersamples():
        ...   print(x.tolist(), y.tolist(), w.tolist(), id)
        [1.0, 1.0] [0.0] [0.0] 0
        [1.0, 1.0] [0.0] [0.0] 1
        """
        raise NotImplementedError()

    def transform(self, transformer: "dc.trans.Transformer",
                  **args) -> "Dataset":
        """Construct a new dataset by applying a transformation to every sample in this dataset.

        The argument is a function that can be called as follows:
        >> newx, newy, neww = fn(x, y, w)

        It might be called only once with the whole dataset, or multiple
        times with different subsets of the data. Each time it is called,
        it should transform the samples and return the transformed data.

        Parameters
        ----------
        transformer: dc.trans.Transformer
          The transformation to apply to each sample in the dataset.

        Returns
        -------
        Dataset
          A newly constructed Dataset object.
        """
        raise NotImplementedError()

    def select(self,
               indices: Union[Sequence[int], np.ndarray],
               select_dir: Optional[str] = None) -> "Dataset":
        """Creates a new dataset from a selection of indices from self.

        Parameters
        ----------
        indices: Sequence
          List of indices to select.
        select_dir: str, optional (default None)
          Path to new directory that the selected indices will be copied to.
        """
        raise NotImplementedError()

    def get_statistics(self, X_stats: bool = True,
                       y_stats: bool = True) -> Tuple[np.ndarray, ...]:
        """Compute and return statistics of this dataset.

        Uses `self.itersamples()` to compute means and standard deviations
        of the dataset with a single streaming pass (Welford's online
        algorithm), so it works on large datasets that don't fit in memory.

        Parameters
        ----------
        X_stats: bool, optional (default True)
          If True, compute feature-level mean and standard deviations.
        y_stats: bool, optional (default True)
          If True, compute label-level mean and standard deviations.

        Returns
        -------
        Tuple
          - If `X_stats == True`, returns `(X_means, X_stds)`.
          - If `y_stats == True`, returns `(y_means, y_stds)`.
          - If both are true, returns `(X_means, X_stds, y_means, y_stds)`.
        """
        x_shape, y_shape, w_shape, ids_shape = self.get_shape()
        X_means = np.zeros(x_shape[1:])
        X_m2 = np.zeros(x_shape[1:])
        y_means = np.zeros(y_shape[1:])
        y_m2 = np.zeros(y_shape[1:])
        n = 0
        for X, y, _, _ in self.itersamples():
            n += 1
            if X_stats:
                dx = X - X_means
                X_means += dx / n
                X_m2 += dx * (X - X_means)
            if y_stats:
                dy = y - y_means
                y_means += dy / n
                y_m2 += dy * (y - y_means)
        if n < 2:
            X_stds = np.zeros(x_shape[1:])
            y_stds = np.zeros(y_shape[1:])
        else:
            # Population standard deviation (divides by n, not n - 1).
            X_stds = np.sqrt(X_m2 / n)
            y_stds = np.sqrt(y_m2 / n)
        if X_stats and not y_stats:
            return X_means, X_stds
        elif y_stats and not X_stats:
            return y_means, y_stds
        elif X_stats and y_stats:
            return X_means, X_stds, y_means, y_stds
        else:
            return tuple()

    def make_tf_dataset(self,
                        batch_size: int = 100,
                        epochs: int = 1,
                        deterministic: bool = False,
                        pad_batches: bool = False):
        """Create a tf.data.Dataset that iterates over the data in this Dataset.

        Each value returned by the Dataset's iterator is a tuple of (X, y,
        w) for one batch.

        Parameters
        ----------
        batch_size: int, default 100
          The number of samples to include in each batch.
        epochs: int, default 1
          The number of times to iterate over the Dataset.
        deterministic: bool, default False
          If True, the data is produced in order. If False, a different
          random permutation of the data is used for each epoch.
        pad_batches: bool, default False
          If True, batches are padded as necessary to make the size of
          each batch exactly equal batch_size.

        Returns
        -------
        tf.data.Dataset
          TensorFlow Dataset that iterates over the same data.

        Note
        ----
        This class requires TensorFlow to be installed.
        """
        try:
            import tensorflow as tf
        except ImportError:
            # Only catch a missing TensorFlow; a bare `except:` here would
            # also swallow KeyboardInterrupt and unrelated errors raised
            # while importing TensorFlow.
            raise ImportError("This method requires TensorFlow to be installed.")
        # Retrieve the first sample so we can determine the dtypes.
        X, y, w, ids = next(self.itersamples())
        dtypes = (tf.as_dtype(X.dtype), tf.as_dtype(y.dtype),
                  tf.as_dtype(w.dtype))
        shapes = (
            tf.TensorShape([None] + list(X.shape)),  # type: ignore
            tf.TensorShape([None] + list(y.shape)),  # type: ignore
            tf.TensorShape([None] + list(w.shape)))  # type: ignore

        # Create a Tensorflow Dataset from a generator over batches.
        def gen_data():
            for X, y, w, ids in self.iterbatches(batch_size, epochs,
                                                 deterministic, pad_batches):
                yield (X, y, w)

        return tf.data.Dataset.from_generator(gen_data, dtypes, shapes)

    def make_pytorch_dataset(self,
                             epochs: int = 1,
                             deterministic: bool = False,
                             batch_size: Optional[int] = None):
        """Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset.

        Each value returned by the Dataset's iterator is a tuple of (X, y, w, id)
        containing the data for one batch, or for a single sample if batch_size is None.

        Parameters
        ----------
        epochs: int, default 1
          The number of times to iterate over the Dataset.
        deterministic: bool, default False
          If True, the data is produced in order. If False, a different
          random permutation of the data is used for each epoch.
        batch_size: int, optional (default None)
          The number of samples to return in each batch. If None, each returned
          value is a single sample.

        Returns
        -------
        torch.utils.data.IterableDataset
          `torch.utils.data.IterableDataset` that iterates over the data in
          this dataset.

        Note
        ----
        This class requires PyTorch to be installed.
        """
        raise NotImplementedError()

    def to_dataframe(self) -> pd.DataFrame:
        """Construct a pandas DataFrame containing the data from this Dataset.

        Returns
        -------
        pd.DataFrame
          Pandas dataframe. If there is only a single feature per datapoint,
          will have column "X" else will have columns "X1,X2,..." for
          features. If there is only a single label per datapoint, will
          have column "y" else will have columns "y1,y2,..." for labels. If
          there is only a single weight per datapoint will have column "w"
          else will have columns "w1,w2,...". Will have column "ids" for
          identifiers.
        """
        X = self.X
        y = self.y
        w = self.w
        ids = self.ids
        if len(X.shape) == 1 or X.shape[1] == 1:
            columns = ['X']
        else:
            columns = [f'X{i+1}' for i in range(X.shape[1])]
        X_df = pd.DataFrame(X, columns=columns)
        if len(y.shape) == 1 or y.shape[1] == 1:
            columns = ['y']
        else:
            columns = [f'y{i+1}' for i in range(y.shape[1])]
        y_df = pd.DataFrame(y, columns=columns)
        if len(w.shape) == 1 or w.shape[1] == 1:
            columns = ['w']
        else:
            columns = [f'w{i+1}' for i in range(w.shape[1])]
        w_df = pd.DataFrame(w, columns=columns)
        ids_df = pd.DataFrame(ids, columns=['ids'])
        return pd.concat([X_df, y_df, w_df, ids_df], axis=1, sort=False)

    @staticmethod
    def from_dataframe(df: pd.DataFrame,
                       X: Optional[OneOrMany[str]] = None,
                       y: Optional[OneOrMany[str]] = None,
                       w: Optional[OneOrMany[str]] = None,
                       ids: Optional[str] = None):
        """Construct a Dataset from the contents of a pandas DataFrame.

        Parameters
        ----------
        df: pd.DataFrame
          The pandas DataFrame
        X: str or List[str], optional (default None)
          The name of the column or columns containing the X array. If
          this is None, it will look for default column names that match
          those produced by to_dataframe().
        y: str or List[str], optional (default None)
          The name of the column or columns containing the y array. If
          this is None, it will look for default column names that match
          those produced by to_dataframe().
        w: str or List[str], optional (default None)
          The name of the column or columns containing the w array. If
          this is None, it will look for default column names that match
          those produced by to_dataframe().
        ids: str, optional (default None)
          The name of the column containing the ids. If this is None, it
          will look for default column names that match those produced by
          to_dataframe().
        """
        # Find the X values.
        if X is not None:
            X_val = df[X]
        elif 'X' in df.columns:
            X_val = df['X']
        else:
            # Collect the numbered default columns X1, X2, ...
            columns = []
            i = 1
            while f'X{i}' in df.columns:
                columns.append(f'X{i}')
                i += 1
            X_val = df[columns]
        if len(X_val.shape) == 1:
            X_val = np.expand_dims(X_val, 1)
        # Find the y values.
        if y is not None:
            y_val = df[y]
        elif 'y' in df.columns:
            y_val = df['y']
        else:
            columns = []
            i = 1
            while f'y{i}' in df.columns:
                columns.append(f'y{i}')
                i += 1
            y_val = df[columns]
        if len(y_val.shape) == 1:
            y_val = np.expand_dims(y_val, 1)
        # Find the w values.
        if w is not None:
            w_val = df[w]
        elif 'w' in df.columns:
            w_val = df['w']
        else:
            columns = []
            i = 1
            while f'w{i}' in df.columns:
                columns.append(f'w{i}')
                i += 1
            w_val = df[columns]
        if len(w_val.shape) == 1:
            w_val = np.expand_dims(w_val, 1)
        # Find the ids.
        if ids is not None:
            ids_val = df[ids]
        elif 'ids' in df.columns:
            ids_val = df['ids']
        else:
            ids_val = None
        return NumpyDataset(X_val, y_val, w_val, ids_val)
class NumpyDataset(Dataset):
"""A Dataset defined by in-memory numpy arrays.
This subclass of `Dataset` stores arrays `X,y,w,ids` in memory as
numpy arrays. This makes it very easy to construct `NumpyDataset`
objects.
Examples
--------
>>> import numpy as np
>>> dataset = NumpyDataset(X=np.random.rand(5, 3), y=np.random.rand(5,), ids=np.arange(5))
"""
def __init__(self,
X: ArrayLike,
y: Optional[ArrayLike] = None,
w: Optional[ArrayLike] = None,
ids: Optional[ArrayLike] = None,
n_tasks: int = 1) -> None:
"""Initialize this object.
Parameters
----------
X: np.ndarray
Input features. A numpy array of shape `(n_samples,...)`.
y: np.ndarray, optional (default None)
Labels. A numpy array of shape `(n_samples, ...)`. Note that each label can
have an arbitrary shape.
w: np.ndarray, optional (default None)
Weights. Should either be 1D array of shape `(n_samples,)` or if
there's more than one task, of shape `(n_samples, n_tasks)`.
ids: np.ndarray, optional (default None)
Identifiers. A numpy array of shape `(n_samples,)`
n_tasks: int, default 1
Number of learning tasks.
"""
n_samples = np.shape(X)[0]
if n_samples > 0:
if y is None:
# Set labels to be zero, with zero weights
y = np.zeros((n_samples, n_tasks), np.float32)
w = np.zeros((n_samples, 1), np.float32)
if ids is None:
ids = np.arange(n_samples)
if not isinstance(X, np.ndarray):
X = np.array(X)
if not isinstance(y, np.ndarray):
y = np.array(y)
if w is None:
if len(y.shape) == 1:
w = np.ones(y.shape[0], np.float32)
else:
w = np.ones((y.shape[0], 1), np.float32)
if not isinstance(w, np.ndarray):
w = np.array(w)
self._X = X
self._y = y
self._w = w
self._ids = np.array(ids, dtype=object)
def __len__(self) -> int:
"""Get the number of elements in the dataset."""
return len(self._y)
def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]:
"""Get the shape of the dataset.
Returns four tuples, giving the shape of the X, y, w, and ids arrays.
"""
return self._X.shape, self._y.shape, self._w.shape, self._ids.shape
def get_task_names(self) -> np.ndarray:
"""Get the names of the tasks associated with this dataset."""
if len(self._y.shape) < 2:
return np.array([0])
return np.arange(self._y.shape[1])
@property
def X(self) -> np.ndarray:
"""Get the X vector for this dataset as a single numpy array."""
return self._X
@property
def y(self) -> np.ndarray:
"""Get the y vector for this dataset as a single numpy array."""
return self._y
@property
def ids(self) -> np.ndarray:
"""Get the ids vector for this dataset as a single numpy array."""
return self._ids
@property
def w(self) -> np.ndarray:
"""Get the weight vector for this dataset as a single numpy array."""
return self._w
def iterbatches(self,
batch_size: Optional[int] = None,
epochs: int = 1,
deterministic: bool = False,
pad_batches: bool = False) -> Iterator[Batch]:
"""Get an object that iterates over minibatches from the dataset.
Each minibatch is returned as a tuple of four numpy arrays:
`(X, y, w, ids)`.
Parameters
----------
batch_size: int, optional (default None)
Number of elements in each batch.
epochs: int, default 1
Number of epochs to walk over dataset.
deterministic: bool, optional (default False)
If True, follow deterministic order.
pad_batches: bool, optional (default False)
If True, pad each batch to `batch_size`.
Returns
-------
Iterator[Batch]
Generator which yields tuples of four numpy arrays `(X, y, w, ids)`.
"""
def iterate(dataset: NumpyDataset, batch_size: Optional[int], epochs: int,
deterministic: bool, pad_batches: bool):
n_samples = dataset._X.shape[0]
if deterministic:
sample_perm = np.arange(n_samples)
if batch_size is None:
batch_size = n_samples
for epoch in range(epochs):
if not deterministic:
sample_perm = np.random.permutation(n_samples)
batch_idx = 0
num_batches = math.ceil(n_samples / batch_size)
while batch_idx < num_batches:
start = batch_idx * batch_size
end = min(n_samples, (batch_idx + 1) * batch_size)
indices = range(start, end)
perm_indices = sample_perm[indices]
X_batch = dataset._X[perm_indices]
y_batch = dataset._y[perm_indices]
w_batch = dataset._w[perm_indices]
ids_batch = dataset._ids[perm_indices]
if pad_batches:
(X_batch, y_batch, w_batch, ids_batch) = pad_batch(
batch_size, X_batch, y_batch, w_batch, ids_batch)
batch_idx += 1
yield (X_batch, y_batch, w_batch, ids_batch)
return iterate(self, batch_size, epochs, deterministic, pad_batches)
def itersamples(self) -> Iterator[Batch]:
"""Get an object that iterates over the samples in the dataset.
Returns
-------
Iterator[Batch]
Iterator which yields tuples of four numpy arrays `(X, y, w, ids)`.
Examples
--------
>>> dataset = NumpyDataset(np.ones((2,2)))
>>> for x, y, w, id in dataset.itersamples():
... print(x.tolist(), y.tolist(), w.tolist(), id)
[1.0, 1.0] [0.0] [0.0] 0
[1.0, 1.0] [0.0] [0.0] 1
"""
n_samples = self._X.shape[0]
return ((self._X[i], self._y[i], self._w[i], self._ids[i])
for i in range(n_samples))
def transform(self, transformer: "dc.trans.Transformer",
**args) -> "NumpyDataset":
"""Construct a new dataset by applying a transformation to every sample in this dataset.
The argument is a function that can be called as follows:
>> newx, newy, neww = fn(x, y, w)
It might be called only once with the whole dataset, or multiple
times with different subsets of the data. Each time it is called,
it should transform the samples and return the transformed data.
Parameters
----------
transformer: dc.trans.Transformer
The transformation to apply to each sample in the dataset
Returns
-------
NumpyDataset
A newly constructed NumpyDataset object
"""
newx, newy, neww, newids = transformer.transform_array(
self._X, self._y, self._w, self._ids)
return NumpyDataset(newx, newy, neww, newids)
def select(self,
indices: Union[Sequence[int], np.ndarray],
select_dir: Optional[str] = None) -> "NumpyDataset":
"""Creates a new dataset from a selection of indices from self.
Parameters
----------
indices: List[int]
List of indices to select.
select_dir: str, optional (default None)
Used to provide same API as `DiskDataset`. Ignored since
`NumpyDataset` is purely in-memory.
Returns
-------
NumpyDataset
A selected NumpyDataset object
"""
X = self.X[indices]
y = self.y[indices]
w = self.w[indices]
ids = self.ids[indices]
return NumpyDataset(X, y, w, ids)
def make_pytorch_dataset(self,
epochs: int = 1,
deterministic: bool = False,
batch_size: Optional[int] = None):
"""Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset.
Each value returned by the Dataset's iterator is a tuple of (X, y, w, id)
containing the data for one batch, or for a single sample if batch_size is None.
Parameters
----------
epochs: int, default 1
The number of times to iterate over the Dataset
deterministic: bool, default False
If True, the data is produced in order. If False, a different
random permutation of the data is used for each epoch.
batch_size: int, optional (default None)
The number of samples to return in each batch. If None, each returned
value is a single sample.
Returns
-------
torch.utils.data.IterableDataset
`torch.utils.data.IterableDataset` that iterates over the data in
this dataset.
Note
----
This method requires PyTorch to be installed.
"""
try:
from deepchem.data.pytorch_datasets import _TorchNumpyDataset
except:
raise ImportError("This method requires PyTorch to be installed.")
pytorch_ds = _TorchNumpyDataset(
numpy_dataset=self,
epochs=epochs,
deterministic=deterministic,
batch_size=batch_size)
return pytorch_ds
@staticmethod
def from_DiskDataset(ds: "DiskDataset") -> "NumpyDataset":
"""Convert DiskDataset to NumpyDataset.
Parameters
----------
ds: DiskDataset
DiskDataset to transform to NumpyDataset.
Returns
-------
NumpyDataset
A new NumpyDataset created from DiskDataset.
"""
return NumpyDataset(ds.X, ds.y, ds.w, ds.ids)
def to_json(self, fname: str) -> None:
"""Dump NumpyDataset to the json file .
Parameters
----------
fname: str
The name of the json file.
"""
d = {
'X': self.X.tolist(),
'y': self.y.tolist(),
'w': self.w.tolist(),
'ids': self.ids.tolist()
}
with open(fname, 'w') as fout:
json.dump(d, fout)
@staticmethod
def from_json(fname: str) -> "NumpyDataset":
"""Create NumpyDataset from the json file.
Parameters
----------
fname: str
The name of the json file.
Returns
-------
NumpyDataset
A new NumpyDataset created from the json file.
"""
with open(fname) as fin:
d = json.load(fin)
return NumpyDataset(d['X'], d['y'], d['w'], d['ids'])
@staticmethod
def merge(datasets: Sequence[Dataset]) -> "NumpyDataset":
"""Merge multiple NumpyDatasets.
Parameters
----------
datasets: List[Dataset]
List of datasets to merge.
Returns
-------
NumpyDataset
A single NumpyDataset containing all the samples from all datasets.
Example
-------
>>> X1, y1 = np.random.rand(5, 3), np.random.randn(5, 1)
>>> first_dataset = dc.data.NumpyDataset(X1, y1)
>>> X2, y2 = np.random.rand(5, 3), np.random.randn(5, 1)
>>> second_dataset = dc.data.NumpyDataset(X2, y2)
>>> merged_dataset = dc.data.NumpyDataset.merge([first_dataset, second_dataset])
>>> print(len(merged_dataset) == len(first_dataset) + len(second_dataset))
True
"""
X, y, w, ids = datasets[0].X, datasets[0].y, datasets[0].w, datasets[0].ids
for dataset in datasets[1:]:
X = np.concatenate([X, dataset.X], axis=0)
y = np.concatenate([y, dataset.y], axis=0)
w = np.concatenate([w, dataset.w], axis=0)
ids = np.concatenate(
[ids, dataset.ids],
axis=0,
)
return NumpyDataset(X, y, w, ids, n_tasks=y.shape[1])
class _Shard(object):
  """Lightweight in-memory container for one shard's four arrays.

  Used internally as a value object when shards are cached or passed around;
  it simply stores the `(X, y, w, ids)` arrays with no extra behavior.
  """

  def __init__(self, X, y, w, ids):
    # Feature matrix, labels, weights and identifiers for this shard.
    self.X = X
    self.y = y
    self.w = w
    self.ids = ids
class DiskDataset(Dataset):
"""
A Dataset that is stored as a set of files on disk.
The DiskDataset is the workhorse class of DeepChem that facilitates analyses
on large datasets. Use this class whenever you're working with a large
dataset that can't be easily manipulated in RAM.
On disk, a `DiskDataset` has a simple structure. All files for a given
`DiskDataset` are stored in a `data_dir`. The contents of `data_dir` should
be laid out as follows:
| data_dir/
| |
| ---> metadata.csv.gzip
| |
| ---> tasks.json
| |
| ---> shard-0-X.npy
| |
| ---> shard-0-y.npy
| |
| ---> shard-0-w.npy
| |
| ---> shard-0-ids.npy
| |
| ---> shard-1-X.npy
| .
| .
| .
The metadata is constructed by static method
`DiskDataset._construct_metadata` and saved to disk by
`DiskDataset._save_metadata`. The metadata itself consists of a csv file
which has columns `('ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape',
'w_shape')`. `tasks.json` consists of a list of task names for this dataset.
The actual data is stored in `.npy` files (numpy array files) of the form
'shard-0-X.npy', 'shard-0-y.npy', etc.
The basic structure of `DiskDataset` is quite robust and will likely serve
you well for datasets up to about 100 GB or larger. However note that
`DiskDataset` has not been tested for very large datasets at the terabyte
range and beyond. You may be better served by implementing a custom
`Dataset` class for those use cases.
Examples
--------
Let's walk through a simple example of constructing a new `DiskDataset`.
>>> import deepchem as dc
>>> import numpy as np
>>> X = np.random.rand(10, 10)
>>> dataset = dc.data.DiskDataset.from_numpy(X)
If you have already saved a `DiskDataset` to `data_dir`, you can reinitialize it with
>> data_dir = "/path/to/my/data"
>> dataset = dc.data.DiskDataset(data_dir)
Once you have a dataset you can access its attributes as follows
>>> X = np.random.rand(10, 10)
>>> y = np.random.rand(10,)
>>> w = np.ones_like(y)
>>> dataset = dc.data.DiskDataset.from_numpy(X)
>>> X, y, w = dataset.X, dataset.y, dataset.w
One thing to beware of is that `dataset.X`, `dataset.y`, `dataset.w` are
loading data from disk! If you have a large dataset, these operations can be
extremely slow. Instead try iterating through the dataset instead.
>>> for (xi, yi, wi, idi) in dataset.itersamples():
... pass
Attributes
----------
data_dir: str
Location of directory where this `DiskDataset` is stored to disk
metadata_df: pd.DataFrame
Pandas Dataframe holding metadata for this `DiskDataset`
legacy_metadata: bool
Whether this `DiskDataset` uses legacy format.
Note
----
`DiskDataset` originally had a simpler metadata format without shape
information. Older `DiskDataset` objects had metadata files with columns
`('ids', 'X', 'y', 'w')` and not additional shape columns. `DiskDataset`
maintains backwards compatibility with this older metadata format, but we
recommend for performance reasons not using legacy metadata for new
projects.
"""
def __init__(self, data_dir: str) -> None:
"""Load a constructed DiskDataset from disk
Note that this method cannot construct a new disk dataset. Instead use
static methods `DiskDataset.create_dataset` or `DiskDataset.from_numpy`
for that purpose. Use this constructor instead to load a `DiskDataset`
that has already been created on disk.
Parameters
----------
data_dir: str
Location on disk of an existing `DiskDataset`.
"""
self.data_dir = data_dir
logger.info("Loading dataset from disk.")
tasks, self.metadata_df = self.load_metadata()
self.tasks = np.array(tasks)
if len(self.metadata_df.columns) == 4 and list(
self.metadata_df.columns) == ['ids', 'X', 'y', 'w']:
logger.info(
"Detected legacy metatadata on disk. You can upgrade from legacy metadata "
"to the more efficient current metadata by resharding this dataset "
"by calling the reshard() method of this object.")
self.legacy_metadata = True
elif len(self.metadata_df.columns) == 8 and list(
self.metadata_df.columns) == [
'ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape', 'w_shape'
]: # noqa
self.legacy_metadata = False
else:
raise ValueError(
"Malformed metadata on disk. Metadata must have columns 'ids', 'X', 'y', 'w', "
"'ids_shape', 'X_shape', 'y_shape', 'w_shape' (or if in legacy metadata format,"
"columns 'ids', 'X', 'y', 'w')")
self._cached_shards: Optional[List] = None
self._memory_cache_size = 20 * (1 << 20) # 20 MB
self._cache_used = 0
@staticmethod
def create_dataset(shard_generator: Iterable[Batch],
data_dir: Optional[str] = None,
tasks: Optional[ArrayLike] = []) -> "DiskDataset":
"""Creates a new DiskDataset
Parameters
----------
shard_generator: Iterable[Batch]
An iterable (either a list or generator) that provides tuples of data
(X, y, w, ids). Each tuple will be written to a separate shard on disk.
data_dir: str, optional (default None)
Filename for data directory. Creates a temp directory if none specified.
tasks: Sequence, optional (default [])
List of tasks for this dataset.
Returns
-------
DiskDataset
A new `DiskDataset` constructed from the given data
"""
if data_dir is None:
data_dir = tempfile.mkdtemp()
elif not os.path.exists(data_dir):
os.makedirs(data_dir)
metadata_rows = []
time1 = time.time()
for shard_num, (X, y, w, ids) in enumerate(shard_generator):
basename = "shard-%d" % shard_num
metadata_rows.append(
DiskDataset.write_data_to_disk(data_dir, basename, X, y, w, ids))
metadata_df = DiskDataset._construct_metadata(metadata_rows)
DiskDataset._save_metadata(metadata_df, data_dir, tasks)
time2 = time.time()
logger.info("TIMING: dataset construction took %0.3f s" % (time2 - time1))
return DiskDataset(data_dir)
  def load_metadata(self) -> Tuple[List[str], pd.DataFrame]:
    """Helper method that loads metadata from disk.

    Tries the current metadata format (`tasks.json` + gzipped
    `metadata.csv.gzip`) first; if that fails for any reason, falls back to
    the obsolete joblib format and, if found, rewrites it in the new format.

    Returns
    -------
    Tuple[List[str], pd.DataFrame]
      The task list and the metadata dataframe.

    Raises
    ------
    ValueError
      If no metadata in either format is found on disk.
    """
    try:
      tasks_filename, metadata_filename = self._get_metadata_filename()
      with open(tasks_filename) as fin:
        tasks = json.load(fin)
      metadata_df = pd.read_csv(
          metadata_filename, compression='gzip', dtype=object)
      # Normalize pandas NaN placeholders to None so missing y/w entries are
      # easy to test with `is None` downstream.
      metadata_df = metadata_df.where((pd.notnull(metadata_df)), None)
      return tasks, metadata_df
    except Exception:
      # Deliberate best-effort: any failure (missing/corrupt files) falls
      # through to the legacy-format path below.
      pass

    # Load obsolete format -> save in new format
    metadata_filename = os.path.join(self.data_dir, "metadata.joblib")
    if os.path.exists(metadata_filename):
      tasks, metadata_df = load_from_disk(metadata_filename)
      # Drop legacy-only columns before re-saving in the current format.
      del metadata_df['task_names']
      del metadata_df['basename']
      DiskDataset._save_metadata(metadata_df, self.data_dir, tasks)
      return tasks, metadata_df
    raise ValueError("No Metadata Found On Disk")
@staticmethod
def _save_metadata(metadata_df: pd.DataFrame, data_dir: str,
tasks: Optional[ArrayLike]) -> None:
"""Saves the metadata for a DiskDataset
Parameters
----------
metadata_df: pd.DataFrame
The dataframe which will be written to disk.
data_dir: str
Directory to store metadata.
tasks: Sequence, optional
Tasks of DiskDataset. If `None`, an empty list of tasks is written to
disk.
"""
if tasks is None:
tasks = []
elif isinstance(tasks, np.ndarray):
tasks = tasks.tolist()
metadata_filename = os.path.join(data_dir, "metadata.csv.gzip")
tasks_filename = os.path.join(data_dir, "tasks.json")
with open(tasks_filename, 'w') as fout:
json.dump(tasks, fout)
metadata_df.to_csv(metadata_filename, index=False, compression='gzip')
@staticmethod
def _construct_metadata(metadata_entries: List) -> pd.DataFrame:
"""Construct a dataframe containing metadata.
Parameters
----------
metadata_entries: List
`metadata_entries` should have elements returned by write_data_to_disk
above.
Returns
-------
pd.DataFrame
A Pandas Dataframe object contains metadata.
"""
columns = ('ids', 'X', 'y', 'w', 'ids_shape', 'X_shape', 'y_shape',
'w_shape')
metadata_df = pd.DataFrame(metadata_entries, columns=columns)
return metadata_df
@staticmethod
def write_data_to_disk(data_dir: str,
basename: str,
X: Optional[np.ndarray] = None,
y: Optional[np.ndarray] = None,
w: Optional[np.ndarray] = None,
ids: Optional[np.ndarray] = None) -> List[Any]:
"""Static helper method to write data to disk.
This helper method is used to write a shard of data to disk.
Parameters
----------
data_dir: str
Data directory to write shard to.
basename: str
Basename for the shard in question.
X: np.ndarray, optional (default None)
The features array.
y: np.ndarray, optional (default None)
The labels array.
w: np.ndarray, optional (default None)
The weights array.
ids: np.ndarray, optional (default None)
The identifiers array.
Returns
-------
List[Optional[str]]
List with values `[out_ids, out_X, out_y, out_w, out_ids_shape,
out_X_shape, out_y_shape, out_w_shape]` with filenames of locations to
disk which these respective arrays were written.
"""
if X is not None:
out_X: Optional[str] = "%s-X.npy" % basename
save_to_disk(X, os.path.join(data_dir, out_X)) # type: ignore
out_X_shape: Optional[Tuple[int, ...]] = X.shape
else:
out_X = None
out_X_shape = None
if y is not None:
out_y: Optional[str] = "%s-y.npy" % basename
save_to_disk(y, os.path.join(data_dir, out_y)) # type: ignore
out_y_shape: Optional[Tuple[int, ...]] = y.shape
else:
out_y = None
out_y_shape = None
if w is not None:
out_w: Optional[str] = "%s-w.npy" % basename
save_to_disk(w, os.path.join(data_dir, out_w)) # type: ignore
out_w_shape: Optional[Tuple[int, ...]] = w.shape
else:
out_w = None
out_w_shape = None
if ids is not None:
out_ids: Optional[str] = "%s-ids.npy" % basename
save_to_disk(ids, os.path.join(data_dir, out_ids)) # type: ignore
out_ids_shape: Optional[Tuple[int, ...]] = ids.shape
else:
out_ids = None
out_ids_shape = None
# note that this corresponds to the _construct_metadata column order
return [
out_ids, out_X, out_y, out_w, out_ids_shape, out_X_shape, out_y_shape,
out_w_shape
]
  def save_to_disk(self) -> None:
    """Save dataset to disk.

    Writes the current metadata dataframe and task list back to `data_dir`.
    """
    DiskDataset._save_metadata(self.metadata_df, self.data_dir, self.tasks)
    # Drop the in-memory shard cache so stale shard data is never served
    # after the on-disk state has changed.
    self._cached_shards = None
def move(self, new_data_dir: str,
delete_if_exists: Optional[bool] = True) -> None:
"""Moves dataset to new directory.
Parameters
----------
new_data_dir: str
The new directory name to move this to dataset to.
delete_if_exists: bool, optional (default True)
If this option is set, delete the destination directory if it exists
before moving. This is set to True by default to be backwards compatible
with behavior in earlier versions of DeepChem.
Note
----
This is a stateful operation! `self.data_dir` will be moved into
`new_data_dir`. If `delete_if_exists` is set to `True` (by default this is
set `True`), then `new_data_dir` is deleted if it's a pre-existing
directory.
"""
if delete_if_exists and os.path.isdir(new_data_dir):
shutil.rmtree(new_data_dir)
shutil.move(self.data_dir, new_data_dir)
if delete_if_exists:
self.data_dir = new_data_dir
else:
self.data_dir = os.path.join(new_data_dir,
os.path.basename(self.data_dir))
def copy(self, new_data_dir: str) -> "DiskDataset":
"""Copies dataset to new directory.
Parameters
----------
new_data_dir: str
The new directory name to copy this to dataset to.
Returns
-------
DiskDataset
A copied DiskDataset object.
Note
----
This is a stateful operation! Any data at `new_data_dir` will be deleted
and `self.data_dir` will be deep copied into `new_data_dir`.
"""
if os.path.isdir(new_data_dir):
shutil.rmtree(new_data_dir)
shutil.copytree(self.data_dir, new_data_dir)
return DiskDataset(new_data_dir)
  def get_task_names(self) -> np.ndarray:
    """Gets learning tasks associated with this dataset.

    Returns
    -------
    np.ndarray
      The task names loaded from this dataset's `tasks.json`.
    """
    return self.tasks
  def reshard(self, shard_size: int) -> None:
    """Reshards data to have specified shard size.

    Parameters
    ----------
    shard_size: int
      The size of shard.

    Examples
    --------
    >>> import deepchem as dc
    >>> import numpy as np
    >>> X = np.random.rand(100, 10)
    >>> d = dc.data.DiskDataset.from_numpy(X)
    >>> d.reshard(shard_size=10)
    >>> d.get_number_shards()
    10

    Note
    ----
    If this `DiskDataset` is in `legacy_metadata` format, reshard will
    convert this dataset to have non-legacy metadata.
    """
    # Create temp directory to store resharded version
    reshard_dir = tempfile.mkdtemp()
    n_shards = self.get_number_shards()

    # Get correct shapes for y/w
    tasks = self.get_task_names()
    _, y_shape, w_shape, _ = self.get_shape()
    # Only y_shape[1:] / w_shape[1:] (the per-sample shape) are used below,
    # so the first element of these synthesized shapes is irrelevant.
    if len(y_shape) == 1:
      y_shape = (len(y_shape), len(tasks))
    if len(w_shape) == 1:
      w_shape = (len(w_shape), len(tasks))

    # Write data in new shards
    def generator():
      # Accumulators carry samples that have not yet filled a full shard.
      X_next = np.zeros((0,) + self.get_data_shape())
      y_next = np.zeros((0,) + y_shape[1:])
      w_next = np.zeros((0,) + w_shape[1:])
      ids_next = np.zeros((0,), dtype=object)
      for shard_num, (X, y, w, ids) in enumerate(self.itershards()):
        logger.info("Resharding shard %d/%d" % (shard_num + 1, n_shards))
        # Handle shapes
        X = np.reshape(X, (len(X),) + self.get_data_shape())
        # Note that this means that DiskDataset resharding currently doesn't
        # work for datasets that aren't regression/classification.
        if y is None:  # datasets without label
          y = y_next
          w = w_next
        else:
          y = np.reshape(y, (len(y),) + y_shape[1:])
          w = np.reshape(w, (len(w),) + w_shape[1:])
        # Append this shard's samples to the accumulators, then emit
        # full-size shards while enough samples are buffered.
        X_next = np.concatenate([X_next, X], axis=0)
        y_next = np.concatenate([y_next, y], axis=0)
        w_next = np.concatenate([w_next, w], axis=0)
        ids_next = np.concatenate([ids_next, ids])
        while len(X_next) > shard_size:
          X_batch, X_next = X_next[:shard_size], X_next[shard_size:]
          y_batch, y_next = y_next[:shard_size], y_next[shard_size:]
          w_batch, w_next = w_next[:shard_size], w_next[shard_size:]
          ids_batch, ids_next = ids_next[:shard_size], ids_next[shard_size:]
          yield (X_batch, y_batch, w_batch, ids_batch)
      # Handle spillover from last shard
      yield (X_next, y_next, w_next, ids_next)

    resharded_dataset = DiskDataset.create_dataset(
        generator(), data_dir=reshard_dir, tasks=self.tasks)
    # Replace the on-disk contents with the resharded copy in place.
    shutil.rmtree(self.data_dir)
    shutil.move(reshard_dir, self.data_dir)
    # Should have updated to non-legacy metadata
    self.legacy_metadata = False
    self.metadata_df = resharded_dataset.metadata_df
    # Note that this resets the cache internally
    self.save_to_disk()
def get_data_shape(self) -> Shape:
"""Gets array shape of datapoints in this dataset."""
if not len(self.metadata_df):
raise ValueError("No data in dataset.")
if self.legacy_metadata:
sample_X = load_from_disk(
os.path.join(self.data_dir,
next(self.metadata_df.iterrows())[1]['X']))
return np.shape(sample_X)[1:]
else:
X_shape, _, _, _ = self.get_shape()
return X_shape[1:]
def get_shard_size(self) -> int:
"""Gets size of shards on disk."""
if not len(self.metadata_df):
raise ValueError("No data in dataset.")
sample_ids = load_from_disk(
os.path.join(self.data_dir,
next(self.metadata_df.iterrows())[1]['ids']))
return len(sample_ids)
def _get_metadata_filename(self) -> Tuple[str, str]:
"""Get standard location for metadata file."""
metadata_filename = os.path.join(self.data_dir, "metadata.csv.gzip")
tasks_filename = os.path.join(self.data_dir, "tasks.json")
return tasks_filename, metadata_filename
  def get_number_shards(self) -> int:
    """Returns the number of shards for this dataset."""
    # Each row of the metadata dataframe describes exactly one shard.
    return self.metadata_df.shape[0]
def itershards(self) -> Iterator[Batch]:
"""Return an object that iterates over all shards in dataset.
Datasets are stored in sharded fashion on disk. Each call to next() for the
generator defined by this function returns the data from a particular shard.
The order of shards returned is guaranteed to remain fixed.
Returns
-------
Iterator[Batch]
Generator which yields tuples of four numpy arrays `(X, y, w, ids)`.
"""
return (self.get_shard(i) for i in range(self.get_number_shards()))
def iterbatches(self,
batch_size: Optional[int] = None,
epochs: int = 1,
deterministic: bool = False,
pad_batches: bool = False) -> Iterator[Batch]:
""" Get an object that iterates over minibatches from the dataset.
It is guaranteed that the number of batches returned is
`math.ceil(len(dataset)/batch_size)`. Each minibatch is returned as
a tuple of four numpy arrays: `(X, y, w, ids)`.
Parameters
----------
batch_size: int, optional (default None)
Number of elements in a batch. If None, then it yields batches
with size equal to the size of each individual shard.
epoch: int, default 1
Number of epochs to walk over dataset
deterministic: bool, default False
Whether or not we should should shuffle each shard before
generating the batches. Note that this is only local in the
sense that it does not ever mix between different shards.
pad_batches: bool, default False
Whether or not we should pad the last batch, globally, such that
it has exactly batch_size elements.
Returns
-------
Iterator[Batch]
Generator which yields tuples of four numpy arrays `(X, y, w, ids)`.
"""
shard_indices = list(range(self.get_number_shards()))
return self._iterbatches_from_shards(shard_indices, batch_size, epochs,
deterministic, pad_batches)
  def _iterbatches_from_shards(self,
                               shard_indices: Sequence[int],
                               batch_size: Optional[int] = None,
                               epochs: int = 1,
                               deterministic: bool = False,
                               pad_batches: bool = False) -> Iterator[Batch]:
    """Get an object that iterates over batches from a restricted set of shards.

    Shards are prefetched on a background thread while the current shard is
    being consumed, and partial trailing batches are carried over into the
    next shard so every yielded batch (except possibly the last) has exactly
    `batch_size` samples.
    """

    def iterate(dataset: DiskDataset, batch_size: Optional[int], epochs: int):
      num_shards = len(shard_indices)
      if deterministic:
        shard_perm = np.arange(num_shards)

      # (ytz): Depending on the application, thread-based pools may be faster
      # than process based pools, since process based pools need to pickle/serialize
      # objects as an extra overhead. Also, as hideously as un-thread safe this looks,
      # we're actually protected by the GIL.
      # mp.dummy aliases ThreadPool to Pool
      pool = Pool(1)

      if batch_size is None:
        num_global_batches = num_shards
      else:
        num_global_batches = math.ceil(dataset.get_shape()[0][0] / batch_size)

      for epoch in range(epochs):
        if not deterministic:
          shard_perm = np.random.permutation(num_shards)
        # Prefetch the first shard asynchronously.
        next_shard = pool.apply_async(dataset.get_shard,
                                      (shard_indices[shard_perm[0]],))
        cur_global_batch = 0
        cur_shard = 0
        # `carry` holds a partial batch left over from the previous shard.
        carry = None

        while cur_global_batch < num_global_batches:

          X, y, w, ids = next_shard.get()
          # Kick off loading the following shard before processing this one.
          if cur_shard < num_shards - 1:
            next_shard = pool.apply_async(
                dataset.get_shard, (shard_indices[shard_perm[cur_shard + 1]],))
          elif epoch == epochs - 1:
            # Last shard of the last epoch: no more work for the pool.
            pool.close()

          # Prepend any carried-over samples to this shard's data.
          if carry is not None:
            X = np.concatenate([carry[0], X], axis=0)
            if y is not None:
              y = np.concatenate([carry[1], y], axis=0)
            if w is not None:
              w = np.concatenate([carry[2], w], axis=0)
            ids = np.concatenate([carry[3], ids], axis=0)
            carry = None

          n_shard_samples = X.shape[0]
          cur_local_batch = 0
          if batch_size is None:
            # batch_size None means "one batch per shard".
            shard_batch_size = n_shard_samples
          else:
            shard_batch_size = batch_size

          if n_shard_samples == 0:
            cur_shard += 1
            if batch_size is None:
              cur_global_batch += 1
            continue

          num_local_batches = math.ceil(n_shard_samples / shard_batch_size)
          # Shuffling is per-shard only; samples never mix across shards
          # except through the carry mechanism above.
          if not deterministic:
            sample_perm = np.random.permutation(n_shard_samples)
          else:
            sample_perm = np.arange(n_shard_samples)

          while cur_local_batch < num_local_batches:
            start = cur_local_batch * shard_batch_size
            end = min(n_shard_samples, (cur_local_batch + 1) * shard_batch_size)

            indices = range(start, end)
            perm_indices = sample_perm[indices]
            X_b = X[perm_indices]

            if y is not None:
              y_b = y[perm_indices]
            else:
              y_b = None

            if w is not None:
              w_b = w[perm_indices]
            else:
              w_b = None

            ids_b = ids[perm_indices]

            assert len(X_b) <= shard_batch_size
            # A short batch mid-stream becomes the carry for the next shard;
            # only the final shard may actually yield a short batch.
            if len(X_b) < shard_batch_size and cur_shard != num_shards - 1:
              assert carry is None
              carry = [X_b, y_b, w_b, ids_b]
            else:

              # (ytz): this skips everything except possibly the last shard
              if pad_batches:
                (X_b, y_b, w_b, ids_b) = pad_batch(shard_batch_size, X_b, y_b,
                                                   w_b, ids_b)

              yield X_b, y_b, w_b, ids_b
              cur_global_batch += 1
            cur_local_batch += 1
          cur_shard += 1

    return iterate(self, batch_size, epochs)
def itersamples(self) -> Iterator[Batch]:
"""Get an object that iterates over the samples in the dataset.
Returns
-------
Iterator[Batch]
Generator which yields tuples of four numpy arrays `(X, y, w, ids)`.
Examples
--------
>>> dataset = DiskDataset.from_numpy(np.ones((2,2)), np.ones((2,1)))
>>> for x, y, w, id in dataset.itersamples():
... print(x.tolist(), y.tolist(), w.tolist(), id)
[1.0, 1.0] [1.0] [1.0] 0
[1.0, 1.0] [1.0] [1.0] 1
"""
def iterate(dataset):
for (X_shard, y_shard, w_shard, ids_shard) in dataset.itershards():
n_samples = X_shard.shape[0]
for i in range(n_samples):
def sanitize(elem):
if elem is None:
return None
else:
return elem[i]
yield map(sanitize, [X_shard, y_shard, w_shard, ids_shard])
return iterate(self)
  def transform(self,
                transformer: "dc.trans.Transformer",
                parallel: bool = False,
                out_dir: Optional[str] = None,
                **args) -> "DiskDataset":
    """Construct a new dataset by applying a transformation to every sample in this dataset.

    The argument is a function that can be called as follows:
    >> newx, newy, neww = fn(x, y, w)

    It might be called only once with the whole dataset, or multiple times
    with different subsets of the data. Each time it is called, it should
    transform the samples and return the transformed data.

    Parameters
    ----------
    transformer: dc.trans.Transformer
      The transformation to apply to each sample in the dataset.
    parallel: bool, default False
      If True, use multiple processes to transform the dataset in parallel.
    out_dir: str, optional (default None)
      The directory to save the new dataset in. If this is omitted, a
      temporary directory is created automatically.

    Returns
    -------
    DiskDataset
      A newly constructed Dataset object
    """
    if out_dir is None:
      out_dir = tempfile.mkdtemp()
    tasks = self.get_task_names()
    n_shards = self.get_number_shards()

    time1 = time.time()
    if parallel:
      # Parallel path: transform each shard in a worker process. Only file
      # paths cross the process boundary; each worker loads, transforms and
      # writes its shard itself.
      results = []
      pool = multiprocessing.Pool()
      for i in range(self.get_number_shards()):
        row = self.metadata_df.iloc[i]
        X_file = os.path.join(self.data_dir, row['X'])
        # y/w may be absent for unlabeled datasets (None in metadata).
        if row['y'] is not None:
          y_file: Optional[str] = os.path.join(self.data_dir, row['y'])
        else:
          y_file = None
        if row['w'] is not None:
          w_file: Optional[str] = os.path.join(self.data_dir, row['w'])
        else:
          w_file = None
        ids_file = os.path.join(self.data_dir, row['ids'])
        results.append(
            pool.apply_async(DiskDataset._transform_shard,
                             (transformer, i, X_file, y_file, w_file, ids_file,
                              out_dir, tasks)))
      pool.close()
      metadata_rows = [r.get() for r in results]
      metadata_df = DiskDataset._construct_metadata(metadata_rows)
      DiskDataset._save_metadata(metadata_df, out_dir, tasks)
      dataset = DiskDataset(out_dir)
    else:
      # Serial path: stream shards through the transformer one at a time.
      def generator():
        for shard_num, row in self.metadata_df.iterrows():
          logger.info("Transforming shard %d/%d" % (shard_num, n_shards))
          X, y, w, ids = self.get_shard(shard_num)
          newx, newy, neww, newids = transformer.transform_array(X, y, w, ids)
          yield (newx, newy, neww, newids)

      dataset = DiskDataset.create_dataset(
          generator(), data_dir=out_dir, tasks=tasks)
    time2 = time.time()
    logger.info("TIMING: transforming took %0.3f s" % (time2 - time1))
    return dataset
  @staticmethod
  def _transform_shard(transformer: "dc.trans.Transformer", shard_num: int,
                       X_file: Optional[str], y_file: Optional[str],
                       w_file: Optional[str], ids_file: str, out_dir: str,
                       tasks: np.ndarray) -> List[Optional[str]]:
    """This is called by transform() to transform a single shard.

    Runs inside a worker process; loads the shard arrays from the given file
    paths (any of X/y/w may be None for datasets missing that component),
    applies the transformer, and writes the transformed shard to `out_dir`,
    returning the metadata row for the new shard.
    """
    X = None if X_file is None else np.array(load_from_disk(X_file))
    y = None if y_file is None else np.array(load_from_disk(y_file))
    w = None if w_file is None else np.array(load_from_disk(w_file))
    ids = np.array(load_from_disk(ids_file))
    X, y, w, ids = transformer.transform_array(X, y, w, ids)
    basename = "shard-%d" % shard_num
    return DiskDataset.write_data_to_disk(out_dir, basename, X, y, w, ids)
def make_pytorch_dataset(self,
epochs: int = 1,
deterministic: bool = False,
batch_size: Optional[int] = None):
"""Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset.
Each value returned by the Dataset's iterator is a tuple of (X, y, w, id)
containing the data for one batch, or for a single sample if batch_size is None.
Parameters
----------
epochs: int, default 1
The number of times to iterate over the Dataset
deterministic: bool, default False
If True, the data is produced in order. If False, a different
random permutation of the data is used for each epoch.
batch_size: int, optional (default None)
The number of samples to return in each batch. If None, each returned
value is a single sample.
Returns
-------
torch.utils.data.IterableDataset
`torch.utils.data.IterableDataset` that iterates over the data in
this dataset.
Note
----
This method requires PyTorch to be installed.
"""
try:
from deepchem.data.pytorch_datasets import _TorchDiskDataset
except:
raise ImportError("This method requires PyTorch to be installed.")
pytorch_ds = _TorchDiskDataset(
disk_dataset=self,
epochs=epochs,
deterministic=deterministic,
batch_size=batch_size)
return pytorch_ds
@staticmethod
def from_numpy(X: ArrayLike,
y: Optional[ArrayLike] = None,
w: Optional[ArrayLike] = None,
ids: Optional[ArrayLike] = None,
tasks: Optional[ArrayLike] = None,
data_dir: Optional[str] = None) -> "DiskDataset":
"""Creates a DiskDataset object from specified Numpy arrays.
Parameters
----------
X: np.ndarray
Feature array.
y: np.ndarray, optional (default None)
Labels array.
w: np.ndarray, optional (default None)
Weights array.
ids: np.ndarray, optional (default None)
Identifiers array.
tasks: Sequence, optional (default None)
Tasks in this dataset
data_dir: str, optional (default None)
The directory to write this dataset to. If none is specified, will use
a temporary directory instead.
Returns
-------
DiskDataset
A new `DiskDataset` constructed from the provided information.
"""
# To unify shape handling so from_numpy behaves like NumpyDataset, we just
# make a NumpyDataset under the hood
dataset = NumpyDataset(X, y, w, ids)
if tasks is None:
tasks = dataset.get_task_names()
# raw_data = (X, y, w, ids)
return DiskDataset.create_dataset(
[(dataset.X, dataset.y, dataset.w, dataset.ids)],
data_dir=data_dir,
tasks=tasks)
  @staticmethod
  def merge(datasets: Iterable["Dataset"],
            merge_dir: Optional[str] = None) -> "DiskDataset":
    """Merges provided datasets into a merged dataset.

    Parameters
    ----------
    datasets: Iterable[Dataset]
      List of datasets to merge.
    merge_dir: str, optional (default None)
      The new directory path to store the merged DiskDataset.

    Returns
    -------
    DiskDataset
      A merged DiskDataset.

    Raises
    ------
    ValueError
      If the datasets declare task lists that are not all identical.
    """
    if merge_dir is not None:
      if not os.path.exists(merge_dir):
        os.makedirs(merge_dir)
    else:
      merge_dir = tempfile.mkdtemp()

    # Protect against generator exhaustion
    datasets = list(datasets)

    # This ensures tasks are consistent for all datasets
    tasks = []
    for dataset in datasets:
      try:
        tasks.append(dataset.tasks)  # type: ignore
      except AttributeError:
        # Datasets without a `tasks` attribute (e.g. NumpyDataset) are
        # tolerated, but then every dataset must lack it (checked below).
        pass
    if tasks:
      task_tuples = [tuple(task_list) for task_list in tasks]
      if len(tasks) < len(datasets) or len(set(task_tuples)) > 1:
        raise ValueError(
            'Cannot merge datasets with different task specifications')
      merge_tasks = tasks[0]
    else:
      merge_tasks = []

    # determine the shard sizes of the datasets to merge
    shard_sizes = []
    for dataset in datasets:
      if hasattr(dataset, 'get_shard_size'):
        shard_sizes.append(dataset.get_shard_size())  # type: ignore
      # otherwise the entire dataset is the "shard size"
      else:
        shard_sizes.append(len(dataset))

    def generator():
      for ind, dataset in enumerate(datasets):
        logger.info("Merging in dataset %d/%d" % (ind, len(datasets)))
        if hasattr(dataset, 'itershards'):
          # Disk-backed datasets are streamed shard by shard.
          for (X, y, w, ids) in dataset.itershards():
            yield (X, y, w, ids)
        else:
          # In-memory datasets contribute a single shard.
          yield (dataset.X, dataset.y, dataset.w, dataset.ids)

    merged_dataset = DiskDataset.create_dataset(
        generator(), data_dir=merge_dir, tasks=merge_tasks)

    # we must reshard the dataset to have a uniform size
    # choose the smallest shard size
    if len(set(shard_sizes)) > 1:
      merged_dataset.reshard(min(shard_sizes))

    return merged_dataset
def subset(self, shard_nums: Sequence[int],
           subset_dir: Optional[str] = None) -> "DiskDataset":
    """Creates a subset of the original dataset on disk.

    Parameters
    ----------
    shard_nums: Sequence[int]
        The indices of shard to extract from the original DiskDataset.
    subset_dir: str, optional (default None)
        The new directory path to store the subset DiskDataset.

    Returns
    -------
    DiskDataset
        A subset DiskDataset.
    """
    if subset_dir is not None:
        if not os.path.exists(subset_dir):
            os.makedirs(subset_dir)
    else:
        subset_dir = tempfile.mkdtemp()
    tasks = self.get_task_names()
    # Convert once to a set: membership is tested once per shard and
    # `shard_nums` may be a long list (O(1) vs O(len(shard_nums)) per test).
    wanted_shards = set(shard_nums)

    def generator():
        # iterrows() index is the shard number; the row itself is unused.
        for shard_num, _ in self.metadata_df.iterrows():
            if shard_num not in wanted_shards:
                continue
            X, y, w, ids = self.get_shard(shard_num)
            yield (X, y, w, ids)

    return DiskDataset.create_dataset(
        generator(), data_dir=subset_dir, tasks=tasks)
def sparse_shuffle(self) -> None:
    """Shuffling that exploits data sparsity to shuffle large datasets.

    If feature vectors are sparse, say circular fingerprints or any other
    representation that contains few nonzero values, it can be possible to
    exploit the sparsity of the vector to simplify shuffles. This method
    implements a sparse shuffle by compressing sparse feature vectors down
    into a compressed representation, then shuffles this compressed dataset in
    memory and writes the results to disk.

    Note
    ----
    This method only works for 1-dimensional feature vectors (does not work
    for tensorial featurizations). Note that this shuffle is performed in
    place.
    """
    time1 = time.time()
    shard_size = self.get_shard_size()
    num_shards = self.get_number_shards()
    X_sparse_list: List[np.ndarray] = []
    y_list: List[np.ndarray] = []
    w_list: List[np.ndarray] = []
    ids_list: List[np.ndarray] = []
    num_features = -1
    for i in range(num_shards):
        logger.info("Sparsifying shard %d/%d" % (i, num_shards))
        (X_s, y_s, w_s, ids_s) = self.get_shard(i)
        if num_features == -1:
            num_features = X_s.shape[1]
        # Append in place. The previous implementation rebuilt all four
        # lists on every iteration (`lst + [item]`), which is O(n^2) in the
        # number of shards.
        X_sparse_list.append(sparsify_features(X_s))
        y_list.append(y_s)
        w_list.append(w_s)
        ids_list.append(np.atleast_1d(np.squeeze(ids_s)))
    # Get full dataset in memory
    (X_sparse, y, w, ids) = (np.vstack(X_sparse_list), np.vstack(y_list),
                             np.vstack(w_list), np.concatenate(ids_list))
    # Shuffle in memory
    num_samples = len(X_sparse)
    permutation = np.random.permutation(num_samples)
    X_sparse, y, w, ids = (X_sparse[permutation], y[permutation],
                           w[permutation], ids[permutation])
    # Write shuffled shards out to disk
    for i in range(num_shards):
        logger.info("Sparse shuffling shard %d/%d" % (i, num_shards))
        start, stop = i * shard_size, (i + 1) * shard_size
        (X_sparse_s, y_s, w_s, ids_s) = (X_sparse[start:stop], y[start:stop],
                                         w[start:stop], ids[start:stop])
        X_s = densify_features(X_sparse_s, num_features)
        self.set_shard(i, X_s, y_s, w_s, ids_s)
    time2 = time.time()
    logger.info("TIMING: sparse_shuffle took %0.3f s" % (time2 - time1))
def complete_shuffle(self, data_dir: Optional[str] = None) -> Dataset:
    """Completely shuffle across all data, across all shards.

    Note
    ----
    The algorithm used for this complete shuffle is O(N^2) where N is the
    number of shards. It simply constructs each shard of the output dataset
    one at a time. Since the complete shuffle can take a long time, it's
    useful to watch the logging output. Each shuffled shard is constructed
    using select() which logs as it selects from each original shard. This
    will results in O(N^2) logging statements, one for each extraction of
    shuffled shard i's contributions from original shard j.

    Parameters
    ----------
    data_dir: Optional[str], (default None)
        Directory to write the shuffled dataset to. If none is specified a
        temporary directory will be used.

    Returns
    -------
    DiskDataset
        A DiskDataset whose data is a randomly shuffled version of this dataset.
    """
    # A full random permutation of all sample indices, materialized as a
    # list, handed to select() which does the heavy lifting shard by shard.
    permutation = np.random.permutation(len(self)).tolist()
    return self.select(permutation, data_dir, self.get_shard_size())
def shuffle_each_shard(self,
                       shard_basenames: Optional[List[str]] = None) -> None:
    """Shuffles elements within each shard of the dataset.

    Parameters
    ----------
    shard_basenames: List[str], optional (default None)
        The basenames for each shard. If this isn't specified, will assume the
        basenames of form "shard-i" used by `create_dataset` and `reshard`.
    """
    # One metadata row per shard.
    n_rows = len(self.metadata_df.index)
    if shard_basenames is None:
        shard_basenames = ["shard-%d" % shard_num for shard_num in range(n_rows)]
    elif len(shard_basenames) != n_rows:
        raise ValueError(
            "shard_basenames must provide a basename for each shard in this DiskDataset."
        )
    for i, basename in enumerate(shard_basenames):
        logger.info("Shuffling shard %d/%d" % (i, n_rows))
        X, y, w, ids = self.get_shard(i)
        perm = np.random.permutation(X.shape[0])
        DiskDataset.write_data_to_disk(self.data_dir, basename, X[perm],
                                       y[perm], w[perm], ids[perm])
    # Drop the in-memory cache; it no longer matches what is on disk.
    self._cached_shards = None
def shuffle_shards(self) -> None:
    """Shuffles the order of the shards for this dataset."""
    # Permute the metadata rows, rebuild the metadata frame from the new
    # ordering, and persist the result.
    rows = self.metadata_df.values.tolist()
    random.shuffle(rows)
    self.metadata_df = DiskDataset._construct_metadata(rows)
    self.save_to_disk()
def get_shard(self, i: int) -> Batch:
    """Retrieves data for the i-th shard from disk.

    Loaded shards are kept in an in-memory cache (bounded by
    `self._memory_cache_size` bytes) so that repeated passes over the
    dataset avoid re-reading the same files.

    Parameters
    ----------
    i: int
        Shard index for shard to retrieve batch from.

    Returns
    -------
    Batch
        A batch data for i-th shard.
    """
    # See if we have a cached copy of this shard.
    if self._cached_shards is None:
        # Lazily initialize one cache slot per shard.
        self._cached_shards = [None] * self.get_number_shards()
        self._cache_used = 0
    if self._cached_shards[i] is not None:
        shard = self._cached_shards[i]
        return (shard.X, shard.y, shard.w, shard.ids)
    # We don't, so load it from disk. The metadata row holds the on-disk
    # filenames for this shard's X/y/w/ids arrays.
    row = self.metadata_df.iloc[i]
    X = np.array(load_from_disk(os.path.join(self.data_dir, row['X'])))
    if row['y'] is not None:
        y: Optional[np.ndarray] = np.array(
            load_from_disk(os.path.join(self.data_dir, row['y'])))
    else:
        y = None
    if row['w'] is not None:
        # TODO (ytz): Under what condition does this exist but the file itself doesn't?
        w_filename = os.path.join(self.data_dir, row['w'])
        if os.path.exists(w_filename):
            w: Optional[np.ndarray] = np.array(load_from_disk(w_filename))
        elif y is not None:
            # Missing weights file: fall back to unit weights shaped to
            # match the labels (1-D labels get a 1-D weight vector).
            if len(y.shape) == 1:
                w = np.ones(y.shape[0], np.float32)
            else:
                w = np.ones((y.shape[0], 1), np.float32)
        else:
            w = None
    else:
        w = None
    ids = np.array(
        load_from_disk(os.path.join(self.data_dir, row['ids'])), dtype=object)
    # Try to cache this shard for later use. Since the normal usage pattern is
    # a series of passes through the whole dataset, there's no point doing
    # anything fancy. It never makes sense to evict another shard from the
    # cache to make room for this one, because we'll probably want that other
    # shard again before the next time we want this one. So just cache as many
    # as we can and then stop.
    shard = _Shard(X, y, w, ids)
    shard_size = X.nbytes + ids.nbytes
    if y is not None:
        shard_size += y.nbytes
    if w is not None:
        shard_size += w.nbytes
    if self._cache_used + shard_size < self._memory_cache_size:
        self._cached_shards[i] = shard
        self._cache_used += shard_size
    return (shard.X, shard.y, shard.w, shard.ids)
def get_shard_ids(self, i: int) -> np.ndarray:
    """Retrieves the list of IDs for the i-th shard from disk.

    Parameters
    ----------
    i: int
        Shard index for shard to retrieve ids from.

    Returns
    -------
    np.ndarray
        A numpy array of ids for i-th shard.
    """
    # Serve from the in-memory cache when the whole shard is already loaded.
    if self._cached_shards is not None and self._cached_shards[i] is not None:
        return self._cached_shards[i].ids
    row = self.metadata_df.iloc[i]
    return np.array(
        load_from_disk(os.path.join(self.data_dir, row['ids'])), dtype=object)
def get_shard_y(self, i: int) -> np.ndarray:
    """Retrieves the labels for the i-th shard from disk.

    Parameters
    ----------
    i: int
        Shard index for shard to retrieve labels from.

    Returns
    -------
    np.ndarray
        A numpy array of labels for i-th shard.
    """
    # Prefer the in-memory cache when this shard has already been loaded.
    cached = self._cached_shards
    if cached is not None and cached[i] is not None:
        return cached[i].y
    y_path = os.path.join(self.data_dir, self.metadata_df.iloc[i]['y'])
    return np.array(load_from_disk(y_path))
def get_shard_w(self, i: int) -> np.ndarray:
    """Retrieves the weights for the i-th shard from disk.

    Parameters
    ----------
    i: int
        Shard index for shard to retrieve weights from.

    Returns
    -------
    np.ndarray
        A numpy array of weights for i-th shard.
    """
    # Prefer the in-memory cache when this shard has already been loaded.
    cached = self._cached_shards
    if cached is not None and cached[i] is not None:
        return cached[i].w
    w_path = os.path.join(self.data_dir, self.metadata_df.iloc[i]['w'])
    return np.array(load_from_disk(w_path))
def add_shard(self,
              X: np.ndarray,
              y: Optional[np.ndarray] = None,
              w: Optional[np.ndarray] = None,
              ids: Optional[np.ndarray] = None) -> None:
    """Adds a data shard.

    Parameters
    ----------
    X: np.ndarray
        Feature array.
    y: np.ndarray, optional (default None)
        Labels array.
    w: np.ndarray, optional (default None)
        Weights array.
    ids: np.ndarray, optional (default None)
        Identifiers array.
    """
    rows = self.metadata_df.values.tolist()
    # The new shard is appended after the existing ones.
    basename = "shard-%d" % len(rows)
    rows.append(
        DiskDataset.write_data_to_disk(self.data_dir, basename, X, y, w, ids))
    self.metadata_df = DiskDataset._construct_metadata(rows)
    self.save_to_disk()
def set_shard(self,
              shard_num: int,
              X: np.ndarray,
              y: Optional[np.ndarray] = None,
              w: Optional[np.ndarray] = None,
              ids: Optional[np.ndarray] = None) -> None:
    """Writes data shard to disk.

    Parameters
    ----------
    shard_num: int
        Shard index for shard to set new data.
    X: np.ndarray
        Feature array.
    y: np.ndarray, optional (default None)
        Labels array.
    w: np.ndarray, optional (default None)
        Weights array.
    ids: np.ndarray, optional (default None)
        Identifiers array.
    """
    DiskDataset.write_data_to_disk(self.data_dir, "shard-%d" % shard_num, X,
                                   y, w, ids)
    # The cache may now hold stale data, and the per-shard shape metadata is
    # no longer trustworthy, so flag the legacy (shapeless) metadata format.
    self._cached_shards = None
    self.legacy_metadata = True
def select(self,
           indices: Union[Sequence[int], np.ndarray],
           select_dir: Optional[str] = None,
           select_shard_size: Optional[int] = None,
           output_numpy_dataset: Optional[bool] = False) -> Dataset:
    """Creates a new dataset from a selection of indices from self.

    Examples
    --------
    >>> import numpy as np
    >>> X = np.random.rand(10, 10)
    >>> dataset = dc.data.DiskDataset.from_numpy(X)
    >>> selected = dataset.select([1, 3, 4])
    >>> len(selected)
    3

    Parameters
    ----------
    indices: Sequence
        List of indices to select.
    select_dir: str, optional (default None)
        Path to new directory that the selected indices will be copied to.
    select_shard_size: Optional[int], (default None)
        If specified, the shard-size to use for output selected `DiskDataset`.
        If not output_numpy_dataset, then this is set to this current dataset's
        shard size if not manually specified.
    output_numpy_dataset: Optional[bool], (default False)
        If True, output an in-memory `NumpyDataset` instead of a `DiskDataset`.
        Note that `select_dir` and `select_shard_size` must be `None` if this
        is `True`

    Returns
    -------
    Dataset
        A dataset containing the selected samples. The default dataset is `DiskDataset`.
        If `output_numpy_dataset` is True, the dataset is `NumpyDataset`.
    """
    if output_numpy_dataset and (select_dir is not None or
                                 select_shard_size is not None):
        raise ValueError(
            "If output_numpy_dataset is set, then select_dir and select_shard_size must both be None"
        )
    if output_numpy_dataset:
        # When outputting a NumpyDataset, we have 1 in-memory shard
        select_shard_size = len(indices)
    else:
        if select_dir is not None:
            if not os.path.exists(select_dir):
                os.makedirs(select_dir)
        else:
            select_dir = tempfile.mkdtemp()
        if select_shard_size is None:
            select_shard_size = self.get_shard_size()
    # Handle edge case with empty indices
    if not len(indices):
        if not output_numpy_dataset:
            return DiskDataset.create_dataset([], data_dir=select_dir)
        else:
            return NumpyDataset(
                np.array([]), np.array([]), np.array([]), np.array([]))
    N = len(indices)
    tasks = self.get_task_names()
    n_shards = self.get_number_shards()

    # We use two loops here. The outer while loop walks over selection shards
    # (the chunks of the indices to select that should go into separate
    # output shards), while the inner for loop walks over the shards in the
    # source datasets to select out the shard indices from that source shard
    def generator():
        start = 0
        select_shard_num = 0
        while start < N:
            logger.info(
                "Constructing selection output shard %d" % (select_shard_num + 1))
            end = min(start + select_shard_size, N)
            select_shard_indices = indices[start:end]
            # Sorting lets us sweep the source shards in one forward pass;
            # the caller's requested ordering is restored below via
            # `reverted_indices`.
            sorted_indices = np.array(sorted(select_shard_indices)).astype(int)

            Xs, ys, ws, ids_s = [], [], [], []
            # `count` is the running global index of the first sample of the
            # current source shard; `indices_count` is how many of the sorted
            # indices have been consumed so far.
            count, indices_count = 0, 0
            for shard_num in range(self.get_number_shards()):
                logger.info(
                    "Selecting from input shard %d/%d for selection output shard %d" %
                    (shard_num + 1, n_shards, select_shard_num + 1))
                if self.legacy_metadata:
                    # Legacy metadata lacks shape columns; load ids to learn
                    # the shard length.
                    ids = self.get_shard_ids(shard_num)
                    shard_len = len(ids)
                else:
                    shard_X_shape, _, _, _ = self._get_shard_shape(shard_num)
                    if len(shard_X_shape) > 0:
                        shard_len = shard_X_shape[0]
                    else:
                        shard_len = 0
                # Find indices which rest in this shard
                num_shard_elts = 0
                while sorted_indices[indices_count +
                                     num_shard_elts] < count + shard_len:
                    num_shard_elts += 1
                    if (indices_count + num_shard_elts) >= len(sorted_indices):
                        break
                if num_shard_elts == 0:
                    count += shard_len
                    continue
                else:
                    X, y, w, ids = self.get_shard(shard_num)
                    # Need to offset indices to fit within shard_size
                    shard_inds = sorted_indices[indices_count:indices_count +
                                                num_shard_elts] - count
                    # Handle empty case where no data from this shard needed
                    X_sel = X[shard_inds]
                    # Handle the case of datasets with y/w missing
                    if y is not None:
                        y_sel = y[shard_inds]
                    else:
                        y_sel = np.array([])
                    if w is not None:
                        w_sel = w[shard_inds]
                    else:
                        w_sel = np.array([])
                    ids_sel = ids[shard_inds]
                    Xs.append(X_sel)
                    ys.append(y_sel)
                    ws.append(w_sel)
                    ids_s.append(ids_sel)
                    indices_count += num_shard_elts
                    count += shard_len
                    # Break if all indices have been used up already
                    if indices_count >= len(sorted_indices):
                        break
            # Note these will be in the sorted order
            X = np.concatenate(Xs, axis=0)
            y = np.concatenate(ys, axis=0)
            w = np.concatenate(ws, axis=0)
            ids = np.concatenate(ids_s, axis=0)
            # We need to recover the original ordering. We can do this by using
            # np.where to find the locations of the original indices in the sorted
            # indices.
            reverted_indices = np.array(
                # We know there's only one match for np.where since this is a
                # permutation, so the [0][0] pulls out the exact match location.
                [
                    np.where(sorted_indices == orig_index)[0][0]
                    for orig_index in select_shard_indices
                ])
            if y.size == 0:
                tup_y = y
            else:
                tup_y = y[reverted_indices]
            if w.size == 0:
                tup_w = w
            else:
                tup_w = w[reverted_indices]
            X, ids = X[reverted_indices], ids[reverted_indices]
            yield (X, tup_y, tup_w, ids)
            start = end
            select_shard_num += 1

    if not output_numpy_dataset:
        return DiskDataset.create_dataset(
            generator(), data_dir=select_dir, tasks=tasks)
    else:
        # Single in-memory shard: the generator yields exactly once.
        X, y, w, ids = next(generator())
        return NumpyDataset(X, y, w, ids)
@property
def ids(self) -> np.ndarray:
    """Get the ids vector for this dataset as a single numpy array."""
    if len(self) == 0:
        return np.array([])
    # Squeeze each shard's ids to 1-D before concatenating.
    shard_ids = [
        np.atleast_1d(np.squeeze(self.get_shard_ids(i)))
        for i in range(self.get_number_shards())
    ]
    return np.concatenate(shard_ids)
@property
def X(self) -> np.ndarray:
    """Get the X vector for this dataset as a single numpy array."""
    # Guard the empty dataset explicitly, consistent with the `y` and `ids`
    # properties; without it np.vstack raises on an empty list of shards.
    if len(self) == 0:
        return np.array([])
    Xs = []
    one_dimensional = False
    for (X_b, _, _, _) in self.itershards():
        Xs.append(X_b)
        if len(X_b.shape) == 1:
            one_dimensional = True
    if not one_dimensional:
        return np.vstack(Xs)
    else:
        return np.concatenate(Xs)
@property
def y(self) -> np.ndarray:
    """Get the y vector for this dataset as a single numpy array."""
    if len(self) == 0:
        return np.array([])
    ys = []
    flat = False
    for i in range(self.get_number_shards()):
        shard_y = self.get_shard_y(i)
        ys.append(shard_y)
        if len(shard_y.shape) == 1:
            flat = True
    # 1-D shards are concatenated; 2-D shards are stacked row-wise.
    if flat:
        return np.concatenate(ys)
    return np.vstack(ys)
@property
def w(self) -> np.ndarray:
    """Get the weight vector for this dataset as a single numpy array."""
    # Guard the empty dataset explicitly, consistent with the `y` and `ids`
    # properties; without it np.vstack raises on an empty list of shards.
    if len(self) == 0:
        return np.array([])
    ws = []
    one_dimensional = False
    for i in range(self.get_number_shards()):
        w_b = self.get_shard_w(i)
        ws.append(w_b)
        if len(w_b.shape) == 1:
            one_dimensional = True
    if not one_dimensional:
        return np.vstack(ws)
    else:
        return np.concatenate(ws)
@property
def memory_cache_size(self) -> int:
    """Get the size of the memory cache for this dataset, measured in bytes."""
    # Simple accessor for the cache budget used by get_shard's shard cache.
    return self._memory_cache_size
@memory_cache_size.setter
def memory_cache_size(self, size: int) -> None:
    """Set the size of the memory cache for this dataset, measured in bytes."""
    self._memory_cache_size = size
    # If the cache already exceeds the new budget, drop it entirely rather
    # than evicting individual shards.
    if self._cache_used > size:
        self._cached_shards = None
def __len__(self) -> int:
    """Finds number of elements in dataset."""
    # The ids file of each shard is the cheapest array that records the
    # shard's sample count; sum their lengths across all metadata rows.
    total = 0
    for _, row in self.metadata_df.iterrows():
        shard_ids = load_from_disk(os.path.join(self.data_dir, row['ids']))
        total += len(shard_ids)
    return total
def _get_shard_shape(self,
                     shard_num: int) -> Tuple[Shape, Shape, Shape, Shape]:
    """Finds the shape of the specified shard.

    Reads the ``*_shape`` columns of the metadata frame (stored as string
    representations of tuples) instead of loading the arrays themselves.
    """
    if self.legacy_metadata:
        raise ValueError(
            "This function requires the new metadata format to be called. Please reshard this dataset by calling the reshard() method."
        )
    n_tasks = len(self.get_task_names())
    row = self.metadata_df.iloc[shard_num]
    # make_tuple parses the stringified shape back into a Python tuple.
    if row['X_shape'] is not None:
        shard_X_shape = make_tuple(str(row['X_shape']))
    else:
        shard_X_shape = tuple()
    if n_tasks > 0:
        if row['y_shape'] is not None:
            shard_y_shape = make_tuple(str(row['y_shape']))
        else:
            shard_y_shape = tuple()
        if row['w_shape'] is not None:
            shard_w_shape = make_tuple(str(row['w_shape']))
        else:
            shard_w_shape = tuple()
    else:
        # Datasets without tasks carry no labels or weights.
        shard_y_shape = tuple()
        shard_w_shape = tuple()
    if row['ids_shape'] is not None:
        shard_ids_shape = make_tuple(str(row['ids_shape']))
    else:
        shard_ids_shape = tuple()
    # Round-tripping through np.array normalizes the entries of each tuple.
    X_shape, y_shape, w_shape, ids_shape = tuple(
        np.array(shard_X_shape)), tuple(np.array(shard_y_shape)), tuple(
            np.array(shard_w_shape)), tuple(np.array(shard_ids_shape))
    return X_shape, y_shape, w_shape, ids_shape
def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]:
    """Finds shape of dataset.

    Returns four tuples, giving the shape of the X, y, w, and ids arrays.
    """
    n_tasks = len(self.get_task_names())
    n_rows = len(self.metadata_df.index)
    # If shape metadata is available use it to directly compute shape from
    # metadata
    if not self.legacy_metadata:
        for shard_num in range(n_rows):
            shard_X_shape, shard_y_shape, shard_w_shape, shard_ids_shape = self._get_shard_shape(
                shard_num)
            if shard_num == 0:
                # np.array gives a mutable first dimension so shard sizes
                # can be accumulated in place below.
                X_shape, y_shape, w_shape, ids_shape = np.array(
                    shard_X_shape), np.array(shard_y_shape), np.array(
                        shard_w_shape), np.array(shard_ids_shape)
            else:
                # Only the leading (sample) dimension grows across shards.
                X_shape[0] += shard_X_shape[0]
                if n_tasks > 0:
                    y_shape[0] += shard_y_shape[0]
                    w_shape[0] += shard_w_shape[0]
                ids_shape[0] += shard_ids_shape[0]
        return tuple(X_shape), tuple(y_shape), tuple(w_shape), tuple(ids_shape)
    # In absence of shape metadata, fall back to loading data from disk to
    # find shape.
    else:
        for shard_num, (X, y, w, ids) in enumerate(self.itershards()):
            if shard_num == 0:
                X_shape = np.array(X.shape)
                if n_tasks > 0:
                    y_shape = np.array(y.shape)
                    w_shape = np.array(w.shape)
                else:
                    y_shape = np.array([])
                    w_shape = np.array([])
                ids_shape = np.array(ids.shape)
            else:
                X_shape[0] += np.array(X.shape)[0]
                if n_tasks > 0:
                    y_shape[0] += np.array(y.shape)[0]
                    w_shape[0] += np.array(w.shape)[0]
                ids_shape[0] += np.array(ids.shape)[0]
        return tuple(X_shape), tuple(y_shape), tuple(w_shape), tuple(ids_shape)
def get_label_means(self) -> pd.DataFrame:
    """Return pandas series of label means.

    NOTE(review): despite the return annotation, this returns the
    ``y_means`` column of the metadata frame, i.e. a ``pd.Series``.
    """
    return self.metadata_df["y_means"]
def get_label_stds(self) -> pd.DataFrame:
    """Return pandas series of label stds.

    NOTE(review): despite the return annotation, this returns the
    ``y_stds`` column of the metadata frame, i.e. a ``pd.Series``.
    """
    return self.metadata_df["y_stds"]
class ImageDataset(Dataset):
    """A Dataset that loads data from image files on disk."""

    def __init__(self,
                 X: Union[np.ndarray, List[str]],
                 y: Optional[Union[np.ndarray, List[str]]],
                 w: Optional[ArrayLike] = None,
                 ids: Optional[ArrayLike] = None) -> None:
        """Create a dataset whose X and/or y array is defined by image files on disk.

        Parameters
        ----------
        X: np.ndarray or List[str]
            The dataset's input data. This may be either a single NumPy
            array directly containing the data, or a list containing the
            paths to the image files
        y: np.ndarray or List[str]
            The dataset's labels. This may be either a single NumPy array
            directly containing the data, or a list containing the paths to
            the image files
        w: np.ndarray, optional (default None)
            a 1D or 2D array containing the weights for each sample or
            sample/task pair
        ids: np.ndarray, optional (default None)
            the sample IDs
        """
        n_samples = len(X)
        if y is None:
            y = np.zeros((n_samples,))
        self._X_shape = self._find_array_shape(X)
        self._y_shape = self._find_array_shape(y)
        if w is None:
            # Default to unit weights shaped to match the labels.
            if len(self._y_shape) == 0:
                # Case n_samples should be 1
                if n_samples != 1:
                    raise ValueError("y can only be a scalar if n_samples == 1")
                w = np.ones_like(y)
            elif len(self._y_shape) == 1:
                w = np.ones(self._y_shape[0], np.float32)
            else:
                w = np.ones((self._y_shape[0], 1), np.float32)
        if ids is None:
            # Fall back to filenames as IDs when either X or y is a path
            # list; otherwise use integer indices.
            if not isinstance(X, np.ndarray):
                ids = X
            elif not isinstance(y, np.ndarray):
                ids = y
            else:
                ids = np.arange(n_samples)
        self._X = X
        self._y = y
        self._w = np.asarray(w)
        self._ids = np.array(ids, dtype=object)

    def _find_array_shape(self, array: Union[np.ndarray, List[str]]) -> Shape:
        """Compute the logical shape of `array` without loading every image.

        For a list of filenames, only the first image is loaded to learn the
        per-sample shape.
        """
        if isinstance(array, np.ndarray):
            return array.shape
        image_shape = load_image_files([array[0]]).shape[1:]
        return tuple(np.concatenate([[len(array)], image_shape]))

    def __len__(self) -> int:
        """Get the number of elements in the dataset."""
        return self._X_shape[0]

    def get_shape(self) -> Tuple[Shape, Shape, Shape, Shape]:
        """Get the shape of the dataset.

        Returns four tuples, giving the shape of the X, y, w, and ids arrays.
        """
        return self._X_shape, self._y_shape, self._w.shape, self._ids.shape

    def get_task_names(self) -> np.ndarray:
        """Get the names of the tasks associated with this dataset."""
        if len(self._y_shape) < 2:
            return np.array([0])
        return np.arange(self._y_shape[1])

    @property
    def X(self) -> np.ndarray:
        """Get the X vector for this dataset as a single numpy array."""
        if isinstance(self._X, np.ndarray):
            return self._X
        return load_image_files(self._X)

    @property
    def y(self) -> np.ndarray:
        """Get the y vector for this dataset as a single numpy array."""
        if isinstance(self._y, np.ndarray):
            return self._y
        return load_image_files(self._y)

    @property
    def ids(self) -> np.ndarray:
        """Get the ids vector for this dataset as a single numpy array."""
        return self._ids

    @property
    def w(self) -> np.ndarray:
        """Get the weight vector for this dataset as a single numpy array."""
        return self._w

    def iterbatches(self,
                    batch_size: Optional[int] = None,
                    epochs: int = 1,
                    deterministic: bool = False,
                    pad_batches: bool = False) -> Iterator[Batch]:
        """Get an object that iterates over minibatches from the dataset.

        Each minibatch is returned as a tuple of four numpy arrays:
        `(X, y, w, ids)`.

        Parameters
        ----------
        batch_size: int, optional (default None)
            Number of elements in each batch.
        epochs: int, default 1
            Number of epochs to walk over dataset.
        deterministic: bool, default False
            If True, follow deterministic order.
        pad_batches: bool, default False
            If True, pad each batch to `batch_size`.

        Returns
        -------
        Iterator[Batch]
            Generator which yields tuples of four numpy arrays `(X, y, w, ids)`.
        """

        def iterate(dataset, batch_size, epochs, deterministic, pad_batches):
            n_samples = dataset._X_shape[0]
            if deterministic:
                sample_perm = np.arange(n_samples)
            if batch_size is None:
                batch_size = n_samples
            for epoch in range(epochs):
                if not deterministic:
                    sample_perm = np.random.permutation(n_samples)
                batch_idx = 0
                # Use np.ceil here: the `np.math` alias used previously was
                # deprecated and removed in NumPy 2.0.
                num_batches = int(np.ceil(n_samples / batch_size))
                while batch_idx < num_batches:
                    start = batch_idx * batch_size
                    end = min(n_samples, (batch_idx + 1) * batch_size)
                    indices = range(start, end)
                    perm_indices = sample_perm[indices]
                    # Load images lazily when X/y are lists of filenames.
                    if isinstance(dataset._X, np.ndarray):
                        X_batch = dataset._X[perm_indices]
                    else:
                        X_batch = load_image_files(
                            [dataset._X[i] for i in perm_indices])
                    if isinstance(dataset._y, np.ndarray):
                        y_batch = dataset._y[perm_indices]
                    else:
                        y_batch = load_image_files(
                            [dataset._y[i] for i in perm_indices])
                    w_batch = dataset._w[perm_indices]
                    ids_batch = dataset._ids[perm_indices]
                    if pad_batches:
                        (X_batch, y_batch, w_batch, ids_batch) = pad_batch(
                            batch_size, X_batch, y_batch, w_batch, ids_batch)
                    batch_idx += 1
                    yield (X_batch, y_batch, w_batch, ids_batch)

        return iterate(self, batch_size, epochs, deterministic, pad_batches)

    def _get_image(self, array: Union[np.ndarray, List[str]],
                   indices: Union[int, np.ndarray]) -> np.ndarray:
        """Method for loading an image

        Parameters
        ----------
        array: Union[np.ndarray, List[str]]
            A numpy array which contains images or List of image filenames
        indices: Union[int, np.ndarray]
            Index you want to get the images

        Returns
        -------
        np.ndarray
            Loaded images
        """
        if isinstance(array, np.ndarray):
            return array[indices]
        if isinstance(indices, np.ndarray):
            return load_image_files([array[i] for i in indices])
        return load_image_files([array[indices]])[0]

    def itersamples(self) -> Iterator[Batch]:
        """Get an object that iterates over the samples in the dataset.

        Returns
        -------
        Iterator[Batch]
            Iterator which yields tuples of four numpy arrays `(X, y, w, ids)`.
        """
        n_samples = self._X_shape[0]
        return ((self._get_image(self._X, i), self._get_image(self._y, i),
                 self._w[i], self._ids[i]) for i in range(n_samples))

    def transform(
        self,
        transformer: "dc.trans.Transformer",
        **args,
    ) -> "NumpyDataset":
        """Construct a new dataset by applying a transformation to every sample in this dataset.

        The argument is a function that can be called as follows:
        >> newx, newy, neww = fn(x, y, w)

        It might be called only once with the whole dataset, or multiple times with
        different subsets of the data. Each time it is called, it should transform
        the samples and return the transformed data.

        Parameters
        ----------
        transformer: dc.trans.Transformer
            The transformation to apply to each sample in the dataset

        Returns
        -------
        NumpyDataset
            A newly constructed NumpyDataset object
        """
        newx, newy, neww, newids = transformer.transform_array(
            self.X, self.y, self.w, self.ids)
        return NumpyDataset(newx, newy, neww, newids)

    def select(self,
               indices: Union[Sequence[int], np.ndarray],
               select_dir: Optional[str] = None) -> "ImageDataset":
        """Creates a new dataset from a selection of indices from self.

        Parameters
        ----------
        indices: Sequence
            List of indices to select.
        select_dir: str, optional (default None)
            Used to provide same API as `DiskDataset`. Ignored since
            `ImageDataset` is purely in-memory.

        Returns
        -------
        ImageDataset
            A selected ImageDataset object
        """
        if isinstance(self._X, np.ndarray):
            X = self._X[indices]
        else:
            X = [self._X[i] for i in indices]
        if isinstance(self._y, np.ndarray):
            y = self._y[indices]
        else:
            y = [self._y[i] for i in indices]
        w = self._w[indices]
        ids = self._ids[indices]
        return ImageDataset(X, y, w, ids)

    def make_pytorch_dataset(self,
                             epochs: int = 1,
                             deterministic: bool = False,
                             batch_size: Optional[int] = None):
        """Create a torch.utils.data.IterableDataset that iterates over the data in this Dataset.

        Each value returned by the Dataset's iterator is a tuple of (X, y, w, id)
        containing the data for one batch, or for a single sample if batch_size is None.

        Parameters
        ----------
        epochs: int, default 1
            The number of times to iterate over the Dataset.
        deterministic: bool, default False
            If True, the data is produced in order. If False, a different
            random permutation of the data is used for each epoch.
        batch_size: int, optional (default None)
            The number of samples to return in each batch. If None, each returned
            value is a single sample.

        Returns
        -------
        torch.utils.data.IterableDataset
            `torch.utils.data.IterableDataset` that iterates over the data in
            this dataset.

        Note
        ----
        This method requires PyTorch to be installed.
        """
        try:
            from deepchem.data.pytorch_datasets import _TorchImageDataset
        except ImportError:
            raise ValueError("This method requires PyTorch to be installed.")
        pytorch_ds = _TorchImageDataset(
            image_dataset=self,
            epochs=epochs,
            deterministic=deterministic,
            batch_size=batch_size)
        return pytorch_ds
class Databag(object):
    """A utility class to iterate through multiple datasets together.

    A `Databag` is useful when you have multiple datasets that you want
    to iterate in locksteps. This might be easiest to grasp with a
    simple code example.

    >>> ones_dataset = NumpyDataset(X=np.ones((5, 3)))
    >>> zeros_dataset = NumpyDataset(X=np.zeros((5, 3)))
    >>> databag = Databag({"ones": ones_dataset, "zeros": zeros_dataset})
    >>> for sample_dict in databag.iterbatches(batch_size=1):
    ...   print(sample_dict)
    {'ones': array([[1., 1., 1.]]), 'zeros': array([[0., 0., 0.]])}
    {'ones': array([[1., 1., 1.]]), 'zeros': array([[0., 0., 0.]])}
    {'ones': array([[1., 1., 1.]]), 'zeros': array([[0., 0., 0.]])}
    {'ones': array([[1., 1., 1.]]), 'zeros': array([[0., 0., 0.]])}
    {'ones': array([[1., 1., 1.]]), 'zeros': array([[0., 0., 0.]])}

    Note how we get a batch at a time from each of the datasets in the
    `Databag`. This can be useful for training models that combine data
    from multiple `Dataset` objects at a time.
    """

    def __init__(self, datasets: Optional[Dict[Any, Dataset]] = None) -> None:
        """Initialize this `Databag`.

        Parameters
        ----------
        datasets: dict, optional (default None)
            A dictionary mapping keys to `Dataset` objects.
        """
        self.datasets = dict() if datasets is None else datasets

    def add_dataset(self, key: Any, dataset: Dataset) -> None:
        """Adds a dataset to this databag.

        Parameters
        ----------
        key: Any, hashable value
            Key to be added
        dataset: Dataset
            The dataset that `key` should point to.
        """
        self.datasets[key] = dataset

    def iterbatches(self, **kwargs) -> Iterator[Dict[str, np.ndarray]]:
        """Loop through all internal datasets in the same order.

        Parameters
        ----------
        batch_size: int
            Number of samples from each dataset to return
        epochs: int
            Number of times to loop through the datasets
        pad_batches: bool
            Should all batches==batch_size

        Returns
        -------
        Iterator[Dict[str, np.ndarray]]
            Generator which yields a dictionary {key: dataset.X[batch]}
        """
        keys = list(self.datasets.keys())
        epochs = kwargs.pop('epochs', 1)
        # Every member dataset must be walked in the same, reproducible order.
        kwargs['deterministic'] = True
        for _ in range(epochs):
            batch_iters = [self.datasets[k].iterbatches(**kwargs) for k in keys]
            for batches in zip(*batch_iters):
                yield {k: batch[0] for k, batch in zip(keys, batches)}
| codeparrot/github-code-clean |
"""
Integrate functions by rewriting them as Meijer G-functions.
There are three user-visible functions that can be used by other parts of the
sympy library to solve various integration problems:
- meijerint_indefinite
- meijerint_definite
- meijerint_inversion
They can be used to compute, respectively, indefinite integrals, definite
integrals over intervals of the real line, and inverse laplace-type integrals
(from c-I*oo to c+I*oo). See the respective docstrings for details.
The main references for this are:
[L] Luke, Y. L. (1969), The Special Functions and Their Approximations,
Volume 1
[R] Kelly B. Roach. Meijer G Function Representations.
In: Proceedings of the 1997 International Symposium on Symbolic and
Algebraic Computation, pages 205-211, New York, 1997. ACM.
[P] A. P. Prudnikov, Yu. A. Brychkov and O. I. Marichev (1990).
Integrals and Series: More Special Functions, Vol. 3,.
Gordon and Breach Science Publisher
"""
from sympy.core import oo, S, pi
from sympy.core.function import expand, expand_mul, expand_power_base
from sympy.core.add import Add
from sympy.core.mul import Mul
from sympy.core.cache import cacheit
from sympy.core.symbol import Dummy, Wild
from sympy.simplify import hyperexpand, powdenest
from sympy.logic.boolalg import And, Or
from sympy.functions.special.delta_functions import Heaviside
from sympy.functions.elementary.piecewise import Piecewise
from sympy.functions.special.hyper import meijerg
from sympy.utilities.misc import debug as _debug
from sympy.utilities import default_sort_key
# keep this at top for easy reference
z = Dummy('z')
def _create_lookup_table(table):
    """
    Add formulae for the function -> meijerg lookup table.

    ``table`` maps a type signature (as computed by ``_mytype``) to a list
    of entries ``(formula, terms, cond, hint)``, where ``terms`` is either a
    list of ``(fac, G)`` pairs (meaning ``formula == Add(*[fac*G, ...])``
    under condition ``cond``) or a callable producing such a list from a
    match dictionary.
    """
    # Wild symbols that must not contain the integration dummy z.
    def wild(n):
        return Wild(n, exclude=[z])
    p, q, a, b, c = map(wild, 'pqabc')
    # n only matches positive integers (used for log(t)**n entries).
    n = Wild('n', properties=[lambda x: x.is_Integer and x > 0])
    # generic argument pattern: p*z**q
    t = p*z**q

    # Register formula == fac*meijerg(an, ap, bm, bq, arg), valid under cond.
    def add(formula, an, ap, bm, bq, arg=t, fac=S(1), cond=True, hint=True):
        table.setdefault(_mytype(formula, z), []).append((formula,
                         [(fac, meijerg(an, ap, bm, bq, arg))], cond, hint))

    # Register formula with an explicit instantiation list (or callable).
    def addi(formula, inst, cond, hint=True):
        table.setdefault(_mytype(formula, z), []).append((formula, inst, cond, hint))

    # Two G-function representations of the constant a.
    def constant(a):
        return [(a, meijerg([1], [], [], [0], z)),
                (a, meijerg([], [1], [0], [], z))]
    table[()] = [(a, constant(a), True, True)]

    # [P], Section 8.
    from sympy import unpolarify, Function, Not

    # Lazy predicate: evaluates to a <= 0 once a is known to be an Integer,
    # stays unevaluated otherwise (used as a hint below).
    class IsNonPositiveInteger(Function):
        nargs = 1

        @classmethod
        def eval(cls, arg):
            arg = unpolarify(arg)
            if arg.is_Integer is True:
                return arg <= 0

    # Section 8.4.2
    from sympy import (gamma, pi, cos, exp, re, sin, sqrt, sinh, cosh,
                       factorial, log, erf, polar_lift)
    # TODO this needs more polar_lift (c/f entry for exp)
    add(Heaviside(t - b)*(t - b)**(a-1), [a], [], [], [0], t/b,
        gamma(a)*b**(a-1), And(b > 0))
    add(Heaviside(b - t)*(b - t)**(a-1), [], [a], [0], [], t/b,
        gamma(a)*b**(a-1), And(b > 0))
    add(Heaviside(z - (b/p)**(1/q))*(t - b)**(a-1), [a], [], [], [0], t/b,
        gamma(a)*b**(a-1), And(b > 0))
    add(Heaviside((b/p)**(1/q) - z)*(b - t)**(a-1), [], [a], [0], [], t/b,
        gamma(a)*b**(a-1), And(b > 0))
    add((b + t)**(-a), [1 - a], [], [0], [], t/b, b**(-a)/gamma(a),
        hint=Not(IsNonPositiveInteger(a)))
    add(abs(b - t)**(-a), [1 - a], [(1 - a)/2], [0], [(1 - a)/2], t/b,
        pi/(gamma(a)*cos(pi*a/2))*abs(b)**(-a), re(a) < 1)
    add((t**a - b**a)/(t - b), [0, a], [], [0, a], [], t/b,
        b**(a-1)*sin(a*pi)/pi)

    # 12
    def A1(r, sign, nu): return pi**(-S(1)/2)*(-sign*nu/2)**(1-2*r)

    def tmpadd(r, sgn):
        # XXX the a**2 is bad for matching
        add((sqrt(a**2 + t) + sgn*a)**b/(a**2+t)**r,
            [(1 + b)/2, 1-2*r + b/2], [],
            [(b - sgn*b)/2], [(b + sgn*b)/2], t/a**2,
            a**(b-2*r)*A1(r, sgn, b))
    tmpadd(0, 1)
    tmpadd(0, -1)
    tmpadd(S(1)/2, 1)
    tmpadd(S(1)/2, -1)

    # 13
    def tmpadd(r, sgn):
        add((sqrt(a + p*z**q) + sgn*sqrt(p)*z**(q/2))**b/(a + p*z**q)**r,
            [1 - r + sgn*b/2], [1 - r - sgn*b/2], [0, S(1)/2], [],
            p*z**q/a, a**(b/2 - r)*A1(r, sgn, b))
    tmpadd(0, 1)
    tmpadd(0, -1)
    tmpadd(S(1)/2, 1)
    tmpadd(S(1)/2, -1)
    # (those after look obscure)

    # Section 8.4.3
    add(exp(polar_lift(-1)*t), [], [], [0], [])

    # TODO can do sin^n, sinh^n by expansion ... where?
    # 8.4.4 (hyperbolic functions)
    add(sinh(t), [], [1], [S(1)/2], [1, 0], t**2/4, pi**(S(3)/2))
    add(cosh(t), [], [S(1)/2], [0], [S(1)/2, S(1)/2], t**2/4, pi**(S(3)/2))

    # Section 8.4.5
    # TODO can do t + a. but can also do by expansion... (XXX not really)
    add(sin(t), [], [], [S(1)/2], [0], t**2/4, sqrt(pi))
    add(cos(t), [], [], [0], [S(1)/2], t**2/4, sqrt(pi))

    # Section 8.5.5
    def make_log1(subs):
        N = subs[n]
        return [((-1)**N*factorial(N),
                 meijerg([], [1]*(N + 1), [0]*(N + 1), [], t))]

    def make_log2(subs):
        N = subs[n]
        return [(factorial(N),
                 meijerg([1]*(N + 1), [], [], [0]*(N + 1), t))]
    # TODO these only hold for positive p, and can be made more general
    #      but who uses log(x)*Heaviside(a-x) anyway ...
    # TODO also it would be nice to derive them recursively ...
    addi(log(t)**n*Heaviside(1 - t), make_log1, True)
    addi(log(t)**n*Heaviside(t - 1), make_log2, True)

    def make_log3(subs):
        return make_log1(subs) + make_log2(subs)
    addi(log(t)**n, make_log3, True)
    addi(log(t + a),
         constant(log(a)) + [(S(1), meijerg([1, 1], [], [1], [0], t/a))],
         True)
    addi(log(abs(t - a)), constant(log(abs(a))) + \
         [(pi, meijerg([1, 1], [S(1)/2], [1], [0, S(1)/2], t/a))],
         True)
    # TODO log(x)/(x+a) and log(x)/(x-1) can also be done. should they
    #      be derivable?
    # TODO further formulae in this section seem obscure

    # Sections 8.4.9-10
    # TODO

    # Section 8.4.11
    from sympy import Ei, I, expint, Si, Ci, Shi, Chi, fresnels, fresnelc
    addi(Ei(t),
         constant(-I*pi) + [(S(-1), meijerg([], [1], [0, 0], [], t*polar_lift(-1)))],
         True)

    # Section 8.4.12
    add(Si(t), [1], [], [S(1)/2], [0, 0], t**2/4, sqrt(pi)/2)
    add(Ci(t), [], [1], [0, 0], [S(1)/2], t**2/4, -sqrt(pi)/2)

    # Section 8.4.13
    add(Shi(t), [S(1)/2], [], [0], [S(-1)/2, S(-1)/2], polar_lift(-1)*t**2/4,
        t*sqrt(pi)/4)
    add(Chi(t), [], [S(1)/2, 1], [0, 0], [S(1)/2, S(1)/2], t**2/4, -pi**S('3/2')/2)

    # generalized exponential integral
    add(expint(a, t), [], [a], [a - 1, 0], [], t)

    # Section 8.4.14
    # TODO erfc
    add(erf(t), [1], [], [S(1)/2], [0], t**2, 1/sqrt(pi))
    # TODO exp(-x)*erf(I*x) does not work

    # Fresnel Integrals
    add(fresnels(t), [1], [], [S(3)/4], [0, S(1)/4], pi**2*t**4/16, S(1)/2)
    add(fresnelc(t), [1], [], [S(1)/4], [0, S(3)/4], pi**2*t**4/16, S(1)/2)

    ##### bessel-type functions #####
    from sympy import besselj, bessely, besseli, besselk

    # Section 8.4.19
    add(besselj(a, t), [], [], [a/2], [-a/2], t**2/4)

    # all of the following are derivable
    #add(sin(t)*besselj(a, t), [S(1)/4, S(3)/4], [], [(1+a)/2],
    #    [-a/2, a/2, (1-a)/2], t**2, 1/sqrt(2))
    #add(cos(t)*besselj(a, t), [S(1)/4, S(3)/4], [], [a/2],
    #    [-a/2, (1+a)/2, (1-a)/2], t**2, 1/sqrt(2))
    #add(besselj(a, t)**2, [S(1)/2], [], [a], [-a, 0], t**2, 1/sqrt(pi))
    #add(besselj(a, t)*besselj(b, t), [0, S(1)/2], [], [(a + b)/2],
    #    [-(a+b)/2, (a - b)/2, (b - a)/2], t**2, 1/sqrt(pi))

    # Section 8.4.20
    add(bessely(a, t), [], [-(a+1)/2], [a/2, -a/2], [-(a+1)/2], t**2/4)

    # TODO all of the following should be derivable
    #add(sin(t)*bessely(a, t), [S(1)/4, S(3)/4], [(1 - a - 1)/2],
    #    [(1 + a)/2, (1 - a)/2], [(1 - a - 1)/2, (1 - 1 - a)/2, (1 - 1 + a)/2],
    #    t**2, 1/sqrt(2))
    #add(cos(t)*bessely(a, t), [S(1)/4, S(3)/4], [(0 - a - 1)/2],
    #    [(0 + a)/2, (0 - a)/2], [(0 - a - 1)/2, (1 - 0 - a)/2, (1 - 0 + a)/2],
    #    t**2, 1/sqrt(2))
    #add(besselj(a, t)*bessely(b, t), [0, S(1)/2], [(a - b - 1)/2],
    #    [(a + b)/2, (a - b)/2], [(a - b - 1)/2, -(a + b)/2, (b - a)/2],
    #    t**2, 1/sqrt(pi))
    #addi(bessely(a, t)**2,
    #     [(2/sqrt(pi), meijerg([], [S(1)/2, S(1)/2 - a], [0, a, -a],
    #                           [S(1)/2 - a], t**2)),
    #      (1/sqrt(pi), meijerg([S(1)/2], [], [a], [-a, 0], t**2))],
    #     True)
    #addi(bessely(a, t)*bessely(b, t),
    #     [(2/sqrt(pi), meijerg([], [0, S(1)/2, (1 - a - b)/2],
    #                           [(a + b)/2, (a - b)/2, (b - a)/2, -(a + b)/2],
    #                           [(1 - a - b)/2], t**2)),
    #      (1/sqrt(pi), meijerg([0, S(1)/2], [], [(a + b)/2],
    #                           [-(a + b)/2, (a - b)/2, (b - a)/2], t**2))],
    #     True)

    # Section 8.4.21 ?
    # Section 8.4.22
    add(besseli(a, t), [], [(1 + a)/2], [a/2], [-a/2, (1 + a)/2], t**2/4, pi)
    # TODO many more formulas. should all be derivable

    # Section 8.4.23
    add(besselk(a, t), [], [], [a/2, -a/2], [], t**2/4, S(1)/2)
    # TODO many more formulas. should all be derivable
####################################################################
# First some helper functions.
####################################################################
from sympy.utilities.timeutils import timethis
# Decorator from sympy.utilities.timeutils; wraps functions so their runtime
# is accounted under the 'meijerg' key when timing is enabled.
timeit = timethis('meijerg')
def _mytype(f, x):
""" Create a hashable entity describing the type of f. """
if not f.has(x):
return ()
elif f.is_Function:
return (type(f),)
else:
types = [_mytype(a, x) for a in f.args]
res = []
for t in types:
res += list(t)
res.sort()
return tuple(res)
class _CoeffExpValueError(ValueError):
"""
Exception raised by _get_coeff_exp, for internal use only.
"""
pass
def _get_coeff_exp(expr, x):
    """
    When expr is known to be of the form c*x**b, with c and/or b possibly 1,
    return c, b.
    >>> from sympy.abc import x, a, b
    >>> from sympy.integrals.meijerint import _get_coeff_exp
    >>> _get_coeff_exp(a*x**b, x)
    (a, b)
    >>> _get_coeff_exp(x, x)
    (1, 1)
    >>> _get_coeff_exp(2*x, x)
    (2, 1)
    >>> _get_coeff_exp(x**3, x)
    (1, 3)

    Raises _CoeffExpValueError if expr is not of the expected form.
    """
    from sympy import powsimp
    coeff, rest = expand_power_base(powsimp(expr)).as_coeff_mul(x)
    if not rest:
        # no x-dependence at all: exponent zero
        return coeff, S(0)
    [factor] = rest
    if factor == x:
        return coeff, S(1)
    if factor.is_Pow:
        if factor.base != x:
            raise _CoeffExpValueError('expr not of form a*x**b')
        return coeff, factor.exp
    raise _CoeffExpValueError('expr not of form a*x**b: %s' % expr)
def _exponents(expr, x):
"""
Find the exponents of ``x`` (not including zero) in ``expr``.
>>> from sympy.integrals.meijerint import _exponents
>>> from sympy.abc import x, y
>>> from sympy import sin
>>> _exponents(x, x)
set([1])
>>> _exponents(x**2, x)
set([2])
>>> _exponents(x**2 + x, x)
set([1, 2])
>>> _exponents(x**3*sin(x + x**y) + 1/x, x)
set([-1, 1, 3, y])
"""
def _exponents_(expr, x, res):
if expr == x:
res.update([1])
return
if expr.is_Pow and expr.base == x:
res.update([expr.exp])
return
for arg in expr.args:
_exponents_(arg, x, res)
res = set()
_exponents_(expr, x, res)
return res
def _functions(expr, x):
    """ Find the types of functions in expr, to estimate the complexity. """
    from sympy import Function
    kinds = set()
    for f in expr.atoms(Function):
        if f.has(x):
            kinds.add(f.func)
    return kinds
def _find_splitting_points(expr, x):
    """
    Find numbers a such that a linear substitution x --> x+a would
    (hopefully) simplify expr.
    >>> from sympy.integrals.meijerint import _find_splitting_points as fsp
    >>> from sympy import sin
    >>> from sympy.abc import a, x
    >>> fsp(x, x)
    set([0])
    >>> fsp((x-1)**3, x)
    set([1])
    >>> fsp(sin(x+3)*x, x)
    set([-3, 0])
    """
    from sympy import Tuple
    p, q = map(lambda n: Wild(n, exclude=[x]), 'pq')

    def walk(e, acc):
        # collect -q/p for every innermost subexpression matching p*x + q
        if isinstance(e, Tuple):
            return
        m = e.match(p*x + q)
        if m and m[p] != 0:
            acc.add(-m[q]/m[p])
            return
        if e.is_Atom:
            return
        for sub in e.args:
            walk(sub, acc)

    points = set()
    walk(expr, points)
    return points
def _split_mul(f, x):
    """
    Split expression ``f`` into fac, po, g, where fac is a constant factor,
    po = x**s for some s independent of x, and g is "the rest".
    >>> from sympy.integrals.meijerint import _split_mul
    >>> from sympy import sin
    >>> from sympy.abc import s, x
    >>> _split_mul((3*x)**s*sin(x**2)*x, x)
    (3**s, x*x**s, sin(x**2))
    """
    from sympy import polarify, unpolarify
    fac = S(1)
    po = S(1)
    rest = S(1)
    for factor in Mul.make_args(expand_power_base(f)):
        if factor == x:
            po *= x
        elif not factor.has(x):
            fac *= factor
        else:
            if factor.is_Pow:
                # try to recognise the base as c*x (expanding if necessary)
                coeff, terms = factor.base.as_coeff_mul(x)
                if terms != (x,):
                    coeff, terms = expand_mul(factor.base).as_coeff_mul(x)
                if terms == (x,):
                    po *= x**factor.exp
                    fac *= unpolarify(polarify(coeff**factor.exp, subs=False))
                    continue
            rest *= factor
    return fac, po, rest
def _mul_args(f):
    """
    Return a list ``L`` such that Mul(*L) == f.
    If f is not a Mul or Pow, L=[f].
    If f=g**n for an integer n, L=[g]*n.
    If f is a Mul, L comes from applying _mul_args to all factors of f.
    """
    factors = []
    for g in Mul.make_args(f):
        if g.is_Pow and g.exp.is_Integer:
            e = g.exp
            base = g.base
            if e < 0:
                # g**(-n) contributes n copies of 1/g
                e = -e
                base = 1/base
            factors.extend([base]*e)
        else:
            factors.append(g)
    return factors
def _mul_as_two_parts(f):
    """
    Find all the ways to split f into a product of two terms.
    Return None on failure.
    >>> from sympy.integrals.meijerint import _mul_as_two_parts
    >>> from sympy import sin, exp
    >>> from sympy.abc import x
    >>> _mul_as_two_parts(x*sin(x)*exp(x))
    [(x*exp(x), sin(x)), (x, exp(x)*sin(x)), (x*sin(x), exp(x))]
    """
    from sympy.utilities.iterables import multiset_partitions
    factors = _mul_args(f)
    if len(factors) < 2:
        return None
    return [(Mul(*first), Mul(*second))
            for first, second in multiset_partitions(factors, 2)]
def _inflate_g(g, n):
    """ Return C, h such that h is a G function of argument z**n and
    g = C*h. """
    # TODO should this be a method of meijerg?
    # See: [L, page 150, equation (5)]
    def inflated(params):
        # (a1, ..., ak) -> (a1/n, (a1+1)/n, ..., (ak + n - 1)/n)
        return [(a + i)/n for a in params for i in range(n)]
    v = S(len(g.ap) - len(g.bq))
    C = n**(1 + g.nu + v/2)/(2*pi)**((n - 1)*g.delta)
    return C, meijerg(inflated(g.an), inflated(g.aother),
                      inflated(g.bm), inflated(g.bother),
                      g.argument**n * n**(n*v))
def _flip_g(g):
    """ Turn the G function into one of inverse argument
    (i.e. G(1/x) -> G'(x)) """
    # See [L], section 5.2: reflect all parameters a -> 1 - a and swap
    # the roles of the numerator/denominator parameter groups.
    def reflected(params):
        return [1 - a for a in params]
    return meijerg(reflected(g.bm), reflected(g.bother),
                   reflected(g.an), reflected(g.aother), 1/g.argument)
def _inflate_fox_h(g, a):
    r"""
    Let d denote the integrand in the definition of the G function ``g``.
    Consider the function H which is defined in the same way, but with
    integrand d/Gamma(a*s) (contour conventions as usual).

    If a is rational, the function H can be written as C*G, for a constant C
    and a G-function G.

    This function returns C, G.
    """
    if a < 0:
        # reduce to the positive case by flipping the argument
        return _inflate_fox_h(_flip_g(g), -a)
    p = S(a.p)
    q = S(a.q)
    # We use the substitution s->qs, i.e. inflate g by q. We are left with an
    # extra factor of Gamma(p*s), for which we use Gauss' multiplication
    # theorem.
    C, h = _inflate_g(g, q)
    arg = h.argument/p**p
    C /= (2*pi)**((1 - p)/2)*p**(-S(1)/2)
    extra_bq = [(k + 1)/p for k in range(p)]
    return C, meijerg(h.an, h.aother, h.bm, list(h.bother) + extra_bq, arg)
# Cache mapping (name, token) -> Dummy, shared by _dummy and _dummy_ below
# so that repeated requests hand back the identical symbol (cache-friendly).
_dummies = {}
def _dummy(name, token, expr, **kwargs):
    """
    Return a dummy. This will return the same dummy if the same token+name is
    requested more than once, and it is not already in expr.
    This is for being cache-friendly.
    """
    candidate = _dummy_(name, token, **kwargs)
    if expr.has(candidate):
        # cached dummy already occurs in expr -- hand out a fresh one instead
        return Dummy(name, **kwargs)
    return candidate
def _dummy_(name, token, **kwargs):
    """
    Return a dummy associated to name and token. Same effect as declaring
    it globally.
    """
    global _dummies
    key = (name, token)
    if key not in _dummies:
        _dummies[key] = Dummy(name, **kwargs)
    return _dummies[key]
def _is_analytic(f, x):
    """ Check if f(x), when expressed using G functions on the positive reals,
    will in fact agree with the G functions almost everywhere """
    from sympy import Heaviside, Abs
    # Heaviside/Abs of x-dependent arguments break analyticity.
    for expr in f.atoms(Heaviside, Abs):
        if expr.has(x):
            return False
    return True
def _condsimp(cond):
    """
    Do naive simplifications on ``cond``.

    Note that this routine is completely ad-hoc, simplification rules being
    added as need arises rather than following any logical pattern.

    >>> from sympy.integrals.meijerint import _condsimp as simp
    >>> from sympy import Or, Eq, unbranched_argument as arg, And
    >>> from sympy.abc import x, y, z
    >>> simp(Or(x < y, z, Eq(x, y)))
    Or(x <= y, z)
    >>> simp(Or(x <= y, And(x < y, z)))
    x <= y
    """
    from sympy import (symbols, Wild, Eq, unbranched_argument, exp_polar, pi, I,
                       periodic_argument, oo, polar_lift)
    from sympy.logic.boolalg import BooleanFunction
    if not isinstance(cond, BooleanFunction):
        return cond
    # simplify the arguments bottom-up first
    cond = cond.func(*map(_condsimp, cond.args))
    change = True
    p, q, r = symbols('p q r', cls=Wild)
    # Each rule is a (pattern, replacement) pair; the pattern's head must
    # match cond's head and *all* of the pattern's arguments must be found
    # among cond's arguments for the rule to fire.
    rules = [
        (Or(p < q, Eq(p, q)), p <= q),
        # The next two obviously are instances of a general pattern, but it is
        # easier to spell out the few cases we care about.
        (And(abs(unbranched_argument(p)) <= pi,
             abs(unbranched_argument(exp_polar(-2*pi*I)*p)) <= pi),
         Eq(unbranched_argument(exp_polar(-I*pi)*p), 0)),
        (And(abs(unbranched_argument(p)) <= pi/2,
             abs(unbranched_argument(exp_polar(-pi*I)*p)) <= pi/2),
         Eq(unbranched_argument(exp_polar(-I*pi/2)*p), 0)),
        (Or(p <= q, And(p < q, r)), p <= q)
    ]
    while change:
        change = False
        for fro, to in rules:
            if fro.func != cond.func:
                continue
            for n, arg in enumerate(cond.args):
                # If the rule contains the wildcard r, anchor the match on
                # the pattern's *second* argument instead of the first.
                if fro.args[0].has(r):
                    m = arg.match(fro.args[1])
                    num = 1
                else:
                    num = 0
                    m = arg.match(fro.args[0])
                if not m:
                    continue
                # NOTE(review): this is Python-2-era code -- ``map`` is
                # assumed to return a list (it is iterated once below and
                # its len() is taken); under Python 3 this would need list().
                otherargs = map(lambda x: x.subs(m), fro.args[:num] + fro.args[num+1:])
                otherlist = [n]
                # find every remaining pattern argument among cond's args
                for arg2 in otherargs:
                    for k, arg3 in enumerate(cond.args):
                        if k in otherlist:
                            continue
                        if arg2 == arg3:
                            otherlist += [k]
                            break
                        if arg3.func is And and arg2.args[1] == r and \
                           arg2.func is And and arg2.args[0] in arg3.args:
                            otherlist += [k]
                            break
                        if arg3.func is And and arg2.args[0] == r and \
                           arg2.func is And and arg2.args[1] in arg3.args:
                            otherlist += [k]
                            break
                if len(otherlist) != len(otherargs) + 1:
                    # not all pattern arguments were found -- rule does not apply
                    continue
                # replace the matched arguments by the rule's replacement
                newargs = [arg for (k, arg) in enumerate(cond.args) \
                           if k not in otherlist] + [to.subs(m)]
                cond = cond.func(*newargs)
                change = True
                break
    # final tweak
    def repl_eq(orig):
        # rewrite Eq(unbranched_argument(...), 0)-style conditions as
        # positivity conditions on the lifted quantity
        if orig.lhs == 0:
            expr = orig.rhs
        elif orig.rhs == 0:
            expr = orig.lhs
        else:
            return orig
        m = expr.match(unbranched_argument(polar_lift(p)**q))
        if not m:
            if expr.func is periodic_argument and not expr.args[0].is_polar \
               and expr.args[1] == oo:
                return (expr.args[0] > 0)
            return orig
        return (m[p] > 0)
    return cond.replace(lambda expr: expr.is_Relational and expr.rel_op == '==',
                        repl_eq)
def _eval_cond(cond):
""" Re-evaluate the conditions. """
if isinstance(cond, bool):
return cond
return _condsimp(cond.doit())
####################################################################
# Now the "backbone" functions to do actual integration.
####################################################################
def _my_principal_branch(expr, period, full_pb=False):
    """ Bring expr nearer to its principal branch by removing superfluous
    factors.
    This function does *not* guarantee to yield the principal branch,
    to avoid introducing opaque principal_branch() objects,
    unless full_pb=True. """
    from sympy import principal_branch
    branched = principal_branch(expr, period)
    if full_pb:
        return branched
    # strip the opaque principal_branch() wrappers again
    return branched.replace(principal_branch, lambda x, y: x)
def _rewrite_saxena_1(fac, po, g, x):
    """
    Rewrite the integral fac*po*g dx, from zero to infinity, as
    integral fac*G, where G has argument a*x. Note po=x**s.
    Return fac, G.
    """
    _, s = _get_coeff_exp(po, x)
    a, b = _get_coeff_exp(g.argument, x)
    a = _my_principal_branch(a, g.get_period())
    # We substitute t = x**b and absorb a factor of (a*t)**((1 + s)/b - 1)
    # into the G function.
    shift = (1 + s)/b - 1
    C = fac/(abs(b)*a**((s+1)/b - 1))

    def shifted(params):
        return [c + shift for c in params]
    return C, meijerg(shifted(g.an), shifted(g.aother),
                      shifted(g.bm), shifted(g.bother), a*x)
def _check_antecedents_1(g, x, helper=False):
    """
    Return a condition under which the mellin transform of g exists.
    Any power of x has already been absorbed into the G function,
    so this is just int_0^\infty g dx.

    See [L, section 5.6.1]. (Note that s=1.)

    If ``helper`` is True, only check if the MT exists at infinity, i.e. if
    int_1^\infty g dx exists (the cond_3-type requirements are dropped).
    """
    # NOTE if you update these conditions, please update the documentation as well
    from sympy import Eq, Not, ceiling, Ne, re, unbranched_argument as arg
    delta = g.delta
    eta, _ = _get_coeff_exp(g.argument, x)
    m, n, p, q = S([len(g.bm), len(g.an), len(g.ap), len(g.bq)])
    xi = m + n - p

    # If p > q, reflect the G function (argument -> 1/argument) and check
    # the antecedents of the reflected function instead.
    if p > q:
        def tr(l): return [1 - x for x in l]
        return _check_antecedents_1(meijerg(tr(g.bm), tr(g.bother),
                                            tr(g.an), tr(g.aother), x/eta),
                                    x)

    # cond_3: convergence constraints from the "principal" parameters only;
    # cond_3_star additionally constrains the remaining parameters.
    tmp = []
    for b in g.bm:
        tmp += [-re(b) < 1]
    for a in g.an:
        tmp += [1 < 1 - re(a)]
    cond_3 = And(*tmp)

    for b in g.bother:
        tmp += [-re(b) < 1]
    for a in g.aother:
        tmp += [1 < 1 - re(a)]
    cond_3_star = And(*tmp)

    cond_4 = (-re(g.nu) + (q + 1 - p)/2 > q - p)

    def debug(*msg):
        _debug(*msg)

    debug('Checking antecedents for 1 function:')
    debug(' delta=%s, eta=%s, m=%s, n=%s, p=%s, q=%s'
          % (delta, eta, m, n, p, q))
    debug(' ap = %s, %s' % (list(g.an), list(g.aother)))
    debug(' bq = %s, %s' % (list(g.bm), list(g.bother)))
    debug(' cond_3=%s, cond_3*=%s, cond_4=%s' % (cond_3, cond_3_star, cond_4))

    # The cases below are alternatives: existence holds if any applies.
    conds = []

    # case 1
    case1 = []
    tmp1 = [1 <= n, p < q, 1 <= m]
    tmp2 = [1 <= p, 1 <= m, Eq(q, p + 1), Not(And(Eq(n, 0), Eq(m, p + 1)))]
    tmp3 = [1 <= p, Eq(q, p)]
    for k in range(ceiling(delta/2) + 1):
        tmp3 += [Ne(abs(arg(eta)), (delta - 2*k)*pi)]
    tmp = [delta > 0, abs(arg(eta)) < delta*pi]
    extra = [Ne(eta, 0), cond_3]
    if helper:
        extra = []
    for t in [tmp1, tmp2, tmp3]:
        case1 += [And(*(t + tmp + extra))]
    conds += case1
    debug(' case 1:', case1)

    # case 2
    extra = [cond_3]
    if helper:
        extra = []
    case2 = [And(Eq(n, 0), p + 1 <= m, m <= q,
                 abs(arg(eta)) < delta*pi, *extra)]
    conds += case2
    debug(' case 2:', case2)

    # case 3
    extra = [cond_3, cond_4]
    if helper:
        extra = []
    case3 = [And(p < q, 1 <= m, delta > 0, Eq(abs(arg(eta)), delta*pi), *extra)]
    case3 += [And(p <= q - 2, Eq(delta, 0), Eq(abs(arg(eta)), 0), *extra)]
    conds += case3
    debug(' case 3:', case3)

    # TODO altered cases 4-7

    # extra case from wolfram functions site:
    # (reproduced verbatim from prudnikov, section 2.24.2)
    # http://functions.wolfram.com/HypergeometricFunctions/MeijerG/21/02/01/
    case_extra = []
    case_extra += [Eq(p, q), Eq(delta, 0), Eq(arg(eta), 0), Ne(eta, 0)]
    if not helper:
        case_extra += [cond_3]
    s = []
    for a, b in zip(g.ap, g.bq):
        s += [b - a]
    case_extra += [re(Add(*s)) < 0]
    case_extra = And(*case_extra)
    conds += [case_extra]
    debug(' extra case:', [case_extra])

    case_extra_2 = [And(delta > 0, abs(arg(eta)) < delta*pi)]
    if not helper:
        case_extra_2 += [cond_3]
    case_extra_2 = And(*case_extra_2)
    conds += [case_extra_2]
    debug(' second extra case:', [case_extra_2])

    # TODO This leaves only one case from the three listed by prudnikov.
    #      Investigate if these indeed cover everything; if so, remove the rest.

    return Or(*conds)
def _int0oo_1(g, x):
    """
    Evaluate int_0^\infty g dx using G functions,
    assuming the necessary conditions are fulfilled.

    >>> from sympy.abc import a, b, c, d, x, y
    >>> from sympy import meijerg
    >>> from sympy.integrals.meijerint import _int0oo_1
    >>> _int0oo_1(meijerg([a], [b], [c], [d], x*y), x)
    gamma(-a)*gamma(c + 1)/(y*gamma(-d)*gamma(b + 1))
    """
    # See [L, section 5.6.1]. Note that s=1.
    from sympy import gamma, combsimp, unpolarify
    eta, _ = _get_coeff_exp(g.argument, x)
    # XXX TODO we should reduce order first
    numer = S(1)
    denom = eta
    for b in g.bm:
        numer *= gamma(b + 1)
    for a in g.an:
        numer *= gamma(1 - a - 1)
    for b in g.bother:
        denom *= gamma(1 - b - 1)
    for a in g.aother:
        denom *= gamma(a + 1)
    return combsimp(unpolarify(numer/denom))
def _rewrite_saxena(fac, po, g1, g2, x, full_pb=False):
    """
    Rewrite the integral fac*po*g1*g2 from 0 to oo in terms of G functions
    with argument c*x.

    Returns None if the argument exponents are not rational (cannot be
    inflated to a common integer exponent); otherwise returns C, f1, f2
    such that integral C f1 f2 from 0 to infinity equals
    integral fac po g1 g2 from 0 to infinity.

    >>> from sympy.integrals.meijerint import _rewrite_saxena
    >>> from sympy.abc import s, t, m
    >>> from sympy import meijerg
    >>> g1 = meijerg([], [], [0], [], s*t)
    >>> g2 = meijerg([], [], [m/2], [-m/2], t**2/4)
    >>> r = _rewrite_saxena(1, t**0, g1, g2, t)
    >>> r[0]
    s/(4*sqrt(pi))
    >>> r[1]
    meijerg(((), ()), ((-1/2, 0), ()), s**2*t/4)
    >>> r[2]
    meijerg(((), ()), ((m/2,), (-m/2,)), t/4)
    """
    from sympy.core.numbers import ilcm

    def pb(g):
        # bring the coefficient of the argument closer to its principal branch
        a, b = _get_coeff_exp(g.argument, x)
        per = g.get_period()
        return meijerg(g.an, g.aother, g.bm, g.bother,
                       _my_principal_branch(a, per, full_pb)*x**b)
    _, s = _get_coeff_exp(po, x)
    _, b1 = _get_coeff_exp(g1.argument, x)
    _, b2 = _get_coeff_exp(g2.argument, x)
    # normalise both arguments to positive exponents
    if b1 < 0:
        b1 = -b1
        g1 = _flip_g(g1)
    if b2 < 0:
        b2 = -b2
        g2 = _flip_g(g2)
    if not b1.is_Rational or not b2.is_Rational:
        return
    # inflate both G functions so that their arguments carry the same
    # (integer multiple) exponent of x
    m1, n1 = b1.p, b1.q
    m2, n2 = b2.p, b2.q
    tau = ilcm(m1*n2, m2*n1)
    r1 = tau//(m1*n2)
    r2 = tau//(m2*n1)
    C1, g1 = _inflate_g(g1, r1)
    C2, g2 = _inflate_g(g2, r2)
    g1 = pb(g1)
    g2 = pb(g2)
    fac *= C1*C2
    a1, b = _get_coeff_exp(g1.argument, x)
    a2, _ = _get_coeff_exp(g2.argument, x)

    # arbitrarily tack on the x**s part to g1
    # TODO should we try both?
    exp = (s + 1)/b - 1
    fac = fac/(abs(b) * a1**exp)

    def tr(l): return [a + exp for a in l]
    g1 = meijerg(tr(g1.an), tr(g1.aother), tr(g1.bm), tr(g1.bother), a1*x)
    g2 = meijerg(g2.an, g2.aother, g2.bm, g2.bother, a2*x)
    return powdenest(fac, polar=True), g1, g2
def _check_antecedents(g1, g2, x):
    """
    Return a condition under which the integral theorem applies.

    The integral in question is int_0^oo g1(x)*g2(x) dx. The conditions are
    taken from [P], Section 2.24.1 (with k=l=r=alpha=1); the function returns
    an Or of all applicable cases.
    """
    from sympy import (re, Eq, Not, Ne, cos, I, exp, ceiling, sin, sign,
                       unpolarify)
    from sympy import arg as arg_, unbranched_argument as arg
    # Yes, this is madness.
    # XXX TODO this is a testing *nightmare*
    # NOTE if you update these conditions, please update the documentation as well

    # The following conditions are found in
    # [P], Section 2.24.1
    #
    # They are also reproduced (verbatim!) at
    # http://functions.wolfram.com/HypergeometricFunctions/MeijerG/21/02/03/
    #
    # Note: k=l=r=alpha=1
    sigma, _ = _get_coeff_exp(g1.argument, x)
    omega, _ = _get_coeff_exp(g2.argument, x)
    s, t, u, v = S([len(g1.bm), len(g1.an), len(g1.ap), len(g1.bq)])
    m, n, p, q = S([len(g2.bm), len(g2.an), len(g2.ap), len(g2.bq)])
    bstar = s + t - (u + v)/2
    cstar = m + n - (p + q)/2
    rho = g1.nu + (u - v)/2 + 1
    mu = g2.nu + (p - q)/2 + 1
    phi = q - p - (v - u)
    eta = 1 - (v - u) - mu - rho
    psi = (pi*(q - m - n) + abs(arg(omega)))/(q - p)
    theta = (pi*(v - s - t) + abs(arg(sigma)))/(v - u)
    lambda_c = (q - p)*abs(omega)**(1/(q - p))*cos(psi) \
        + (v - u)*abs(sigma)**(1/(v - u))*cos(theta)

    def lambda_s0(c1, c2):
        return c1*(q-p)*abs(omega)**(1/(q-p))*sin(psi) \
            + c2*(v-u)*abs(sigma)**(1/(v-u))*sin(theta)
    lambda_s = Piecewise(
        ((lambda_s0(+1, +1)*lambda_s0(-1, -1)),
         And(Eq(arg(sigma), 0), Eq(arg(omega), 0))),
        (lambda_s0(sign(arg(omega)), +1)*lambda_s0(sign(arg(omega)), -1),
         And(Eq(arg(sigma), 0), Ne(arg(omega), 0))),
        (lambda_s0(+1, sign(arg(sigma)))*lambda_s0(-1, sign(arg(sigma))),
         And(Ne(arg(sigma), 0), Eq(arg(omega), 0))),
        (lambda_s0(sign(arg(omega)), sign(arg(sigma))), True))

    _debug('Checking antecedents:')
    _debug(' sigma=%s, s=%s, t=%s, u=%s, v=%s, b*=%s, rho=%s'
           % (sigma, s, t, u, v, bstar, rho))
    _debug(' omega=%s, m=%s, n=%s, p=%s, q=%s, c*=%s, mu=%s,'
           % (omega, m, n, p, q, cstar, mu))
    _debug(' phi=%s, eta=%s, psi=%s, theta=%s' % (phi, eta, psi, theta))

    # c1: no difference a - b of an upper and a lower numerator parameter of
    # the *same* G function may be a positive integer.
    c1 = True
    for g in [g1, g2]:
        # BUG FIX: the inner loops previously iterated g1.an/g1.bm regardless
        # of g, so the condition was never checked for g2.
        for a in g.an:
            for b in g.bm:
                diff = a - b
                if diff > 0 and diff.is_integer:
                    c1 = False

    tmp = []
    for b in g1.bm:
        for d in g2.bm:
            tmp += [re(1 + b + d) > 0]
    c2 = And(*tmp)

    tmp = []
    for a in g1.an:
        for c in g2.an:
            tmp += [re(1 + a + c) < 1 + 1]
    c3 = And(*tmp)

    tmp = []
    for c in g1.an:
        tmp += [(p - q)*re(1 + c - 1) - re(mu) > -S(3)/2]
    c4 = And(*tmp)

    tmp = []
    for d in g1.bm:
        tmp += [(p - q)*re(1 + d) - re(mu) > -S(3)/2]
    c5 = And(*tmp)

    tmp = []
    for c in g2.an:
        tmp += [(u - v)*re(1 + c - 1) - re(rho) > -S(3)/2]
    c6 = And(*tmp)

    tmp = []
    for d in g2.bm:
        tmp += [(u - v)*re(1 + d) - re(rho) > -S(3)/2]
    c7 = And(*tmp)

    c8 = (abs(phi) + 2*re((rho - 1)*(q - p) + (v - u)*(q - p) + (mu - 1)*(v - u)) > 0)
    c9 = (abs(phi) - 2*re((rho - 1)*(q - p) + (v - u)*(q - p) + (mu - 1)*(v - u)) > 0)
    c10 = (abs(arg(sigma)) < bstar*pi)
    c11 = Eq(abs(arg(sigma)), bstar*pi)
    c12 = (abs(arg(omega)) < cstar*pi)
    c13 = Eq(abs(arg(omega)), cstar*pi)

    # The following condition is *not* implemented as stated on the wolfram
    # function site. In the book of prudnikov there is an additional part
    # (the And involving re()). However, I only have this book in russian, and
    # I don't read any russian. The following condition is what other people
    # have told me it means.
    # Worryingly, it is different from the condition implemented in REDUCE.
    # The REDUCE implementation:
    #   https://reduce-algebra.svn.sourceforge.net/svnroot/reduce-algebra/trunk/packages/defint/definta.red
    #   (search for tst14)
    # The Wolfram alpha version:
    #   http://functions.wolfram.com/HypergeometricFunctions/MeijerG/21/02/03/03/0014/
    z0 = exp(-(bstar + cstar)*pi*I)
    zos = unpolarify(z0*omega/sigma)
    zso = unpolarify(z0*sigma/omega)
    if zos == 1/zso:
        c14 = And(Eq(phi, 0), bstar + cstar <= 1,
                  Or(Ne(zos, 1), re(mu + rho + v - u) < 1,
                     re(mu + rho + q - p) < 1))
    else:
        c14 = And(Eq(phi, 0), bstar - 1 + cstar <= 0,
                  Or(And(Ne(zos, 1), abs(arg_(1 - zos)) < pi),
                     And(re(mu + rho + v - u) < 1, Eq(zos, 1))))

        c14_alt = And(Eq(phi, 0), cstar - 1 + bstar <= 0,
                      Or(And(Ne(zso, 1), abs(arg_(1 - zso)) < pi),
                         And(re(mu + rho + q - p) < 1, Eq(zso, 1))))

        # Since r=k=l=1, in our case there is c14_alt which is the same as calling
        # us with (g1, g2) = (g2, g1). The conditions below enumerate all cases
        # (i.e. we don't have to try arguments reversed by hand), and indeed try
        # all symmetric cases. (i.e. whenever there is a condition involving c14,
        # there is also a dual condition which is exactly what we would get when g1,
        # g2 were interchanged, *but c14 was unaltered*).
        # Hence the following seems correct:
        c14 = Or(c14, c14_alt)

    tmp = [lambda_c > 0,
           And(Eq(lambda_c, 0), Ne(lambda_s, 0), re(eta) > -1),
           And(Eq(lambda_c, 0), Eq(lambda_s, 0), re(eta) > 0)]
    c15 = Or(*tmp)
    if _eval_cond(lambda_c > 0) is not False:
        c15 = (lambda_c > 0)

    for cond, i in [(c1, 1), (c2, 2), (c3, 3), (c4, 4), (c5, 5), (c6, 6),
                    (c7, 7), (c8, 8), (c9, 9), (c10, 10), (c11, 11),
                    (c12, 12), (c13, 13), (c14, 14), (c15, 15)]:
        _debug(' c%s:' % i, cond)

    # We will return Or(*conds)
    conds = []

    def pr(count):
        _debug(' case %s:' % count, conds[-1])
    conds += [And(m*n*s*t != 0, bstar > 0, cstar > 0, c1, c2, c3, c10, c12)] #1
    pr(1)
    conds += [And(Eq(u, v), Eq(bstar, 0), cstar > 0, sigma > 0, re(rho) < 1,
                  c1, c2, c3, c12)] #2
    pr(2)
    conds += [And(Eq(p, q), Eq(cstar, 0), bstar > 0, omega > 0, re(mu) < 1,
                  c1, c2, c3, c10)] #3
    pr(3)
    conds += [And(Eq(p, q), Eq(u, v), Eq(bstar, 0), Eq(cstar, 0),
                  sigma > 0, omega > 0, re(mu) < 1, re(rho) < 1,
                  Ne(sigma, omega), c1, c2, c3)] #4
    pr(4)
    conds += [And(Eq(p, q), Eq(u, v), Eq(bstar, 0), Eq(cstar, 0),
                  sigma > 0, omega > 0, re(mu + rho) < 1,
                  Ne(omega, sigma), c1, c2, c3)] #5
    pr(5)
    conds += [And(p > q, s > 0, bstar > 0, cstar >= 0,
                  c1, c2, c3, c5, c10, c13)] #6
    pr(6)
    conds += [And(p < q, t > 0, bstar > 0, cstar >= 0,
                  c1, c2, c3, c4, c10, c13)] #7
    pr(7)
    conds += [And(u > v, m > 0, cstar > 0, bstar >= 0,
                  c1, c2, c3, c7, c11, c12)] #8
    pr(8)
    conds += [And(u < v, n > 0, cstar > 0, bstar >= 0,
                  c1, c2, c3, c6, c11, c12)] #9
    pr(9)
    conds += [And(p > q, Eq(u, v), Eq(bstar, 0), cstar >= 0, sigma > 0,
                  re(rho) < 1, c1, c2, c3, c5, c13)] #10
    pr(10)
    conds += [And(p < q, Eq(u, v), Eq(bstar, 0), cstar >= 0, sigma > 0,
                  re(rho) < 1, c1, c2, c3, c4, c13)] #11
    pr(11)
    conds += [And(Eq(p, q), u > v, bstar >= 0, Eq(cstar, 0), omega > 0,
                  re(mu) < 1, c1, c2, c3, c7, c11)] #12
    pr(12)
    conds += [And(Eq(p, q), u < v, bstar >= 0, Eq(cstar, 0), omega > 0,
                  re(mu) < 1, c1, c2, c3, c6, c11)] #13
    pr(13)
    conds += [And(p < q, u > v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c4, c7, c11, c13)] #14
    pr(14)
    conds += [And(p > q, u < v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c5, c6, c11, c13)] #15
    pr(15)
    conds += [And(p > q, u > v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c5, c7, c8, c11, c13, c14)] #16
    pr(16)
    conds += [And(p < q, u < v, bstar >= 0, cstar >= 0,
                  c1, c2, c3, c4, c6, c9, c11, c13, c14)] #17
    pr(17)
    conds += [And(Eq(t, 0), s > 0, bstar > 0, phi > 0, c1, c2, c10)] #18
    pr(18)
    conds += [And(Eq(s, 0), t > 0, bstar > 0, phi < 0, c1, c3, c10)] #19
    pr(19)
    conds += [And(Eq(n, 0), m > 0, cstar > 0, phi < 0, c1, c2, c12)] #20
    pr(20)
    conds += [And(Eq(m, 0), n > 0, cstar > 0, phi > 0, c1, c3, c12)] #21
    pr(21)
    conds += [And(Eq(s*t, 0), bstar > 0, cstar > 0,
                  c1, c2, c3, c10, c12)] #22
    pr(22)
    conds += [And(Eq(m*n, 0), bstar > 0, cstar > 0,
                  c1, c2, c3, c10, c12)] #23
    pr(23)

    # The following case is from [Luke1969]. As far as I can tell, it is *not*
    # covered by prudnikov's.
    # Let G1 and G2 be the two G-functions. Suppose the integral exists from
    # 0 to a > 0 (this is easy the easy part), that G1 is exponential decay at
    # infinity, and that the mellin transform of G2 exists.
    # Then the integral exists.
    mt1_exists = _check_antecedents_1(g1, x, helper=True)
    mt2_exists = _check_antecedents_1(g2, x, helper=True)
    conds += [And(mt2_exists, Eq(t, 0), u < s, bstar > 0, c10, c1, c2, c3)]
    pr('E1')
    conds += [And(mt2_exists, Eq(s, 0), v < t, bstar > 0, c10, c1, c2, c3)]
    pr('E2')
    conds += [And(mt1_exists, Eq(n, 0), p < m, cstar > 0, c12, c1, c2, c3)]
    pr('E3')
    conds += [And(mt1_exists, Eq(m, 0), q < n, cstar > 0, c12, c1, c2, c3)]
    pr('E4')

    # Let's short-circuit if this worked ...
    # the rest is corner-cases and terrible to read.
    r = Or(*conds)
    if _eval_cond(r) is not False:
        return r

    conds += [And(m + n > p, Eq(t, 0), Eq(phi, 0), s > 0, bstar > 0, cstar < 0,
                  abs(arg(omega)) < (m + n - p + 1)*pi,
                  c1, c2, c10, c14, c15)] #24
    pr(24)
    conds += [And(m + n > q, Eq(s, 0), Eq(phi, 0), t > 0, bstar > 0, cstar < 0,
                  abs(arg(omega)) < (m + n - q + 1)*pi,
                  c1, c3, c10, c14, c15)] #25
    pr(25)
    conds += [And(Eq(p, q - 1), Eq(t, 0), Eq(phi, 0), s > 0, bstar > 0,
                  cstar >= 0, cstar*pi < abs(arg(omega)),
                  c1, c2, c10, c14, c15)] #26
    pr(26)
    conds += [And(Eq(p, q + 1), Eq(s, 0), Eq(phi, 0), t > 0, bstar > 0,
                  cstar >= 0, cstar*pi < abs(arg(omega)),
                  c1, c3, c10, c14, c15)] #27
    pr(27)
    conds += [And(p < q - 1, Eq(t, 0), Eq(phi, 0), s > 0, bstar > 0,
                  cstar >= 0, cstar*pi < abs(arg(omega)),
                  abs(arg(omega)) < (m + n - p + 1)*pi,
                  c1, c2, c10, c14, c15)] #28
    pr(28)
    conds += [And(p > q + 1, Eq(s, 0), Eq(phi, 0), t > 0, bstar > 0, cstar >= 0,
                  cstar*pi < abs(arg(omega)),
                  abs(arg(omega)) < (m + n - q + 1)*pi,
                  c1, c3, c10, c14, c15)] #29
    pr(29)
    conds += [And(Eq(n, 0), Eq(phi, 0), s + t > 0, m > 0, cstar > 0, bstar < 0,
                  abs(arg(sigma)) < (s + t - u + 1)*pi,
                  c1, c2, c12, c14, c15)] #30
    pr(30)
    conds += [And(Eq(m, 0), Eq(phi, 0), s + t > v, n > 0, cstar > 0, bstar < 0,
                  abs(arg(sigma)) < (s + t - v + 1)*pi,
                  c1, c3, c12, c14, c15)] #31
    pr(31)
    conds += [And(Eq(n, 0), Eq(phi, 0), Eq(u, v - 1), m > 0, cstar > 0,
                  bstar >= 0, bstar*pi < abs(arg(sigma)),
                  abs(arg(sigma)) < (bstar + 1)*pi,
                  c1, c2, c12, c14, c15)] #32
    pr(32)
    conds += [And(Eq(m, 0), Eq(phi, 0), Eq(u, v + 1), n > 0, cstar > 0,
                  bstar >= 0, bstar*pi < abs(arg(sigma)),
                  abs(arg(sigma)) < (bstar + 1)*pi,
                  c1, c3, c12, c14, c15)] #33
    pr(33)
    conds += [And(Eq(n, 0), Eq(phi, 0), u < v - 1, m > 0, cstar > 0, bstar >= 0,
                  bstar*pi < abs(arg(sigma)),
                  abs(arg(sigma)) < (s + t - u + 1)*pi,
                  c1, c2, c12, c14, c15)] #34
    pr(34)
    conds += [And(Eq(m, 0), Eq(phi, 0), u > v + 1, n > 0, cstar > 0, bstar >= 0,
                  bstar*pi < abs(arg(sigma)),
                  abs(arg(sigma)) < (s + t - v + 1)*pi,
                  c1, c3, c12, c14, c15)] #35
    pr(35)

    return Or(*conds)
# NOTE An alternative, but as far as I can tell weaker, set of conditions
# can be found in [L, section 5.6.2].
def _int0oo(g1, g2, x):
    """
    Express integral from zero to infinity g1*g2 using a G function,
    assuming the necessary conditions are fulfilled.
    >>> from sympy.integrals.meijerint import _int0oo
    >>> from sympy.abc import s, t, m
    >>> from sympy import meijerg, S
    >>> g1 = meijerg([], [], [-S(1)/2, 0], [], s**2*t/4)
    >>> g2 = meijerg([], [], [m/2], [-m/2], t/4)
    >>> _int0oo(g1, g2, t)
    4*meijerg(((1/2, 0), ()), ((m/2,), (-m/2,)), s**(-2))/s**2
    """
    # See: [L, section 5.6.2, equation (1)]
    eta, _ = _get_coeff_exp(g1.argument, x)
    omega, _ = _get_coeff_exp(g2.argument, x)

    def negated(params):
        # The parameters of g1 enter the product formula with flipped sign.
        return [-p for p in params]

    an = negated(g1.bm) + list(g2.an)
    aother = list(g2.aother) + negated(g1.bother)
    bm = negated(g1.an) + list(g2.bm)
    bother = list(g2.bother) + negated(g1.aother)
    return meijerg(an, aother, bm, bother, omega/eta)/eta
def _rewrite_inversion(fac, po, g, x):
    """Absorb the power ``po == x**s`` into the G function ``g``.

    Returns the adjusted prefactor and the shifted G function.
    """
    _, s = _get_coeff_exp(po, x)
    a, b = _get_coeff_exp(g.argument, x)
    shift = s/b

    def shifted(params):
        # Shifting every parameter by s/b absorbs x**s into the G function.
        return [p + shift for p in params]

    newfac = powdenest(fac/a**shift, polar=True)
    newg = meijerg(shifted(g.an), shifted(g.aother),
                   shifted(g.bm), shifted(g.bother), g.argument)
    return newfac, newg
def _check_antecedents_inversion(g, x):
    """ Check antecedents for the laplace inversion integral.

    ``g`` is a G function with argument of the form c*x**e.  Returns a
    condition (possibly just True/False) under which the Laplace inversion
    integral of ``g`` exists, following [L], sections 5.7-10.
    """
    from sympy import re, im, Or, And, Eq, exp, I, Add, nan, Ne
    _debug('Checking antecedents for inversion:')
    z = g.argument
    _, e = _get_coeff_exp(z, x)
    if e < 0:
        _debug('  Flipping G.')
        # We want to assume that argument gets large as |x| -> oo
        return _check_antecedents_inversion(_flip_g(g), x)

    def statement_half(a, b, c, z, plus):
        # Convergence statement for z**a * exp(b*z**c) along one half of the
        # inversion contour; ``plus`` selects which half.
        coeff, exponent = _get_coeff_exp(z, x)
        a *= exponent
        b *= coeff**c  # NOTE: deliberately uses c *before* it is rescaled
        c *= exponent
        conds = []
        wp = b*exp(I*re(c)*pi/2)
        wm = b*exp(-I*re(c)*pi/2)
        if plus:
            w = wp
        else:
            w = wm
        conds += [And(Or(Eq(b, 0), re(c) <= 0), re(a) <= -1)]
        conds += [And(Ne(b, 0), Eq(im(c), 0), re(c) > 0, re(w) < 0)]
        conds += [And(Ne(b, 0), Eq(im(c), 0), re(c) > 0, re(w) <= 0,
                      re(a) <= -1)]
        return Or(*conds)

    def statement(a, b, c, z):
        """ Provide a convergence statement for z**a * exp(b*z**c),
            c/f sphinx docs. """
        return And(statement_half(a, b, c, z, True),
                   statement_half(a, b, c, z, False))

    # Notations from [L], section 5.7-10
    m, n, p, q = S([len(g.bm), len(g.an), len(g.ap), len(g.bq)])
    tau = m + n - p
    nu = q - m - n
    rho = (tau - nu)/2
    sigma = q - p
    if sigma == 1:
        epsilon = S(1)/2
    elif sigma > 1:
        epsilon = 1
    else:
        epsilon = nan
    theta = ((1 - sigma)/2 + Add(*g.bq) - Add(*g.ap))/sigma
    delta = g.delta
    _debug('  m=%s, n=%s, p=%s, q=%s, tau=%s, nu=%s, rho=%s, sigma=%s' % (
        m, n, p, q, tau, nu, rho, sigma))
    _debug('  epsilon=%s, theta=%s, delta=%s' % (epsilon, theta, delta))

    # First check if the computation is valid.
    if not (g.delta >= e/2 or (p >= 1 and p >= q)):
        _debug('  Computation not valid for these parameters.')
        return False

    # Now check if the inversion integral exists.

    # Test "condition A"
    for a in g.an:
        for b in g.bm:
            if (a - b).is_integer and a > b:
                _debug('  Not a valid G function.')
                return False

    # There are two cases. If p >= q, we can directly use a slater expansion
    # like [L], 5.2 (11). Note in particular that the asymptotics of such an
    # expansion even hold when some of the parameters differ by integers, i.e.
    # the formula itself would not be valid! (b/c G functions are cts. in their
    # parameters)
    # When p < q, we need to use the theorems of [L], 5.10.

    if p >= q:
        _debug('  Using asymptotic slater expansion.')
        return And(*[statement(a - 1, 0, 0, z) for a in g.an])

    # BUG FIX: ``statement`` takes four arguments (a, b, c, z); the original
    # call here passed only three, which raised a TypeError whenever this
    # p < q path was reached.  Mirror the p >= q call above.
    def E(z): return And(*[statement(a - 1, 0, 0, z) for a in g.an])
    def H(z): return statement(theta, -sigma, 1/sigma, z)
    def Hp(z): return statement_half(theta, -sigma, 1/sigma, z, True)
    def Hm(z): return statement_half(theta, -sigma, 1/sigma, z, False)

    # [L], section 5.10
    conds = []
    # Theorem 1
    conds += [And(1 <= n, p < q, 1 <= m, rho*pi - delta >= pi/2, delta > 0,
                  E(z*exp(I*pi*(nu + 1))))]
    # Theorem 2, statements (2) and (3)
    conds += [And(p + 1 <= m, m + 1 <= q, delta > 0, delta < pi/2, n == 0,
                  (m - p + 1)*pi - delta >= pi/2,
                  Hp(z*exp(I*pi*(q - m))), Hm(z*exp(-I*pi*(q - m))))]
    # Theorem 2, statement (5)
    conds += [And(p < q, m == q, n == 0, delta > 0,
                  (sigma + epsilon)*pi - delta >= pi/2, H(z))]
    # Theorem 3, statements (6) and (7)
    conds += [And(Or(And(p <= q - 2, 1 <= tau, tau <= sigma/2),
                     And(p + 1 <= m + n, m + n <= (p + q)/2)),
                  delta > 0, delta < pi/2, (tau + 1)*pi - delta >= pi/2,
                  Hp(z*exp(I*pi*nu)), Hm(z*exp(-I*pi*nu)))]
    # Theorem 4, statements (10) and (11)
    conds += [And(p < q, 1 <= m, rho > 0, delta > 0, delta + rho*pi < pi/2,
                  (tau + epsilon)*pi - delta >= pi/2,
                  Hp(z*exp(I*pi*nu)), Hm(z*exp(-I*pi*nu)))]
    # Trivial case
    conds += [m == 0]

    # TODO
    # Theorem 5 is quite general
    # Theorem 6 contains special cases for q=p+1
    return Or(*conds)
def _int_inversion(g, x, t):
    """
    Compute the laplace inversion integral, assuming the formula applies.
    """
    coeff, expo = _get_coeff_exp(g.argument, x)
    fac, h = _inflate_fox_h(
        meijerg(g.an, g.aother, g.bm, g.bother, coeff/t**expo), -expo)
    return fac/t*h
####################################################################
# Finally, the real meat.
####################################################################

# Module-global table of known Mellin-type rewritings; built lazily by
# _rewrite_single on first use (creating it is expensive).
_lookup_table = None
@cacheit
@timeit
def _rewrite_single(f, x, recursive=True):
    """
    Try to rewrite f as a sum of single G functions of the form
    C*x**s*G(a*x**b), where b is a rational number and C is independent of x.
    We guarantee that result.argument.as_coeff_mul(x) returns (a, (x**b,))
    or (a, ()).
    Returns a list of tuples (C, s, G) and a condition cond.
    Returns None on failure.
    """
    from sympy import polarify, unpolarify, oo, zoo, Tuple
    global _lookup_table
    if not _lookup_table:
        # Build the table of known rewritings lazily on first use.
        _lookup_table = {}
        _create_lookup_table(_lookup_table)

    if isinstance(f, meijerg):
        # f is already a G function - just normalise the argument into the
        # coeff*x**b form that we guarantee to our callers.
        from sympy import factor
        coeff, m = factor(f.argument, x).as_coeff_mul(x)
        if len(m) > 1:
            return None
        m = m[0]
        if m.is_Pow:
            if m.base != x or not m.exp.is_Rational:
                return None
        elif m != x:
            return None
        return [(1, 0, meijerg(f.an, f.aother, f.bm, f.bother, coeff*m))], True

    f_ = f
    f = f.subs(x, z)
    t = _mytype(f, z)
    if t in _lookup_table:
        # Try every stored formula whose "type" matches f.
        l = _lookup_table[t]
        for formula, terms, cond, hint in l:
            subs = f.match(formula)
            if subs:
                subs_ = {}
                for fro, to in subs.items():
                    # Normalise branches in the matched subexpressions.
                    subs_[fro] = unpolarify(polarify(to, lift=True),
                                            exponents_only=True)
                subs = subs_
                if not isinstance(hint, bool):
                    hint = hint.subs(subs)
                if hint is False:
                    continue
                if not isinstance(cond, bool):
                    cond = unpolarify(cond.subs(subs))
                if _eval_cond(cond) is False:
                    continue
                if not isinstance(terms, list):
                    # terms may be a callable that builds the term list from
                    # the match dictionary.
                    terms = terms(subs)
                res = []
                for fac, g in terms:
                    r1 = _get_coeff_exp(unpolarify(fac.subs(subs).subs(z, x),
                                                   exponents_only=True), x)
                    g = g.subs(subs).subs(z, x)
                    # NOTE these substitutions can in principle introduce oo,
                    #      zoo and other absurdities. It shouldn't matter,
                    #      but better be safe.
                    if Tuple(*(r1 + (g,))).has(oo, zoo, -oo):
                        continue
                    g = meijerg(g.an, g.aother, g.bm, g.bother,
                                unpolarify(g.argument, exponents_only=True))
                    res.append(r1 + (g,))
                if res:
                    return res, cond

    # try recursive mellin transform
    if not recursive:
        return None
    _debug('Trying recursive mellin transform method.')
    from sympy.integrals.transforms import (mellin_transform,
        inverse_mellin_transform, IntegralTransformError,
        MellinTransformStripError)
    from sympy import oo, nan, zoo, simplify, cancel

    def my_imt(F, s, x, strip):
        """ Calling simplify() all the time is slow and not helpful, since
        most of the time it only factors things in a way that has to be
        un-done anyway. But sometimes it can remove apparent poles. """
        # XXX should this be in inverse_mellin_transform?
        try:
            return inverse_mellin_transform(F, s, x, strip,
                                            as_meijerg=True, needeval=True)
        except MellinTransformStripError:
            # Retry with a simplified transform: simplification can remove
            # apparent poles that made the strip check fail.
            return inverse_mellin_transform(simplify(cancel(expand(F))),
                                            s, x, strip,
                                            as_meijerg=True, needeval=True)
    f = f_
    s = _dummy('s', 'rewrite-single', f)
    # to avoid infinite recursion, we have to force the two g functions case

    def my_integrator(f, x):
        from sympy import Integral, hyperexpand
        r = _meijerint_definite_4(f, x, only_double=True)
        if r is not None:
            res, cond = r
            res = _my_unpolarify(hyperexpand(res, rewrite='nonrepsmall'))
            return Piecewise((res, cond),
                             (Integral(f, (x, 0, oo)), True))
        return Integral(f, (x, 0, oo))
    try:
        F, strip, _ = mellin_transform(f, x, s, integrator=my_integrator,
                                       simplify=False, needeval=True)
        g = my_imt(F, s, x, strip)
    except IntegralTransformError:
        g = None
    if g is None:
        # We try to find an expression by analytic continuation.
        # (also if the dummy is already in the expression, there is no point in
        #  putting in another one)
        a = _dummy_('a', 'rewrite-single')
        if not f.has(a) and _is_analytic(f, x):
            try:
                F, strip, _ = mellin_transform(f.subs(x, a*x), x, s,
                                               integrator=my_integrator,
                                               needeval=True, simplify=False)
                g = my_imt(F, s, x, strip).subs(a, 1)
            except IntegralTransformError:
                g = None
    if g is None or g.has(oo, nan, zoo):
        _debug('Recursive mellin transform failed.')
        return None
    # Split the inverse transform into additive terms and bring each into
    # the guaranteed C*x**s*G(a*x**b) shape.
    args = Add.make_args(g)
    res = []
    for f in args:
        c, m = f.as_coeff_mul(x)
        if len(m) > 1:
            raise NotImplementedError('Unexpected form...')
        g = m[0]
        a, b = _get_coeff_exp(g.argument, x)
        res += [(c, 0, meijerg(g.an, g.aother, g.bm, g.bother,
                               unpolarify(polarify(a, lift=True),
                                          exponents_only=True)*x**b))]
    _debug('Recursive mellin transform worked:', g)
    return res, True
def _rewrite1(f, x, recursive=True):
    """
    Try to rewrite f using a (sum of) single G functions with argument a*x**b.
    Return fac, po, g such that f = fac*po*g, fac is independent of x
    and po = x**s.
    Here g is a result from _rewrite_single.
    Return None on failure.
    """
    fac, po, rest = _split_mul(f, x)
    rewritten = _rewrite_single(rest, x, recursive)
    if not rewritten:
        return None
    terms, cond = rewritten
    return fac, po, terms, cond
def _rewrite2(f, x):
    """
    Try to rewrite f as a product of two G functions of arguments a*x**b.
    Return fac, po, g1, g2 such that f = fac*po*g1*g2, where fac is
    independent of x and po is x**s.
    Here g1 and g2 are results of _rewrite_single.
    Returns None on failure.
    """
    fac, po, g = _split_mul(f, x)
    # If any individual factor cannot be rewritten at all, no two-way split
    # of the product can succeed either - bail out early.
    if any(_rewrite_single(expr, x, False) is None for expr in _mul_args(g)):
        return None
    l = _mul_as_two_parts(g)
    if not l:
        return None
    # Prefer splittings whose halves are simplest: fewest distinct exponents,
    # then fewest special functions, then fewest splitting points.
    l.sort(key=lambda p: (max(len(_exponents(p[0], x)), len(_exponents(p[1], x))),
                          max(len(_functions(p[0], x)), len(_functions(p[1], x))),
                          max(len(_find_splitting_points(p[0], x)),
                              len(_find_splitting_points(p[1], x)))))
    # Try the cheap non-recursive rewriting for all splittings first; fall
    # back to the expensive recursive Mellin method only afterwards.
    for recursive in [False, True]:
        for fac1, fac2 in l:
            g1 = _rewrite_single(fac1, x, recursive)
            g2 = _rewrite_single(fac2, x, recursive)
            if g1 and g2:
                cond = And(g1[1], g2[1])
                if cond is not False:
                    return fac, po, g1[0], g2[0], cond
def meijerint_indefinite(f, x):
    """
    Compute an indefinite integral of ``f`` by rewriting it as a G function.
    >>> from sympy.integrals.meijerint import meijerint_indefinite
    >>> from sympy import sin
    >>> from sympy.abc import x
    >>> meijerint_indefinite(sin(x), x)
    -cos(x)
    """
    from sympy import hyper, meijerg, count_ops
    results = []
    # Try shifting the integration variable by each sensible splitting point
    # (and by 0, i.e. no shift) - a shift can make the G-function rewriting
    # succeed, or yield a simpler antiderivative.
    for a in list(_find_splitting_points(f, x)) + [S(0)]:
        res = _meijerint_indefinite_1(f.subs(x, x + a), x)
        if res is None:
            continue
        results.append(res.subs(x, x - a))
        # A result free of unevaluated hyper/meijerg functions is as good as
        # it gets - return it immediately.
        if not res.has(hyper, meijerg):
            return results[-1]
    if results:
        # Otherwise return the candidate with the fewest operations.
        # (Implicitly returns None when nothing worked.)
        return sorted(results, key=count_ops)[0]
def _meijerint_indefinite_1(f, x):
    """ Helper that does not attempt any substitution. """
    from sympy import Integral, piecewise_fold
    _debug('Trying to compute the indefinite integral of', f, 'wrt', x)
    gs = _rewrite1(f, x)
    if gs is None:
        # Note: the code that calls us will do expand() and try again
        return None
    fac, po, gl, cond = gs
    _debug(' could rewrite:', gs)
    res = S(0)
    for C, s, g in gl:
        a, b = _get_coeff_exp(g.argument, x)
        _, c = _get_coeff_exp(po, x)
        c += s
        # we do a substitution t=a*x**b, get integrand fac*t**rho*g
        fac_ = fac * C / (b*a**((1 + c)/b))
        rho = (c + 1)/b - 1
        # we now use t**rho*G(params, t) = G(params + rho, t)
        # [L, page 150, equation (4)]
        # and integral G(params, t) dt = G(1, params+1, 0, t)
        # (or a similar expression with 1 and 0 exchanged ... pick the one
        # which yields a well-defined function)
        # [R, section 5]
        # (Note that this dummy will immediately go away again, so we
        #  can safely pass S(1) for ``expr``.)
        t = _dummy('t', 'meijerint-indefinite', S(1))

        def tr(p):
            # Shift all parameters by rho + 1 (absorbs the t**rho factor and
            # performs the integration step in one go).
            return [a + rho + 1 for a in p]
        if any(b.is_integer and b <= 0 for b in tr(g.bm)):
            # Choose the variant whose parameters yield a well-defined G.
            r = -meijerg(tr(g.an), tr(g.aother) + [1],
                         tr(g.bm) + [0], tr(g.bother), t)
        else:
            r = meijerg(tr(g.an) + [1], tr(g.aother),
                        tr(g.bm), tr(g.bother) + [0], t)
        r = hyperexpand(r.subs(t, a*x**b))
        # now substitute back
        # Note: we really do want the powers of x to combine.
        res += powdenest(fac_*r, polar=True)

    def _clean(res):
        """This multiplies out superfluous powers of x we created, and chops off
        constants:
        >> _clean(x*(exp(x)/x - 1/x) + 3)
        exp(x)
        cancel is used before mul_expand since it is possible for an
        expression to have an additive constant that doesn't become isolated
        with simple expansion. Such a situation was identified in issue 3270:
        >>> from sympy import sqrt, cancel
        >>> from sympy.abc import x
        >>> a = sqrt(2*x + 1)
        >>> bad = (3*x*a**5 + 2*x - a**5 + 1)/a**2
        >>> bad.expand().as_independent(x)[0]
        0
        >>> cancel(bad).expand().as_independent(x)[0]
        1
        """
        from sympy import cancel
        res = expand_mul(cancel(res), deep=False)
        return Add._from_args(res.as_coeff_add(x)[1])

    res = piecewise_fold(res)
    if res.is_Piecewise:
        nargs = []
        # NOTE(review): this loop rebinds ``cond``, so the final Piecewise
        # below uses the last branch's condition rather than the rewrite
        # condition when res is Piecewise - confirm this is intended.
        for expr, cond in res.args:
            expr = _my_unpolarify(_clean(expr))
            nargs += [(expr, cond)]
        res = Piecewise(*nargs)
    else:
        res = _my_unpolarify(_clean(res))
    return Piecewise((res, _my_unpolarify(cond)), (Integral(f, x), True))
@timeit
def meijerint_definite(f, x, a, b):
    """
    Integrate ``f`` over the interval [``a``, ``b``], by rewriting it as a product
    of two G functions, or as a single G function.
    Return res, cond, where cond are convergence conditions.
    >>> from sympy.integrals.meijerint import meijerint_definite
    >>> from sympy import exp, oo
    >>> from sympy.abc import x
    >>> meijerint_definite(exp(-x**2), x, -oo, oo)
    (sqrt(pi), True)
    This function is implemented as a succession of functions
    meijerint_definite, _meijerint_definite_2, _meijerint_definite_3,
    _meijerint_definite_4. Each function in the list calls the next one
    (presumably) several times. This means that calling meijerint_definite
    can be very costly.
    """
    # This consists of three steps:
    # 1) Change the integration limits to 0, oo
    # 2) Rewrite in terms of G functions
    # 3) Evaluate the integral
    #
    # There are usually several ways of doing this, and we want to try all.
    # This function does (1), calls _meijerint_definite_2 for step (2).
    from sympy import Integral, arg, exp, I, And, DiracDelta, count_ops
    _debug('Integrating', f, 'wrt %s from %s to %s.' % (x, a, b))
    if f.has(DiracDelta):
        _debug('Integrand has DiracDelta terms - giving up.')
        return None
    f_, x_, a_, b_ = f, x, a, b
    # Let's use a dummy in case any of the boundaries has x.
    d = Dummy('x')
    f = f.subs(x, d)
    x = d
    if a == -oo and b != oo:
        # Reflect the variable to map (-oo, b) onto (-b, oo).
        return meijerint_definite(f.subs(x, -x), x, -b, -a)
    if a == -oo:
        # Integrating -oo to oo. We need to find a place to split the integral.
        _debug('  Integrating -oo to +oo.')
        innermost = _find_splitting_points(f, x)
        _debug('  Sensible splitting points:', innermost)
        for c in sorted(innermost, key=default_sort_key, reverse=True) + [S(0)]:
            _debug('  Trying to split at', c)
            if not c.is_real:
                _debug('  Non-real splitting point.')
                continue
            res1 = _meijerint_definite_2(f.subs(x, x + c), x)
            if res1 is None:
                _debug('  But could not compute first integral.')
                continue
            res2 = _meijerint_definite_2(f.subs(x, c - x), x)
            if res2 is None:
                _debug('  But could not compute second integral.')
                continue
            res1, cond1 = res1
            res2, cond2 = res2
            cond = _condsimp(And(cond1, cond2))
            if cond is False:
                _debug('  But combined condition is always false.')
                continue
            res = res1 + res2
            return res, cond
        return
    if a == oo:
        # Reversed orientation: swap limits and negate the result.
        return -meijerint_definite(f, x, b, oo)
    if (a, b) == (0, oo):
        # This is a common case - try it directly first.
        res = _meijerint_definite_2(f, x)
        if res is not None and not res[0].has(meijerg):
            return res
    results = []
    if b == oo:
        for split in _find_splitting_points(f, x):
            if (a - split >= 0) is True:
                # Shift by the splitting point and cut off below a with a
                # Heaviside factor.
                _debug('Trying x --> x + %s' % split)
                res = _meijerint_definite_2(f.subs(x, x + split)
                                            *Heaviside(x + split - a), x)
                if res is not None:
                    if res[0].has(meijerg):
                        results.append(res)
                    else:
                        return res
    f = f.subs(x, x + a)
    b = b - a
    a = 0
    if b != oo:
        # Finite upper limit: rotate b onto the positive real axis and
        # truncate the integrand with a Heaviside factor so that we can
        # integrate from 0 to oo.
        phi = exp(I*arg(b))
        b = abs(b)
        f = f.subs(x, phi*x)
        f *= Heaviside(b - x)*phi
        b = oo
    _debug('Changed limits to', a, b)
    _debug('Changed function to', f)
    res = _meijerint_definite_2(f, x)
    if res is not None:
        if res[0].has(meijerg):
            results.append(res)
        else:
            return res
    if results:
        # Every candidate still contains unevaluated G functions; return the
        # one with the fewest operations.
        return sorted(results, key=lambda x: count_ops(x[0]))[0]
def _guess_expansion(f, x):
    """ Try to guess sensible rewritings for integrand f(x).

    Returns a list of (expression, explanation) pairs, starting with the
    unmodified integrand; each subsequent entry expands the previous one
    further, and is only kept when the expansion actually changed something.
    """
    from sympy import expand_trig
    from sympy.functions.elementary.trigonometric import TrigonometricFunction
    from sympy.functions.elementary.hyperbolic import HyperbolicFunction
    # BUG FIX: corrected the typo 'originial' in the debug explanation string.
    res = [(f, 'original integrand')]
    expanded = expand_mul(res[-1][0])
    if expanded != res[-1][0]:
        res += [(expanded, 'expand_mul')]
    expanded = expand(res[-1][0])
    if expanded != res[-1][0]:
        res += [(expanded, 'expand')]
    if res[-1][0].has(TrigonometricFunction, HyperbolicFunction):
        # expand_trig is comparatively expensive - only attempt it when trig
        # or hyperbolic functions are actually present.
        expanded = expand_mul(expand_trig(res[-1][0]))
        if expanded != res[-1][0]:
            res += [(expanded, 'expand_trig, expand_mul')]
    return res
def _meijerint_definite_2(f, x):
    """
    Try to integrate f dx from zero to infinity.

    Computes various 'simplifications' f1, f2, ... of f (e.g. via
    expand_mul(), trig expansion - see _guess_expansion) and calls
    _meijerint_definite_3 with each of them in succession; the first
    successful result is returned.
    """
    # This function does preparation for (2), calls
    # _meijerint_definite_3 for (2) and (3) combined.

    # use a positive dummy - we integrate from 0 to oo
    pos = _dummy('x', 'meijerint-definite2', f, positive=True)
    f = f.subs(x, pos)
    if f == 0:
        return S(0), True
    for candidate, explanation in _guess_expansion(f, pos):
        _debug('Trying', explanation)
        ans = _meijerint_definite_3(candidate, pos)
        if ans is not None and ans[1] is not False:
            return ans
def _meijerint_definite_3(f, x):
    """
    Try to integrate f dx from zero to infinity.

    Delegates to _meijerint_definite_4; if that fails and f is a sum,
    falls back to integrating term by term (linearity).
    """
    direct = _meijerint_definite_4(f, x)
    if direct is not None and direct[1] is not False:
        return direct
    if not f.is_Add:
        return None
    _debug('Expanding and evaluating all terms.')
    pieces = [_meijerint_definite_4(term, x) for term in f.args]
    if any(piece is None for piece in pieces):
        return None
    total = S(0)
    conds = []
    for value, condition in pieces:
        total += value
        conds.append(condition)
    combined = And(*conds)
    if combined is not False:
        return total, combined
def _my_unpolarify(f):
    """Unpolarify ``f`` and run the result through ``_eval_cond``."""
    from sympy import unpolarify
    depolarized = unpolarify(f)
    return _eval_cond(depolarized)
@timeit
def _meijerint_definite_4(f, x, only_double=False):
    """
    Try to integrate f dx from zero to infinity.
    This function tries to apply the integration theorems found in literature,
    i.e. it tries to rewrite f as either one or a product of two G-functions.
    The parameter ``only_double`` is used internally in the recursive algorithm
    to disable trying to rewrite f as a single G-function.
    """
    # This function does (2) and (3)
    _debug('Integrating', f)
    # Try single G function.
    if not only_double:
        gs = _rewrite1(f, x, recursive=False)
        if gs is not None:
            fac, po, g, cond = gs
            _debug('Could rewrite as single G function:', fac, po, g)
            res = S(0)
            for C, s, f in g:
                if C == 0:
                    continue
                C, f = _rewrite_saxena_1(fac*C, po*x**s, f, x)
                res += C*_int0oo_1(f, x)
                # Accumulate the convergence conditions of every term.
                cond = And(cond, _check_antecedents_1(f, x))
            cond = _my_unpolarify(cond)
            _debug('Result before branch substitutions is:', res)
            if cond is False:
                _debug('But cond is always False.')
            else:
                return _my_unpolarify(hyperexpand(res)), cond
    # Try two G functions.
    gs = _rewrite2(f, x)
    if gs is not None:
        # Try first without, then with, the full polar-branch treatment in
        # the Saxena substitution; gs is re-unpacked each pass so that cond
        # starts fresh.
        for full_pb in [False, True]:
            fac, po, g1, g2, cond = gs
            _debug('Could rewrite as two G functions:', fac, po, g1, g2)
            res = S(0)
            for C1, s1, f1 in g1:
                for C2, s2, f2 in g2:
                    r = _rewrite_saxena(fac*C1*C2, po*x**(s1 + s2),
                                        f1, f2, x, full_pb)
                    if r is None:
                        _debug('Non-rational exponents.')
                        return
                    C, f1_, f2_ = r
                    _debug('Saxena subst for yielded:', C, f1_, f2_)
                    cond = And(cond, _check_antecedents(f1_, f2_, x))
                    res += C*_int0oo(f1_, f2_, x)
            _debug('Result before branch substitutions is:', res)
            cond = _my_unpolarify(cond)
            if cond is False:
                _debug('But cond is always False (full_pb=%s).' % full_pb)
            else:
                if only_double:
                    return res, cond
                return _my_unpolarify(hyperexpand(res)), cond
def meijerint_inversion(f, x, t):
    r"""
    Compute the inverse laplace transform
    :math:`\int_{c+i\infty}^{c-i\infty} f(x) e^{tx} dx`,
    for real c larger than the real part of all singularities of f.
    Note that ``t`` is always assumed real and positive.
    Return None if the integral does not exist or could not be evaluated.
    >>> from sympy.abc import x, t
    >>> from sympy.integrals.meijerint import meijerint_inversion
    >>> meijerint_inversion(1/x, x, t)
    Heaviside(t)
    """
    from sympy import I, Integral, exp, expand, log, Add, Mul, Heaviside
    f_ = f
    t_ = t
    t = Dummy('t', polar=True)  # We don't want sqrt(t**2) = abs(t) etc
    f = f.subs(t_, t)
    c = Dummy('c')
    _debug('Laplace-inverting', f)
    if not _is_analytic(f, x):
        _debug('But expression is not analytic.')
        return None
    # We filter out exponentials here. If we are given an Add this will not
    # work, but the calling code will take care of that.
    shift = 0
    if f.is_Mul:
        args = list(f.args)
        newargs = []
        exponentials = []
        while args:
            arg = args.pop()
            if isinstance(arg, exp):
                arg2 = expand(arg)
                if arg2.is_Mul:
                    args += arg2.args
                    continue
                try:
                    a, b = _get_coeff_exp(arg.args[0], x)
                except _CoeffExpValueError:
                    b = 0
                if b == 1:
                    # exp(a*x) corresponds to shifting t by a
                    # (time-shift theorem).
                    exponentials.append(a)
                else:
                    newargs.append(arg)
            elif arg.is_Pow:
                arg2 = expand(arg)
                if arg2.is_Mul:
                    args += arg2.args
                    continue
                if not arg.base.has(x):
                    try:
                        a, b = _get_coeff_exp(arg.exp, x)
                    except _CoeffExpValueError:
                        b = 0
                    if b == 1:
                        # base**(a*x) == exp(a*log(base)*x): also a shift.
                        exponentials.append(a*log(arg.base))
                    else:
                        # BUG FIX: previously the factor was appended to
                        # newargs unconditionally, so a base**(a*x) factor
                        # was double-counted (kept in the integrand AND
                        # applied as a shift). Keep it only when it was
                        # not absorbed into the shift.
                        newargs.append(arg)
                else:
                    newargs.append(arg)
            else:
                newargs.append(arg)
        shift = Add(*exponentials)
        f = Mul(*newargs)
    gs = _rewrite1(f, x)
    if gs is not None:
        fac, po, g, cond = gs
        _debug('Could rewrite as single G function:', fac, po, g)
        res = S(0)
        for C, s, f in g:
            C, f = _rewrite_inversion(fac*C, po*x**s, f, x)
            res += C*_int_inversion(f, x, t)
            # Accumulate the existence conditions of every term.
            cond = And(cond, _check_antecedents_inversion(f, x))
        cond = _my_unpolarify(cond)
        if cond is False:
            _debug('But cond is always False.')
        else:
            _debug('Result before branch substitution:', res)
            res = _my_unpolarify(hyperexpand(res))
            if not res.has(Heaviside):
                # The inverse transform of an analytic function vanishes for
                # t < 0; make that explicit.
                res *= Heaviside(t)
            res = res.subs(t, t + shift)
            if not isinstance(cond, bool):
                cond = cond.subs(t, t + shift)
            return Piecewise((res.subs(t, t_), cond),
                             (Integral(f_*exp(x*t),
                                       (x, c - oo*I, c + oo*I)).subs(t, t_),
                              True))
# --- dataset concatenation artifact: boundary between meijerint.py and a generated MIB module ---
# python version 1.0 DO NOT EDIT
#
# Generated by smidump version 0.4.8:
#
#   smidump -f python PDU2-MIB
# Name of the MIB file this module was generated from.
FILENAME = "pdu2_mib.mib"
MIB = {
"moduleName" : "PDU2-MIB",
"PDU2-MIB" : {
"nodetype" : "module",
"language" : "SMIv2",
"organization" :
"""Raritan""",
"contact" :
"""
Author: Raritan Americas, Inc.
postal: Raritan Americas, Inc.
400 Cottontail Lane
Somerset, NJ 08873
email: tech@raritan.com
phone: +1 732 764 8886""",
"description" :
"""This MIB describes the SNMP functions of the Dominion PX G2
Power Distribution Unit by Raritan Computer.""",
"revisions" : (
{
"date" : "2016-02-09 00:00",
"description" :
"""PX2 release 3.2.20:
1) Added the following:
NetworkInterfaceTypeEnumeration,
networkInterfaceType to unitConfigurationTable
2) Added AddressSourceEnumeration
3) Added activeDNS ServerCount to unitConfigurationTable
4) Added activeDNSServerTable""",
},
{
"date" : "2015-10-26 00:00",
"description" :
"""PX2 release 3.2.10:
1) Added to trapInformation:
phoneNumber
Added the following traps:
smsMessageTransmissionFailure trap""",
},
{
"date" : "2015-09-30 00:00",
"description" :
"""PX2 release 3.2.1:
- Support for PMC/BCM2
- Line and node information for component poles
1) SensorTypeEnumeration: Added
reactivePower(29)
displacementPowerFactor(35)
2) SensorUnitsEnumeration: Added
var(23)
3) ProductTypeEnumeration: Added
powerMeter(3)
4) Added the following enumerations:
PanelLayoutEnumeration
PanelNumberingEnumeration
CircuitTypeEnumeration
PhaseEnumeration
LineEnumeration
PowerMeterTypeEnumeration
5) Added the following tables:
inletPoleConfigurationTable
overCurrentProtectorPoleConfigurationTable
outletPoleConfigurationTable
transferSwitchPoleConfigurationTable
powerMeterConfigurationTable
circuitConfigurationTable
circuitPoleConfigurationTable
circuitSensorConfigurationTable
circuitPoleSensorConfigurationTable
circuitSensorLogTable
circuitPoleSensorLogTable
circuitSensorMeasurementsTable
circuitPoleSensorMeasurementsTable
circuitSensorControlTable
6) unitConfigurationTable: Added
circuitCount
7) inletDeviceCapabilities: Added
reactivePower(28)
8) inletPoleCapabilities: Added
phaseAngle(23)
reactivePower(28)
displacementPowerFactor(34)
9) overCurrentProtectorConfigurationTable: Added
overCurrentProtectorPoleCount
10) transferSwitchConfigurationTable: Added
transferSwitchPoleCount
11) Added the following to trapInformation:
circuitNumber
circuitPoleNumber
12) Added the following traps:
circuitSensorStateChange
circuitPoleSensorStateChange
circuitAdded
circuitDeleted
circuitModified
circuitSensorReset
powerMeterAdded
powerMeterDeleted
powerMeterModified
13) Revised the DESCRIPTION clauses of:
pduCount
pduId
inletPoleCount""",
},
{
"date" : "2015-02-18 00:00",
"description" :
"""PX2 release 3.1.0:
- Support absolute humidity sensors
1) SensorStateEnumeration: Added
nonRedundant(30)
2) SensorTypeEnumeration: Added
absoluteHumidity(28)
3) SensorUnitsEnumeration: Added
grampercubicmeter(22)
4) Corrected the possible states for the rcmState, operatingState and activeInlet sensors
5) Documented that the unitSensorResetValue object in unitSensorControlTable applies
only to multi-inlet PDUs
6) Deprecated inletRatedVA, inletRatedFrequency
7) inletDeviceCapabilities: Corrected bit values for residualCurrent(25)
and rcmState(26)""",
},
{
"date" : "2014-06-04 00:00",
"description" :
"""PX2 release 3.0.0:
- Support for signed sensor readings
- Support for unit-level power and energy sensors
- Send trap on peripheral device firmware update
- Allow peripheral device auto management to be disabled
- Allow front panel outlet switching to be enabled or disabled
- Support DX-PIR occupancy sensor
1) Changed the SYNTAX from Integer32 to Unsigned32:
measurementsUnitSensorValue
2) Added signed value, range and threshold columns to:
unitSensorConfigurationTable
unitSensorMeasurementsTable
pduSensorStateChange
inletSensorConfigurationTable
inletSensorMeasurementsTable
inletSensorLogTable
inletSensorStateChange
inletPoleSensorConfigurationTable
inletPoleSensorMeasurementsTable
inletPoleSensorLogTable
inletPoleSensorStateChange
overCurrentProtectorSensorConfigurationTable
overCurrentProtectorSensorMeasurementsTable
overCurrentProtectorSensorLogTable
overCurrentProtectorSensorStateChange
outletSensorConfigurationTable
outletSensorMeasurementsTable
outletSensorLogTable
outletSensorStateChange
outletPoleSensorConfigurationTable
outletPoleSensorMeasurementsTable
outletPoleSensorLogTable
outletPoleSensorStateChange
3) Added unsigned value, range and threshold columns to:
unitSensorLogTable
transferSwitchSensorConfigurationTable
transferSwitchSensorMeasurementsTable
transferSwitchSensorLogTable
transferSwitchSensorStateChange
4) Added UnitSensorControlTable
5) Added unitSensorReset and unitSensorStateChange traps
6) Deprecated pduSensorStateChange trap
7) Added to unitConfigurationTable:
peripheralDevicesAutoManagement
frontPanelOutletSwitching
frontPanelRCMSelfTest
frontPanelActuatorControl
8) Added to externalSensorConfigurationTable:
externalSensorAlarmedToNormalDelay
9) Deprecated:
wireCount in unitConfiguraionTable
tables for wireSensors
traps for wireSensors
10) SensorStateEnumeration: Added
fail(14)
11) ProductTypeEnumeration: Renamed
rackSts(2) to transferSwitch
12) Added:
PeripheralDeviceFirmwareUpdateStateEnumeration
peripheralDeviceFirmwareUpdateState
peripheralDeviceFirmwareUpdate trap
13) Added userName to the following traps:
bulkConfigurationSaved
bulkConfigurationCopied
lhxSupportChanged
deviceSettingsSaved
deviceSettingsRestored""",
},
{
"date" : "2014-01-09 00:00",
"description" :
"""PX2 release 2.6.0:
- Support for PX3TS transfer switches
1) SensorTypeEnumeration:
Removed:
scrOpenStatus
scrShortStatus
Added:
i1SmpsStatus(46)
i2SmpsStatus(47)
switchStatus(48)
2) SensorStateEnumeration:
Removed:
marginal
fail
Added:
i1OpenFault(22)
i1ShortFault(23)
i2OpenFault(24)
i2ShortFault(25)
fault(26)
warning(27)
critical(28)
3) unitDeviceCapabilities: Added
i1SmpsStatus(45)
i2SmpsStatus(46)
4) transferSwitchCapabilities: Added
switchStatus(47)
5) Added transferSwitchConfiguration table
6) unitSensorLogTable:
Removed:
logUnitSensorAvgValue
logUnitSensorMaxValue
logUnitSensorMinValue
Added:
logUnitSensorSignedAvgValue
logUnitSensorSignedMaxValue
logUnitSensorSignedMinValue""",
},
{
"date" : "2014-01-07 00:00",
"description" :
"""PX2 release 2.5.30:
- Accumulating sensors (energy counters) can be reset
- Sensor accuray and tolerance variables are deprecated
1) Added peripheralDevicePackagePosition and peripheralDevicePackageState
2) Added radiusError trap
3) Added serverReachabilityError trap
4) Deprecated the following:
unitSensorConfigurationTable/unitSensorAccuracy
unitSensorConfigurationTable/unitSensorTolerance
inletSensorConfigurationTable/inletSensorAccuracy
inletSensorConfigurationTable/inletSensorTolerance
inletPoleSensorConfigurationTable/inletPoleSensorAccuracy
inletPoleSensorConfigurationTable/inletPoleSensorTolerance
outletSensorConfigurationTable/outetSensorAccuracy
outletSensorConfigurationTable/outletSensorTolerance
outletPoleSensorConfigurationTable/outetPoleSensorAccuracy
outletPoleSensorConfigurationTable/outletPoleSensorTolerance
overCurrentProtectorSensorConfigurationTable/overCurrentProtectorSensorAccuracy
overCurrentProtectorSensorConfigurationTable/overCurrentProtectorSensorTolerance
externalSensorConfigurationTable/externalSensorAccuracy
externalSensorConfigurationTable/externalSensorTolerance
wireSensorConfigurationTable/wireSensorAccuracy
wireSensorConfigurationTable/wireSensorTolerance
transferSwitchSensorConfigurationTable/transferSwitchSensorAccuracy
transferSwitchSensorConfigurationTable/transferSwitchSensorTolerance
5) Added inletSensorReset and outletSensorReset traps
6) Added inletSensorControl and inletSensorControlTable
7) Added outletSensorControl and outletSensorControlTable
8) Added unknownPeripheralDeviceAttached trap""",
},
{
"date" : "2013-11-21 00:00",
"description" :
"""PX2 release 2.5.20:
- Support for residual current monitors
- Support for USB cascading with one IP address
- Support for line-neutral voltage sensors
1) SensorTypeEnumeration: Added
rmsVoltageLN(25)
residualCurrent(26)
rcmState(27)
2) SensorStateEnumeration: Added
warning(27)
critical(28)
selfTest(29)
3) inletDeviceCapabilities: Added
residualCurrent(26)
rcmState(27)
4) Added rmsVoltageLN(24) to inletPoleCapabilities
5) Added inletRCMResidualOperatingCurrent to inletConfigurationTable
6) Added rcmControl under control
7) Added rcmSelfTestTable under rcmControl
8) Added DeviceCascadeTypeEnumeration
9) Added deviceCascadeType, deviceCascadePosition to unitConfigurationTable
10) Added agentInetPortNumber under trapInformation
11) Added agentInetPortNumber as a varbind to all traps
12) Added peripheralDevicePackageTable containing information on peripheral
device packages""",
},
{
"date" : "2013-09-18 00:00",
"description" :
"""
1) Added serverConnectivityUnrecoverable trap""",
},
{
"date" : "2013-08-01 00:00",
"description" :
"""
1) Add RCBO OCP types""",
},
{
"date" : "2013-07-10 00:00",
"description" :
"""
1) Added externalSensorTypeDefaultThresholdsTable """,
},
{
"date" : "2013-07-02 00:00",
"description" :
"""
1) Added relayBehaviorOnPowerLoss to unitConfigurationTable""",
},
{
"date" : "2013-05-21 00:00",
"description" :
"""
1) Added inletEnableState to inletConfigurationTable
2) Added traps: inletEnabled and inletDisabled""",
},
{
"date" : "2013-04-26 00:00",
"description" :
"""
1) Added traps: webcamInserted and webcamRemoved
2) Added trapInformation parameters: webcamModel,webcamConnectionPort""",
},
{
"date" : "2013-03-27 00:00",
"description" :
"""
1) Changed outletSource to outletPowerSource in outletConfigurationTable
2) Changed transferSwitchSource1 and transferSwitchSource2 to
transferSwitchPowerSource1 and transferSwitchPowerSource2 in transferSwitchConfigurationTable
3) Changed overCurrentProtectorSource to overCurrentProtectorPowerSource in overCurrentProtectorConfigurationTable
4) Changed wireSource to wirePowerSource in wireConfigurationTable""",
},
{
"date" : "2013-03-25 10:00",
"description" :
"""
1) Added comments showing the possible states for each sensor type. """,
},
{
"date" : "2013-03-25 00:00",
"description" :
"""
1) Added outletSource to outletConfigurationTable
2) Added transferSwitchSource1 and transferSwitchSource2 to transferSwitchConfigurationTable
3) Added overCurrentProtectorSource to overCurrentProtectorConfigurationTable
4) Added wireSource to wireConfigurationTable""",
},
{
"date" : "2013-03-18 00:00",
"description" :
""" 1) Added meteringControllerCount to the unitConfigurationTable
2) Added meteringController to BoardTypeEnumeration""",
},
{
"date" : "2013-02-25 00:00",
"description" :
"""
1) Added ProductTypeEnumeration
2) Added productType to unitConfigurationTable""",
},
{
"date" : "2013-02-04 00:00",
"description" :
""" 1) Added TransferSwitchTransferReasonEnumeration
2) Added transferSwitchLastTransferReason to transferSwitchControlTable
3) Added transferSwitchLastTransferReason to transferSwitchSensorStateChange trap""",
},
{
"date" : "2013-01-24 00:00",
"description" :
"""Added required sensor types and units""",
},
{
"date" : "2012-11-20 00:00",
"description" :
"""1) Added externalSensorIsActuator and externalSensorPosition to the externalSensorConfigurationTable
2) Added actuatorControlTable""",
},
{
"date" : "2012-11-15 00:00",
"description" :
"""1) Removed transferSwitchOutputCapabilities from transferSwitchConfigurationTable
2) Removed the following tables:
transferSwitchOutputSensorConfigurationTable
transferSwitchOutputSensorLogTable
transferSwitchOutputSensorMeasurementsTable
3) Removed transferSwitchOutputSensorStateChange trap
4) Added transferSwitchControlTable
5) Removed the following entries from SensorTypeEnumeration:
overTemperatureFault
fans
internalFault
inletPhaseDeviationFault
overloadFault
6) Added the following entries to SensorTypeEnumeration:
overloadStatus
overheatStatus
scrOpenStatus
scrShortStatus
fanStatus
inletPhaseSyncAngle
inletPhaseSync
7) Added the following entries to SensorStateEnumeration:
inSync,
outOfSync
8) Renamed transferSwitchNoTransferIfPhaseDeviationFault to
transferSwitchAutoReTransferRequiresPhaseSync""",
},
{
"date" : "2012-10-05 00:00",
"description" :
"""1) Modified the DESCRIPTION of the following.
outletSwitchControlTable
transferSwitchOutputSensorMeasurementsEntry
overCurrentProtectorSensorMeasurementsEntry
outletPoleSensorMeasurementsEntry
transferSwitchOutputSensorLogEntry
transferSwitchOutputSensorLogTable
wireSensorLogEntry
externalSensorNumber
controllerConfigurationEntry
SensorUnitsEnumeration
measurementsGroup
logGroup""",
},
{
"date" : "2012-10-04 00:00",
"description" :
"""1) In the transferSwitchConfigurationTable,
replaced transferSwitchFrequencyDeviation with
transferSwitchLowerMarginalFrequency and transferSwitchUpperMarginalFrequency""",
},
{
"date" : "2012-09-28 00:00",
"description" :
"""1) Modified the DESCRIPTION of the following.
bulkConfigurationCopied, userModified, userSessionTimeout""",
},
{
"date" : "2012-09-21 00:00",
"description" :
"""1) Added the following traps:
deviceSettingsSaved, deviceSettingsRestored""",
},
{
"date" : "2012-09-20 00:00",
"description" :
"""1) Added the following objects to the transferSwitchConfigurationTable:
transferSwitchInternalFaultType
2) Added transferSwitchInternalFaultType to transferSwitchSensorStateChange trap
3) Added marginal to SensorStateEnumeration""",
},
{
"date" : "2012-09-17 00:00",
"description" :
"""Deprecated the following objects from the unitConfigurationTable
pxInetAddressType,
pxInetIPAddress,
pxInetNetmask,
pxInetGateway """,
},
{
"date" : "2012-09-04 00:00",
"description" :
"""Support for transfer Switch objects and sensors.
1. Added transferSwitchCount to unitConfigurationTable
2. Added the following tables:
transferSwitchConfigurationTable,
transferSwitchSensorConfigurationTable,
transferSwitchSensorLogTable,
transferSwitchSensorMeasurementsTable
transferSwitchOutputSensorConfigurationTable,
transferSwitchOutputSensorLogTable,
transferSwitchOutputSensorMeasurementsTable
3. Added
transferSwitchSensorStateChange trap
transferSwitchOutputSensorStateChange trap""",
},
{
"date" : "2012-06-22 00:00",
"description" :
"""
1. Added surgeProtectorStatus to SensorTypeEnumeration
2. Added surgeProtectorStatus to inletDeviceCapabilities""",
},
{
"date" : "2012-06-18 00:00",
"description" :
"""Added a comment before the section listing the traps.
The comment notes that the pxInetIPAddressType and
pxInetIPAddress fields are not used for IPv6 traps""",
},
{
"date" : "2012-06-06 00:00",
"description" :
"""Support for wire objects and sensors.
1. Added wireCount to unitConfigurationTable
2. Added the following tables:
wireConfigurationTable,
wireSensorConfigurationTable,
wireSensorLogTable,
wireSensorMeasurementsTable
3. Added wireSensorStateChange trap""",
},
{
"date" : "2012-05-25 00:00",
"description" :
"""added userAccepted/DeclinedRestrictedServiceAgreement traps""",
},
{
"date" : "2012-05-15 00:00",
"description" :
"""
1. Added support for NTP servers.
Added the following objects under info
synchronizeWithNTPServer, useDHCPProvidedNTPServer,
firstNTPServerAddressType, firstNTPServerAddress,
secondNTPServerAddressType, secondNTPServerAddress""",
},
{
"date" : "2012-03-26 00:00",
"description" :
"""added lhxSupportChanged trap.""",
},
{
"date" : "2011-12-13 00:00",
"description" :
"""
1. Added usbSlaveConnected, usbSlaveDisonnected traps""",
},
{
"date" : "2011-11-29 00:00",
"description" :
"""
1. Added cascadedDeviceConnected to UnitConfigurationEntryStruct""",
},
{
"date" : "2011-10-25 00:00",
"description" :
"""
1. Added DeviceIdentificationParameterEnumeration
2. Added deviceIdentificationChanged Trap
3. Added sysContact, sysName, sysLocation to all traps""",
},
{
"date" : "2011-06-16 00:00",
"description" :
"""
1. Changed DESCRIPTION of outletSequencingDelay""",
},
{
"date" : "2011-03-22 00:00",
"description" :
"""
1. Added rfCodeTagConnected, rfCodeTagDisconnected traps
2. Changed MAX-ACCESS for externalOnOffSensorSubtype to read-write""",
},
{
"date" : "2011-02-21 00:00",
"description" :
"""
1. Added rpm(19) to SensorUnitsEnumeration""",
},
{
"date" : "2011-02-14 00:00",
"description" :
"""
1. Changed 5WIRE IEC60309 enumerations from 250V to 208V""",
},
{
"date" : "2011-02-08 00:00",
"description" :
"""
1. Removed OnOffSensorSubtypeEnumeration
2. Changed SYNTAX of externalOnOffSensorSubtype to SensorTypeEnumeration
3. Added binary, contact, fanspeed, none to SensorTypeEnumeration
4. Changed outletPoleCapabilities to be the same as inletPoleCapabilities""",
},
{
"date" : "2011-02-03 00:00",
"description" :
"""
1. Added externalSensorSerialNumber,externalOnOffSensorSubtype, externalSensorChannelNumber
to the externalSensorStateChange trap.""",
},
{
"date" : "2011-01-31 00:00",
"description" :
"""
1. Modifed the DESCRIPTION of the powerControl trap""",
},
{
"date" : "2010-12-15 00:00",
"description" :
"""
1. Added dataLoggingEnableForAllSensors to logConfigurationTable""",
},
{
"date" : "2010-12-13 11:31",
"description" :
"""
1. Added inrushGuardDelay to unitConfigurationTable
2. Added outletSequenceDelay to outletConfigurationTable
3. Deprecated outletSequencingDelay""",
},
{
"date" : "2010-12-13 00:00",
"description" :
"""1. Added externalOnOffSensorSubtype to ExternalSensorConfigurationEntryStruct
2. Added OnOffSensorSubtypeEnumeration
3. Added alarmed to SensorStateEnumeration
4. Removed firmwareFileDiscarded trap
5. Removed securityViolation trap""",
},
{
"date" : "2010-12-07 00:00",
"description" :
"""1. changed DESCRIPTION of measurementPeriod to say that the value is fixed at 1 second.""",
},
{
"date" : "2010-10-07 00:00",
"description" :
"""1. added ocpFUSEPAIR(5) to OverCurrentProtectorTypeEnumeration
2. changed ocpFUSE1POLE(4) to ocpFUSE(4) in OverCurrentProtectorTypeEnumeration""",
},
{
"date" : "2010-10-04 00:00",
"description" :
"""1. added ocpFUSE2POLE(5) to OverCurrentProtectorTypeEnumeration
2. changed ocpFUSE(4) to ocpFUSE1POLE(4) in OverCurrentProtectorTypeEnumeration""",
},
{
"date" : "2010-09-01 00:00",
"description" :
"""1. Removed userName from serverNotReachable and serverReachable traps""",
},
{
"date" : "2010-08-05 00:00",
"description" :
"""1. Added reliabilityDataTableSequenceNumber
2. Changed SYNTAX of reliabilityErrorLogIndex to Integer32(1..2147483647)""",
},
{
"date" : "2010-07-23 00:00",
"description" :
"""1. Moved serverCount to unitConfigurationTable """,
},
{
"date" : "2010-07-22 00:00",
"description" :
"""1. Added support for the Reliability Tables
2. Added new group reliabilityGroup
3. Defined nodes reliability,reliabilityData,reliabilityErrorLog
4. Added reliabilityDataTable & reliabilityErrorLogTable""",
},
{
"date" : "2010-07-21 00:00",
"description" :
"""1. Added plug56PA320 to PlugTypeEnumeration
2. Added plug56P320F to PlugTypeEnumeration""",
},
{
"date" : "2010-07-14 00:00",
"description" :
"""1. Added the following traps:
pingServerEnabled, pingServerDisabled, serverNotReachable, serverReachable
2. Added the serverReachabilityTable """,
},
{
"date" : "2010-07-06 00:00",
"description" :
"""1. Added externalSensorChannelNumber to externalSensorConfigurationTable""",
},
{
"date" : "2010-07-01 00:00",
"description" :
"""1. added outletSwitchingState to outletSwitchControlTable
2. added outletSwitchingTimeStamp to outletSwitchControlTable""",
},
{
"date" : "2010-06-30 00:00",
"description" :
"""1. added switchingOperation to the powerControl trap""",
},
{
"date" : "2010-06-21 00:00",
"description" :
"""1. added support for Load shedding
2. added loadShedding to the unitConfigurationTable.
3. added nonCritical to the outletConfigurationTable
4. added loadSheddingModeEntered & loadSheddingModeExited traps
5. modified description of inletPlug in inletConfigurationTable""",
},
{
"date" : "2010-06-03 00:00",
"description" :
"""1. added plugOTHER to PlugTypeEnumeration
2. added receptacleOTHER to ReceptacleTypeEnumeration
3. added inletPlugDescriptor to inletConfigurationTable
4. added outletReceptacleDescriptor to outletConfigurationTable""",
},
{
"date" : "2010-05-27 00:00",
"description" :
"""1. added INetAddressType and INetAddress to represent IP addresses
2. unitConfigurationTable: deprecated pxIpAddress
3. unitConfigurationTable: added pxInetAddressType,pxInetIPAddress,pxInetNetmask,pxInetGateway
2: added pxInetAddressType,pxInetIPAddress to all traps
3: defined new trap deviceUpdateFailed""",
},
{
"date" : "2010-05-24 00:00",
"description" :
"""Added typeOfSensor to externalSensorStateChange trap""",
},
{
"date" : "2010-04-19 00:00",
"description" :
"""modified the DESCRIPTION of the deviceUpdateCompleted trap""",
},
{
"date" : "2010-04-15 00:00",
"description" :
"""modified the DESCRIPTION of all SensorStateChangeDelay parameters""",
},
{
"date" : "2010-04-08 00:00",
"description" :
"""modified the DESCRIPTION of sensor parameters that do not apply to discrete sensors""",
},
{
"date" : "2010-03-29 00:00",
"description" :
"""added trap ldapError""",
},
{
"date" : "2010-03-25 00:00",
"description" :
"""changed the SYNTAX of the following objects from Integer32 to Unsigned32:
logInletSensorMaxValue,
logInletSensorMinValue,
logInletSensorAvgValue,
logInletPoleSensorMaxValue,
logInletPoleSensorMinValue,
logInletPoleSensorAvgValue,
logOutletSensorMaxValue,
logOutletSensorMinValue,
logOutletSensorAvgValue,
logOutletPoleSensorMaxValue,
logOutlePoletSensorMinValue,
logOutletPoleSensorAvgValue,
logOverCurrentProtectorSensorMaxValue,
logOverCurrentProtectorSensorMinValue,
logOverCurrentProtectorSensorAvgValue,
measurementsInletSensorValue,
measurementsInletPoleSensorValue,
measurementsOutletSensorValue,
measurementsOutletPoleSensorValue,
measurementsOverCurrentProtectorSensorValue""",
},
{
"date" : "2010-03-16 00:00",
"description" :
"""added trap smtpMessageTransmissionFailure""",
},
{
"date" : "2010-03-01 00:00",
"description" :
"""changed externalSensorsZCoordinateUnits to an Enumeration""",
},
{
"date" : "2010-01-29 00:00",
"description" :
"""The first version of the MIB.""",
},
),
"identity node" : "raritan",
},
"imports" : (
{"module" : "SNMPv2-SMI", "name" : "MODULE-IDENTITY"},
{"module" : "SNMPv2-SMI", "name" : "OBJECT-TYPE"},
{"module" : "SNMPv2-SMI", "name" : "NOTIFICATION-TYPE"},
{"module" : "SNMPv2-SMI", "name" : "enterprises"},
{"module" : "SNMPv2-SMI", "name" : "Integer32"},
{"module" : "SNMPv2-SMI", "name" : "Unsigned32"},
{"module" : "SNMPv2-SMI", "name" : "IpAddress"},
{"module" : "SNMPv2-CONF", "name" : "MODULE-COMPLIANCE"},
{"module" : "SNMPv2-CONF", "name" : "OBJECT-GROUP"},
{"module" : "SNMPv2-CONF", "name" : "NOTIFICATION-GROUP"},
{"module" : "SNMPv2-TC", "name" : "TEXTUAL-CONVENTION"},
{"module" : "SNMPv2-TC", "name" : "DisplayString"},
{"module" : "SNMPv2-TC", "name" : "MacAddress"},
{"module" : "SNMPv2-TC", "name" : "TruthValue"},
{"module" : "SNMPv2-TC", "name" : "RowPointer"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetAddressType"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetAddress"},
{"module" : "INET-ADDRESS-MIB", "name" : "InetPortNumber"},
{"module" : "RFC1213-MIB", "name" : "sysContact"},
{"module" : "RFC1213-MIB", "name" : "sysName"},
{"module" : "RFC1213-MIB", "name" : "sysLocation"},
),
"typedefs" : {
"SensorTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"rmsCurrent" : {
"nodetype" : "namednumber",
"number" : "1"
},
"peakCurrent" : {
"nodetype" : "namednumber",
"number" : "2"
},
"unbalancedCurrent" : {
"nodetype" : "namednumber",
"number" : "3"
},
"rmsVoltage" : {
"nodetype" : "namednumber",
"number" : "4"
},
"activePower" : {
"nodetype" : "namednumber",
"number" : "5"
},
"apparentPower" : {
"nodetype" : "namednumber",
"number" : "6"
},
"powerFactor" : {
"nodetype" : "namednumber",
"number" : "7"
},
"activeEnergy" : {
"nodetype" : "namednumber",
"number" : "8"
},
"apparentEnergy" : {
"nodetype" : "namednumber",
"number" : "9"
},
"temperature" : {
"nodetype" : "namednumber",
"number" : "10"
},
"humidity" : {
"nodetype" : "namednumber",
"number" : "11"
},
"airFlow" : {
"nodetype" : "namednumber",
"number" : "12"
},
"airPressure" : {
"nodetype" : "namednumber",
"number" : "13"
},
"onOff" : {
"nodetype" : "namednumber",
"number" : "14"
},
"trip" : {
"nodetype" : "namednumber",
"number" : "15"
},
"vibration" : {
"nodetype" : "namednumber",
"number" : "16"
},
"waterDetection" : {
"nodetype" : "namednumber",
"number" : "17"
},
"smokeDetection" : {
"nodetype" : "namednumber",
"number" : "18"
},
"binary" : {
"nodetype" : "namednumber",
"number" : "19"
},
"contact" : {
"nodetype" : "namednumber",
"number" : "20"
},
"fanSpeed" : {
"nodetype" : "namednumber",
"number" : "21"
},
"surgeProtectorStatus" : {
"nodetype" : "namednumber",
"number" : "22"
},
"frequency" : {
"nodetype" : "namednumber",
"number" : "23"
},
"phaseAngle" : {
"nodetype" : "namednumber",
"number" : "24"
},
"rmsVoltageLN" : {
"nodetype" : "namednumber",
"number" : "25"
},
"residualCurrent" : {
"nodetype" : "namednumber",
"number" : "26"
},
"rcmState" : {
"nodetype" : "namednumber",
"number" : "27"
},
"absoluteHumidity" : {
"nodetype" : "namednumber",
"number" : "28"
},
"reactivePower" : {
"nodetype" : "namednumber",
"number" : "29"
},
"other" : {
"nodetype" : "namednumber",
"number" : "30"
},
"none" : {
"nodetype" : "namednumber",
"number" : "31"
},
"powerQuality" : {
"nodetype" : "namednumber",
"number" : "32"
},
"overloadStatus" : {
"nodetype" : "namednumber",
"number" : "33"
},
"overheatStatus" : {
"nodetype" : "namednumber",
"number" : "34"
},
"displacementPowerFactor" : {
"nodetype" : "namednumber",
"number" : "35"
},
"fanStatus" : {
"nodetype" : "namednumber",
"number" : "37"
},
"inletPhaseSyncAngle" : {
"nodetype" : "namednumber",
"number" : "38"
},
"inletPhaseSync" : {
"nodetype" : "namednumber",
"number" : "39"
},
"operatingState" : {
"nodetype" : "namednumber",
"number" : "40"
},
"activeInlet" : {
"nodetype" : "namednumber",
"number" : "41"
},
"illuminance" : {
"nodetype" : "namednumber",
"number" : "42"
},
"doorContact" : {
"nodetype" : "namednumber",
"number" : "43"
},
"tamperDetection" : {
"nodetype" : "namednumber",
"number" : "44"
},
"motionDetection" : {
"nodetype" : "namednumber",
"number" : "45"
},
"i1smpsStatus" : {
"nodetype" : "namednumber",
"number" : "46"
},
"i2smpsStatus" : {
"nodetype" : "namednumber",
"number" : "47"
},
"switchStatus" : {
"nodetype" : "namednumber",
"number" : "48"
},
"description" :
"""The types a sensor can be.""",
},
"SensorStateEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"unavailable" : {
"nodetype" : "namednumber",
"number" : "-1"
},
"open" : {
"nodetype" : "namednumber",
"number" : "0"
},
"closed" : {
"nodetype" : "namednumber",
"number" : "1"
},
"belowLowerCritical" : {
"nodetype" : "namednumber",
"number" : "2"
},
"belowLowerWarning" : {
"nodetype" : "namednumber",
"number" : "3"
},
"normal" : {
"nodetype" : "namednumber",
"number" : "4"
},
"aboveUpperWarning" : {
"nodetype" : "namednumber",
"number" : "5"
},
"aboveUpperCritical" : {
"nodetype" : "namednumber",
"number" : "6"
},
"on" : {
"nodetype" : "namednumber",
"number" : "7"
},
"off" : {
"nodetype" : "namednumber",
"number" : "8"
},
"detected" : {
"nodetype" : "namednumber",
"number" : "9"
},
"notDetected" : {
"nodetype" : "namednumber",
"number" : "10"
},
"alarmed" : {
"nodetype" : "namednumber",
"number" : "11"
},
"ok" : {
"nodetype" : "namednumber",
"number" : "12"
},
"fail" : {
"nodetype" : "namednumber",
"number" : "14"
},
"yes" : {
"nodetype" : "namednumber",
"number" : "15"
},
"no" : {
"nodetype" : "namednumber",
"number" : "16"
},
"standby" : {
"nodetype" : "namednumber",
"number" : "17"
},
"one" : {
"nodetype" : "namednumber",
"number" : "18"
},
"two" : {
"nodetype" : "namednumber",
"number" : "19"
},
"inSync" : {
"nodetype" : "namednumber",
"number" : "20"
},
"outOfSync" : {
"nodetype" : "namednumber",
"number" : "21"
},
"i1OpenFault" : {
"nodetype" : "namednumber",
"number" : "22"
},
"i1ShortFault" : {
"nodetype" : "namednumber",
"number" : "23"
},
"i2OpenFault" : {
"nodetype" : "namednumber",
"number" : "24"
},
"i2ShortFault" : {
"nodetype" : "namednumber",
"number" : "25"
},
"fault" : {
"nodetype" : "namednumber",
"number" : "26"
},
"warning" : {
"nodetype" : "namednumber",
"number" : "27"
},
"critical" : {
"nodetype" : "namednumber",
"number" : "28"
},
"selfTest" : {
"nodetype" : "namednumber",
"number" : "29"
},
"nonRedundant" : {
"nodetype" : "namednumber",
"number" : "30"
},
"description" :
"""The states a sensor can be in.""",
},
"PlugTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"plugOTHER" : {
"nodetype" : "namednumber",
"number" : "-1"
},
"plugNONE" : {
"nodetype" : "namednumber",
"number" : "0"
},
"plug56P320" : {
"nodetype" : "namednumber",
"number" : "1"
},
"plug56P520" : {
"nodetype" : "namednumber",
"number" : "2"
},
"plug56P532" : {
"nodetype" : "namednumber",
"number" : "3"
},
"plugCS8365C" : {
"nodetype" : "namednumber",
"number" : "4"
},
"plugIEC320C14" : {
"nodetype" : "namednumber",
"number" : "5"
},
"plugIEC320C20" : {
"nodetype" : "namednumber",
"number" : "6"
},
"plugIEC603093WIRE250V16A" : {
"nodetype" : "namednumber",
"number" : "7"
},
"plugIEC603093WIRE250V20A" : {
"nodetype" : "namednumber",
"number" : "8"
},
"plugIEC603093WIRE250V30A" : {
"nodetype" : "namednumber",
"number" : "9"
},
"plugIEC603093WIRE250V32A" : {
"nodetype" : "namednumber",
"number" : "10"
},
"plugIEC603093WIRE250V60A" : {
"nodetype" : "namednumber",
"number" : "11"
},
"plugIEC603093WIRE250V63A" : {
"nodetype" : "namednumber",
"number" : "12"
},
"plugIEC603093WIRE250V100A" : {
"nodetype" : "namednumber",
"number" : "13"
},
"plugIEC603093WIRE250V125A" : {
"nodetype" : "namednumber",
"number" : "14"
},
"plugIEC603094WIRE250V20A" : {
"nodetype" : "namednumber",
"number" : "15"
},
"plugIEC603094WIRE250V30A" : {
"nodetype" : "namednumber",
"number" : "16"
},
"plugIEC603094WIRE250V60A" : {
"nodetype" : "namednumber",
"number" : "17"
},
"plugIEC603094WIRE250V100A" : {
"nodetype" : "namednumber",
"number" : "18"
},
"plugIEC603095WIRE208V20A" : {
"nodetype" : "namednumber",
"number" : "23"
},
"plugIEC603095WIRE208V30A" : {
"nodetype" : "namednumber",
"number" : "24"
},
"plugIEC603095WIRE208V60A" : {
"nodetype" : "namednumber",
"number" : "25"
},
"plugIEC603095WIRE208V100A" : {
"nodetype" : "namednumber",
"number" : "26"
},
"plugIEC603095WIRE415V16A" : {
"nodetype" : "namednumber",
"number" : "27"
},
"plugIEC603095WIRE415V32A" : {
"nodetype" : "namednumber",
"number" : "28"
},
"plugIEC603095WIRE415V63A" : {
"nodetype" : "namednumber",
"number" : "29"
},
"plugIEC603095WIRE415V125A" : {
"nodetype" : "namednumber",
"number" : "30"
},
"plugIEC603095WIRE480V20A" : {
"nodetype" : "namednumber",
"number" : "31"
},
"plugIEC603095WIRE480V30A" : {
"nodetype" : "namednumber",
"number" : "32"
},
"plugIEC603095WIRE480V60A" : {
"nodetype" : "namednumber",
"number" : "33"
},
"plugIEC603095WIRE480V100A" : {
"nodetype" : "namednumber",
"number" : "34"
},
"plugNEMA515P" : {
"nodetype" : "namednumber",
"number" : "35"
},
"plugNEMAL515P" : {
"nodetype" : "namednumber",
"number" : "36"
},
"plugNEMA520P" : {
"nodetype" : "namednumber",
"number" : "37"
},
"plugNEMAL520P" : {
"nodetype" : "namednumber",
"number" : "38"
},
"plugNEMAL530P" : {
"nodetype" : "namednumber",
"number" : "39"
},
"plugNEMAL615P" : {
"nodetype" : "namednumber",
"number" : "40"
},
"plugNEMAL620P" : {
"nodetype" : "namednumber",
"number" : "41"
},
"plugNEMAL630P" : {
"nodetype" : "namednumber",
"number" : "42"
},
"plugNEMAL1520P" : {
"nodetype" : "namednumber",
"number" : "43"
},
"plugNEMAL1530P" : {
"nodetype" : "namednumber",
"number" : "44"
},
"plugNEMAL2120P" : {
"nodetype" : "namednumber",
"number" : "45"
},
"plugNEMAL2130P" : {
"nodetype" : "namednumber",
"number" : "46"
},
"plugNEMAL2230P" : {
"nodetype" : "namednumber",
"number" : "47"
},
"plug56P320F" : {
"nodetype" : "namednumber",
"number" : "48"
},
"plug56PA320" : {
"nodetype" : "namednumber",
"number" : "49"
},
"description" :
"""The types of inlet plug.""",
},
"ReceptacleTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"receptacleOTHER" : {
"nodetype" : "namednumber",
"number" : "-1"
},
"receptacleNONE" : {
"nodetype" : "namednumber",
"number" : "0"
},
"receptacleBS1363" : {
"nodetype" : "namednumber",
"number" : "1"
},
"receptacle56P532" : {
"nodetype" : "namednumber",
"number" : "3"
},
"receptacleCS8364C" : {
"nodetype" : "namednumber",
"number" : "4"
},
"receptacleIEC320C13" : {
"nodetype" : "namednumber",
"number" : "5"
},
"receptacleIEC320C19" : {
"nodetype" : "namednumber",
"number" : "6"
},
"receptacleIEC603093WIRE250V16A" : {
"nodetype" : "namednumber",
"number" : "7"
},
"receptacleIEC603093WIRE250V20A" : {
"nodetype" : "namednumber",
"number" : "8"
},
"receptacleIEC603093WIRE250V30A" : {
"nodetype" : "namednumber",
"number" : "9"
},
"receptacleIEC603093WIRE250V32A" : {
"nodetype" : "namednumber",
"number" : "10"
},
"receptacleIEC603093WIRE250V60A" : {
"nodetype" : "namednumber",
"number" : "11"
},
"receptacleIEC603093WIRE250V63A" : {
"nodetype" : "namednumber",
"number" : "12"
},
"receptacleIEC603093WIRE250V100A" : {
"nodetype" : "namednumber",
"number" : "13"
},
"receptacleIEC603093WIRE250V125A" : {
"nodetype" : "namednumber",
"number" : "14"
},
"receptacleIEC603094WIRE250V20A" : {
"nodetype" : "namednumber",
"number" : "15"
},
"receptacleIEC603094WIRE250V30A" : {
"nodetype" : "namednumber",
"number" : "16"
},
"receptacleIEC603094WIRE250V60A" : {
"nodetype" : "namednumber",
"number" : "17"
},
"receptacleIEC603094WIRE250V100A" : {
"nodetype" : "namednumber",
"number" : "18"
},
"receptacleIEC603095WIRE208V20A" : {
"nodetype" : "namednumber",
"number" : "23"
},
"receptacleIEC603095WIRE208V30A" : {
"nodetype" : "namednumber",
"number" : "24"
},
"receptacleIEC603095WIRE208V60A" : {
"nodetype" : "namednumber",
"number" : "25"
},
"receptacleIEC603095WIRE208V100A" : {
"nodetype" : "namednumber",
"number" : "26"
},
"receptacleIEC603095WIRE415V16A" : {
"nodetype" : "namednumber",
"number" : "27"
},
"receptacleIEC603095WIRE415V32A" : {
"nodetype" : "namednumber",
"number" : "28"
},
"receptacleIEC603095WIRE415V63A" : {
"nodetype" : "namednumber",
"number" : "29"
},
"receptacleIEC603095WIRE415V125A" : {
"nodetype" : "namednumber",
"number" : "30"
},
"receptacleIEC603095WIRE480V20A" : {
"nodetype" : "namednumber",
"number" : "31"
},
"receptacleIEC603095WIRE480V30A" : {
"nodetype" : "namednumber",
"number" : "32"
},
"receptacleIEC603095WIRE480V60A" : {
"nodetype" : "namednumber",
"number" : "33"
},
"receptacleIEC603095WIRE480V100A" : {
"nodetype" : "namednumber",
"number" : "34"
},
"receptacleNEMA515R" : {
"nodetype" : "namednumber",
"number" : "35"
},
"receptacleNEMAL515R" : {
"nodetype" : "namednumber",
"number" : "36"
},
"receptacleNEMA520R" : {
"nodetype" : "namednumber",
"number" : "37"
},
"receptacleNEMAL520R" : {
"nodetype" : "namednumber",
"number" : "38"
},
"receptacleNEMAL530R" : {
"nodetype" : "namednumber",
"number" : "39"
},
"receptacleNEMAL615R" : {
"nodetype" : "namednumber",
"number" : "40"
},
"receptacleNEMAL620R" : {
"nodetype" : "namednumber",
"number" : "41"
},
"receptacleNEMAL630R" : {
"nodetype" : "namednumber",
"number" : "42"
},
"receptacleNEMAL1520R" : {
"nodetype" : "namednumber",
"number" : "43"
},
"receptacleNEMAL1530R" : {
"nodetype" : "namednumber",
"number" : "44"
},
"receptacleNEMAL2120RP" : {
"nodetype" : "namednumber",
"number" : "45"
},
"receptacleNEMAL2130R" : {
"nodetype" : "namednumber",
"number" : "46"
},
"receptacleSCHUKOTYPEE" : {
"nodetype" : "namednumber",
"number" : "47"
},
"receptacleSCHUKOTYPEF" : {
"nodetype" : "namednumber",
"number" : "48"
},
"description" :
"""The types of outlet receptacle.""",
},
"OverCurrentProtectorTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"ocpBREAKER1POLE" : {
"nodetype" : "namednumber",
"number" : "1"
},
"ocpBREAKER2POLE" : {
"nodetype" : "namednumber",
"number" : "2"
},
"ocpBREAKER3POLE" : {
"nodetype" : "namednumber",
"number" : "3"
},
"ocpFUSE" : {
"nodetype" : "namednumber",
"number" : "4"
},
"ocpFUSEPAIR" : {
"nodetype" : "namednumber",
"number" : "5"
},
"ocpRCBO2POLE" : {
"nodetype" : "namednumber",
"number" : "6"
},
"ocpRCBO3POLE" : {
"nodetype" : "namednumber",
"number" : "7"
},
"ocpRCBO4POLE" : {
"nodetype" : "namednumber",
"number" : "8"
},
"description" :
"""The types of overcurrent protectors.""",
},
"BoardTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"mainController" : {
"nodetype" : "namednumber",
"number" : "1"
},
"inletController" : {
"nodetype" : "namednumber",
"number" : "2"
},
"outletController" : {
"nodetype" : "namednumber",
"number" : "3"
},
"meteringController" : {
"nodetype" : "namednumber",
"number" : "4"
},
"description" :
"""The types of boards.""",
},
"OutletSwitchingOperationsEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
"cycle" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""The switching operations on an outlet.""",
},
"SensorUnitsEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"none" : {
"nodetype" : "namednumber",
"number" : "-1"
},
"other" : {
"nodetype" : "namednumber",
"number" : "0"
},
"volt" : {
"nodetype" : "namednumber",
"number" : "1"
},
"amp" : {
"nodetype" : "namednumber",
"number" : "2"
},
"watt" : {
"nodetype" : "namednumber",
"number" : "3"
},
"voltamp" : {
"nodetype" : "namednumber",
"number" : "4"
},
"wattHour" : {
"nodetype" : "namednumber",
"number" : "5"
},
"voltampHour" : {
"nodetype" : "namednumber",
"number" : "6"
},
"degreeC" : {
"nodetype" : "namednumber",
"number" : "7"
},
"hertz" : {
"nodetype" : "namednumber",
"number" : "8"
},
"percent" : {
"nodetype" : "namednumber",
"number" : "9"
},
"meterpersec" : {
"nodetype" : "namednumber",
"number" : "10"
},
"pascal" : {
"nodetype" : "namednumber",
"number" : "11"
},
"psi" : {
"nodetype" : "namednumber",
"number" : "12"
},
"g" : {
"nodetype" : "namednumber",
"number" : "13"
},
"degreeF" : {
"nodetype" : "namednumber",
"number" : "14"
},
"feet" : {
"nodetype" : "namednumber",
"number" : "15"
},
"inches" : {
"nodetype" : "namednumber",
"number" : "16"
},
"cm" : {
"nodetype" : "namednumber",
"number" : "17"
},
"meters" : {
"nodetype" : "namednumber",
"number" : "18"
},
"rpm" : {
"nodetype" : "namednumber",
"number" : "19"
},
"degrees" : {
"nodetype" : "namednumber",
"number" : "20"
},
"lux" : {
"nodetype" : "namednumber",
"number" : "21"
},
"grampercubicmeter" : {
"nodetype" : "namednumber",
"number" : "22"
},
"var" : {
"nodetype" : "namednumber",
"number" : "23"
},
"description" :
"""The sensor units.""",
},
"DaisychainMemberTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"standalone" : {
"nodetype" : "namednumber",
"number" : "0"
},
"master" : {
"nodetype" : "namednumber",
"number" : "1"
},
"slave" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""The daisy chain member type.""",
},
"URL" : {
"basetype" : "OctetString",
"status" : "current",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
"format" : "255a",
"description" :
"""A Uniform Resource Locator (URL), as defined in RFC1738.""",
},
"GlobalOutletStateOnStartupEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lastKnownState" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""The global outlet state on device start up; can be overridden per outlet.""",
},
"OutletStateOnStartupEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"off" : {
"nodetype" : "namednumber",
"number" : "0"
},
"on" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lastKnownState" : {
"nodetype" : "namednumber",
"number" : "2"
},
"globalOutletStateOnStartup" : {
"nodetype" : "namednumber",
"number" : "3"
},
"description" :
"""The outlet state on device start up; this overrides the global value.""",
},
"ExternalSensorsZCoordinateUnitsEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"rackUnits" : {
"nodetype" : "namednumber",
"number" : "0"
},
"text" : {
"nodetype" : "namednumber",
"number" : "1"
},
"description" :
"""The units of the external Sensor Z Coordinate.
rackUnits implies that the Z Coordinate for all external sensors
is in rack Units (U)
text implies that the Z Coordinate for all external sensors
is a text string (label) """,
},
"HundredthsOfAPercentage" : {
"basetype" : "Unsigned32",
"status" : "current",
"ranges" : [
{
"min" : "0",
"max" : "10000"
},
],
"range" : {
"min" : "0",
"max" : "10000"
},
"format" : "d",
"description" :
"""Data type for reporting values in hundredths of percentage, i.e. 0.01 %.""",
},
"DeviceIdentificationParameterEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"pduName" : {
"nodetype" : "namednumber",
"number" : "0"
},
"sysContact" : {
"nodetype" : "namednumber",
"number" : "1"
},
"sysName" : {
"nodetype" : "namednumber",
"number" : "2"
},
"sysLocation" : {
"nodetype" : "namednumber",
"number" : "3"
},
"description" :
"""The configurable parameters.""",
},
"TransferSwitchTransferReasonEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"unknown" : {
"nodetype" : "namednumber",
"number" : "0"
},
"startup" : {
"nodetype" : "namednumber",
"number" : "1"
},
"manualTransfer" : {
"nodetype" : "namednumber",
"number" : "2"
},
"automaticReTransfer" : {
"nodetype" : "namednumber",
"number" : "3"
},
"powerFailure" : {
"nodetype" : "namednumber",
"number" : "4"
},
"powerQuality" : {
"nodetype" : "namednumber",
"number" : "5"
},
"overloadAlarm" : {
"nodetype" : "namednumber",
"number" : "6"
},
"overheatAlarm" : {
"nodetype" : "namednumber",
"number" : "7"
},
"internalFailure" : {
"nodetype" : "namednumber",
"number" : "8"
},
"description" :
"""Transfer Switch Transfer Reason""",
},
"ProductTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"rackPdu" : {
"nodetype" : "namednumber",
"number" : "0"
},
"bcm" : {
"nodetype" : "namednumber",
"number" : "1"
},
"transferSwitch" : {
"nodetype" : "namednumber",
"number" : "2"
},
"powerMeter" : {
"nodetype" : "namednumber",
"number" : "3"
},
"description" :
"""The product types.""",
},
"RelayPowerLossBehaviorEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"nonLatching" : {
"nodetype" : "namednumber",
"number" : "0"
},
"latching" : {
"nodetype" : "namednumber",
"number" : "1"
},
"description" :
"""The type of relay behavior on power loss.""",
},
"DeviceCascadeTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"bridging" : {
"nodetype" : "namednumber",
"number" : "0"
},
"portForwarding" : {
"nodetype" : "namednumber",
"number" : "1"
},
"description" :
"""The type of configured cascading on this device.""",
},
"PeripheralDeviceFirmwareUpdateStateEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"started" : {
"nodetype" : "namednumber",
"number" : "1"
},
"successful" : {
"nodetype" : "namednumber",
"number" : "2"
},
"failed" : {
"nodetype" : "namednumber",
"number" : "3"
},
"description" :
"""The state of an peripheral device firmware update.""",
},
"PanelLayoutEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"invalid" : {
"nodetype" : "namednumber",
"number" : "-1"
},
"oneColumn" : {
"nodetype" : "namednumber",
"number" : "1"
},
"twoColumns" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""The panel circuit position layout.""",
},
"PanelNumberingEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"invalid" : {
"nodetype" : "namednumber",
"number" : "-1"
},
"oddEven" : {
"nodetype" : "namednumber",
"number" : "1"
},
"sequential" : {
"nodetype" : "namednumber",
"number" : "2"
},
"description" :
"""The panel circuit position numbering scheme.""",
},
"CircuitTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"onePhaseLL" : {
"nodetype" : "namednumber",
"number" : "1"
},
"onePhaseLN" : {
"nodetype" : "namednumber",
"number" : "2"
},
"onePhaseLLN" : {
"nodetype" : "namednumber",
"number" : "3"
},
"threePhase" : {
"nodetype" : "namednumber",
"number" : "4"
},
"description" :
"""The panel circuit types.""",
},
"PhaseEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"phaseA" : {
"nodetype" : "namednumber",
"number" : "1"
},
"phaseB" : {
"nodetype" : "namednumber",
"number" : "2"
},
"phaseC" : {
"nodetype" : "namednumber",
"number" : "3"
},
"neutral" : {
"nodetype" : "namednumber",
"number" : "4"
},
"earth" : {
"nodetype" : "namednumber",
"number" : "5"
},
"description" :
"""The power phase.""",
},
"LineEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"lineL1" : {
"nodetype" : "namednumber",
"number" : "1"
},
"lineL2" : {
"nodetype" : "namednumber",
"number" : "2"
},
"lineL3" : {
"nodetype" : "namednumber",
"number" : "3"
},
"lineNeutral" : {
"nodetype" : "namednumber",
"number" : "4"
},
"description" :
"""The Lines: L1, L2, L3, N.""",
},
"PowerMeterTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"singlePhase" : {
"nodetype" : "namednumber",
"number" : "1"
},
"splitPhase" : {
"nodetype" : "namednumber",
"number" : "2"
},
"threePhase" : {
"nodetype" : "namednumber",
"number" : "3"
},
"description" :
"""The power meter types""",
},
"NetworkInterfaceTypeEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"wired" : {
"nodetype" : "namednumber",
"number" : "0"
},
"wireless" : {
"nodetype" : "namednumber",
"number" : "1"
},
"description" :
"""The type of network interface.""",
},
"AddressSourceEnumeration" : {
"basetype" : "Enumeration",
"status" : "current",
"static" : {
"nodetype" : "namednumber",
"number" : "1"
},
"dhcp" : {
"nodetype" : "namednumber",
"number" : "2"
},
"dhcpv6" : {
"nodetype" : "namednumber",
"number" : "3"
},
"description" :
"""How was the address obtained?""",
},
}, # typedefs
"nodes" : {
"raritan" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742",
"status" : "current",
}, # node
"pdu2" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6",
}, # node
"traps" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0",
}, # node
"trapInformation" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0",
}, # node
"trapInformationTable" : {
"nodetype" : "table",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1",
"status" : "current",
"description" :
"""A list of Trap Information entries. The number of
entries is given by the value of pduCount.""",
}, # table
"trapInformationEntry" : {
"nodetype" : "row",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1",
"status" : "current",
"linkage" : [
"pduId",
],
"description" :
"""An entry containing objects used in traps.""",
}, # row
"userName" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The user currently logged in.""",
}, # column
"targetUser" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The user added, deleted, or modified.""",
}, # column
"imageVersion" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The version of the upgrade image.""",
}, # column
"roleName" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The role added, deleted, or modified.""",
}, # column
"smtpMessageRecipients" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Comma separated list of SMTP Message recipients""",
}, # column
"smtpServer" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The IP address/host name of the SMTP server""",
}, # column
"oldSensorState" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "SensorStateEnumeration"},
},
"access" : "notifyonly",
"description" :
"""The old sensor state used in Sensor State Transition traps.""",
}, # scalar
"pduNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "notifyonly",
"description" :
"""A unique value for each PDU. Its value
ranges between 1 and the value of pduCount.""",
}, # scalar
"inletPoleNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "notifyonly",
"description" :
"""A unique value for each inlet Pole. Its value
ranges between 1 and the value of inletPoleCount.""",
}, # scalar
"outletPoleNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "notifyonly",
"description" :
"""A unique value for each outlet Pole. Its value
ranges between 1 and the value of outletPoleCount.""",
}, # scalar
"externalSensorNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "notifyonly",
"description" :
"""A unique value for each external sensor. Its value
ranges between 1 and the value of externalSensorCount.""",
}, # scalar
"typeOfSensor" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "SensorTypeEnumeration"},
},
"access" : "notifyonly",
"description" :
"""The type of sensor.""",
}, # scalar
"errorDescription" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "notifyonly",
"description" :
"""Description of the Error""",
}, # scalar
"deviceChangedParameter" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "DeviceIdentificationParameterEnumeration"},
},
"access" : "notifyonly",
"description" :
"""Description of the parameter(s) that changed.""",
}, # scalar
"changedParameterNewValue" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "notifyonly",
"description" :
"""The new value of the changed parameter """,
}, # scalar
"lhxSupportEnabled" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "notifyonly",
"description" :
"""The new enabled state for Schroff LHX Support.""",
}, # scalar
"webcamModel" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "notifyonly",
"description" :
"""The model of the Webcam """,
}, # scalar
"webcamConnectionPort" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "notifyonly",
"description" :
"""The port to which the Webcam is connected """,
}, # scalar
"agentInetPortNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetPortNumber"},
},
"access" : "notifyonly",
"description" :
"""The UDP port number used for accessing
the SNMP agent on the device.
Examples:
If deviceCascadeType is portForwarding, then
master: 50500
slave 1:50501
slave 2: 50502
......
If cascading mode is not portForwarding and default (Standard) ports are being used, then
port: 161
""",
}, # scalar
"peripheralDeviceRomcode" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "notifyonly",
"description" :
"""The Romcode of an peripheral device""",
}, # scalar
"peripheralDeviceFirmwareUpdateState" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "PeripheralDeviceFirmwareUpdateStateEnumeration"},
},
"access" : "notifyonly",
"description" :
"""The firmware update state of an peripheral device""",
}, # scalar
"circuitNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.21",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "33000"
},
],
"range" : {
"min" : "1",
"max" : "33000"
},
},
},
"access" : "notifyonly",
"description" :
"""A unique value for each circuit.
circuitId is defined as follows.
circuitID = 1000*(panelId) + circuitPosition
Examples:
1045 is the the circuit on panel 1 with lowest circuit position equal to 45
4067 is the the circuit on panel 4 with lowest circuit position equal to 67. """,
}, # scalar
"circuitPoleNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.22",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "notifyonly",
"description" :
"""A unique value for each circuit Pole. """,
}, # scalar
"phoneNumber" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.0.0.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "notifyonly",
"description" :
"""The phone number of e.g. an SMS receiver.""",
}, # scalar
"board" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.1",
}, # node
"environmental" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.2",
}, # node
"configuration" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3",
}, # node
"pduCount" : {
"nodetype" : "scalar",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""For a PX2/3 and transfer switch, pduCount = 1
For a BCM2, pduCount = number of power meters + 1 (for the main controller)""",
}, # scalar
"unit" : {
"nodetype" : "node",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2",
}, # node
"nameplateTable" : {
"nodetype" : "table",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1",
"status" : "current",
"description" :
"""A list of PDU nameplate entries. The number of
entries is given by the value of pduCount.""",
}, # table
"nameplateEntry" : {
"nodetype" : "row",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1",
"status" : "current",
"linkage" : [
"pduId",
],
"description" :
"""An entry providing PDU nameplate information.""",
}, # row
"pduId" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "256"
},
],
"range" : {
"min" : "0",
"max" : "256"
},
},
},
"access" : "noaccess",
"description" :
"""A unique value for each PDU/Power meter.
PX2/3 and transfer switch: pduiId = 1
BCM2:
main controller: pduId = 0
power meter: pduId = rotary switch setting for the power meter
It is the same as the MeterID in the GUI
Example:
a power meter, rotary switch setting = 5, pduId = 5
a panel (power meter + branch metering), rotary switch setting = 23, pduId = 23 """,
}, # column
"pduManufacturer" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU manaufacturer.""",
}, # column
"pduModel" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU model.""",
}, # column
"pduSerialNumber" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU serial Number.""",
}, # column
"pduRatedVoltage" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU voltage rating.""",
}, # column
"pduRatedCurrent" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU current rating.""",
}, # column
"pduRatedFrequency" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU frequency rating.""",
}, # column
"pduRatedVA" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The PDU VA (VoltAmps) rating.""",
}, # column
"pduImage" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.1.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "URL"},
},
"access" : "readonly",
"description" :
"""The URL of the wiring diagram for this PDU.""",
}, # column
"unitConfigurationTable" : {
"nodetype" : "table",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2",
"status" : "current",
"description" :
"""A list of PDU configuration entries. The number of
entries is given by the value of pduCount.""",
}, # table
"unitConfigurationEntry" : {
"nodetype" : "row",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1",
"status" : "current",
"linkage" : [
"pduId",
],
"description" :
"""An entry containing configuration objects for a particular PDU.""",
}, # row
"inletCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "readonly",
"description" :
"""The number of inlets.""",
}, # column
"overCurrentProtectorCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of overcurrent protectors.""",
}, # column
"outletCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlets.""",
}, # column
"inletControllerCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of inlet controllers.""",
}, # column
"outletControllerCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of outlet controllers.""",
}, # column
"externalSensorCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of external sensors.""",
}, # column
"pxIPAddress" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.8",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""The current IP address. A value of 0.0.0.0 indicates an error
or an unset option.""",
}, # column
"netmask" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.9",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""The current netmask. A value of 0.0.0.0 indicates an error
or an unset option.""",
}, # column
"gateway" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.10",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "IpAddress"},
},
"access" : "readonly",
"description" :
"""The current gateway. A value of 0.0.0.0 indicates an error
or an unset option.""",
}, # column
"pxMACAddress" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "MacAddress"},
},
"access" : "readonly",
"description" :
"""The current MAC address.""",
}, # column
"utcOffset" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The current UTC offset.""",
}, # column
"pduName" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The user-defined name for the PDU.""",
}, # column
"networkInterfaceType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "NetworkInterfaceTypeEnumeration"},
},
"access" : "readonly",
"description" :
"""The network interface type: wired or wireless.""",
}, # column
"externalSensorsZCoordinateUnits" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.34",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "ExternalSensorsZCoordinateUnitsEnumeration"},
},
"access" : "readwrite",
"description" :
"""External Sensor Z Coordinate units: Freeform Text or Rack Units (U)
Default is U.""",
}, # column
"unitDeviceCapabilities" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.35",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Bits",
"i1smpsStatus" : {
"nodetype" : "namednumber",
"number" : "45"
},
"i2smpsStatus" : {
"nodetype" : "namednumber",
"number" : "46"
},
},
},
"access" : "readonly",
"description" :
"""A bit string which indicates which unit sensors are available.""",
}, # column
"outletSequencingDelay" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.36",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"", "name" : "Unsigned32"},
},
"access" : "readwrite",
"description" :
"""It is deprecated. This is an alias for inrushGuardDelay""",
}, # column
"globalOutletPowerCyclingPowerOffPeriod" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.37",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Unsigned32"},
},
"access" : "readwrite",
"description" :
"""The power-off period when an outlet is cycled;
applies to all outlets unless overridden at the outlet level;
specified in seconds;
0 <= value <= 3600 seconds.""",
}, # column
"globalOutletStateOnStartup" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.38",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "GlobalOutletStateOnStartupEnumeration"},
},
"access" : "readwrite",
"description" :
"""The outlet state on device startup; applies to all outlets
unless overridden at the outlet level.
Note that this value is ignored if relayBehaviorOnPowerLoss is set to latching.""",
}, # column
"outletPowerupSequence" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.39",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""The sequence in which will the outlets will be switched on under the following conditions.
1) Switch all outlets on operation is executed
2) Power to the PDU is cycled
String must consist of a comma separated sequence of the outlet numbers and
all outlet numbers must be included. The numbers entered must be a permutation of the numbers
1,2,3,-outletnumber.
Example for a 12 outlet PDU: 1,12,3,5,6,7,10,2,4,11,9,8.
The per outlet sequence delays are defined as outletSequenceDelay in the outletConfigurationTable""",
}, # column
"pduPowerCyclingPowerOffPeriod" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.40",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Unsigned32"},
},
"access" : "readwrite",
"description" :
"""When power to the PX is cycled (either manually or because of a
temporary power loss), this number determines how many seconds the
PX will wait before it provides power to the outlets.
specified in seconds:
0 <= value <= 3600 seconds.
Note that this value is ignored if relayBehaviorOnPowerLoss is set to latching.""",
}, # column
"pduDaisychainMemberType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.41",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "DaisychainMemberTypeEnumeration"},
},
"access" : "readonly",
"description" :
"""The daisy chain member type.""",
}, # column
"managedExternalSensorCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.42",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of managed external sensors """,
}, # column
"pxInetAddressType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.50",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddressType"},
},
"access" : "readonly",
"description" :
"""The type of address format
This object is deprecated in favor of ipAddressTable from the IP-MIB (rfc4293).""",
}, # column
"pxInetIPAddress" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.51",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddress"},
},
"access" : "readonly",
"description" :
"""The current IP address. A value of 0.0.0.0 indicates an error
or an unset option.
This object is deprecated in favor of ipAddressTable from the IP-MIB (rfc4293).
For IPv6, its value is 0.0.0.0 """,
}, # column
"pxInetNetmask" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.52",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddress"},
},
"access" : "readonly",
"description" :
"""The current netmask. A value of 0.0.0.0 indicates an error
or an unset option.
This object is deprecated in favor of ipAddressTable from the IP-MIB (rfc4293).
For IPv6, its value is 0.0.0.0 """,
}, # column
"pxInetGateway" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.53",
"status" : "deprecated",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddress"},
},
"access" : "readonly",
"description" :
"""The current gateway. A value of 0.0.0.0 indicates an error
or an unset option.
This object is deprecated in favor of ipAddressTable from the IP-MIB (rfc4293).
For IPv6, its value is 0.0.0.0 """,
}, # column
"loadShedding" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.55",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Enter/Exit Load Shedding Mode""",
}, # column
"serverCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.56",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of servers""",
}, # column
"inrushGuardDelay" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.57",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Unsigned32"},
},
"access" : "readwrite",
"description" :
"""The time interval between switching on two outlets;
specified in milliseconds;
100 <= value <= 100000 milliseconds.""",
}, # column
"cascadedDeviceConnected" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.58",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readonly",
"description" :
"""Indicates whether another PX2 is connected using an USB cable to
the USB-A port of this PX2 in a cascaded configuration.
true: Connected
false: Not Connected""",
}, # column
"synchronizeWithNTPServer" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.59",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Will time be obtained using NTP?
true: time will be obtained using NTP servers
false: time will not be obtained using NTP servers
Deafault is false.""",
}, # column
"useDHCPProvidedNTPServer" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.60",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Will the NTP server addresses be provided by DHCP/BOOTP?
This is used only if synchronizeWithNTPServer is enabled
Default is enabled, i.e. DHCP provided NTP servers will be used """,
}, # column
"firstNTPServerAddressType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.61",
"status" : "current",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddressType"},
},
"access" : "readwrite",
"default" : "ipv4",
"description" :
"""Represents the type of the corresponding instance
of firstNTPServerAddress object.""",
}, # column
"firstNTPServerAddress" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.62",
"status" : "current",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddress"},
},
"access" : "readwrite",
"description" :
"""The address of the primary ntp server.""",
}, # column
"secondNTPServerAddressType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.63",
"status" : "current",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddressType"},
},
"access" : "readwrite",
"description" :
"""Represents the type of the corresponding instance
of secondNTPServerAddress object.
Default is ipv4""",
}, # column
"secondNTPServerAddress" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.64",
"status" : "current",
"syntax" : {
"type" : { "module" :"INET-ADDRESS-MIB", "name" : "InetAddress"},
},
"access" : "readwrite",
"description" :
"""The address of the second ntp server.""",
}, # column
"wireCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.65",
"status" : "deprecated",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of wires.""",
}, # column
"transferSwitchCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.66",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of transfer switches.""",
}, # column
"productType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.67",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "ProductTypeEnumeration"},
},
"access" : "readonly",
"description" :
"""The product type.
Is this product a PDU, STS, BCM,...?""",
}, # column
"meteringControllerCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.68",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "255"
},
],
"range" : {
"min" : "0",
"max" : "255"
},
},
},
"access" : "readonly",
"description" :
"""The number of metering controllers.""",
}, # column
"relayBehaviorOnPowerLoss" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.69",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "RelayPowerLossBehaviorEnumeration"},
},
"access" : "readwrite",
"description" :
"""The relay behavior on power loss.""",
}, # column
"deviceCascadeType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.70",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "DeviceCascadeTypeEnumeration"},
},
"access" : "readwrite",
"description" :
"""The configured type of cascading:
default is bridging.
This can be set only if the unit is the master.""",
}, # column
"deviceCascadePosition" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.71",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "99"
},
],
"range" : {
"min" : "0",
"max" : "99"
},
},
},
"access" : "readonly",
"description" :
"""The position of the device in the cascaded chain.
0: master
>= 1: slaves """,
}, # column
"peripheralDevicesAutoManagement" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.72",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Auto-management enabled state for peripheral devices.""",
}, # column
"frontPanelOutletSwitching" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.73",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Enables/disables switching of outlets using the PDU front panel.""",
}, # column
"frontPanelRCMSelfTest" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.74",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Enables/disables front panel RCM self-test.""",
}, # column
"frontPanelActuatorControl" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.75",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Enables/disables front panel peripheral actuator control.""",
}, # column
"circuitCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.76",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Integer32"},
},
"access" : "readonly",
"description" :
"""The number of user configured circuits in the panel.""",
}, # column
"activeDNSServerCount" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.2.1.77",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""The number of active DNS Servers """,
}, # column
"controllerConfigurationTable" : {
"nodetype" : "table",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3",
"status" : "current",
"description" :
"""A list of entries for the boards in each PDU. The number of
entries is given by the value of
inletControllerCount + outletControllerCount + 1 (for main controller board).""",
}, # table
"controllerConfigurationEntry" : {
"nodetype" : "row",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3.1",
"status" : "current",
"linkage" : [
"pduId",
"boardType",
"boardIndex",
],
"description" :
"""An entry containing objects for a controller.""",
}, # row
"boardType" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"PDU2-MIB", "name" : "BoardTypeEnumeration"},
},
"access" : "noaccess",
"description" :
"""The type of board.""",
}, # column
"boardIndex" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "256"
},
],
"range" : {
"min" : "1",
"max" : "256"
},
},
},
"access" : "noaccess",
"description" :
"""A unique value for each controller. Its value
ranges between 1 and the value of
inletControllerCount + outletControllerCount + 1.""",
}, # column
"boardVersion" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The board hardware version.""",
}, # column
"boardFirmwareVersion" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""The firmware version.""",
}, # column
"boardFirmwareTimeStamp" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.3.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"", "name" : "Unsigned32"},
},
"access" : "readonly",
"description" :
"""The time when the board firmware was updated in UNIX(epoch)time.
It is measured in seconds relative to January 1, 1970 (midnight UTC/GMT),
i.e a value of 0 indicates January 1, 1970 (midnight UTC/GMT).""",
}, # column
"logConfigurationTable" : {
"nodetype" : "table",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.4",
"status" : "current",
"description" :
"""A table of parameters for the data logging feature. The number of
entries is given by the value of pduCount.""",
}, # table
"logConfigurationEntry" : {
"nodetype" : "row",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.4.1",
"status" : "current",
"linkage" : [
"pduId",
],
"description" :
"""An entry containing data logging parameters
for a particular PDU.""",
}, # row
"dataLogging" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
"oid" : "1.3.6.1.4.1.13742.6.3.2.4.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "TruthValue"},
},
"access" : "readwrite",
"description" :
"""Data Retrieval: enabled/disabled.""",
}, # column
"measurementPeriod" : {
"nodetype" : "column",
"moduleName" : "PDU2-MIB",
| codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
"""
pygments.lexers.lisp
~~~~~~~~~~~~~~~~~~~~
Lexers for Lispy languages.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Literal, Error
from pygments.lexers.python import PythonLexer
__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer',
'NewLispLexer', 'EmacsLispLexer', 'ShenLexer', 'CPSALexer',
'XtlangLexer', 'FennelLexer']
class SchemeLexer(RegexLexer):
    """
    A Scheme lexer, parsing a stream and outputting the tokens
    needed to highlight scheme code.
    This lexer could be most probably easily subclassed to parse
    other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp.
    This parser is checked with pastes from the LISP pastebin
    at http://paste.lisp.org/ to cover as much syntax as possible.
    It supports the full Scheme syntax as defined in R5RS.
    .. versionadded:: 0.6
    """
    name = 'Scheme'
    aliases = ['scheme', 'scm']
    filenames = ['*.scm', '*.ss']
    mimetypes = ['text/x-scheme', 'application/x-scheme']
    # list of known keywords and builtins taken from vim 6.4 scheme.vim
    # syntax file.
    keywords = (
        'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
        'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
        'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
        'let-syntax', 'letrec-syntax', 'syntax-rules'
    )
    builtins = (
        '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
        'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
        'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
        'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
        'cadr', 'call-with-current-continuation', 'call-with-input-file',
        'call-with-output-file', 'call-with-values', 'call/cc', 'car',
        'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
        'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
        'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
        'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
        'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
        'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
        'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
        'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
        'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
        'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
        'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
        'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
        'integer?', 'interaction-environment', 'lcm', 'length', 'list',
        'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
        'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
        'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
        'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
        'null?', 'number->string', 'number?', 'numerator', 'odd?',
        'open-input-file', 'open-output-file', 'output-port?', 'pair?',
        'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
        'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
        'remainder', 'reverse', 'round', 'scheme-report-environment',
        'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
        'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
        'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
        'string-copy', 'string-fill!', 'string-length', 'string-ref',
        'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
        'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
        'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
        'vector', 'vector->list', 'vector-fill!', 'vector-length',
        'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
        'with-output-to-file', 'write', 'write-char', 'zero?'
    )
    # valid names for identifiers
    # well, names can only not consist fully of numbers
    # but this should be good enough for now
    valid_name = r'[\w!$%&*+,/:<=>?@^~|-]+'
    # NOTE: rule order inside each state matters — earlier patterns win.
    tokens = {
        'root': [
            # single-line comments: semicolon to the end of the line
            (r';.*$', Comment.Single),
            # multi-line comment (#| ... |#, nestable via the dedicated state)
            (r'#\|', Comment.Multiline, 'multiline-comment'),
            # commented form (entire sexpr following #; is treated as a comment)
            (r'#;\s*\(', Comment, 'commented-form'),
            # signifies that the program text that follows is written with the
            # lexical and datum syntax described in r6rs
            (r'#!r6rs', Comment),
            # whitespaces - usually not relevant
            (r'\s+', Text),
            # numbers
            (r'-?\d+\.\d+', Number.Float),
            (r'-?\d+', Number.Integer),
            # support for uncommon kinds of numbers -
            # have to figure out what the characters mean
            # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number),
            # strings, symbols and characters
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'" + valid_name, String.Symbol),
            (r"#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)", String.Char),
            # constants
            (r'(#t|#f)', Name.Constant),
            # special operators
            (r"('|#|`|,@|,|\.)", Operator),
            # highlight the keywords
            # (each alternative keeps a trailing space so only a complete
            # word followed by whitespace matches)
            ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords),
             Keyword),
            # first variable in a quoted string like
            # '(this is syntactic sugar)
            (r"(?<='\()" + valid_name, Name.Variable),
            (r"(?<=#\()" + valid_name, Name.Variable),
            # highlight the builtins (only in call position, after an open paren)
            (r"(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins),
             Name.Builtin),
            # the remaining functions
            (r'(?<=\()' + valid_name, Name.Function),
            # find the remaining variables
            (valid_name, Name.Variable),
            # the famous parentheses!
            (r'(\(|\))', Punctuation),
            (r'(\[|\])', Punctuation),
        ],
        # nested #| ... |# comments; '#push'/'#pop' keep track of nesting depth
        'multiline-comment': [
            (r'#\|', Comment.Multiline, '#push'),
            (r'\|#', Comment.Multiline, '#pop'),
            (r'[^|#]+', Comment.Multiline),
            (r'[|#]', Comment.Multiline),
        ],
        # interior of a #;-commented sexpr; balanced parens tracked via push/pop
        'commented-form': [
            (r'\(', Comment, '#push'),
            (r'\)', Comment, '#pop'),
            (r'[^()]+', Comment),
        ],
    }
class CommonLispLexer(RegexLexer):
    """
    A Common Lisp lexer.
    .. versionadded:: 0.9
    """
    name = 'Common Lisp'
    aliases = ['common-lisp', 'cl', 'lisp']
    filenames = ['*.cl', '*.lisp']
    mimetypes = ['text/x-common-lisp']
    flags = re.IGNORECASE | re.MULTILINE
    # couple of useful regexes
    # characters that are not macro-characters and can be used to begin a symbol
    nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
    constituent = nonmacro + '|[#.:]'
    terminated = r'(?=[ "()\'\n,;`])'  # whitespace or terminating macro characters
    # symbol token, reverse-engineered from hyperspec
    # Take a deep breath...
    symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)
    def __init__(self, **options):
        """Load the builtin-name tables used by get_tokens_unprocessed to
        reclassify generic symbols (deferred import keeps module load cheap)."""
        from pygments.lexers._cl_builtins import BUILTIN_FUNCTIONS, \
            SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \
            BUILTIN_TYPES, BUILTIN_CLASSES
        self.builtin_function = BUILTIN_FUNCTIONS
        self.special_forms = SPECIAL_FORMS
        self.macros = MACROS
        self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS
        self.declarations = DECLARATIONS
        self.builtin_types = BUILTIN_TYPES
        self.builtin_classes = BUILTIN_CLASSES
        RegexLexer.__init__(self, **options)
    def get_tokens_unprocessed(self, text):
        """Post-process the regex token stream: any Name.Variable whose text
        appears in one of the builtin tables is re-emitted with a more
        specific token type; everything else passes through unchanged."""
        stack = ['root']
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack):
            if token is Name.Variable:
                if value in self.builtin_function:
                    yield index, Name.Builtin, value
                    continue
                if value in self.special_forms:
                    yield index, Keyword, value
                    continue
                if value in self.macros:
                    yield index, Name.Builtin, value
                    continue
                if value in self.lambda_list_keywords:
                    yield index, Keyword, value
                    continue
                if value in self.declarations:
                    yield index, Keyword, value
                    continue
                if value in self.builtin_types:
                    yield index, Keyword.Type, value
                    continue
                if value in self.builtin_classes:
                    yield index, Name.Class, value
                    continue
            yield index, token, value
    # NOTE: rule order inside each state matters — earlier patterns win.
    tokens = {
        'root': [
            # everything is handled by 'body'; 'root' only delegates
            default('body'),
        ],
        'multiline-comment': [
            (r'#\|', Comment.Multiline, '#push'),  # (cf. Hyperspec 2.4.8.19)
            (r'\|#', Comment.Multiline, '#pop'),
            (r'[^|#]+', Comment.Multiline),
            (r'[|#]', Comment.Multiline),
        ],
        # interior of a read-time-commented form; balanced parens via push/pop
        'commented-form': [
            (r'\(', Comment.Preproc, '#push'),
            (r'\)', Comment.Preproc, '#pop'),
            (r'[^()]+', Comment.Preproc),
        ],
        'body': [
            # whitespace
            (r'\s+', Text),
            # single-line comment
            (r';.*$', Comment.Single),
            # multi-line comment
            (r'#\|', Comment.Multiline, 'multiline-comment'),
            # encoding comment (?)
            (r'#\d*Y.*$', Comment.Special),
            # strings and characters
            (r'"(\\.|\\\n|[^"\\])*"', String),
            # quoting
            (r":" + symbol, String.Symbol),
            (r"::" + symbol, String.Symbol),
            (r":#" + symbol, String.Symbol),
            (r"'" + symbol, String.Symbol),
            (r"'", Operator),
            (r"`", Operator),
            # decimal numbers
            (r'[-+]?\d+\.?' + terminated, Number.Integer),
            (r'[-+]?\d+/\d+' + terminated, Number),
            (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' +
             terminated, Number.Float),
            # sharpsign strings and characters
            (r"#\\." + terminated, String.Char),
            (r"#\\" + symbol, String.Char),
            # vector
            (r'#\(', Operator, 'body'),
            # bitstring
            (r'#\d*\*[01]*', Literal.Other),
            # uninterned symbol
            (r'#:' + symbol, String.Symbol),
            # read-time and load-time evaluation
            (r'#[.,]', Operator),
            # function shorthand
            (r'#\'', Name.Function),
            # binary rational
            (r'#b[+-]?[01]+(/[01]+)?', Number.Bin),
            # octal rational
            (r'#o[+-]?[0-7]+(/[0-7]+)?', Number.Oct),
            # hex rational
            (r'#x[+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex),
            # radix rational
            (r'#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?', Number),
            # complex
            (r'(#c)(\()', bygroups(Number, Punctuation), 'body'),
            # array
            (r'(#\d+a)(\()', bygroups(Literal.Other, Punctuation), 'body'),
            # structure
            (r'(#s)(\()', bygroups(Literal.Other, Punctuation), 'body'),
            # path
            (r'#p?"(\\.|[^"])*"', Literal.Other),
            # reference
            (r'#\d+=', Operator),
            (r'#\d+#', Operator),
            # read-time comment
            (r'#+nil' + terminated + r'\s*\(', Comment.Preproc, 'commented-form'),
            # read-time conditional
            (r'#[+-]', Operator),
            # special operators that should have been parsed already
            (r'(,@|,|\.)', Operator),
            # special constants
            (r'(t|nil)' + terminated, Name.Constant),
            # functions and variables
            (r'\*' + symbol + r'\*', Name.Variable.Global),
            (symbol, Name.Variable),
            # parentheses
            (r'\(', Punctuation, 'body'),
            (r'\)', Punctuation, '#pop'),
        ],
    }
class HyLexer(RegexLexer):
    """
    Lexer for `Hy <http://hylang.org/>`_ source code.
    .. versionadded:: 2.0
    """
    name = 'Hy'
    aliases = ['hylang']
    filenames = ['*.hy']
    mimetypes = ['text/x-hy', 'application/x-hy']
    special_forms = (
        'cond', 'for', '->', '->>', 'car',
        'cdr', 'first', 'rest', 'let', 'when', 'unless',
        'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator',
        ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in',
        'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=',
        'foreach', 'while',
        'eval-and-compile', 'eval-when-compile'
    )
    declarations = (
        'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv'
    )
    hy_builtins = ()
    hy_core = (
        'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc',
        'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?',
        'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat',
        'repeatedly', 'take', 'take_nth', 'take_while', 'zero?'
    )
    builtins = hy_builtins + hy_core
    # valid names for identifiers
    # well, names can only not consist fully of numbers
    # but this should be good enough for now
    valid_name = r'(?!#)[\w!$%*+<=>?/.#-:]+'
    # class-body helper (no self): runs once at class-definition time to turn
    # a word tuple into a single alternation regex with a trailing space
    def _multi_escape(entries):
        return words(entries, suffix=' ')
    # NOTE: rule order inside each state matters — earlier patterns win.
    tokens = {
        'root': [
            # the comments - always starting with semicolon
            # and going to the end of the line
            (r';.*$', Comment.Single),
            # whitespaces - usually not relevant
            (r'[,\s]+', Text),
            # numbers
            (r'-?\d+\.\d+', Number.Float),
            (r'-?\d+', Number.Integer),
            (r'0[0-7]+j?', Number.Oct),
            (r'0[xX][a-fA-F0-9]+', Number.Hex),
            # strings, symbols and characters
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'" + valid_name, String.Symbol),
            (r"\\(.|[a-z]+)", String.Char),
            (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)),
            (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)),
            # keywords
            (r'::?' + valid_name, String.Symbol),
            # special operators
            (r'~@|[`\'#^~&@]', Operator),
            # Hy compiles to Python, so Python keywords/builtins apply too
            include('py-keywords'),
            include('py-builtins'),
            # highlight the special forms
            (_multi_escape(special_forms), Keyword),
            # Technically, only the special forms are 'keywords'. The problem
            # is that only treating them as keywords means that things like
            # 'defn' and 'ns' need to be highlighted as builtins. This is ugly
            # and weird for most styles. So, as a compromise we're going to
            # highlight them as Keyword.Declarations.
            (_multi_escape(declarations), Keyword.Declaration),
            # highlight the builtins
            (_multi_escape(builtins), Name.Builtin),
            # the remaining functions
            (r'(?<=\()' + valid_name, Name.Function),
            # find the remaining variables
            (valid_name, Name.Variable),
            # Hy accepts vector notation
            (r'(\[|\])', Punctuation),
            # Hy accepts map notation
            (r'(\{|\})', Punctuation),
            # the famous parentheses!
            (r'(\(|\))', Punctuation),
        ],
        # reuse the Python lexer's keyword/builtin states wholesale
        'py-keywords': PythonLexer.tokens['keywords'],
        'py-builtins': PythonLexer.tokens['builtins'],
    }
    def analyse_text(text):
        """Guess-scoring hook: report high confidence (0.9) when Hy-style
        forms appear; falls through (implicit None) otherwise."""
        if '(import ' in text or '(defn ' in text:
            return 0.9
class RacketLexer(RegexLexer):
"""
Lexer for `Racket <http://racket-lang.org/>`_ source code (formerly
known as PLT Scheme).
.. versionadded:: 1.6
"""
name = 'Racket'
aliases = ['racket', 'rkt']
filenames = ['*.rkt', '*.rktd', '*.rktl']
mimetypes = ['text/x-racket', 'application/x-racket']
# Generated by example.rkt
_keywords = (
u'#%app', u'#%datum', u'#%declare', u'#%expression', u'#%module-begin',
u'#%plain-app', u'#%plain-lambda', u'#%plain-module-begin',
u'#%printing-module-begin', u'#%provide', u'#%require',
u'#%stratified-body', u'#%top', u'#%top-interaction',
u'#%variable-reference', u'->', u'->*', u'->*m', u'->d', u'->dm', u'->i',
u'->m', u'...', u':do-in', u'==', u'=>', u'_', u'absent', u'abstract',
u'all-defined-out', u'all-from-out', u'and', u'any', u'augment', u'augment*',
u'augment-final', u'augment-final*', u'augride', u'augride*', u'begin',
u'begin-for-syntax', u'begin0', u'case', u'case->', u'case->m',
u'case-lambda', u'class', u'class*', u'class-field-accessor',
u'class-field-mutator', u'class/c', u'class/derived', u'combine-in',
u'combine-out', u'command-line', u'compound-unit', u'compound-unit/infer',
u'cond', u'cons/dc', u'contract', u'contract-out', u'contract-struct',
u'contracted', u'define', u'define-compound-unit',
u'define-compound-unit/infer', u'define-contract-struct',
u'define-custom-hash-types', u'define-custom-set-types',
u'define-for-syntax', u'define-local-member-name', u'define-logger',
u'define-match-expander', u'define-member-name',
u'define-module-boundary-contract', u'define-namespace-anchor',
u'define-opt/c', u'define-sequence-syntax', u'define-serializable-class',
u'define-serializable-class*', u'define-signature',
u'define-signature-form', u'define-struct', u'define-struct/contract',
u'define-struct/derived', u'define-syntax', u'define-syntax-rule',
u'define-syntaxes', u'define-unit', u'define-unit-binding',
u'define-unit-from-context', u'define-unit/contract',
u'define-unit/new-import-export', u'define-unit/s', u'define-values',
u'define-values-for-export', u'define-values-for-syntax',
u'define-values/invoke-unit', u'define-values/invoke-unit/infer',
u'define/augment', u'define/augment-final', u'define/augride',
u'define/contract', u'define/final-prop', u'define/match',
u'define/overment', u'define/override', u'define/override-final',
u'define/private', u'define/public', u'define/public-final',
u'define/pubment', u'define/subexpression-pos-prop',
u'define/subexpression-pos-prop/name', u'delay', u'delay/idle',
u'delay/name', u'delay/strict', u'delay/sync', u'delay/thread', u'do',
u'else', u'except', u'except-in', u'except-out', u'export', u'extends',
u'failure-cont', u'false', u'false/c', u'field', u'field-bound?', u'file',
u'flat-murec-contract', u'flat-rec-contract', u'for', u'for*', u'for*/and',
u'for*/async', u'for*/first', u'for*/fold', u'for*/fold/derived',
u'for*/hash', u'for*/hasheq', u'for*/hasheqv', u'for*/last', u'for*/list',
u'for*/lists', u'for*/mutable-set', u'for*/mutable-seteq',
u'for*/mutable-seteqv', u'for*/or', u'for*/product', u'for*/set',
u'for*/seteq', u'for*/seteqv', u'for*/stream', u'for*/sum', u'for*/vector',
u'for*/weak-set', u'for*/weak-seteq', u'for*/weak-seteqv', u'for-label',
u'for-meta', u'for-syntax', u'for-template', u'for/and', u'for/async',
u'for/first', u'for/fold', u'for/fold/derived', u'for/hash', u'for/hasheq',
u'for/hasheqv', u'for/last', u'for/list', u'for/lists', u'for/mutable-set',
u'for/mutable-seteq', u'for/mutable-seteqv', u'for/or', u'for/product',
u'for/set', u'for/seteq', u'for/seteqv', u'for/stream', u'for/sum',
u'for/vector', u'for/weak-set', u'for/weak-seteq', u'for/weak-seteqv',
u'gen:custom-write', u'gen:dict', u'gen:equal+hash', u'gen:set',
u'gen:stream', u'generic', u'get-field', u'hash/dc', u'if', u'implies',
u'import', u'include', u'include-at/relative-to',
u'include-at/relative-to/reader', u'include/reader', u'inherit',
u'inherit-field', u'inherit/inner', u'inherit/super', u'init',
u'init-depend', u'init-field', u'init-rest', u'inner', u'inspect',
u'instantiate', u'interface', u'interface*', u'invariant-assertion',
u'invoke-unit', u'invoke-unit/infer', u'lambda', u'lazy', u'let', u'let*',
u'let*-values', u'let-syntax', u'let-syntaxes', u'let-values', u'let/cc',
u'let/ec', u'letrec', u'letrec-syntax', u'letrec-syntaxes',
u'letrec-syntaxes+values', u'letrec-values', u'lib', u'link', u'local',
u'local-require', u'log-debug', u'log-error', u'log-fatal', u'log-info',
u'log-warning', u'match', u'match*', u'match*/derived', u'match-define',
u'match-define-values', u'match-lambda', u'match-lambda*',
u'match-lambda**', u'match-let', u'match-let*', u'match-let*-values',
u'match-let-values', u'match-letrec', u'match-letrec-values',
u'match/derived', u'match/values', u'member-name-key', u'mixin', u'module',
u'module*', u'module+', u'nand', u'new', u'nor', u'object-contract',
u'object/c', u'only', u'only-in', u'only-meta-in', u'open', u'opt/c', u'or',
u'overment', u'overment*', u'override', u'override*', u'override-final',
u'override-final*', u'parameterize', u'parameterize*',
u'parameterize-break', u'parametric->/c', u'place', u'place*',
u'place/context', u'planet', u'prefix', u'prefix-in', u'prefix-out',
u'private', u'private*', u'prompt-tag/c', u'protect-out', u'provide',
u'provide-signature-elements', u'provide/contract', u'public', u'public*',
u'public-final', u'public-final*', u'pubment', u'pubment*', u'quasiquote',
u'quasisyntax', u'quasisyntax/loc', u'quote', u'quote-syntax',
u'quote-syntax/prune', u'recontract-out', u'recursive-contract',
u'relative-in', u'rename', u'rename-in', u'rename-inner', u'rename-out',
u'rename-super', u'require', u'send', u'send*', u'send+', u'send-generic',
u'send/apply', u'send/keyword-apply', u'set!', u'set!-values',
u'set-field!', u'shared', u'stream', u'stream*', u'stream-cons', u'struct',
u'struct*', u'struct-copy', u'struct-field-index', u'struct-out',
u'struct/c', u'struct/ctc', u'struct/dc', u'submod', u'super',
u'super-instantiate', u'super-make-object', u'super-new', u'syntax',
u'syntax-case', u'syntax-case*', u'syntax-id-rules', u'syntax-rules',
u'syntax/loc', u'tag', u'this', u'this%', u'thunk', u'thunk*', u'time',
u'unconstrained-domain->', u'unit', u'unit-from-context', u'unit/c',
u'unit/new-import-export', u'unit/s', u'unless', u'unquote',
u'unquote-splicing', u'unsyntax', u'unsyntax-splicing', u'values/drop',
u'when', u'with-continuation-mark', u'with-contract',
u'with-contract-continuation-mark', u'with-handlers', u'with-handlers*',
u'with-method', u'with-syntax', u'λ'
)
# Generated by example.rkt
_builtins = (
u'*', u'*list/c', u'+', u'-', u'/', u'<', u'</c', u'<=', u'<=/c', u'=', u'=/c',
u'>', u'>/c', u'>=', u'>=/c', u'abort-current-continuation', u'abs',
u'absolute-path?', u'acos', u'add-between', u'add1', u'alarm-evt',
u'always-evt', u'and/c', u'andmap', u'angle', u'any/c', u'append', u'append*',
u'append-map', u'apply', u'argmax', u'argmin', u'arithmetic-shift',
u'arity-at-least', u'arity-at-least-value', u'arity-at-least?',
u'arity-checking-wrapper', u'arity-includes?', u'arity=?',
u'arrow-contract-info', u'arrow-contract-info-accepts-arglist',
u'arrow-contract-info-chaperone-procedure',
u'arrow-contract-info-check-first-order', u'arrow-contract-info?',
u'asin', u'assf', u'assoc', u'assq', u'assv', u'atan',
u'bad-number-of-results', u'banner', u'base->-doms/c', u'base->-rngs/c',
u'base->?', u'between/c', u'bitwise-and', u'bitwise-bit-field',
u'bitwise-bit-set?', u'bitwise-ior', u'bitwise-not', u'bitwise-xor',
u'blame-add-car-context', u'blame-add-cdr-context', u'blame-add-context',
u'blame-add-missing-party', u'blame-add-nth-arg-context',
u'blame-add-range-context', u'blame-add-unknown-context',
u'blame-context', u'blame-contract', u'blame-fmt->-string',
u'blame-missing-party?', u'blame-negative', u'blame-original?',
u'blame-positive', u'blame-replace-negative', u'blame-source',
u'blame-swap', u'blame-swapped?', u'blame-update', u'blame-value',
u'blame?', u'boolean=?', u'boolean?', u'bound-identifier=?', u'box',
u'box-cas!', u'box-immutable', u'box-immutable/c', u'box/c', u'box?',
u'break-enabled', u'break-parameterization?', u'break-thread',
u'build-chaperone-contract-property', u'build-compound-type-name',
u'build-contract-property', u'build-flat-contract-property',
u'build-list', u'build-path', u'build-path/convention-type',
u'build-string', u'build-vector', u'byte-pregexp', u'byte-pregexp?',
u'byte-ready?', u'byte-regexp', u'byte-regexp?', u'byte?', u'bytes',
u'bytes->immutable-bytes', u'bytes->list', u'bytes->path',
u'bytes->path-element', u'bytes->string/latin-1', u'bytes->string/locale',
u'bytes->string/utf-8', u'bytes-append', u'bytes-append*',
u'bytes-close-converter', u'bytes-convert', u'bytes-convert-end',
u'bytes-converter?', u'bytes-copy', u'bytes-copy!',
u'bytes-environment-variable-name?', u'bytes-fill!', u'bytes-join',
u'bytes-length', u'bytes-no-nuls?', u'bytes-open-converter', u'bytes-ref',
u'bytes-set!', u'bytes-utf-8-index', u'bytes-utf-8-length',
u'bytes-utf-8-ref', u'bytes<?', u'bytes=?', u'bytes>?', u'bytes?', u'caaaar',
u'caaadr', u'caaar', u'caadar', u'caaddr', u'caadr', u'caar', u'cadaar',
u'cadadr', u'cadar', u'caddar', u'cadddr', u'caddr', u'cadr',
u'call-in-nested-thread', u'call-with-atomic-output-file',
u'call-with-break-parameterization',
u'call-with-composable-continuation', u'call-with-continuation-barrier',
u'call-with-continuation-prompt', u'call-with-current-continuation',
u'call-with-default-reading-parameterization',
u'call-with-escape-continuation', u'call-with-exception-handler',
u'call-with-file-lock/timeout', u'call-with-immediate-continuation-mark',
u'call-with-input-bytes', u'call-with-input-file',
u'call-with-input-file*', u'call-with-input-string',
u'call-with-output-bytes', u'call-with-output-file',
u'call-with-output-file*', u'call-with-output-string',
u'call-with-parameterization', u'call-with-semaphore',
u'call-with-semaphore/enable-break', u'call-with-values', u'call/cc',
u'call/ec', u'car', u'cartesian-product', u'cdaaar', u'cdaadr', u'cdaar',
u'cdadar', u'cdaddr', u'cdadr', u'cdar', u'cddaar', u'cddadr', u'cddar',
u'cdddar', u'cddddr', u'cdddr', u'cddr', u'cdr', u'ceiling', u'channel-get',
u'channel-put', u'channel-put-evt', u'channel-put-evt?',
u'channel-try-get', u'channel/c', u'channel?', u'chaperone-box',
u'chaperone-channel', u'chaperone-continuation-mark-key',
u'chaperone-contract-property?', u'chaperone-contract?', u'chaperone-evt',
u'chaperone-hash', u'chaperone-hash-set', u'chaperone-of?',
u'chaperone-procedure', u'chaperone-procedure*', u'chaperone-prompt-tag',
u'chaperone-struct', u'chaperone-struct-type', u'chaperone-vector',
u'chaperone?', u'char->integer', u'char-alphabetic?', u'char-blank?',
u'char-ci<=?', u'char-ci<?', u'char-ci=?', u'char-ci>=?', u'char-ci>?',
u'char-downcase', u'char-foldcase', u'char-general-category',
u'char-graphic?', u'char-in', u'char-in/c', u'char-iso-control?',
u'char-lower-case?', u'char-numeric?', u'char-punctuation?',
u'char-ready?', u'char-symbolic?', u'char-title-case?', u'char-titlecase',
u'char-upcase', u'char-upper-case?', u'char-utf-8-length',
u'char-whitespace?', u'char<=?', u'char<?', u'char=?', u'char>=?', u'char>?',
u'char?', u'check-duplicate-identifier', u'check-duplicates',
u'checked-procedure-check-and-extract', u'choice-evt',
u'class->interface', u'class-info', u'class-seal', u'class-unseal',
u'class?', u'cleanse-path', u'close-input-port', u'close-output-port',
u'coerce-chaperone-contract', u'coerce-chaperone-contracts',
u'coerce-contract', u'coerce-contract/f', u'coerce-contracts',
u'coerce-flat-contract', u'coerce-flat-contracts', u'collect-garbage',
u'collection-file-path', u'collection-path', u'combinations', u'compile',
u'compile-allow-set!-undefined', u'compile-context-preservation-enabled',
u'compile-enforce-module-constants', u'compile-syntax',
u'compiled-expression-recompile', u'compiled-expression?',
u'compiled-module-expression?', u'complete-path?', u'complex?', u'compose',
u'compose1', u'conjoin', u'conjugate', u'cons', u'cons/c', u'cons?', u'const',
u'continuation-mark-key/c', u'continuation-mark-key?',
u'continuation-mark-set->context', u'continuation-mark-set->list',
u'continuation-mark-set->list*', u'continuation-mark-set-first',
u'continuation-mark-set?', u'continuation-marks',
u'continuation-prompt-available?', u'continuation-prompt-tag?',
u'continuation?', u'contract-continuation-mark-key',
u'contract-custom-write-property-proc', u'contract-exercise',
u'contract-first-order', u'contract-first-order-passes?',
u'contract-late-neg-projection', u'contract-name', u'contract-proc',
u'contract-projection', u'contract-property?',
u'contract-random-generate', u'contract-random-generate-fail',
u'contract-random-generate-fail?',
u'contract-random-generate-get-current-environment',
u'contract-random-generate-stash', u'contract-random-generate/choose',
u'contract-stronger?', u'contract-struct-exercise',
u'contract-struct-generate', u'contract-struct-late-neg-projection',
u'contract-struct-list-contract?', u'contract-val-first-projection',
u'contract?', u'convert-stream', u'copy-directory/files', u'copy-file',
u'copy-port', u'cos', u'cosh', u'count', u'current-blame-format',
u'current-break-parameterization', u'current-code-inspector',
u'current-command-line-arguments', u'current-compile',
u'current-compiled-file-roots', u'current-continuation-marks',
u'current-contract-region', u'current-custodian', u'current-directory',
u'current-directory-for-user', u'current-drive',
u'current-environment-variables', u'current-error-port', u'current-eval',
u'current-evt-pseudo-random-generator',
u'current-force-delete-permissions', u'current-future',
u'current-gc-milliseconds', u'current-get-interaction-input-port',
u'current-inexact-milliseconds', u'current-input-port',
u'current-inspector', u'current-library-collection-links',
u'current-library-collection-paths', u'current-load',
u'current-load-extension', u'current-load-relative-directory',
u'current-load/use-compiled', u'current-locale', u'current-logger',
u'current-memory-use', u'current-milliseconds',
u'current-module-declare-name', u'current-module-declare-source',
u'current-module-name-resolver', u'current-module-path-for-load',
u'current-namespace', u'current-output-port', u'current-parameterization',
u'current-plumber', u'current-preserved-thread-cell-values',
u'current-print', u'current-process-milliseconds', u'current-prompt-read',
u'current-pseudo-random-generator', u'current-read-interaction',
u'current-reader-guard', u'current-readtable', u'current-seconds',
u'current-security-guard', u'current-subprocess-custodian-mode',
u'current-thread', u'current-thread-group',
u'current-thread-initial-stack-size',
u'current-write-relative-directory', u'curry', u'curryr',
u'custodian-box-value', u'custodian-box?', u'custodian-limit-memory',
u'custodian-managed-list', u'custodian-memory-accounting-available?',
u'custodian-require-memory', u'custodian-shutdown-all', u'custodian?',
u'custom-print-quotable-accessor', u'custom-print-quotable?',
u'custom-write-accessor', u'custom-write-property-proc', u'custom-write?',
u'date', u'date*', u'date*-nanosecond', u'date*-time-zone-name', u'date*?',
u'date-day', u'date-dst?', u'date-hour', u'date-minute', u'date-month',
u'date-second', u'date-time-zone-offset', u'date-week-day', u'date-year',
u'date-year-day', u'date?', u'datum->syntax', u'datum-intern-literal',
u'default-continuation-prompt-tag', u'degrees->radians',
u'delete-directory', u'delete-directory/files', u'delete-file',
u'denominator', u'dict->list', u'dict-can-functional-set?',
u'dict-can-remove-keys?', u'dict-clear', u'dict-clear!', u'dict-copy',
u'dict-count', u'dict-empty?', u'dict-for-each', u'dict-has-key?',
u'dict-implements/c', u'dict-implements?', u'dict-iter-contract',
u'dict-iterate-first', u'dict-iterate-key', u'dict-iterate-next',
u'dict-iterate-value', u'dict-key-contract', u'dict-keys', u'dict-map',
u'dict-mutable?', u'dict-ref', u'dict-ref!', u'dict-remove',
u'dict-remove!', u'dict-set', u'dict-set!', u'dict-set*', u'dict-set*!',
u'dict-update', u'dict-update!', u'dict-value-contract', u'dict-values',
u'dict?', u'directory-exists?', u'directory-list', u'disjoin', u'display',
u'display-lines', u'display-lines-to-file', u'display-to-file',
u'displayln', u'double-flonum?', u'drop', u'drop-common-prefix',
u'drop-right', u'dropf', u'dropf-right', u'dump-memory-stats',
u'dup-input-port', u'dup-output-port', u'dynamic->*', u'dynamic-get-field',
u'dynamic-object/c', u'dynamic-place', u'dynamic-place*',
u'dynamic-require', u'dynamic-require-for-syntax', u'dynamic-send',
u'dynamic-set-field!', u'dynamic-wind', u'eighth', u'empty',
u'empty-sequence', u'empty-stream', u'empty?',
u'environment-variables-copy', u'environment-variables-names',
u'environment-variables-ref', u'environment-variables-set!',
u'environment-variables?', u'eof', u'eof-evt', u'eof-object?',
u'ephemeron-value', u'ephemeron?', u'eprintf', u'eq-contract-val',
u'eq-contract?', u'eq-hash-code', u'eq?', u'equal-contract-val',
u'equal-contract?', u'equal-hash-code', u'equal-secondary-hash-code',
u'equal<%>', u'equal?', u'equal?/recur', u'eqv-hash-code', u'eqv?', u'error',
u'error-display-handler', u'error-escape-handler',
u'error-print-context-length', u'error-print-source-location',
u'error-print-width', u'error-value->string-handler', u'eval',
u'eval-jit-enabled', u'eval-syntax', u'even?', u'evt/c', u'evt?',
u'exact->inexact', u'exact-ceiling', u'exact-floor', u'exact-integer?',
u'exact-nonnegative-integer?', u'exact-positive-integer?', u'exact-round',
u'exact-truncate', u'exact?', u'executable-yield-handler', u'exit',
u'exit-handler', u'exn', u'exn-continuation-marks', u'exn-message',
u'exn:break', u'exn:break-continuation', u'exn:break:hang-up',
u'exn:break:hang-up?', u'exn:break:terminate', u'exn:break:terminate?',
u'exn:break?', u'exn:fail', u'exn:fail:contract',
u'exn:fail:contract:arity', u'exn:fail:contract:arity?',
u'exn:fail:contract:blame', u'exn:fail:contract:blame-object',
u'exn:fail:contract:blame?', u'exn:fail:contract:continuation',
u'exn:fail:contract:continuation?', u'exn:fail:contract:divide-by-zero',
u'exn:fail:contract:divide-by-zero?',
u'exn:fail:contract:non-fixnum-result',
u'exn:fail:contract:non-fixnum-result?', u'exn:fail:contract:variable',
u'exn:fail:contract:variable-id', u'exn:fail:contract:variable?',
u'exn:fail:contract?', u'exn:fail:filesystem',
u'exn:fail:filesystem:errno', u'exn:fail:filesystem:errno-errno',
u'exn:fail:filesystem:errno?', u'exn:fail:filesystem:exists',
u'exn:fail:filesystem:exists?', u'exn:fail:filesystem:missing-module',
u'exn:fail:filesystem:missing-module-path',
u'exn:fail:filesystem:missing-module?', u'exn:fail:filesystem:version',
u'exn:fail:filesystem:version?', u'exn:fail:filesystem?',
u'exn:fail:network', u'exn:fail:network:errno',
u'exn:fail:network:errno-errno', u'exn:fail:network:errno?',
u'exn:fail:network?', u'exn:fail:object', u'exn:fail:object?',
u'exn:fail:out-of-memory', u'exn:fail:out-of-memory?', u'exn:fail:read',
u'exn:fail:read-srclocs', u'exn:fail:read:eof', u'exn:fail:read:eof?',
u'exn:fail:read:non-char', u'exn:fail:read:non-char?', u'exn:fail:read?',
u'exn:fail:syntax', u'exn:fail:syntax-exprs',
u'exn:fail:syntax:missing-module',
u'exn:fail:syntax:missing-module-path',
u'exn:fail:syntax:missing-module?', u'exn:fail:syntax:unbound',
u'exn:fail:syntax:unbound?', u'exn:fail:syntax?', u'exn:fail:unsupported',
u'exn:fail:unsupported?', u'exn:fail:user', u'exn:fail:user?',
u'exn:fail?', u'exn:misc:match?', u'exn:missing-module-accessor',
u'exn:missing-module?', u'exn:srclocs-accessor', u'exn:srclocs?', u'exn?',
u'exp', u'expand', u'expand-once', u'expand-syntax', u'expand-syntax-once',
u'expand-syntax-to-top-form', u'expand-to-top-form', u'expand-user-path',
u'explode-path', u'expt', u'externalizable<%>', u'failure-result/c',
u'false?', u'field-names', u'fifth', u'file->bytes', u'file->bytes-lines',
u'file->lines', u'file->list', u'file->string', u'file->value',
u'file-exists?', u'file-name-from-path', u'file-or-directory-identity',
u'file-or-directory-modify-seconds', u'file-or-directory-permissions',
u'file-position', u'file-position*', u'file-size',
u'file-stream-buffer-mode', u'file-stream-port?', u'file-truncate',
u'filename-extension', u'filesystem-change-evt',
u'filesystem-change-evt-cancel', u'filesystem-change-evt?',
u'filesystem-root-list', u'filter', u'filter-map', u'filter-not',
u'filter-read-input-port', u'find-executable-path', u'find-files',
u'find-library-collection-links', u'find-library-collection-paths',
u'find-relative-path', u'find-system-path', u'findf', u'first',
u'first-or/c', u'fixnum?', u'flat-contract', u'flat-contract-predicate',
u'flat-contract-property?', u'flat-contract?', u'flat-named-contract',
u'flatten', u'floating-point-bytes->real', u'flonum?', u'floor',
u'flush-output', u'fold-files', u'foldl', u'foldr', u'for-each', u'force',
u'format', u'fourth', u'fprintf', u'free-identifier=?',
u'free-label-identifier=?', u'free-template-identifier=?',
u'free-transformer-identifier=?', u'fsemaphore-count', u'fsemaphore-post',
u'fsemaphore-try-wait?', u'fsemaphore-wait', u'fsemaphore?', u'future',
u'future?', u'futures-enabled?', u'gcd', u'generate-member-key',
u'generate-temporaries', u'generic-set?', u'generic?', u'gensym',
u'get-output-bytes', u'get-output-string', u'get-preference',
u'get/build-late-neg-projection', u'get/build-val-first-projection',
u'getenv', u'global-port-print-handler', u'group-by', u'group-execute-bit',
u'group-read-bit', u'group-write-bit', u'guard-evt', u'handle-evt',
u'handle-evt?', u'has-blame?', u'has-contract?', u'hash', u'hash->list',
u'hash-clear', u'hash-clear!', u'hash-copy', u'hash-copy-clear',
u'hash-count', u'hash-empty?', u'hash-eq?', u'hash-equal?', u'hash-eqv?',
u'hash-for-each', u'hash-has-key?', u'hash-iterate-first',
u'hash-iterate-key', u'hash-iterate-key+value', u'hash-iterate-next',
u'hash-iterate-pair', u'hash-iterate-value', u'hash-keys', u'hash-map',
u'hash-placeholder?', u'hash-ref', u'hash-ref!', u'hash-remove',
u'hash-remove!', u'hash-set', u'hash-set!', u'hash-set*', u'hash-set*!',
u'hash-update', u'hash-update!', u'hash-values', u'hash-weak?', u'hash/c',
u'hash?', u'hasheq', u'hasheqv', u'identifier-binding',
u'identifier-binding-symbol', u'identifier-label-binding',
u'identifier-prune-lexical-context',
u'identifier-prune-to-source-module',
u'identifier-remove-from-definition-context',
u'identifier-template-binding', u'identifier-transformer-binding',
u'identifier?', u'identity', u'if/c', u'imag-part', u'immutable?',
u'impersonate-box', u'impersonate-channel',
u'impersonate-continuation-mark-key', u'impersonate-hash',
u'impersonate-hash-set', u'impersonate-procedure',
u'impersonate-procedure*', u'impersonate-prompt-tag',
u'impersonate-struct', u'impersonate-vector', u'impersonator-contract?',
u'impersonator-ephemeron', u'impersonator-of?',
u'impersonator-prop:application-mark', u'impersonator-prop:blame',
u'impersonator-prop:contracted',
u'impersonator-property-accessor-procedure?', u'impersonator-property?',
u'impersonator?', u'implementation?', u'implementation?/c', u'in-bytes',
u'in-bytes-lines', u'in-combinations', u'in-cycle', u'in-dict',
u'in-dict-keys', u'in-dict-pairs', u'in-dict-values', u'in-directory',
u'in-hash', u'in-hash-keys', u'in-hash-pairs', u'in-hash-values',
u'in-immutable-hash', u'in-immutable-hash-keys',
u'in-immutable-hash-pairs', u'in-immutable-hash-values',
u'in-immutable-set', u'in-indexed', u'in-input-port-bytes',
u'in-input-port-chars', u'in-lines', u'in-list', u'in-mlist',
u'in-mutable-hash', u'in-mutable-hash-keys', u'in-mutable-hash-pairs',
u'in-mutable-hash-values', u'in-mutable-set', u'in-naturals',
u'in-parallel', u'in-permutations', u'in-port', u'in-producer', u'in-range',
u'in-sequences', u'in-set', u'in-slice', u'in-stream', u'in-string',
u'in-syntax', u'in-value', u'in-values*-sequence', u'in-values-sequence',
u'in-vector', u'in-weak-hash', u'in-weak-hash-keys', u'in-weak-hash-pairs',
u'in-weak-hash-values', u'in-weak-set', u'inexact->exact',
u'inexact-real?', u'inexact?', u'infinite?', u'input-port-append',
u'input-port?', u'inspector?', u'instanceof/c', u'integer->char',
u'integer->integer-bytes', u'integer-bytes->integer', u'integer-in',
u'integer-length', u'integer-sqrt', u'integer-sqrt/remainder', u'integer?',
u'interface->method-names', u'interface-extension?', u'interface?',
u'internal-definition-context-binding-identifiers',
u'internal-definition-context-introduce',
u'internal-definition-context-seal', u'internal-definition-context?',
u'is-a?', u'is-a?/c', u'keyword->string', u'keyword-apply', u'keyword<?',
u'keyword?', u'keywords-match', u'kill-thread', u'last', u'last-pair',
u'lcm', u'length', u'liberal-define-context?', u'link-exists?', u'list',
u'list*', u'list*of', u'list->bytes', u'list->mutable-set',
u'list->mutable-seteq', u'list->mutable-seteqv', u'list->set',
u'list->seteq', u'list->seteqv', u'list->string', u'list->vector',
u'list->weak-set', u'list->weak-seteq', u'list->weak-seteqv',
u'list-contract?', u'list-prefix?', u'list-ref', u'list-set', u'list-tail',
u'list-update', u'list/c', u'list?', u'listen-port-number?', u'listof',
u'load', u'load-extension', u'load-on-demand-enabled', u'load-relative',
u'load-relative-extension', u'load/cd', u'load/use-compiled',
u'local-expand', u'local-expand/capture-lifts',
u'local-transformer-expand', u'local-transformer-expand/capture-lifts',
u'locale-string-encoding', u'log', u'log-all-levels', u'log-level-evt',
u'log-level?', u'log-max-level', u'log-message', u'log-receiver?',
u'logger-name', u'logger?', u'magnitude', u'make-arity-at-least',
u'make-base-empty-namespace', u'make-base-namespace', u'make-bytes',
u'make-channel', u'make-chaperone-contract',
u'make-continuation-mark-key', u'make-continuation-prompt-tag',
u'make-contract', u'make-custodian', u'make-custodian-box',
u'make-custom-hash', u'make-custom-hash-types', u'make-custom-set',
u'make-custom-set-types', u'make-date', u'make-date*',
u'make-derived-parameter', u'make-directory', u'make-directory*',
u'make-do-sequence', u'make-empty-namespace',
u'make-environment-variables', u'make-ephemeron', u'make-exn',
u'make-exn:break', u'make-exn:break:hang-up', u'make-exn:break:terminate',
u'make-exn:fail', u'make-exn:fail:contract',
u'make-exn:fail:contract:arity', u'make-exn:fail:contract:blame',
u'make-exn:fail:contract:continuation',
u'make-exn:fail:contract:divide-by-zero',
u'make-exn:fail:contract:non-fixnum-result',
u'make-exn:fail:contract:variable', u'make-exn:fail:filesystem',
u'make-exn:fail:filesystem:errno', u'make-exn:fail:filesystem:exists',
u'make-exn:fail:filesystem:missing-module',
u'make-exn:fail:filesystem:version', u'make-exn:fail:network',
u'make-exn:fail:network:errno', u'make-exn:fail:object',
u'make-exn:fail:out-of-memory', u'make-exn:fail:read',
u'make-exn:fail:read:eof', u'make-exn:fail:read:non-char',
u'make-exn:fail:syntax', u'make-exn:fail:syntax:missing-module',
u'make-exn:fail:syntax:unbound', u'make-exn:fail:unsupported',
u'make-exn:fail:user', u'make-file-or-directory-link',
u'make-flat-contract', u'make-fsemaphore', u'make-generic',
u'make-handle-get-preference-locked', u'make-hash',
u'make-hash-placeholder', u'make-hasheq', u'make-hasheq-placeholder',
u'make-hasheqv', u'make-hasheqv-placeholder',
u'make-immutable-custom-hash', u'make-immutable-hash',
u'make-immutable-hasheq', u'make-immutable-hasheqv',
u'make-impersonator-property', u'make-input-port',
u'make-input-port/read-to-peek', u'make-inspector',
u'make-keyword-procedure', u'make-known-char-range-list',
u'make-limited-input-port', u'make-list', u'make-lock-file-name',
u'make-log-receiver', u'make-logger', u'make-mixin-contract',
u'make-mutable-custom-set', u'make-none/c', u'make-object',
u'make-output-port', u'make-parameter', u'make-parent-directory*',
u'make-phantom-bytes', u'make-pipe', u'make-pipe-with-specials',
u'make-placeholder', u'make-plumber', u'make-polar', u'make-prefab-struct',
u'make-primitive-class', u'make-proj-contract',
u'make-pseudo-random-generator', u'make-reader-graph', u'make-readtable',
u'make-rectangular', u'make-rename-transformer',
u'make-resolved-module-path', u'make-security-guard', u'make-semaphore',
u'make-set!-transformer', u'make-shared-bytes', u'make-sibling-inspector',
u'make-special-comment', u'make-srcloc', u'make-string',
u'make-struct-field-accessor', u'make-struct-field-mutator',
u'make-struct-type', u'make-struct-type-property',
u'make-syntax-delta-introducer', u'make-syntax-introducer',
u'make-temporary-file', u'make-tentative-pretty-print-output-port',
u'make-thread-cell', u'make-thread-group', u'make-vector',
u'make-weak-box', u'make-weak-custom-hash', u'make-weak-custom-set',
u'make-weak-hash', u'make-weak-hasheq', u'make-weak-hasheqv',
u'make-will-executor', u'map', u'match-equality-test',
u'matches-arity-exactly?', u'max', u'mcar', u'mcdr', u'mcons', u'member',
u'member-name-key-hash-code', u'member-name-key=?', u'member-name-key?',
u'memf', u'memq', u'memv', u'merge-input', u'method-in-interface?', u'min',
u'mixin-contract', u'module->exports', u'module->imports',
u'module->language-info', u'module->namespace',
u'module-compiled-cross-phase-persistent?', u'module-compiled-exports',
u'module-compiled-imports', u'module-compiled-language-info',
u'module-compiled-name', u'module-compiled-submodules',
u'module-declared?', u'module-path-index-join',
u'module-path-index-resolve', u'module-path-index-split',
u'module-path-index-submodule', u'module-path-index?', u'module-path?',
u'module-predefined?', u'module-provide-protected?', u'modulo', u'mpair?',
u'mutable-set', u'mutable-seteq', u'mutable-seteqv', u'n->th',
u'nack-guard-evt', u'namespace-anchor->empty-namespace',
u'namespace-anchor->namespace', u'namespace-anchor?',
u'namespace-attach-module', u'namespace-attach-module-declaration',
u'namespace-base-phase', u'namespace-mapped-symbols',
u'namespace-module-identifier', u'namespace-module-registry',
u'namespace-require', u'namespace-require/constant',
u'namespace-require/copy', u'namespace-require/expansion-time',
u'namespace-set-variable-value!', u'namespace-symbol->identifier',
u'namespace-syntax-introduce', u'namespace-undefine-variable!',
u'namespace-unprotect-module', u'namespace-variable-value', u'namespace?',
u'nan?', u'natural-number/c', u'negate', u'negative?', u'never-evt',
u'new-∀/c', u'new-∃/c', u'newline', u'ninth', u'non-empty-listof',
u'non-empty-string?', u'none/c', u'normal-case-path', u'normalize-arity',
u'normalize-path', u'normalized-arity?', u'not', u'not/c', u'null', u'null?',
u'number->string', u'number?', u'numerator', u'object%', u'object->vector',
u'object-info', u'object-interface', u'object-method-arity-includes?',
u'object-name', u'object-or-false=?', u'object=?', u'object?', u'odd?',
u'one-of/c', u'open-input-bytes', u'open-input-file',
u'open-input-output-file', u'open-input-string', u'open-output-bytes',
u'open-output-file', u'open-output-nowhere', u'open-output-string',
u'or/c', u'order-of-magnitude', u'ormap', u'other-execute-bit',
u'other-read-bit', u'other-write-bit', u'output-port?', u'pair?',
u'parameter-procedure=?', u'parameter/c', u'parameter?',
u'parameterization?', u'parse-command-line', u'partition', u'path->bytes',
u'path->complete-path', u'path->directory-path', u'path->string',
u'path-add-suffix', u'path-convention-type', u'path-element->bytes',
u'path-element->string', u'path-element?', u'path-for-some-system?',
u'path-list-string->path-list', u'path-only', u'path-replace-suffix',
u'path-string?', u'path<?', u'path?', u'pathlist-closure', u'peek-byte',
u'peek-byte-or-special', u'peek-bytes', u'peek-bytes!', u'peek-bytes!-evt',
u'peek-bytes-avail!', u'peek-bytes-avail!*', u'peek-bytes-avail!-evt',
u'peek-bytes-avail!/enable-break', u'peek-bytes-evt', u'peek-char',
u'peek-char-or-special', u'peek-string', u'peek-string!',
u'peek-string!-evt', u'peek-string-evt', u'peeking-input-port',
u'permutations', u'phantom-bytes?', u'pi', u'pi.f', u'pipe-content-length',
u'place-break', u'place-channel', u'place-channel-get',
u'place-channel-put', u'place-channel-put/get', u'place-channel?',
u'place-dead-evt', u'place-enabled?', u'place-kill', u'place-location?',
u'place-message-allowed?', u'place-sleep', u'place-wait', u'place?',
u'placeholder-get', u'placeholder-set!', u'placeholder?',
u'plumber-add-flush!', u'plumber-flush-all',
u'plumber-flush-handle-remove!', u'plumber-flush-handle?', u'plumber?',
u'poll-guard-evt', u'port->bytes', u'port->bytes-lines', u'port->lines',
u'port->list', u'port->string', u'port-closed-evt', u'port-closed?',
u'port-commit-peeked', u'port-count-lines!', u'port-count-lines-enabled',
u'port-counts-lines?', u'port-display-handler', u'port-file-identity',
u'port-file-unlock', u'port-next-location', u'port-number?',
u'port-print-handler', u'port-progress-evt',
u'port-provides-progress-evts?', u'port-read-handler',
u'port-try-file-lock?', u'port-write-handler', u'port-writes-atomic?',
u'port-writes-special?', u'port?', u'positive?', u'predicate/c',
u'prefab-key->struct-type', u'prefab-key?', u'prefab-struct-key',
u'preferences-lock-file-mode', u'pregexp', u'pregexp?', u'pretty-display',
u'pretty-format', u'pretty-print', u'pretty-print-.-symbol-without-bars',
u'pretty-print-abbreviate-read-macros', u'pretty-print-columns',
u'pretty-print-current-style-table', u'pretty-print-depth',
u'pretty-print-exact-as-decimal', u'pretty-print-extend-style-table',
u'pretty-print-handler', u'pretty-print-newline',
u'pretty-print-post-print-hook', u'pretty-print-pre-print-hook',
u'pretty-print-print-hook', u'pretty-print-print-line',
u'pretty-print-remap-stylable', u'pretty-print-show-inexactness',
u'pretty-print-size-hook', u'pretty-print-style-table?',
u'pretty-printing', u'pretty-write', u'primitive-closure?',
u'primitive-result-arity', u'primitive?', u'print', u'print-as-expression',
u'print-boolean-long-form', u'print-box', u'print-graph',
u'print-hash-table', u'print-mpair-curly-braces',
u'print-pair-curly-braces', u'print-reader-abbreviations',
u'print-struct', u'print-syntax-width', u'print-unreadable',
u'print-vector-length', u'printable/c', u'printable<%>', u'printf',
u'println', u'procedure->method', u'procedure-arity',
u'procedure-arity-includes/c', u'procedure-arity-includes?',
u'procedure-arity?', u'procedure-closure-contents-eq?',
u'procedure-extract-target', u'procedure-keywords',
u'procedure-reduce-arity', u'procedure-reduce-keyword-arity',
u'procedure-rename', u'procedure-result-arity', u'procedure-specialize',
u'procedure-struct-type?', u'procedure?', u'process', u'process*',
u'process*/ports', u'process/ports', u'processor-count', u'progress-evt?',
u'promise-forced?', u'promise-running?', u'promise/c', u'promise/name?',
u'promise?', u'prop:arity-string', u'prop:arrow-contract',
u'prop:arrow-contract-get-info', u'prop:arrow-contract?', u'prop:blame',
u'prop:chaperone-contract', u'prop:checked-procedure', u'prop:contract',
u'prop:contracted', u'prop:custom-print-quotable', u'prop:custom-write',
u'prop:dict', u'prop:dict/contract', u'prop:equal+hash', u'prop:evt',
u'prop:exn:missing-module', u'prop:exn:srclocs',
u'prop:expansion-contexts', u'prop:flat-contract',
u'prop:impersonator-of', u'prop:input-port',
u'prop:liberal-define-context', u'prop:object-name',
u'prop:opt-chaperone-contract', u'prop:opt-chaperone-contract-get-test',
u'prop:opt-chaperone-contract?', u'prop:orc-contract',
u'prop:orc-contract-get-subcontracts', u'prop:orc-contract?',
u'prop:output-port', u'prop:place-location', u'prop:procedure',
u'prop:recursive-contract', u'prop:recursive-contract-unroll',
u'prop:recursive-contract?', u'prop:rename-transformer', u'prop:sequence',
u'prop:set!-transformer', u'prop:stream', u'proper-subset?',
u'pseudo-random-generator->vector', u'pseudo-random-generator-vector?',
u'pseudo-random-generator?', u'put-preferences', u'putenv', u'quotient',
u'quotient/remainder', u'radians->degrees', u'raise',
u'raise-argument-error', u'raise-arguments-error', u'raise-arity-error',
u'raise-blame-error', u'raise-contract-error', u'raise-mismatch-error',
u'raise-not-cons-blame-error', u'raise-range-error',
u'raise-result-error', u'raise-syntax-error', u'raise-type-error',
u'raise-user-error', u'random', u'random-seed', u'range', u'rational?',
u'rationalize', u'read', u'read-accept-bar-quote', u'read-accept-box',
u'read-accept-compiled', u'read-accept-dot', u'read-accept-graph',
u'read-accept-infix-dot', u'read-accept-lang', u'read-accept-quasiquote',
u'read-accept-reader', u'read-byte', u'read-byte-or-special',
u'read-bytes', u'read-bytes!', u'read-bytes!-evt', u'read-bytes-avail!',
u'read-bytes-avail!*', u'read-bytes-avail!-evt',
u'read-bytes-avail!/enable-break', u'read-bytes-evt', u'read-bytes-line',
u'read-bytes-line-evt', u'read-case-sensitive', u'read-cdot', u'read-char',
u'read-char-or-special', u'read-curly-brace-as-paren',
u'read-curly-brace-with-tag', u'read-decimal-as-inexact',
u'read-eval-print-loop', u'read-language', u'read-line', u'read-line-evt',
u'read-on-demand-source', u'read-square-bracket-as-paren',
u'read-square-bracket-with-tag', u'read-string', u'read-string!',
u'read-string!-evt', u'read-string-evt', u'read-syntax',
u'read-syntax/recursive', u'read/recursive', u'readtable-mapping',
u'readtable?', u'real->decimal-string', u'real->double-flonum',
u'real->floating-point-bytes', u'real->single-flonum', u'real-in',
u'real-part', u'real?', u'reencode-input-port', u'reencode-output-port',
u'regexp', u'regexp-match', u'regexp-match*', u'regexp-match-evt',
u'regexp-match-exact?', u'regexp-match-peek',
u'regexp-match-peek-immediate', u'regexp-match-peek-positions',
u'regexp-match-peek-positions*',
u'regexp-match-peek-positions-immediate',
u'regexp-match-peek-positions-immediate/end',
u'regexp-match-peek-positions/end', u'regexp-match-positions',
u'regexp-match-positions*', u'regexp-match-positions/end',
u'regexp-match/end', u'regexp-match?', u'regexp-max-lookbehind',
u'regexp-quote', u'regexp-replace', u'regexp-replace*',
u'regexp-replace-quote', u'regexp-replaces', u'regexp-split',
u'regexp-try-match', u'regexp?', u'relative-path?', u'relocate-input-port',
u'relocate-output-port', u'remainder', u'remf', u'remf*', u'remove',
u'remove*', u'remove-duplicates', u'remq', u'remq*', u'remv', u'remv*',
u'rename-contract', u'rename-file-or-directory',
u'rename-transformer-target', u'rename-transformer?', u'replace-evt',
u'reroot-path', u'resolve-path', u'resolved-module-path-name',
u'resolved-module-path?', u'rest', u'reverse', u'round', u'second',
u'seconds->date', u'security-guard?', u'semaphore-peek-evt',
u'semaphore-peek-evt?', u'semaphore-post', u'semaphore-try-wait?',
u'semaphore-wait', u'semaphore-wait/enable-break', u'semaphore?',
u'sequence->list', u'sequence->stream', u'sequence-add-between',
u'sequence-andmap', u'sequence-append', u'sequence-count',
u'sequence-filter', u'sequence-fold', u'sequence-for-each',
u'sequence-generate', u'sequence-generate*', u'sequence-length',
u'sequence-map', u'sequence-ormap', u'sequence-ref', u'sequence-tail',
u'sequence/c', u'sequence?', u'set', u'set!-transformer-procedure',
u'set!-transformer?', u'set->list', u'set->stream', u'set-add', u'set-add!',
u'set-box!', u'set-clear', u'set-clear!', u'set-copy', u'set-copy-clear',
u'set-count', u'set-empty?', u'set-eq?', u'set-equal?', u'set-eqv?',
u'set-first', u'set-for-each', u'set-implements/c', u'set-implements?',
u'set-intersect', u'set-intersect!', u'set-map', u'set-mcar!', u'set-mcdr!',
u'set-member?', u'set-mutable?', u'set-phantom-bytes!',
u'set-port-next-location!', u'set-remove', u'set-remove!', u'set-rest',
u'set-some-basic-contracts!', u'set-subtract', u'set-subtract!',
u'set-symmetric-difference', u'set-symmetric-difference!', u'set-union',
u'set-union!', u'set-weak?', u'set/c', u'set=?', u'set?', u'seteq', u'seteqv',
u'seventh', u'sgn', u'shared-bytes', u'shell-execute', u'shrink-path-wrt',
u'shuffle', u'simple-form-path', u'simplify-path', u'sin',
u'single-flonum?', u'sinh', u'sixth', u'skip-projection-wrapper?', u'sleep',
u'some-system-path->string', u'sort', u'special-comment-value',
u'special-comment?', u'special-filter-input-port', u'split-at',
u'split-at-right', u'split-common-prefix', u'split-path', u'splitf-at',
u'splitf-at-right', u'sqr', u'sqrt', u'srcloc', u'srcloc->string',
u'srcloc-column', u'srcloc-line', u'srcloc-position', u'srcloc-source',
u'srcloc-span', u'srcloc?', u'stop-after', u'stop-before', u'stream->list',
u'stream-add-between', u'stream-andmap', u'stream-append', u'stream-count',
u'stream-empty?', u'stream-filter', u'stream-first', u'stream-fold',
u'stream-for-each', u'stream-length', u'stream-map', u'stream-ormap',
u'stream-ref', u'stream-rest', u'stream-tail', u'stream/c', u'stream?',
u'string', u'string->bytes/latin-1', u'string->bytes/locale',
u'string->bytes/utf-8', u'string->immutable-string', u'string->keyword',
u'string->list', u'string->number', u'string->path',
u'string->path-element', u'string->some-system-path', u'string->symbol',
u'string->uninterned-symbol', u'string->unreadable-symbol',
u'string-append', u'string-append*', u'string-ci<=?', u'string-ci<?',
u'string-ci=?', u'string-ci>=?', u'string-ci>?', u'string-contains?',
u'string-copy', u'string-copy!', u'string-downcase',
u'string-environment-variable-name?', u'string-fill!', u'string-foldcase',
u'string-join', u'string-len/c', u'string-length', u'string-locale-ci<?',
u'string-locale-ci=?', u'string-locale-ci>?', u'string-locale-downcase',
u'string-locale-upcase', u'string-locale<?', u'string-locale=?',
u'string-locale>?', u'string-no-nuls?', u'string-normalize-nfc',
u'string-normalize-nfd', u'string-normalize-nfkc',
u'string-normalize-nfkd', u'string-normalize-spaces', u'string-port?',
u'string-prefix?', u'string-ref', u'string-replace', u'string-set!',
u'string-split', u'string-suffix?', u'string-titlecase', u'string-trim',
u'string-upcase', u'string-utf-8-length', u'string<=?', u'string<?',
u'string=?', u'string>=?', u'string>?', u'string?', u'struct->vector',
u'struct-accessor-procedure?', u'struct-constructor-procedure?',
u'struct-info', u'struct-mutator-procedure?',
u'struct-predicate-procedure?', u'struct-type-info',
u'struct-type-make-constructor', u'struct-type-make-predicate',
u'struct-type-property-accessor-procedure?', u'struct-type-property/c',
u'struct-type-property?', u'struct-type?', u'struct:arity-at-least',
u'struct:arrow-contract-info', u'struct:date', u'struct:date*',
u'struct:exn', u'struct:exn:break', u'struct:exn:break:hang-up',
u'struct:exn:break:terminate', u'struct:exn:fail',
u'struct:exn:fail:contract', u'struct:exn:fail:contract:arity',
u'struct:exn:fail:contract:blame',
u'struct:exn:fail:contract:continuation',
u'struct:exn:fail:contract:divide-by-zero',
u'struct:exn:fail:contract:non-fixnum-result',
u'struct:exn:fail:contract:variable', u'struct:exn:fail:filesystem',
u'struct:exn:fail:filesystem:errno',
u'struct:exn:fail:filesystem:exists',
u'struct:exn:fail:filesystem:missing-module',
u'struct:exn:fail:filesystem:version', u'struct:exn:fail:network',
u'struct:exn:fail:network:errno', u'struct:exn:fail:object',
u'struct:exn:fail:out-of-memory', u'struct:exn:fail:read',
u'struct:exn:fail:read:eof', u'struct:exn:fail:read:non-char',
u'struct:exn:fail:syntax', u'struct:exn:fail:syntax:missing-module',
u'struct:exn:fail:syntax:unbound', u'struct:exn:fail:unsupported',
u'struct:exn:fail:user', u'struct:srcloc',
u'struct:wrapped-extra-arg-arrow', u'struct?', u'sub1', u'subbytes',
u'subclass?', u'subclass?/c', u'subprocess', u'subprocess-group-enabled',
u'subprocess-kill', u'subprocess-pid', u'subprocess-status',
u'subprocess-wait', u'subprocess?', u'subset?', u'substring', u'suggest/c',
u'symbol->string', u'symbol-interned?', u'symbol-unreadable?', u'symbol<?',
u'symbol=?', u'symbol?', u'symbols', u'sync', u'sync/enable-break',
u'sync/timeout', u'sync/timeout/enable-break', u'syntax->datum',
u'syntax->list', u'syntax-arm', u'syntax-column', u'syntax-debug-info',
u'syntax-disarm', u'syntax-e', u'syntax-line',
u'syntax-local-bind-syntaxes', u'syntax-local-certifier',
u'syntax-local-context', u'syntax-local-expand-expression',
u'syntax-local-get-shadower', u'syntax-local-identifier-as-binding',
u'syntax-local-introduce', u'syntax-local-lift-context',
u'syntax-local-lift-expression', u'syntax-local-lift-module',
u'syntax-local-lift-module-end-declaration',
u'syntax-local-lift-provide', u'syntax-local-lift-require',
u'syntax-local-lift-values-expression',
u'syntax-local-make-definition-context',
u'syntax-local-make-delta-introducer',
u'syntax-local-module-defined-identifiers',
u'syntax-local-module-exports',
u'syntax-local-module-required-identifiers', u'syntax-local-name',
u'syntax-local-phase-level', u'syntax-local-submodules',
u'syntax-local-transforming-module-provides?', u'syntax-local-value',
u'syntax-local-value/immediate', u'syntax-original?', u'syntax-position',
u'syntax-property', u'syntax-property-preserved?',
u'syntax-property-symbol-keys', u'syntax-protect', u'syntax-rearm',
u'syntax-recertify', u'syntax-shift-phase-level', u'syntax-source',
u'syntax-source-module', u'syntax-span', u'syntax-taint',
u'syntax-tainted?', u'syntax-track-origin',
u'syntax-transforming-module-expression?',
u'syntax-transforming-with-lifts?', u'syntax-transforming?', u'syntax/c',
u'syntax?', u'system', u'system*', u'system*/exit-code',
u'system-big-endian?', u'system-idle-evt', u'system-language+country',
u'system-library-subpath', u'system-path-convention-type', u'system-type',
u'system/exit-code', u'tail-marks-match?', u'take', u'take-common-prefix',
u'take-right', u'takef', u'takef-right', u'tan', u'tanh',
u'tcp-abandon-port', u'tcp-accept', u'tcp-accept-evt',
u'tcp-accept-ready?', u'tcp-accept/enable-break', u'tcp-addresses',
u'tcp-close', u'tcp-connect', u'tcp-connect/enable-break', u'tcp-listen',
u'tcp-listener?', u'tcp-port?', u'tentative-pretty-print-port-cancel',
u'tentative-pretty-print-port-transfer', u'tenth', u'terminal-port?',
u'the-unsupplied-arg', u'third', u'thread', u'thread-cell-ref',
u'thread-cell-set!', u'thread-cell-values?', u'thread-cell?',
u'thread-dead-evt', u'thread-dead?', u'thread-group?', u'thread-receive',
u'thread-receive-evt', u'thread-resume', u'thread-resume-evt',
u'thread-rewind-receive', u'thread-running?', u'thread-send',
u'thread-suspend', u'thread-suspend-evt', u'thread-try-receive',
u'thread-wait', u'thread/suspend-to-kill', u'thread?', u'time-apply',
u'touch', u'transplant-input-port', u'transplant-output-port', u'true',
u'truncate', u'udp-addresses', u'udp-bind!', u'udp-bound?', u'udp-close',
u'udp-connect!', u'udp-connected?', u'udp-multicast-interface',
u'udp-multicast-join-group!', u'udp-multicast-leave-group!',
u'udp-multicast-loopback?', u'udp-multicast-set-interface!',
u'udp-multicast-set-loopback!', u'udp-multicast-set-ttl!',
u'udp-multicast-ttl', u'udp-open-socket', u'udp-receive!',
u'udp-receive!*', u'udp-receive!-evt', u'udp-receive!/enable-break',
u'udp-receive-ready-evt', u'udp-send', u'udp-send*', u'udp-send-evt',
u'udp-send-ready-evt', u'udp-send-to', u'udp-send-to*', u'udp-send-to-evt',
u'udp-send-to/enable-break', u'udp-send/enable-break', u'udp?', u'unbox',
u'uncaught-exception-handler', u'unit?', u'unspecified-dom',
u'unsupplied-arg?', u'use-collection-link-paths',
u'use-compiled-file-paths', u'use-user-specific-search-paths',
u'user-execute-bit', u'user-read-bit', u'user-write-bit', u'value-blame',
u'value-contract', u'values', u'variable-reference->empty-namespace',
u'variable-reference->module-base-phase',
u'variable-reference->module-declaration-inspector',
u'variable-reference->module-path-index',
u'variable-reference->module-source', u'variable-reference->namespace',
u'variable-reference->phase',
u'variable-reference->resolved-module-path',
u'variable-reference-constant?', u'variable-reference?', u'vector',
u'vector->immutable-vector', u'vector->list',
u'vector->pseudo-random-generator', u'vector->pseudo-random-generator!',
u'vector->values', u'vector-append', u'vector-argmax', u'vector-argmin',
u'vector-copy', u'vector-copy!', u'vector-count', u'vector-drop',
u'vector-drop-right', u'vector-fill!', u'vector-filter',
u'vector-filter-not', u'vector-immutable', u'vector-immutable/c',
u'vector-immutableof', u'vector-length', u'vector-map', u'vector-map!',
u'vector-member', u'vector-memq', u'vector-memv', u'vector-ref',
u'vector-set!', u'vector-set*!', u'vector-set-performance-stats!',
u'vector-split-at', u'vector-split-at-right', u'vector-take',
u'vector-take-right', u'vector/c', u'vector?', u'vectorof', u'version',
u'void', u'void?', u'weak-box-value', u'weak-box?', u'weak-set',
u'weak-seteq', u'weak-seteqv', u'will-execute', u'will-executor?',
u'will-register', u'will-try-execute', u'with-input-from-bytes',
u'with-input-from-file', u'with-input-from-string',
u'with-output-to-bytes', u'with-output-to-file', u'with-output-to-string',
u'would-be-future', u'wrap-evt', u'wrapped-extra-arg-arrow',
u'wrapped-extra-arg-arrow-extra-neg-party-argument',
u'wrapped-extra-arg-arrow-real-func', u'wrapped-extra-arg-arrow?',
u'writable<%>', u'write', u'write-byte', u'write-bytes',
u'write-bytes-avail', u'write-bytes-avail*', u'write-bytes-avail-evt',
u'write-bytes-avail/enable-break', u'write-char', u'write-special',
u'write-special-avail*', u'write-special-evt', u'write-string',
u'write-to-file', u'writeln', u'xor', u'zero?', u'~.a', u'~.s', u'~.v', u'~a',
u'~e', u'~r', u'~s', u'~v'
)
_opening_parenthesis = r'[([{]'
_closing_parenthesis = r'[)\]}]'
_delimiters = r'()[\]{}",\'`;\s'
_symbol = r'(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters
_exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?'
_exponent = r'(?:[defls][-+]?\d+)'
_inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)'
_inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|'
r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes)
_inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes,
_exponent)
_inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent)
_inexact_special = r'(?:(?:inf|nan)\.[0f])'
_inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal,
_inexact_special)
_inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special)
tokens = {
'root': [
(_closing_parenthesis, Error),
(r'(?!\Z)', Text, 'unquoted-datum')
],
'datum': [
(r'(?s)#;|#*', Comment),
(u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single),
(r'#\|', Comment.Multiline, 'block-comment'),
# Whitespaces
(r'(?u)\s+', Text),
# Numbers: Keep in mind Racket reader hash prefixes, which
# can denote the base or the type. These don't map neatly
# onto Pygments token types; some judgment calls here.
# #d or no prefix
(r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters),
Number.Integer, '#pop'),
(r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' %
(_exact_decimal_prefix, _delimiters), Number.Float, '#pop'),
(r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' %
(_exact_decimal_prefix, _inexact_normal_no_hashes,
_inexact_normal_no_hashes, _inexact_normal_no_hashes,
_delimiters), Number, '#pop'),
# Inexact without explicit #i
(r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' %
(_inexact_real, _inexact_unsigned, _inexact_unsigned,
_inexact_real, _inexact_real, _delimiters), Number.Float,
'#pop'),
# The remaining extflonums
(r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' %
(_inexact_simple, _delimiters), Number.Float, '#pop'),
# #b
(r'(?iu)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'),
# #o
(r'(?iu)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'),
# #x
(r'(?iu)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'),
# #i is always inexact, i.e. float
(r'(?iu)(#d)?#i%s' % _symbol, Number.Float, '#pop'),
# Strings and characters
(r'#?"', String.Double, ('#pop', 'string')),
(r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'),
(r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'),
(r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'),
(r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'),
# Constants
(r'#(true|false|[tTfF])', Name.Constant, '#pop'),
# Keyword argument names (e.g. #:keyword)
(r'(?u)#:%s' % _symbol, Keyword.Declaration, '#pop'),
# Reader extensions
(r'(#lang |#!)(\S+)',
bygroups(Keyword.Namespace, Name.Namespace)),
(r'#reader', Keyword.Namespace, 'quoted-datum'),
# Other syntax
(r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" % _delimiters, Operator),
(r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis,
Operator, ('#pop', 'quoted-datum'))
],
'datum*': [
(r'`|,@?', Operator),
(_symbol, String.Symbol, '#pop'),
(r'[|\\]', Error),
default('#pop')
],
'list': [
(_closing_parenthesis, Punctuation, '#pop')
],
'unquoted-datum': [
include('datum'),
(r'quote(?=[%s])' % _delimiters, Keyword,
('#pop', 'quoted-datum')),
(r'`', Operator, ('#pop', 'quasiquoted-datum')),
(r'quasiquote(?=[%s])' % _delimiters, Keyword,
('#pop', 'quasiquoted-datum')),
(_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')),
(words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
Keyword, '#pop'),
(words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters),
Name.Builtin, '#pop'),
(_symbol, Name, '#pop'),
include('datum*')
],
'unquoted-list': [
include('list'),
(r'(?!\Z)', Text, 'unquoted-datum')
],
'quasiquoted-datum': [
include('datum'),
(r',@?', Operator, ('#pop', 'unquoted-datum')),
(r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword,
('#pop', 'unquoted-datum')),
(_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')),
include('datum*')
],
'quasiquoted-list': [
include('list'),
(r'(?!\Z)', Text, 'quasiquoted-datum')
],
'quoted-datum': [
include('datum'),
(_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')),
include('datum*')
],
'quoted-list': [
include('list'),
(r'(?!\Z)', Text, 'quoted-datum')
],
'block-comment': [
(r'#\|', Comment.Multiline, '#push'),
(r'\|#', Comment.Multiline, '#pop'),
(r'[^#|]+|.', Comment.Multiline)
],
'string': [
(r'"', String.Double, '#pop'),
(r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|'
r'U[\da-fA-F]{1,8}|.)', String.Escape),
(r'[^\\"]+', String.Double)
]
}
class NewLispLexer(RegexLexer):
    """
    For `newLISP <http://www.newlisp.org/>`_ source code (version 10.3.0).

    .. versionadded:: 1.5
    """
    name = 'NewLisp'
    aliases = ['newlisp']
    filenames = ['*.lsp', '*.nl', '*.kif']
    mimetypes = ['text/x-newlisp', 'application/x-newlisp']
    # Symbols are matched case-insensitively; UNICODE keeps \w wide.
    flags = re.IGNORECASE | re.MULTILINE | re.UNICODE
    # list of built-in functions for newLISP version 10.3
    builtins = (
        '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++',
        '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10',
        '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7',
        '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs',
        'acos', 'acosh', 'add', 'address', 'amb', 'and', 'append-file',
        'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin',
        'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec',
        'base64-enc', 'bayes-query', 'bayes-train', 'begin',
        'beta', 'betai', 'bind', 'binomial', 'bits', 'callback',
        'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean',
        'close', 'command-event', 'cond', 'cons', 'constant',
        'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count',
        'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry',
        'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec',
        'def-new', 'default', 'define-macro', 'define',
        'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device',
        'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while',
        'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup',
        'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event',
        'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand',
        'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter',
        'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt',
        'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln',
        'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string',
        'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc',
        'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert',
        'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error',
        'last', 'legal?', 'length', 'let', 'letex', 'letn',
        'list?', 'list', 'load', 'local', 'log', 'lookup',
        'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat',
        'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply',
        'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error',
        'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local',
        'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping',
        'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select',
        'net-send-to', 'net-send-udp', 'net-send', 'net-service',
        'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper',
        'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack',
        'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop',
        'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print',
        'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event',
        'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand',
        'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file',
        'read-key', 'read-line', 'read-utf8', 'reader-event',
        'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex',
        'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse',
        'rotate', 'round', 'save', 'search', 'seed', 'seek', 'select', 'self',
        'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all',
        'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent',
        'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt',
        'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?',
        'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term',
        'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case',
        'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?',
        'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until',
        'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while',
        'write', 'write-char', 'write-file', 'write-line',
        'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?',
    )
    # valid names: either a run of symbol characters or one or more
    # bracketed [ ... ] segments (newLISP's quoted-symbol syntax).
    valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+'
    tokens = {
        'root': [
            # shebang
            (r'#!(.*?)$', Comment.Preproc),
            # comments starting with semicolon
            (r';.*$', Comment.Single),
            # comments starting with #
            (r'#.*$', Comment.Single),
            # whitespace
            (r'\s+', Text),
            # strings, symbols and characters
            (r'"(\\\\|\\"|[^"])*"', String),
            # braces
            (r'\{', String, "bracestring"),
            # [text] ... [/text] delimited strings
            # NOTE(review): r'\[text\]*' matches '[text' followed by zero
            # or more ']' — presumably a literal '[text]' tag was intended;
            # confirm against upstream before changing, as output depends
            # on this pattern as written.
            (r'\[text\]*', String, "tagstring"),
            # 'special' operators...
            (r"('|:)", Operator),
            # highlight the builtins
            (words(builtins, suffix=r'\b'),
             Keyword),
            # the remaining functions
            (r'(?<=\()' + valid_name, Name.Variable),
            # the remaining variables
            (valid_name, String.Symbol),
            # parentheses
            (r'(\(|\))', Punctuation),
        ],
        # braced strings...
        'bracestring': [
            # { } pairs nest, hence #push/#pop.
            (r'\{', String, "#push"),
            (r'\}', String, "#pop"),
            ('[^{}]+', String),
        ],
        # tagged [text]...[/text] delimited strings...
        'tagstring': [
            # consume everything (including newlines) up to the closing tag
            (r'(?s)(.*?)(\[/text\])', String, '#pop'),
        ],
    }
class EmacsLispLexer(RegexLexer):
"""
An ELisp lexer, parsing a stream and outputting the tokens
needed to highlight elisp code.
.. versionadded:: 2.1
"""
name = 'EmacsLisp'
aliases = ['emacs', 'elisp', 'emacs-lisp']
filenames = ['*.el']
mimetypes = ['text/x-elisp', 'application/x-elisp']
flags = re.MULTILINE
# couple of useful regexes
# characters that are not macro-characters and can be used to begin a symbol
nonmacro = r'\\.|[\w!$%&*+-/<=>?@^{}~|]'
constituent = nonmacro + '|[#.:]'
terminated = r'(?=[ "()\]\'\n,;`])' # whitespace or terminating macro characters
# symbol token, reverse-engineered from hyperspec
# Take a deep breath...
symbol = r'((?:%s)(?:%s)*)' % (nonmacro, constituent)
macros = set((
'atomic-change-group', 'case', 'block', 'cl-block', 'cl-callf', 'cl-callf2',
'cl-case', 'cl-decf', 'cl-declaim', 'cl-declare',
'cl-define-compiler-macro', 'cl-defmacro', 'cl-defstruct',
'cl-defsubst', 'cl-deftype', 'cl-defun', 'cl-destructuring-bind',
'cl-do', 'cl-do*', 'cl-do-all-symbols', 'cl-do-symbols', 'cl-dolist',
'cl-dotimes', 'cl-ecase', 'cl-etypecase', 'eval-when', 'cl-eval-when', 'cl-flet',
'cl-flet*', 'cl-function', 'cl-incf', 'cl-labels', 'cl-letf',
'cl-letf*', 'cl-load-time-value', 'cl-locally', 'cl-loop',
'cl-macrolet', 'cl-multiple-value-bind', 'cl-multiple-value-setq',
'cl-progv', 'cl-psetf', 'cl-psetq', 'cl-pushnew', 'cl-remf',
'cl-return', 'cl-return-from', 'cl-rotatef', 'cl-shiftf',
'cl-symbol-macrolet', 'cl-tagbody', 'cl-the', 'cl-typecase',
'combine-after-change-calls', 'condition-case-unless-debug', 'decf',
'declaim', 'declare', 'declare-function', 'def-edebug-spec',
'defadvice', 'defclass', 'defcustom', 'defface', 'defgeneric',
'defgroup', 'define-advice', 'define-alternatives',
'define-compiler-macro', 'define-derived-mode', 'define-generic-mode',
'define-global-minor-mode', 'define-globalized-minor-mode',
'define-minor-mode', 'define-modify-macro',
'define-obsolete-face-alias', 'define-obsolete-function-alias',
'define-obsolete-variable-alias', 'define-setf-expander',
'define-skeleton', 'defmacro', 'defmethod', 'defsetf', 'defstruct',
'defsubst', 'deftheme', 'deftype', 'defun', 'defvar-local',
'delay-mode-hooks', 'destructuring-bind', 'do', 'do*',
'do-all-symbols', 'do-symbols', 'dolist', 'dont-compile', 'dotimes',
'dotimes-with-progress-reporter', 'ecase', 'ert-deftest', 'etypecase',
'eval-and-compile', 'eval-when-compile', 'flet', 'ignore-errors',
'incf', 'labels', 'lambda', 'letrec', 'lexical-let', 'lexical-let*',
'loop', 'multiple-value-bind', 'multiple-value-setq', 'noreturn',
'oref', 'oref-default', 'oset', 'oset-default', 'pcase',
'pcase-defmacro', 'pcase-dolist', 'pcase-exhaustive', 'pcase-let',
'pcase-let*', 'pop', 'psetf', 'psetq', 'push', 'pushnew', 'remf',
'return', 'rotatef', 'rx', 'save-match-data', 'save-selected-window',
'save-window-excursion', 'setf', 'setq-local', 'shiftf',
'track-mouse', 'typecase', 'unless', 'use-package', 'when',
'while-no-input', 'with-case-table', 'with-category-table',
'with-coding-priority', 'with-current-buffer', 'with-demoted-errors',
'with-eval-after-load', 'with-file-modes', 'with-local-quit',
'with-output-to-string', 'with-output-to-temp-buffer',
'with-parsed-tramp-file-name', 'with-selected-frame',
'with-selected-window', 'with-silent-modifications', 'with-slots',
'with-syntax-table', 'with-temp-buffer', 'with-temp-file',
'with-temp-message', 'with-timeout', 'with-tramp-connection-property',
'with-tramp-file-property', 'with-tramp-progress-reporter',
'with-wrapper-hook', 'load-time-value', 'locally', 'macrolet', 'progv',
'return-from',
))
special_forms = set((
'and', 'catch', 'cond', 'condition-case', 'defconst', 'defvar',
'function', 'if', 'interactive', 'let', 'let*', 'or', 'prog1',
'prog2', 'progn', 'quote', 'save-current-buffer', 'save-excursion',
'save-restriction', 'setq', 'setq-default', 'subr-arity',
'unwind-protect', 'while',
))
builtin_function = set((
'%', '*', '+', '-', '/', '/=', '1+', '1-', '<', '<=', '=', '>', '>=',
'Snarf-documentation', 'abort-recursive-edit', 'abs',
'accept-process-output', 'access-file', 'accessible-keymaps', 'acos',
'active-minibuffer-window', 'add-face-text-property',
'add-name-to-file', 'add-text-properties', 'all-completions',
'append', 'apply', 'apropos-internal', 'aref', 'arrayp', 'aset',
'ash', 'asin', 'assoc', 'assoc-string', 'assq', 'atan', 'atom',
'autoload', 'autoload-do-load', 'backtrace', 'backtrace--locals',
'backtrace-debug', 'backtrace-eval', 'backtrace-frame',
'backward-char', 'backward-prefix-chars', 'barf-if-buffer-read-only',
'base64-decode-region', 'base64-decode-string',
'base64-encode-region', 'base64-encode-string', 'beginning-of-line',
'bidi-find-overridden-directionality', 'bidi-resolved-levels',
'bitmap-spec-p', 'bobp', 'bolp', 'bool-vector',
'bool-vector-count-consecutive', 'bool-vector-count-population',
'bool-vector-exclusive-or', 'bool-vector-intersection',
'bool-vector-not', 'bool-vector-p', 'bool-vector-set-difference',
'bool-vector-subsetp', 'bool-vector-union', 'boundp',
'buffer-base-buffer', 'buffer-chars-modified-tick',
'buffer-enable-undo', 'buffer-file-name', 'buffer-has-markers-at',
'buffer-list', 'buffer-live-p', 'buffer-local-value',
'buffer-local-variables', 'buffer-modified-p', 'buffer-modified-tick',
'buffer-name', 'buffer-size', 'buffer-string', 'buffer-substring',
'buffer-substring-no-properties', 'buffer-swap-text', 'bufferp',
'bury-buffer-internal', 'byte-code', 'byte-code-function-p',
'byte-to-position', 'byte-to-string', 'byteorder',
'call-interactively', 'call-last-kbd-macro', 'call-process',
'call-process-region', 'cancel-kbd-macro-events', 'capitalize',
'capitalize-region', 'capitalize-word', 'car', 'car-less-than-car',
'car-safe', 'case-table-p', 'category-docstring',
'category-set-mnemonics', 'category-table', 'category-table-p',
'ccl-execute', 'ccl-execute-on-string', 'ccl-program-p', 'cdr',
'cdr-safe', 'ceiling', 'char-after', 'char-before',
'char-category-set', 'char-charset', 'char-equal', 'char-or-string-p',
'char-resolve-modifiers', 'char-syntax', 'char-table-extra-slot',
'char-table-p', 'char-table-parent', 'char-table-range',
'char-table-subtype', 'char-to-string', 'char-width', 'characterp',
'charset-after', 'charset-id-internal', 'charset-plist',
'charset-priority-list', 'charsetp', 'check-coding-system',
'check-coding-systems-region', 'clear-buffer-auto-save-failure',
'clear-charset-maps', 'clear-face-cache', 'clear-font-cache',
'clear-image-cache', 'clear-string', 'clear-this-command-keys',
'close-font', 'clrhash', 'coding-system-aliases',
'coding-system-base', 'coding-system-eol-type', 'coding-system-p',
'coding-system-plist', 'coding-system-priority-list',
'coding-system-put', 'color-distance', 'color-gray-p',
'color-supported-p', 'combine-after-change-execute',
'command-error-default-function', 'command-remapping', 'commandp',
'compare-buffer-substrings', 'compare-strings',
'compare-window-configurations', 'completing-read',
'compose-region-internal', 'compose-string-internal',
'composition-get-gstring', 'compute-motion', 'concat', 'cons',
'consp', 'constrain-to-field', 'continue-process',
'controlling-tty-p', 'coordinates-in-window-p', 'copy-alist',
'copy-category-table', 'copy-file', 'copy-hash-table', 'copy-keymap',
'copy-marker', 'copy-sequence', 'copy-syntax-table', 'copysign',
'cos', 'current-active-maps', 'current-bidi-paragraph-direction',
'current-buffer', 'current-case-table', 'current-column',
'current-global-map', 'current-idle-time', 'current-indentation',
'current-input-mode', 'current-local-map', 'current-message',
'current-minor-mode-maps', 'current-time', 'current-time-string',
'current-time-zone', 'current-window-configuration',
'cygwin-convert-file-name-from-windows',
'cygwin-convert-file-name-to-windows', 'daemon-initialized',
'daemonp', 'dbus--init-bus', 'dbus-get-unique-name',
'dbus-message-internal', 'debug-timer-check', 'declare-equiv-charset',
'decode-big5-char', 'decode-char', 'decode-coding-region',
'decode-coding-string', 'decode-sjis-char', 'decode-time',
'default-boundp', 'default-file-modes', 'default-printer-name',
'default-toplevel-value', 'default-value', 'define-category',
'define-charset-alias', 'define-charset-internal',
'define-coding-system-alias', 'define-coding-system-internal',
'define-fringe-bitmap', 'define-hash-table-test', 'define-key',
'define-prefix-command', 'delete',
'delete-all-overlays', 'delete-and-extract-region', 'delete-char',
'delete-directory-internal', 'delete-field', 'delete-file',
'delete-frame', 'delete-other-windows-internal', 'delete-overlay',
'delete-process', 'delete-region', 'delete-terminal',
'delete-window-internal', 'delq', 'describe-buffer-bindings',
'describe-vector', 'destroy-fringe-bitmap', 'detect-coding-region',
'detect-coding-string', 'ding', 'directory-file-name',
'directory-files', 'directory-files-and-attributes', 'discard-input',
'display-supports-face-attributes-p', 'do-auto-save', 'documentation',
'documentation-property', 'downcase', 'downcase-region',
'downcase-word', 'draw-string', 'dump-colors', 'dump-emacs',
'dump-face', 'dump-frame-glyph-matrix', 'dump-glyph-matrix',
'dump-glyph-row', 'dump-redisplay-history', 'dump-tool-bar-row',
'elt', 'emacs-pid', 'encode-big5-char', 'encode-char',
'encode-coding-region', 'encode-coding-string', 'encode-sjis-char',
'encode-time', 'end-kbd-macro', 'end-of-line', 'eobp', 'eolp', 'eq',
'eql', 'equal', 'equal-including-properties', 'erase-buffer',
'error-message-string', 'eval', 'eval-buffer', 'eval-region',
'event-convert-list', 'execute-kbd-macro', 'exit-recursive-edit',
'exp', 'expand-file-name', 'expt', 'external-debugging-output',
'face-attribute-relative-p', 'face-attributes-as-vector', 'face-font',
'fboundp', 'fceiling', 'fetch-bytecode', 'ffloor',
'field-beginning', 'field-end', 'field-string',
'field-string-no-properties', 'file-accessible-directory-p',
'file-acl', 'file-attributes', 'file-attributes-lessp',
'file-directory-p', 'file-executable-p', 'file-exists-p',
'file-locked-p', 'file-modes', 'file-name-absolute-p',
'file-name-all-completions', 'file-name-as-directory',
'file-name-completion', 'file-name-directory',
'file-name-nondirectory', 'file-newer-than-file-p', 'file-readable-p',
'file-regular-p', 'file-selinux-context', 'file-symlink-p',
'file-system-info', 'file-system-info', 'file-writable-p',
'fillarray', 'find-charset-region', 'find-charset-string',
'find-coding-systems-region-internal', 'find-composition-internal',
'find-file-name-handler', 'find-font', 'find-operation-coding-system',
'float', 'float-time', 'floatp', 'floor', 'fmakunbound',
'following-char', 'font-at', 'font-drive-otf', 'font-face-attributes',
'font-family-list', 'font-get', 'font-get-glyphs',
'font-get-system-font', 'font-get-system-normal-font', 'font-info',
'font-match-p', 'font-otf-alternates', 'font-put',
'font-shape-gstring', 'font-spec', 'font-variation-glyphs',
'font-xlfd-name', 'fontp', 'fontset-font', 'fontset-info',
'fontset-list', 'fontset-list-all', 'force-mode-line-update',
'force-window-update', 'format', 'format-mode-line',
'format-network-address', 'format-time-string', 'forward-char',
'forward-comment', 'forward-line', 'forward-word',
'frame-border-width', 'frame-bottom-divider-width',
'frame-can-run-window-configuration-change-hook', 'frame-char-height',
'frame-char-width', 'frame-face-alist', 'frame-first-window',
'frame-focus', 'frame-font-cache', 'frame-fringe-width', 'frame-list',
'frame-live-p', 'frame-or-buffer-changed-p', 'frame-parameter',
'frame-parameters', 'frame-pixel-height', 'frame-pixel-width',
'frame-pointer-visible-p', 'frame-right-divider-width',
'frame-root-window', 'frame-scroll-bar-height',
'frame-scroll-bar-width', 'frame-selected-window', 'frame-terminal',
'frame-text-cols', 'frame-text-height', 'frame-text-lines',
'frame-text-width', 'frame-total-cols', 'frame-total-lines',
'frame-visible-p', 'framep', 'frexp', 'fringe-bitmaps-at-pos',
'fround', 'fset', 'ftruncate', 'funcall', 'funcall-interactively',
'function-equal', 'functionp', 'gap-position', 'gap-size',
'garbage-collect', 'gc-status', 'generate-new-buffer-name', 'get',
'get-buffer', 'get-buffer-create', 'get-buffer-process',
'get-buffer-window', 'get-byte', 'get-char-property',
'get-char-property-and-overlay', 'get-file-buffer', 'get-file-char',
'get-internal-run-time', 'get-load-suffixes', 'get-pos-property',
'get-process', 'get-screen-color', 'get-text-property',
'get-unicode-property-internal', 'get-unused-category',
'get-unused-iso-final-char', 'getenv-internal', 'gethash',
'gfile-add-watch', 'gfile-rm-watch', 'global-key-binding',
'gnutls-available-p', 'gnutls-boot', 'gnutls-bye', 'gnutls-deinit',
'gnutls-error-fatalp', 'gnutls-error-string', 'gnutls-errorp',
'gnutls-get-initstage', 'gnutls-peer-status',
'gnutls-peer-status-warning-describe', 'goto-char', 'gpm-mouse-start',
'gpm-mouse-stop', 'group-gid', 'group-real-gid',
'handle-save-session', 'handle-switch-frame', 'hash-table-count',
'hash-table-p', 'hash-table-rehash-size',
'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
'hash-table-weakness', 'iconify-frame', 'identity', 'image-flush',
'image-mask-p', 'image-metadata', 'image-size', 'imagemagick-types',
'imagep', 'indent-to', 'indirect-function', 'indirect-variable',
'init-image-library', 'inotify-add-watch', 'inotify-rm-watch',
'input-pending-p', 'insert', 'insert-and-inherit',
'insert-before-markers', 'insert-before-markers-and-inherit',
'insert-buffer-substring', 'insert-byte', 'insert-char',
'insert-file-contents', 'insert-startup-screen', 'int86',
'integer-or-marker-p', 'integerp', 'interactive-form', 'intern',
'intern-soft', 'internal--track-mouse', 'internal-char-font',
'internal-complete-buffer', 'internal-copy-lisp-face',
'internal-default-process-filter',
'internal-default-process-sentinel', 'internal-describe-syntax-value',
'internal-event-symbol-parse-modifiers',
'internal-face-x-get-resource', 'internal-get-lisp-face-attribute',
'internal-lisp-face-attribute-values', 'internal-lisp-face-empty-p',
'internal-lisp-face-equal-p', 'internal-lisp-face-p',
'internal-make-lisp-face', 'internal-make-var-non-special',
'internal-merge-in-global-face',
'internal-set-alternative-font-family-alist',
'internal-set-alternative-font-registry-alist',
'internal-set-font-selection-order',
'internal-set-lisp-face-attribute',
'internal-set-lisp-face-attribute-from-resource',
'internal-show-cursor', 'internal-show-cursor-p', 'interrupt-process',
'invisible-p', 'invocation-directory', 'invocation-name', 'isnan',
'iso-charset', 'key-binding', 'key-description',
'keyboard-coding-system', 'keymap-parent', 'keymap-prompt', 'keymapp',
'keywordp', 'kill-all-local-variables', 'kill-buffer', 'kill-emacs',
'kill-local-variable', 'kill-process', 'last-nonminibuffer-frame',
'lax-plist-get', 'lax-plist-put', 'ldexp', 'length',
'libxml-parse-html-region', 'libxml-parse-xml-region',
'line-beginning-position', 'line-end-position', 'line-pixel-height',
'list', 'list-fonts', 'list-system-processes', 'listp', 'load',
'load-average', 'local-key-binding', 'local-variable-if-set-p',
'local-variable-p', 'locale-info', 'locate-file-internal',
'lock-buffer', 'log', 'logand', 'logb', 'logior', 'lognot', 'logxor',
'looking-at', 'lookup-image', 'lookup-image-map', 'lookup-key',
'lower-frame', 'lsh', 'macroexpand', 'make-bool-vector',
'make-byte-code', 'make-category-set', 'make-category-table',
'make-char', 'make-char-table', 'make-directory-internal',
'make-frame-invisible', 'make-frame-visible', 'make-hash-table',
'make-indirect-buffer', 'make-keymap', 'make-list',
'make-local-variable', 'make-marker', 'make-network-process',
'make-overlay', 'make-serial-process', 'make-sparse-keymap',
'make-string', 'make-symbol', 'make-symbolic-link', 'make-temp-name',
'make-terminal-frame', 'make-variable-buffer-local',
'make-variable-frame-local', 'make-vector', 'makunbound',
'map-char-table', 'map-charset-chars', 'map-keymap',
'map-keymap-internal', 'mapatoms', 'mapc', 'mapcar', 'mapconcat',
'maphash', 'mark-marker', 'marker-buffer', 'marker-insertion-type',
'marker-position', 'markerp', 'match-beginning', 'match-data',
'match-end', 'matching-paren', 'max', 'max-char', 'md5', 'member',
'memory-info', 'memory-limit', 'memory-use-counts', 'memq', 'memql',
'menu-bar-menu-at-x-y', 'menu-or-popup-active-p',
'menu-or-popup-active-p', 'merge-face-attribute', 'message',
'message-box', 'message-or-box', 'min',
'minibuffer-completion-contents', 'minibuffer-contents',
'minibuffer-contents-no-properties', 'minibuffer-depth',
'minibuffer-prompt', 'minibuffer-prompt-end',
'minibuffer-selected-window', 'minibuffer-window', 'minibufferp',
'minor-mode-key-binding', 'mod', 'modify-category-entry',
'modify-frame-parameters', 'modify-syntax-entry',
'mouse-pixel-position', 'mouse-position', 'move-overlay',
'move-point-visually', 'move-to-column', 'move-to-window-line',
'msdos-downcase-filename', 'msdos-long-file-names', 'msdos-memget',
'msdos-memput', 'msdos-mouse-disable', 'msdos-mouse-enable',
'msdos-mouse-init', 'msdos-mouse-p', 'msdos-remember-default-colors',
'msdos-set-keyboard', 'msdos-set-mouse-buttons',
'multibyte-char-to-unibyte', 'multibyte-string-p', 'narrow-to-region',
'natnump', 'nconc', 'network-interface-info',
'network-interface-list', 'new-fontset', 'newline-cache-check',
'next-char-property-change', 'next-frame', 'next-overlay-change',
'next-property-change', 'next-read-file-uses-dialog-p',
'next-single-char-property-change', 'next-single-property-change',
'next-window', 'nlistp', 'nreverse', 'nth', 'nthcdr', 'null',
'number-or-marker-p', 'number-to-string', 'numberp',
'open-dribble-file', 'open-font', 'open-termscript',
'optimize-char-table', 'other-buffer', 'other-window-for-scrolling',
'overlay-buffer', 'overlay-end', 'overlay-get', 'overlay-lists',
'overlay-properties', 'overlay-put', 'overlay-recenter',
'overlay-start', 'overlayp', 'overlays-at', 'overlays-in',
'parse-partial-sexp', 'play-sound-internal', 'plist-get',
'plist-member', 'plist-put', 'point', 'point-marker', 'point-max',
'point-max-marker', 'point-min', 'point-min-marker',
'pos-visible-in-window-p', 'position-bytes', 'posix-looking-at',
'posix-search-backward', 'posix-search-forward', 'posix-string-match',
'posn-at-point', 'posn-at-x-y', 'preceding-char',
'prefix-numeric-value', 'previous-char-property-change',
'previous-frame', 'previous-overlay-change',
'previous-property-change', 'previous-single-char-property-change',
'previous-single-property-change', 'previous-window', 'prin1',
'prin1-to-string', 'princ', 'print', 'process-attributes',
'process-buffer', 'process-coding-system', 'process-command',
'process-connection', 'process-contact', 'process-datagram-address',
'process-exit-status', 'process-filter', 'process-filter-multibyte-p',
'process-id', 'process-inherit-coding-system-flag', 'process-list',
'process-mark', 'process-name', 'process-plist',
'process-query-on-exit-flag', 'process-running-child-p',
'process-send-eof', 'process-send-region', 'process-send-string',
'process-sentinel', 'process-status', 'process-tty-name',
'process-type', 'processp', 'profiler-cpu-log',
'profiler-cpu-running-p', 'profiler-cpu-start', 'profiler-cpu-stop',
'profiler-memory-log', 'profiler-memory-running-p',
'profiler-memory-start', 'profiler-memory-stop', 'propertize',
'purecopy', 'put', 'put-text-property',
'put-unicode-property-internal', 'puthash', 'query-font',
'query-fontset', 'quit-process', 'raise-frame', 'random', 'rassoc',
'rassq', 're-search-backward', 're-search-forward', 'read',
'read-buffer', 'read-char', 'read-char-exclusive',
'read-coding-system', 'read-command', 'read-event',
'read-from-minibuffer', 'read-from-string', 'read-function',
'read-key-sequence', 'read-key-sequence-vector',
'read-no-blanks-input', 'read-non-nil-coding-system', 'read-string',
'read-variable', 'recent-auto-save-p', 'recent-doskeys',
'recent-keys', 'recenter', 'recursion-depth', 'recursive-edit',
'redirect-debugging-output', 'redirect-frame-focus', 'redisplay',
'redraw-display', 'redraw-frame', 'regexp-quote', 'region-beginning',
'region-end', 'register-ccl-program', 'register-code-conversion-map',
'remhash', 'remove-list-of-text-properties', 'remove-text-properties',
'rename-buffer', 'rename-file', 'replace-match',
'reset-this-command-lengths', 'resize-mini-window-internal',
'restore-buffer-modified-p', 'resume-tty', 'reverse', 'round',
'run-hook-with-args', 'run-hook-with-args-until-failure',
'run-hook-with-args-until-success', 'run-hook-wrapped', 'run-hooks',
'run-window-configuration-change-hook', 'run-window-scroll-functions',
'safe-length', 'scan-lists', 'scan-sexps', 'scroll-down',
'scroll-left', 'scroll-other-window', 'scroll-right', 'scroll-up',
'search-backward', 'search-forward', 'secure-hash', 'select-frame',
'select-window', 'selected-frame', 'selected-window',
'self-insert-command', 'send-string-to-terminal', 'sequencep',
'serial-process-configure', 'set', 'set-buffer',
'set-buffer-auto-saved', 'set-buffer-major-mode',
'set-buffer-modified-p', 'set-buffer-multibyte', 'set-case-table',
'set-category-table', 'set-char-table-extra-slot',
'set-char-table-parent', 'set-char-table-range', 'set-charset-plist',
'set-charset-priority', 'set-coding-system-priority',
'set-cursor-size', 'set-default', 'set-default-file-modes',
'set-default-toplevel-value', 'set-file-acl', 'set-file-modes',
'set-file-selinux-context', 'set-file-times', 'set-fontset-font',
'set-frame-height', 'set-frame-position', 'set-frame-selected-window',
'set-frame-size', 'set-frame-width', 'set-fringe-bitmap-face',
'set-input-interrupt-mode', 'set-input-meta-mode', 'set-input-mode',
'set-keyboard-coding-system-internal', 'set-keymap-parent',
'set-marker', 'set-marker-insertion-type', 'set-match-data',
'set-message-beep', 'set-minibuffer-window',
'set-mouse-pixel-position', 'set-mouse-position',
'set-network-process-option', 'set-output-flow-control',
'set-process-buffer', 'set-process-coding-system',
'set-process-datagram-address', 'set-process-filter',
'set-process-filter-multibyte',
'set-process-inherit-coding-system-flag', 'set-process-plist',
'set-process-query-on-exit-flag', 'set-process-sentinel',
'set-process-window-size', 'set-quit-char',
'set-safe-terminal-coding-system-internal', 'set-screen-color',
'set-standard-case-table', 'set-syntax-table',
'set-terminal-coding-system-internal', 'set-terminal-local-value',
'set-terminal-parameter', 'set-text-properties', 'set-time-zone-rule',
'set-visited-file-modtime', 'set-window-buffer',
'set-window-combination-limit', 'set-window-configuration',
'set-window-dedicated-p', 'set-window-display-table',
'set-window-fringes', 'set-window-hscroll', 'set-window-margins',
'set-window-new-normal', 'set-window-new-pixel',
'set-window-new-total', 'set-window-next-buffers',
'set-window-parameter', 'set-window-point', 'set-window-prev-buffers',
'set-window-redisplay-end-trigger', 'set-window-scroll-bars',
'set-window-start', 'set-window-vscroll', 'setcar', 'setcdr',
'setplist', 'show-face-resources', 'signal', 'signal-process', 'sin',
'single-key-description', 'skip-chars-backward', 'skip-chars-forward',
'skip-syntax-backward', 'skip-syntax-forward', 'sleep-for', 'sort',
'sort-charsets', 'special-variable-p', 'split-char',
'split-window-internal', 'sqrt', 'standard-case-table',
'standard-category-table', 'standard-syntax-table', 'start-kbd-macro',
'start-process', 'stop-process', 'store-kbd-macro-event', 'string',
'string-as-multibyte', 'string-as-unibyte', 'string-bytes',
'string-collate-equalp', 'string-collate-lessp', 'string-equal',
'string-lessp', 'string-make-multibyte', 'string-make-unibyte',
'string-match', 'string-to-char', 'string-to-multibyte',
'string-to-number', 'string-to-syntax', 'string-to-unibyte',
'string-width', 'stringp', 'subr-name', 'subrp',
'subst-char-in-region', 'substitute-command-keys',
'substitute-in-file-name', 'substring', 'substring-no-properties',
'suspend-emacs', 'suspend-tty', 'suspicious-object', 'sxhash',
'symbol-function', 'symbol-name', 'symbol-plist', 'symbol-value',
'symbolp', 'syntax-table', 'syntax-table-p', 'system-groups',
'system-move-file-to-trash', 'system-name', 'system-users', 'tan',
'terminal-coding-system', 'terminal-list', 'terminal-live-p',
'terminal-local-value', 'terminal-name', 'terminal-parameter',
'terminal-parameters', 'terpri', 'test-completion',
'text-char-description', 'text-properties-at', 'text-property-any',
'text-property-not-all', 'this-command-keys',
'this-command-keys-vector', 'this-single-command-keys',
'this-single-command-raw-keys', 'time-add', ' | codeparrot/github-code-clean |
# coding=utf-8
"""Tests for the keyword wizard."""
import unittest
from safe.definitions.constants import INASAFE_TEST
from safe.test.utilities import get_qgis_app
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app(qsetting=INASAFE_TEST)
import os
import shutil
import unittest
from datetime import datetime
from safe.common.utilities import temp_dir
from safe.definitions.constants import big_number, no_field
from safe.definitions.exposure import (exposure_land_cover, exposure_place,
exposure_population, exposure_structure)
from safe.definitions.exposure_classifications import (generic_place_classes,
generic_structure_classes)
from safe.definitions.extra_keywords import extra_keyword_earthquake_depth
from safe.definitions.fields import (aggregation_name_field,
exposure_type_field, female_count_field,
hazard_name_field, hazard_value_field,
population_count_field)
from safe.definitions.hazard import (hazard_cyclone, hazard_earthquake,
hazard_flood, hazard_volcano)
from safe.definitions.hazard_category import hazard_category_multiple_event
from safe.definitions.hazard_classifications import (cyclone_au_bom_hazard_classes,
earthquake_mmi_scale,
flood_hazard_classes,
volcano_hazard_classes)
from safe.definitions.layer_geometry import (layer_geometry_point,
layer_geometry_polygon,
layer_geometry_raster)
from safe.definitions.layer_modes import (layer_mode_classified,
layer_mode_continuous)
from safe.definitions.layer_purposes import (layer_purpose_aggregation,
layer_purpose_exposure,
layer_purpose_hazard)
from safe.definitions.units import (count_exposure_unit,
unit_kilometres_per_hour, unit_metres,
unit_mmi)
from safe.definitions.utilities import (default_classification_thresholds,
get_compulsory_fields)
from safe.gui.tools.wizard.wizard_dialog import WizardDialog
from safe.test.utilities import (clone_raster_layer, clone_shp_layer,
dict_values_sorted, load_test_vector_layer,
standard_data_path)
from safe.utilities.unicode import byteify
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
# Some default values shared by the keyword wizard tests below; they are
# typed into the wizard's source/title forms and asserted back afterwards.
source = 'Source'
source_scale = 'Source Scale'
source_url = 'Source Url'
source_date = datetime.strptime('06-12-2015', '%d-%m-%Y')
source_license = 'Source License'
layer_title = 'Layer Title'
# noinspection PyTypeChecker
class TestKeywordWizard(unittest.TestCase):
"""Test the InaSAFE keyword wizard GUI."""
maxDiff = None
def tearDown(self):
"""Run after each test."""
# Remove the mess that we made on each test
try:
shutil.rmtree(temp_dir(sub_dir='test'))
except BaseException:
pass
def check_list(self, expected_list, list_widget):
"""Helper function to check that list_widget is equal to expected_list.
:param expected_list: List of expected values to be found.
:type expected_list: list
:param list_widget: List widget that wants to be checked.
:type expected_list: QListWidget
"""
real_list = []
for i in range(list_widget.count()):
real_list.append(list_widget.item(i).text())
self.assertEqual(expected_list, real_list)
def check_current_step(self, expected_step):
"""Helper function to check the current step is expected_step.
:param expected_step: The expected current step.
:type expected_step: WizardStep instance
"""
current_step = expected_step.parent.get_current_step()
message = 'Should be step %s but it got %s' % (
expected_step.__class__.__name__, current_step.__class__.__name__)
self.assertEqual(expected_step, current_step, message)
def check_current_text(self, expected_text, list_widget):
"""Check the current text in list widget is expected_text.
:param expected_text: The expected current step.
:type expected_text: str
:param list_widget: List widget that wants to be checked.
:type list_widget: QListWidget
"""
try:
selected_items = list_widget.selectedItems()
selected_texts = [item.text() for item in selected_items]
if isinstance(expected_text, str):
expected_text = [expected_text]
self.assertListEqual(expected_text, selected_texts)
except AttributeError:
options = [
list_widget.item(i).text()
for i in range(list_widget.count())
]
message = 'There is no %s in the available option %s' % (
expected_text, options)
self.assertFalse(True, message)
# noinspection PyUnresolvedReferences
@staticmethod
def select_from_list_widget(option, list_widget):
"""Helper function to select option from list_widget.
:param option: Option to be chosen.
:type option: str
:param list_widget: List widget that wants to be checked.
:type list_widget: QListWidget
"""
available_options = []
for i in range(list_widget.count()):
if list_widget.item(i).text() == option:
list_widget.setCurrentRow(i)
return
else:
available_options.append(list_widget.item(i).text())
message = (
'There is no %s in the list widget. The available options are '
'%s' % (option, available_options))
raise Exception(message)
    def test_invalid_keyword_layer(self):
        """Entering keyword creation mode on a layer whose keyword XML is
        invalid must not raise an exception.
        """
        layer = clone_raster_layer(
            name='invalid_keyword_xml',
            include_keywords=True,
            source_directory=standard_data_path('other'),
            extension='.tif')

        # check the environment first
        self.assertIsNotNone(layer.dataProvider())

        # Initialize dialog
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)

        # It shouldn't raise any exception although the xml is invalid
        dialog.set_keywords_creation_mode(layer)
    def test_hazard_without_inasafe_fields(self):
        """Test keyword wizard for a hazard layer without inasafe fields.

        Walks every wizard step for a classified polygon earthquake
        hazard (purpose, hazard, hazard category, layer mode, field,
        classifications, source, extra keywords, title, summary) and
        then verifies the keywords the wizard produced.
        """
        # cloning layer that has no inasafe fields
        layer = load_test_vector_layer(
            'hazard', 'classified_generic_polygon.shp', clone=True)

        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)

        # check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)

        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)

        # Click next to select hazard
        dialog.pbnNext.click()

        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)

        # select earthquake
        self.select_from_list_widget(
            hazard_earthquake['name'],
            dialog.step_kw_subcategory.lstSubcategories)

        # Click next to select earthquake
        dialog.pbnNext.click()

        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)

        # select multiple event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)

        # Click next to select multiple event
        dialog.pbnNext.click()

        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)

        # select classified mode
        self.select_from_list_widget(
            layer_mode_classified['name'],
            dialog.step_kw_layermode.lstLayerModes)

        # Click next to select classified
        dialog.pbnNext.click()

        # Check if in select field step
        self.check_current_step(dialog.step_kw_field)

        # select h_zone field
        self.select_from_list_widget(
            'h_zone',
            dialog.step_kw_field.lstFields)

        # Click next to select h_zone
        dialog.pbnNext.click()

        # Check if in multi classification step
        self.check_current_step(dialog.step_kw_multi_classifications)

        # Click next to finish multi classifications step
        dialog.pbnNext.click()

        # Check if in source step
        self.check_current_step(dialog.step_kw_source)

        # Fill source form
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)

        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()

        # Check if in extra keywords step
        self.check_current_step(dialog.step_kw_extra_keywords)
        self.assertTrue(dialog.step_kw_extra_keywords.widgets_dict)

        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()

        # Check if in title step
        self.check_current_step(dialog.step_kw_title)

        # Fill title form
        dialog.step_kw_title.leTitle.setText(layer_title)

        # Click next to finish title step and go to summary step
        dialog.pbnNext.click()

        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)

        # Click finish
        dialog.pbnNext.click()

        # Checking keyword created
        expected_keyword = {
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_earthquake['key'],
            'inasafe_fields':
                {hazard_value_field['key']: 'h_zone'},
            # value_maps are taken verbatim from the cloned layer because
            # this test does not edit the classification mappings.
            'value_maps': layer.keywords['value_maps'],
            'date': source_date,
            'layer_geometry': layer_geometry_polygon['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_classified['key']
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
def test_aggregation_without_inasafe_fields(self):
"""Test keyword wizard for layer without inasafe fields."""
layer = load_test_vector_layer(
'aggregation', 'district_osm_jakarta.geojson', clone=True)
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
# check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Check aggregation
self.check_current_text(
layer_purpose_aggregation['name'],
dialog.step_kw_purpose.lstCategories)
# Click next
dialog.pbnNext.click()
# check if in step field
self.check_current_step(dialog.step_kw_field)
# Check aggregation
self.check_current_text(
layer.keywords['inasafe_fields']['aggregation_name_field'],
dialog.step_kw_field.lstFields)
# Click next
dialog.pbnNext.click()
# Check field mapping steps
self.check_current_step(dialog.step_kw_fields_mapping)
# Click next to continue
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
# Click next
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
# Click next
dialog.pbnNext.click()
# Check if in summary step
self.check_current_step(dialog.step_kw_summary)
# Click next
dialog.pbnNext.click()
def test_hazard_volcano_polygon_keyword(self):
"""Test keyword wizard for volcano hazard polygon."""
layer = clone_shp_layer(
name='volcano_krb',
include_keywords=False,
source_directory=standard_data_path('hazard'))
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Select hazard
self.select_from_list_widget(
layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
# Click next to select hazard
dialog.pbnNext.click()
# Check if in select hazard step
self.check_current_step(dialog.step_kw_subcategory)
# select volcano
self.select_from_list_widget(
hazard_volcano['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select volcano
dialog.pbnNext.click()
# Check if in select hazard category step
self.check_current_step(dialog.step_kw_hazard_category)
# select multiple_event
self.select_from_list_widget(
hazard_category_multiple_event['name'],
dialog.step_kw_hazard_category.lstHazardCategories)
# Click next to select multiple event
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# select classified mode
self.select_from_list_widget(
layer_mode_classified['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select classified
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# select KRB field
self.select_from_list_widget('KRB', dialog.step_kw_field.lstFields)
# Click next to select KRB
dialog.pbnNext.click()
# Check if in multi classification step
self.check_current_step(dialog.step_kw_multi_classifications)
# Change combo box
dialog.step_kw_multi_classifications.exposure_combo_boxes[
0].setCurrentIndex(1)
# Click save
dialog.step_kw_multi_classifications.save_button.click()
# Click next to finish multi classifications step
dialog.pbnNext.click()
# select inasafe fields step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Get the parameter widget for hazard name
hazard_name_parameter_widget = dialog.step_kw_inasafe_fields.\
parameter_container.get_parameter_widget_by_guid(
hazard_name_field['key'])
# Check if it's set to no field at the beginning
self.assertEqual(
no_field, hazard_name_parameter_widget.get_parameter().value)
# Select volcano
hazard_name_parameter_widget.set_choice('volcano')
# Check if it's set to volcano
self.assertEqual(
'volcano', hazard_name_parameter_widget.get_parameter().value)
# Check if in InaSAFE field step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Click next to finish InaSAFE Field step and go to source step
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
dialog.step_kw_source.leSource.setText(source)
dialog.step_kw_source.leSource_scale.setText(source_scale)
dialog.step_kw_source.leSource_url.setText(source_url)
dialog.step_kw_source.ckbSource_date.setChecked(True)
dialog.step_kw_source.dtSource_date.setDateTime(source_date)
dialog.step_kw_source.leSource_license.setText(source_license)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
dialog.step_kw_title.leTitle.setText(layer_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in summary step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
# Checking Keyword Created
expected_keyword = {
'scale': source_scale,
'hazard_category': hazard_category_multiple_event['key'],
'license': source_license,
'source': source,
'url': source_url,
'title': layer_title,
'hazard': hazard_volcano['key'],
'inasafe_fields':
{
hazard_value_field['key']: 'KRB',
hazard_name_field['key']: 'volcano',
},
'value_maps': {
exposure_land_cover['key']: {
volcano_hazard_classes['key']: {
'active': True,
'classes': {
'high': ['Kawasan Rawan Bencana III'],
'low': ['Kawasan Rawan Bencana I'],
'medium': ['Kawasan Rawan Bencana II']
}
}
}
},
'date': source_date,
'layer_geometry': layer_geometry_polygon['key'],
'layer_purpose': layer_purpose_hazard['key'],
'layer_mode': layer_mode_classified['key']
}
real_keywords = dialog.get_keywords()
self.assertDictEqual(dict_values_sorted(
real_keywords), dict_values_sorted(expected_keyword))
def test_hazard_volcano_polygon_existing_keywords(self):
"""Test existing keyword for hazard volcano polygon."""
layer = load_test_vector_layer(
'hazard', 'volcano_krb.shp', clone=True)
default_classes = {
'high': ['Kawasan Rawan Bencana III'],
'low': ['Kawasan Rawan Bencana I'],
'medium': ['Kawasan Rawan Bencana II']
}
keywords = {
'hazard': hazard_volcano['key'],
'hazard_category': hazard_category_multiple_event['key'],
'inasafe_fields': {
hazard_name_field['key']: 'volcano',
hazard_value_field['key']: 'KRB'
},
'layer_geometry': layer_geometry_polygon['key'],
'layer_mode': layer_mode_classified['key'],
'layer_purpose': layer_purpose_hazard['key'],
'title': 'Volcano KRB',
'value_maps': {
exposure_land_cover['key']: {
volcano_hazard_classes['key']: {
'active': True,
'classes': default_classes
}
},
'population': {
'volcano_hazard_classes': {
'active': True,
'classes': default_classes
}
},
'road': {
'volcano_hazard_classes': {
'active': True,
'classes': default_classes
}
},
'structure': {
'volcano_hazard_classes': {
'active': True,
'classes': default_classes
}
}
}
}
layer.keywords = keywords
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer, keywords)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Check if hazard is selected
self.check_current_text(
layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
# Click next to select hazard
dialog.pbnNext.click()
# Check if in select hazard step
self.check_current_step(dialog.step_kw_subcategory)
# Check if volcano is selected
self.check_current_text(
hazard_volcano['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select volcano
dialog.pbnNext.click()
# Check if in select hazard category step
self.check_current_step(dialog.step_kw_hazard_category)
# Check if multiple event is selected
self.check_current_text(
hazard_category_multiple_event['name'],
dialog.step_kw_hazard_category.lstHazardCategories)
# Click next to select multiple event
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# Check if classified is selected
self.check_current_text(
layer_mode_classified['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select classified
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# Check if KRB is selected
self.check_current_text('KRB', dialog.step_kw_field.lstFields)
# Click next to select KRB
dialog.pbnNext.click()
# Check if in select classification step
self.check_current_step(dialog.step_kw_multi_classifications)
# Click next to finish multi classifications step
dialog.pbnNext.click()
# select additional keywords / inasafe fields step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Check inasafe fields
parameters = dialog.step_kw_inasafe_fields. \
parameter_container.get_parameters(True)
# Get layer's inasafe_fields
inasafe_fields = layer.keywords.get('inasafe_fields')
self.assertIsNotNone(inasafe_fields)
for key, value in list(inasafe_fields.items()):
# Not check if it's hazard_class_field
if key == get_compulsory_fields(
layer_purpose_hazard['key'])['key']:
continue
# Check if existing key in parameters guid
self.assertIn(key, [p.guid for p in parameters])
# Iterate through all parameter to get parameter value
for parameter in parameters:
if parameter.guid == key:
# Check the value is the same
self.assertEqual(value, parameter.value)
break
for parameter in parameters:
# If not available is chosen, inasafe_fields shouldn't have it
if parameter.value == no_field:
self.assertNotIn(parameter.guid, list(inasafe_fields.keys()))
# If not available is not chosen, inasafe_fields should have it
else:
self.assertIn(parameter.guid, list(inasafe_fields.keys()))
# Click next to finish inasafe fields step and go to source step
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
self.assertTrue(dialog.pbnNext.isEnabled())
self.assertEqual(dialog.step_kw_source.leSource.text(), '')
self.assertEqual(dialog.step_kw_source.leSource_url.text(), '')
self.assertFalse(dialog.step_kw_source.ckbSource_date.isChecked())
self.assertEqual(dialog.step_kw_source.leSource_scale.text(), '')
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
self.assertEqual(
'Volcano KRB', dialog.step_kw_title.leTitle.text())
self.assertTrue(dialog.pbnNext.isEnabled())
# Click finish
dialog.pbnNext.click()
self.assertDictEqual(
keywords['value_maps'], dialog.get_keywords()['value_maps'])
self.assertDictEqual(keywords, dialog.get_keywords())
def test_exposure_structure_polygon_keyword(self):
"""Test keyword wizard for exposure structure polygon."""
layer = clone_shp_layer(
name='buildings',
include_keywords=False,
source_directory=standard_data_path('exposure'))
self.assertIsNotNone(layer)
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.qsettings = None
dialog.set_keywords_creation_mode(layer)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Select exposure
self.select_from_list_widget(
layer_purpose_exposure['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select exposure
dialog.pbnNext.click()
# Check if in select exposure step
self.check_current_step(dialog.step_kw_subcategory)
# select structure
self.select_from_list_widget(
exposure_structure['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select structure
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# select classified mode
self.select_from_list_widget(
layer_mode_classified['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select classified
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# select TYPE field
self.select_from_list_widget(
'TYPE', dialog.step_kw_field.lstFields)
# Click next to select TYPE
dialog.pbnNext.click()
# Check if in select classification step
self.check_current_step(dialog.step_kw_classification)
# select generic structure classes classification
self.select_from_list_widget(
generic_structure_classes['name'],
dialog.step_kw_classification.lstClassifications)
# Click next to select the classifications
dialog.pbnNext.click()
# Check if in classify step
self.check_current_step(dialog.step_kw_classify)
default_classes = generic_structure_classes['classes']
unassigned_values = [] # no need to check actually, not save in file
assigned_values = {
'residential': ['Residential'],
'education': ['School'],
'health': ['Clinic/Doctor'],
'transport': [],
'place of worship': ['Place of Worship - Islam'],
'government': ['Government'],
'commercial': ['Commercial', 'Industrial'],
'recreation': [],
'public facility': [],
'evacuation centre': [],
'other': []
}
dialog.step_kw_classify.populate_classified_values(
unassigned_values, assigned_values, default_classes)
# Click next to finish value mapping
dialog.pbnNext.click()
# Check if in InaSAFE field step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Click next to finish inasafe fields step and go to inasafe default
# field step
dialog.pbnNext.click()
# Check if in InaSAFE Default field step
# This step is disabled until we activate again value/rate fields.
# self.check_current_step(dialog.step_kw_default_inasafe_fields)
# Click next to finish InaSAFE Default Field step and go to source step
# dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
dialog.step_kw_source.leSource.setText(source)
dialog.step_kw_source.leSource_scale.setText(source_scale)
dialog.step_kw_source.leSource_url.setText(source_url)
dialog.step_kw_source.ckbSource_date.setChecked(True)
dialog.step_kw_source.dtSource_date.setDateTime(source_date)
dialog.step_kw_source.leSource_license.setText(source_license)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
dialog.step_kw_title.leTitle.setText(layer_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
# Checking Keyword Created
expected_keyword = {
'scale': source_scale,
'license': source_license,
'source': source,
'url': source_url,
'title': layer_title,
'exposure': exposure_structure['key'],
'inasafe_fields': {
exposure_type_field['key']: 'TYPE',
},
'value_map': dict(
(k,
v) for k,
v in list(
assigned_values.items()) if v),
'date': source_date,
'classification': generic_structure_classes['key'],
'layer_geometry': layer_geometry_polygon['key'],
'layer_purpose': layer_purpose_exposure['key'],
'layer_mode': layer_mode_classified['key']}
real_keywords = dialog.get_keywords()
self.assertDictEqual(dict_values_sorted(
real_keywords), dict_values_sorted(expected_keyword))
def test_exposure_structure_polygon_existing_keywords(self):
"""Test existing keyword for exposure structure polygon."""
layer = load_test_vector_layer(
'exposure', 'buildings.shp', clone=True)
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Check if hazard is selected
self.check_current_text(
layer_purpose_exposure['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select exposure
dialog.pbnNext.click()
# Check if in select exposure step
self.check_current_step(dialog.step_kw_subcategory)
# Check if structure is selected
self.check_current_text(
exposure_structure['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select structure
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# Check if classified is selected
self.check_current_text(
layer_mode_classified['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select classified
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# Check if TYPE is selected
self.check_current_text('TYPE', dialog.step_kw_field.lstFields)
# Click next to select TYPE
dialog.pbnNext.click()
# Check if in select classification step
self.check_current_step(dialog.step_kw_classification)
# Check if generic structure classes is selected.
self.check_current_text(
generic_structure_classes['name'],
dialog.step_kw_classification.lstClassifications)
# Click next to select the classifications
dialog.pbnNext.click()
# Check if in classify step
self.check_current_step(dialog.step_kw_classify)
# Click next to finish value mapping
dialog.pbnNext.click()
# select additional keywords / inasafe fields step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Check inasafe fields
parameters = dialog.step_kw_inasafe_fields. \
parameter_container.get_parameters(True)
# Get layer's inasafe_fields
inasafe_fields = layer.keywords.get('inasafe_fields')
self.assertIsNotNone(inasafe_fields)
for key, value in list(inasafe_fields.items()):
# Not check if it's hazard_value_field
if key == get_compulsory_fields(
layer_purpose_exposure['key'])['key']:
continue
# Check if existing key in parameters guid
self.assertIn(key, [p.guid for p in parameters])
# Iterate through all parameter to get parameter value
for parameter in parameters:
if parameter.guid == key:
# Check the value is the same
self.assertEqual(value, parameter.value)
break
for parameter in parameters:
# If not available is chosen, inasafe_fields shouldn't have it
if parameter.value == no_field:
self.assertNotIn(parameter.guid, list(inasafe_fields.keys()))
# If not available is not chosen, inasafe_fields should have it
else:
self.assertIn(parameter.guid, list(inasafe_fields.keys()))
# Click next to finish inasafe fields step and go to inasafe default
# field step
dialog.pbnNext.click()
# Check if in InaSAFE Default field step
# This step is disabled until we activate again value/rate fields.
# self.check_current_step(dialog.step_kw_default_inasafe_fields)
# Click next to finish InaSAFE Default Field step and go to source step
# dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
self.assertTrue(dialog.pbnNext.isEnabled())
self.assertEqual(
dialog.step_kw_source.leSource.text(),
layer.keywords.get('source'))
self.assertEqual(dialog.step_kw_source.leSource_url.text(), '')
self.assertFalse(dialog.step_kw_source.ckbSource_date.isChecked())
self.assertEqual(dialog.step_kw_source.leSource_scale.text(), '')
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
self.assertEqual(
'Buildings', dialog.step_kw_title.leTitle.text())
self.assertTrue(dialog.pbnNext.isEnabled())
# Click finish
dialog.pbnNext.click()
self.assertDictEqual(
dict_values_sorted(layer.keywords['value_map']),
dict_values_sorted(dialog.get_keywords()['value_map']))
def test_aggregation_keyword(self):
"""Test Aggregation Keywords."""
layer = load_test_vector_layer(
'gisv4', 'aggregation', 'small_grid.geojson', clone_to_memory=True)
layer.keywords = {}
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Select aggregation
self.select_from_list_widget(
layer_purpose_aggregation['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select aggregation
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# select area_name field
area_name = 'area_name'
self.select_from_list_widget(
area_name, dialog.step_kw_field.lstFields)
# Click next to select area_name
dialog.pbnNext.click()
# Check field mapping steps
self.check_current_step(dialog.step_kw_fields_mapping)
# Click next to continue
dialog.pbnNext.click()
# select inasafe fields step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Check behaviour
self.check_radio_button_behaviour(
dialog.step_kw_default_inasafe_fields)
# Click next to finish inasafe fields step and go to source step
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
dialog.step_kw_title.leTitle.setText(layer_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
expected_keyword = {
'inasafe_fields': {aggregation_name_field['key']: area_name},
'layer_geometry': layer_geometry_polygon['key'],
'layer_purpose': layer_purpose_aggregation['key'],
'title': layer_title
}
# Check the keywords
real_keywords = dialog.get_keywords()
self.assertDictEqual(dict_values_sorted(real_keywords),
dict_values_sorted(expected_keyword))
def test_aggregation_existing_keyword(self):
"""Test Keyword wizard for aggregation layer with keywords."""
layer = load_test_vector_layer(
'gisv4', 'aggregation', 'small_grid.geojson', clone_to_memory=True)
area_name = 'area_name'
expected_keyword = {
'inasafe_fields': {aggregation_name_field['key']: area_name},
'layer_geometry': layer_geometry_polygon['key'],
'layer_purpose': layer_purpose_aggregation['key'],
'title': layer_title
}
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer, expected_keyword)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Select aggregation
self.check_current_text(
layer_purpose_aggregation['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select aggregation
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# select area_name field
self.check_current_text(
area_name, dialog.step_kw_field.lstFields)
# Click next to select KRB
dialog.pbnNext.click()
# Check field mapping steps
self.check_current_step(dialog.step_kw_fields_mapping)
# Click next to continue
dialog.pbnNext.click()
# Check inasafe fields step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Check behaviour
self.check_radio_button_behaviour(
dialog.step_kw_default_inasafe_fields)
# Click next to finish inasafe fields step and go to source step
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
# Check if the title is already filled
self.assertEqual(dialog.step_kw_title.leTitle.text(), layer_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
# Check the keywords
real_keywords = dialog.get_keywords()
self.assertDictEqual(dict_values_sorted(real_keywords),
dict_values_sorted(expected_keyword))
def test_exposure_population_polygon_keyword(self):
"""Test exposure population polygon keyword."""
layer = load_test_vector_layer(
'exposure', 'census.geojson', clone_to_memory=True)
layer.keywords = {}
self.assertIsNotNone(layer)
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Select exposure
self.select_from_list_widget(
layer_purpose_exposure['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select exposure
dialog.pbnNext.click()
# Check if in select exposure step
self.check_current_step(dialog.step_kw_subcategory)
# select population
self.select_from_list_widget(
exposure_population['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select population
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# Select continuous
self.select_from_list_widget(
layer_mode_continuous['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select continuous
dialog.pbnNext.click()
# Check if in select unit step
self.check_current_step(dialog.step_kw_unit)
# Select count
self.select_from_list_widget(
count_exposure_unit['name'],
dialog.step_kw_unit.lstUnits)
# Click next to select count
dialog.pbnNext.click()
# Check if in select field step
self.check_current_step(dialog.step_kw_field)
# select population field
population_field = 'population'
self.select_from_list_widget(
population_field, dialog.step_kw_field.lstFields)
# Click next to select population
dialog.pbnNext.click()
# Check field mapping steps
self.check_current_step(dialog.step_kw_fields_mapping)
# Click next to continue
dialog.pbnNext.click()
# Check if in InaSAFE field step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Click next to finish InaSAFE Field step and go to source step
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
dialog.step_kw_source.leSource.setText(source)
dialog.step_kw_source.leSource_scale.setText(source_scale)
dialog.step_kw_source.leSource_url.setText(source_url)
dialog.step_kw_source.ckbSource_date.setChecked(True)
dialog.step_kw_source.dtSource_date.setDateTime(source_date)
dialog.step_kw_source.leSource_license.setText(source_license)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
dialog.step_kw_title.leTitle.setText(layer_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
# Checking Keyword Created
expected_keyword = {
'scale': source_scale,
'license': source_license,
'source': source,
'url': source_url,
'title': layer_title,
'exposure': exposure_population['key'],
'exposure_unit': count_exposure_unit['key'],
'inasafe_fields':
{
population_count_field['key']: ['population'],
},
'date': source_date,
'layer_geometry': layer_geometry_polygon['key'],
'layer_purpose': layer_purpose_exposure['key'],
'layer_mode': layer_mode_continuous['key']
}
real_keywords = dialog.get_keywords()
self.assertDictEqual(dict_values_sorted(
real_keywords), dict_values_sorted(expected_keyword))
def test_exposure_population_polygon_existing_keyword(self):
"""Test existing exposure population polygon with keyword."""
layer = load_test_vector_layer(
'exposure', 'census.geojson', clone_to_memory=True)
expected_keyword = {
'scale': source_scale,
'license': source_license,
'source': source,
'url': source_url,
'title': layer_title,
'exposure': exposure_population['key'],
'exposure_unit': count_exposure_unit['key'],
'inasafe_fields':
{
# Dummy, select more than fields to show we can do it.
population_count_field['key']: ['population', 'id'],
},
'date': source_date,
'layer_geometry': layer_geometry_polygon['key'],
'layer_purpose': layer_purpose_exposure['key'],
'layer_mode': layer_mode_continuous['key']
}
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer, expected_keyword)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Check if exposure is selected
self.select_from_list_widget(
layer_purpose_exposure['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select exposure
dialog.pbnNext.click()
# Check if in select exposure step
self.check_current_step(dialog.step_kw_subcategory)
# Check if population is selected
self.check_current_text(
exposure_population['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select population
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# Check if continuous is selected
self.check_current_text(
layer_mode_continuous['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select continuous
dialog.pbnNext.click()
# Check if in select unit step
self.check_current_step(dialog.step_kw_unit)
# Check if count is selected
self.check_current_text(
count_exposure_unit['name'],
dialog.step_kw_unit.lstUnits)
# Click next to select count
dialog.pbnNext.click()
# Check if in select unit step
self.check_current_step(dialog.step_kw_field)
# Check if population is selected
population_field = expected_keyword['inasafe_fields'][
population_count_field['key']]
self.check_current_text(
population_field, dialog.step_kw_field.lstFields)
# Click next to select population
dialog.pbnNext.click()
# Check field mapping steps
self.check_current_step(dialog.step_kw_fields_mapping)
# Click next to continue
dialog.pbnNext.click()
# Check if in InaSAFE field step
self.check_current_step(dialog.step_kw_inasafe_fields)
# Click next to finish inasafe fields step and go to source step
# field step
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
self.assertEqual(dialog.step_kw_source.leSource.text(), source)
self.assertEqual(
dialog.step_kw_source.leSource_scale.text(), source_scale)
self.assertEqual(
dialog.step_kw_source.ckbSource_date.isChecked(), True)
self.assertEqual(
dialog.step_kw_source.dtSource_date.dateTime(), source_date)
self.assertEqual(
dialog.step_kw_source.leSource_license.text(), source_license)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
self.assertEqual(dialog.step_kw_title.leTitle.text(), layer_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
# Checking Keyword Created
real_keywords = dialog.get_keywords()
self.assertDictEqual(dict_values_sorted(
real_keywords), dict_values_sorted(expected_keyword))
def test_exposure_population_raster(self):
"""Test keyword wizard for population raster."""
path = standard_data_path(
'exposure', 'people_allow_resampling_true.tif')
message = "Path %s is not found" % path
self.assertTrue(os.path.exists(path), message)
layer = clone_raster_layer(
name='people_allow_resampling_true',
extension='.tif',
include_keywords=False,
source_directory=standard_data_path('exposure'))
self.assertIsNotNone(layer)
layer.keywords = {}
# noinspection PyTypeChecker
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
# Check if in select purpose step
self.check_current_step(dialog.step_kw_purpose)
# Select exposure
self.select_from_list_widget(
layer_purpose_exposure['name'],
dialog.step_kw_purpose.lstCategories)
# Click next to select exposure
dialog.pbnNext.click()
# Check if in select exposure step
self.check_current_step(dialog.step_kw_subcategory)
# select population
self.select_from_list_widget(
exposure_population['name'],
dialog.step_kw_subcategory.lstSubcategories)
# Click next to select population
dialog.pbnNext.click()
# Check if in select band step
self.check_current_step(dialog.step_kw_band_selector)
# Click next to select Band 1 (default)
dialog.pbnNext.click()
# Check if in select layer mode step
self.check_current_step(dialog.step_kw_layermode)
# Check if continuous is selected
self.check_current_text(
layer_mode_continuous['name'],
dialog.step_kw_layermode.lstLayerModes)
# Click next to select continuous
dialog.pbnNext.click()
# Check if in select unit step
self.check_current_step(dialog.step_kw_unit)
# Check if count is selected
self.check_current_text(
count_exposure_unit['name'],
dialog.step_kw_unit.lstUnits)
# Click next to select count
dialog.pbnNext.click()
# Check if in source step
self.check_current_step(dialog.step_kw_source)
# Click next to finish source step and go to title step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_title)
# Click next to finish title step and go to kw summary step
dialog.pbnNext.click()
# Check if in title step
self.check_current_step(dialog.step_kw_summary)
# Click finish
dialog.pbnNext.click()
real_keywords = dialog.get_keywords()
self.assertEqual(1, real_keywords['active_band'])
    def test_clean_keyword_wizard(self):
        """Test for having the clean state when we run keyword wizard.

        Runs the wizard twice with the same dialog instance — first for an
        exposure layer, then for a hazard layer — to verify that no state
        leaks between runs. Regression test for issue #4200.
        """
        layer = load_test_vector_layer(
            'gisv4',
            'exposure',
            'population_multi_fields.geojson',
            clone_to_memory=True)
        self.assertIsNotNone(layer)
        expected_keyword = {
            'scale': source_scale,
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'exposure': exposure_population['key'],
            'exposure_unit': count_exposure_unit['key'],
            'inasafe_fields':
                {
                    # Dummy, select more than fields to show we can do it.
                    population_count_field['key']: [
                        'right_hand',
                        'left_hand'
                    ],
                    female_count_field['key']: [
                        'F_0_4',
                        'F_5_9',
                        'F_9_15',
                        'F_15_30',
                        'F_30_60',
                        'F_60_100'
                    ]
                },
            'date': source_date,
            'layer_geometry': layer_geometry_polygon['key'],
            'layer_purpose': layer_purpose_exposure['key'],
            'layer_mode': layer_mode_continuous['key']
        }
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer, expected_keyword)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Click next to select exposure
        dialog.pbnNext.click()
        # Click next to select population
        dialog.pbnNext.click()
        # Click next to select continuous
        dialog.pbnNext.click()
        # Click next to select count
        dialog.pbnNext.click()
        # Click next to select the population count field
        dialog.pbnNext.click()
        # Click next to finish the field mapping step
        dialog.pbnNext.click()
        # Click next to finish inasafe fields step and go to source step
        dialog.pbnNext.click()
        # Click next to finish source step and go to title step
        dialog.pbnNext.click()
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(byteify(real_keywords), byteify(expected_keyword))
        # Now we try to use the same dialog object for assigning another layer.
        # This is replication of #4200
        hazard_layer = load_test_vector_layer(
            'hazard', 'volcano_krb.shp', clone=True)
        hazard_expected_keyword = {
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_volcano['key'],
            'inasafe_fields':
                {
                    hazard_value_field['key']: 'KRB',
                    hazard_name_field['key']: 'volcano',
                },
            'value_maps': {
                exposure_land_cover['key']: {
                    volcano_hazard_classes['key']: {
                        'active': True,
                        'classes': {
                            'high': ['Kawasan Rawan Bencana III'],
                            'low': ['Kawasan Rawan Bencana I'],
                            'medium': ['Kawasan Rawan Bencana II']
                        }
                    }
                }
            },
            'layer_geometry': layer_geometry_polygon['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_classified['key']
        }
        # noinspection PyTypeChecker
        dialog.set_keywords_creation_mode(
            hazard_layer, hazard_expected_keyword)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Click next to select volcano
        dialog.pbnNext.click()
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Click next to select classified
        dialog.pbnNext.click()
        # Click next to select KRB
        dialog.pbnNext.click()
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Click next to finish InaSAFE Field step and go to source step
        dialog.pbnNext.click()
        # Click next to finish source step and go to title step
        dialog.pbnNext.click()
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(hazard_expected_keyword, real_keywords)
    def test_exposure_multi_fields_existing_keyword(self):
        """Test for exposure layer with multiple fields.

        The wizard is started with existing keywords; each step should come
        up pre-populated with the stored values.
        """
        layer = load_test_vector_layer(
            'gisv4',
            'exposure',
            'population_multi_fields.geojson',
            clone_to_memory=True)
        self.assertIsNotNone(layer)
        expected_keyword = {
            'scale': source_scale,
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'exposure': exposure_population['key'],
            'exposure_unit': count_exposure_unit['key'],
            'inasafe_fields':
                {
                    # Dummy, select more than fields to show we can do it.
                    population_count_field['key']: [
                        'right_hand',
                        'left_hand'
                    ],
                    female_count_field['key']: [
                        'F_0_4',
                        'F_5_9',
                        'F_9_15',
                        'F_15_30',
                        'F_30_60',
                        'F_60_100'
                    ]
                },
            'date': source_date,
            'layer_geometry': layer_geometry_polygon['key'],
            'layer_purpose': layer_purpose_exposure['key'],
            'layer_mode': layer_mode_continuous['key']
        }
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer, expected_keyword)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select exposure from the purpose list
        self.select_from_list_widget(
            layer_purpose_exposure['name'],
            dialog.step_kw_purpose.lstCategories)
        # Click next to select exposure
        dialog.pbnNext.click()
        # Check if in select exposure step
        self.check_current_step(dialog.step_kw_subcategory)
        # Check if population is selected
        self.check_current_text(
            exposure_population['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select population
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # Check if continuous is selected
        self.check_current_text(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in select unit step
        self.check_current_step(dialog.step_kw_unit)
        # Check if count is selected
        self.check_current_text(
            count_exposure_unit['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select count
        dialog.pbnNext.click()
        # Check if in select field step
        self.check_current_step(dialog.step_kw_field)
        # Check if population field is selected
        population_field = expected_keyword['inasafe_fields'][
            population_count_field['key']]
        self.check_current_text(
            population_field, dialog.step_kw_field.lstFields)
        # Click next to select population field
        dialog.pbnNext.click()
        # Check field mapping steps
        self.check_current_step(dialog.step_kw_fields_mapping)
        # Click next to continue
        dialog.pbnNext.click()
        # Check if in InaSAFE field step
        self.check_current_step(dialog.step_kw_inasafe_fields)
        # Click next to finish inasafe fields step and go to source step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        self.assertEqual(dialog.step_kw_source.leSource.text(), source)
        self.assertEqual(
            dialog.step_kw_source.leSource_scale.text(), source_scale)
        self.assertEqual(
            dialog.step_kw_source.ckbSource_date.isChecked(), True)
        self.assertEqual(
            dialog.step_kw_source.dtSource_date.dateTime(), source_date)
        self.assertEqual(
            dialog.step_kw_source.leSource_license.text(), source_license)
        # Click next to finish source step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        self.assertEqual(dialog.step_kw_title.leTitle.text(), layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(byteify(real_keywords), byteify(expected_keyword))
    def test_exposure_place_population(self):
        """Test for place with population exposure.

        Walks the wizard for a classified point place layer whose
        population count field is pre-assigned via existing keywords.
        """
        layer = load_test_vector_layer(
            'gisv4',
            'exposure',
            'places.geojson',
            clone_to_memory=True)
        self.assertIsNotNone(layer)
        expected_keyword = {
            'scale': source_scale,
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'exposure': exposure_place['key'],
            'inasafe_fields':
                {
                    exposure_type_field['key']: 'Type',
                    population_count_field['key']: 'Population',
                },
            'date': source_date,
            'layer_geometry': layer_geometry_point['key'],
            'layer_purpose': layer_purpose_exposure['key'],
            'layer_mode': layer_mode_classified['key'],
            'classification': generic_place_classes['key'],
        }
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer, expected_keyword)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select exposure from the purpose list
        self.select_from_list_widget(
            layer_purpose_exposure['name'],
            dialog.step_kw_purpose.lstCategories)
        # Click next to select exposure
        dialog.pbnNext.click()
        # Check if in select exposure step
        self.check_current_step(dialog.step_kw_subcategory)
        # Check if place is selected
        self.check_current_text(
            exposure_place['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select place
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # Check if classified is selected
        self.check_current_text(
            layer_mode_classified['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select classified
        dialog.pbnNext.click()
        # Check if in select field step
        self.check_current_step(dialog.step_kw_field)
        # Check if place type field is selected
        place_type_field = expected_keyword['inasafe_fields'][
            exposure_type_field['key']]
        self.check_current_text(
            place_type_field, dialog.step_kw_field.lstFields)
        # Click next to select place type field
        dialog.pbnNext.click()
        # Check if in select classification step
        self.check_current_step(dialog.step_kw_classification)
        # Check if generic place classes is selected.
        self.check_current_text(
            generic_place_classes['name'],
            dialog.step_kw_classification.lstClassifications)
        # Click next to select the classifications
        dialog.pbnNext.click()
        # Check if in classify step
        self.check_current_step(dialog.step_kw_classify)
        # Click next to finish value mapping
        dialog.pbnNext.click()
        # select additional keywords / inasafe fields step
        self.check_current_step(dialog.step_kw_inasafe_fields)
        current_inasafe_field = dialog.step_kw_inasafe_fields.\
            get_inasafe_fields()
        population_field = current_inasafe_field.get(
            population_count_field['key'])
        expected_population_field = expected_keyword['inasafe_fields'][
            population_count_field['key']]
        # Check if the population field is set.
        self.assertEqual(population_field, expected_population_field)
        # Click next to finish set the InaSAFE fields
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        # Click next to finish source step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        self.assertEqual(dialog.step_kw_title.leTitle.text(), layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(byteify(real_keywords), byteify(expected_keyword))
    def test_classified_raster_keywords(self):
        """Test keyword wizard for classified raster.

        Starts from an empty keyword set and fills in every wizard step,
        then compares the generated keywords against the expected dict.
        """
        path = standard_data_path('hazard', 'classified_flood_20_20.asc')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='classified_flood_20_20',
            extension='.asc',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        layer.keywords = {}
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select flood
        self.select_from_list_widget(
            hazard_flood['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select flood
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select classified mode
        self.select_from_list_widget(
            layer_mode_classified['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select classified
        dialog.pbnNext.click()
        # Check if in multi classification step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Change combo box
        dialog.step_kw_multi_classifications.exposure_combo_boxes[
            0].setCurrentIndex(1)
        # Click save
        dialog.step_kw_multi_classifications.save_button.click()
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        self.check_current_step(dialog.step_kw_extra_keywords)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        expected_keyword = {
            'active_band': 1,
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_flood['key'],
            'date': source_date,
            'layer_geometry': layer_geometry_raster['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_classified['key'],
            'value_maps': {
                exposure_land_cover['key']: {
                    flood_hazard_classes['key']: {
                        'active': True,
                        'classes': {
                            'wet': [1.0, 2.0, 3.0]
                        }
                    }
                }
            }
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
    def test_classified_raster_existing_keywords(self):
        """Test keyword wizard for existing keywords classified raster.

        The wizard is opened with keywords already set; each step should
        come up pre-selected and the round trip must preserve the values.
        """
        layer = clone_raster_layer(
            name='classified_flood_20_20',
            extension='.asc',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        expected_keyword = {
            'active_band': 1,
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_flood['key'],
            'value_maps': {},
            'date': source_date,
            'layer_geometry': layer_geometry_raster['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_classified['key']
        }
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer, expected_keyword)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Check if hazard is selected
        self.check_current_text(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # Check if flood is selected
        self.check_current_text(
            hazard_flood['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select flood
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # Check if multiple event is selected
        self.check_current_text(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # Check if classified is selected
        self.check_current_text(
            layer_mode_classified['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select classified
        dialog.pbnNext.click()
        # Check if in select classification step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Change combo box
        dialog.step_kw_multi_classifications.exposure_combo_boxes[
            0].setCurrentIndex(1)
        # Click save
        dialog.step_kw_multi_classifications.save_button.click()
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        self.assertEqual(dialog.step_kw_source.leSource.text(), source)
        self.assertEqual(
            dialog.step_kw_source.leSource_scale.text(), source_scale)
        self.assertEqual(
            dialog.step_kw_source.ckbSource_date.isChecked(), True)
        self.assertEqual(
            dialog.step_kw_source.dtSource_date.dateTime(), source_date)
        self.assertEqual(
            dialog.step_kw_source.leSource_license.text(), source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        self.check_current_step(dialog.step_kw_extra_keywords)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        self.assertEqual(dialog.step_kw_title.leTitle.text(), layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
    def test_continuous_raster_keywords(self):
        """Test keyword wizard for continuous raster.

        Starts from an empty keyword set, selects continuous mode with a
        metres unit, and checks the generated thresholds keywords.
        """
        path = standard_data_path('hazard', 'continuous_flood_20_20.asc')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='continuous_flood_20_20',
            extension='.asc',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        layer.keywords = {}
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select flood
        self.select_from_list_widget(
            hazard_flood['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select flood
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select continuous mode
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in select unit step
        self.check_current_step(dialog.step_kw_unit)
        # select unit metres
        self.select_from_list_widget(
            unit_metres['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select unit metres
        dialog.pbnNext.click()
        # Check if in select multi classifications step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Change combo box
        dialog.step_kw_multi_classifications.exposure_combo_boxes[
            0].setCurrentIndex(1)
        # Click save
        dialog.step_kw_multi_classifications.save_button.click()
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        self.check_current_step(dialog.step_kw_extra_keywords)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        expected_keyword = {
            'active_band': 1,
            'continuous_hazard_unit': 'metres',
            'date': source_date,
            'hazard': hazard_flood['key'],
            'hazard_category': hazard_category_multiple_event['key'],
            'layer_geometry': layer_geometry_raster['key'],
            'layer_mode': layer_mode_continuous['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'license': source_license,
            'scale': source_scale,
            'source': source,
            'title': layer_title,
            'url': source_url,
            'thresholds': {
                exposure_land_cover['key']: {
                    flood_hazard_classes['key']: {
                        'active': True,
                        'classes': {
                            'dry': [0.0, 1.0],
                            'wet': [1.0, big_number]
                        }
                    }
                }
            }
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(byteify(real_keywords), byteify(expected_keyword))
    def test_continuous_raster_existing_keywords(self):
        """Test keyword wizard for continuous raster with assigned keyword.

        Runs the whole wizard on a layer that already has keywords and
        asserts the keywords survive the round trip unchanged.
        """
        path = standard_data_path('hazard', 'continuous_flood_20_20.asc')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='continuous_flood_20_20',
            extension='.asc',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        original_keywords = {
            'active_band': 1,
            'continuous_hazard_unit': 'metres',
            'date': source_date,
            'hazard': hazard_flood['key'],
            'hazard_category': hazard_category_multiple_event['key'],
            'layer_geometry': layer_geometry_raster['key'],
            'layer_mode': layer_mode_continuous['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'license': source_license,
            'scale': source_scale,
            'source': source,
            'thresholds': {
                exposure_land_cover['key']: {
                    flood_hazard_classes['key']: {
                        'classes': {
                            'dry': [0, 1],
                            'wet': [1, 9999999999]
                        },
                        'active': True
                    }
                },
            },
            'title': layer_title,
            'url': source_url,
        }
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer, original_keywords)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select flood
        self.select_from_list_widget(
            hazard_flood['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select flood
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select continuous mode
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in select unit step
        self.check_current_step(dialog.step_kw_unit)
        # select unit metres
        self.select_from_list_widget(
            unit_metres['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select unit metres
        dialog.pbnNext.click()
        # Check if in select multi classifications step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        self.check_current_step(dialog.step_kw_extra_keywords)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(
            byteify(real_keywords), byteify(original_keywords))
    def test_continuous_vector(self):
        """Test continuous vector for keyword wizard.

        Walks the wizard for a continuous polygon flood hazard layer using
        the 'depth' attribute and checks the generated keywords.
        """
        layer = load_test_vector_layer(
            'hazard', 'continuous_vector.geojson', clone_to_memory=True)
        layer.keywords = {}
        self.assertIsNotNone(layer)
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'],
            dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select flood
        self.select_from_list_widget(
            hazard_flood['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select flood
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # Select continuous
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in select unit step
        self.check_current_step(dialog.step_kw_unit)
        # Select metres
        self.select_from_list_widget(
            unit_metres['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select metres
        dialog.pbnNext.click()
        # Check if in select field step
        self.check_current_step(dialog.step_kw_field)
        # Select the depth field
        depth_field = 'depth'
        self.select_from_list_widget(
            depth_field, dialog.step_kw_field.lstFields)
        # Click next to select depth
        dialog.pbnNext.click()
        # Check if in multi classification step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Change combo box
        dialog.step_kw_multi_classifications.exposure_combo_boxes[
            0].setCurrentIndex(1)
        # Click save
        dialog.step_kw_multi_classifications.save_button.click()
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        self.check_current_step(dialog.step_kw_extra_keywords)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        expected_keyword = {
            'continuous_hazard_unit': unit_metres['key'],
            'date': source_date,
            'hazard': hazard_flood['key'],
            'hazard_category': hazard_category_multiple_event['key'],
            'inasafe_fields': {hazard_value_field['key']: depth_field},
            'layer_geometry': layer_geometry_polygon['key'],
            'layer_mode': layer_mode_continuous['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'license': source_license,
            'scale': source_scale,
            'source': source,
            'thresholds': {
                exposure_land_cover['key']: {
                    flood_hazard_classes['key']: {
                        'classes': {
                            'dry': [0, 1],
                            'wet': [1, big_number]
                        },
                        'active': True
                    }
                },
            },
            'title': layer_title,
            'url': source_url
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
    # @unittest.skip(
    #     'This test has been hanging for an unknown reason a few times.')
def test_auto_select_one_item(self):
"""Test auto select if there is only one item in a list."""
layer = clone_shp_layer(
name='buildings',
include_keywords=True,
source_directory=standard_data_path('exposure'))
dialog = WizardDialog(iface=IFACE)
dialog.set_keywords_creation_mode(layer)
dialog.pbnNext.click() # choose exposure
self.assertEqual(
dialog.step_kw_subcategory.lstSubcategories.currentRow(), 2)
num_item = dialog.step_kw_subcategory.lstSubcategories.count()
dialog.close()
self.assertTrue(num_item == 3)
    def test_earthquake_raster(self):
        """Test for Earthquake raster keyword wizard.

        Walks every wizard step for a continuous MMI earthquake raster and
        verifies the keywords produced at the end.
        """
        path = standard_data_path('hazard', 'earthquake.tif')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='earthquake',
            extension='.tif',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        # Start from a clean slate so the wizard cannot pre-fill anything.
        layer.keywords = {}
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select EQ
        self.select_from_list_widget(
            hazard_earthquake['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select EQ
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select continuous mode
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in unit step
        self.check_current_step(dialog.step_kw_unit)
        # select MMI
        self.select_from_list_widget(
            unit_mmi['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select MMI
        dialog.pbnNext.click()
        # Check if in multi classification step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        # Check if in extra keywords step
        self.check_current_step(dialog.step_kw_extra_keywords)
        self.assertTrue(dialog.step_kw_extra_keywords.widgets_dict)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        expected_keyword = {
            'active_band': 1,
            'continuous_hazard_unit': unit_mmi['key'],
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_earthquake['key'],
            'date': source_date,
            'layer_geometry': layer_geometry_raster['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_continuous['key'],
            'thresholds': {
                exposure_population['key']: {
                    earthquake_mmi_scale['key']: {
                        'active': True,
                        'classes': default_classification_thresholds(
                            earthquake_mmi_scale)
                    }
                }
            }
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
    def test_earthquake_raster_dirty_keywords(self):
        """Test for Earthquake raster keyword wizard with pre-existing keywords.

        The layer starts with keywords containing a land cover threshold
        classification and extra keywords; after re-running the wizard the
        stale land cover classification must be dropped (issue #4214) while
        the extra keywords must be preserved.
        """
        path = standard_data_path('hazard', 'earthquake.tif')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='earthquake',
            extension='.tif',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        # "Dirty" starting keywords: includes a land cover classification
        # that the wizard is expected to discard.
        expected_keyword = {
            'continuous_hazard_unit': unit_mmi['key'],
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_earthquake['key'],
            'date': source_date,
            'layer_geometry': layer_geometry_raster['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_continuous['key'],
            'thresholds': {
                exposure_population['key']: {
                    earthquake_mmi_scale['key']: {
                        'active': True,
                        'classes': default_classification_thresholds(
                            earthquake_mmi_scale)
                    }
                },
                exposure_land_cover['key']: {
                    earthquake_mmi_scale['key']: {
                        'active': True,
                        'classes': default_classification_thresholds(
                            earthquake_mmi_scale)
                    }
                }
            },
            'extra_keywords': {
                extra_keyword_earthquake_depth['key']: 10
            }
        }
        layer.keywords = expected_keyword
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer, expected_keyword)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select EQ
        self.select_from_list_widget(
            hazard_earthquake['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select EQ
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select continuous mode
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in unit step
        self.check_current_step(dialog.step_kw_unit)
        # select MMI
        self.select_from_list_widget(
            unit_mmi['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select MMI
        dialog.pbnNext.click()
        # Check if in multi classification step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        # Check if in extra keywords step
        self.check_current_step(dialog.step_kw_extra_keywords)
        self.assertTrue(dialog.step_kw_extra_keywords.widgets_dict)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        real_keywords = dialog.get_keywords()
        # Check if classification for land cover is not exist anymore #4214
        self.assertNotIn(
            exposure_land_cover['key'],
            list(real_keywords['thresholds'].keys())
        )
        # Check if the extra keywords remain
        extra_keywords = real_keywords['extra_keywords']
        self.assertDictEqual(
            extra_keywords, expected_keyword['extra_keywords'])
    def test_cyclone_raster(self):
        """Test for cyclone raster keyword wizard when we have many units.

        Exercises the unit step with a hazard (cyclone) that offers several
        units, selecting kilometres per hour, and verifies the resulting
        keywords.
        """
        path = standard_data_path('gisv4', 'hazard', 'cyclone_AUBOM_km_h.asc')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='cyclone_AUBOM_km_h',
            extension='.asc',
            include_keywords=False,
            source_directory=standard_data_path('gisv4', 'hazard'))
        self.assertIsNotNone(layer)
        # Start from a clean slate so the wizard cannot pre-fill anything.
        layer.keywords = {}
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select cyclone
        self.select_from_list_widget(
            hazard_cyclone['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select cyclone
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select continuous mode
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in unit step
        self.check_current_step(dialog.step_kw_unit)
        # select kilometres per hour unit
        self.select_from_list_widget(
            unit_kilometres_per_hour['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select kilometres per hour
        dialog.pbnNext.click()
        # Check if in select multi classifications step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Change combo box
        dialog.step_kw_multi_classifications.exposure_combo_boxes[
            0].setCurrentIndex(1)
        # Click save
        dialog.step_kw_multi_classifications.save_button.click()
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        expected_keyword = {
            'active_band': 1,
            'continuous_hazard_unit': unit_kilometres_per_hour['key'],
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_cyclone['key'],
            'date': source_date,
            'layer_geometry': layer_geometry_raster['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_continuous['key'],
            'thresholds': {
                exposure_land_cover['key']: {
                    cyclone_au_bom_hazard_classes['key']: {
                        'active': True,
                        'classes': default_classification_thresholds(
                            cyclone_au_bom_hazard_classes,
                            unit_kilometres_per_hour['key'])
                    }
                }
            }
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
    def test_earthquake_raster_invalid_key(self):
        """Test for Earthquake raster keyword wizard with an invalid key.

        The layer starts with a threshold classification keyed by an
        unknown classification ('dummy'); after completing the wizard the
        invalid entry must be gone and only the valid defaults remain.
        """
        path = standard_data_path('hazard', 'earthquake.tif')
        message = "Path %s is not found" % path
        self.assertTrue(os.path.exists(path), message)
        layer = clone_raster_layer(
            name='earthquake',
            extension='.tif',
            include_keywords=False,
            source_directory=standard_data_path('hazard'))
        self.assertIsNotNone(layer)
        # Seed the layer with an invalid classification key ('dummy').
        layer.keywords = {
            'thresholds': {
                exposure_structure['key']: {
                    'dummy': {
                        'active': True,
                        'classes': default_classification_thresholds(
                            earthquake_mmi_scale)
                    }
                }
            }
        }
        # noinspection PyTypeChecker
        dialog = WizardDialog(iface=IFACE)
        dialog.set_keywords_creation_mode(layer)
        # Check if in select purpose step
        self.check_current_step(dialog.step_kw_purpose)
        # Select hazard
        self.select_from_list_widget(
            layer_purpose_hazard['name'], dialog.step_kw_purpose.lstCategories)
        # Click next to select hazard
        dialog.pbnNext.click()
        # Check if in select hazard step
        self.check_current_step(dialog.step_kw_subcategory)
        # select EQ
        self.select_from_list_widget(
            hazard_earthquake['name'],
            dialog.step_kw_subcategory.lstSubcategories)
        # Click next to select EQ
        dialog.pbnNext.click()
        # Check if in select hazard category step
        self.check_current_step(dialog.step_kw_hazard_category)
        # select multiple_event
        self.select_from_list_widget(
            hazard_category_multiple_event['name'],
            dialog.step_kw_hazard_category.lstHazardCategories)
        # Click next to select multiple event
        dialog.pbnNext.click()
        # Check if in select band step
        self.check_current_step(dialog.step_kw_band_selector)
        # Click next to select Band 1 (default)
        dialog.pbnNext.click()
        # Check if in select layer mode step
        self.check_current_step(dialog.step_kw_layermode)
        # select continuous mode
        self.select_from_list_widget(
            layer_mode_continuous['name'],
            dialog.step_kw_layermode.lstLayerModes)
        # Click next to select continuous
        dialog.pbnNext.click()
        # Check if in unit step
        self.check_current_step(dialog.step_kw_unit)
        # select MMI
        self.select_from_list_widget(
            unit_mmi['name'],
            dialog.step_kw_unit.lstUnits)
        # Click next to select MMI
        dialog.pbnNext.click()
        # Check if in multi classification step
        self.check_current_step(dialog.step_kw_multi_classifications)
        # Click next to finish multi classifications step
        dialog.pbnNext.click()
        # Check if in source step
        self.check_current_step(dialog.step_kw_source)
        dialog.step_kw_source.leSource.setText(source)
        dialog.step_kw_source.leSource_scale.setText(source_scale)
        dialog.step_kw_source.leSource_url.setText(source_url)
        dialog.step_kw_source.ckbSource_date.setChecked(True)
        dialog.step_kw_source.dtSource_date.setDateTime(source_date)
        dialog.step_kw_source.leSource_license.setText(source_license)
        # Click next to finish source step and go to extra keywords step
        dialog.pbnNext.click()
        # Check if in extra keywords step
        self.check_current_step(dialog.step_kw_extra_keywords)
        self.assertTrue(dialog.step_kw_extra_keywords.widgets_dict)
        # Click next to finish extra keywords step and go to title step
        dialog.pbnNext.click()
        # Check if in title step
        self.check_current_step(dialog.step_kw_title)
        dialog.step_kw_title.leTitle.setText(layer_title)
        # Click next to finish title step and go to kw summary step
        dialog.pbnNext.click()
        # Check if in summary step
        self.check_current_step(dialog.step_kw_summary)
        # Click finish
        dialog.pbnNext.click()
        # Checking Keyword Created
        expected_keyword = {
            'active_band': 1,
            'continuous_hazard_unit': unit_mmi['key'],
            'scale': source_scale,
            'hazard_category': hazard_category_multiple_event['key'],
            'license': source_license,
            'source': source,
            'url': source_url,
            'title': layer_title,
            'hazard': hazard_earthquake['key'],
            'date': source_date,
            'layer_geometry': layer_geometry_raster['key'],
            'layer_purpose': layer_purpose_hazard['key'],
            'layer_mode': layer_mode_continuous['key'],
            'thresholds': {
                exposure_population['key']: {
                    earthquake_mmi_scale['key']: {
                        'active': True,
                        'classes': default_classification_thresholds(
                            earthquake_mmi_scale)
                    }
                }
            }
        }
        real_keywords = dialog.get_keywords()
        self.assertDictEqual(dict_values_sorted(
            real_keywords), dict_values_sorted(expected_keyword))
def check_radio_button_behaviour(self, inasafe_default_dialog):
"""Test radio button behaviour so they are disabled when user set the
ratio field and enabled when there is no field selected.
"""
# Get the parameter container from dialog.
parameter_container = (
inasafe_default_dialog.parameter_container.get_parameter_widgets())
# Check every parameter widgets on the container.
for parameter_widget in parameter_container:
parameter_widget = parameter_widget.widget()
# Locate the 'Do not report' radio button.
dont_use_button = (
parameter_widget.default_input_button_group.button(
len(parameter_widget._parameter.default_values) - 2))
# 'Do not report' button should be selected since the default
# selected input is 'No Field'.
self.assertTrue(dont_use_button.isChecked())
# Select ratio field on input.
current_index = parameter_widget.input.currentIndex()
parameter_widget.input.setCurrentIndex(current_index + 1)
self.assertFalse(dont_use_button.isChecked())
parameter_widget.input.setCurrentIndex(current_index)
self.assertTrue(dont_use_button.isChecked())
if __name__ == '__main__':
    # unittest.makeSuite was deprecated in Python 3.11 and removed in 3.13;
    # TestLoader.loadTestsFromTestCase is the supported replacement.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestKeywordWizard)
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite)
| codeparrot/github-code-clean |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from azure.profiles import KnownProfiles, ProfileDefinition
from azure.profiles.multiapiclient import MultiApiClientMixin
from msrest import Deserializer, Serializer
from ._configuration import NetworkManagementClientConfiguration
from ._operations_mixin import NetworkManagementClientOperationsMixin
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class _SDKClient(object):
    def __init__(self, *args, **kwargs):
        """Fake class to support the current implementation of
        MultiApiClientMixin.

        Will be removed in the final version of the multiapi
        azure-core based client.
        """
        pass
class NetworkManagementClient(NetworkManagementClientOperationsMixin, MultiApiClientMixin, _SDKClient):
"""Network Client.
    This client contains multiple API versions, to help you deal with all of the Azure clouds
    (Azure Stack, Azure Government, Azure China, etc.).
By default, it uses the latest API version available on public Azure.
For production, you should stick to a particular api-version and/or profile.
The profile sets a mapping between an operation group and its API version.
The api-version parameter sets the default API version if the operation
group is not described in the profile.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
:param api_version: API version to use if no profile is provided, or if missing in profile.
:type api_version: str
:param base_url: Service URL
:type base_url: str
:param profile: A profile definition, from KnownProfiles to dict.
:type profile: azure.profiles.KnownProfiles
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
DEFAULT_API_VERSION = '2021-05-01'
_PROFILE_TAG = "azure.mgmt.network.NetworkManagementClient"
LATEST_PROFILE = ProfileDefinition({
_PROFILE_TAG: {
None: DEFAULT_API_VERSION,
'active_connectivity_configurations': '2021-02-01-preview',
'active_security_admin_rules': '2021-02-01-preview',
'active_security_user_rules': '2021-02-01-preview',
'admin_rule_collections': '2021-02-01-preview',
'admin_rules': '2021-02-01-preview',
'connectivity_configurations': '2021-02-01-preview',
'effective_connectivity_configurations': '2021-02-01-preview',
'effective_virtual_networks': '2021-02-01-preview',
'firewall_policy_rule_groups': '2020-04-01',
'interface_endpoints': '2019-02-01',
'network_groups': '2021-02-01-preview',
'network_manager_commits': '2021-02-01-preview',
'network_manager_deployment_status': '2021-02-01-preview',
'network_manager_effective_security_admin_rules': '2021-02-01-preview',
'network_managers': '2021-02-01-preview',
'network_security_perimeters': '2021-02-01-preview',
'p2_svpn_server_configurations': '2019-07-01',
'perimeter_associable_resource_types': '2021-02-01-preview',
'security_admin_configurations': '2021-02-01-preview',
'security_user_configurations': '2021-02-01-preview',
'user_rule_collections': '2021-02-01-preview',
'user_rules': '2021-02-01-preview',
}},
_PROFILE_TAG + " latest"
)
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
api_version: Optional[str] = None,
base_url: Optional[str] = None,
profile: KnownProfiles = KnownProfiles.default,
**kwargs # type: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = NetworkManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
super(NetworkManagementClient, self).__init__(
api_version=api_version,
profile=profile
)
@classmethod
def _models_dict(cls, api_version):
return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)}
@classmethod
def models(cls, api_version=DEFAULT_API_VERSION):
"""Module depends on the API version:
* 2015-06-15: :mod:`v2015_06_15.models<azure.mgmt.network.v2015_06_15.models>`
* 2016-09-01: :mod:`v2016_09_01.models<azure.mgmt.network.v2016_09_01.models>`
* 2016-12-01: :mod:`v2016_12_01.models<azure.mgmt.network.v2016_12_01.models>`
* 2017-03-01: :mod:`v2017_03_01.models<azure.mgmt.network.v2017_03_01.models>`
* 2017-06-01: :mod:`v2017_06_01.models<azure.mgmt.network.v2017_06_01.models>`
* 2017-10-01: :mod:`v2017_10_01.models<azure.mgmt.network.v2017_10_01.models>`
* 2018-01-01: :mod:`v2018_01_01.models<azure.mgmt.network.v2018_01_01.models>`
* 2018-02-01: :mod:`v2018_02_01.models<azure.mgmt.network.v2018_02_01.models>`
* 2018-04-01: :mod:`v2018_04_01.models<azure.mgmt.network.v2018_04_01.models>`
* 2018-06-01: :mod:`v2018_06_01.models<azure.mgmt.network.v2018_06_01.models>`
* 2018-07-01: :mod:`v2018_07_01.models<azure.mgmt.network.v2018_07_01.models>`
* 2018-08-01: :mod:`v2018_08_01.models<azure.mgmt.network.v2018_08_01.models>`
* 2018-10-01: :mod:`v2018_10_01.models<azure.mgmt.network.v2018_10_01.models>`
* 2018-11-01: :mod:`v2018_11_01.models<azure.mgmt.network.v2018_11_01.models>`
* 2018-12-01: :mod:`v2018_12_01.models<azure.mgmt.network.v2018_12_01.models>`
* 2019-02-01: :mod:`v2019_02_01.models<azure.mgmt.network.v2019_02_01.models>`
* 2019-04-01: :mod:`v2019_04_01.models<azure.mgmt.network.v2019_04_01.models>`
* 2019-06-01: :mod:`v2019_06_01.models<azure.mgmt.network.v2019_06_01.models>`
* 2019-07-01: :mod:`v2019_07_01.models<azure.mgmt.network.v2019_07_01.models>`
* 2019-08-01: :mod:`v2019_08_01.models<azure.mgmt.network.v2019_08_01.models>`
* 2019-09-01: :mod:`v2019_09_01.models<azure.mgmt.network.v2019_09_01.models>`
* 2019-11-01: :mod:`v2019_11_01.models<azure.mgmt.network.v2019_11_01.models>`
* 2019-12-01: :mod:`v2019_12_01.models<azure.mgmt.network.v2019_12_01.models>`
* 2020-03-01: :mod:`v2020_03_01.models<azure.mgmt.network.v2020_03_01.models>`
* 2020-04-01: :mod:`v2020_04_01.models<azure.mgmt.network.v2020_04_01.models>`
* 2020-05-01: :mod:`v2020_05_01.models<azure.mgmt.network.v2020_05_01.models>`
* 2020-06-01: :mod:`v2020_06_01.models<azure.mgmt.network.v2020_06_01.models>`
* 2020-07-01: :mod:`v2020_07_01.models<azure.mgmt.network.v2020_07_01.models>`
* 2020-08-01: :mod:`v2020_08_01.models<azure.mgmt.network.v2020_08_01.models>`
* 2020-11-01: :mod:`v2020_11_01.models<azure.mgmt.network.v2020_11_01.models>`
* 2021-02-01: :mod:`v2021_02_01.models<azure.mgmt.network.v2021_02_01.models>`
* 2021-02-01-preview: :mod:`v2021_02_01_preview.models<azure.mgmt.network.v2021_02_01_preview.models>`
* 2021-05-01: :mod:`v2021_05_01.models<azure.mgmt.network.v2021_05_01.models>`
"""
if api_version == '2015-06-15':
from ..v2015_06_15 import models
return models
elif api_version == '2016-09-01':
from ..v2016_09_01 import models
return models
elif api_version == '2016-12-01':
from ..v2016_12_01 import models
return models
elif api_version == '2017-03-01':
from ..v2017_03_01 import models
return models
elif api_version == '2017-06-01':
from ..v2017_06_01 import models
return models
elif api_version == '2017-10-01':
from ..v2017_10_01 import models
return models
elif api_version == '2018-01-01':
from ..v2018_01_01 import models
return models
elif api_version == '2018-02-01':
from ..v2018_02_01 import models
return models
elif api_version == '2018-04-01':
from ..v2018_04_01 import models
return models
elif api_version == '2018-06-01':
from ..v2018_06_01 import models
return models
elif api_version == '2018-07-01':
from ..v2018_07_01 import models
return models
elif api_version == '2018-08-01':
from ..v2018_08_01 import models
return models
elif api_version == '2018-10-01':
from ..v2018_10_01 import models
return models
elif api_version == '2018-11-01':
from ..v2018_11_01 import models
return models
elif api_version == '2018-12-01':
from ..v2018_12_01 import models
return models
elif api_version == '2019-02-01':
from ..v2019_02_01 import models
return models
elif api_version == '2019-04-01':
from ..v2019_04_01 import models
return models
elif api_version == '2019-06-01':
from ..v2019_06_01 import models
return models
elif api_version == '2019-07-01':
from ..v2019_07_01 import models
return models
elif api_version == '2019-08-01':
from ..v2019_08_01 import models
return models
elif api_version == '2019-09-01':
from ..v2019_09_01 import models
return models
elif api_version == '2019-11-01':
from ..v2019_11_01 import models
return models
elif api_version == '2019-12-01':
from ..v2019_12_01 import models
return models
elif api_version == '2020-03-01':
from ..v2020_03_01 import models
return models
elif api_version == '2020-04-01':
from ..v2020_04_01 import models
return models
elif api_version == '2020-05-01':
from ..v2020_05_01 import models
return models
elif api_version == '2020-06-01':
from ..v2020_06_01 import models
return models
elif api_version == '2020-07-01':
from ..v2020_07_01 import models
return models
elif api_version == '2020-08-01':
from ..v2020_08_01 import models
return models
elif api_version == '2020-11-01':
from ..v2020_11_01 import models
return models
elif api_version == '2021-02-01':
from ..v2021_02_01 import models
return models
elif api_version == '2021-02-01-preview':
from ..v2021_02_01_preview import models
return models
elif api_version == '2021-05-01':
from ..v2021_05_01 import models
return models
raise ValueError("API version {} is not available".format(api_version))
@property
def active_connectivity_configurations(self):
"""Instance depends on the API version:
* 2021-02-01-preview: :class:`ActiveConnectivityConfigurationsOperations<azure.mgmt.network.v2021_02_01_preview.aio.operations.ActiveConnectivityConfigurationsOperations>`
"""
api_version = self._get_api_version('active_connectivity_configurations')
if api_version == '2021-02-01-preview':
from ..v2021_02_01_preview.aio.operations import ActiveConnectivityConfigurationsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'active_connectivity_configurations'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def active_security_admin_rules(self):
"""Instance depends on the API version:
* 2021-02-01-preview: :class:`ActiveSecurityAdminRulesOperations<azure.mgmt.network.v2021_02_01_preview.aio.operations.ActiveSecurityAdminRulesOperations>`
"""
api_version = self._get_api_version('active_security_admin_rules')
if api_version == '2021-02-01-preview':
from ..v2021_02_01_preview.aio.operations import ActiveSecurityAdminRulesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'active_security_admin_rules'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def active_security_user_rules(self):
"""Instance depends on the API version:
* 2021-02-01-preview: :class:`ActiveSecurityUserRulesOperations<azure.mgmt.network.v2021_02_01_preview.aio.operations.ActiveSecurityUserRulesOperations>`
"""
api_version = self._get_api_version('active_security_user_rules')
if api_version == '2021-02-01-preview':
from ..v2021_02_01_preview.aio.operations import ActiveSecurityUserRulesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'active_security_user_rules'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def admin_rule_collections(self):
"""Instance depends on the API version:
* 2021-02-01-preview: :class:`AdminRuleCollectionsOperations<azure.mgmt.network.v2021_02_01_preview.aio.operations.AdminRuleCollectionsOperations>`
"""
api_version = self._get_api_version('admin_rule_collections')
if api_version == '2021-02-01-preview':
from ..v2021_02_01_preview.aio.operations import AdminRuleCollectionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'admin_rule_collections'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def admin_rules(self):
"""Instance depends on the API version:
* 2021-02-01-preview: :class:`AdminRulesOperations<azure.mgmt.network.v2021_02_01_preview.aio.operations.AdminRulesOperations>`
"""
api_version = self._get_api_version('admin_rules')
if api_version == '2021-02-01-preview':
from ..v2021_02_01_preview.aio.operations import AdminRulesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'admin_rules'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def application_gateway_private_endpoint_connections(self):
"""Instance depends on the API version:
* 2020-05-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2020_05_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
* 2020-06-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2020_06_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
* 2020-07-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2020_07_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
* 2020-08-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2020_08_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
* 2020-11-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2020_11_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
* 2021-02-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2021_02_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
* 2021-05-01: :class:`ApplicationGatewayPrivateEndpointConnectionsOperations<azure.mgmt.network.v2021_05_01.aio.operations.ApplicationGatewayPrivateEndpointConnectionsOperations>`
"""
api_version = self._get_api_version('application_gateway_private_endpoint_connections')
if api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import ApplicationGatewayPrivateEndpointConnectionsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'application_gateway_private_endpoint_connections'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def application_gateway_private_link_resources(self):
"""Instance depends on the API version:
* 2020-05-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2020_05_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
* 2020-06-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2020_06_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
* 2020-07-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2020_07_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
* 2020-08-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2020_08_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
* 2020-11-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2020_11_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
* 2021-02-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2021_02_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
* 2021-05-01: :class:`ApplicationGatewayPrivateLinkResourcesOperations<azure.mgmt.network.v2021_05_01.aio.operations.ApplicationGatewayPrivateLinkResourcesOperations>`
"""
api_version = self._get_api_version('application_gateway_private_link_resources')
if api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import ApplicationGatewayPrivateLinkResourcesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'application_gateway_private_link_resources'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def application_gateways(self):
"""Instance depends on the API version:
* 2015-06-15: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2015_06_15.aio.operations.ApplicationGatewaysOperations>`
* 2016-09-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2016_09_01.aio.operations.ApplicationGatewaysOperations>`
* 2016-12-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2016_12_01.aio.operations.ApplicationGatewaysOperations>`
* 2017-03-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2017_03_01.aio.operations.ApplicationGatewaysOperations>`
* 2017-06-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2017_06_01.aio.operations.ApplicationGatewaysOperations>`
* 2017-10-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2017_10_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-01-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_01_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-02-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_02_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-04-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_04_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-06-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_06_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-07-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_07_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-08-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_08_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-10-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_10_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-11-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_11_01.aio.operations.ApplicationGatewaysOperations>`
* 2018-12-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2018_12_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-02-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_02_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-04-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_04_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-06-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_06_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-07-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_07_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-08-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_08_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-09-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_09_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-11-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_11_01.aio.operations.ApplicationGatewaysOperations>`
* 2019-12-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2019_12_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-03-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_03_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-04-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_04_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-05-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_05_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-06-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_06_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-07-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_07_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-08-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_08_01.aio.operations.ApplicationGatewaysOperations>`
* 2020-11-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2020_11_01.aio.operations.ApplicationGatewaysOperations>`
* 2021-02-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2021_02_01.aio.operations.ApplicationGatewaysOperations>`
* 2021-05-01: :class:`ApplicationGatewaysOperations<azure.mgmt.network.v2021_05_01.aio.operations.ApplicationGatewaysOperations>`
"""
api_version = self._get_api_version('application_gateways')
if api_version == '2015-06-15':
from ..v2015_06_15.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2016-09-01':
from ..v2016_09_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2016-12-01':
from ..v2016_12_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2017-03-01':
from ..v2017_03_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2017-06-01':
from ..v2017_06_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2017-10-01':
from ..v2017_10_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-01-01':
from ..v2018_01_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-02-01':
from ..v2018_02_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import ApplicationGatewaysOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import ApplicationGatewaysOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'application_gateways'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def application_security_groups(self):
"""Instance depends on the API version:
* 2017-10-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2017_10_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-01-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_01_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-02-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_02_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-04-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_04_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-06-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_06_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-07-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_07_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-08-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_08_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-10-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_10_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-11-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_11_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2018-12-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2018_12_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-02-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_02_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-04-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_04_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-06-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_06_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-07-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_07_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-08-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_08_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-09-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_09_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-11-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_11_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2019-12-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2019_12_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-03-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_03_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-04-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_04_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-05-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_05_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-06-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_06_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-07-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_07_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-08-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_08_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2020-11-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2020_11_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2021-02-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2021_02_01.aio.operations.ApplicationSecurityGroupsOperations>`
* 2021-05-01: :class:`ApplicationSecurityGroupsOperations<azure.mgmt.network.v2021_05_01.aio.operations.ApplicationSecurityGroupsOperations>`
"""
api_version = self._get_api_version('application_security_groups')
if api_version == '2017-10-01':
from ..v2017_10_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-01-01':
from ..v2018_01_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-02-01':
from ..v2018_02_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import ApplicationSecurityGroupsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'application_security_groups'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def available_delegations(self):
"""Instance depends on the API version:
* 2018-08-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2018_08_01.aio.operations.AvailableDelegationsOperations>`
* 2018-10-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2018_10_01.aio.operations.AvailableDelegationsOperations>`
* 2018-11-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2018_11_01.aio.operations.AvailableDelegationsOperations>`
* 2018-12-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2018_12_01.aio.operations.AvailableDelegationsOperations>`
* 2019-02-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_02_01.aio.operations.AvailableDelegationsOperations>`
* 2019-04-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_04_01.aio.operations.AvailableDelegationsOperations>`
* 2019-06-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_06_01.aio.operations.AvailableDelegationsOperations>`
* 2019-07-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_07_01.aio.operations.AvailableDelegationsOperations>`
* 2019-08-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_08_01.aio.operations.AvailableDelegationsOperations>`
* 2019-09-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_09_01.aio.operations.AvailableDelegationsOperations>`
* 2019-11-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_11_01.aio.operations.AvailableDelegationsOperations>`
* 2019-12-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2019_12_01.aio.operations.AvailableDelegationsOperations>`
* 2020-03-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_03_01.aio.operations.AvailableDelegationsOperations>`
* 2020-04-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_04_01.aio.operations.AvailableDelegationsOperations>`
* 2020-05-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_05_01.aio.operations.AvailableDelegationsOperations>`
* 2020-06-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_06_01.aio.operations.AvailableDelegationsOperations>`
* 2020-07-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_07_01.aio.operations.AvailableDelegationsOperations>`
* 2020-08-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_08_01.aio.operations.AvailableDelegationsOperations>`
* 2020-11-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2020_11_01.aio.operations.AvailableDelegationsOperations>`
* 2021-02-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2021_02_01.aio.operations.AvailableDelegationsOperations>`
* 2021-05-01: :class:`AvailableDelegationsOperations<azure.mgmt.network.v2021_05_01.aio.operations.AvailableDelegationsOperations>`
"""
api_version = self._get_api_version('available_delegations')
if api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AvailableDelegationsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AvailableDelegationsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'available_delegations'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def available_endpoint_services(self):
"""Instance depends on the API version:
* 2017-06-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2017_06_01.aio.operations.AvailableEndpointServicesOperations>`
* 2017-10-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2017_10_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-01-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_01_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-02-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_02_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-04-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_04_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-06-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_06_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-07-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_07_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-08-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_08_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-10-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_10_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-11-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_11_01.aio.operations.AvailableEndpointServicesOperations>`
* 2018-12-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2018_12_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-02-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_02_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-04-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_04_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-06-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_06_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-07-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_07_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-08-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_08_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-09-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_09_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-11-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_11_01.aio.operations.AvailableEndpointServicesOperations>`
* 2019-12-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2019_12_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-03-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_03_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-04-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_04_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-05-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_05_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-06-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_06_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-07-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_07_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-08-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_08_01.aio.operations.AvailableEndpointServicesOperations>`
* 2020-11-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2020_11_01.aio.operations.AvailableEndpointServicesOperations>`
* 2021-02-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2021_02_01.aio.operations.AvailableEndpointServicesOperations>`
* 2021-05-01: :class:`AvailableEndpointServicesOperations<azure.mgmt.network.v2021_05_01.aio.operations.AvailableEndpointServicesOperations>`
"""
api_version = self._get_api_version('available_endpoint_services')
if api_version == '2017-06-01':
from ..v2017_06_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2017-10-01':
from ..v2017_10_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-01-01':
from ..v2018_01_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-02-01':
from ..v2018_02_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AvailableEndpointServicesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'available_endpoint_services'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def available_private_endpoint_types(self):
"""Instance depends on the API version:
* 2019-04-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_04_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2019-06-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_06_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2019-07-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_07_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2019-08-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_08_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2019-09-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_09_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2019-11-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_11_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2019-12-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2019_12_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-03-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_03_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-04-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_04_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-05-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_05_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-06-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_06_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-07-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_07_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-08-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_08_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2020-11-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2020_11_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2021-02-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2021_02_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
* 2021-05-01: :class:`AvailablePrivateEndpointTypesOperations<azure.mgmt.network.v2021_05_01.aio.operations.AvailablePrivateEndpointTypesOperations>`
"""
api_version = self._get_api_version('available_private_endpoint_types')
if api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AvailablePrivateEndpointTypesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'available_private_endpoint_types'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def available_resource_group_delegations(self):
"""Instance depends on the API version:
* 2018-08-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2018_08_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2018-10-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2018_10_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2018-11-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2018_11_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2018-12-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2018_12_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-02-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_02_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-04-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_04_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-06-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_06_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-07-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_07_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-08-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_08_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-09-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_09_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-11-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_11_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2019-12-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2019_12_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-03-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_03_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-04-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_04_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-05-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_05_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-06-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_06_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-07-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_07_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-08-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_08_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2020-11-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2020_11_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2021-02-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2021_02_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
* 2021-05-01: :class:`AvailableResourceGroupDelegationsOperations<azure.mgmt.network.v2021_05_01.aio.operations.AvailableResourceGroupDelegationsOperations>`
"""
api_version = self._get_api_version('available_resource_group_delegations')
if api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AvailableResourceGroupDelegationsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'available_resource_group_delegations'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def available_service_aliases(self):
"""Instance depends on the API version:
* 2019-08-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2019_08_01.aio.operations.AvailableServiceAliasesOperations>`
* 2019-09-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2019_09_01.aio.operations.AvailableServiceAliasesOperations>`
* 2019-11-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2019_11_01.aio.operations.AvailableServiceAliasesOperations>`
* 2019-12-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2019_12_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-03-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_03_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-04-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_04_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-05-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_05_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-06-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_06_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-07-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_07_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-08-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_08_01.aio.operations.AvailableServiceAliasesOperations>`
* 2020-11-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2020_11_01.aio.operations.AvailableServiceAliasesOperations>`
* 2021-02-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2021_02_01.aio.operations.AvailableServiceAliasesOperations>`
* 2021-05-01: :class:`AvailableServiceAliasesOperations<azure.mgmt.network.v2021_05_01.aio.operations.AvailableServiceAliasesOperations>`
"""
api_version = self._get_api_version('available_service_aliases')
if api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AvailableServiceAliasesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'available_service_aliases'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def azure_firewall_fqdn_tags(self):
"""Instance depends on the API version:
* 2018-08-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2018_08_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2018-10-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2018_10_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2018-11-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2018_11_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2018-12-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2018_12_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-02-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_02_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-04-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_04_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-06-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_06_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-07-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_07_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-08-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_08_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-09-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_09_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-11-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_11_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2019-12-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2019_12_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-03-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_03_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-04-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_04_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-05-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_05_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-06-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_06_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-07-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_07_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-08-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_08_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2020-11-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2020_11_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2021-02-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2021_02_01.aio.operations.AzureFirewallFqdnTagsOperations>`
* 2021-05-01: :class:`AzureFirewallFqdnTagsOperations<azure.mgmt.network.v2021_05_01.aio.operations.AzureFirewallFqdnTagsOperations>`
"""
api_version = self._get_api_version('azure_firewall_fqdn_tags')
if api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AzureFirewallFqdnTagsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'azure_firewall_fqdn_tags'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def azure_firewalls(self):
"""Instance depends on the API version:
* 2018-04-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_04_01.aio.operations.AzureFirewallsOperations>`
* 2018-06-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_06_01.aio.operations.AzureFirewallsOperations>`
* 2018-07-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_07_01.aio.operations.AzureFirewallsOperations>`
* 2018-08-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_08_01.aio.operations.AzureFirewallsOperations>`
* 2018-10-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_10_01.aio.operations.AzureFirewallsOperations>`
* 2018-11-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_11_01.aio.operations.AzureFirewallsOperations>`
* 2018-12-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2018_12_01.aio.operations.AzureFirewallsOperations>`
* 2019-02-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_02_01.aio.operations.AzureFirewallsOperations>`
* 2019-04-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_04_01.aio.operations.AzureFirewallsOperations>`
* 2019-06-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_06_01.aio.operations.AzureFirewallsOperations>`
* 2019-07-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_07_01.aio.operations.AzureFirewallsOperations>`
* 2019-08-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_08_01.aio.operations.AzureFirewallsOperations>`
* 2019-09-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_09_01.aio.operations.AzureFirewallsOperations>`
* 2019-11-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_11_01.aio.operations.AzureFirewallsOperations>`
* 2019-12-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2019_12_01.aio.operations.AzureFirewallsOperations>`
* 2020-03-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_03_01.aio.operations.AzureFirewallsOperations>`
* 2020-04-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_04_01.aio.operations.AzureFirewallsOperations>`
* 2020-05-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_05_01.aio.operations.AzureFirewallsOperations>`
* 2020-06-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_06_01.aio.operations.AzureFirewallsOperations>`
* 2020-07-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_07_01.aio.operations.AzureFirewallsOperations>`
* 2020-08-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_08_01.aio.operations.AzureFirewallsOperations>`
* 2020-11-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2020_11_01.aio.operations.AzureFirewallsOperations>`
* 2021-02-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2021_02_01.aio.operations.AzureFirewallsOperations>`
* 2021-05-01: :class:`AzureFirewallsOperations<azure.mgmt.network.v2021_05_01.aio.operations.AzureFirewallsOperations>`
"""
api_version = self._get_api_version('azure_firewalls')
if api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import AzureFirewallsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import AzureFirewallsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'azure_firewalls'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def bastion_hosts(self):
"""Instance depends on the API version:
* 2019-04-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_04_01.aio.operations.BastionHostsOperations>`
* 2019-06-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_06_01.aio.operations.BastionHostsOperations>`
* 2019-07-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_07_01.aio.operations.BastionHostsOperations>`
* 2019-08-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_08_01.aio.operations.BastionHostsOperations>`
* 2019-09-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_09_01.aio.operations.BastionHostsOperations>`
* 2019-11-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_11_01.aio.operations.BastionHostsOperations>`
* 2019-12-01: :class:`BastionHostsOperations<azure.mgmt.network.v2019_12_01.aio.operations.BastionHostsOperations>`
* 2020-03-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_03_01.aio.operations.BastionHostsOperations>`
* 2020-04-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_04_01.aio.operations.BastionHostsOperations>`
* 2020-05-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_05_01.aio.operations.BastionHostsOperations>`
* 2020-06-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_06_01.aio.operations.BastionHostsOperations>`
* 2020-07-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_07_01.aio.operations.BastionHostsOperations>`
* 2020-08-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_08_01.aio.operations.BastionHostsOperations>`
* 2020-11-01: :class:`BastionHostsOperations<azure.mgmt.network.v2020_11_01.aio.operations.BastionHostsOperations>`
* 2021-02-01: :class:`BastionHostsOperations<azure.mgmt.network.v2021_02_01.aio.operations.BastionHostsOperations>`
* 2021-05-01: :class:`BastionHostsOperations<azure.mgmt.network.v2021_05_01.aio.operations.BastionHostsOperations>`
"""
api_version = self._get_api_version('bastion_hosts')
if api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import BastionHostsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import BastionHostsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'bastion_hosts'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def bgp_service_communities(self):
"""Instance depends on the API version:
* 2016-12-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2016_12_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2017-03-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2017_03_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2017-06-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2017_06_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2017-10-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2017_10_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-01-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_01_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-02-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_02_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-04-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_04_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-06-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_06_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-07-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_07_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-08-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_08_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-10-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_10_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-11-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_11_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2018-12-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2018_12_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-02-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_02_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-04-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_04_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-06-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_06_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-07-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_07_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-08-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_08_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-09-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_09_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-11-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_11_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2019-12-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2019_12_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-03-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_03_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-04-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_04_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-05-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_05_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-06-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_06_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-07-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_07_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-08-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_08_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2020-11-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2020_11_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2021-02-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2021_02_01.aio.operations.BgpServiceCommunitiesOperations>`
* 2021-05-01: :class:`BgpServiceCommunitiesOperations<azure.mgmt.network.v2021_05_01.aio.operations.BgpServiceCommunitiesOperations>`
"""
api_version = self._get_api_version('bgp_service_communities')
if api_version == '2016-12-01':
from ..v2016_12_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2017-03-01':
from ..v2017_03_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2017-06-01':
from ..v2017_06_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2017-10-01':
from ..v2017_10_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-01-01':
from ..v2018_01_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-02-01':
from ..v2018_02_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import BgpServiceCommunitiesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'bgp_service_communities'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def connection_monitors(self):
"""Instance depends on the API version:
* 2017-10-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2017_10_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-01-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_01_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-02-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_02_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-04-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_04_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-06-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_06_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-07-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_07_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-08-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_08_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-10-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_10_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-11-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_11_01.aio.operations.ConnectionMonitorsOperations>`
* 2018-12-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2018_12_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-02-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_02_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-04-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_04_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-06-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_06_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-07-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_07_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-08-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_08_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-09-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_09_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-11-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_11_01.aio.operations.ConnectionMonitorsOperations>`
* 2019-12-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2019_12_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-03-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_03_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-04-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_04_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-05-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_05_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-06-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_06_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-07-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_07_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-08-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_08_01.aio.operations.ConnectionMonitorsOperations>`
* 2020-11-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2020_11_01.aio.operations.ConnectionMonitorsOperations>`
* 2021-02-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2021_02_01.aio.operations.ConnectionMonitorsOperations>`
* 2021-05-01: :class:`ConnectionMonitorsOperations<azure.mgmt.network.v2021_05_01.aio.operations.ConnectionMonitorsOperations>`
"""
api_version = self._get_api_version('connection_monitors')
if api_version == '2017-10-01':
from ..v2017_10_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-01-01':
from ..v2018_01_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-02-01':
from ..v2018_02_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import ConnectionMonitorsOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import ConnectionMonitorsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'connection_monitors'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def connectivity_configurations(self):
"""Instance depends on the API version:
* 2021-02-01-preview: :class:`ConnectivityConfigurationsOperations<azure.mgmt.network.v2021_02_01_preview.aio.operations.ConnectivityConfigurationsOperations>`
"""
api_version = self._get_api_version('connectivity_configurations')
if api_version == '2021-02-01-preview':
from ..v2021_02_01_preview.aio.operations import ConnectivityConfigurationsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'connectivity_configurations'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def custom_ip_prefixes(self):
"""Instance depends on the API version:
* 2020-06-01: :class:`CustomIPPrefixesOperations<azure.mgmt.network.v2020_06_01.aio.operations.CustomIPPrefixesOperations>`
* 2020-07-01: :class:`CustomIPPrefixesOperations<azure.mgmt.network.v2020_07_01.aio.operations.CustomIPPrefixesOperations>`
* 2020-08-01: :class:`CustomIPPrefixesOperations<azure.mgmt.network.v2020_08_01.aio.operations.CustomIPPrefixesOperations>`
* 2020-11-01: :class:`CustomIPPrefixesOperations<azure.mgmt.network.v2020_11_01.aio.operations.CustomIPPrefixesOperations>`
* 2021-02-01: :class:`CustomIPPrefixesOperations<azure.mgmt.network.v2021_02_01.aio.operations.CustomIPPrefixesOperations>`
* 2021-05-01: :class:`CustomIPPrefixesOperations<azure.mgmt.network.v2021_05_01.aio.operations.CustomIPPrefixesOperations>`
"""
api_version = self._get_api_version('custom_ip_prefixes')
if api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import CustomIPPrefixesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import CustomIPPrefixesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import CustomIPPrefixesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import CustomIPPrefixesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import CustomIPPrefixesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import CustomIPPrefixesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'custom_ip_prefixes'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def ddos_custom_policies(self):
"""Instance depends on the API version:
* 2018-11-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2018_11_01.aio.operations.DdosCustomPoliciesOperations>`
* 2018-12-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2018_12_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-02-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_02_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-04-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_04_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-06-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_06_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-07-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_07_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-08-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_08_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-09-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_09_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-11-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_11_01.aio.operations.DdosCustomPoliciesOperations>`
* 2019-12-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2019_12_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-03-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_03_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-04-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_04_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-05-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_05_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-06-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_06_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-07-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_07_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-08-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_08_01.aio.operations.DdosCustomPoliciesOperations>`
* 2020-11-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2020_11_01.aio.operations.DdosCustomPoliciesOperations>`
* 2021-02-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2021_02_01.aio.operations.DdosCustomPoliciesOperations>`
* 2021-05-01: :class:`DdosCustomPoliciesOperations<azure.mgmt.network.v2021_05_01.aio.operations.DdosCustomPoliciesOperations>`
"""
api_version = self._get_api_version('ddos_custom_policies')
if api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import DdosCustomPoliciesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'ddos_custom_policies'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def ddos_protection_plans(self):
"""Instance depends on the API version:
* 2018-02-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_02_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-04-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_04_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-06-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_06_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-07-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_07_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-08-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_08_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-10-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_10_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-11-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_11_01.aio.operations.DdosProtectionPlansOperations>`
* 2018-12-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2018_12_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-02-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_02_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-04-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_04_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-06-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_06_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-07-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_07_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-08-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_08_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-09-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_09_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-11-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_11_01.aio.operations.DdosProtectionPlansOperations>`
* 2019-12-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2019_12_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-03-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_03_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-04-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_04_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-05-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_05_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-06-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_06_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-07-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_07_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-08-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_08_01.aio.operations.DdosProtectionPlansOperations>`
* 2020-11-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2020_11_01.aio.operations.DdosProtectionPlansOperations>`
* 2021-02-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2021_02_01.aio.operations.DdosProtectionPlansOperations>`
* 2021-05-01: :class:`DdosProtectionPlansOperations<azure.mgmt.network.v2021_05_01.aio.operations.DdosProtectionPlansOperations>`
"""
api_version = self._get_api_version('ddos_protection_plans')
if api_version == '2018-02-01':
from ..v2018_02_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-04-01':
from ..v2018_04_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-06-01':
from ..v2018_06_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-07-01':
from ..v2018_07_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-08-01':
from ..v2018_08_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-10-01':
from ..v2018_10_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-11-01':
from ..v2018_11_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2018-12-01':
from ..v2018_12_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-02-01':
from ..v2019_02_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-04-01':
from ..v2019_04_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-06-01':
from ..v2019_06_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-07-01':
from ..v2019_07_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-08-01':
from ..v2019_08_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-09-01':
from ..v2019_09_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-11-01':
from ..v2019_11_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2019-12-01':
from ..v2019_12_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-03-01':
from ..v2020_03_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-05-01':
from ..v2020_05_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-06-01':
from ..v2020_06_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-07-01':
from ..v2020_07_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-08-01':
from ..v2020_08_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2020-11-01':
from ..v2020_11_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2021-02-01':
from ..v2021_02_01.aio.operations import DdosProtectionPlansOperations as OperationClass
elif api_version == '2021-05-01':
from ..v2021_05_01.aio.operations import DdosProtectionPlansOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'ddos_protection_plans'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def default_security_rules(self):
"""Instance depends on the API version:
* 2017-06-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2017_06_01.aio.operations.DefaultSecurityRulesOperations>`
* 2017-10-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2017_10_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-01-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_01_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-02-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_02_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-04-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_04_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-06-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_06_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-07-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_07_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-08-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_08_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-10-01: :class:`DefaultSecurityRulesOperations<azure.mgmt.network.v2018_10_01.aio.operations.DefaultSecurityRulesOperations>`
* 2018-11-01: :class:`DefaultSecurityRulesOperations | codeparrot/github-code-clean |
# coding: utf8
{
' (leave empty to detach account)': ' (leave empty to detach account)',
' Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': ' Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.',
' by ': ' by ',
' is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities.': ' is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities.',
' on ': ' on ',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'# of Houses Damaged': '# of Houses Damaged',
'# of Houses Destroyed': 'Số căn nhà bị phá hủy',
'# of International Staff': '# of International Staff',
'# of National Staff': '# of National Staff',
'# of People Affected': 'Số người bị ảnh hưởng',
'# of People Deceased': '# of People Deceased',
'# of People Injured': '# of People Injured',
'# of Vehicles': '# of Vehicles',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%s rows deleted': '%s rows deleted',
'%s rows updated': '%s rows updated',
'(Constraints Only)': '(Constraints Only)',
') & then click on the map below to adjust the Lat/Lon fields:': ') & then click on the map below to adjust the Lat/Lon fields:',
'* Required Fields': '* Required Fields',
'0-15 minutes': '0-15 minutes',
'1 Assessment': '1 Assessment',
'1 location, shorter time, can contain multiple Tasks': '1 location, shorter time, can contain multiple Tasks',
'1-3 days': '1-3 days',
'1. Fill the necessary fields in BLOCK letters.': '1. Fill the necessary fields in BLOCK letters.',
'15-30 minutes': '15-30 phút',
'2 different options are provided here currently:': '2 different options are provided here currently:',
'2. Always use one box per letter and leave one box space to seperate words.': '2. Always use one box per letter and leave one box space to seperate words.',
'2x4 Car': '2x4 Car',
'30-60 minutes': '30-60 minutes',
'4-7 days': '4-7 days',
'4x4 Car': '4x4 Car',
'8-14 days': '8-14 ngày',
'A Reference Document such as a file, URL or contact person to verify this data. You can type the 1st few characters of the document name to link to an existing document.': 'tài liệu tham khảo như file, URL hay thông tin liên hệ đế xác nhận dữ liệu này.Bạn có thể gõ một vài ký tự đầu của tên tài liệu để kết nối tới tài liệu có sẵn',
'A Warehouse is a physical place to store items.': 'A Warehouse is a physical place to store items.',
'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. It can be a Building, a particular area in a city or anything similar.': 'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. It can be a Building, a particular area in a city or anything similar.',
'A brief description of the group (optional)': 'A brief description of the group (optional)',
'A file downloaded from a GPS containing a series of geographic points in XML format.': 'A file downloaded from a GPS containing a series of geographic points in XML format.',
'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.',
'A library of digital resources, such as photos, documents and reports': 'A library of digital resources, such as photos, documents and reports',
'A place within a Site like a Shelf, room, bin number etc.': 'Một nơi trên site như số ngăn ,số phòng,số thùng v.v',
'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.': 'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.',
'A snapshot of the location or additional documents that contain supplementary information about the Site Location can be uploaded here.': 'A snapshot of the location or additional documents that contain supplementary information about the Site Location can be uploaded here.',
'A snapshot of the location or additional documents that contain supplementary information about the Site can be uploaded here.': 'Upload ảnh chụp vị trí hoặc tài liệu bổ sung chứa thông tin bổ sung về trang web tại đây',
'A survey series with id %s does not exist. Please go back and create one.': 'A survey series with id %s does not exist. Please go back and create one.',
'ABOUT': 'ABOUT',
'ABOUT THIS MODULE': 'Giới thiệu Module này',
'ACCESS DATA': 'ACCESS DATA',
'ANY': 'ANY',
'API is documented here': 'API is documented here',
'Ability to Fill Out Surveys': 'Ability to Fill Out Surveys',
'Ability to customize the list of details tracked at a Shelter': 'Ability to customize the list of details tracked at a Shelter',
'Ability to customize the list of human resource tracked at a Shelter': 'Khả năng tùy chỉnh danh sách nguồn nhân lực theo dõi tại nơi cư trú',
'Ability to customize the list of important facilities needed at a Shelter': 'Khả năng tùy chỉnh danh sách các điều kiện quan trọng cần thiết tại một cơ sở cư trú',
'Ability to track partial fulfillment of the request': 'Ability to track partial fulfillment of the request',
'Ability to view Results of Completed and/or partially filled out Surveys': 'Ability to view Results of Completed and/or partially filled out Surveys',
'About': 'About',
'About Sahana': 'About Sahana',
'About Sahana Eden': 'About Sahana Eden',
'About this module': 'About this module',
'Access denied': 'Access denied',
'Accessibility of Affected Location': 'Accessibility of Affected Location',
'Account registered, however registration is still pending approval - please wait until confirmation received.': 'Account registered, however registration is still pending approval - please wait until confirmation received.',
'Acronym': 'Acronym',
"Acronym of the organization's name, eg. IFRC.": "Acronym of the organization's name, eg. IFRC.",
'Actionable': 'Actionable',
'Actionable by all targeted recipients': 'Actionable by all targeted recipients',
'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>': 'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>',
'Actioned?': 'Actioned?',
'Active Problems': 'Active Problems',
'Activities': 'Activities',
'Activities matching Assessments:': 'Activities matching Assessments:',
'Activities of boys 13-17yrs before disaster': 'Activities of boys 13-17yrs before disaster',
'Activities of boys 13-17yrs now': 'Activities of boys 13-17yrs now',
'Activities of boys <12yrs before disaster': 'Activities of boys <12yrs before disaster',
'Activities of boys <12yrs now': 'Activities of boys <12yrs now',
'Activities of girls 13-17yrs before disaster': 'Activities of girls 13-17yrs before disaster',
'Activities of girls 13-17yrs now': 'Activities of girls 13-17yrs now',
'Activities of girls <12yrs before disaster': 'Activities of girls <12yrs before disaster',
'Activities of girls <12yrs now': 'Activities of girls <12yrs now',
'Activities:': 'Activities:',
'Activity': 'Activity',
'Activity Added': 'Activity Added',
'Activity Deleted': 'Activity Deleted',
'Activity Details': 'Chi tiết Hoạt động',
'Activity Report': 'Activity Report',
'Activity Reports': 'Activity Reports',
'Activity Type': 'Activity Type',
'Activity Updated': 'Activity Updated',
'Add': 'Thêm',
'Add Activity': 'Thêm hoạt động',
'Add Activity Report': 'Add Activity Report',
'Add Activity Type': 'Thêm loại hoạt động',
'Add Address': 'Add Address',
'Add Aid Request': 'Thêm yêu cầu cứu trợ',
'Add Assessment': 'Add Assessment',
'Add Assessment Summary': 'Add Assessment Summary',
'Add Baseline': 'Add Baseline',
'Add Baseline Type': 'Add Baseline Type',
'Add Bed Type': 'Add Bed Type',
'Add Bin Type': 'Add Bin Type',
'Add Bins': 'Thêm Bin',
'Add Budget': 'Add Budget',
'Add Bundle': 'Add Bundle',
'Add Catalog': 'Thêm Catalog',
'Add Catalog Item': 'Add Catalog Item',
'Add Catalog.': 'Add Catalog.',
'Add Category': 'Thêm nhóm',
'Add Category<>Sub-Category<>Catalog Relation': 'Add Category<>Sub-Category<>Catalog Relation',
'Add Cholera Treatment Capability Information': 'Add Cholera Treatment Capability Information',
'Add Cluster': 'Thêm cụm',
'Add Cluster Subsector': 'Add Cluster Subsector',
'Add Config': 'Add Config',
'Add Contact': 'Thêm thông tin liên lạc',
'Add Contact Information': 'Thêm thông tin liên lạc',
'Add Disaster Victims': 'Add Disaster Victims',
'Add Distribution': 'Add Distribution',
'Add Distribution.': 'Add Distribution.',
'Add Donor': 'Thêm tên người quyên góp vào danh sách',
'Add Feature Class': 'Add Feature Class',
'Add Feature Layer': 'Thêm lớp đặc tính',
'Add Flood Report': 'Add Flood Report',
'Add Group': 'Thêm nhóm',
'Add Group Member': 'Add Group Member',
'Add Hospital': 'Thêm Bệnh viện',
'Add Identification Report': 'Add Identification Report',
'Add Identity': 'Thêm thông tin định danh',
'Add Image': 'Thêm ảnh',
'Add Impact': 'Add Impact',
'Add Impact Type': 'Add Impact Type',
'Add Incident': 'Add Incident',
'Add Incident Report': 'Thêm Báo cáo sự việc',
'Add Inventory Item': 'Add Inventory Item',
'Add Inventory Store': 'Add Inventory Store',
'Add Item': 'Add Item',
'Add Item (s)': 'Add Item (s)',
'Add Item Catalog': 'Add Item Catalog',
'Add Item Catalog ': 'Add Item Catalog ',
'Add Item Catalog Category ': 'Thêm tiêu chí cho catalog hàng hóa',
'Add Item Category': 'Add Item Category',
'Add Item Packet': 'Add Item Packet',
'Add Item Sub-Category': 'Add Item Sub-Category',
'Add Key': 'Add Key',
'Add Kit': 'Thêm Kit',
'Add Layer': 'Thêm lớp',
'Add Location': 'Add Location',
'Add Locations': 'Thêm địa điểm mới',
'Add Log Entry': 'Add Log Entry',
'Add Member': 'Thêm thành viên',
'Add Membership': 'Add Membership',
'Add Message': 'Thêm Tin nhắn',
'Add Need': 'Add Need',
'Add Need Type': 'Add Need Type',
'Add New': 'Add New',
'Add New Activity': 'Add New Activity',
'Add New Address': 'Thêm Địa chỉ mới',
'Add New Aid Request': 'Thêm yêu cầu cứu trợ mới',
'Add New Assessment': 'Add New Assessment',
'Add New Assessment Summary': 'Add New Assessment Summary',
'Add New Baseline': 'Add New Baseline',
'Add New Baseline Type': 'Add New Baseline Type',
'Add New Bin': 'Add New Bin',
'Add New Bin Type': 'Add New Bin Type',
'Add New Budget': 'Add New Budget',
'Add New Bundle': 'Add New Bundle',
'Add New Catalog Item': 'Add New Catalog Item',
'Add New Cluster': 'Thêm cụm mới',
'Add New Cluster Subsector': 'Add New Cluster Subsector',
'Add New Config': 'Thêm cấu hình mới',
'Add New Contact': 'Thêm đầu mối liên lạc mới',
'Add New Distribution': 'Add New Distribution',
'Add New Distribution Item': 'Add New Distribution Item',
'Add New Document': 'Thêm Tài liệu mới',
'Add New Donor': 'Thêm Người quyên góp',
'Add New Entry': 'Add New Entry',
'Add New Feature Class': 'Add New Feature Class',
'Add New Feature Layer': 'Add New Feature Layer',
'Add New Flood Report': 'Thêm báo cáo lũ lụt mới',
'Add New Group': 'Thêm nhóm mới',
'Add New Hospital': 'Thêm Bệnh viện mới',
'Add New Identity': 'Thêm thông tin nhận dạng mới',
'Add New Image': 'Thêm ảnh mới',
'Add New Impact': 'Add New Impact',
'Add New Impact Type': 'Add New Impact Type',
'Add New Incident': 'Thêm sự kiện',
'Add New Incident Report': 'Add New Incident Report',
'Add New Inventory Item': 'Add New Inventory Item',
'Add New Inventory Store': 'Add New Inventory Store',
'Add New Item': 'Add New Item',
'Add New Item Catalog': 'Add New Item Catalog',
'Add New Item Catalog Category': 'Add New Item Catalog Category',
'Add New Item Category': 'Add New Item Category',
'Add New Item Packet': 'Add New Item Packet',
'Add New Item Sub-Category': 'Add New Item Sub-Category',
'Add New Item to Kit': 'Add New Item to Kit',
'Add New Key': 'Thêm Key mới ',
'Add New Kit': 'Thêm Kit mới',
'Add New Layer': 'Add New Layer',
'Add New Location': 'Add New Location',
'Add New Log Entry': 'Add New Log Entry',
'Add New Marker': 'Add New Marker',
'Add New Member': 'Thêm thành viên mới',
'Add New Membership': 'Add New Membership',
'Add New Metadata': 'Add New Metadata',
'Add New Need': 'Add New Need',
'Add New Need Type': 'Add New Need Type',
'Add New Office': 'Thêm văn phòng mới',
'Add New Organization': 'Thêm một tô chức mới',
'Add New Photo': 'Add New Photo',
'Add New Position': 'Add New Position',
'Add New Problem': 'Thêm vấn đề mới',
'Add New Project': 'Add New Project',
'Add New Projection': 'Add New Projection',
'Add New Rapid Assessment': 'Add New Rapid Assessment',
'Add New Received Item': 'Add New Received Item',
'Add New Record': 'Add New Record',
'Add New Report': 'Thêm báo cáo mới',
'Add New Request': 'Thêm yêu cầu mới',
'Add New Request Item': 'Thêm yêu cầu hàng hóa mới',
'Add New Resource': 'Thêm nguồn lực mới',
'Add New Response': 'Thêm phản hồi mới',
'Add New River': 'Add New River',
'Add New Role': 'Thêm vai trò mới',
'Add New Role to User': 'Gán vai trò mới cho người dùng',
'Add New Sector': 'Add New Sector',
'Add New Sent Item': 'Add New Sent Item',
'Add New Setting': 'Add New Setting',
'Add New Shelter': 'Thêm Nơi cư trú mới',
'Add New Shelter Service': 'Add New Shelter Service',
'Add New Shelter Type': 'Add New Shelter Type',
'Add New Site': 'Add New Site',
'Add New Skill': 'Thêm kỹ năng mới',
'Add New Skill Type': 'Add New Skill Type',
'Add New Solution': 'Add New Solution',
'Add New Staff': 'Add New Staff',
'Add New Staff Type': 'Add New Staff Type',
'Add New Storage Location': 'Thêm Vị trí kho lưu trữ mới',
'Add New Survey Answer': 'Add New Survey Answer',
'Add New Survey Question': 'Add New Survey Question',
'Add New Survey Section': 'Add New Survey Section',
'Add New Survey Series': 'Add New Survey Series',
'Add New Survey Template': 'Thêm mẫu khảo sát mới',
'Add New Task': 'Thêm một công việc mới',
'Add New Team': 'Thêm đội mới',
'Add New Theme': 'Add New Theme',
'Add New Ticket': 'Add New Ticket',
'Add New Track': 'Add New Track',
'Add New Unit': 'Thêm đơn vị mới',
'Add New User': 'Thêm người dùng mới',
'Add New User to Role': 'Add New User to Role',
'Add New Warehouse': 'Add New Warehouse',
'Add New Warehouse Item': 'Add New Warehouse Item',
'Add Office': 'Thêm thông tin văn phòng',
'Add Organization': 'Thêm Tổ chức',
'Add Peer': 'Add Peer',
'Add Person': 'Thêm cá nhân',
'Add Personal Effects': 'Add Personal Effects',
'Add Photo': 'Add Photo',
'Add Position': 'Add Position',
'Add Problem': 'Add Problem',
'Add Project': 'Thêm dự án',
'Add Projection': 'Add Projection',
'Add Question': 'Add Question',
'Add Rapid Assessment': 'Add Rapid Assessment',
'Add Recipient': 'Thêm người nhận viện trợ',
'Add Recipient Site': 'Add Recipient Site',
'Add Recipient Site.': 'Add Recipient Site.',
'Add Record': 'Add Record',
'Add Recovery Report': 'Add Recovery Report',
'Add Reference Document': 'Add Reference Document',
'Add Report': 'Add Report',
'Add Request': 'Thêm yêu cầu',
'Add Request Detail': 'thêm chi tiết yêu cầu',
'Add Request Item': 'Thêm yêu cầu hàng hóa',
'Add Resource': 'Thêm tại nguyên',
'Add Response': 'Add Response',
'Add River': 'Add River',
'Add Role': 'Add Role',
'Add Section': 'Add Section',
'Add Sector': 'Thêm lĩnh vực',
'Add Sender Organization': 'Thêm tổ chức gửi',
'Add Sender Site': 'Add Sender Site',
'Add Sender Site.': 'Add Sender Site.',
'Add Service Profile': 'Add Service Profile',
'Add Setting': 'Add Setting',
'Add Shelter': 'Add Shelter',
'Add Shelter Service': 'Add Shelter Service',
'Add Shelter Type': 'Add Shelter Type',
'Add Shipment Transit Log': 'Add Shipment Transit Log',
'Add Shipment/Way Bills': 'Add Shipment/Way Bills',
'Add Site': 'Add Site',
'Add Skill': 'Thêm kỹ năng',
'Add Skill Type': 'Thêm loại kỹ năng',
'Add Skill Types': 'Thêm loại kỹ năng',
'Add Solution': 'Add Solution',
'Add Staff': 'Add Staff',
'Add Staff Type': 'Add Staff Type',
'Add Status': 'Add Status',
'Add Storage Bin ': 'Add Storage Bin ',
'Add Storage Bin Type': 'Add Storage Bin Type',
'Add Storage Location': 'Add Storage Location',
'Add Storage Location ': 'Add Storage Location ',
'Add Sub-Category': 'Thêm danh mục cấp dưới',
'Add Subscription': 'Add Subscription',
'Add Survey Answer': 'Thêm trả lời khảo sát',
'Add Survey Question': 'Thêm câu hỏi khảo sát',
'Add Survey Section': 'Add Survey Section',
'Add Survey Series': 'Add Survey Series',
'Add Survey Template': 'Thêm mẫu khảo sát',
'Add Task': 'Add Task',
'Add Team': 'Thêm đội',
'Add Theme': 'Add Theme',
'Add Ticket': 'Add Ticket',
'Add Unit': 'Thêm đơn vị',
'Add User': 'Thêm người dùng',
'Add Volunteer': 'Add Volunteer',
'Add Volunteer Registration': 'Thêm Đăng ký tình nguyện viên',
'Add Warehouse': 'Add Warehouse',
'Add Warehouse Item': 'Add Warehouse Item',
'Add a Person': 'Add a Person',
'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.': 'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.',
'Add a Volunteer': 'Add a Volunteer',
'Add a new Site from where the Item is being sent.': 'Thêm Site nơi gửi hàng hóa đến ',
'Add a new Site where the Item is being sent to.': 'Add a new Site where the Item is being sent to.',
'Add an Photo.': 'Add an Photo.',
'Add main Item Category.': 'Add main Item Category.',
'Add main Item Sub-Category.': 'Add main Item Sub-Category.',
'Add new Group': 'Add new Group',
'Add new Individual': 'Add new Individual',
'Add new position.': 'Thêm địa điểm mới',
'Add new project.': 'Thêm dự án mới',
'Add new staff role.': 'Thêm vai trò nhân viên mới',
'Add the Storage Bin Type.': 'Add the Storage Bin Type.',
'Add the Storage Location where this bin is located.': 'Add the Storage Location where this bin is located.',
'Add the Storage Location where this this Bin belongs to.': 'Thêm vị trí kho lưu trữ chứa Bin này',
'Add the main Warehouse/Site information where this Bin belongs to.': 'Add the main Warehouse/Site information where this Bin belongs to.',
'Add the main Warehouse/Site information where this Item is to be added.': 'Thêm thông tin Nhà kho/Site chứa hàng hóa đã được nhập thông tin',
'Add the main Warehouse/Site information where this Storage location is.': 'Add the main Warehouse/Site information where this Storage location is.',
'Add the unit of measure if it doesnt exists already.': 'Add the unit of measure if it doesnt exists already.',
'Add to Bundle': 'Add to Bundle',
'Add to Catalog': 'Add to Catalog',
'Add to budget': 'Add to budget',
'Add/Edit/Remove Layers': 'Thêm/Sửa/Xóa các lớp',
'Additional Beds / 24hrs': 'Additional Beds / 24hrs',
'Additional Comments': 'Additional Comments',
"Additional quantity quantifier – e.g. '4x5'.": "Additional quantity quantifier – e.g. '4x5'.",
'Address': 'Địa chỉ',
'Address Details': 'Address Details',
'Address Type': 'Loại địa chỉ',
'Address added': 'Địa chỉ đã được thêm',
'Address deleted': 'Đã xóa địa chỉ',
'Address updated': 'Address updated',
'Addresses': 'Các địa chỉ',
'Adequate': 'Adequate',
'Adequate food and water available': 'Adequate food and water available',
'Adjust Item(s) Quantity': 'Adjust Item(s) Quantity',
'Adjust Items due to Theft/Loss': 'Adjust Items due to Theft/Loss',
'Admin': 'Quản trị viên',
'Admin Email': 'Email của quản trị viên',
'Admin Name': 'Tên quản trị viên',
'Admin Tel': 'Số điện thoại của Quản trị viên',
'Administration': 'Quản trị',
'Administrator': 'Quản trị viên',
'Admissions/24hrs': 'Admissions/24hrs',
'Adolescent (12-20)': 'Adolescent (12-20)',
'Adolescent participating in coping activities': 'Adolescent participating in coping activities',
'Adult (21-50)': 'Adult (21-50)',
'Adult ICU': 'Adult ICU',
'Adult Psychiatric': 'Bệnh nhân tâm thần',
'Adult female': 'Nữ giới',
'Adult male': 'Adult male',
'Adults in prisons': 'Adults in prisons',
'Advanced Bin Search': 'Advanced Bin Search',
'Advanced Catalog Search': 'Tìm kiếm danh mục nâng cao',
'Advanced Category Search': 'Tìm kiếm danh mục nâng cao',
'Advanced Item Search': 'Advanced Item Search',
'Advanced Location Search': 'Tìm kiếm vị trí nâng cao',
'Advanced Site Search': 'Advanced Site Search',
'Advanced Sub-Category Search': 'Advanced Sub-Category Search',
'Advanced Unit Search': 'Advanced Unit Search',
'Advanced:': 'Advanced:',
'Advisory': 'Advisory',
'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.': 'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.',
'Age Group': 'Nhóm tuổi',
'Age group': 'Age group',
'Age group does not match actual age.': 'Nhóm tuổi không phù hợp với tuổi thật',
'Aggravating factors': 'Aggravating factors',
'Aggregate Items': 'Aggregate Items',
'Agriculture': 'Agriculture',
'Aid Request': 'Yêu cầu cứu trợ',
'Aid Request Details': 'Chi tiết yêu cầu cứu trợ',
'Aid Request added': 'Đã thêm yêu cầu viện trợ',
'Aid Request deleted': 'Đã xóa yêu cầu cứu trợ',
'Aid Request updated': 'Đã cập nhật Yêu cầu cứu trợ',
'Aid Requests': 'yêu cầu cứu trợ',
'Air Transport Service': 'Air Transport Service',
'Air tajin': 'Air tajin',
'Aircraft Crash': 'Aircraft Crash',
'Aircraft Hijacking': 'Aircraft Hijacking',
'Airport Closure': 'Đóng cửa sân bay',
'Airspace Closure': 'Airspace Closure',
'Alcohol': 'Alcohol',
'Alert': 'Alert',
'All': 'All',
'All Inbound & Outbound Messages are stored here': 'All Inbound & Outbound Messages are stored here',
'All Locations': 'All Locations',
'All Requested Items': 'Hàng hóa được yêu cầu',
'All Resources': 'All Resources',
'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. Please consult the source field of each entry.': 'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. Please consult the source field of each entry.',
'Allowed to push': 'Cho phép bấm nút',
'Allows a Budget to be drawn up': 'Allows a Budget to be drawn up',
'Allows authorized users to control which layers are available to the situation map.': 'Cho phép người dùng đã đăng nhập kiểm soát layer nào phù hợp với bản đồ tình huống',
'Alternative infant nutrition in use': 'Alternative infant nutrition in use',
'Alternative places for studying': 'Alternative places for studying',
'Alternative places for studying available': 'Alternative places for studying available',
'Ambulance Service': 'Ambulance Service',
'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.': 'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.',
'Analysis of Completed Surveys': 'Analysis of Completed Surveys',
'Animal Die Off': 'Animal Die Off',
'Animal Feed': 'Animal Feed',
'Animals': 'Animals',
'Answer Choices (One Per Line)': 'Chọn câu trả lời',
'Anthropolgy': 'Anthropolgy',
'Antibiotics available': 'Antibiotics available',
'Antibiotics needed per 24h': 'Antibiotics needed per 24h',
'Any available Metadata in the files will be read automatically, such as Timestamp, Author, Latitude & Longitude.': 'Thông tin có sẵn trong file như Timestamp,Tác giả, Kinh độ, Vĩ độ sẽ được đọc tự động',
'Apparent Age': 'Apparent Age',
'Apparent Gender': 'Apparent Gender',
'Appropriate clothing available': 'Appropriate clothing available',
'Appropriate cooking equipment/materials in HH': 'Appropriate cooking equipment/materials in HH',
'Approx. number of cases/48h': 'Approx. number of cases/48h',
'Approximately how many children under 5 with diarrhea in the past 48 hours?': 'Approximately how many children under 5 with diarrhea in the past 48 hours?',
'Archive not Delete': 'Archive not Delete',
'Arctic Outflow': 'Arctic Outflow',
'Are basic medical supplies available for health services since the disaster?': 'Are basic medical supplies available for health services since the disaster?',
'Are breast milk substitutes being used here since the disaster?': 'Are breast milk substitutes being used here since the disaster?',
'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?': 'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?',
'Are the chronically ill receiving sufficient care and assistance?': 'Are the chronically ill receiving sufficient care and assistance?',
'Are there adults living in prisons in this area?': 'Are there adults living in prisons in this area?',
'Are there alternative places for studying?': 'Are there alternative places for studying?',
'Are there cases of diarrhea among children under the age of 5?': 'Are there cases of diarrhea among children under the age of 5?',
'Are there children living in adult prisons in this area?': 'Are there children living in adult prisons in this area?',
'Are there children living in boarding schools in this area?': 'Are there children living in boarding schools in this area?',
'Are there children living in homes for disabled children in this area?': 'Are there children living in homes for disabled children in this area?',
'Are there children living in juvenile detention in this area?': 'Are there children living in juvenile detention in this area?',
'Are there children living in orphanages in this area?': 'Are there children living in orphanages in this area?',
'Are there children with chronical illnesses in your community?': 'Are there children with chronical illnesses in your community?',
'Are there health services functioning for the community since the disaster?': 'Are there health services functioning for the community since the disaster?',
'Are there older people living in care homes in this area?': 'Are there older people living in care homes in this area?',
'Are there older people with chronical illnesses in your community?': 'Are there older people with chronical illnesses in your community?',
'Are there people with chronical illnesses in your community?': 'Are there people with chronical illnesses in your community?',
'Are there separate latrines for women and men available?': 'Are there separate latrines for women and men available?',
'Are there staff present and caring for the residents in these institutions?': 'Are there staff present and caring for the residents in these institutions?',
'Area': 'Area',
'Assessment': 'Assessment',
'Assessment Details': 'Assessment Details',
'Assessment Reported': 'Assessment Reported',
'Assessment Summaries': 'Assessment Summaries',
'Assessment Summary Details': 'Assessment Summary Details',
'Assessment Summary added': 'Assessment Summary added',
'Assessment Summary deleted': 'Assessment Summary deleted',
'Assessment Summary updated': 'Assessment Summary updated',
'Assessment Type': 'Assessment Type',
'Assessment added': 'Assessment added',
'Assessment deleted': 'Assessment deleted',
'Assessment updated': 'Đã cập nhật Trị giá tính thuế',
'Assessments': 'Assessments',
'Assessments Needs vs. Activities': 'Assessments Needs vs. Activities',
'Assessments and Activities': 'Assessments and Activities',
'Assessments are structured reports done by Professional Organizations': 'Assessments are structured reports done by Professional Organizations',
'Assessments are structured reports done by Professional Organizations - data includes WFP Assessments': 'Assessments are structured reports done by Professional Organizations - data includes WFP Assessments',
'Assessments:': 'Assessments:',
'Assessor': 'Assessor',
'Assign Storage Location': 'Assign Storage Location',
'Assign to Org.': 'Assign to Org.',
'Assigned': 'Assigned',
'Assigned To': 'Assigned To',
'Assigned to': 'Assigned to',
'Assistance for immediate repair/reconstruction of houses': 'Assistance for immediate repair/reconstruction of houses',
'Assistant': 'Assistant',
'At/Visited Location (not virtual)': 'At/Visited Location (not virtual)',
'Attend to information sources as described in <instruction>': 'Attend to information sources as described in <instruction>',
'Attribution': 'Attribution',
'Audit Read': 'Audit Read',
'Audit Write': 'Audit Write',
"Authenticate system's Twitter account": "Authenticate system's Twitter account",
'Author': 'Author',
'Automotive': 'Automotive',
'Available Beds': 'Available Beds',
'Available Messages': 'Available Messages',
'Available Records': 'Available Records',
'Available databases and tables': 'Available databases and tables',
'Available from': 'Available from',
'Available in Viewer?': 'Available in Viewer?',
'Available until': 'Sẵn sàng cho đến khi',
'Availablity': 'Availablity',
'Avalanche': 'Avalanche',
'Avoid the subject event as per the <instruction>': 'Avoid the subject event as per the <instruction>',
'Babies who are not being breastfed, what are they being fed on?': 'Babies who are not being breastfed, what are they being fed on?',
'Baby And Child Care': 'Chăm sóc trẻ em',
'Background Colour': 'Background Colour',
'Background Colour for Text blocks': 'Background Colour for Text blocks',
'Bahai': 'Bahai',
'Baldness': 'Baldness',
'Balochi': 'Balochi',
'Banana': 'Banana',
'Bank/micro finance': 'Tài chính Ngân hàng',
'Base Layer?': 'Base Layer?',
'Base Unit': 'Đơn vị cơ sở',
'Baseline Number of Beds': 'Baseline Number of Beds',
'Baseline Type': 'Baseline Type',
'Baseline Type Details': 'Baseline Type Details',
'Baseline Type added': 'Baseline Type added',
'Baseline Type deleted': 'Baseline Type deleted',
'Baseline Type updated': 'Baseline Type updated',
'Baseline Types': 'Baseline Types',
'Baseline added': 'Baseline added',
'Baseline deleted': 'Baseline deleted',
'Baseline number of beds of that type in this unit.': 'Baseline number of beds of that type in this unit.',
'Baseline updated': 'Baseline updated',
'Baselines': 'Baselines',
'Baselines Details': 'Baselines Details',
'Basic Assessment': 'Basic Assessment',
'Basic Assessment Reported': 'Basic Assessment Reported',
'Basic Details': 'Basic Details',
'Basic information on the requests and donations, such as category, the units, contact details and the status.': 'Thông tin cơ bản về các yêu cầu và quyên góp như thể loại, tên đơn vị, chi tiết liên lạc và tình trạng',
'Basic medical supplies available prior to disaster': 'Basic medical supplies available prior to disaster',
'Basic medical supplies available since disaster': 'Basic medical supplies available since disaster',
'Basic reports on the Shelter and drill-down by region': 'Báo cáo cơ bản về nơi cư trú và báo cáo chi tiết theo vùng',
'Baud': 'Bốt',
'Baud rate to use for your modem - The default is safe for most cases': 'Baud rate to use for your modem - The default is safe for most cases',
'Bed Capacity': 'Bed Capacity',
'Bed Capacity per Unit': 'Bed Capacity per Unit',
'Bed Type': 'Loại Giường',
'Bed type already registered': 'Bed type already registered',
'Bedding materials available': 'Bedding materials available',
'Beneficiary Type': 'Beneficiary Type',
'Biological Hazard': 'Biological Hazard',
'Biscuits': 'Biscuits',
'Blizzard': 'Blizzard',
'Blood Type (AB0)': 'Blood Type (AB0)',
'Blowing Snow': 'Gió tuyết đang thổi',
'Boat': 'Boat',
'Bodies found': 'Bodies found',
'Bodies recovered': 'Bodies recovered',
'Body': 'Body',
'Body Recovery Reports': 'Body Recovery Reports',
'Body Recovery Request': 'Body Recovery Request',
'Body Recovery Requests': 'Body Recovery Requests',
'Bomb': 'Bomb',
'Bomb Explosion': 'Nổ bom',
'Bomb Threat': 'Bomb Threat',
'Border Colour for Text blocks': 'Màu viền cho khối văn bản',
'Boys 13-18 yrs in affected area': 'Boys 13-18 yrs in affected area',
'Boys 13-18 yrs not attending school': 'Boys 13-18 yrs not attending school',
'Boys 6-12 yrs in affected area': 'Boys 6-12 yrs in affected area',
'Boys 6-12 yrs not attending school': 'Boys 6-12 yrs not attending school',
'Breast milk substitutes in use since disaster': 'Breast milk substitutes in use since disaster',
'Breast milk substitutes used prior to disaster': 'Breast milk substitutes used prior to disaster',
'Bricks': 'Bricks',
'Bridge Closed': 'Bridge Closed',
'Bucket': 'Bucket',
'Buddhist': 'Người theo đạo Phật',
'Budget': 'Ngân sách',
'Budget Details': 'Budget Details',
'Budget Updated': 'Budget Updated',
'Budget added': 'Budget added',
'Budget deleted': 'Budget deleted',
'Budget updated': 'Budget updated',
'Budgeting Module': 'Budgeting Module',
'Budgets': 'Ngân sách',
'Buffer': 'Buffer',
'Building Aide': 'Building Aide',
'Building Collapsed': 'Sập nhà',
'Built using the Template agreed by a group of NGOs working together as the': 'Built using the Template agreed by a group of NGOs working together as the',
'Bulk Uploader': 'Bulk Uploader',
'Bundle': 'Bundle',
'Bundle Contents': 'Bundle Contents',
'Bundle Details': 'Bundle Details',
'Bundle Updated': 'Cập nhật Bundle',
'Bundle added': 'Bundle added',
'Bundle deleted': 'Bundle deleted',
'Bundle updated': 'Bundle updated',
'Bundles': 'Bundles',
'Burn': 'Burn',
'Burn ICU': 'Burn ICU',
'Burned/charred': 'Burned/charred',
'Business damaged': 'Business damaged',
'By Warehouse': 'By Warehouse',
'CBA Women': 'CBA Women',
'CSS file %s not writable - unable to apply theme!': 'không viết được file CSS %s - không thể áp dụng chủ đề',
'Calculate': 'Tính toán',
'Camp': 'Camp',
'Camp Coordination/Management': 'Camp Coordination/Management',
'Can users register themselves for authenticated login access?': 'Can users register themselves for authenticated login access?',
"Can't import tweepy": "Can't import tweepy",
'Cancel': 'Cancel',
'Cancelled': 'Cancelled',
'Candidate Matches for Body %s': 'Candidate Matches for Body %s',
'Canned Fish': 'Canned Fish',
'Cannot be empty': 'Cannot be empty',
'Cannot delete whilst there are linked records. Please delete linked records first.': 'Không xóa được khi đang có bản thu liên quan.Hãy xóa bản thu trước',
'Capacity (Max Persons)': 'Capacity (Max Persons)',
'Capacity (W x D X H)': 'Capacity (W x D X H)',
'Capture Information on Disaster Victim groups (Tourists, Passengers, Families, etc.)': 'Nắm bắt thông tin của các nạn nhân chịu ảnh hưởng của thiên tai(Khách du lịch,Gia đình...)',
'Capture Information on each disaster victim': 'Capture Information on each disaster victim',
'Capturing organizational information of a relief organization and all the projects they have in the region': 'Capturing organizational information of a relief organization and all the projects they have in the region',
'Capturing the essential services each Volunteer is providing and where': 'Capturing the essential services each Volunteer is providing and where',
'Capturing the projects each organization is providing and where': 'Capturing the projects each organization is providing and where',
'Cardiology': 'Bệnh tim mạch',
'Cash available to restart business': 'Cash available to restart business',
'Cassava': 'Cassava',
'Casual Labor': 'Nhân công thời vụ',
'Catalog': 'Catalog',
'Catalog Item': 'Catalog Item',
'Catalog Item added': 'Catalog Item added',
'Catalog Item deleted': 'Catalog Item deleted',
'Catalog Item updated': 'Catalog Item updated',
'Catalog Items': 'Catalog Items',
'Catalog Name': 'Catalog Name',
'Category': 'Category',
'Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog Relation',
'Category<>Sub-Category<>Catalog Relation added': 'Category<>Sub-Category<>Catalog Relation added',
'Category<>Sub-Category<>Catalog Relation deleted': 'Category<>Sub-Category<>Catalog Relation deleted',
'Category<>Sub-Category<>Catalog Relation updated': 'Category<>Sub-Category<>Catalog Relation updated',
'Central point to record details on People': 'Central point to record details on People',
'Change Password': 'Change Password',
'Check for errors in the URL, maybe the address was mistyped.': 'Kiểm tra lỗi URL, có lẽ địa chỉ URL bị gõ sai.',
'Check if the URL is pointing to a directory instead of a webpage.': 'Kiểm tra URL trỏ về thư mục hay trang web',
'Check outbox for the message status': 'Check outbox for the message status',
'Check to delete': 'Check to delete',
'Checklist': 'Checklist',
'Checklist created': 'Checklist created',
'Checklist deleted': 'Checklist deleted',
'Checklist of Operations': 'Checklist of Operations',
'Checklist updated': 'Checklist updated',
'Chemical Hazard': 'Chemical Hazard',
'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack': 'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack',
'Chicken': 'Chicken',
'Child': 'Child',
'Child (2-11)': 'Child (2-11)',
'Child (< 18 yrs)': 'Child (< 18 yrs)',
'Child Abduction Emergency': 'Child Abduction Emergency',
'Child headed households (<18 yrs)': 'Child headed households (<18 yrs)',
'Children (2-5 years)': 'Children (2-5 years)',
'Children (5-15 years)': 'Children (5-15 years)',
'Children (< 2 years)': 'Trẻ em (dưới 2 tuổi)',
'Children in adult prisons': 'Children in adult prisons',
'Children in boarding schools': 'Children in boarding schools',
'Children in homes for disabled children': 'Children in homes for disabled children',
'Children in juvenile detention': 'Children in juvenile detention',
'Children in orphanages': 'Children in orphanages',
'Children living on their own (without adults)': 'Children living on their own (without adults)',
'Children not enrolled in new school': 'Children not enrolled in new school',
'Children orphaned by the disaster': 'Children orphaned by the disaster',
'Children separated from their parents/caregivers': 'Children separated from their parents/caregivers',
'Children that have been sent to safe places': 'Children that have been sent to safe places',
'Children who have disappeared since the disaster': 'Children who have disappeared since the disaster',
'Children with chronical illnesses': 'Children with chronical illnesses',
'Chinese (Taiwan)': 'Chinese (Taiwan)',
'Cholera Treatment': 'Cholera Treatment',
'Cholera Treatment Capability': 'Cholera Treatment Capability',
'Cholera Treatment Center': 'Cholera Treatment Center',
'Cholera-Treatment-Center': 'Cholera-Treatment-Center',
'Choosing Skill and Resources of Volunteers': 'Choosing Skill and Resources of Volunteers',
'Christian': 'Christian',
'Church': 'Church',
'Circumstances of disappearance, other victims/witnesses who last saw the missing person alive.': 'Hoàn cảnh mất tích, những nhân chứng nhìn thấy lần gần đây nhất nạn nhân còn sống',
'City': 'City',
'Civil Emergency': 'Civil Emergency',
'Clear Selection': 'Clear Selection',
"Click on 'Pledge' button in the left-hand column to make a Pledge to match a request for aid.": "Click on 'Pledge' button in the left-hand column to make a Pledge to match a request for aid.",
'Click on the link ': 'Click on the link ',
'Client IP': 'Client IP',
'Clinical Laboratory': 'Clinical Laboratory',
'Clinical Operations': 'Clinical Operations',
'Clinical Status': 'Clinical Status',
'Close map': 'Close map',
'Closed': 'Đã đóng',
'Closure': 'Closure',
'Clothing': 'Clothing',
'Cluster': 'Cluster',
'Cluster Details': 'Cluster Details',
'Cluster Distance': 'Cluster Distance',
'Cluster Subsector': 'Cluster Subsector',
'Cluster Subsector Details': 'Cluster Subsector Details',
'Cluster Subsector added': 'Cluster Subsector added',
'Cluster Subsector deleted': 'Cluster Subsector deleted',
'Cluster Subsector updated': 'Cluster Subsector updated',
'Cluster Subsectors': 'Cluster Subsectors',
'Cluster Threshold': 'Cluster Threshold',
'Cluster added': 'Đã thêm cụm',
'Cluster deleted': 'Cluster deleted',
'Cluster updated': 'Cluster updated',
'Cluster(s)': 'Cluster(s)',
'Clusters': 'Clusters',
'Code': 'Mã',
'Cold Wave': 'Cold Wave',
'Collective center': 'Collective center',
'Colour for Underline of Subheadings': 'Colour for Underline of Subheadings',
'Colour of Buttons when hovering': 'Colour of Buttons when hovering',
'Colour of bottom of Buttons when not pressed': 'Colour of bottom of Buttons when not pressed',
'Colour of bottom of Buttons when pressed': 'Colour of bottom of Buttons when pressed',
'Colour of dropdown menus': 'Colour of dropdown menus',
'Colour of selected Input fields': 'Màu của trường đã được chọn',
'Colour of selected menu items': 'Colour of selected menu items',
'Column Choices (One Per Line': 'Chọn cột',
'Combined Method': 'Combined Method',
'Come back later.': 'Come back later.',
'Come back later. Everyone visiting this site is probably experiencing the same problem as you.': 'Come back later. Everyone visiting this site is probably experiencing the same problem as you.',
'Comments': 'Bình luận',
'Commiting a changed spreadsheet to the database': 'Commiting a changed spreadsheet to the database',
'Communication problems': 'Communication problems',
'Community Centre': 'Community Centre',
'Community Health Center': 'Trung tâm sức khỏe cộng đồng',
'Community Member': 'Thành viên cộng đồng',
'Complete Unit Label for e.g. meter for m.': 'hoàn thành các bản đơn vị, ví dụ đơn vị của mét là m',
'Completed': 'Completed',
'Complexion': 'Complexion',
'Compose': 'Compose',
'Compromised': 'Compromised',
'Config': 'Tùy chỉnh',
'Config added': 'Cấu hình đã được thêm',
'Config deleted': 'Config deleted',
'Config updated': 'Cập nhật tùy chỉnh',
'Configs': 'Configs',
'Configure Run-time Settings': 'Configure Run-time Settings',
'Confirmed': 'Confirmed',
'Confirmed Incidents': 'Confirmed Incidents',
'Conflict Details': 'Conflict Details',
'Conflict Resolution': 'Conflict Resolution',
'Consumable': 'Consumable',
'Contact': 'Contact',
'Contact Data': 'Dữ liệu liên lạc',
'Contact Details': 'Contact Details',
'Contact Information': 'Contact Information',
'Contact Method': 'Contact Method',
'Contact Person': 'Contact Person',
'Contact details': 'Contact details',
'Contact information added': 'Contact information added',
'Contact information deleted': 'Contact information deleted',
'Contact information updated': 'Contact information updated',
'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.',
'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.',
'Contact us': 'Contact us',
'Contacts': 'Contacts',
'Contents': 'Contents',
'Contradictory values!': 'Contradictory values!',
'Contributor': 'Người đóng góp',
'Conversion Tool': 'Conversion Tool',
'Cooking NFIs': 'Cooking NFIs',
'Cooking Oil': 'Cooking Oil',
'Coordinate Conversion': 'Coordinate Conversion',
'Copy': 'Copy',
'Copy any data from the one to be deleted into the one to keep': 'Copy any data from the one to be deleted into the one to keep',
'Corn': 'Corn',
'Cost Type': 'Cost Type',
'Cost per Megabyte': 'Cost per Megabyte',
'Cost per Minute': 'Cost per Minute',
"Couldn't import tweepy library": "Couldn't import tweepy library",
'Country': 'Country',
'Country of Residence': 'Country of Residence',
'Create & manage Distribution groups to receive Alerts': 'Create & manage Distribution groups to receive Alerts',
'Create Checklist': 'Create Checklist',
'Create Group Entry': 'Create Group Entry',
'Create Impact Assessment': 'Create Impact Assessment',
'Create Import Job': 'Create Import Job',
'Create Mobile Impact Assessment': 'Create Mobile Impact Assessment',
'Create New Import Job': 'Create New Import Job',
'Create Rapid Assessment': 'Create Rapid Assessment',
'Create Request': 'Khởi tạo yêu cầu',
'Create Task': 'Create Task',
'Create a group entry in the registry.': 'Create a group entry in the registry.',
'Create, enter, and manage surveys.': 'Create, enter, and manage surveys.',
'Creation of Surveys': 'Creation of Surveys',
'Crime': 'Tội phạm',
'Criteria': 'Criteria',
'Currency': 'Currency',
'Current Group Members': 'Nhóm thành viên hiện tại',
'Current Identities': 'Current Identities',
'Current Location': 'Current Location',
'Current Log Entries': 'Current Log Entries',
'Current Memberships': 'Thành viên hiện tại',
'Current Registrations': 'Current Registrations',
'Current Status': 'Current Status',
'Current Team Members': 'Current Team Members',
'Current Twitter account': 'Tài khoản Twitter hiện tại',
'Current greatest needs of vulnerable groups': 'Current greatest needs of vulnerable groups',
'Current main income sources': 'Current main income sources',
'Current major expenses': 'Current major expenses',
'Current number of patients': 'Current number of patients',
'Current problems, categories': 'Current problems, categories',
'Current problems, details': 'Current problems, details',
'Current request': 'Yêu cầu hiện tại',
'Current response': 'Current response',
'Current session': 'Current session',
'Current type of health problems, adults': 'Current type of health problems, adults',
'Current type of health problems, children': 'Current type of health problems, children',
'Current type of source for drinking water': 'Current type of source for drinking water',
'Current type of source for sanitary water': 'Current type of source for sanitary water',
'Custom Database Resource (e.g., anything defined as a resource in Sahana)': 'Custom Database Resource (e.g., anything defined as a resource in Sahana)',
'Customisable category of aid': 'Các tiêu chí cứu trợ có thể tùy chỉnh',
'DECISION': 'DECISION',
'DNA Profile': 'DNA Profile',
'DNA Profiling': 'DNA Profiling',
'Daily': 'Hàng ngày',
'Dam Overflow': 'Tràn đập',
'Dangerous Person': 'Người nguy hiểm',
'Data uploaded': 'Đã cập nhật dữ liệu',
'Database': 'Database',
'Date': 'Date',
'Date & Time': 'Date & Time',
'Date Requested': 'Date Requested',
'Date Required': 'Date Required',
'Date and Time': 'Date and Time',
'Date and Time of Goods receipt. By default shows the current time but can be modified by editing in the drop down list.': 'Ngày giờ nhận hàng hóa.Hiển thị thời gian theo mặc định nhưng vẫn có thể chỉnh sửa',
'Date and time this report relates to.': 'Date and time this report relates to.',
'Date of Birth': 'Date of Birth',
'Date of Latest Information on Beneficiaries Reached': 'Date of Latest Information on Beneficiaries Reached',
'Date of Report': 'Ngày báo cáo',
'Date/Time': 'Ngày/Giờ',
'Date/Time of Find': 'Ngày giờ tìm kiếm',
'Date/Time of disappearance': 'Date/Time of disappearance',
'De-duplicator': 'De-duplicator',
'Dead Body Details': 'Dead Body Details',
'Dead Body Reports': 'Báo cáo thiệt hại về người',
'Deaths in the past 24h': 'Deaths in the past 24h',
'Deaths/24hrs': 'Số người chết/24h',
'Debug': 'Debug',
'Decimal Degrees': 'Độ âm',
'Decomposed': 'Decomposed',
'Default Height of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Default Height of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.',
'Default Marker': 'Default Marker',
'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.',
'Default synchronization policy': 'Chính sách đồng bộ hóa mặc định',
'Defaults': 'Defaults',
'Defaults updated': 'Defaults updated',
'Defecation area for animals': 'Defecation area for animals',
'Defines the icon used for display of features on handheld GPS.': 'Defines the icon used for display of features on handheld GPS.',
'Defines the icon used for display of features on interactive map & KML exports. A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.': 'Defines the icon used for display of features on interactive map & KML exports. A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.',
'Defines the marker used for display & the attributes visible in the popup.': 'Defines the marker used for display & the attributes visible in the popup.',
'Degrees must be a number between -180 and 180': 'Degrees must be a number between -180 and 180',
'Dehydration': 'Dehydration',
'Delete': 'Delete',
'Delete Aid Request': 'Xóa yêu cầu cứu trợ',
'Delete Assessment': 'Delete Assessment',
'Delete Assessment Summary': 'Delete Assessment Summary',
'Delete Baseline': 'Delete Baseline',
'Delete Baseline Type': 'Delete Baseline Type',
'Delete Budget': 'Delete Budget',
'Delete Bundle': 'Delete Bundle',
'Delete Catalog Item': 'Delete Catalog Item',
'Delete Cluster Subsector': 'Delete Cluster Subsector',
'Delete Config': 'Delete Config',
'Delete Distribution': 'Delete Distribution',
'Delete Distribution Item': 'Delete Distribution Item',
'Delete Document': 'Delete Document',
'Delete Donor': 'Delete Donor',
'Delete Entry': 'Delete Entry',
'Delete Feature Class': 'Delete Feature Class',
'Delete Feature Layer': 'Delete Feature Layer',
'Delete Group': 'Delete Group',
'Delete Hospital': 'Xóa Bệnh viện',
'Delete Image': 'Delete Image',
'Delete Impact': 'Delete Impact',
'Delete Impact Type': 'Delete Impact Type',
'Delete Incident': 'Delete Incident',
'Delete Incident Report': 'Delete Incident Report',
'Delete Inventory Item': 'Delete Inventory Item',
'Delete Inventory Store': 'Xóa kho lưu trữ',
'Delete Item': 'Xóa mục',
'Delete Item Category': 'Delete Item Category',
'Delete Item Packet': 'Delete Item Packet',
'Delete Key': 'Delete Key',
'Delete Kit': 'Delete Kit',
'Delete Layer': 'Xóa Layer',
'Delete Location': 'Xóa Vị trí',
'Delete Marker': 'Delete Marker',
'Delete Membership': 'Delete Membership',
'Delete Message': 'Delete Message',
'Delete Metadata': 'Delete Metadata',
'Delete Need': 'Delete Need',
'Delete Need Type': 'Delete Need Type',
'Delete Office': 'Delete Office',
'Delete Old': 'Delete Old',
'Delete Organization': 'Delete Organization',
'Delete Peer': 'Delete Peer',
'Delete Person': 'Delete Person',
'Delete Photo': 'Delete Photo',
'Delete Project': 'Delete Project',
'Delete Projection': 'Delete Projection',
'Delete Rapid Assessment': 'Delete Rapid Assessment',
'Delete Received Item': 'Delete Received Item',
'Delete Received Shipment': 'Delete Received Shipment',
'Delete Record': 'Delete Record',
'Delete Recovery Report': 'Delete Recovery Report',
'Delete Report': 'Delete Report',
'Delete Request': 'Xóa yêu cầu',
'Delete Request Item': 'Xóa yêu cầu hàng hóa',
'Delete Resource': 'Delete Resource',
'Delete Section': 'Delete Section',
'Delete Sector': 'Delete Sector',
'Delete Sent Item': 'Delete Sent Item',
'Delete Sent Shipment': 'Delete Sent Shipment',
'Delete Service Profile': 'Delete Service Profile',
'Delete Setting': 'Delete Setting',
'Delete Skill': 'Delete Skill',
'Delete Skill Type': 'Delete Skill Type',
'Delete Staff Type': 'Delete Staff Type',
'Delete Status': 'Delete Status',
'Delete Subscription': 'Delete Subscription',
'Delete Survey Answer': 'Delete Survey Answer',
'Delete Survey Question': 'Xóa câu hỏi khảo sát',
'Delete Survey Section': 'Delete Survey Section',
'Delete Survey Series': 'Delete Survey Series',
'Delete Survey Template': 'Xóa mẫu khảo sát',
'Delete Unit': 'Xóa đơn vị',
'Delete User': 'Delete User',
'Delete Volunteer': 'Delete Volunteer',
'Delete Warehouse': 'Delete Warehouse',
'Delete Warehouse Item': 'Delete Warehouse Item',
'Delete from Server?': 'Delete from Server?',
'Delivered': 'Delivered',
'Delphi Decision Maker': 'Delphi Decision Maker',
'Demographic': 'Ngành nhân khẩu học',
'Demonstrations': 'Biểu tình',
'Dental Examination': 'Khám nha khoa',
'Dental Profile': 'Dental Profile',
'Department/Unit Name': 'Department/Unit Name',
'Deployment': 'Deployment',
'Describe the condition of the roads to your hospital.': 'Mô tả tình trạng các con đường tới bệnh viện.',
'Describe the procedure which this record relates to (e.g. "medical examination")': 'Describe the procedure which this record relates to (e.g. "medical examination")',
'Description': 'Mô tả',
'Description of Bin Type': 'Description of Bin Type',
'Description of Contacts': 'Description of Contacts',
'Description of defecation area': 'Mo tả khu vực defecation',
'Description of drinking water source': 'Description of drinking water source',
'Description of sanitary water source': 'Description of sanitary water source',
'Description of water source before the disaster': 'Description of water source before the disaster',
'Descriptive Text (e.g., Prose, etc)': 'Descriptive Text (e.g., Prose, etc)',
'Designated for': 'Designated for',
'Desire to remain with family': 'Desire to remain with family',
'Destination': 'Điểm đích',
"Detailed address of the site for informational/logistics purpose. Please note that you can add GIS/Mapping data about this site in the 'Location' field mentioned below.": "Detailed address of the site for informational/logistics purpose. Please note that you can add GIS/Mapping data about this site in the 'Location' field mentioned below.",
'Details': 'Details',
'Dialysis': 'Dialysis',
'Diarrhea': 'Diarrhea',
'Diarrhea among children under 5': 'Diarrhea among children under 5',
'Dignitary Visit': 'Dignitary Visit',
'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': 'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.',
'Dimensions of the storage location. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': 'Dimensions of the storage location. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.',
'Direction': 'Hướng',
'Disabled': 'Disabled',
'Disabled participating in coping activities': 'Disabled participating in coping activities',
'Disabled?': 'Disabled?',
'Disaster Victim Identification': 'Disaster Victim Identification',
'Disaster Victim Registry': 'Disaster Victim Registry',
'Disaster clean-up/repairs': 'Disaster clean-up/repairs',
'Discharge (cusecs)': 'Discharge (cusecs)',
'Discharges/24hrs': 'Discharges/24hrs',
'Discussion Forum': 'Discussion Forum',
'Discussion Forum on item': 'Discussion Forum on item',
'Disease vectors': 'Disease vectors',
'Dispatch': 'Gửi hàng cứu trợ',
'Dispatch Items': 'Dispatch Items',
'Dispensary': 'Y tế dự phòng',
'Displaced': 'Displaced',
'Displaced Populations': 'Displaced Populations',
'Display Polygons?': 'Display Polygons?',
'Display Routes?': 'Display Routes?',
'Display Tracks?': 'Display Tracks?',
'Display Waypoints?': 'Display Waypoints?',
'Dispose': 'Dispose',
'Dispose Expired/Unusable Items': 'Dispose Expired/Unusable Items',
'Distance between defecation area and water source': 'Distance between defecation area and water source',
'Distance between latrines and temporary shelter in meters': 'Distance between latrines and temporary shelter in meters',
'Distance between shelter and latrines': 'Distance between shelter and latrines',
'Distance(Kms)': 'Distance(Kms)',
'Distribution': 'Distribution',
'Distribution Details': 'Distribution Details',
'Distribution Item': 'Hàng hóa đóng góp',
'Distribution Item Details': 'Distribution Item Details',
'Distribution Item added': 'Distribution Item added',
'Distribution Item deleted': 'Distribution Item deleted',
'Distribution Item updated': 'Distribution Item updated',
'Distribution Items': 'Distribution Items',
'Distribution added': 'Distribution added',
'Distribution deleted': 'Distribution deleted',
'Distribution groups': 'Distribution groups',
'Distribution updated': 'Distribution updated',
'Distributions': 'Distributions',
'District': 'Quận',
'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do households each have at least 2 containers (10-20 litres each) to hold water?': 'Do households each have at least 2 containers (10-20 litres each) to hold water?',
'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?': 'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?',
'Do households have bedding materials available (tarps, plastic mats, blankets)?': 'Do households have bedding materials available (tarps, plastic mats, blankets)?',
'Do households have household water storage containers?': 'Do households have household water storage containers?',
'Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?': 'Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?',
'Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?': 'Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?',
'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do women and girls have easy access to sanitary materials?': 'Do women and girls have easy access to sanitary materials?',
'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do you have access to cash to restart your business?': 'Do you have access to cash to restart your business?',
'Do you know of any incidents of violence?': 'Do you know of any incidents of violence?',
'Do you know of children living on their own (without adults)?': 'Do you know of children living on their own (without adults)?',
'Do you know of children separated from their parents or caregivers?': 'Do you know of children separated from their parents or caregivers?',
'Do you know of children that have been orphaned by the disaster?': 'Do you know of children that have been orphaned by the disaster?',
'Do you know of children that have been sent to safe places?': 'Do you know of children that have been sent to safe places?',
'Do you know of children that have disappeared without explanation in the period since the disaster?': 'Do you know of children that have disappeared without explanation in the period since the disaster?',
'Do you know of older people who are primary caregivers of children?': 'Do you know of older people who are primary caregivers of children?',
'Do you know of parents/caregivers missing children?': 'Do you know of parents/caregivers missing children?',
'Do you really want to delete these records?': 'Do you really want to delete these records?',
'Do you want to over-write the file metadata with new default values?': 'Bạn có muốn thay dữ liệu file bằng giá trị mặc định mới không?',
'Do you want to receive this shipment?': 'Do you want to receive this shipment?',
'Document': 'Document',
'Document Details': 'Chi tiết văn bản',
'Document Scan': 'Document Scan',
'Document added': 'Đã thêm tài liệu',
'Document deleted': 'Document deleted',
'Document updated': 'Document updated',
'Documents': 'Documents',
'Documents and Photos': 'Documents and Photos',
'Does this facility provide a cholera treatment center?': 'Does this facility provide a cholera treatment center?',
'Doing nothing (no structured activity)': 'Không làm gì (không có hoạt động theo kế hoạch',
'Dollars': 'Dollars',
'Domestic chores': 'Công việc nội trợ',
'Donation Phone #': 'Donation Phone #',
'Donor': 'Donor',
'Donor Details': 'Donor Details',
'Donor added': 'Đã thêm người quyên góp',
'Donor deleted': 'Donor deleted',
'Donor updated': 'Đã cập nhật người quyên góp',
'Donors': 'Donors',
'Donors Report': 'Báo cáo về tình hình quyên góp',
'Door frame': 'Door frame',
'Draft': 'Bản nháp',
'Drainage': 'Drainage',
'Drawing up a Budget for Staff & Equipment across various Locations.': 'Drawing up a Budget for Staff & Equipment across various Locations.',
'Drill Down by Group': 'Drill Down by Group',
'Drill Down by Incident': 'Drill Down by Incident',
'Drill Down by Shelter': 'Drill Down by Shelter',
'Driving License': 'Driving License',
'Drought': 'Drought',
'Drugs': 'Drugs',
'Dug Well': 'Dug Well',
'Duplicate?': 'Duplicate?',
'Duration': 'Duration',
'Dust Storm': 'Dust Storm',
'Dwellings': 'Dwellings',
'E-mail': 'E-mail',
'EMS Reason': 'EMS Reason',
'EMS Status': 'Tình trạng EMS',
'EMS Status Reason': 'EMS Status Reason',
'EMS Traffic Status': 'EMS Traffic Status',
'ER Status': 'ER Status',
'ER Status Reason': 'ER Status Reason',
'Early Recovery': 'Early Recovery',
'Earthquake': 'Động đất',
'Easy access to sanitation items for women/girls': 'Easy access to sanitation items for women/girls',
'Edit': 'Edit',
'Edit Activity': 'Edit Activity',
'Edit Address': 'Edit Address',
'Edit Aid Request': 'Chỉnh sửa Yêu cầu cứu trợ',
'Edit Application': 'Edit Application',
'Edit Assessment': 'Chỉnh sửa Đánh giá',
'Edit Assessment Summary': 'Edit Assessment Summary',
'Edit Baseline': 'Edit Baseline',
'Edit Baseline Type': 'Edit Baseline Type',
'Edit Budget': 'Edit Budget',
'Edit Bundle': 'Edit Bundle',
'Edit Catalog Item': 'Edit Catalog Item',
'Edit Category<>Sub-Category<>Catalog Relation': 'Edit Category<>Sub-Category<>Catalog Relation',
'Edit Cluster': 'Edit Cluster',
'Edit Cluster Subsector': 'Edit Cluster Subsector',
'Edit Config': 'Edit Config',
'Edit Contact': 'Edit Contact',
'Edit Contact Information': 'Chỉnh sửa thông tin liên lạc',
'Edit Contents': 'Edit Contents',
'Edit Defaults': 'Edit Defaults',
'Edit Description': 'Edit Description',
'Edit Details': 'Chỉnh sửa chi tiết',
'Edit Disaster Victims': 'Edit Disaster Victims',
'Edit Distribution': 'Chỉnh sửa Quyên góp',
'Edit Distribution Item': 'Edit Distribution Item',
'Edit Document': 'Chỉnh sửa tài liệu',
'Edit Donor': 'Edit Donor',
'Edit Email Settings': 'Edit Email Settings',
'Edit Feature Class': 'Edit Feature Class',
'Edit Feature Layer': 'Edit Feature Layer',
'Edit Flood Report': 'Edit Flood Report',
'Edit Gateway Settings': 'Edit Gateway Settings',
'Edit Group': 'Edit Group',
'Edit Hospital': 'Chỉnh sửa Bệnh viện',
'Edit Identification Report': 'Chỉnh sửa báo cáo định dạng',
'Edit Identity': 'Edit Identity',
'Edit Image': 'Edit Image',
'Edit Image Details': 'Edit Image Details',
'Edit Impact': 'Edit Impact',
'Edit Impact Type': 'Edit Impact Type',
'Edit Incident': 'Chỉnh sửa Các sự việc xảy ra',
'Edit Incident Report': 'Edit Incident Report',
'Edit Inventory Item': 'Edit Inventory Item',
'Edit Inventory Store': 'Edit Inventory Store',
'Edit Item': 'Edit Item',
'Edit Item Catalog': 'Edit Item Catalog',
'Edit Item Catalog Categories': 'Chỉnh sửa danh mục hàng hóa',
'Edit Item Category': 'Edit Item Category',
'Edit Item Packet': 'Edit Item Packet',
'Edit Item Sub-Categories': 'Edit Item Sub-Categories',
'Edit Key': 'Chỉnh sửa Key',
'Edit Kit': 'Edit Kit',
'Edit Layer': 'Edit Layer',
'Edit Location': 'Edit Location',
'Edit Log Entry': 'Edit Log Entry',
'Edit Map Services': 'Chỉnh sửa dịch vụ bản đồ',
'Edit Marker': 'Edit Marker',
'Edit Membership': 'Edit Membership',
'Edit Message': 'Edit Message',
'Edit Messaging Settings': 'Edit Messaging Settings',
'Edit Metadata': 'Chỉnh sửa dữ liệu',
'Edit Modem Settings': 'Edit Modem Settings',
'Edit Need': 'Edit Need',
'Edit Need Type': 'Edit Need Type',
'Edit Office': 'Edit Office',
'Edit Options': 'Edit Options',
'Edit Organization': 'Edit Organization',
'Edit Parameters': 'Edit Parameters',
'Edit Peer Details': 'Chỉnh sửa chi tiết nhóm người',
'Edit Person Details': 'Edit Person Details',
'Edit Personal Effects Details': 'Edit Personal Effects Details',
'Edit Photo': 'Edit Photo',
'Edit Pledge': 'Edit Pledge',
'Edit Position': 'Edit Position',
'Edit Problem': 'Chỉnh sửa Vấn đề',
'Edit Project': 'Edit Project',
'Edit Projection': 'Edit Projection',
'Edit Rapid Assessment': 'Edit Rapid Assessment',
'Edit Received Item': 'Edit Received Item',
'Edit Received Shipment': 'Edit Received Shipment',
'Edit Record': 'Edit Record',
'Edit Recovery Details': 'Chỉnh sửa chi tiết khôi phục',
'Edit Registration': 'Edit Registration',
'Edit Registration Details': 'Edit Registration Details',
'Edit Report': 'Chỉnh sửa báo cáo',
'Edit Request': 'Edit Request',
'Edit Request Item': 'Chỉnh sửa yêu cầu hàng hóa',
'Edit Resource': 'Edit Resource',
'Edit Response': 'Chỉnh sửa phản hồi',
'Edit River': 'Edit River',
'Edit Role': 'Chỉnh sửa Vai trò',
'Edit Sector': 'Edit Sector',
'Edit Sent Item': 'Edit Sent Item',
'Edit Sent Shipment': 'Edit Sent Shipment',
'Edit Setting': 'Chỉnh sửa cài đặt',
'Edit Settings': 'Edit Settings',
'Edit Shelter': 'Chỉnh sửa thông tin cư trú',
'Edit Shelter Service': 'Chỉnh sửa dịch vụ cư trú',
'Edit Shelter Type': 'Edit Shelter Type',
'Edit Shipment Transit Log': 'Edit Shipment Transit Log',
'Edit Shipment/Way Bills': 'Edit Shipment/Way Bills',
'Edit Shipment<>Item Relation': 'Edit Shipment<>Item Relation',
'Edit Site': 'Edit Site',
'Edit Skill': 'Chỉnh sửa kỹ năng',
'Edit Skill Type': 'Edit Skill Type',
'Edit Solution': 'Edit Solution',
'Edit Staff': 'Edit Staff',
'Edit Staff Type': 'Edit Staff Type',
'Edit Storage Bin Type(s)': 'Edit Storage Bin Type(s)',
'Edit Storage Bins': 'Edit Storage Bins',
'Edit Storage Location': 'Edit Storage Location',
'Edit Subscription': 'Edit Subscription',
'Edit Survey Answer': 'Chỉnh sửa trả lời khảo sát',
'Edit Survey Question': 'Edit Survey Question',
'Edit Survey Section': 'Edit Survey Section',
'Edit Survey Series': 'Edit Survey Series',
'Edit Survey Template': 'Edit Survey Template',
'Edit Task': 'Edit Task',
'Edit Team': 'Edit Team',
'Edit Theme': 'Edit Theme',
'Edit Themes': 'Edit Themes',
'Edit Ticket': 'Edit Ticket',
'Edit Track': 'Edit Track',
'Edit Tropo Settings': 'Edit Tropo Settings',
'Edit Unit': 'Edit Unit',
'Edit User': 'Edit User',
'Edit Volunteer Details': 'Edit Volunteer Details',
'Edit Volunteer Registration': 'Chỉnh sửa đăng ký tình nguyện viên',
'Edit Warehouse': 'Edit Warehouse',
'Edit Warehouse Item': 'Edit Warehouse Item',
'Edit current record': 'Chỉnh sửa bản thu hiện tại',
'Edit message': 'Edit message',
'Edit the Application': 'Chỉnh sửa ứng dụng',
'Editable?': 'Editable?',
'Education': 'Giáo dục',
'Education materials received': 'Đã nhận được tài liệu, dụng cụ phục vụ học tập',
'Education materials, source': 'Dụng cụ học tập, nguồn',
'Effects Inventory': 'Effects Inventory',
'Eggs': 'Eggs',
'Either a shelter or a location must be specified': 'Either a shelter or a location must be specified',
'Either file upload or document URL required.': 'Either file upload or document URL required.',
'Either file upload or image URL required.': 'yêu cầu upload file hoặc URL ảnh',
'Elderly person headed households (>60 yrs)': 'Elderly person headed households (>60 yrs)',
'Electrical': 'Electrical',
'Elevated': 'Nâng cao lên',
'Email': 'Email',
'Email Settings': 'Email Settings',
'Email address verified, however registration is still pending approval - please wait until confirmation received.': 'Địa chỉ email đã được xác nhận, tuy nhiên đăng ký vẫn còn chờ duyệt - hãy đợi đến khi nhận được phê chuẩn',
'Email settings updated': 'Email settings updated',
'Embalming': 'Embalming',
'Embassy': 'Embassy',
'Emergency Capacity Building project': 'Emergency Capacity Building project',
'Emergency Department': 'Bộ phận cấp cứu',
'Emergency Shelter': 'Emergency Shelter',
'Emergency Support Facility': 'Emergency Support Facility',
'Emergency Support Service': 'Emergency Support Service',
'Emergency Telecommunications': 'Emergency Telecommunications',
'Enable/Disable Layers': 'Kích hoạt/Tắt Layer',
'Enabled': 'Enabled',
'End date': 'Ngày kết thúc',
'End date should be after start date': 'End date should be after start date',
'End of Period': 'End of Period',
'English': 'English',
'Enter Coordinates in Deg Min Sec': 'Nhập tọa độ ở dạng Độ,Phút,Giây',
'Enter Coordinates:': 'Enter Coordinates:',
'Enter a GPS Coord': 'Enter a GPS Coord',
'Enter a date before': 'Enter a date before',
'Enter a location': 'Enter a location',
'Enter a name for the spreadsheet you are uploading (mandatory).': 'Nhập tên cho bảng tính bạn đang tải lên(bắt buộc)',
'Enter a new support request.': 'Nhập một yêu cầu hỗ trợ mới',
'Enter a summary of the request here.': 'Nhập tóm tắt các yêu cầu ở đây',
'Enter a unique label!': 'Enter a unique label!',
'Enter a valid email': 'Enter a valid email',
'Enter tags separated by commas.': 'Enter tags separated by commas.',
'Enter the same password as above': 'Enter the same password as above',
'Enter your firstname': 'Nhập họ của bạn',
'Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages.': 'Không bắt buộc phải nhập số điện thoại nhưng nếu nhập, bạn sẽ nhận được tin nhắn từ hệ thống',
'Entry deleted': 'Entry deleted',
'Equipment': 'Equipment',
'Error encountered while applying the theme.': 'Error encountered while applying the theme.',
'Error in message': 'Error in message',
'Error logs for "%(app)s"': 'Error logs for "%(app)s"',
'Errors': 'Lỗi',
'Estimated # of households who are affected by the emergency': 'Ước tính # số hộ chịu ảnh hưởng từ thiên tai',
'Estimated # of people who are affected by the emergency': 'Estimated # of people who are affected by the emergency',
'Estimated total number of people in institutions': 'Estimated total number of people in institutions',
'Euros': 'Euro',
'Evacuating': 'Evacuating',
'Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)': 'Đánh giá thông tin trong thư. (giá trị này KHÔNG NÊN sử dụng trong các ứng dụng cảnh báo công cộng)',
'Event Time': 'Event Time',
'Event Type': 'Loại Sự kiện',
'Event type': 'Event type',
'Example': 'Example',
'Exceeded': 'Exceeded',
'Excreta disposal': 'Excreta disposal',
'Execute a pre-planned activity identified in <instruction>': 'Execute a pre-planned activity identified in <instruction>',
'Existing food stocks, main dishes': 'Existing food stocks, main dishes',
'Existing food stocks, side dishes': 'Existing food stocks, side dishes',
'Expected In': 'Expected In',
'Expected Out': 'Theo dự kiến',
'Expiry Time': 'Expiry Time',
'Explosive Hazard': 'Explosive Hazard',
'Export': 'Export',
'Export Data': 'Export Data',
'Export Database as CSV': 'Export Database as CSV',
'Export in GPX format': 'Export in GPX format',
'Export in KML format': 'Export in KML format',
'Export in OSM format': 'Export in OSM format',
'Export in PDF format': 'Export in PDF format',
'Export in RSS format': 'Export in RSS format',
'Export in XLS format': 'Export in XLS format',
'Eye Color': 'Màu mắt',
'Facebook': 'Facebook',
'Facial hair, color': 'Facial hair, color',
'Facial hair, type': 'Facial hair, type',
'Facial hear, length': 'Facial hear, length',
'Facility Operations': 'Facility Operations',
'Facility Status': 'Facility Status',
'Facility Type': 'Facility Type',
'Factors affecting school attendance': 'Factors affecting school attendance',
'Failed!': 'Failed!',
'Falling Object Hazard': 'Falling Object Hazard',
'Families/HH': 'Families/HH',
'Family': 'Family',
'Family tarpaulins received': 'Family tarpaulins received',
'Family tarpaulins, source': 'Family tarpaulins, source',
'Family/friends': 'Gia đình/Bạn bè',
'Farmland/fishing material assistance, Rank': 'Farmland/fishing material assistance, Rank',
'Fax': 'Fax',
'Feature Class': 'Feature Class',
'Feature Class Details': 'Feature Class Details',
'Feature Class added': 'Feature Class added',
'Feature Class deleted': 'Feature Class deleted',
'Feature Class updated': 'Feature Class updated',
'Feature Classes': 'Các mức phân loại tính năng',
'Feature Classes are collections of Locations (Features) of the same type': 'Các mức phân loại tính năng là tập hợp các vị trí (tính năng) cùng loại',
'Feature Layer Details': 'Feature Layer Details',
'Feature Layer added': 'Lớp đặc tính đã được thêm',
'Feature Layer deleted': 'Feature Layer deleted',
'Feature Layer updated': 'Cập nhật Layer tính năng',
'Feature Layers': 'Feature Layers',
'Feature Namespace': 'Feature Namespace',
'Feature Type': 'Loại tính năng',
'Features Include': 'Features Include',
'Female': 'Female',
'Female headed households': 'Female headed households',
'Few': 'Một vài',
'Field Hospital': 'Field Hospital',
'File': 'File',
'Fill in Latitude': 'Fill in Latitude',
'Fill in Longitude': 'Fill in Longitude',
'Filter': 'Filter',
'Filter Field': 'Filter Field',
'Filter Value': 'Giá trị lọc',
'Filtered search of aid pledges and requests': 'Filtered search of aid pledges and requests',
'Find': 'Tìm',
'Find Dead Body Report': 'Find Dead Body Report',
'Find Recovery Report': 'Tìm Báo cáo phục hồi',
'Find Volunteers': 'Find Volunteers',
'Find by Name': 'Find by Name',
'Finder': 'Finder',
'Fingerprint': 'Fingerprint',
'Fingerprinting': 'Dấu vân tay',
'Fingerprints': 'Fingerprints',
'Finish': 'Finish',
'Finished Jobs': 'Finished Jobs',
'Fire': 'Fire',
'Fire suppression and rescue': 'Fire suppression and rescue',
'First Name': 'First Name',
'First name': 'Tên',
'Fishing': 'Fishing',
'Flash Flood': 'Flash Flood',
'Flash Freeze': 'Flash Freeze',
'Fleet Management': 'Fleet Management',
'Flexible Impact Assessments': 'Flexible Impact Assessments',
'Flood': 'Lũ lụt',
'Flood Alerts': 'Flood Alerts',
'Flood Alerts show water levels in various parts of the country': 'Flood Alerts show water levels in various parts of the country',
'Flood Report': 'Flood Report',
'Flood Report Details': 'Chi tiết báo cáo tình hình lũ lụt',
'Flood Report added': 'Báo cáo lũ lụt đã được thêm',
'Flood Report deleted': 'Flood Report deleted',
'Flood Report updated': 'Flood Report updated',
'Flood Reports': 'Flood Reports',
'Flow Status': 'Flow Status',
'Focal Point': 'Tiêu điểm ',
'Fog': 'Fog',
'Food': 'Food',
'Food Supply': 'Food Supply',
'Food assistance available/expected': 'Food assistance available/expected',
'Footer': 'Footer',
'Footer file %s missing!': 'Footer file %s missing!',
'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).': 'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).',
'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.': 'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.',
'For each sync partner, there is a default sync job that runs after a specified interval of time. You can also set up more sync jobs which could be customized on your needs. Click the link on the right to get started.': 'Đối với mỗi đối tác đồng bộ, có một công việc đồng bộ mặc định chạy sau một khoảng thời gian nhất định. Bạn cũng có thể thiết lập thêm công việc đồng bộ hơn nữa để có thể tùy biến theo nhu cầu. Nhấp vào liên kết bên phải để bắt đầu',
'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners': 'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners',
'For live help from the Sahana community on using this application, go to': 'For live help from the Sahana community on using this application, go to',
'For messages that support alert network internal functions': 'For messages that support alert network internal functions',
'For more details on the Sahana Eden system, see the': 'For more details on the Sahana Eden system, see the',
'For more information, see ': 'For more information, see ',
'For:': 'For:',
'Forest Fire': 'Forest Fire',
'Formal camp': 'Trại chính thức',
'Format': 'Định dạng',
'Forms': 'Forms',
'Found': 'Found',
'Freezing Drizzle': 'Freezing Drizzle',
'Freezing Rain': 'Freezing Rain',
'Freezing Spray': 'Freezing Spray',
'French': 'French',
'Friday': 'Friday',
'From Location': 'From Location',
'From Warehouse': 'From Warehouse',
'Frost': 'Frost',
'Full': 'Full',
'Full beard': 'Full beard',
'Fullscreen Map': 'Fullscreen Map',
'Functional Tests': 'Functional Tests',
'Functions available': 'Functions available',
'Funding Organization': 'Funding Organization',
'Funeral': 'Funeral',
'GIS Reports of Shelter': 'GIS Reports of Shelter',
'GIS integration to view location details of the Shelter': 'GIS integration to view location details of the Shelter',
'GPS': 'GPS',
'GPS Marker': 'Đánh dấu GPS',
'GPS Track': 'GPS Track',
'GPS Track File': 'GPS Track File',
'GPX Track': 'GPX Track',
'Gale Wind': 'Gale Wind',
'Gap Analysis': 'Gap Analysis',
'Gap Analysis Map': 'Gap Analysis Map',
'Gap Analysis Report': 'Gap Analysis Report',
'Gap Map': 'Gap Map',
'Gap Report': 'Gap Report',
'Gateway Settings': 'Gateway Settings',
'Gateway settings updated': 'Gateway settings updated',
'Gender': 'Gender',
'General Medical/Surgical': 'General Medical/Surgical',
'General emergency and public safety': 'General emergency and public safety',
'Generator': 'Bộ sinh',
'Geocoder Selection': 'Geocoder Selection',
'Geometry Name': 'Geometry Name',
'Geophysical (inc. landslide)': 'Geophysical (inc. landslide)',
'Geraldo module not available within the running Python - this needs installing for PDF output!': 'Geraldo module not available within the running Python - this needs installing for PDF output!',
'Girls 13-18 yrs in affected area': 'Girls 13-18 yrs in affected area',
'Girls 13-18 yrs not attending school': 'Girls 13-18 yrs not attending school',
'Girls 6-12 yrs in affected area': 'Girls 6-12 yrs in affected area',
'Girls 6-12 yrs not attending school': 'Girls 6-12 yrs not attending school',
'Give a brief description of the image, e.g. what can be seen where on the picture (optional).': 'Give a brief description of the image, e.g. what can be seen where on the picture (optional).',
'Give information about where and when you have seen the person': 'Give information about where and when you have seen the person',
'Give information about where and when you have seen them': 'Give information about where and when you have seen them',
'Global Messaging Settings': 'Cài đặt hộp thư tin nhắn toàn cầu',
'Goatee': 'Goatee',
'Government': 'Government',
'Government UID': 'Government UID',
'Government building': 'Government building',
'Grade': 'Grade',
'Greek': 'Greek',
'Group': 'Group',
'Group Details': 'Group Details',
'Group ID': 'Group ID',
'Group Member added': 'Group Member added',
'Group Members': 'Group Members',
'Group Memberships': 'Group Memberships',
'Group Title': 'Group Title',
'Group Type': 'Loại nhóm',
'Group added': 'Đã thêm nhóm',
'Group deleted': 'Group deleted',
'Group description': 'Mô tả nhóm',
'Group name': 'Group name',
'Group type': 'Loại nhóm',
'Group updated': 'Group updated',
'Groups': 'Groups',
'Groups removed': 'Groups removed',
'Guest': 'Guest',
'Hail': 'Hail',
'Hair Color': 'Hair Color',
'Hair Length': 'Hair Length',
'Hair Style': 'Kiểu tóc',
'Has data from this Reference Document been entered into Sahana?': 'Has data from this Reference Document been entered into Sahana?',
'Has the safety and security of women and children in your community changed since the emergency?': 'Has the safety and security of women and children in your community changed since the emergency?',
'Has your business been damaged in the course of the disaster?': 'Has your business been damaged in the course of the disaster?',
'Have households received any shelter/NFI assistance or is assistance expected in the coming days?': 'Have households received any shelter/NFI assistance or is assistance expected in the coming days?',
'Have normal food sources been disrupted?': 'Have normal food sources been disrupted?',
'Have schools received or are expecting to receive any assistance?': 'Have schools received or are expecting to receive any assistance?',
'Have the people received or are you expecting any medical or food assistance in the coming days?': 'Have the people received or are you expecting any medical or food assistance in the coming days?',
'Hazard Pay': 'Hazard Pay',
'Hazardous Material': 'Hazardous Material',
'Hazardous Road Conditions': 'Hazardous Road Conditions',
'Header Background': 'Header Background',
'Header background file %s missing!': 'Header background file %s missing!',
'Headquarters': 'Headquarters',
'Health': 'Health',
'Health care assistance, Rank': 'Health care assistance, Rank',
'Health center': 'Trung tâm y tế',
'Health center with beds': 'Health center with beds',
'Health center without beds': 'Health center without beds',
'Health services functioning prior to disaster': 'Health services functioning prior to disaster',
'Health services functioning since disaster': 'Health services functioning since disaster',
'Healthcare Worker': 'Healthcare Worker',
'Heat Wave': 'Heat Wave',
'Heat and Humidity': 'Heat and Humidity',
'Height': 'Height',
'Height (cm)': 'Height (cm)',
'Help': 'Help',
'Helps to monitor status of hospitals': 'Hỗ trợ giám sát trạng thái các bệnh viện',
'Helps to report and search for Missing Persons': 'Hỗ trợ báo cáo và tìm kếm những người mất tích',
'Here are the solution items related to the problem.': 'Here are the solution items related to the problem.',
'High': 'High',
'High Water': 'High Water',
'Hindu': 'Hindu',
'History': 'Lịch sử',
'Hit the back button on your browser to try again.': 'Nhấp vào nút Back trên trình duyệt để tải lại',
'Holiday Address': 'Holiday Address',
'Home': 'Trang chủ',
'Home Address': 'Địa chỉ nhà',
'Home Country': 'Quê quán',
'Home Crime': 'Home Crime',
'Hospital': 'Bệnh viện',
'Hospital Details': 'Chi tiết thông tin bệnh viện',
'Hospital Status Report': 'Báo cáo tình trạng bệnh viện',
'Hospital information added': 'Đã thêm thông tin Bệnh viện',
'Hospital information deleted': 'Đã xóa thông tin bệnh viện',
'Hospital information updated': 'Đã cập nhật thông tin bệnh viện',
'Hospital status assessment.': 'Đánh giá trạng thái bệnh viện',
'Hospitals': 'Bệnh viện',
'Hot Spot': 'Điểm nóng',
'Hourly': 'Hourly',
'Household kits received': 'Household kits received',
'Household kits, source': 'Household kits, source',
'How did boys 13-17yrs spend most of their time prior to the disaster?': 'How did boys 13-17yrs spend most of their time prior to the disaster?',
'How did boys <12yrs spend most of their time prior to the disaster?': 'How did boys <12yrs spend most of their time prior to the disaster?',
'How did boys girls 13-17yrs spend most of their time prior to the disaster?': 'How did boys girls 13-17yrs spend most of their time prior to the disaster?',
'How did girls <12yrs spend most of their time prior to the disaster?': 'How did girls <12yrs spend most of their time prior to the disaster?',
'How do boys 13-17yrs spend most of their time now?': 'How do boys 13-17yrs spend most of their time now?',
'How do boys <12yrs spend most of their time now?': 'How do boys <12yrs spend most of their time now?',
'How do girls 13-17yrs spend most of their time now?': 'How do girls 13-17yrs spend most of their time now?',
'How do girls <12yrs spend most of their time now?': 'How do girls <12yrs spend most of their time now?',
'How does it work?': 'How does it work?',
'How is this person affected by the disaster? (Select all that apply)': 'How is this person affected by the disaster? (Select all that apply)',
'How long does it take you to reach the available water resources? Specify the time required to go there and back, including queuing time, by foot.': 'How long does it take you to reach the available water resources? Specify the time required to go there and back, including queuing time, by foot.',
'How long does it take you to walk to the health service?': 'How long does it take you to walk to the health service?',
'How long will the food last?': 'How long will the food last?',
'How long will this water resource last?': 'How long will this water resource last?',
'How many Boys (0-17 yrs) are Dead due to the crisis': 'How many Boys (0-17 yrs) are Dead due to the crisis',
'How many Boys (0-17 yrs) are Injured due to the crisis': 'How many Boys (0-17 yrs) are Injured due to the crisis',
'How many Boys (0-17 yrs) are Missing due to the crisis': 'Có bao nhiêu bé trai (0 đến 17 tuổi) bị mất tích do thiên tai',
'How many Girls (0-17 yrs) are Dead due to the crisis': 'How many Girls (0-17 yrs) are Dead due to the crisis',
'How many Girls (0-17 yrs) are Injured due to the crisis': 'How many Girls (0-17 yrs) are Injured due to the crisis',
'How many Girls (0-17 yrs) are Missing due to the crisis': 'How many Girls (0-17 yrs) are Missing due to the crisis',
'How many Men (18 yrs+) are Dead due to the crisis': 'Bao nhiêu người (trên 18 tuổi) chết trong thảm họa',
'How many Men (18 yrs+) are Injured due to the crisis': 'How many Men (18 yrs+) are Injured due to the crisis',
'How many Men (18 yrs+) are Missing due to the crisis': 'How many Men (18 yrs+) are Missing due to the crisis',
'How many Women (18 yrs+) are Dead due to the crisis': 'How many Women (18 yrs+) are Dead due to the crisis',
'How many Women (18 yrs+) are Injured due to the crisis': 'Số nạn nhân là nữ trên 18 tuổi chịu ảnh hưởng của cuộc khủng hoảng',
'How many Women (18 yrs+) are Missing due to the crisis': 'How many Women (18 yrs+) are Missing due to the crisis',
'How many days will the supplies last?': 'How many days will the supplies last?',
'How many doctors in the health centers are still actively working?': 'How many doctors in the health centers are still actively working?',
'How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?': 'How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?',
'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?': 'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?',
'How many latrines are available in the village/IDP centre/Camp?': 'How many latrines are available in the village/IDP centre/Camp?',
'How many midwives in the health centers are still actively working?': 'How many midwives in the health centers are still actively working?',
'How many new cases have been admitted to this facility in the past 24h?': 'How many new cases have been admitted to this facility in the past 24h?',
'How many nurses in the health centers are still actively working?': 'How many nurses in the health centers are still actively working?',
'How many of the patients with the disease died in the past 24h at this facility?': 'How many of the patients with the disease died in the past 24h at this facility?',
'How many of the primary school age boys (6-12) in the area are not attending school?': 'How many of the primary school age boys (6-12) in the area are not attending school?',
'How many of the primary school age girls (6-12) in the area are not attending school?': 'How many of the primary school age girls (6-12) in the area are not attending school?',
'How many of the primary/secondary schools are now open and running a regular schedule of class?': 'How many of the primary/secondary schools are now open and running a regular schedule of class?',
'How many of the secondary school age boys (13-18) in the area are not attending school?': 'How many of the secondary school age boys (13-18) in the area are not attending school?',
'How many of the secondary school age girls (13-18) in the area are not attending school?': 'How many of the secondary school age girls (13-18) in the area are not attending school?',
'How many patients with the disease are currently hospitalized at this facility?': 'How many patients with the disease are currently hospitalized at this facility?',
'How many primary school age boys (6-12) are in the affected area?': 'How many primary school age boys (6-12) are in the affected area?',
'How many primary school age girls (6-12) are in the affected area?': 'How many primary school age girls (6-12) are in the affected area?',
'How many primary/secondary schools were opening prior to the disaster?': 'How many primary/secondary schools were opening prior to the disaster?',
'How many secondary school age boys (13-18) are in the affected area?': 'How many secondary school age boys (13-18) are in the affected area?',
'How many secondary school age girls (13-18) are in the affected area?': 'How many secondary school age girls (13-18) are in the affected area?',
'How many teachers have been affected by the disaster (affected = unable to work)?': 'How many teachers have been affected by the disaster (affected = unable to work)?',
'How many teachers worked in the schools prior to the disaster?': 'How many teachers worked in the schools prior to the disaster?',
'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.': 'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.',
'Humanitarian NGO': 'Humanitarian NGO',
'Hurricane': 'Hurricane',
'Hurricane Force Wind': 'Hurricane Force Wind',
'Hygiene': 'Hygiene',
'Hygiene NFIs': 'Hygiene NFIs',
'Hygiene kits received': 'Hygiene kits received',
'Hygiene kits, source': 'Dụng cụ vệ sinh, nguồn',
'Hygiene practice': 'Hygiene practice',
'Hygiene problems': 'Hygiene problems',
'ID Label': 'ID Label',
'ID Tag': 'ID Tag',
'ID Tag Number': 'ID Tag Number',
'ID type': 'ID type',
'Ice Pressure': 'Áp suất băng',
'Iceberg': 'Iceberg',
'Ideally a full URL to the source file, otherwise just a note on where data came from.': 'Ideally a full URL to the source file, otherwise just a note on where data came from.',
'Identification': 'Identification',
'Identification Report': 'Identification Report',
'Identification Reports': 'Identification Reports',
'Identification Status': 'Identification Status',
'Identification label of the Storage bin.': 'Nhãn xác định Bin lưu trữ',
'Identified as': 'Identified as',
'Identified by': 'Identified by',
'Identity': 'Identity',
'Identity Details': 'Identity Details',
'Identity added': 'Identity added',
'Identity deleted': 'Identity deleted',
'Identity updated': 'Identity updated',
'If Unit = m, Base Unit = Km, then multiplicator is 0.0001 since 1m = 0.001 km.': 'If Unit = m, Base Unit = Km, then multiplicator is 0.0001 since 1m = 0.001 km.',
'If enabled then a log is maintained of all records a user accesses. If disabled then it can still be enabled on a per-module basis.': 'If enabled then a log is maintained of all records a user accesses. If disabled then it can still be enabled on a per-module basis.',
'If enabled then a log is maintained of all records a user edits. If disabled then it can still be enabled on a per-module basis.': 'If enabled then a log is maintained of all records a user edits. If disabled then it can still be enabled on a per-module basis.',
'If no marker defined then the system default marker is used': 'If no marker defined then the system default marker is used',
'If no, specify why': 'If no, specify why',
'If the location is a geographic area, then state at what level here.': 'If the location is a geographic area, then state at what level here.',
'If this is set to True then mails will be deleted from the server after downloading.': 'If this is set to True then mails will be deleted from the server after downloading.',
'If this record should be restricted then select which role is required to access the record here.': 'If this record should be restricted then select which role is required to access the record here.',
'If this record should be restricted then select which role(s) are permitted to access the record here.': 'If this record should be restricted then select which role(s) are permitted to access the record here.',
"If this setting is enabled then all deleted records are just flagged as deleted instead of being really deleted. They will appear in the raw database access but won't be visible to normal users.": "If this setting is enabled then all deleted records are just flagged as deleted instead of being really deleted. They will appear in the raw database access but won't be visible to normal users.",
'If yes, specify what and by whom': 'If yes, specify what and by whom',
'If yes, which and how': 'nếu có thì cái nào và như thế nào',
"If you cannot find the person you want to register as a volunteer, you can add them by clicking 'Add Person' below:": "If you cannot find the person you want to register as a volunteer, you can add them by clicking 'Add Person' below:",
"If you cannot find the person you want to report missing, you can add them by clicking 'Add Person' below:": "If you cannot find the person you want to report missing, you can add them by clicking 'Add Person' below:",
"If you cannot find the record of the person you want to report missing, you can add it by clicking 'Add Person' below:": "If you cannot find the record of the person you want to report missing, you can add it by clicking 'Add Person' below:",
'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.': 'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.',
'If you know what the Geonames ID of this location is then you can enter it here.': 'If you know what the Geonames ID of this location is then you can enter it here.',
'If you know what the OSM ID of this location is then you can enter it here.': 'If you know what the OSM ID of this location is then you can enter it here.',
'If you need to add a new document then you can click here to attach one.': 'Nếu cần thêm một tài liệu mới, nhấn vào đây để đính kèm',
'If you would like to help, then please': 'If you would like to help, then please',
'Illegal Immigrant': 'Illegal Immigrant',
'Image': 'Image',
'Image Details': 'Hình ảnh chi tiết',
'Image Tags': 'Image Tags',
'Image Type': 'Image Type',
'Image Upload': 'Image Upload',
'Image added': 'Image added',
'Image deleted': 'Image deleted',
'Image updated': 'Image updated',
'Image/Attachment': 'Image/Attachment',
'Image/Other Attachment': 'Image/Other Attachment',
'Imagery': 'Imagery',
'Images': 'Images',
'Immediate reconstruction assistance, Rank': 'Immediate reconstruction assistance, Rank',
'Impact Assessments': 'Impact Assessments',
'Impact Details': 'Impact Details',
'Impact Type': 'Impact Type',
'Impact Type Details': 'Impact Type Details',
'Impact Type added': 'Impact Type added',
'Impact Type deleted': 'Impact Type deleted',
'Impact Type updated': 'Impact Type updated',
'Impact Types': 'Impact Types',
'Impact added': 'Impact added',
'Impact deleted': 'Impact deleted',
'Impact updated': 'Impact updated',
'Impacts': 'Impacts',
'Import': 'Import',
'Import & Export Data': 'Import & Export Data',
'Import Data': 'Import Data',
'Import Job': 'Import Job',
'Import Jobs': 'Chuyển đổi nghề nghiệp',
'Import and Export': 'Import and Export',
'Import from Ushahidi Instance': 'Import from Ushahidi Instance',
'Import if Master': 'Import if Master',
'Import job created': 'Import job created',
'Import multiple tables as CSV': 'Chuyển đổi định dạng bảng sang CSV',
'Import/Export': 'Import/Export',
'Important': 'Quan trọng',
'Importantly where there are no aid services being provided': 'Importantly where there are no aid services being provided',
'Imported': 'Imported',
'Importing data from spreadsheets': 'Importing data from spreadsheets',
'Improper decontamination': 'Improper decontamination',
'Improper handling of dead bodies': 'Improper handling of dead bodies',
'In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'Trong GeoServer, đây là tên lớp. Trong WFS getCapabilities, đây là tên FeatureType, phần sau dấu hai chấm (:).',
'In GeoServer, this is the Workspace Name. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'In GeoServer, this is the Workspace Name. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).',
'In Inventories': 'In Inventories',
'In Process': 'In Process',
'In Progress': 'In Progress',
'In Transit': 'In Transit',
'In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?': 'In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?',
'Inbound Mail Settings': 'Inbound Mail Settings',
'Incident': 'Incident',
'Incident Categories': 'Incident Categories',
'Incident Details': 'Incident Details',
'Incident Report': 'Incident Report',
'Incident Report Details': 'Incident Report Details',
'Incident Report added': 'Incident Report added',
'Incident Report deleted': 'Incident Report deleted',
'Incident Report updated': 'Incident Report updated',
'Incident Reporting': 'Incident Reporting',
'Incident Reporting System': 'Incident Reporting System',
'Incident Reports': 'Incident Reports',
'Incident added': 'Incident added',
'Incident deleted': 'Incident deleted',
'Incident updated': 'Incident updated',
'Incidents': 'Incidents',
'Incomplete': 'Incomplete',
'Individuals': 'Individuals',
'Industrial Crime': 'Industrial Crime',
'Industry Fire': 'Industry Fire',
'Industry close to village/camp': 'Industry close to village/camp',
'Infant (0-1)': 'Trẻ sơ sinh',
'Infectious Disease': 'Infectious Disease',
'Infectious Diseases': 'Infectious Diseases',
'Infestation': 'Infestation',
'Informal Leader': 'Informal Leader',
'Informal camp': 'Informal camp',
'Information gaps': 'Information gaps',
'Infusion catheters available': 'Infusion catheters available',
'Infusion catheters need per 24h': 'Infusion catheters need per 24h',
'Infusion catheters needed per 24h': 'Infusion catheters needed per 24h',
'Infusions available': 'Infusions available',
'Infusions needed per 24h': 'Infusions needed per 24h',
'Input Job': 'Input Job',
'Instant Porridge': 'Instant Porridge',
"Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.": "Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.",
'Institution': 'Institution',
'Insufficient': 'Insufficient',
'Insufficient vars: Need module, resource, jresource, instance': 'Insufficient vars: Need module, resource, jresource, instance',
'Intake Items': 'Intake Items',
'Intergovernmental Organisation': 'Intergovernmental Organisation',
'Internal State': 'Internal State',
'International NGO': 'Tổ chức phi chính phủ quốc tế',
'International Organization': 'International Organization',
'International Staff': 'International Staff',
'Intervention': 'Intervention',
'Interview taking place at': 'Interview taking place at',
'Invalid': 'Invalid',
'Invalid Query': 'Truy vấn không hợp lệ',
'Invalid email': 'Invalid email',
'Invalid request!': 'Yêu cầu không hợp lệ',
'Invalid ticket': 'Ticket không hợp lệ',
'Inventories with Item': 'Inventories with Item',
'Inventory': 'Inventory',
'Inventory Item Details': 'Chi tiết hàng hóa trong kho',
'Inventory Item added': 'Bổ sung hàng hóa vào kho lưu trữ.',
'Inventory Item deleted': 'Inventory Item deleted',
'Inventory Item updated': 'Inventory Item updated',
'Inventory Items': 'Inventory Items',
'Inventory Management': 'Inventory Management',
'Inventory Store': 'Inventory Store',
'Inventory Store Details': 'Chi tiết kho lưu trữ',
'Inventory Store added': 'Inventory Store added',
'Inventory Store deleted': 'Inventory Store deleted',
'Inventory Store updated': 'Inventory Store updated',
'Inventory Stores': 'Inventory Stores',
'Inventory of Effects': 'Kho dự phòng',
'Inventory/Ledger': 'Inventory/Ledger',
'Is adequate food and water available for these institutions?': 'Is adequate food and water available for these institutions?',
'Is it safe to collect water?': 'Is it safe to collect water?',
'Is there any industrial or agro-chemical production close to the affected area/village?': 'Is there any industrial or agro-chemical production close to the affected area/village?',
'Issuing Authority': 'Issuing Authority',
'It is built using the Template agreed by a group of NGOs working together as the': 'It is built using the Template agreed by a group of NGOs working together as the',
'Item': 'Item',
'Item Catalog Categories': 'Item Catalog Categories',
'Item Catalog Category': 'Item Catalog Category',
'Item Catalog Category Details': 'Item Catalog Category Details',
'Item Catalog Category added': 'Item Catalog Category added',
'Item Catalog Category deleted': 'Item Catalog Category deleted',
'Item Catalog Category updated': 'Item Catalog Category updated',
'Item Catalog Details': 'Item Catalog Details',
'Item Catalog added': 'Item Catalog added',
'Item Catalog deleted': 'Đã xóa danh mục hàng hóa',
'Item Catalog updated': 'Item Catalog updated',
'Item Catalogs': 'Item Catalogs',
'Item Categories': 'Item Categories',
'Item Category': 'Item Category',
'Item Category Details': 'Item Category Details',
'Item Category added': 'Item Category added',
'Item Category deleted': 'Đã xóa Tiêu chí hàng hóa',
'Item Category updated': 'Item Category updated',
'Item Details': 'Item Details',
'Item Packet Details': 'Item Packet Details',
'Item Packet added': 'Item Packet added',
'Item Packet deleted': 'Item Packet deleted',
'Item Packet updated': 'Item Packet updated',
'Item Packets': 'Item Packets',
'Item Sub-Categories': 'Item Sub-Categories',
'Item Sub-Category': 'Item Sub-Category',
'Item Sub-Category Details': 'Item Sub-Category Details',
'Item Sub-Category added': 'Item Sub-Category added',
'Item Sub-Category deleted': 'Item Sub-Category deleted',
'Item Sub-Category updated': 'Đã cập nhật tiêu chí phụ của hàng hóa',
'Item added': 'Item added',
'Item already in Bundle!': 'Hàng đã có trong Bundle!',
'Item already in Kit!': 'Item already in Kit!',
'Item already in budget!': 'Item already in budget!',
'Item deleted': 'Item deleted',
'Item updated': 'Item updated',
'Items': 'Hàng hóa',
'Japanese': 'Japanese',
'Jerry can': 'Jerry can',
'Jew': 'Jew',
'Job Title': 'Job Title',
'Jobs': 'Jobs',
'Just Once': 'Just Once',
'KPIs': 'KPIs',
'Key': 'Key',
'Key Details': 'Key Details',
'Key added': 'Key added',
'Key deleted': 'Key deleted',
'Key updated': 'Key updated',
'Keys': 'Keys',
'Kit': 'Kit',
'Kit Contents': 'Kit Contents',
'Kit Details': 'Chi tiết Kit',
'Kit Updated': 'Kit Updated',
'Kit added': 'Kit added',
'Kit deleted': 'Đã xóa Kit',
'Kit updated': 'Kit updated',
'Kits': 'Kits',
'Known Identities': 'Known Identities',
'Known incidents of violence against women/girls': 'Known incidents of violence against women/girls',
'Known incidents of violence since disaster': 'Known incidents of violence since disaster',
'LICENCE': 'bản quyền',
'LICENSE': 'LICENSE',
'LMS Administration': 'Quản trị LMS',
'Label': 'Nhãn',
'Lack of material': 'Lack of material',
'Lack of school uniform': 'Lack of school uniform',
'Lack of supplies at school': 'Lack of supplies at school',
'Lack of transport to school': 'Lack of transport to school',
'Lactating women': 'Lactating women',
'Lahar': 'Lahar',
'Landslide': 'Landslide',
'Language': 'Language',
'Last Name': 'Last Name',
'Last known location': 'Last known location',
'Last name': 'Last name',
'Last synchronization time': 'Last synchronization time',
'Last updated by': 'Last updated by',
'Last updated on': 'Last updated on',
'Latitude': 'Latitude',
'Latitude & Longitude': 'Latitude & Longitude',
'Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': 'Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.',
'Latitude should be between': 'Latitude should be between',
'Law enforcement, military, homeland and local/private security': 'Law enforcement, military, homeland and local/private security',
'Layer': 'Layer',
'Layer Details': 'Layer Details',
'Layer added': 'Layer added',
'Layer deleted': 'Đã xóa layer',
'Layer updated': 'Đã cập nhật Layer',
'Layers': 'Layers',
'Layers updated': 'Đã cập nhật Layer',
'Layout': 'Layout',
'Legend Format': 'Legend Format',
'Length': 'Độ dài',
'Level': 'Level',
"Level is higher than parent's": "Level is higher than parent's",
'Library support not available for OpenID': 'Library support not available for OpenID',
'Line': 'Line',
'Link Item & Shipment': 'Link Item & Shipment',
'Link an Item & Shipment': 'Link an Item & Shipment',
'Linked Records': 'Linked Records',
'Linked records': 'Linked records',
'List': 'List',
'List / Add Baseline Types': 'List / Add Baseline Types',
'List / Add Impact Types': 'List / Add Impact Types',
'List / Add Services': 'List / Add Services',
'List / Add Types': 'List / Add Types',
'List Activities': 'List Activities',
'List Aid Requests': 'Danh sách Yêu cầu cứu trợ',
'List All': 'List All',
'List All Entries': 'List All Entries',
'List All Memberships': 'Danh sách tất cả các thành viên',
'List Assessment Summaries': 'List Assessment Summaries',
'List Assessments': 'Danh sách Trị giá tính thuế',
'List Baseline Types': 'List Baseline Types',
'List Baselines': 'List Baselines',
'List Budgets': 'List Budgets',
'List Bundles': 'List Bundles',
'List Catalog Items': 'List Catalog Items',
'List Category<>Sub-Category<>Catalog Relation': 'List Category<>Sub-Category<>Catalog Relation',
'List Checklists': 'Danh sách Checklists ',
'List Cluster Subsectors': 'List Cluster Subsectors',
'List Clusters': 'List Clusters',
'List Configs': 'List Configs',
'List Conflicts': 'List Conflicts',
'List Contacts': 'List Contacts',
'List Distribution Items': 'List Distribution Items',
'List Distributions': 'Danh sách ủng hộ,quyên góp',
'List Documents': 'List Documents',
'List Donors': 'List Donors',
'List Feature Classes': 'List Feature Classes',
'List Feature Layers': 'List Feature Layers',
'List Flood Reports': 'List Flood Reports',
'List Groups': 'Danh sách Nhóm',
'List Groups/View Members': 'List Groups/View Members',
'List Hospitals': 'Danh sách Bệnh viện',
'List Identities': 'List Identities',
'List Images': 'List Images',
'List Impact Assessments': 'List Impact Assessments',
'List Impact Types': 'List Impact Types',
'List Impacts': 'List Impacts',
'List Incident Reports': 'List Incident Reports',
'List Incidents': 'List Incidents',
'List Inventory Items': 'List Inventory Items',
'List Inventory Stores': 'List Inventory Stores',
'List Item Catalog Categories': 'List Item Catalog Categories',
'List Item Catalogs': 'List Item Catalogs',
'List Item Categories': 'List Item Categories',
'List Item Packets': 'List Item Packets',
'List Item Sub-Categories': 'List Item Sub-Categories',
'List Items': 'List Items',
'List Keys': 'List Keys',
'List Kits': 'Danh sách Kit',
'List Layers': 'List Layers',
'List Locations': 'Danh sách Vị trí',
'List Log Entries': 'List Log Entries',
'List Markers': 'List Markers',
'List Members': 'List Members',
'List Memberships': 'Danh sách thành viên',
'List Messages': 'Danh sách tin nhắn ',
'List Metadata': 'Danh sách dữ liệu',
'List Missing Persons': 'Danh sách những người mất tích',
'List Need Types': 'List Need Types',
'List Needs': 'List Needs',
'List Offices': 'List Offices',
'List Organizations': 'List Organizations',
'List Peers': 'List Peers',
'List Personal Effects': 'List Personal Effects',
'List Persons': 'List Persons',
'List Photos': 'List Photos',
'List Positions': 'List Positions',
'List Problems': 'List Problems',
'List Projections': 'List Projections',
'List Projects': 'List Projects',
'List Rapid Assessments': 'List Rapid Assessments',
'List Received Items': 'List Received Items',
'List Received Shipments': 'List Received Shipments',
'List Records': 'List Records',
'List Registrations': 'List Registrations',
'List Reports': 'List Reports',
'List Request Items': 'Danh sách Hang hóa yêu cầu',
'List Requests': 'Danh sách yêu cầu',
'List Resources': 'Danh sách tài nguyên',
'List Responses': 'List Responses',
'List Rivers': 'Danh sách sông',
'List Roles': 'Danh sách Vai trò',
'List Sections': 'List Sections',
'List Sector': 'List Sector',
'List Sent Items': 'List Sent Items',
'List Sent Shipments': 'List Sent Shipments',
'List Service Profiles': 'List Service Profiles',
'List Settings': 'List Settings',
'List Shelter Services': 'List Shelter Services',
'List Shelter Types': 'List Shelter Types',
'List Shelters': 'List Shelters',
'List Shipment Transit Logs': 'List Shipment Transit Logs',
'List Shipment/Way Bills': 'Danh sách Đơn hàng/Phí đường bộ',
'List Shipment<>Item Relation': 'List Shipment<>Item Relation',
'List Sites': 'List Sites',
'List Skill Types': 'List Skill Types',
'List Skills': 'Danh sách kỹ năng',
'List Solutions': 'List Solutions',
'List Staff': 'Danh sách Nhân viên',
'List Staff Types': 'List Staff Types',
'List Status': 'List Status',
'List Storage Bin Type(s)': 'List Storage Bin Type(s)',
'List Storage Bins': 'List Storage Bins',
'List Storage Location': 'Danh sách vị trí kho lưu trữ',
'List Subscriptions': 'Danh sách Đăng ký',
'List Survey Answers': 'List Survey Answers',
'List Survey Questions': 'Danh sách câu hỏi khảo sát',
'List Survey Sections': 'List Survey Sections',
'List Survey Series': 'List Survey Series',
'List Survey Templates': 'List Survey Templates',
'List Tasks': 'List Tasks',
'List Teams': 'List Teams',
'List Themes': 'List Themes',
'List Tickets': 'Danh sách Ticket',
'List Tracks': 'List Tracks',
'List Units': 'Danh sách đơn vị',
'List Users': 'Danh sách người dùng',
'List Volunteers': 'List Volunteers',
'List Warehouse Items': 'List Warehouse Items',
'List Warehouses': 'List Warehouses',
'List all': 'Hiển thị tất cả',
'List of Items': 'List of Items',
'List of Missing Persons': 'Danh sách những người mất tích',
'List of Peers': 'List of Peers',
'List of Reports': 'List of Reports',
'List of Requests': 'Danh sách yêu cầu',
'List of Spreadsheets': 'List of Spreadsheets',
'List of Spreadsheets uploaded': 'List of Spreadsheets uploaded',
'List of Volunteers for this skills set': 'List of Volunteers for this skills set',
'List of addresses': 'Danh sách các địa chỉ',
'List unidentified': 'List unidentified',
'List/Add': 'Danh sách/Thêm',
'Lists "who is doing what & where". Allows relief agencies to coordinate their activities': 'Danh sách "Ai làm gì, ở đâu"Cho phép các tổ chức cứu trợ điều phối hoạt động của mình',
'Live Help': 'Trợ giúp',
'Livelihood': 'Livelihood',
'Load Cleaned Data into Database': 'Load Cleaned Data into Database',
'Load Details': 'Load Details',
'Load Raw File into Grid': 'Load Raw File into Grid',
'Load the details to help decide which is the best one to keep out of the 2.': 'Load the details to help decide which is the best one to keep out of the 2.',
'Loading Locations...': 'Loading Locations...',
'Local Name': 'Tên địa phương',
'Local Names': 'Local Names',
'Location': 'Location',
'Location 1': 'Location 1',
'Location 2': 'Location 2',
'Location De-duplicated': 'Location De-duplicated',
'Location Details': 'Location Details',
'Location added': 'Location added',
'Location deleted': 'Đã xóa vị trí',
'Location details': 'Location details',
'Location updated': 'Location updated',
'Location: ': 'Location: ',
'Locations': 'Locations',
'Locations De-duplicator': 'Locations De-duplicator',
'Locations of this level need to have a parent of level': 'Locations of this level need to have a parent of level',
'Locations should be different!': 'Locations should be different!',
'Lockdown': 'Lockdown',
'Log': 'Log',
'Log Entry Details': 'Log Entry Details',
'Log entry added': 'Log entry added',
'Log entry deleted': 'Xóa theo dõi đăng nhập',
'Log entry updated': 'Cập nhật theo dõi đăng nhập',
'Login': 'Đăng nhập',
'Logistics': 'Logistics',
'Logistics Management': 'Logistics Management',
'Logistics Management System': 'Logistics Management System',
'Logo': 'Logo',
'Logo file %s missing!': 'Logo file %s missing!',
'Logout': 'Logout',
'Long Text': 'Long Text',
'Longitude': 'Longitude',
'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.': 'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.',
'Longitude is West - East (sideways). Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': 'Kinh độ trải dài theo hướng Đông-Tây. Kinh tuyến không nằm trên kinh tuyến gốc (Greenwich Mean Time) hướng về phía đông, vắt ngang châu Âu và châu Á.',
'Longitude should be between': 'Longitude should be between',
'Looting': 'Looting',
'Lost Password': 'Lost Password',
'Low': 'Low',
'Magnetic Storm': 'Magnetic Storm',
'Main cash source': 'Main cash source',
'Main income sources before disaster': 'Main income sources before disaster',
'Major outward damage': 'Major outward damage',
'Make Pledge': 'Make Pledge',
'Make Request': 'Make Request',
'Make a Request': 'Tạo yêu cầu',
'Make a Request for Aid': 'Tạo yêu cầu cứu trợ',
'Make preparations per the <instruction>': 'Make preparations per the <instruction>',
'Male': 'Male',
'Malnutrition present prior to disaster': 'Malnutrition present prior to disaster',
'Manage': 'Manage',
'Manage Category': 'Manage Category',
'Manage Item catalog': 'Manage Item catalog',
'Manage Kits': 'Manage Kits',
'Manage Relief Item Catalogue': 'Manage Relief Item Catalogue',
'Manage Sub-Category': 'Quản lý Tiêu chí phụ',
'Manage Users & Roles': 'Manage Users & Roles',
'Manage Warehouses/Sites': 'Manage Warehouses/Sites',
'Manage requests of hospitals for assistance.': 'Manage requests of hospitals for assistance.',
'Manage volunteers by capturing their skills, availability and allocation': 'Nắm bắt kỹ năng, khả năng và khu vực hoạt động của tình nguyện viên để quản lý',
'Manager': 'Manager',
'Managing Office': 'Managing Office',
'Managing, Storing and Distributing Catalog Items.': 'Managing, Storing and Distributing Catalog Items.',
'Managing, Storing and Distributing Relief Items': 'Quản lý, Lưu trữ và Quyên góp hàng cứu trợ',
'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).',
'Mandatory. The URL to access the service.': 'Mandatory. The URL to access the service.',
'Manual': 'Manual',
'Manual Synchronization': 'Manual Synchronization',
'Many': 'Many',
'Map': 'Map',
'Map Height': 'Chiều cao bản đồ',
'Map Service Catalogue': 'Catalogue bản đồ dịch vụ',
'Map Settings': 'Cài đặt bản đồ',
'Map Viewing Client': 'Map Viewing Client',
'Map Width': 'Độ rộng bản đồ',
'Map of Hospitals': 'Bản đồ bệnh viện',
'Mapping': 'Mapping',
'Marine Security': 'Marine Security',
'Marital Status': 'Tình trạng hôn nhân',
'Marker': 'Marker',
'Marker Details': 'Chi tiết Đèn hiệu',
'Marker added': 'Marker added',
'Marker deleted': 'Marker deleted',
'Marker updated': 'Marker updated',
'Markers': 'Markers',
'Master Message Log': 'Master Message Log',
'Master Message Log to process incoming reports & requests': 'Kiểm soát log tin nhắn để xử lý báo cáo và yêu cầu gửi đến',
'Match Percentage': 'Match Percentage',
'Match percentage indicates the % match between these two records': 'Match percentage indicates the % match between these two records',
'Matching Records': 'Matching Records',
'Matrix of Choices (Multiple Answers)': 'Matrix of Choices (Multiple Answers)',
'Matrix of Choices (Only one answer)': 'Matrix of Choices (Only one answer)',
'Matrix of Text Fields': 'Matrix of Text Fields',
'Max Persons per Dwelling': 'Max Persons per Dwelling',
'Maximum Weight': 'Maximum Weight',
'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.': 'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.',
'Maximum weight capacity of the items the storage bin can contain. followed by choosing the unit from the drop down list.': 'Maximum weight capacity of the items the storage bin can contain. followed by choosing the unit from the drop down list.',
'Medical and public health': 'Medical and public health',
'Medicine': 'Medicine',
'Medium': 'Medium',
'Megabytes per Month': 'Megabytes per Month',
'Members': 'Members',
'Membership': 'Membership',
'Membership Details': 'Membership Details',
'Membership added': 'Đã thêm thành viên',
'Membership deleted': 'Membership deleted',
'Membership updated': 'Cập nhật thông tin thành viên',
'Memberships': 'Memberships',
'Message': 'Message',
'Message Details': 'Message Details',
'Message Variable': 'Message Variable',
'Message added': 'Đã thêm tin nhắn',
'Message deleted': 'Message deleted',
'Message sent to outbox': 'Message sent to outbox',
'Message updated': 'Message updated',
'Message variable': 'Message variable',
'Messages': 'Messages',
'Messaging': 'Messaging',
'Messaging settings updated': 'Messaging settings updated',
'Metadata': 'Metadata',
'Metadata Details': 'Metadata Details',
'Metadata added': 'Đã thêm dữ liệu',
'Metadata can be supplied here to be applied to all uploaded photos, if desired.': 'Metadata can be supplied here to be applied to all uploaded photos, if desired.',
'Metadata deleted': 'Metadata deleted',
'Metadata updated': 'Metadata updated',
'Meteorite': 'Meteorite',
'Meteorological (inc. flood)': 'Meteorological (inc. flood)',
'Method used': 'Method used',
'Micronutrient malnutrition prior to disaster': 'Micronutrient malnutrition prior to disaster',
'Middle Name': 'Middle Name',
'Migrants or ethnic minorities': 'Dân di cư hoặc dân tộc thiểu số',
'Military': 'Military',
'Minorities participating in coping activities': 'Minorities participating in coping activities',
'Minutes must be a number between 0 and 60': 'Minutes must be a number between 0 and 60',
'Minutes per Month': 'Minutes per Month',
'Minutes should be a number greater than 0 and less than 60': 'Minutes should be a number greater than 0 and less than 60',
'Miscellaneous': 'Miscellaneous',
'Missing': 'Missing',
'Missing Person': 'Người mất tích',
'Missing Person Details': 'Chi tiết về người mất tích',
'Missing Person Reports': 'Báo cáo số người mất tích',
'Missing Persons': 'Người mất tích',
'Missing Persons Registry': 'Missing Persons Registry',
'Missing Persons Report': 'Báo cáo số người mất tích',
'Missing Report': 'Missing Report',
'Missing Senior Citizen': 'Missing Senior Citizen',
'Missing Vulnerable Person': 'Missing Vulnerable Person',
'Mobile': 'Mobile',
'Mobile Assess': 'Mobile Assess',
'Mobile Assess.': 'Mobile Assess.',
'Mobile Basic Assessment': 'Mobile Basic Assessment',
'Mobile Phone': 'Mobile Phone',
'Mode': 'Mode',
'Modem Settings': 'Modem Settings',
'Modem settings updated': 'Modem settings updated',
'Moderator': 'Moderator',
'Modify Information on groups and individuals': 'Modify Information on groups and individuals',
'Modifying data in spreadsheet before importing it to the database': 'Modifying data in spreadsheet before importing it to the database',
'Module Administration': 'Quản trị Mô-đun',
'Module disabled!': 'Module disabled!',
'Module provides access to information on current Flood Levels.': 'Module provides access to information on current Flood Levels.',
'Module stores structured reports done by Professional Organizations - currently data includes WFP Assessments.': 'Module stores structured reports done by Professional Organizations - currently data includes WFP Assessments.',
'Monday': 'Thứ Hai',
'Monthly Cost': 'Monthly Cost',
'Monthly Salary': 'Monthly Salary',
'Months': 'Months',
'Morgue Status': 'Morgue Status',
'Morgue Units Available': 'Morgue Units Available',
'Mosque': 'Mosque',
'Motorcycle': 'Motorcycle',
'Moustache': 'Moustache',
'Movements (Filter In/Out/Lost)': 'Movements (Filter In/Out/Lost)',
'MultiPolygon': 'MultiPolygon',
'Multiple': 'Multiple',
'Multiple Choice (Multiple Answers)': 'Multiple Choice (Multiple Answers)',
'Multiple Choice (Only One Answer)': 'Multiple Choice (Only One Answer)',
'Multiple Text Fields': 'Multiple Text Fields',
'Multiplicator': 'Multiplicator',
'Muslim': 'Muslim',
'My Tasks': 'My Tasks',
'N/A': 'Không xác định',
"NB SMS requests are filtered to just those which are 'actionable', whilst the Tweet requests are unfiltered, so that is likely to be a good place to start Searching.": "NB SMS requests are filtered to just those which are 'actionable', whilst the Tweet requests are unfiltered, so that is likely to be a good place to start Searching.",
'Name': 'Tên',
'Name and/or ID': 'Name and/or ID',
'Name and/or ID Label': 'Name and/or ID Label',
'Name of Storage Bin Type.': 'Tên loại Bin lưu trữ',
'Name of the file (& optional sub-path) located in static which should be used for the background of the header.': 'Name of the file (& optional sub-path) located in static which should be used for the background of the header.',
'Name of the file (& optional sub-path) located in static which should be used for the top-left image.': 'Name of the file (& optional sub-path) located in static which should be used for the top-left image.',
'Name of the file (& optional sub-path) located in views which should be used for footer.': 'Name of the file (& optional sub-path) located in views which should be used for footer.',
'Name of the person in local language and script (optional).': 'Name of the person in local language and script (optional).',
'Name of the unit or department this report refers to. Leave empty if your hospital has no subdivisions.': 'Name of the unit or department this report refers to. Leave empty if your hospital has no subdivisions.',
'Names can be added in multiple languages': 'Names can be added in multiple languages',
'National ID Card': 'Chứng minh thư',
'National NGO': 'Các tổ chức phi chính phủ ',
'National Staff': 'National Staff',
'Nationality': 'Nationality',
'Nationality of the person.': 'Nationality of the person.',
'Nautical Accident': 'Nautical Accident',
'Nautical Hijacking': 'Nautical Hijacking',
'Need Type': 'Need Type',
'Need Type Details': 'Need Type Details',
'Need Type added': 'Need Type added',
'Need Type deleted': 'Need Type deleted',
'Need Type updated': 'Need Type updated',
'Need Types': 'Need Types',
"Need a 'url' argument!": "Need a 'url' argument!",
'Need added': 'Need added',
'Need deleted': 'Need deleted',
'Need to configure Twitter Authentication': 'Need to configure Twitter Authentication',
'Need to select 2 Locations': 'Need to select 2 Locations',
'Need to specify a Budget!': 'Need to specify a Budget!',
'Need to specify a Kit!': 'Need to specify a Kit!',
'Need to specify a Resource!': 'Need to specify a Resource!',
'Need to specify a bundle!': 'Need to specify a bundle!',
'Need to specify a group!': 'Need to specify a group!',
'Need to specify a location to search for.': 'Cần chọn địa điểm tìm kiếm',
'Need to specify a role!': 'Yêu cầu xác định vai trò',
'Need to specify a table!': 'Need to specify a table!',
'Need to specify a user!': 'Need to specify a user!',
'Need updated': 'Need updated',
'Needs': 'Needs',
'Needs Details': 'Needs Details',
'Needs to reduce vulnerability to violence': 'Needs to reduce vulnerability to violence',
'Negative Flow Isolation': 'Negative Flow Isolation',
'Neighbourhood': 'Neighbourhood',
'Neonatal ICU': 'Neonatal ICU',
'Neonatology': 'Neonatology',
'Network': 'Network',
'Neurology': 'Neurology',
'New': 'New',
'New Assessment reported from': 'New Assessment reported from',
'New Checklist': 'Checklist mới',
'New Peer': 'New Peer',
'New Record': 'New Record',
'New Report': 'New Report',
'New Request': 'Yêu cầu mới',
'New Solution Choice': 'New Solution Choice',
'New Synchronization Peer': 'New Synchronization Peer',
'New cases in the past 24h': 'New cases in the past 24h',
'Next': 'Next',
'No': 'No',
'No Activities Found': 'No Activities Found',
'No Addresses currently registered': 'Hiện tại chưa đăng ký Địa chỉ',
'No Aid Requests have been made yet': 'Chưa có yêu cầu cứu trợ nào được tạo',
'No Assessment Summaries currently registered': 'No Assessment Summaries currently registered',
'No Assessments currently registered': 'Chưa đăng ký trị giá tính thuế',
'No Baseline Types currently registered': 'No Baseline Types currently registered',
'No Baselines currently registered': 'No Baselines currently registered',
'No Budgets currently registered': 'No Budgets currently registered',
'No Bundles currently registered': 'No Bundles currently registered',
'No Catalog Items currently registered': 'No Catalog Items currently registered',
'No Category<>Sub-Category<>Catalog Relation currently registered': 'Hiện tại chưa có Category<>Sub-Category<>Catalog Relation được đăng ký',
'No Checklist available': 'No Checklist available',
'No Cluster Subsectors currently registered': 'No Cluster Subsectors currently registered',
'No Clusters currently registered': 'No Clusters currently registered',
'No Configs currently defined': 'No Configs currently defined',
'No Details currently registered': 'No Details currently registered',
'No Distribution Items currently registered': 'Chưa đăng ký danh sách hàng hóa đóng góp',
'No Distributions currently registered': 'No Distributions currently registered',
'No Documents found': 'No Documents found',
'No Donors currently registered': 'No Donors currently registered',
'No Feature Classes currently defined': 'No Feature Classes currently defined',
'No Feature Layers currently defined': 'No Feature Layers currently defined',
'No Flood Reports currently registered': 'Chưa đăng ký báo cáo lũ lụt',
'No Groups currently defined': 'Hiện tại không xác định được nhóm',
'No Groups currently registered': 'No Groups currently registered',
'No Hospitals currently registered': 'Chưa có bệnh viện nào đăng ký',
'No Identification Report Available': 'No Identification Report Available',
'No Identities currently registered': 'No Identities currently registered',
'No Image': 'Không có ảnh',
'No Images currently registered': 'Hiện tại không có ảnh nào được đăng ký',
'No Impact Types currently registered': 'No Impact Types currently registered',
'No Impacts currently registered': 'No Impacts currently registered',
'No Incident Reports currently registered': 'No Incident Reports currently registered',
'No Incidents currently registered': 'Chưa sự việc nào được đưa lên',
'No Inventory Items currently registered': 'No Inventory Items currently registered',
'No Inventory Stores currently registered': 'No Inventory Stores currently registered',
'No Item Catalog Category currently registered': 'No Item Catalog Category currently registered',
'No Item Catalog currently registered': 'No Item Catalog currently registered',
'No Item Categories currently registered': 'No Item Categories currently registered',
'No Item Packets currently registered': 'No Item Packets currently registered',
'No Item Sub-Category currently registered': 'No Item Sub-Category currently registered',
'No Item currently registered': 'No Item currently registered',
'No Items currently registered': 'No Items currently registered',
'No Items currently requested': 'Hiện tại không có hàng hóa nào được yêu cầu',
'No Keys currently defined': 'No Keys currently defined',
'No Kits currently registered': 'No Kits currently registered',
'No Locations currently available': 'No Locations currently available',
'No Locations currently registered': 'No Locations currently registered',
'No Markers currently available': 'Chưa đăng ký marker ',
'No Members currently registered': 'Chưa đăng ký thành viên',
'No Memberships currently defined': 'Chưa xác nhận đăng ký thành viên',
'No Memberships currently registered': 'Chưa có thông tin đăng ký thành viên',
'No Messages currently in Outbox': 'No Messages currently in Outbox',
'No Metadata currently defined': 'No Metadata currently defined',
'No Need Types currently registered': 'No Need Types currently registered',
'No Needs currently registered': 'No Needs currently registered',
'No Offices currently registered': 'No Offices currently registered',
'No Offices found!': 'No Offices found!',
'No Organizations currently registered': 'No Organizations currently registered',
'No People currently registered in this shelter': 'No People currently registered in this shelter',
'No Persons currently registered': 'No Persons currently registered',
'No Persons currently reported missing': 'No Persons currently reported missing',
'No Persons found': 'No Persons found',
'No Photos found': 'Không tìm thấy ảnh nào',
'No Presence Log Entries currently registered': 'No Presence Log Entries currently registered',
'No Problems currently defined': 'No Problems currently defined',
'No Projections currently defined': 'Hiện tại chưa xác định được kế hoạch dự phòng',
'No Projects currently registered': 'Chưa đăng ký dự án',
'No Rapid Assessments currently registered': 'No Rapid Assessments currently registered',
'No Received Items currently registered': 'No Received Items currently registered',
'No Received Shipments': 'No Received Shipments',
| codeparrot/github-code-clean |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
file generate_contributions.py
This file is part of LyX, the document processor.
Licence details can be found in the file COPYING.
author Angus Leeming
Full author contact details are available in file CREDITS
This script both stores and manipulates the raw data needed to
create CREDITS, credits.inc and blanket-permission.inc
Usage:
$ python generate_contributions.py \
CREDITS \
credits.inc \
blanket-permission.inc
where the arguments are the pathnames of the generated files.
'''
import codecs, sys, textwrap
def xml_escape(s):
    """Return *s* with XML/HTML special characters replaced by entities.

    Escapes '&', '<', '>' and '"'. The ampersand MUST be replaced first,
    otherwise the '&' introduced by the other replacements would itself
    be re-escaped.

    Note: the previous version replaced each character with itself
    (the entity text had been lost), so it escaped nothing at all.
    """
    s = s.replace("&", "&amp;")
    s = s.replace("<", "&lt;")
    s = s.replace(">", "&gt;")
    s = s.replace('"', "&quot;")
    return s
class contributor:
    """One contributor's raw data plus the formatters that render it.

    Fields mirror the constructor arguments; empty strings mean the
    corresponding piece of information is not available.
    """

    def __init__(self, name, contact, licence, permission_title,
                 archive_id, permission_date, credit):
        self.name = name
        self.contact = contact
        self.licence = licence
        self.permission_title = permission_title
        self.archive_id = archive_id
        self.permission_date = permission_date
        self.credit = credit

    def as_txt_credits(self):
        """Render this entry for the plain-text CREDITS file."""
        lines = ['@b%s\n' % self.name]
        if self.contact:
            # A contact containing "http" is treated as a URL,
            # anything else is labelled as an e-mail address.
            if "http" in self.contact:
                lines.append('@i%s\n' % self.contact)
            else:
                lines.append('@iE-mail: %s\n' % self.contact)
        lines.append(' %s\n' % self.credit.replace('\n', '\n '))
        return "".join(lines)

    def as_php_credits(self, wrapper):
        """Render this entry as a PHP credits_contrib() call.

        *wrapper* is a textwrap.TextWrapper used to reflow the credit text.
        """
        wrapped_credit = "\n".join(wrapper.wrap(xml_escape(self.credit)))
        return '''
$output=$output.credits_contrib("%s",
"%s",
"%s");
''' % (xml_escape(self.name), xml_escape(self.contact), wrapped_credit)

    def as_php_blanket(self):
        """Render this entry as a PHP blanket_contrib() call."""
        fields = (xml_escape(self.name),
                  xml_escape(self.contact),
                  xml_escape(self.permission_title),
                  xml_escape(self.archive_id),
                  xml_escape(self.permission_date))
        return '''
$output=$output.blanket_contrib("%s",
"%s",
"%s",
"%s",
"%s");
''' % fields
def error(message):
    """Report *message* on stderr (when non-empty) and exit with status 1."""
    if message:
        sys.stderr.write("%s\n" % message)
    sys.exit(1)
def usage(prog_name):
    """Return the command-line usage text.

    *prog_name* is accepted for call-site symmetry but is not
    interpolated into the message.
    """
    return '''
Usage:
$ python generate_contributions.py \\
CREDITS \\
credits.inc \\
blanket-permission.inc
where the arguments are the pathnames of the generated files.
'''
def collate_incomplete(contributors):
    """Build a warning message naming contributors with missing data.

    Lists separately those without a credit entry and those without an
    explicit licence statement.
    """
    no_credit = [c.name for c in contributors if len(c.credit) == 0]
    no_licence = [c.name for c in contributors if len(c.licence) == 0]
    return '''WARNING!
The following contributors do not have a CREDITS entry:
%s
These ones have no explicit licence statement:
%s
''' % (",\n ".join(no_credit), ",\n ".join(no_licence))
def as_txt_credits(contributors):
    """Concatenate the CREDITS blurbs of all credited contributors.

    Contributors with an empty credit field are skipped; a standard
    "contact us" footer is appended.
    """
    chunks = [c.as_txt_credits() for c in contributors if len(c.credit) != 0]
    chunks.append('''
If your name doesn't appear here although you've done something for LyX, or your entry is wrong or incomplete, just drop some e-mail to lyx@lyx.org. Thanks.
''')
    return "".join(chunks)
def header():
    """Return the PHP prologue warning that the file is autogenerated."""
    return '''<?php
// WARNING! This file is autogenerated.
// Any changes to it will be lost.
// Please modify generate_contributions.py direct.
'''
def footer():
    """Return the (trivial) PHP epilogue: a single newline."""
    return "\n"
def as_php_credits(contributors, file):
    """Return the complete PHP source of credits.inc.

    The generated PHP defines credits_contrib() and credits_output(),
    then emits one credits_contrib() call per credited contributor.
    The *file* argument is accepted but not used.
    """
    pieces = [header()]
    pieces.append('''
function credits_contrib($name, $email, $msg) {
$email = str_replace(' () ', '@', $email);
$email = str_replace(' ! ', '.', $email);
if (isset($email) && $email != "") {
if (strncasecmp($email,"http",4) == 0)
$output =$output. "<dt><b>[[${email} | ${name}]]</b>";
else
$output=$output. "<dt><b>[[mailto:${email} | ${name}]]</b>";
} else
$output=$output. "<dt><b>${name}</b>";
$msg = preg_replace("/\\n */", "\\n ", ltrim($msg));
$output=$output. "
</dt>
<dd>
${msg}
</dd>";
return $output;
}
function credits_output() {
$output=$output."<p>
If your name doesn't appear here although you've done
something for LyX, or your entry is wrong or incomplete,
just drop an e-mail to the
[[mailto:lyx-devel@lists.lyx.org | lyx-devel]]
mailing list. Thanks.
</p>
<dl>";
''')
    # Reflow each credit blurb to 60 columns for readable PHP output.
    wrapper = textwrap.TextWrapper(width=60, subsequent_indent=" ")
    pieces.extend(person.as_php_credits(wrapper)
                  for person in contributors
                  if len(person.credit) != 0)
    pieces.append('''
$output=$output."</dl>";
return $output;
}
''')
    pieces.append(footer())
    return "".join(pieces)
def as_php_blanket(contributors, file):
    """Return the complete PHP source of blanket-permission.inc.

    The generated PHP defines blanket_contrib() and blanket_output(),
    then emits one blanket_contrib() call per contributor, grouped by
    licence: first the GPL grants, then the Artistic ones.
    The *file* argument is accepted but not used.
    """
    pieces = [header()]
    pieces.append('''
function blanket_contrib($name, $email, $msg_title, $msg_ref, $date) {
$email = str_replace(' () ', '@', $email);
$email = str_replace(' ! ', '.', $email);
$output=$output. "
<dt>
<b>[[mailto:${email} | ${name}]]</b>
</dt>
<dd>
See the lyx-devel mailing list message
"";
if (isset($msg_ref) && $msg_ref != "") {
$msg_ref = htmlspecialchars("$msg_ref");
$output=$output. "[[http://marc.info/?l=lyx-devel&" . ${msg_ref} . "|" . ${msg_title} . "]]";
} else {
$output=$output. "${msg_title}";
}
$output=$output. ""
of $date.
</dd>";
return $output;
}
function blanket_output() {
$output=$output."<p>
The following people hereby grant permission to license their
contributions to LyX under the
[[http://www.opensource.org/licenses/gpl-license.php |
Gnu General Public License]], version 2 or later.
</p>
<dl>";
''')
    pieces.extend(person.as_php_blanket()
                  for person in contributors
                  if person.licence == "GPL")
    pieces.append('''
$output=$output."</dl>";
$output=$output."
<p>
The following people hereby grant permission to license their
contributions to LyX under the
[[http://www.opensource.org/licenses/artistic-license-2.0.php |
Artistic License 2]].
</p>
<dl>";
''')
    pieces.extend(person.as_php_blanket()
                  for person in contributors
                  if person.licence == "Artistic")
    pieces.append('''
$output=$output."</dl>";
return $output;
}
''')
    pieces.append(footer())
    return "".join(pieces)
def main(argv, contributors):
    """Generate the three output files named on the command line.

    argv[1]: plain-text CREDITS
    argv[2]: credits.inc (PHP)
    argv[3]: blanket-permission.inc (PHP)

    Also prints a warning on stderr about contributors whose entries
    are incomplete. Exits via error() (status 1) when the wrong number
    of arguments is given.
    """
    if len(argv) != 4:
        error(usage(argv[0]))
    # codecs.open (already imported, but previously unused) handles the
    # UTF-8 encoding directly, and the `with` blocks guarantee the
    # handles are closed — the original opened all three files without
    # ever closing them.
    outputs = (
        (argv[1], as_txt_credits(contributors)),
        (argv[2], as_php_credits(contributors, argv[2])),
        (argv[3], as_php_blanket(contributors, argv[3])),
    )
    for path, text in outputs:
        with codecs.open(path, "w", encoding="utf-8") as out:
            out.write(text)
    sys.stderr.write(collate_incomplete(contributors) + '\n')
# Store the raw data.
contributors = [
contributor(u"Ronen Abravanel",
"ronena () gmail ! com",
"GPL",
"Re: Patch: Diagram inset",
"m=128486837824718",
"19 September 2010",
u"Support for feyn diagrams"),
contributor(u"Maarten Afman",
"info () afman ! net",
"GPL",
"Fwd: Re: The LyX licence",
"m=110958096916679",
"27 February 2005",
u"Dutch translation team member"),
contributor(u"Hatim Alahmadi",
"dr.hatim () hotmail ! com",
"GPL",
"license issue",
"m=121727417724431",
"28 July 2008",
u"Arabic translation"),
contributor(u"Asger Alstrup",
"aalstrup () laerdal ! dk",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899716913300",
"21 February 2005",
u"General hacking of user interface stuff and those other bits and pieces"),
contributor(u"Jesper Stemann Andersen",
"jesper () sait ! dk",
"GPL",
"Contributions GPLed",
"m=130336947315984",
"21 April 2011",
u"Danish translation"),
contributor(u"Pascal André",
"andre () via ! ecp ! fr",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111263406200012",
"1 April 2005",
u"External style definition files, linuxdoc sgml support and more ftp-site ftp.lyx.org"),
contributor(u"Liviu Andronic",
"landronimirc () gmail ! com",
"GPL",
"contributions GPLed",
"m=121869084720708",
"14 August 2008",
u"Romanian localization and support for the frletter document class"),
contributor(u"Georger Araujo",
"georger_br () yahoo ! com ! br",
"GPL",
"pt_BR.po translation for LyX 2.1.3",
"m=143058265303480",
"2 May 2015",
u"Brazilian Portuguese translation"),
contributor(u"João Luis Meloni Assirati",
"assirati () nonada ! if ! usp ! br",
"GPL",
"Re: The LyX licence",
"m=110918749022256",
"23 February 2005",
u"Added support for unix sockets and thence the 'inverse DVI' feature"),
contributor(u"Patrick Atamaniuk",
"atamaniuk () frobs ! net",
"GPL",
"License for my contributions",
"m=129594232112957",
"28 January 2011",
u"fix-cm module"),
contributor(u"Gioele Barabucci",
"gioele () svario ! it",
"GPL",
"Contribution license",
"m=136933235620262",
"23 May 2013",
u"ACM-SIGS layouts"),
contributor(u"Özgür Uğraş Baran",
"ugras.baran () gmail ! com",
"GPL",
"Re: [patch] new InsetCommandParams",
"m=116124030512963",
"19 October 2006",
u"New commandparams structure, Nomenclature inset"),
contributor(u"Susana Barbosa",
"susana.barbosa () fc ! up ! pt",
"GPL",
"License",
"m=118707828425316",
"14 August 2007",
u"Portuguese translation"),
contributor(u"Yves Bastide",
"yves.bastide () irisa ! fr",
"GPL",
"Re: The LyX licence",
"m=110959913631678",
"28 February 2005",
u"Bug fixes"),
contributor(u"Heinrich Bauer",
"heinrich.bauer () t-mobile ! de",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910430117798",
"22 February 2005",
u"Fixes for dvi output original version of page selection for printing"),
contributor(u"Georg Baum",
"georg.baum () post ! rwth-aachen ! de",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899912526043",
"21 February 2005",
u"tex2lyx improvements, bug fixes, unicode work"),
contributor(u"Hans Bausewein",
"hans () comerwell ! xs4all ! nl",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111262999400394",
"2 April 2005",
'"case insensitive" and "complete word" search'),
contributor(u"Kornel Benko",
"Kornel.Benko () berlin ! de",
"GPL",
"The LyX licence",
"m=123100818303101",
"3 January 2009",
u"small bugfixes, CMake build system, Slovak translation"),
contributor(u"Jacob Bishop",
"bishop.jacob () gmail ! com",
"GPL",
"Contributions...APA 6 Layout",
"m=135654106502977",
"26 December 2012",
u"APA 6 Layout"),
contributor(u"Punyashloka Biswal",
"punya.biswal () gmail ! com",
"GPL",
"Re: Patch for ticket #6848",
"m=128298296923913",
"28 August 2010",
u"Bug fixes"),
contributor(u"Graham Biswell",
"graham () gbiswell ! com",
"GPL",
"Re: The LyX licence",
"m=111269177728853",
"5 April 2005",
u"Small bugfixes that were very hard to find"),
contributor(u"Lars Gullik Bjønnes",
"larsbj () gullik ! net",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110907078027047",
"22 February 2005",
u"Improvements to user interface (menus and keyhandling) including a configurable toolbar and a few other (not so) minor things, like rewriting most of the LyX kernel. Also previous source maintainer."),
contributor(u"Alfredo Braunstein",
"abraunst () lyx ! org",
"GPL",
"Re: The LyX licence",
"m=110927069513172",
"24 February 2005",
u"A (pseudo) threaded graphics loader queue, lots of fixes, etc."),
contributor(u"Christian Buescher",
"christian.buescher () uni-bielefeld ! de",
"",
"",
"",
"",
u"User-definable keys, lyxserver and more"),
contributor(u"Johnathan Burchill",
"jkerrb () users ! sourceforge ! net",
"GPL",
"Re: The LyX licence",
"m=110908472818670",
"22 February 2005",
u"Ported John Levon's original 'change tracking' code to later versions of LyX. Numerous bug fixes thereof."),
contributor(u"Francesc Burrull i Mestres",
"fburrull () mat ! upc ! es",
"",
"",
"",
"",
u"Catalan translation"),
contributor(u"Sergiu Carpov",
"ssmiler () gmail ! com",
"GPL",
"Re: Bug #5522",
"m=124721248310586",
"10 July 2009",
u"Bug fixes"),
contributor(u"Humberto Nicolás Castejón",
"beconico () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=111833854105023",
"9 June 2005",
u"Spanish translation of the Windows installer"),
contributor(u"Matěj Cepl",
"matej () ceplovi ! cz",
"GPL",
"Re: The LyX licence",
"m=110913090232039",
"22 February 2005",
u"Improvements to the czech keymaps"),
contributor(u"Albert Chin",
"lyx-devel () mlists ! thewrittenword ! com",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111220294831831",
"30 March 2005",
u"Bug fixes"),
contributor(u"Jean-Pierre Chrétien",
"jeanpierre.chretien () free ! fr",
"GPL",
"Re: The LyX licence",
"m=111842518713710",
"10 June 2005",
u"French translations"),
contributor(u"Claudio Coco",
"lacocio () libero ! it",
"GPL",
"Agreement to GNU General Public licence",
"m=113749629514591",
"17 January 2006",
u"Italian translation"),
contributor(u"Yuri Chornoivan",
"yurchor () ukr ! net",
"GPL",
"Permission grant",
"m=121681339315810",
"23 July 2008",
u"Ukrainian translation"),
contributor(u"Tommaso Cucinotta",
"cucinotta () sssup !it",
"GPL",
"Re: View Menu proposal",
"m=119030065212621",
"20 Sep 2007",
u"Advanced search feature"),
contributor(u"Matthias Kalle Dalheimer",
"kalle () kdab ! net",
"GPL",
"Re: The LyX licence",
"m=110908857130107",
"22 February 2005",
u"Qt2 port"),
contributor(u"Ulysse Danglis",
"o2d () freemail ! gr",
"GPL",
"License of el.po",
"m=126738357204586",
"28 February 2010",
u"Greek translations"),
contributor(u"Ewan Davies",
"ewan.davies () googlemail ! com",
"GPL",
"Re: Starting Development",
"m=124248720628359",
"17 May 2009",
u"doxygen to LFUNs.lyx conversion"),
contributor(u"Jack Dessert",
"jackdesert556 () gmail ! com",
"GPL",
"License",
"m=126994985831115",
"30 March 2010",
u"Patches for configure.py"),
contributor(u"Min Ding",
"u5032331 () uds ! anu ! edu ! au",
"GPL",
"Accept GUN GPL",
"m=139864105011133",
"27 April 2014",
u"Chinese (simplified) translations"),
contributor(u"Anders Ekberg",
"anek () chalmers ! se",
"GPL",
"License agreement",
"m=113725822602516",
"14 January 2006",
u"Improvements to the Swedish translation of the Windows Installer"),
contributor(u"Martin Engbers",
"martin.engbers () gmx ! de",
"GPL",
"Re: [patch] Icon replacement",
"m=123877725311464",
"Apr 3 2009",
u"icon loading tweaks"),
contributor(u"Matthias Ettrich",
"ettrich () trolltech ! com",
"GPL",
"Fwd: Re: The LyX licence",
"m=110959638810040",
"28 February 2005",
u"Started the project, implemented the early versions, various improvements including undo/redo, tables, and much, much more"),
contributor(u"Baruch Even",
"baruch () ev-en ! org",
"GPL",
"Re: The LyX licence",
"m=110936007609786",
"25 February 2005",
u"New graphics handling scheme and more"),
contributor(u"Dov Feldstern",
"dfeldstern () fastimap ! com",
"GPL",
"Re: Farsi support re-submission plus a little more",
"m=118064913824836",
"31 May 2007",
u"RTL/BiDi-related fixes"),
contributor(u"Michał Fita",
"michal ! fita () gmail ! com",
"GPL",
"Statement for Polish translation",
"m=121615623122376",
"15 July 2008",
u"Polish translation"),
contributor(u"Ronald Florence",
"ron () 18james ! com",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111262821108510",
"31 March 2005",
u"Maintainer of the OS X port(s)"),
contributor(u"José Ramom Flores d'as Seixas",
"fa2ramon () usc ! es",
"GPL",
"Re: Galician translation",
"m=116136920230072",
"20 October 2006",
u"Galician documentation and localization"),
contributor(u"John Michael Floyd",
"jmf () pwd ! nsw ! gov ! au",
"",
"",
"",
"",
u"Bug fix to the spellchecker"),
contributor(u"Nicola Focci",
"nicola.focci () gmail ! com",
"GPL",
"Permission",
"m=120946605432341",
"29 April 2008",
u"Italian translation of documentations"),
contributor(u"Enrico Forestieri",
"forenr () tlc ! unipr ! it",
"GPL",
"Re: lyxpreview2ppm.py",
"m=111894292115287",
"16 June 2005",
u"Italian translations, many bug fixes and features"),
contributor(u"Eitan Frachtenberg",
"sky8an () gmail ! com",
"GPL",
"Re: [PATCH] BibTeX annotation support",
"m=111130799028250",
"20 March 2005",
u"BibTeX annotation support"),
contributor(u"Darren Freeman",
"dfreeman () ieee ! org",
"GPL",
"Licence",
"m=118612951707590",
"3 August 2007",
u"Improvements to mouse wheel scrolling; many bug reports"),
contributor(u"Max Funk",
"maxkhfunk () gmx ! net",
"GPL",
"GPL",
"m=130659936521230",
"28 May 2011",
u"Bug fixes"),
contributor(u"Edscott Wilson Garcia",
"edscott () xfce ! org",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111219295119021",
"30 March 2005",
u"Bug fixes"),
contributor(u"Ignacio García",
"ignacio.gmorales () gmail ! com",
"GPL",
"Re: es_EmbeddedObjects",
"m=117079592919653",
"06 February 2007",
u"Spanish translation of documentations"),
contributor(u"Michael Gerz",
"michael.gerz () teststep ! org",
"GPL",
"Re: The LyX licence",
"m=110909251110103",
"22 February 2005",
u"Change tracking, German localization, bug fixes"),
contributor(u"Stefano Ghirlanda",
"stefano.ghirlanda () unibo ! it",
"GPL",
"Re: The LyX licence",
"m=110959835300777",
"28 February 2005",
u"Improvements to lyxserver"),
contributor(u"Hartmut Goebel",
"h.goebel () crazy-compilers ! com",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111225910223564",
"30 March 2005",
u"Improvements to Koma-Script classes"),
contributor(u"Riccardo Gori",
"goriccardo () gmail ! com",
"GPL",
"Re: r35561 - lyx-devel/trunk/src/insets",
"m=128626762015975",
"5 Oct 2010",
u"Fixing tabular code"),
contributor(u"Peter Gumm",
"gumm () mathematik ! uni-marburg ! de",
"GPL",
"Re: xy-pic manual",
"m=122469079629276",
"22 October 2008",
u"XY-pic manual"),
contributor(u"İbrahim Güngör",
"h.ibrahim.gungor () gmail ! com",
"GPL",
"Update Turkish Translation",
"m=122583550732670",
"4 Nov 2008",
u"Turkish translation"),
contributor(u"Hartmut Haase",
"hha4491 () web ! de",
"GPL",
"Re: The LyX licence",
"m=110915427710167",
"23 February 2005",
u"German translation of the documentation"),
contributor(u"Helge Hafting",
"helgehaf () aitel ! hist ! no",
"GPL",
"Re: The LyX licence",
"m=110916171925288",
"23 February 2005",
u"Norwegian documentation and localization"),
contributor(u"Richard Heck",
"rgheck () comcast ! net",
"GPL",
"GPL Statement",
"m=117501689204059",
"27 March 2007",
u"Bug fixes, layout modules, BibTeX code, XHTML export. Current stable branch maintainer."),
contributor(u"Bennett Helm",
"bennett.helm () fandm ! edu",
"GPL",
"Re: The LyX licence",
"m=110907988312372",
"22 February 2005",
u"Maintainer of the OSX ports, taking over from Ronald Florence"),
contributor(u"Kevin B. Hendricks",
"kevin.hendricks () sympatico ! ca",
"GPL",
"Fwd: Re: Integration of libmythes and hunspell",
"m=124190107613441",
"9 May 2009",
u"Author of the MyThes thesaurus library"),
contributor(u"Claus Hentschel",
"claus.hentschel () mbau ! fh-hannover ! de",
"",
"",
"",
"",
u"Win32 port of LyX 1.1.x"),
contributor(u"Josh Hieronymous",
"josh.p.hieronymus () gmail ! com",
"GPL",
"licensing my contributions to LyX",
"m=137426932127289",
"19 July 2013",
u"XHTML and ePub Improvements (GSOC Student)"),
contributor(u"Claus Hindsgaul",
"claus_h () image ! dk",
"GPL",
"Re: The LyX licence",
"m=110908607416324",
"22 February 2005",
u"Danish translation"),
contributor(u"Martin Hoffmann",
"hoffimar () gmail ! com",
"GPL",
"Re: #8703: 'new shortcut' box closes if no shortcut",
"m=138105799411067",
"6 October 2013",
u"Dialog usability fix"),
contributor(u"Bernard Hurley",
"bernard () fong-hurley ! org ! uk",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111218682804142",
"30 March 2005",
u"Fixes to literate programming support"),
contributor(u"Marius Ionescu",
"felijohn () gmail ! com",
"GPL",
"permission to licence",
"m=115935958330941",
"27 September 2006",
u"Romanian localization"),
contributor(u"Bernhard Iselborn",
"bernhard.iselborn () sap ! com",
"GPL",
"RE: The LyX licence",
"m=111268306522212",
"5 April 2005",
u"Some minor bug-fixes, FAQ, linuxdoc sgml support"),
contributor(u"Masanori Iwami",
"masa.iwm () gmail ! com",
"GPL",
"Re: [patch] Addition of input method support",
"m=117541512517453",
"1 April 2007",
u"Development of CJK language support"),
contributor(u"Michal Jaegermann",
"michal () ellpspace ! math ! ualberta ! ca",
"GPL",
"Re: The LyX licence",
"m=110909853626643",
"22 February 2005",
u"Fix to a very hard-to-find egcs bug that crashed LyX on alpha architecture"),
contributor(u"Harshula Jayasuriya",
"harshula () gmail ! com",
"GPL",
"Re: Bug in export to DocBook",
"m=116884249725701",
"15 January 2007",
u"Fix docbook generation of nested lists"),
contributor(u"David L. Johnson",
"david.johnson () lehigh ! edu",
"GPL",
"GPL",
"m=110908492016593",
"22 February 2005",
u"Public relations, feedback, documentation and support"),
contributor(u"Robert van der Kamp",
"robnet () wxs ! nl",
"GPL",
"Re: The LyX licence",
"m=111268623330209",
"5 April 2005",
u"Various small things and code simplifying"),
contributor(u"Amir Karger",
"amirkarger () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=110912688520245",
"23 February 2005",
u"Tutorial, reLyX: the LaTeX to LyX translator"),
contributor(u"Zahari Dmitrov Kassabov",
"zaharid () gmail ! com",
"GPL",
"GPL Statement",
"m=135540059615508",
"13 December 2012",
u"Bug fixes"),
contributor(u"Carmen Kauffmann",
"",
"",
"",
"",
"",
u"Original name that is now two characters shorter"),
contributor(u"KDE Artists",
"http://artist.kde.org/",
"",
"",
"",
"",
u"Authors of several of the icons LyX uses"),
contributor(u"Andreas Klostermann",
"andreas_klostermann () web ! de",
"GPL",
"blanket-permission",
"m=111054675600338",
"11 March 2005",
u"Gtk reference insertion dialog"),
contributor(u"Timo Kluck",
"tkluck () gmail ! com",
"GPL",
"license statement",
"m=132334049317495",
"8 December 2011",
u"Dutch translation, icon fixes"),
contributor(u"Kostantino",
"ciclope10 () alice ! it",
"GPL",
"Permission granted",
"m=115513400621782",
"9 August 2006",
u"Italian localization of the interface"),
contributor(u"Scott Kostyshak",
"skostysh@princeton.edu",
"GPL",
"GPL Statement",
"m=133076234031944",
"3 March 2012",
u"Small UI fixes"),
contributor(u"Michael Koziarski",
"koziarski () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=110909592017966",
"22 February 2005",
u"Gnome port"),
contributor(u"Peter Kremer",
"kremer () bme-tel ! ttt ! bme ! hu",
"",
"",
"",
"",
u"Hungarian translation and bind file for menu shortcuts"),
contributor(u'Marcus Kriele',
"mkriele () me ! com",
"GPL",
"License permission",
"m=130384781027177",
"26 April 2011",
u"Fixing various sv* layouts"),
contributor(u'Valeriy Kruchko',
"lerkru () gmail ! com",
"GPL",
"Re: translation in to russian about 68%",
"m=125904983806681",
"24 November 2009",
u"Russian translation of the user interface"),
contributor(u"Peter Kümmel",
"syntheticpp () gmx ! net",
"GPL",
"License",
"m=114968828021007",
"7 June 2006",
u"Qt4 coding, CMake build system, bug fixing, testing, clean ups, and profiling"),
contributor(u"Bernd Kümmerlen",
"bkuemmer () gmx ! net",
"GPL",
"Re: The LyX licence",
"m=110934318821667",
"25 February 2005",
u"Initial version of the koma-script textclasses"),
contributor(u"Felix Kurth",
"felix () fkurth ! de",
"GPL",
"Re: The LyX licence",
"m=110908918916109",
"22 February 2005",
u"Support for textclass g-brief2"),
contributor(u"Rob Lahaye",
"lahaye () snu ! ac ! kr",
"GPL",
"Re: The LyX licence",
"m=110908714131711",
"22 February 2005",
u"Xforms dialogs and GUI related code"),
contributor(u"Jean-Marc Lasgouttes",
"lasgouttes () lyx ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899928510452",
"21 February 2005",
u"configure and Makefile-stuff, many bugfixes and more. Previous stable branch maintainer."),
contributor(u"Victor Lavrenko",
"lyx () lavrenko ! pp ! ru",
"",
"",
"",
"",
u"Russian translation"),
contributor(u"Angus Leeming",
"leeming () lyx ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899671520339",
"21 February 2005",
u"GUI-I-fication of insets and more"),
contributor(u"Edwin Leuven",
"e.leuven () gmail ! com",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899657530749",
"21 February 2005",
u"Tabular and misc UI stuff"),
contributor(u"John Levon",
"levon () movementarian ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899535600562",
"21 February 2005",
u"Qt2 frontend, GUII work, bugfixes"),
contributor(u"Ling Li",
"ling () caltech ! edu",
"GPL",
"Re: LyX 1.4cvs crash on Fedora Core 3",
"m=111204368700246",
"28 March 2005",
u"Added native support for \makebox to mathed. Several bug fixes, both to the source code and to the llncs layout file"),
contributor(u"LibreOffice Team",
"http://www.libreoffice.org/",
"LGPL",
"",
"",
"",
u"Libreoffice Icon Theme"),
contributor(u"Tomasz Łuczak",
"tlu () technodat ! com ! pl",
"GPL",
"Re: [Cvslog] lyx-devel po/: ChangeLog pl.po lib/: CREDITS",
"m=113580483406067",
"28 December 2005",
u"Polish translation and mw* layouts files"),
contributor(u"Hangzai Luo",
"memcache () gmail ! com",
"GPL",
"Re: [patch] tex2lyx crash when full path is given from commandline on Win32",
"m=118326161706627",
"1 July 2007",
u"Bugfixes"),
contributor(u"Mohamed Magdy",
"physicist2010 () gmail ! com>",
"GPL",
"A permission to use my Arabic-Translation for LyX?",
"m=126877445318267",
"16 March 2010",
u"Arabic translation"),
contributor(u"Jari-Matti Mäkelä",
"jmjmak () utu ! fi",
"GPL",
"Re: lyx fi translation update",
"m=142987910907596",
"24 April 2015",
u"Contribution to the Finnish Localization."),
contributor(u"Tetsuya Makimura",
"makimura () ims ! tsukuba.ac ! jp",
"GPL",
"Re: Support request for Japanese without CJK, again (Re: [Fwd: About Japanese edition ...)",
"m=121905769227884",
"18 August 2008",
u"Improvements to the Japanese language support."),
contributor(u"José Matos",
"jamatos () fc ! up ! pt",
"GPL",
"Re: The LyX licence",
"m=110907762926766",
"22 February 2005",
u"linuxdoc sgml support. Previous release manager."),
contributor(u"Roman Maurer",
"roman.maurer () amis ! net",
"GPL",
"Re: The LyX licence",
"m=110952616722307",
"27 February 2005",
u"Slovenian translation coordinator"),
contributor(u"John McCabe-Dansted",
"gmatht () gmail ! com",
"GPL",
"Re: Randomly Generated Crash Reports Useful?",
"m=124515770509946",
"15 June 2009",
u"Keys-test module, bug fixing"),
contributor(u"Caolán McNamara",
"caolanm () redhat ! com",
"GPL",
"Statement for enchant integration",
"m=126389593805123",
"19 January 2010",
u"Support for the enchant spell checking library"),
contributor(u"Tino Meinen",
"a.t.meinen () chello ! nl",
"GPL",
"Re: Licensing your contributions to LyX",
"m=113078277722316",
"31 October 2005",
u"Dutch translation coordinator"),
contributor(u"Siegfried Meunier-Guttin-Cluzel",
"meunier () coria ! fr",
"GPL",
"French translations",
"m=119485816312776",
"12 November 2007",
u"French translations of the documentation"),
contributor(u"Günter Milde",
"milde () users ! berlios ! de",
"GPL",
"copyleft",
"m=122398147620761",
"14 October 2008",
u"Unicode and layout file fixes"),
contributor(u"Dustin J. Mitchell",
"dustin () v ! igoro ! us",
"GPL",
"Fwd: Your patch for LyX",
"m=139255709609015",
"16 February 2014",
u"Fix for csv2lyx"),
contributor(u"Joan Montané",
"jmontane () gmail ! com",
"GPL",
"Re: LyX translation updates needed",
"m=118765575314017",
"21 August 2007",
u"Catalan translations of menus"),
contributor(u"Stéphane Mourey",
"stephane.mourey () impossible-exil ! info",
"GPL",
"Re: gpl",
"m=141381522413781",
"20 October 2014",
u"New lfun server-get-statistics"),
contributor(u"Iñaki Larrañaga Murgoitio",
"dooteo () euskalgnu ! org",
"GPL",
"Re: The LyX licence",
"m=110908606525783",
"22 February 2005",
u"Basque documentation and localization"),
contributor(u"Daniel Naber",
"daniel.naber () t-online ! de",
"GPL",
"Re: The LyX licence",
"m=110911176213928",
"22 February 2005",
u"Improvements to the find&replace dialog"),
contributor(u"Pablo De Napoli",
"pdenapo () mate ! dm ! uba ! ar",
"GPL",
"Re: The LyX licence",
"m=110908904400120",
"22 February 2005",
u"Math panel dialogs"),
contributor(u"Dirk Niggemann",
"dabn100 () cam ! ac ! uk",
"",
"",
"",
"",
u"config. handling enhancements, bugfixes, printer enhancements path mingling"),
contributor(u"Jens Nöckel",
"noeckel () uoregon !edu",
"GPL",
"GPL statement",
"m=128485749516885",
"19 September 2010",
u"Mac OS X enhancements"),
contributor(u"Rob Oakes",
"lyx-devel () oak-tree ! us>",
"GPL",
"Outline Contributions",
"m=124615188102843",
"27 June 2009",
u"Improvements to the outliner."),
contributor(u"Oxygen Team",
"http://www.oxygen-icons.org/",
"LGPL",
"",
"",
"",
u"Oxygen Icon Theme"),
contributor(u"Carl Ollivier-Gooch",
"cfog () mech ! ubc ! ca",
"GPL",
"Re: The LyX licence --- a gentle nudge",
"m=111220662413921",
"30 March 2005",
u"Support for two-column figure (figure*) and table (table*) environments. Fixed minibuffer entry of floats."),
contributor(u"Gilad Orr",
"giladorr () gmail ! com",
"GPL",
"Internationalization-Hebrew",
"m=138314500901798",
"28 October 2013",
u"Hebrew translation."),
contributor(u'Panayotis "PAP" Papasotiriou',
"papasot () upatras ! gr",
"GPL",
"Re: The LyX licence",
"m=110933552929119",
"25 February 2005",
u"Support for kluwer and ijmpd document classes"),
contributor(u'Andrey V. Panov',
"panov () canopus ! iacp ! dvo ! ru",
"GPL",
"Re: Russian translation for LyX",
"m=119853644302866",
"24 December 2007",
u"Russian translation of the user interface"),
contributor(u'Dal Ho Park',
"airdalho () gmail ! com",
"GPL",
"splash.lyx translation (Korean)",
"m=139436383128181",
"9 March 2014",
u"Korean translation"),
contributor(u'Bo Peng',
"ben.bob () gmail ! com",
"GPL",
"Re: Python version of configure script (preview version)",
"m=112681895510418",
"15 September 2005",
u"Conversion of all shell scripts to Python, shortcuts dialog, session, view-source, auto-view, embedding features and scons build system."),
contributor(u'John Perry',
"john.perry () usm ! edu",
"GPL",
"Contributions",
"m=128874016511551",
"2 November 2010",
u"Named theorems module."),
contributor(u"Joacim Persson",
"sp2joap1 () ida ! his ! se",
"",
"",
"",
"",
u"po-file for Swedish, a tool for picking shortcuts, bug reports and hacking atrandom"),
contributor(u"Zvezdan Petkovic",
"zpetkovic () acm ! org",
"GPL",
"Re: The LyX licence",
"m=111276877900892",
"6 April 2005",
u"Better support for serbian and serbocroatian"),
contributor(u"Prannoy Pilligundla",
"prannoy.bits () gmail ! com",
"GPL",
"Contribution license",
"m=139332446711707",
"25 February 2014",
u"Full screen statusbar toggling"),
contributor(u"Geoffroy Piroux",
"piroux () fyma ! ucl ! ac ! be",
"",
"",
"",
"",
u"Mathematica backend for mathed"),
contributor(u"Benjamin Piwowarski",
"benjamin ! piwowarski () lip6 ! fr",
"GPL",
"GPL statement",
"m=133958334631163",
"13 June 2012",
u"AppleScript, integration with bibliography managers"),
contributor(u"Neoklis Polyzotis",
"alkis () soe ! ucsc ! edu",
"GPL",
"Fwd: Re: The LyX licence",
"m=111039215519777",
"9 March 2005",
u"Keymap work"),
contributor(u"André Pönitz",
"andre.poenitz () mathematik ! tu-chemnitz ! de",
"GPL",
"Re: The LyX licence",
"m=111143534724146",
"21 March 2005",
u"mathed rewrite to use STL file io with streams --export and --import command line options"),
contributor(u"Kornelia Pönitz",
"kornelia.poenitz () mathematik ! tu-chemnitz ! de",
"GPL",
"Re: The LyX licence",
"m=111121553103800",
"19 March 2005",
u"heavy mathed testing; provided siamltex document class"),
contributor(u"Bernhard Psaier",
"",
"",
"",
"",
"",
u"Designer of the LyX-Banner"),
contributor(u"Thomas Pundt",
"thomas () pundt ! de",
"GPL",
"Re: The LyX licence",
"m=111277917703326",
"6 April 2005",
u"initial configure script"),
contributor(u"Allan Rae",
"rae () itee ! uq ! edu ! au",
"GPL",
"lyx-1.3.6cvs configure.in patch",
"m=110905169512662",
"21 February 2005",
u"GUI-I architect, LyX PR head, LDN, bug reports/fixes, Itemize Bullet Selection, xforms-0.81 + gcc-2.6.3 compatibility"),
contributor(u"Manoj Rajagopalan",
"rmanoj () umich ! edu",
"GPL",
"Re: patch for case-insensitive reference sorting",
"m=123506398801004",
"Feb 19 2009",
u"reference dialog tweaks"),
contributor(u"Vincent van Ravesteijn",
"V.F.vanRavesteijn () tudelft ! nl",
"GPL",
"RE: crash lyx-1.6rc1",
"m=121786603726114",
"4 August 2008",
u"lots of fixes"),
contributor(u"Adrien Rebollo",
"adrien.rebollo () gmx ! fr",
"GPL",
"Re: The LyX licence",
"m=110918633227093",
"23 February 2005",
u"French translation of the docs; latin 3, 4 and 9 support"),
contributor(u"Garst R. Reese",
"garstr () isn ! net",
"GPL",
"blanket-permission.txt:",
"m=110911480107491",
"22 February 2005",
u"provided hollywood and broadway classes for writing screen scripts and plays"),
contributor(u"Bernhard Reiter",
"ockham () gmx ! net",
"GPL",
"Re: RFC: GThesaurus.C et al.",
"m=112912017013984",
"12 October 2005",
u"Gtk frontend"),
contributor(u"Ruurd Reitsma",
"rareitsma () yahoo ! com",
"GPL",
"Fwd: Re: The LyX licence",
"m=110959179412819",
"28 February 2005",
u"Creator of the native port of LyX to Windows"),
contributor(u"Bernd Rellermeyer",
"bernd.rellermeyer () arcor ! de",
"GPL",
"Re: The LyX licence",
"m=111317142419908",
"10 April 2005",
u"Support for Koma-Script family of classes"),
contributor(u"Michael Ressler",
"mike.ressler () alum ! mit ! edu",
"GPL",
"Re: The LyX licence",
"m=110926603925431",
"24 February 2005",
u"documentation maintainer, AASTeX support"),
contributor(u"Richman Reuven",
"richman.reuven () gmail ! com",
"GPL",
"gpl 2+ ok :)",
"m=130368087529359",
"24 April 2011",
u"Hebrew localisation"),
contributor(u"Christian Ridderström",
"christian.ridderstrom () gmail ! com",
"GPL",
"Re: The LyX licence",
"m=110910933124056",
"22 February 2005",
u"The driving force behind, and maintainer of, the LyX wiki wiki.\nSwedish translation of the Windows installer"),
contributor(u"Julien Rioux",
"jrioux () lyx ! org",
"GPL",
"Re: #6361: configure.py ignores packages required by user-defined modules",
"m=125986505101722",
"3 December 2009",
u"Bug fixes, lilypond and revtex support, citation modules."),
contributor(u"Bernhard Roider",
"bernhard.roider () sonnenkinder ! org",
"GPL",
"Re: [PATCH] immediatly display saved filename in tab",
"m=117009852211669",
"29 January 2007",
u"Various bug fixes"),
contributor(u"Jim Rotmalm",
"jim.rotmalm () gmail ! com",
"GPL",
"License for my contributions.",
"m=129582352017079",
"24 January 2011",
u"Swedish translation"),
contributor(u"Paul A. Rubin",
"rubin () msu ! edu",
"GPL",
"Re: [patch] reworked AMS classes (bugs 4087, 4223)",
"m=119072721929143",
"25 September 2007",
u"Major rework of the AMS classes"),
contributor(u"Guy Rutenberg",
"guyrutenberg () gmail ! com",
"GPL",
"Re: [PATCH] Strange Behaivor: xdg-open left as zombie",
"m=137365070116624",
"12 July 2013",
u"System call fixes"),
contributor(u"Ran Rutenberg",
"ran.rutenberg () gmail ! com",
"GPL",
"The New Hebrew Translation of the Introduction",
"m=116172457024967",
"24 October 2006",
u"Hebrew translation"),
contributor(u'Pavel Sanda',
"ps () ucw ! cz",
"GPL",
"Re: czech translation",
"m=115522417204086",
"10 August 2006",
u"Czech translation, added various features, lfuns docs/review. Current release manager."),
contributor(u"Szõke Sándor",
"alex () lyx ! hu",
"GPL",
"Contribution to LyX",
"m=113449408830523",
"13 December 2005",
u"Hungarian translation"),
contributor(u"Janus Sandsgaard",
"janus () janus ! dk",
"GPL",
"Re: The LyX licence",
"m=111839355328045",
"10 June 2005",
u"Danish translation of the Windows installer"),
contributor(u"Stefan Schimanski",
"sts () 1stein ! org",
"GPL",
"GPL statement",
"m=117541472517274",
"1 April 2007",
u"font improvements, bug fixes"),
contributor(u"Horst Schirmeier",
"horst () schirmeier ! com",
"GPL",
"Re: [patch] reordering capabilities for GuiBibtex",
"m=120009631506298",
"12 January 2008",
u"small fixes"),
contributor(u"Hubert Schreier",
"schreier () sc ! edu",
"",
"",
"",
"",
u"spellchecker (ispell frontend); beautiful document-manager based on the simple table of contents (removed)"),
contributor(u"Ivan Schreter",
"schreter () kdk ! sk",
"",
"",
"",
"",
u"international support and kbmaps for slovak, czech, german, ... wysiwyg figure"),
contributor(u"Eulogio Serradilla Rodríguez",
"eulogio.sr () terra ! es",
"GPL",
"Re: The LyX licence",
"m=110915313018478",
"23 February 2005",
u"contribution to the spanish internationalization"),
contributor(u"Nickolay Shashkin",
"mecareful () gmail ! com",
"GPL",
"GPL statement",
"m=134026564400578",
"21 June 2012",
u"bugfixes"),
contributor(u"Miyata Shigeru",
"miyata () kusm ! kyoto-u ! ac ! jp",
"",
"",
"",
"",
u"OS/2 port"),
contributor(u"Alejandro Aguilar Sierra",
"asierra () servidor ! unam ! mx",
"GPL",
"Fwd: Re: The LyX licence",
"m=110918647812358",
"23 February 2005",
u"Fast parsing with lyxlex, pseudoactions, mathpanel, Math Editor, combox and more"),
contributor(u"Lior Silberman",
"lior () princeton ! edu",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910432427450",
"22 February 2005",
u"Tweaks to various XForms dialogs. Implemented the --userdir command line option, enabling LyX to run with multiple configurations for different users. Implemented the original code to make colours for different inset properties configurable."),
contributor(u"Waluyo Adi Siswanto",
"was.uthm () gmail ! com",
"GPL",
"Licence contributions",
"m=123595530114385",
"Mar 2 2009",
u"Indonesian translation"),
contributor(u"Giovanni Sora",
"g.sora () tiscali ! it",
"GPL",
"License ia.po",
"m=129968786830788",
"9 March 2011",
u"Interlingua translation"),
contributor(u"Andre Spiegel",
"spiegel () gnu ! org",
"GPL",
"Re: The LyX licence",
"m=110908534728505",
"22 February 2005",
u"vertical spaces"),
contributor(u"Jürgen Spitzmüller",
"juergen.sp () t-online ! de",
"GPL",
"Re: The LyX licence",
"m=110907530127164",
"22 February 2005",
u"Qt frontend, bugfixes. Former stable branch maintainer."),
contributor(u"John Spray",
"jcs116 () york ! ac ! uk",
"GPL",
"Re: The LyX licence",
"m=110909415400170",
"22 February 2005",
u"Gtk frontend"),
contributor(u"Ben Stanley",
"ben.stanley () exemail ! com ! au",
"GPL",
"Re: The LyX licence",
"m=110923981012056",
"24 February 2005",
u"fix bugs with error insets placement"),
contributor(u"Uwe Stöhr",
"uwestoehr () web ! de",
"GPL",
"Re: The LyX licence",
"m=111833345825278",
"9 June 2005",
u"Current documentation maintainer, Windows installer, bug fixes"),
contributor(u"David Suárez de Lis",
"excalibor () iname ! com",
"",
"",
"",
"",
u"maintaining es.po since v1.0.0 and other small i18n issues small fixes"),
contributor(u"Peter Sütterlin",
"p.suetterlin () astro ! uu ! nl",
"GPL",
"Re: The LyX licence",
"m=110915086404972",
"23 February 2005",
u"aapaper support, german documentation translation, bug reports"),
contributor(u"Stefan Swerk",
"stefan_lyx () swerk ! priv ! at",
"GPL",
"Contribution license",
"m=142644092217864",
"15 March 2015",
u"europasscv support"),
contributor(u"Kayvan Aghaiepour Sylvan",
"kayvan () sylvan ! com",
"GPL",
"Re: The LyX licence",
"m=110908748407087",
"22 February 2005",
u"noweb2lyx and reLyX integration of noweb files. added Import->Noweb and key bindings to menus"),
contributor(u"TaoWang (mgc)",
"mgcgogo () gmail ! com",
"GPL",
"Re: Chinese Version of Tutorial.lyx",
"m=125785021631705",
"10 November 2009",
u"translation of documentation and user interface to Simplified Chinese"),
contributor(u'Sergey Tereschenko',
"serg.partizan () gmail ! com",
"GPL",
"my contributions",
"m=126065880524135",
"12 December 2009",
u"Russian translation of the user interface"),
contributor(u"Reuben Thomas",
"rrt () sc3d ! org",
"GPL",
"Re: The LyX licence",
"m=110911018202083",
"22 February 2005",
u"ENTCS document class and lots of useful bug reports"),
contributor(u"Dekel Tsur",
"dtsur () cs ! ucsd ! edu",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910437519054",
"22 February 2005",
u"Hebrew support, general file converter, many many bug fixes"),
contributor(u"Matthias Urlichs",
"smurf () smurf ! noris ! de",
"GPL",
"Re: The LyX licence",
"m=110912859312991",
"22 February 2005",
u"bug reports and small fixes"),
contributor(u"H. Turgut Uyar",
"uyar () ce ! itu ! edu ! tr",
"GPL",
"Re: The LyX licence",
"m=110917146423892",
"23 February 2005",
u"turkish kbmaps"),
contributor(u"Mostafa Vahedi",
"vahedi58 () yahoo ! com",
"GPL",
"Re: improving Arabic-like language support",
"m=117769964731842",
"27 April 2007",
u"Farsi support and translations"),
contributor(u"Marko Vendelin",
"markov () ioc ! ee",
"GPL",
"Re: The LyX licence",
"m=110909439912594",
"22 February 2005",
u"Gnome frontend"),
contributor(u"Joost Verburg",
"joostverburg () users ! sourceforge ! net",
"GPL",
"Re: New Windows Installer",
"m=114957884100403",
"6 June 2006",
u"A new and improved Windows installer"),
contributor(u"Martin Vermeer",
"martin.vermeer () hut ! fi",
"GPL",
"Re: The LyX licence",
"m=110907543900367",
"22 February 2005",
u"support for optional argument in sections/captions svjour/svjog, egs and llncs document classes. Lot of bug hunting (and fixing!)"),
contributor(u"Jürgen Vigna",
"jug () lyx ! org",
"GPL",
"Re: Licensing of tex2lyx (and perhaps LyX itself?)",
"m=110899839906262",
"21 February 2005",
u"complete rewrite of the tabular, text inset; fax and plain text export support; iletter and dinbrief support"),
contributor(u"Pauli Virtanen",
"pauli.virtanen () hut ! fi",
"GPL",
"Re: The LyX licence",
"m=110918662408397",
"23 February 2005",
u"Finnish localization of the interface"),
contributor(u"Ramanathan Vishnampet",
"rvishnampet () gmail ! com",
"GPL",
"Re: [Patch] -fobjc-exceptions for compiling linkback sources with g++ on Mac",
"",
"17 February 2014",
u"Support for g++ on 4.8 Mac"),
contributor(u"Herbert Voß",
"herbert.voss () alumni ! tu-berlin ! de",
"GPL",
"Fwd: Re: The LyX licence",
"m=110910439013234",
"22 February 2005",
u"The one who answers all questions on lyx-users mailing list and maintains www.lyx.org/help/ Big insetgraphics and bibliography cleanups"),
contributor(u"Andreas Vox",
"avox () arcor ! de",
"GPL",
"Re: The LyX licence",
"m=110907443424620",
"22 February 2005",
u"Bug fixes, feedback on LyX behaviour on the Mac, and improvements to DocBook export"),
contributor(u"venom00 (c/o J-M Lasgouttes)",
"venom00 () arcadiaclub ! com",
"GPL",
"I love GPL, what about you?",
"m=129098897014967",
"29 November 2010",
u"Bug fixing"),
contributor(u"Jason Waskiewicz",
"jason.waskiewicz () sendit ! nodak ! edu",
"GPL",
"[Fwd: Re: tufte-book layout for LyX]",
"m=125659179116032",
"26 October 2009",
u"Layouts for the Tufte document classes"),
contributor(u"John P. Weiss",
"jpweiss () frontiernet ! net",
"Artistic",
"Re: Small problem with BlanketPermission on the new site.",
"m=123238170812776",
"18 January 2009",
u"Bugreports and suggestions, slides class support, editor of the documentationproject, 6/96-9/97. Tutorial chapter 1"),
contributor(u"Edmar Wienskoski",
"edmar () freescale ! com",
"GPL",
"Re: The LyX licence",
"m=111280236425781",
"6 April 2005",
u"literate programming support; various bug fixes"),
contributor(u"Mate Wierdl",
"mw () wierdlmpc ! msci ! memphis ! edu",
"",
"",
"",
"",
u"Maintainer of the @lists.lyx.org mailing-lists"),
contributor(u"Serge Winitzki",
"winitzki () erebus ! phys ! cwru ! edu",
"",
"",
"",
"",
u"updates to the Scientific Word bindings"),
contributor(u"Stephan Witt",
"stephan.witt () beusen ! de",
"GPL",
"Re: The LyX licence",
"m=110909031824764",
"22 February 2005",
u"support for CVS revision control, native spell checker interface for Mac OS"),
contributor(u"Russ Woodroofe",
"paranoia () math ! cornell ! edu",
"GPL",
"Re: AMS math question environment",
"m=123091448326090",
"1 January 2009",
u"question layout environment"),
contributor(u"Mingyi Wu",
"mingi.eo97g () g2 ! nctu ! edu ! tw",
"GPL",
"newcomer",
"m=139389779502232",
"3 March 2014",
u"Chinese (traditional) translations"),
contributor(u"Roy Xia",
"royxia062 () gmail ! com",
"GPL",
"GPL Statement",
"m=139434481324689",
"9 March 2014",
u"Bugfixing"),
contributor(u"Yihui Xie",
"xie () yihui ! name",
"GPL",
"GPL Statement",
"m=130523685427995",
"3 June 2011",
u"Bugfixing, Chinese translation, Sweave support"),
contributor(u"Huang Ying",
"huangy () sh ! necas ! nec ! com ! cn",
"GPL",
"Re: The LyX licence",
"m=110956742604611",
"28 February 2005",
u"Gtk frontend"),
contributor(u"Koji Yokota",
"yokota () res ! otaru-uc ! ac ! jp",
"GPL",
"Re: [PATCH] po/ja.po: Japanese message file for 1.5.0 (merged from",
"m=118033214223720",
"28 May 2007",
u"Japanese translation"),
contributor(u"Abdelrazak Younes",
"younes.a () free ! fr",
"GPL",
"Re: [Patch] RFQ: ParagraphList Rewrite",
"m=113993670602439",
"14 February 2006",
u"Qt4 frontend, editing optimisations"),
contributor(u"Henner Zeller",
"henner.zeller () freiheit ! com",
"GPL",
"Re: The LyX licence",
"m=110911591218107",
"22 February 2005",
u"rotation of wysiwyg figures"),
contributor(u"Xiaokun Zhu",
"xiaokun () aero ! gla ! ac ! uk",
"",
"",
"",
"",
u"bug reports and small fixes") ]
if __name__ == "__main__":
    # Script entry point: pass the command line and the contributor list
    # (defined above) to main(), which renders the credits output.
    main(sys.argv, contributors)
| codeparrot/github-code-clean |
########################################################################
# $HeadURL$
# File : InstallTools.py
# Author : Ricardo Graciani
########################################################################
"""
Collection of Tools for installation of DIRAC components:
MySQL, DBs, Services, Agents
It only makes use of defaults in LocalInstallation Section in dirac.cfg
The Following Options are used::
/DIRAC/Setup: Setup to be used for any operation
/LocalInstallation/InstanceName: Name of the Instance for the current Setup (default /DIRAC/Setup)
/LocalInstallation/LogLevel: LogLevel set in "run" script for all components installed
/LocalInstallation/RootPath: Used instead of rootPath in "run" script if defined (if links are used to named versions)
/LocalInstallation/InstancePath: Location where runit and startup directories are created (default rootPath)
/LocalInstallation/UseVersionsDir: DIRAC is installed under versions/<Versioned Directory> with a link from pro
(This option overwrites RootPath and InstancePath)
/LocalInstallation/Host: Used when building the URL to be published for the installed service (default: socket.getfqdn())
/LocalInstallation/RunitDir: Location where runit directory is created (default InstancePath/runit)
/LocalInstallation/StartupDir: Location where startup directory is created (default InstancePath/startup)
/LocalInstallation/MySQLDir: Location where mysql databases are created (default InstancePath/mysql)
/LocalInstallation/Database/User: (default Dirac)
/LocalInstallation/Database/Password: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/RootPwd: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/Host: (must be set for SystemAdministrator Service to work)
/LocalInstallation/Database/MySQLSmallMem: Configure a MySQL with small memory requirements for testing purposes innodb_buffer_pool_size=200MB
/LocalInstallation/Database/MySQLLargeMem: Configure a MySQL with high memory requirements for production purposes innodb_buffer_pool_size=10000MB
The setupSite method (used by the dirac-setup-site command) will use the following info::
/LocalInstallation/Systems: List of Systems to be defined for this instance in the CS (default: Configuration, Framework)
/LocalInstallation/Databases: List of Databases to be installed and configured
/LocalInstallation/Services: List of System/ServiceName to be setup
/LocalInstallation/Agents: List of System/AgentName to be setup
/LocalInstallation/WebPortal: Boolean to setup the Web Portal (default no)
/LocalInstallation/ConfigurationMaster: Boolean, requires Configuration/Server to be given in the list of Services (default: no)
/LocalInstallation/PrivateConfiguration: Boolean, requires Configuration/Server to be given in the list of Services (default: no)
If a Master Configuration Server is being installed the following Options can be used::
/LocalInstallation/ConfigurationName: Name of the Configuration (default: Setup )
/LocalInstallation/AdminUserName: Name of the Admin user (default: None )
/LocalInstallation/AdminUserDN: DN of the Admin user certificate (default: None )
/LocalInstallation/AdminUserEmail: Email of the Admin user (default: None )
/LocalInstallation/AdminGroupName: Name of the Admin group (default: dirac_admin )
/LocalInstallation/HostDN: DN of the host certificate (default: None )
/LocalInstallation/VirtualOrganization: Name of the main Virtual Organization (default: None)
"""
__RCSID__ = "$Id$"
#
import os, re, glob, stat, time, shutil, socket

# Default permission bits (rwxr-xr-x) applied to files generated by the
# installation tools below.
gDefaultPerms = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH

import DIRAC
from DIRAC import rootPath
from DIRAC import gLogger
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Utilities.CFG import CFG
from DIRAC.Core.Utilities.Version import getVersion
from DIRAC.Core.Utilities.Subprocess import systemCall
from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.ConfigurationSystem.Client.Helpers import cfgPath, cfgPathToList, cfgInstallPath, \
                                                    cfgInstallSection, ResourcesDefaults, CSGlobals
from DIRAC.Core.Security.Properties import ALARMS_MANAGEMENT, SERVICE_ADMINISTRATOR, \
                                           CS_ADMINISTRATOR, JOB_ADMINISTRATOR, \
                                           FULL_DELEGATION, PROXY_MANAGEMENT, OPERATOR, \
                                           NORMAL_USER, TRUSTED_HOST
from DIRAC.ConfigurationSystem.Client import PathFinder
from DIRAC.Core.Base.private.ModuleLoader import ModuleLoader
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Base.ExecutorModule import ExecutorModule
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities.PrettyPrint import printTable
from DIRAC.Core.Utilities.Platform import getPlatformString

# On command line tools this can be set to True to abort after the first error.
exitOnError = False

# First some global defaults
gLogger.debug( 'DIRAC Root Path =', rootPath )
def loadDiracCfg( verbose = False ):
  """
  (Re)load the installation defaults from etc/dirac.cfg.

  All values are published as module-level globals so the rest of this module
  can use them directly. Called once at import time (see bottom of the
  defaults section) and again whenever dirac.cfg is updated.

  :param bool verbose: if True, report via gLogger.notice every value that is
                       taken from the local configuration
  """
  global localCfg, cfgFile, setup, instance, logLevel, linkedRootPath, host
  global basePath, instancePath, runitDir, startDir
  global db, mysqlDir, mysqlDbDir, mysqlLogDir, mysqlMyOrg, mysqlMyCnf, mysqlStartupScript
  global mysqlRootPwd, mysqlUser, mysqlPassword, mysqlHost, mysqlMode
  global mysqlSmallMem, mysqlLargeMem, mysqlPort, mysqlRootUser

  from DIRAC.Core.Utilities.Network import getFQDN

  localCfg = CFG()
  cfgFile = os.path.join( rootPath, 'etc', 'dirac.cfg' )
  try:
    localCfg.loadFromFile( cfgFile )
  except Exception:
    # A missing/unreadable dirac.cfg is not fatal: expected on a fresh host
    gLogger.always( "Can't load ", cfgFile )
    gLogger.always( "Might be OK if setting up the site" )

  setup = localCfg.getOption( cfgPath( 'DIRAC', 'Setup' ), '' )
  instance = localCfg.getOption( cfgInstallPath( 'InstanceName' ), setup )
  logLevel = localCfg.getOption( cfgInstallPath( 'LogLevel' ), 'INFO' )
  linkedRootPath = localCfg.getOption( cfgInstallPath( 'RootPath' ), rootPath )
  useVersionsDir = localCfg.getOption( cfgInstallPath( 'UseVersionsDir' ), False )
  host = localCfg.getOption( cfgInstallPath( 'Host' ), getFQDN() )

  basePath = os.path.dirname( rootPath )
  instancePath = localCfg.getOption( cfgInstallPath( 'InstancePath' ), rootPath )
  if useVersionsDir:
    # This option takes precedence
    instancePath = os.path.dirname( os.path.dirname( rootPath ) )
    linkedRootPath = os.path.join( instancePath, 'pro' )
  if verbose:
    gLogger.notice( 'Using Instance Base Dir at', instancePath )

  runitDir = os.path.join( instancePath, 'runit' )
  runitDir = localCfg.getOption( cfgInstallPath( 'RunitDir' ), runitDir )
  if verbose:
    gLogger.notice( 'Using Runit Dir at', runitDir )

  startDir = os.path.join( instancePath, 'startup' )
  startDir = localCfg.getOption( cfgInstallPath( 'StartupDir' ), startDir )
  if verbose:
    gLogger.notice( 'Using Startup Dir at', startDir )

  # Now some MySQL default values
  db = {}

  mysqlDir = os.path.join( instancePath, 'mysql' )
  mysqlDir = localCfg.getOption( cfgInstallPath( 'MySQLDir' ), mysqlDir )
  if verbose:
    gLogger.notice( 'Using MySQL Dir at', mysqlDir )

  mysqlDbDir = os.path.join( mysqlDir, 'db' )
  mysqlLogDir = os.path.join( mysqlDir, 'log' )
  mysqlMyOrg = os.path.join( rootPath, 'mysql', 'etc', 'my.cnf' )
  mysqlMyCnf = os.path.join( mysqlDir, '.my.cnf' )

  mysqlStartupScript = os.path.join( rootPath, 'mysql', 'share', 'mysql', 'mysql.server' )

  # NOTE: the current global value is passed as the default, so a password
  # already known is preserved when dirac.cfg does not provide one
  mysqlRootPwd = localCfg.getOption( cfgInstallPath( 'Database', 'RootPwd' ), mysqlRootPwd )
  if verbose and mysqlRootPwd:
    gLogger.notice( 'Reading Root MySQL Password from local configuration' )

  mysqlUser = localCfg.getOption( cfgInstallPath( 'Database', 'User' ), '' )
  if mysqlUser:
    if verbose:
      gLogger.notice( 'Reading MySQL User from local configuration' )
  else:
    mysqlUser = 'Dirac'

  mysqlPassword = localCfg.getOption( cfgInstallPath( 'Database', 'Password' ), mysqlPassword )
  if verbose and mysqlPassword:
    gLogger.notice( 'Reading %s MySQL Password from local configuration ' % mysqlUser )

  mysqlHost = localCfg.getOption( cfgInstallPath( 'Database', 'Host' ), '' )
  if mysqlHost:
    if verbose:
      gLogger.notice( 'Using MySQL Host from local configuration', mysqlHost )
  else:
    # if it is not defined use the same as for dirac services
    mysqlHost = host

  mysqlPort = localCfg.getOption( cfgInstallPath( 'Database', 'Port' ), 0 )
  if mysqlPort:
    if verbose:
      gLogger.notice( 'Using MySQL Port from local configuration ', mysqlPort )
  else:
    # if it is not defined use the standard MySQL default port
    mysqlPort = 3306

  mysqlRootUser = localCfg.getOption( cfgInstallPath( 'Database', 'RootUser' ), '' )
  if mysqlRootUser:
    if verbose:
      gLogger.notice( 'Using MySQL root user from local configuration ', mysqlRootUser )
  else:
    # if it is not defined use root
    mysqlRootUser = 'root'

  mysqlMode = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLMode' ), '' )
  if verbose and mysqlMode:
    gLogger.notice( 'Configuring MySQL server as %s' % mysqlMode )

  mysqlSmallMem = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLSmallMem' ), False )
  if verbose and mysqlSmallMem:
    gLogger.notice( 'Configuring MySQL server for Low Memory usage' )

  mysqlLargeMem = localCfg.getOption( cfgInstallPath( 'Database', 'MySQLLargeMem' ), False )
  if verbose and mysqlLargeMem:
    gLogger.notice( 'Configuring MySQL server for Large Memory usage' )
# FIXME: we probably need a better way to do this
# Module-level defaults for every value published by loadDiracCfg().
# They must be assigned BEFORE the loadDiracCfg() call below, because
# loadDiracCfg() uses some of them (mysqlRootPwd, mysqlPassword, ...)
# as the fallback defaults when dirac.cfg does not provide a value.
mysqlRootPwd = ''
mysqlPassword = ''
mysqlMode = ''
localCfg = None
cfgFile = ''
setup = ''
instance = ''
logLevel = ''
linkedRootPath = ''
host = ''
basePath = ''
instancePath = ''
runitDir = ''
startDir = ''
db = {}
mysqlDir = ''
mysqlDbDir = ''
mysqlLogDir = ''
mysqlMyOrg = ''
mysqlMyCnf = ''
mysqlStartupScript = ''
mysqlUser = ''
mysqlHost = ''
mysqlPort = ''
mysqlRootUser = ''
mysqlSmallMem = ''
mysqlLargeMem = ''

# Populate the globals from etc/dirac.cfg at import time
loadDiracCfg()
def getInfo( extensions ):
  """
  Return the DIRAC version information together with the current Setup.

  NOTE(review): the *extensions* argument is currently unused; kept for
  interface compatibility with callers.
  """
  versionResult = getVersion()
  if not versionResult['OK']:
    return versionResult
  infoDict = versionResult['Value']
  # Fall back to a placeholder when no Setup is configured locally
  infoDict['Setup'] = setup if setup else 'Unknown'
  return S_OK( infoDict )
def getExtensions():
  """
  Return the list of installed extensions.

  Extensions are the ``*DIRAC`` package directories found under rootPath;
  the base 'DIRAC' package itself is excluded from the result.
  """
  initFiles = glob.glob( os.path.join( rootPath, '*DIRAC', '__init__.py' ) )
  extensions = [ os.path.basename( os.path.dirname( initFile ) ) for initFile in initFiles ]
  try:
    # The core package must always be present; drop it from the list
    extensions.remove( 'DIRAC' )
  except Exception:
    error = 'DIRAC is not properly installed'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  return S_OK( extensions )
def _addCfgToDiracCfg( cfg, verbose = False ):
  """
  Merge *cfg* into the existing dirac.cfg file and reload the module defaults.

  :param cfg: CFG object to merge in
  :param bool verbose: passed through to loadDiracCfg()
  :return: result of CFG.writeToFile on the merged configuration
  """
  global localCfg
  # An empty/unloaded local configuration is simply replaced by *cfg*
  mergedCfg = localCfg.mergeWith( cfg ) if str( localCfg ) else cfg
  writeResult = mergedCfg.writeToFile( cfgFile )
  if not writeResult:
    return writeResult
  # Re-read the defaults so the module globals reflect the new file
  loadDiracCfg( verbose )
  return writeResult
def _addCfgToCS( cfg ):
  """
  Merge *cfg* into the central Configuration Service.

  Downloads the current CS data, merges *cfg* in and commits; stops and
  returns the first failing step's result.
  """
  csAPI = CSAPI()
  result = csAPI.downloadCSData()
  if result['OK']:
    result = csAPI.mergeFromCFG( cfg )
  if result['OK']:
    result = csAPI.commit()
  return result
def _addCfgToLocalCS( cfg ):
  """
  Merge *cfg* into the local Configuration Service file.

  The target file is etc/<ConfigurationName>.cfg; the configuration name is
  taken from /DIRAC/Configuration/Name and is required.
  """
  csName = localCfg.getOption( cfgPath( 'DIRAC', 'Configuration', 'Name' ) , '' )
  if not csName:
    error = 'Missing %s' % cfgPath( 'DIRAC', 'Configuration', 'Name' )
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )

  csFile = os.path.join( rootPath, 'etc', '%s.cfg' % csName )
  csCfg = CFG()
  if os.path.exists( csFile ):
    csCfg.loadFromFile( csFile )
  # An empty/unloaded CS configuration is simply replaced by *cfg*
  mergedCfg = csCfg.mergeWith( cfg ) if str( csCfg ) else cfg
  return mergedCfg.writeToFile( csFile )
def _getCentralCfg( installCfg ):
  """
  Create the skeleton of central Cfg for an initial Master CS

  Builds a CFG combining sections copied from installCfg with Registry,
  Operations and Website entries derived from the local Installation
  section options (admin user/group, host DN, setup).
  """
  # First copy over from installation cfg
  centralCfg = CFG()
  # DIRAC/Extensions
  extensions = localCfg.getOption( cfgInstallPath( 'Extensions' ), [] )
  # The Web portal extension is handled separately and must not be listed
  while 'Web' in list( extensions ):
    extensions.remove( 'Web' )
  centralCfg.createNewSection( 'DIRAC', '' )
  if extensions:
    centralCfg['DIRAC'].addKey( 'Extensions', ','.join( extensions ), '' )
  vo = localCfg.getOption( cfgInstallPath( 'VirtualOrganization' ), '' )
  if vo:
    centralCfg['DIRAC'].addKey( 'VirtualOrganization', vo, '' )
  # Copy whole sections verbatim from the installation cfg when present
  for section in [ 'Systems', 'Resources',
                   'Resources/Sites', 'Resources/Domains',
                   'Operations', 'Website', 'Registry' ]:
    if installCfg.isSection( section ):
      centralCfg.createNewSection( section, contents = installCfg[section] )
  # Now try to add things from the Installation section
  # Registry
  adminUserName = localCfg.getOption( cfgInstallPath( 'AdminUserName' ), '' )
  adminUserDN = localCfg.getOption( cfgInstallPath( 'AdminUserDN' ), '' )
  adminUserEmail = localCfg.getOption( cfgInstallPath( 'AdminUserEmail' ), '' )
  adminGroupName = localCfg.getOption( cfgInstallPath( 'AdminGroupName' ), 'dirac_admin' )
  hostDN = localCfg.getOption( cfgInstallPath( 'HostDN' ), '' )
  defaultGroupName = 'user'
  # Property sets granted to the admin group, default group and this host
  adminGroupProperties = [ ALARMS_MANAGEMENT, SERVICE_ADMINISTRATOR,
                           CS_ADMINISTRATOR, JOB_ADMINISTRATOR,
                           FULL_DELEGATION, PROXY_MANAGEMENT, OPERATOR ]
  defaultGroupProperties = [ NORMAL_USER ]
  defaultHostProperties = [ TRUSTED_HOST, CS_ADMINISTRATOR,
                            JOB_ADMINISTRATOR, FULL_DELEGATION,
                            PROXY_MANAGEMENT, OPERATOR ]
  # Make sure the basic Registry layout exists
  for section in ( cfgPath( 'Registry' ),
                   cfgPath( 'Registry', 'Users' ),
                   cfgPath( 'Registry', 'Groups' ),
                   cfgPath( 'Registry', 'Hosts' ) ):
    if not centralCfg.isSection( section ):
      centralCfg.createNewSection( section )
  if adminUserName:
    if not ( adminUserDN and adminUserEmail ):
      gLogger.error( 'AdminUserName is given but DN or Mail is missing it will not be configured' )
    else:
      for section in [ cfgPath( 'Registry', 'Users', adminUserName ),
                       cfgPath( 'Registry', 'Groups', defaultGroupName ),
                       cfgPath( 'Registry', 'Groups', adminGroupName ) ]:
        if not centralCfg.isSection( section ):
          centralCfg.createNewSection( section )
      # Delete-then-add guarantees the keys hold the freshly configured values
      if centralCfg['Registry'].existsKey( 'DefaultGroup' ):
        centralCfg['Registry'].deleteKey( 'DefaultGroup' )
      centralCfg['Registry'].addKey( 'DefaultGroup', defaultGroupName, '' )
      if centralCfg['Registry']['Users'][adminUserName].existsKey( 'DN' ):
        centralCfg['Registry']['Users'][adminUserName].deleteKey( 'DN' )
      centralCfg['Registry']['Users'][adminUserName].addKey( 'DN', adminUserDN, '' )
      if centralCfg['Registry']['Users'][adminUserName].existsKey( 'Email' ):
        centralCfg['Registry']['Users'][adminUserName].deleteKey( 'Email' )
      centralCfg['Registry']['Users'][adminUserName].addKey( 'Email' , adminUserEmail, '' )
      # Add Admin User to Admin Group and default group
      for group in [adminGroupName, defaultGroupName]:
        if not centralCfg['Registry']['Groups'][group].isOption( 'Users' ):
          centralCfg['Registry']['Groups'][group].addKey( 'Users', '', '' )
        users = centralCfg['Registry']['Groups'][group].getOption( 'Users', [] )
        if adminUserName not in users:
          centralCfg['Registry']['Groups'][group].appendToOption( 'Users', ', %s' % adminUserName )
        if not centralCfg['Registry']['Groups'][group].isOption( 'Properties' ):
          centralCfg['Registry']['Groups'][group].addKey( 'Properties', '', '' )
      # Append any missing properties to the admin and default groups
      properties = centralCfg['Registry']['Groups'][adminGroupName].getOption( 'Properties', [] )
      for prop in adminGroupProperties:
        if prop not in properties:
          properties.append( prop )
          centralCfg['Registry']['Groups'][adminGroupName].appendToOption( 'Properties', ', %s' % prop )
      properties = centralCfg['Registry']['Groups'][defaultGroupName].getOption( 'Properties', [] )
      for prop in defaultGroupProperties:
        if prop not in properties:
          properties.append( prop )
          centralCfg['Registry']['Groups'][defaultGroupName].appendToOption( 'Properties', ', %s' % prop )
  # Add the master Host description
  if hostDN:
    hostSection = cfgPath( 'Registry', 'Hosts', host )
    if not centralCfg.isSection( hostSection ):
      centralCfg.createNewSection( hostSection )
    if centralCfg['Registry']['Hosts'][host].existsKey( 'DN' ):
      centralCfg['Registry']['Hosts'][host].deleteKey( 'DN' )
    centralCfg['Registry']['Hosts'][host].addKey( 'DN', hostDN, '' )
    if not centralCfg['Registry']['Hosts'][host].isOption( 'Properties' ):
      centralCfg['Registry']['Hosts'][host].addKey( 'Properties', '', '' )
    properties = centralCfg['Registry']['Hosts'][host].getOption( 'Properties', [] )
    for prop in defaultHostProperties:
      if prop not in properties:
        properties.append( prop )
        centralCfg['Registry']['Hosts'][host].appendToOption( 'Properties', ', %s' % prop )
  # Operations
  if adminUserEmail:
    # Route Production and Logging notification mails to the admin
    operationsCfg = __getCfg( cfgPath( 'Operations', 'Defaults', 'EMail' ), 'Production', adminUserEmail )
    centralCfg = centralCfg.mergeWith( operationsCfg )
    operationsCfg = __getCfg( cfgPath( 'Operations', 'Defaults', 'EMail' ), 'Logging', adminUserEmail )
    centralCfg = centralCfg.mergeWith( operationsCfg )
  # Website
  websiteCfg = __getCfg( cfgPath( 'Website', 'Authorization',
                                  'systems', 'configuration' ), 'Default', 'all' )
  websiteCfg['Website'].addKey( 'DefaultGroups',
                                ', '.join( ['visitor', defaultGroupName, adminGroupName] ), '' )
  websiteCfg['Website'].addKey( 'DefaultSetup', setup, '' )
  # Restrict sensitive configuration-management pages to CS administrators
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showHistory' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'commitConfiguration' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showCurrentDiff' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'showDiff' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'rollbackToVersion' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].addKey( 'manageRemoteConfig' ,
                                                                             'CSAdministrator' , '' )
  websiteCfg['Website']['Authorization']['systems']['configuration'].appendToOption( 'manageRemoteConfig' ,
                                                                                     ', ServiceAdministrator' )
  centralCfg = centralCfg.mergeWith( websiteCfg )
  return centralCfg
def __getCfg( section, option = '', value = '' ):
  """
  Build a new CFG containing the given section path and, optionally,
  one option set inside it.

  :param section: cfg path of the section to create
  :param option: optional option name to set in the deepest section
  :param value: value for the option
  :return: the CFG object, or None when no valid section was given
  """
  if not section:
    return None
  cfg = CFG()
  pathSoFar = []
  # Create every intermediate section along the path
  for element in cfgPathToList( section ):
    if not element:
      continue
    pathSoFar.append( element )
    cfg.createNewSection( cfgPath( *pathSoFar ) )
  if not pathSoFar:
    return None
  if option and value:
    pathSoFar.append( option )
    cfg.setOption( cfgPath( *pathSoFar ), value )
  return cfg
def addOptionToDiracCfg( option, value ):
  """
  Add Option to dirac.cfg

  :param option: full cfg path of the option
  :param value: value to set
  :return: S_OK on success, S_ERROR otherwise
  """
  pathElements = cfgPathToList( option )
  sectionPath = cfgPath( *pathElements[:-1] )
  optionCfg = __getCfg( sectionPath, pathElements[-1], value )
  if not optionCfg:
    return S_ERROR( 'Wrong option: %s = %s' % ( option, value ) )
  if not _addCfgToDiracCfg( optionCfg ):
    return S_ERROR( 'Could not merge %s=%s with local configuration' % ( option, value ) )
  return S_OK()
def addDefaultOptionsToCS( gConfig, componentType, systemName,
                           component, extensions, mySetup = setup,
                           specialOptions = None, overwrite = False,
                           addDefaultOptions = True ):
  """
  Add the section with the component options to the CS

  :param gConfig: configuration client, or None to resolve from the local dirac.cfg
  :param componentType: 'service', 'agent' or 'executor'
  :param systemName: system name, with or without the 'System' suffix
  :param component: component name
  :param extensions: extension names to consider for ConfigTemplate lookup
  :param mySetup: setup in which the system instance is resolved
  :param specialOptions: extra options to set in the component section
  :param overwrite: when True, write the options even if they already exist
  :param addDefaultOptions: when True, load defaults from ConfigTemplate.cfg
  """
  # Avoid a shared mutable default argument
  if specialOptions is None:
    specialOptions = {}
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', mySetup, system )
  if gConfig:
    compInstance = gConfig.getValue( instanceOption, '' )
  else:
    compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
  sectionName = "Agents"
  if componentType == 'service':
    sectionName = "Services"
  elif componentType == 'executor':
    sectionName = "Executors"
  # Check if the component CS options exist
  addOptions = True
  componentSection = cfgPath( 'Systems', system, compInstance, sectionName, component )
  if not overwrite:
    if gConfig:
      result = gConfig.getOptions( componentSection )
      if result['OK']:
        addOptions = False
  if not addOptions:
    return S_OK( 'Component options already exist' )
  # Add the component options now
  result = getComponentCfg( componentType, system, component, compInstance, extensions, specialOptions, addDefaultOptions )
  if not result['OK']:
    return result
  compCfg = result['Value']
  gLogger.notice( 'Adding to CS', '%s %s/%s' % ( componentType, system, component ) )
  resultAddToCFG = _addCfgToCS( compCfg )
  if componentType == 'executor':
    # Is it a container ?
    execList = compCfg.getOption( '%s/Load' % componentSection, [] )
    for element in execList:
      # Bugfix: propagate mySetup to the recursion (previously the
      # module-level 'setup' was used, ignoring a caller-supplied setup)
      result = addDefaultOptionsToCS( gConfig, componentType, systemName, element, extensions, mySetup,
                                      {}, overwrite )
      resultAddToCFG.setdefault( 'Modules', {} )
      resultAddToCFG['Modules'][element] = result['OK']
  return resultAddToCFG
def addDefaultOptionsToComponentCfg( componentType, systemName, component, extensions ):
  """
  Write the default component options into the local etc/<system>_<component>.cfg file.
  """
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', setup, system )
  compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
  # Build the component configuration from the templates
  cfgResult = getComponentCfg( componentType, system, component, compInstance, extensions )
  if not cfgResult['OK']:
    return cfgResult
  localCompCfgFile = os.path.join( rootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
  return cfgResult['Value'].writeToFile( localCompCfgFile )
def addCfgToComponentCfg( componentType, systemName, component, cfg ):
  """
  Add some extra configuration to the local component cfg file.

  :param cfg: CFG to place under the component section; a falsy cfg is a no-op
  """
  sectionName = 'Agents' if componentType == 'agent' else 'Services'
  if not cfg:
    return S_OK()
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', setup, system )
  compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
  compCfgFile = os.path.join( rootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
  compCfg = CFG()
  if os.path.exists( compCfgFile ):
    # NOTE(review): the loaded compCfg is never merged into newCfg below,
    # so existing file content is not preserved — confirm this is intended
    compCfg.loadFromFile( compCfgFile )
  sectionPath = cfgPath( 'Systems', system, compInstance, sectionName )
  newCfg = __getCfg( sectionPath )
  newCfg.createNewSection( cfgPath( sectionPath, component ), 'Added by InstallTools', cfg )
  if newCfg.writeToFile( compCfgFile ):
    return S_OK( compCfgFile )
  error = 'Can not write %s' % compCfgFile
  gLogger.error( error )
  return S_ERROR( error )
def getComponentCfg( componentType, system, component, compInstance, extensions,
                     specialOptions = {}, addDefaultOptions = True ):
  """
  Get the CFG object of the component configuration

  Builds the Systems/<system>/<instance>/<sectionName>/<component> section,
  optionally seeded from the ConfigTemplate.cfg of DIRAC and its extensions,
  plus the service URL when a Port is defined.
  """
  sectionName = 'Services'
  if componentType == 'agent':
    sectionName = 'Agents'
  if componentType == 'executor':
    sectionName = 'Executors'
  # The template section may be named after a Module different from the component
  componentModule = component
  if "Module" in specialOptions:
    componentModule = specialOptions['Module']
  compCfg = CFG()
  if addDefaultOptions:
    # Extensions are searched before core DIRAC; later merges do not override
    # what was already collected (loadCfg is merged *into* compCfg)
    extensionsDIRAC = [ x + 'DIRAC' for x in extensions ] + extensions
    for ext in extensionsDIRAC + ['DIRAC']:
      cfgTemplatePath = os.path.join( rootPath, ext, '%sSystem' % system, 'ConfigTemplate.cfg' )
      if os.path.exists( cfgTemplatePath ):
        gLogger.notice( 'Loading configuration template', cfgTemplatePath )
        # Look up the component in this template
        loadCfg = CFG()
        loadCfg.loadFromFile( cfgTemplatePath )
        compCfg = loadCfg.mergeWith( compCfg )
    compPath = cfgPath( sectionName, componentModule )
    if not compCfg.isSection( compPath ):
      error = 'Can not find %s in template' % compPath
      gLogger.error( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    compCfg = compCfg[sectionName][componentModule]
    # Delete Dependencies section if any
    compCfg.deleteKey( 'Dependencies' )
  sectionPath = cfgPath( 'Systems', system, compInstance, sectionName )
  cfg = __getCfg( sectionPath )
  cfg.createNewSection( cfgPath( sectionPath, component ), '', compCfg )
  # specialOptions override whatever came from the template
  for option, value in specialOptions.items():
    cfg.setOption( cfgPath( sectionPath, component, option ), value )
  # Add the service URL
  if componentType == "service":
    port = compCfg.getOption( 'Port' , 0 )
    if port and host:
      urlsPath = cfgPath( 'Systems', system, compInstance, 'URLs' )
      cfg.createNewSection( urlsPath )
      cfg.setOption( cfgPath( urlsPath, component ),
                     'dips://%s:%d/%s/%s' % ( host, port, system, component ) )
  return S_OK( cfg )
def addDatabaseOptionsToCS( gConfig, systemName, dbName, mySetup = setup, overwrite = False ):
  """
  Add the section with the database options to the CS

  :param gConfig: configuration client, or None to resolve from the local dirac.cfg
  :param systemName: system name, with or without the 'System' suffix
  :param dbName: database name
  :param mySetup: setup in which the system instance is resolved
  :param overwrite: when True, write the options even if they already exist
  """
  system = systemName.replace( 'System', '' )
  instanceOption = cfgPath( 'DIRAC', 'Setups', mySetup, system )
  if gConfig:
    compInstance = gConfig.getValue( instanceOption, '' )
  else:
    compInstance = localCfg.getOption( instanceOption, '' )
  if not compInstance:
    return S_ERROR( '%s not defined in %s' % ( instanceOption, cfgFile ) )
  # Check if the database CS options already exist
  addOptions = True
  if not overwrite:
    databasePath = cfgPath( 'Systems', system, compInstance, 'Databases', dbName )
    # Bugfix: guard against gConfig being None (the function accepts None
    # above, and sibling addDefaultOptionsToCS guards the same call)
    if gConfig:
      result = gConfig.getOptions( databasePath )
      if result['OK']:
        addOptions = False
  if not addOptions:
    return S_OK( 'Database options already exist' )
  # Add the component options now
  result = getDatabaseCfg( system, dbName, compInstance )
  if not result['OK']:
    return result
  databaseCfg = result['Value']
  gLogger.notice( 'Adding to CS', '%s/%s' % ( system, dbName ) )
  return _addCfgToCS( databaseCfg )
def getDatabaseCfg( system, dbName, compInstance ):
  """
  Build the CFG object describing one database: DBName, Host and Port
  under Systems/<system>/<instance>/Databases/<dbName>.
  """
  dbSection = cfgPath( 'Systems', system, compInstance, 'Databases', dbName )
  dbCfg = __getCfg( dbSection, 'DBName', dbName )
  dbCfg.setOption( cfgPath( dbSection, 'Host' ), mysqlHost )
  dbCfg.setOption( cfgPath( dbSection, 'Port' ), mysqlPort )
  return S_OK( dbCfg )
def addSystemInstance( systemName, compInstance, mySetup = setup, myCfg = False ):
  """
  Add a new system instance to the CS, and optionally to dirac.cfg.
  """
  system = systemName.replace( 'System', '' )
  gLogger.notice( 'Adding %s system as %s instance for %s setup to dirac.cfg and CS' % ( system, compInstance, mySetup ) )
  instanceCfg = __getCfg( cfgPath( 'DIRAC', 'Setups', mySetup ), system, compInstance )
  if myCfg and not _addCfgToDiracCfg( instanceCfg ):
    return S_ERROR( 'Failed to add system instance to dirac.cfg' )
  return _addCfgToCS( instanceCfg )
def printStartupStatus( rDict ):
  """
  Print in nice format the return dictionary from getStartupComponentStatus
  (also returned by runsvctrlComponent)

  :param rDict: { componentName : { 'RunitStatus', 'Timeup', 'PID' } }
  """
  fields = ['Name', 'Runit', 'Uptime', 'PID']
  records = []
  try:
    for comp in rDict:
      records.append( [comp,
                       rDict[comp]['RunitStatus'],
                       rDict[comp]['Timeup'],
                       str( rDict[comp]['PID'] )] )
    printTable( fields, records )
  # 'except ... as' instead of Python-2-only 'except ..., x' syntax
  except Exception as x:
    print( "Exception while gathering data for printing: %s" % str( x ) )
  return S_OK()
def printOverallStatus( rDict ):
  """
  Print in nice format the return dictionary from getOverallStatus

  :param rDict: { compType : { system : { component : info dict } } }
  """
  fields = ['System', 'Name', 'Type', 'Setup', 'Installed', 'Runit', 'Uptime', 'PID']
  records = []
  try:
    for compType in rDict:
      for system in rDict[compType]:
        for component in rDict[compType][system]:
          info = rDict[compType][system][component]
          # compType is plural ('Services'); drop the trailing 's' for display
          record = [ system, component, compType.lower()[:-1] ]
          record.append( 'SetUp' if info['Setup'] else 'NotSetUp' )
          record.append( 'Installed' if info['Installed'] else 'NotInstalled' )
          record.append( str( info['RunitStatus'] ) )
          record.append( str( info['Timeup'] ) )
          record.append( str( info['PID'] ) )
          records.append( record )
    printTable( fields, records )
  # 'except ... as' instead of Python-2-only 'except ..., x' syntax
  except Exception as x:
    print( "Exception while gathering data for printing: %s" % str( x ) )
  return S_OK()
def getAvailableSystems( extensions ):
  """
  Get the list of all systems (in all given extensions) locally available
  """
  systems = []
  for extension in extensions:
    systemPattern = os.path.join( DIRAC.rootPath, extension, '*System' )
    for systemDir in glob.glob( systemPattern ):
      systemName = os.path.basename( systemDir ).split( 'System' )[0]
      if systemName not in systems:
        systems.append( systemName )
  return systems
def getSoftwareComponents( extensions ):
  """
  Get the list of all the components ( services and agents ) for which the software
  is installed on the system

  :param extensions: extension names (without the 'DIRAC' suffix)
  :return: S_OK( { 'Services' : {...}, 'Agents' : {...}, 'Executors' : {...} } )
           where each value maps system name to a list of component names
  """

  def _readSource( path ):
    # 'with' guarantees the handle is closed even if read() fails
    with open( path, 'r' ) as srcFile:
      return srcFile.read()

  # The Gateway does not need a handler
  services = { 'Framework' : ['Gateway'] }
  agents = {}
  executors = {}
  for extension in ['DIRAC'] + [ x + 'DIRAC' for x in extensions ]:
    if not os.path.exists( os.path.join( rootPath, extension ) ):
      # Not all the extensions are necessarily installed in this instance
      continue
    systemList = os.listdir( os.path.join( rootPath, extension ) )
    for sys in systemList:
      system = sys.replace( 'System', '' )
      try:
        agentDir = os.path.join( rootPath, extension, sys, 'Agent' )
        for agent in os.listdir( agentDir ):
          if agent.endswith( '.py' ):
            body = _readSource( os.path.join( agentDir, agent ) )
            if 'AgentModule' in body or 'OptimizerModule' in body:
              # setdefault replaces the Python-2-only has_key idiom
              agents.setdefault( system, [] ).append( agent.replace( '.py', '' ) )
      except OSError:
        pass
      try:
        serviceDir = os.path.join( rootPath, extension, sys, 'Service' )
        for service in os.listdir( serviceDir ):
          if 'Handler' in service and service.endswith( '.py' ):
            if system == 'Configuration' and service == 'ConfigurationHandler.py':
              # Align the mismatching Handler name with the Service name
              service = 'ServerHandler.py'
            services.setdefault( system, [] ).append( service.replace( '.py', '' ).replace( 'Handler', '' ) )
      except OSError:
        pass
      try:
        executorDir = os.path.join( rootPath, extension, sys, 'Executor' )
        for executor in os.listdir( executorDir ):
          if executor.endswith( '.py' ):
            body = _readSource( os.path.join( executorDir, executor ) )
            if 'OptimizerExecutor' in body:
              executors.setdefault( system, [] ).append( executor.replace( '.py', '' ) )
      except OSError:
        pass
  resultDict = {}
  resultDict['Services'] = services
  resultDict['Agents'] = agents
  resultDict['Executors'] = executors
  return S_OK( resultDict )
def getInstalledComponents():
  """
  Get the list of all the components ( services and agents )
  installed on the system in the runit directory

  :return: S_OK( { 'Services' : {...}, 'Agents' : {...}, 'Executors' : {...} } )
  """
  services = {}
  agents = {}
  executors = {}
  # Map the marker string found in the runit 'run' script to the target dict;
  # order matters: the first marker found wins (as the original elif chain did)
  markers = [ ( 'dirac-service', services ),
              ( 'dirac-agent', agents ),
              ( 'dirac-executor', executors ) ]
  systemList = os.listdir( runitDir )
  for system in systemList:
    systemDir = os.path.join( runitDir, system )
    components = os.listdir( systemDir )
    for component in components:
      try:
        runFile = os.path.join( systemDir, component, 'run' )
        # 'with' guarantees the handle is closed even if read() fails
        with open( runFile, 'r' ) as rfile:
          body = rfile.read()
        for marker, compDict in markers:
          if marker in body:
            # setdefault replaces the Python-2-only has_key idiom
            compDict.setdefault( system, [] ).append( component )
            break
      except IOError:
        pass
  resultDict = {}
  resultDict['Services'] = services
  resultDict['Agents'] = agents
  resultDict['Executors'] = executors
  return S_OK( resultDict )
def getSetupComponents():
  """
  Get the list of all the components ( services and agents )
  set up for running with runsvdir in startup directory

  :return: S_OK( { 'Services' : {...}, 'Agents' : {...}, 'Executors' : {...} } )
  """
  services = {}
  agents = {}
  executors = {}
  if not os.path.isdir( startDir ):
    # Typo fix: message previously read "does not exit"
    return S_ERROR( 'Startup Directory does not exist: %s' % startDir )
  # Map the marker string found in the runit 'run' script to the target dict;
  # order matters: the first marker found wins (as the original elif chain did)
  markers = [ ( 'dirac-service', services ),
              ( 'dirac-agent', agents ),
              ( 'dirac-executor', executors ) ]
  componentList = os.listdir( startDir )
  for component in componentList:
    try:
      runFile = os.path.join( startDir, component, 'run' )
      # 'with' guarantees the handle is closed even if read() fails
      with open( runFile, 'r' ) as rfile:
        body = rfile.read()
      for marker, compDict in markers:
        if marker in body:
          # Startup dirs are named <system>_<component>
          system, compName = component.split( '_' )[0:2]
          # setdefault replaces the Python-2-only has_key idiom
          compDict.setdefault( system, [] ).append( compName )
          break
    except IOError:
      pass
  resultDict = {}
  resultDict['Services'] = services
  resultDict['Agents'] = agents
  resultDict['Executors'] = executors
  return S_OK( resultDict )
def getStartupComponentStatus( componentTupleList ):
  """
  Get the list of all the components ( services and agents )
  set up for running with runsvdir in startup directory

  Runs 'runsvstat' on the matching startup directories and parses its output.

  :param componentTupleList: list of (system, component) tuples; empty list
                             means all components under startDir
  :return: S_OK( { componentName : { 'Timeup', 'PID', 'RunitStatus' } } )
  """
  try:
    if componentTupleList:
      cList = []
      for componentTuple in componentTupleList:
        # Startup dirs are named <system>_<component>; tuples may contain globs
        cList.extend( glob.glob( os.path.join( startDir, '_'.join( componentTuple ) ) ) )
    else:
      cList = glob.glob( os.path.join( startDir, '*' ) )
  except Exception:
    error = 'Failed to parse List of Components'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  result = execCommand( 0, ['runsvstat'] + cList )
  if not result['OK']:
    return result
  # result['Value'][1] presumably holds the command stdout — confirm execCommand
  output = result['Value'][1].strip().split( '\n' )
  componentDict = {}
  for line in output:
    if not line:
      continue
    # Each runsvstat line has the form '<dir>: <status text>'
    cname, routput = line.split( ':' )
    cname = cname.replace( '%s/' % startDir, '' )
    run = False
    reResult = re.search( '^ run', routput )
    if reResult:
      run = True
    down = False
    reResult = re.search( '^ down', routput )
    if reResult:
      down = True
    reResult = re.search( '([0-9]+) seconds', routput )
    timeup = 0
    if reResult:
      # NOTE: kept as a string when matched, 0 (int) otherwise
      timeup = reResult.group( 1 )
    reResult = re.search( 'pid ([0-9]+)', routput )
    pid = 0
    if reResult:
      pid = reResult.group( 1 )
    # runsv supervision is considered alive if either state was reported
    runsv = "Not running"
    if run or down:
      runsv = "Running"
    reResult = re.search( 'runsv not running', routput )
    if reResult:
      runsv = "Not running"
    runDict = {}
    runDict['Timeup'] = timeup
    runDict['PID'] = pid
    runDict['RunitStatus'] = "Unknown"
    if run:
      runDict['RunitStatus'] = "Run"
    if down:
      runDict['RunitStatus'] = "Down"
    if runsv == "Not running":
      runDict['RunitStatus'] = "NoRunitControl"
    componentDict[cname] = runDict
  return S_OK( componentDict )
def getComponentModule( gConfig, system, component, compType ):
  """
  Return the software module implementing the given component.

  Falls back to the component name itself when no system instance or no
  explicit Module option is defined in the CS.
  """
  currentSetup = CSGlobals.getSetup()
  instance = gConfig.getValue( cfgPath( 'DIRAC', 'Setups', currentSetup, system ), '' )
  if not instance:
    return S_OK( component )
  modulePath = cfgPath( 'Systems', system, instance, compType, component, 'Module' )
  return S_OK( gConfig.getValue( modulePath, '' ) or component )
def getOverallStatus( extensions ):
  """
  Get the list of all the components ( services and agents )
  set up for running with runsvdir in startup directory

  Combines the software/setup/installed/runit views into one dictionary:
  { compType : { system : { component : { 'Setup', 'Installed',
                                          'RunitStatus', 'Timeup', 'PID' } } } }
  """
  result = getSoftwareComponents( extensions )
  if not result['OK']:
    return result
  softDict = result['Value']
  result = getSetupComponents()
  if not result['OK']:
    return result
  setupDict = result['Value']
  result = getInstalledComponents()
  if not result['OK']:
    return result
  installedDict = result['Value']
  result = getStartupComponentStatus( [] )
  if not result['OK']:
    return result
  runitDict = result['Value']
  # Collect the info now
  resultDict = {'Services':{}, 'Agents':{}, 'Executors':{} }
  for compType in ['Services', 'Agents', 'Executors' ]:
    # NOTE(review): the literal 'Services' here looks like it should be
    # compType; harmless while getSoftwareComponents always returns all
    # three keys — confirm before changing
    if softDict.has_key( 'Services' ):
      for system in softDict[compType]:
        resultDict[compType][system] = {}
        for component in softDict[compType][system]:
          if system == 'Configuration' and component == 'Configuration':
            # Fix to avoid missing CS due to different between Service name and Handler name
            component = 'Server'
          resultDict[compType][system][component] = {}
          resultDict[compType][system][component]['Setup'] = False
          resultDict[compType][system][component]['Installed'] = False
          resultDict[compType][system][component]['RunitStatus'] = 'Unknown'
          resultDict[compType][system][component]['Timeup'] = 0
          resultDict[compType][system][component]['PID'] = 0
          # TODO: why do we need a try here?
          # (the lookups below raise KeyError when the system/component is
          # absent from the corresponding dictionary)
          try:
            if component in setupDict[compType][system]:
              resultDict[compType][system][component]['Setup'] = True
          except Exception:
            pass
          try:
            if component in installedDict[compType][system]:
              resultDict[compType][system][component]['Installed'] = True
          except Exception:
            pass
          try:
            compDir = system + '_' + component
            if runitDict.has_key( compDir ):
              resultDict[compType][system][component]['RunitStatus'] = runitDict[compDir]['RunitStatus']
              resultDict[compType][system][component]['Timeup'] = runitDict[compDir]['Timeup']
              resultDict[compType][system][component]['PID'] = runitDict[compDir]['PID']
          except Exception:
            #print str(x)
            pass
    # Installed components can be not the same as in the software list
    # NOTE(review): same 'Services' literal question as above
    if installedDict.has_key( 'Services' ):
      for system in installedDict[compType]:
        for component in installedDict[compType][system]:
          if compType in resultDict:
            if system in resultDict[compType]:
              if component in resultDict[compType][system]:
                # Already reported from the software scan above
                continue
          resultDict[compType][system][component] = {}
          resultDict[compType][system][component]['Setup'] = False
          resultDict[compType][system][component]['Installed'] = True
          resultDict[compType][system][component]['RunitStatus'] = 'Unknown'
          resultDict[compType][system][component]['Timeup'] = 0
          resultDict[compType][system][component]['PID'] = 0
          # TODO: why do we need a try here?
          try:
            if component in setupDict[compType][system]:
              resultDict[compType][system][component]['Setup'] = True
          except Exception:
            pass
          try:
            compDir = system + '_' + component
            if runitDict.has_key( compDir ):
              resultDict[compType][system][component]['RunitStatus'] = runitDict[compDir]['RunitStatus']
              resultDict[compType][system][component]['Timeup'] = runitDict[compDir]['Timeup']
              resultDict[compType][system][component]['PID'] = runitDict[compDir]['PID']
          except Exception:
            #print str(x)
            pass
  return S_OK( resultDict )
def checkComponentModule( componentType, system, module ):
  """
  Check existence of the given module
  and if it inherits from the proper class
  """
  # Dispatch table: component type -> ModuleLoader factory
  loaderFactories = {
    'agent' : lambda: ModuleLoader( "Agent", PathFinder.getAgentSection, AgentModule ),
    'service' : lambda: ModuleLoader( "Service", PathFinder.getServiceSection,
                                      RequestHandler, moduleSuffix = "Handler" ),
    'executor' : lambda: ModuleLoader( "Executor", PathFinder.getExecutorSection, ExecutorModule ),
  }
  factory = loaderFactories.get( componentType )
  if factory is None:
    return S_ERROR( 'Unknown component type %s' % componentType )
  return factory().loadModule( "%s/%s" % ( system, module ) )
def checkComponentSoftware( componentType, system, component, extensions ):
  """
  Check that the software for the given component is installed locally.

  :param componentType: 'service', 'agent' or 'executor'
  :param system: system name
  :param component: component name
  :param extensions: extension names to scan
  """
  result = getSoftwareComponents( extensions )
  if not result['OK']:
    return result
  if componentType == 'service':
    softDict = result['Value']['Services']
  elif componentType == 'agent':
    softDict = result['Value']['Agents']
  elif componentType == 'executor':
    # Bugfix: executors were missing here and reported as unknown type,
    # although getSoftwareComponents returns an 'Executors' section
    softDict = result['Value']['Executors']
  else:
    return S_ERROR( 'Unknown component type %s' % componentType )
  if system in softDict and component in softDict[system]:
    return S_OK()
  return S_ERROR( 'Unknown Component %s/%s' % ( system, component ) )
def runsvctrlComponent( system, component, mode ):
  """
  Execute runsvctrl and check status of the specified component

  :param mode: single-letter runsvctrl command (u, d, t, ...)
  """
  if mode not in ( 'u', 'd', 'o', 'p', 'c', 'h', 'a', 'i', 'q', '1', '2', 't', 'k', 'x', 'e' ):
    return S_ERROR( 'Unknown runsvctrl mode "%s"' % mode )
  startCompDirs = glob.glob( os.path.join( startDir, '%s_%s' % ( system, component ) ) )
  # Make sure that the Configuration server restarts first and the SystemAdmin restarts last
  for compDir in list( startCompDirs ):
    if "Framework_SystemAdministrator" in compDir:
      startCompDirs.append( startCompDirs.pop( startCompDirs.index( compDir ) ) )
    if "Configuration_Server" in compDir:
      startCompDirs.insert( 0, startCompDirs.pop( startCompDirs.index( compDir ) ) )
  for compDir in startCompDirs:
    result = execCommand( 0, ['runsvctrl', mode, compDir] )
    if not result['OK']:
      return result
    # Give runsv a moment before controlling the next component
    time.sleep( 1 )
  # Check the runsv status
  if system == '*' or component == '*':
    time.sleep( 5 )
  # Final check
  result = getStartupComponentStatus( [( system, component )] )
  if not result['OK']:
    return S_ERROR( 'Failed to start the component' )
  return result
def getLogTail( system, component, length = 100 ):
  """
  Get the tail of the component log file

  :param system: system name pattern (glob)
  :param component: component name pattern (glob)
  :param length: maximum number of trailing lines to return
  :return: S_OK( { componentName : tail text } )
  """
  retDict = {}
  for startCompDir in glob.glob( os.path.join( startDir, '%s_%s' % ( system, component ) ) ):
    compName = os.path.basename( startCompDir )
    logFileName = os.path.join( startCompDir, 'log', 'current' )
    if not os.path.exists( logFileName ):
      retDict[compName] = 'No log file found'
    else:
      # 'with' guarantees the file is closed even if reading fails
      with open( logFileName, 'r' ) as logFile:
        lines = [ line.strip() for line in logFile.readlines() ]
      # A negative-index slice already handles len( lines ) < length,
      # so no separate branch is needed
      retDict[compName] = '\n'.join( lines[-length:] )
  return S_OK( retDict )
def setupSite( scriptCfg, cfg = None ):
"""
Setup a new site using the options defined
"""
# First we need to find out what needs to be installed
# by default use dirac.cfg, but if a cfg is given use it and
# merge it into the dirac.cfg
diracCfg = CFG()
installCfg = None
if cfg:
try:
installCfg = CFG()
installCfg.loadFromFile( cfg )
for section in ['DIRAC', 'LocalSite', cfgInstallSection]:
if installCfg.isSection( section ):
diracCfg.createNewSection( section, contents = installCfg[section] )
if instancePath != basePath:
if not diracCfg.isSection( 'LocalSite' ):
diracCfg.createNewSection( 'LocalSite' )
diracCfg.setOption( cfgPath( 'LocalSite', 'InstancePath' ), instancePath )
_addCfgToDiracCfg( diracCfg, verbose = True )
except Exception:
error = 'Failed to load %s' % cfg
gLogger.exception( error )
if exitOnError:
DIRAC.exit( -1 )
return S_ERROR( error )
# Now get the necessary info from localCfg
setupSystems = localCfg.getOption( cfgInstallPath( 'Systems' ), ['Configuration', 'Framework'] )
installMySQLFlag = localCfg.getOption( cfgInstallPath( 'InstallMySQL' ), False )
setupDatabases = localCfg.getOption( cfgInstallPath( 'Databases' ), [] )
setupServices = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Services' ), [] ) ]
setupAgents = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Agents' ), [] ) ]
setupExecutors = [ k.split( '/' ) for k in localCfg.getOption( cfgInstallPath( 'Executors' ), [] ) ]
setupWeb = localCfg.getOption( cfgInstallPath( 'WebPortal' ), False )
setupWebApp = localCfg.getOption( cfgInstallPath( 'WebApp' ), False )
setupConfigurationMaster = localCfg.getOption( cfgInstallPath( 'ConfigurationMaster' ), False )
setupPrivateConfiguration = localCfg.getOption( cfgInstallPath( 'PrivateConfiguration' ), False )
setupConfigurationName = localCfg.getOption( cfgInstallPath( 'ConfigurationName' ), setup )
setupAddConfiguration = localCfg.getOption( cfgInstallPath( 'AddConfiguration' ), True )
for serviceTuple in setupServices:
error = ''
if len( serviceTuple ) != 2:
error = 'Wrong service specification: system/service'
# elif serviceTuple[0] not in setupSystems:
# error = 'System %s not available' % serviceTuple[0]
if error:
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
serviceSysInstance = serviceTuple[0]
if not serviceSysInstance in setupSystems:
setupSystems.append( serviceSysInstance )
for agentTuple in setupAgents:
error = ''
if len( agentTuple ) != 2:
error = 'Wrong agent specification: system/agent'
# elif agentTuple[0] not in setupSystems:
# error = 'System %s not available' % agentTuple[0]
if error:
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
agentSysInstance = agentTuple[0]
if not agentSysInstance in setupSystems:
setupSystems.append( agentSysInstance )
for executorTuple in setupExecutors:
error = ''
if len( executorTuple ) != 2:
error = 'Wrong executor specification: system/executor'
if error:
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
executorSysInstance = executorTuple[0]
if not executorSysInstance in setupSystems:
setupSystems.append( executorSysInstance )
# And to find out the available extensions
result = getExtensions()
if not result['OK']:
return result
extensions = [ k.replace( 'DIRAC', '' ) for k in result['Value']]
# Make sure the necessary directories are there
if basePath != instancePath:
if not os.path.exists( instancePath ):
try:
os.makedirs( instancePath )
except Exception:
error = 'Can not create directory for instance %s' % instancePath
if exitOnError:
gLogger.exception( error )
DIRAC.exit( -1 )
return S_ERROR( error )
if not os.path.isdir( instancePath ):
error = 'Instance directory %s is not valid' % instancePath
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
instanceEtcDir = os.path.join( instancePath, 'etc' )
etcDir = os.path.dirname( cfgFile )
if not os.path.exists( instanceEtcDir ):
try:
os.symlink( etcDir, instanceEtcDir )
except Exception:
error = 'Can not create link to configuration %s' % instanceEtcDir
if exitOnError:
gLogger.exception( error )
DIRAC.exit( -1 )
return S_ERROR( error )
if os.path.realpath( instanceEtcDir ) != os.path.realpath( etcDir ):
error = 'Instance etc (%s) is not the same as DIRAC etc (%s)' % ( instanceEtcDir, etcDir )
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
# if any server or agent needs to be install we need the startup directory and runsvdir running
if setupServices or setupAgents or setupExecutors or setupWeb:
if not os.path.exists( startDir ):
try:
os.makedirs( startDir )
except Exception:
error = 'Can not create %s' % startDir
if exitOnError:
gLogger.exception( error )
DIRAC.exit( -1 )
return S_ERROR( error )
# And need to make sure runsvdir is running
result = execCommand( 0, ['ps', '-ef'] )
if not result['OK']:
if exitOnError:
gLogger.error( 'Failed to verify runsvdir running', result['Message'] )
DIRAC.exit( -1 )
return S_ERROR( result['Message'] )
processList = result['Value'][1].split( '\n' )
cmd = 'runsvdir %s' % startDir
cmdFound = False
for process in processList:
if process.find( cmd ) != -1:
cmdFound = True
if not cmdFound:
gLogger.notice( 'Starting runsvdir ...' )
os.system( "runsvdir %s 'log: DIRAC runsv' &" % startDir )
if ['Configuration', 'Server'] in setupServices and setupConfigurationMaster:
# This server hosts the Master of the CS
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
gLogger.notice( 'Installing Master Configuration Server' )
cfg = __getCfg( cfgPath( 'DIRAC', 'Setups', setup ), 'Configuration', instance )
_addCfgToDiracCfg( cfg )
cfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Master' , 'yes' )
cfg.setOption( cfgPath( 'DIRAC', 'Configuration', 'Name' ) , setupConfigurationName )
serversCfgPath = cfgPath( 'DIRAC', 'Configuration', 'Servers' )
if not localCfg.getOption( serversCfgPath , [] ):
serverUrl = 'dips://%s:9135/Configuration/Server' % host
cfg.setOption( serversCfgPath, serverUrl )
gConfigurationData.setOptionInCFG( serversCfgPath, serverUrl )
instanceOptionPath = cfgPath( 'DIRAC', 'Setups', setup )
instanceCfg = __getCfg( instanceOptionPath, 'Configuration', instance )
cfg = cfg.mergeWith( instanceCfg )
_addCfgToDiracCfg( cfg )
result = getComponentCfg( 'service', 'Configuration', 'Server', instance, extensions, addDefaultOptions = True )
if not result['OK']:
if exitOnError:
DIRAC.exit( -1 )
else:
return result
compCfg = result['Value']
cfg = cfg.mergeWith( compCfg )
gConfigurationData.mergeWithLocal( cfg )
addDefaultOptionsToComponentCfg( 'service', 'Configuration', 'Server', [] )
if installCfg:
centralCfg = _getCentralCfg( installCfg )
else:
centralCfg = _getCentralCfg( localCfg )
_addCfgToLocalCS( centralCfg )
setupComponent( 'service', 'Configuration', 'Server', [], checkModule = False )
runsvctrlComponent( 'Configuration', 'Server', 't' )
while ['Configuration', 'Server'] in setupServices:
setupServices.remove( ['Configuration', 'Server'] )
time.sleep( 5 )
# Now need to check if there is valid CS to register the info
result = scriptCfg.enableCS()
if not result['OK']:
if exitOnError:
DIRAC.exit( -1 )
return result
cfgClient = CSAPI()
if not cfgClient.initialize():
error = 'Configuration Server not defined'
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
# We need to make sure components are connecting to the Master CS, which is the only one being updated
from DIRAC import gConfig
localServers = localCfg.getOption( cfgPath( 'DIRAC', 'Configuration', 'Servers' ) )
masterServer = gConfig.getValue( cfgPath( 'DIRAC', 'Configuration', 'MasterServer' ), '' )
initialCfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Servers' , localServers )
masterCfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'Servers' , masterServer )
_addCfgToDiracCfg( masterCfg )
# 1.- Setup the instances in the CS
# If the Configuration Server used is not the Master, it can take some time for this
# info to be propagated, this may cause the later setup to fail
if setupAddConfiguration:
gLogger.notice( 'Registering System instances' )
for system in setupSystems:
addSystemInstance( system, instance, setup, True )
for system, service in setupServices:
if not addDefaultOptionsToCS( None, 'service', system, service, extensions, overwrite = True )['OK']:
# If we are not allowed to write to the central CS, add the configuration to the local file
addDefaultOptionsToComponentCfg( 'service', system, service, extensions )
for system, agent in setupAgents:
if not addDefaultOptionsToCS( None, 'agent', system, agent, extensions, overwrite = True )['OK']:
# If we are not allowed to write to the central CS, add the configuration to the local file
addDefaultOptionsToComponentCfg( 'agent', system, agent, extensions )
for system, executor in setupExecutors:
if not addDefaultOptionsToCS( None, 'executor', system, executor, extensions, overwrite = True )['OK']:
# If we are not allowed to write to the central CS, add the configuration to the local file
addDefaultOptionsToComponentCfg( 'executor', system, executor, extensions )
else:
gLogger.warn( 'Configuration parameters definition is not requested' )
if ['Configuration', 'Server'] in setupServices and setupPrivateConfiguration:
cfg = __getCfg( cfgPath( 'DIRAC', 'Configuration' ), 'AutoPublish' , 'no' )
_addCfgToDiracCfg( cfg )
# 2.- Check if MySQL is to be installed
if installMySQLFlag:
gLogger.notice( 'Installing MySQL' )
getMySQLPasswords()
installMySQL()
# 3.- Install requested Databases
# if MySQL is not installed locally, we assume a host is given
if setupDatabases:
result = getDatabases()
if not result['OK']:
if exitOnError:
gLogger.error( 'Failed to get databases', result['Message'] )
DIRAC.exit( -1 )
return result
installedDatabases = result['Value']
for dbName in setupDatabases:
if dbName not in installedDatabases:
extension, system = installDatabase( dbName )['Value']
gLogger.notice( 'Database %s from %s/%s installed' % ( dbName, extension, system ) )
result = addDatabaseOptionsToCS( None, system, dbName, overwrite = True )
if not result['OK']:
gLogger.error( 'Database %s CS registration failed: %s' % ( dbName, result['Message'] ) )
else:
gLogger.notice( 'Database %s already installed' % dbName )
if mysqlPassword:
if not _addMySQLToDiracCfg():
error = 'Failed to add MySQL user password to local configuration'
if exitOnError:
gLogger.error( error )
DIRAC.exit( -1 )
return S_ERROR( error )
# 4.- Then installed requested services
for system, service in setupServices:
setupComponent( 'service', system, service, extensions )
# 5.- Now the agents
for system, agent in setupAgents:
setupComponent( 'agent', system, agent, extensions )
# 6.- Now the executors
for system, executor in setupExecutors:
setupComponent( 'executor', system, executor, extensions )
# 7.- And finally the Portal
if setupWeb:
if setupWebApp:
setupNewPortal()
else:
setupPortal()
if localServers != masterServer:
_addCfgToDiracCfg( initialCfg )
for system, service in setupServices:
runsvctrlComponent( system, service, 't' )
for system, agent in setupAgents:
runsvctrlComponent( system, agent, 't' )
for system, executor in setupExecutors:
runsvctrlComponent( system, executor, 't' )
return S_OK()
def _createRunitLog( runitCompDir ):
  """
  Create the runit 'log' sub-directory for a component.

  Writes the svlogd 'config' file (rotate at 10 MB, keep 20 files) and an
  executable 'run' script that sources the instance bashrc and execs svlogd.

  :param runitCompDir: runit directory of the component
  """
  logDir = os.path.join( runitCompDir, 'log' )
  os.makedirs( logDir )
  # svlogd configuration: s<bytes> = rotation size, n<count> = files kept
  logConfigFile = os.path.join( logDir, 'config' )
  # Use context managers so the files are closed even if a write fails
  with open( logConfigFile, 'w' ) as fd:
    fd.write(
"""s10000000
n20
""" )
  logRunFile = os.path.join( logDir, 'run' )
  with open( logRunFile, 'w' ) as fd:
    fd.write(
"""#!/bin/bash
#
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec svlogd .
""" % { 'bashrc' : os.path.join( instancePath, 'bashrc' ) } )
  os.chmod( logRunFile, gDefaultPerms )
def installComponent( componentType, system, component, extensions, componentModule = '', checkModule = True ):
  """
  Install runit directory for the specified component

  :param componentType: 'service', 'agent' or 'executor'
  :param system: DIRAC system name the component belongs to
  :param component: component name
  :param extensions: list of DIRAC extensions to search for the software
  :param componentModule: optional module name when it differs from the component name
  :param checkModule: when True, verify the component module/software first
  :return: S_OK( runitCompDir ) or S_ERROR; may call DIRAC.exit( -1 ) when the
           module-level exitOnError flag is set
  """
  # Check if the component is already installed
  runitCompDir = os.path.join( runitDir, system, component )
  if os.path.exists( runitCompDir ):
    msg = "%s %s_%s already installed" % ( componentType, system, component )
    gLogger.notice( msg )
    return S_OK( runitCompDir )
  # Check that the software for the component is installed
  # Any "Load" or "Module" option in the configuration defining what modules the given "component"
  # needs to load will be taken care of by checkComponentModule.
  if checkModule:
    cModule = componentModule
    if not cModule:
      cModule = component
    result = checkComponentModule( componentType, system, cModule )
    if not result['OK']:
      # Fall back to the software check; executors are exempt from it
      if not checkComponentSoftware( componentType, system, cModule, extensions )['OK'] and componentType != 'executor':
        error = 'Software for %s %s/%s is not installed' % ( componentType, system, component )
        if exitOnError:
          gLogger.error( error )
          DIRAC.exit( -1 )
        return S_ERROR( error )
  gLogger.notice( 'Installing %s %s/%s' % ( componentType, system, component ) )
  # Now do the actual installation
  try:
    # Make sure the component has a (possibly empty) configuration file
    componentCfg = os.path.join( linkedRootPath, 'etc', '%s_%s.cfg' % ( system, component ) )
    if not os.path.exists( componentCfg ):
      fd = open( componentCfg, 'w' )
      fd.close()
    _createRunitLog( runitCompDir )
    # The 'run' script runit will exec; agents are reniced to lowest priority
    runFile = os.path.join( runitCompDir, 'run' )
    fd = open( runFile, 'w' )
    fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
[ "%(componentType)s" = "agent" ] && renice 20 -p $$
#
exec python $DIRAC/DIRAC/Core/scripts/dirac-%(componentType)s.py %(system)s/%(component)s %(componentCfg)s < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
       'componentType': componentType,
       'system' : system,
       'component': component,
       'componentCfg': componentCfg } )
    fd.close()
    os.chmod( runFile, gDefaultPerms )
  except Exception:
    error = 'Failed to prepare setup for %s %s/%s' % ( componentType, system, component )
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  # Execute the run script once (5 s timeout) to verify it starts
  result = execCommand( 5, [runFile] )
  gLogger.notice( result['Value'][1] )
  return S_OK( runitCompDir )
def setupComponent( componentType, system, component, extensions, componentModule = '', checkModule = True ):
  """
  Install and create link in startup

  After installing the runit directory a symlink is created in the startup
  directory and the runsv status is polled once per second, for up to 20
  seconds, until the component reports 'Run'.

  :return: S_OK with {'ComponentType', 'RunitStatus'} or S_ERROR
  """
  result = installComponent( componentType, system, component, extensions, componentModule, checkModule )
  if not result['OK']:
    return result
  # Create the startup entry now
  runitCompDir = result['Value']
  startCompDir = os.path.join( startDir, '%s_%s' % ( system, component ) )
  if not os.path.exists( startDir ):
    os.makedirs( startDir )
  if not os.path.lexists( startCompDir ):
    gLogger.notice( 'Creating startup link at', startCompDir )
    os.symlink( runitCompDir, startCompDir )
    # Give runsvdir time to pick up the new service
    time.sleep( 10 )
  # Check the runsv status
  start = time.time()
  while ( time.time() - 20 ) < start:
    result = getStartupComponentStatus( [ ( system, component )] )
    if result['OK'] and result['Value'] and \
       result['Value']['%s_%s' % ( system, component )]['RunitStatus'] == "Run":
      break
    # Fixed: a failed status query used to 'continue' past the sleep and
    # busy-loop at full speed for up to 20 seconds; always throttle polling
    time.sleep( 1 )
  # Final check
  result = getStartupComponentStatus( [( system, component )] )
  if not result['OK']:
    return S_ERROR( 'Failed to start the component %s_%s' % ( system, component ) )
  resDict = {}
  resDict['ComponentType'] = componentType
  resDict['RunitStatus'] = result['Value']['%s_%s' % ( system, component )]['RunitStatus']
  return S_OK( resDict )
def unsetupComponent( system, component ):
  """
  Remove the startup link(s) of a component so runsvdir no longer starts it.

  Failures to unlink are logged but never fatal.
  """
  pattern = os.path.join( startDir, '%s_%s' % ( system, component ) )
  for linkPath in glob.glob( pattern ):
    try:
      os.unlink( linkPath )
    except Exception:
      gLogger.exception()
  return S_OK()
def uninstallComponent( system, component ):
  """
  Remove startup and runit directories

  Best effort: the component is stopped first (a failure there is ignored),
  its startup link is removed, and finally the runit directory tree is
  deleted. Removal errors are logged but never fatal.

  :return: S_OK() in all cases
  """
  # Stop the component first; ignore failures (dead 'if not OK: pass' removed)
  runsvctrlComponent( system, component, 'd' )
  unsetupComponent( system, component )
  for runitCompDir in glob.glob( os.path.join( runitDir, system, component ) ):
    try:
      shutil.rmtree( runitCompDir )
    except Exception:
      gLogger.exception()
  return S_OK()
def installPortal():
  """
  Install runit directories for the Web Portal

  Two runit services make up the (old) portal: the lighttpd web server and
  the paster application server. Each is installed only if its runit
  directory does not yet exist.
  :return: S_OK( [runitHttpdDir, runitPasterDir] ) or S_ERROR; may call
           DIRAC.exit( -1 ) when the module-level exitOnError flag is set
  """
  # Check that the software for the Web Portal is installed
  error = ''
  webDir = os.path.join( linkedRootPath, 'Web' )
  if not os.path.exists( webDir ):
    error = 'Web extension not installed at %s' % webDir
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )
  # First the lighthttpd server
  # Check if the component is already installed
  runitHttpdDir = os.path.join( runitDir, 'Web', 'httpd' )
  runitPasterDir = os.path.join( runitDir, 'Web', 'paster' )
  if os.path.exists( runitHttpdDir ):
    msg = "lighthttpd already installed"
    gLogger.notice( msg )
  else:
    gLogger.notice( 'Installing Lighttpd' )
    # Now do the actual installation
    try:
      _createRunitLog( runitHttpdDir )
      runFile = os.path.join( runitHttpdDir, 'run' )
      fd = open( runFile, 'w' )
      fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
exec lighttpdSvc.sh < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ), } )
      fd.close()
      os.chmod( runFile, gDefaultPerms )
    except Exception:
      error = 'Failed to prepare setup for lighttpd'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    # Execute the run script once (5 s timeout) to verify it starts
    result = execCommand( 5, [runFile] )
    gLogger.notice( result['Value'][1] )
  # Second the Web portal
  # Check if the component is already installed
  if os.path.exists( runitPasterDir ):
    msg = "Web Portal already installed"
    gLogger.notice( msg )
  else:
    gLogger.notice( 'Installing Web Portal' )
    # Now do the actual installation
    try:
      _createRunitLog( runitPasterDir )
      runFile = os.path.join( runitPasterDir, 'run' )
      fd = open( runFile, 'w' )
      fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
cd %(DIRAC)s/Web
exec paster serve --reload production.ini < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
       'DIRAC': linkedRootPath} )
      fd.close()
      os.chmod( runFile, gDefaultPerms )
    except Exception:
      error = 'Failed to prepare setup for Web Portal'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    # Execute the run script once (5 s timeout) to verify it starts
    result = execCommand( 5, [runFile] )
    gLogger.notice( result['Value'][1] )
  return S_OK( [runitHttpdDir, runitPasterDir] )
def setupPortal():
  """
  Install the Web Portal and create its startup links, then wait for both
  runit services (httpd and paster) to report the 'Run' status.
  """
  result = installPortal()
  if not result['OK']:
    return result
  # Create the startup entries now
  runitDirs = result['Value']
  linkPaths = [ os.path.join( startDir, 'Web_httpd' ),
                os.path.join( startDir, 'Web_paster' ) ]
  if not os.path.exists( startDir ):
    os.makedirs( startDir )
  for runitPath, linkPath in zip( runitDirs, linkPaths ):
    if not os.path.lexists( linkPath ):
      gLogger.notice( 'Creating startup link at', linkPath )
      os.symlink( runitPath, linkPath )
      time.sleep( 1 )
  time.sleep( 5 )
  # Poll the runsv status for up to 10 seconds
  deadline = time.time() + 10
  while time.time() < deadline:
    result = getStartupComponentStatus( [ ( 'Web', 'httpd' ), ( 'Web', 'paster' ) ] )
    if not result['OK']:
      return S_ERROR( 'Failed to start the Portal' )
    statuses = result['Value']
    if statuses and \
       statuses['%s_%s' % ( 'Web', 'httpd' )]['RunitStatus'] == "Run" and \
       statuses['%s_%s' % ( 'Web', 'paster' )]['RunitStatus'] == "Run" :
      break
    time.sleep( 1 )
  # Final check
  return getStartupComponentStatus( [ ( 'Web', 'httpd' ), ( 'Web', 'paster' ) ] )
def setupNewPortal():
  """
  Install the WebApp portal and create its startup link, then wait for the
  runit service to report the 'Run' status.
  """
  result = installNewPortal()
  if not result['OK']:
    return result
  # Create the startup entry now
  runitPath = result['Value']
  linkPath = os.path.join( startDir, 'Web_WebApp' )
  if not os.path.exists( startDir ):
    os.makedirs( startDir )
  if not os.path.lexists( linkPath ):
    gLogger.notice( 'Creating startup link at', linkPath )
    os.symlink( runitPath, linkPath )
  time.sleep( 5 )
  # Poll the runsv status for up to 10 seconds
  deadline = time.time() + 10
  while time.time() < deadline:
    result = getStartupComponentStatus( [( 'Web', 'WebApp' )] )
    if not result['OK']:
      return S_ERROR( 'Failed to start the Portal' )
    if result['Value'] and \
       result['Value']['%s_%s' % ( 'Web', 'WebApp' )]['RunitStatus'] == "Run":
      break
    time.sleep( 1 )
  # Final check
  return getStartupComponentStatus( [ ( 'Web', 'WebApp' ) ] )
def installNewPortal():
  """
  Install runit directories for the new (WebApp) Web Portal

  Installs tornado via pip, checks that the WebAppDIRAC extension is
  present, compiles the JS code when the compile script is available and
  finally creates the runit directory with its run script.
  :return: S_OK( runitWebAppDir ) or S_ERROR; may call DIRAC.exit( -1 )
           when the module-level exitOnError flag is set
  """
  result = execCommand( False, ["pip", "install", "tornado"] )
  if not result['OK']:
    # Fixed: use the always-present 'Message' key ('Value' is only attached
    # for non-zero exit codes), return a proper S_ERROR instead of a bare
    # string, and honor exitOnError like the rest of the module
    error = "Tornado can not be installed:%s" % result['Message']
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  else:
    gLogger.notice( "Tornado is installed successfully!" )
  # Check that the software for the Web Portal is installed
  error = ''
  webDir = os.path.join( linkedRootPath, 'WebAppDIRAC' )
  if not os.path.exists( webDir ):
    error = 'WebApp extension not installed at %s' % webDir
    if exitOnError:
      gLogger.error( error )
      DIRAC.exit( -1 )
    return S_ERROR( error )
  # compile the JS code
  prodMode = ""
  webappCompileScript = os.path.join( linkedRootPath, "WebAppDIRAC/scripts", "dirac-webapp-compile.py" )
  if os.path.isfile( webappCompileScript ):
    os.chmod( webappCompileScript , gDefaultPerms )
    gLogger.notice( "Executing %s..." % webappCompileScript )
    if os.system( "python '%s' > '%s.out' 2> '%s.err'" % ( webappCompileScript,
                                                           webappCompileScript,
                                                           webappCompileScript ) ):
      gLogger.error( "Compile script %s failed. Check %s.err" % ( webappCompileScript,
                                                                  webappCompileScript ) )
    else:
      # Compilation succeeded: run the portal in production mode
      prodMode = "-p"
  # Check if the component is already installed
  runitWebAppDir = os.path.join( runitDir, 'Web', 'WebApp' )
  if os.path.exists( runitWebAppDir ):
    msg = "Web Portal already installed"
    gLogger.notice( msg )
  else:
    gLogger.notice( 'Installing Web Portal' )
    # Now do the actual installation
    try:
      _createRunitLog( runitWebAppDir )
      runFile = os.path.join( runitWebAppDir, 'run' )
      fd = open( runFile, 'w' )
      fd.write(
"""#!/bin/bash
rcfile=%(bashrc)s
[ -e $rcfile ] && source $rcfile
#
exec 2>&1
#
exec python %(DIRAC)s/WebAppDIRAC/scripts/dirac-webapp-run.py %(prodMode)s < /dev/null
""" % {'bashrc': os.path.join( instancePath, 'bashrc' ),
       'DIRAC': linkedRootPath,
       'prodMode':prodMode} )
      fd.close()
      os.chmod( runFile, gDefaultPerms )
    except Exception:
      error = 'Failed to prepare setup for Web Portal'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    # Execute the run script once (5 s timeout) to verify it starts
    result = execCommand( 5, [runFile] )
    gLogger.notice( result['Value'][1] )
  return S_OK( runitWebAppDir )
def fixMySQLScripts( startupScript = mysqlStartupScript ):
  """
  Edit MySQL scripts to point to desired locations for db and my.cnf

  The startup script is rewritten in place:
   - 'datadir=' is pointed at mysqlDbDir and an 'export HOME' line pointing
     at mysqlDir is appended right after it (any pre-existing 'export HOME'
     line is dropped first)
   - 'basedir=' is pointed at the platform directory under rootPath
   - 'extra_args=' is forced to '-n'
   - mysqld_safe invocations get '--no-defaults' added when missing

  :param startupScript: path of the MySQL startup script to fix
  :return: S_OK() or S_ERROR; may call DIRAC.exit( -1 ) when exitOnError is set
  """
  gLogger.verbose( 'Updating:', startupScript )
  try:
    fd = open( startupScript, 'r' )
    orgLines = fd.readlines()
    fd.close()
    fd = open( startupScript, 'w' )
    for line in orgLines:
      # Drop any existing 'export HOME'; it is re-added after 'datadir=' below
      if line.find( 'export HOME' ) == 0:
        continue
      if line.find( 'datadir=' ) == 0:
        line = 'datadir=%s\n' % mysqlDbDir
        gLogger.debug( line )
        line += 'export HOME=%s\n' % mysqlDir
      if line.find( 'basedir=' ) == 0:
        platform = getPlatformString()
        line = 'basedir=%s\n' % os.path.join( rootPath, platform )
      if line.find( 'extra_args=' ) == 0:
        line = 'extra_args="-n"\n'
      if line.find( '$bindir/mysqld_safe --' ) >= 0 and not ' --no-defaults ' in line:
        line = line.replace( 'mysqld_safe', 'mysqld_safe --no-defaults' )
      fd.write( line )
    fd.close()
  except Exception:
    error = 'Failed to Update MySQL startup script'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  return S_OK()
def mysqlInstalled( doNotExit = False ):
  """
  Check if MySQL is already installed.

  The presence of either the database directory or the log directory is
  taken as evidence of an existing installation.

  :param doNotExit: when True, return a plain S_ERROR() instead of logging
                    and possibly exiting
  :return: S_OK() if installed, S_ERROR otherwise
  """
  alreadyThere = os.path.exists( mysqlDbDir ) or os.path.exists( mysqlLogDir )
  if alreadyThere:
    return S_OK()
  if doNotExit:
    return S_ERROR()
  errMsg = 'MySQL not properly Installed'
  gLogger.error( errMsg )
  if exitOnError:
    DIRAC.exit( -1 )
  return S_ERROR( errMsg )
def getMySQLPasswords():
  """
  Make sure both MySQL passwords (root and DIRAC user) are defined,
  prompting interactively for any that is missing.
  """
  import getpass
  global mysqlRootPwd, mysqlPassword
  if not mysqlRootPwd:
    mysqlRootPwd = getpass.getpass( 'MySQL root password: ' )
  if not mysqlPassword:
    # Prefer the value already present in the local configuration,
    # falling back to an interactive prompt
    mysqlPassword = ( localCfg.getOption( '/Systems/Databases/Password', '' )
                      or getpass.getpass( 'MySQL Dirac password: ' ) )
  return S_OK()
def setMySQLPasswords( root = '', dirac = '' ):
  """
  Override the module-level MySQL passwords.

  An empty argument leaves the corresponding password untouched.
  """
  global mysqlRootPwd, mysqlPassword
  mysqlRootPwd = root or mysqlRootPwd
  mysqlPassword = dirac or mysqlPassword
  return S_OK()
def startMySQL():
  """
  Start the MySQL server via its startup script, provided it is installed.
  """
  installedCheck = mysqlInstalled()
  if not installedCheck['OK']:
    return installedCheck
  return execCommand( 0, [mysqlStartupScript, 'start'] )
def stopMySQL():
  """
  Stop the MySQL server via its startup script, provided it is installed.
  """
  installedCheck = mysqlInstalled()
  if not installedCheck['OK']:
    return installedCheck
  return execCommand( 0, [mysqlStartupScript, 'stop'] )
def installMySQL():
  """
  Attempt an installation of MySQL
  mode:
    -Master
    -Slave
    -None

  The startup script is fixed first; if an installation is already present
  nothing more is done. Otherwise the data/log directories are created,
  my.cnf is generated from the distributed template, the database is
  initialized, the server started and the root/DIRAC passwords are set.
  :return: S_OK() or S_ERROR; may call DIRAC.exit( -1 ) when exitOnError is set
  """
  fixMySQLScripts()
  if mysqlInstalled( doNotExit = True )['OK']:
    gLogger.notice( 'MySQL already installed' )
    return S_OK()
  if mysqlMode.lower() not in [ '', 'master', 'slave' ]:
    error = 'Unknown MySQL server Mode'
    if exitOnError:
      gLogger.fatal( error, mysqlMode )
      DIRAC.exit( -1 )
    gLogger.error( error, mysqlMode )
    return S_ERROR( error )
  if mysqlHost:
    gLogger.notice( 'Installing MySQL server at', mysqlHost )
  if mysqlMode:
    gLogger.notice( 'This is a MySQl %s server' % mysqlMode )
  try:
    os.makedirs( mysqlDbDir )
    os.makedirs( mysqlLogDir )
  except Exception:
    error = 'Can not create MySQL dirs'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  try:
    # Generate my.cnf from the distributed template, tuning line by line
    fd = open( mysqlMyOrg, 'r' )
    myOrg = fd.readlines()
    fd.close()
    fd = open( mysqlMyCnf, 'w' )
    for line in myOrg:
      if line.find( '[mysqld]' ) == 0:
        # Right after the [mysqld] section header, force one file per table
        line += '\n'.join( [ 'innodb_file_per_table', '' ] )
      elif line.find( 'innodb_log_arch_dir' ) == 0:
        # Obsolete option: drop the line entirely
        line = ''
      elif line.find( 'innodb_data_file_path' ) == 0:
        line = line.replace( '2000M', '200M' )
      elif line.find( 'server-id' ) == 0 and mysqlMode.lower() == 'master':
        # MySQL Configuration for Master Server
        line = '\n'.join( ['server-id = 1',
                           '# DIRAC Master-Server',
                           'sync-binlog = 1',
                           'replicate-ignore-table = mysql.MonitorData',
                           '# replicate-ignore-db=db_name',
                           'log-bin = mysql-bin',
                           'log-slave-updates', '' ] )
      elif line.find( 'server-id' ) == 0 and mysqlMode.lower() == 'slave':
        # MySQL Configuration for Slave Server
        line = '\n'.join( ['server-id = %s' % int( time.time() ),
                           '# DIRAC Slave-Server',
                           'sync-binlog = 1',
                           'replicate-ignore-table = mysql.MonitorData',
                           '# replicate-ignore-db=db_name',
                           'log-bin = mysql-bin',
                           'log-slave-updates', '' ] )
      elif line.find( '/opt/dirac/mysql' ) > -1:
        line = line.replace( '/opt/dirac/mysql', mysqlDir )
      # Buffer pool sizing flags are applied on top of any rewrite above
      if mysqlSmallMem:
        if line.find( 'innodb_buffer_pool_size' ) == 0:
          line = 'innodb_buffer_pool_size = 200M\n'
      elif mysqlLargeMem:
        if line.find( 'innodb_buffer_pool_size' ) == 0:
          line = 'innodb_buffer_pool_size = 10G\n'
      fd.write( line )
    fd.close()
  except Exception:
    error = 'Can not create my.cnf'
    gLogger.exception( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  gLogger.notice( 'Initializing MySQL...' )
  result = execCommand( 0, ['mysql_install_db',
                            '--defaults-file=%s' % mysqlMyCnf,
                            '--datadir=%s' % mysqlDbDir ] )
  if not result['OK']:
    return result
  gLogger.notice( 'Starting MySQL...' )
  result = startMySQL()
  if not result['OK']:
    return result
  gLogger.notice( 'Setting MySQL root password' )
  result = execCommand( 0, ['mysqladmin', '-u', mysqlRootUser, 'password', mysqlRootPwd] )
  if not result['OK']:
    return result
  # MySQL tends to define root@host user rather than root@host.domain
  hostName = mysqlHost.split('.')[0]
  result = execMySQL( "UPDATE user SET Host='%s' WHERE Host='%s'" % (mysqlHost,hostName),
                      localhost=True )
  if not result['OK']:
    return result
  result = execMySQL( "FLUSH PRIVILEGES" )
  if not result['OK']:
    return result
  if mysqlHost and socket.gethostbyname( mysqlHost ) != '127.0.0.1' :
    result = execCommand( 0, ['mysqladmin', '-u', mysqlRootUser, '-h', mysqlHost, 'password', mysqlRootPwd] )
    if not result['OK']:
      return result
  # Remove password-less accounts
  result = execMySQL( "DELETE from user WHERE Password=''", localhost=True )
  if not _addMySQLToDiracCfg():
    return S_ERROR( 'Failed to add MySQL user password to local configuration' )
  return S_OK()
def getMySQLStatus():
  """
  Get the status of the MySQL database installation.

  Parses the single-line output of 'mysqladmin status', which consists of
  colon-separated "Label: value ..." fields.

  :return: S_OK( dict ) with the parsed counters, or the failed command result
  """
  result = execCommand( 0, ['mysqladmin', 'status' ] )
  if not result['OK']:
    return result
  output = result['Value'][1]
  # Strict unpack: the leading field is the 'Uptime' label itself
  ( _label, upValue, threadsValue, questionsValue, slowValue,
    opensValue, flushValue, openTablesValue, qpsValue ) = output.split( ':' )
  firstToken = lambda chunk: chunk.strip().split()[0]
  return S_OK( { 'UpTime': firstToken( upValue ),
                 'NumberOfThreads': firstToken( threadsValue ),
                 'NumberOfQuestions': firstToken( questionsValue ),
                 'NumberOfSlowQueries': firstToken( slowValue ),
                 'NumberOfOpens': firstToken( opensValue ),
                 'OpenTables': firstToken( openTablesValue ),
                 'FlushTables': firstToken( flushValue ),
                 'QueriesPerSecond': firstToken( qpsValue ) } )
def getAvailableDatabases( extensions ):
  """
  Discover all databases shipped with the installed DIRAC (and extension)
  code by scanning for */DB/*.sql files.

  :param extensions: list of extension name prefixes (without 'DIRAC')
  :return: S_OK( { dbName : { 'Extension', 'System' } } )
  """
  dbDict = {}
  # The bare '' entry scans vanilla DIRAC last, so it wins on name clashes
  for ext in extensions + ['']:
    pattern = os.path.join( rootPath, '%sDIRAC' % ext, '*', 'DB', '*.sql' )
    for sqlPath in glob.glob( pattern ):
      name = os.path.basename( sqlPath ).replace( '.sql', '' )
      systemName = sqlPath.split( '/' )[-3].replace( 'System', '' )
      dbDict[name] = { 'Extension': ext, 'System': systemName }
  return S_OK( dbDict )
def getDatabases():
  """
  Get the list of databases present in the MySQL server, excluding the
  MySQL system databases.
  """
  result = execMySQL( 'SHOW DATABASES' )
  if not result['OK']:
    return result
  excluded = ( 'Database', 'information_schema', 'mysql', 'test' )
  return S_OK( [ row[0] for row in result['Value'] if row[0] not in excluded ] )
def installDatabase( dbName ):
  """
  Install requested DB in MySQL server

  Locates the <dbName>.sql file shipped with the code, creates the database,
  grants the DIRAC user the needed privileges and executes the SQL
  statements one by one.

  :param dbName: name of the database to install
  :return: S_OK( [extension, system] ) or S_ERROR; may call DIRAC.exit( -1 )
           when the module-level exitOnError flag is set
  """
  global mysqlRootPwd, mysqlPassword
  if not mysqlRootPwd:
    rootPwdPath = cfgInstallPath( 'Database', 'RootPwd' )
    return S_ERROR( 'Missing %s in %s' % ( rootPwdPath, cfgFile ) )
  if not mysqlPassword:
    mysqlPassword = localCfg.getOption( cfgPath( 'Systems', 'Databases', 'Password' ), mysqlPassword )
    if not mysqlPassword:
      mysqlPwdPath = cfgPath( 'Systems', 'Databases', 'Password' )
      return S_ERROR( 'Missing %s in %s' % ( mysqlPwdPath, cfgFile ) )
  gLogger.notice( 'Installing', dbName )
  dbFile = glob.glob( os.path.join( rootPath, '*', '*', 'DB', '%s.sql' % dbName ) )
  if not dbFile:
    error = 'Database %s not found' % dbName
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  dbFile = dbFile[0]
  # just check that the server is reachable
  result = execMySQL( 'SHOW STATUS' )
  if not result['OK']:
    error = 'Could not connect to MySQL server'
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  # now creating the Database
  result = execMySQL( 'CREATE DATABASE `%s`' % dbName )
  if not result['OK']:
    gLogger.error( 'Failed to create databases', result['Message'] )
    if exitOnError:
      DIRAC.exit( -1 )
    return result
  perms = "SELECT,INSERT,LOCK TABLES,UPDATE,DELETE,CREATE,DROP,ALTER,CREATE VIEW, SHOW VIEW"
  for cmd in ["GRANT %s ON `%s`.* TO '%s'@'localhost' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
                                                                              mysqlPassword ),
              "GRANT %s ON `%s`.* TO '%s'@'%s' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
                                                                       mysqlHost, mysqlPassword ),
              "GRANT %s ON `%s`.* TO '%s'@'%%' IDENTIFIED BY '%s'" % ( perms, dbName, mysqlUser,
                                                                       mysqlPassword ) ]:
    result = execMySQL( cmd )
    if not result['OK']:
      error = "Error executing '%s'" % cmd
      gLogger.error( error, result['Message'] )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
  result = execMySQL( 'FLUSH PRIVILEGES' )
  if not result['OK']:
    # Fixed: typo in the log message and use of the builtin exit() instead
    # of DIRAC.exit() (which would have raised SystemExit without cleanup)
    gLogger.error( 'Failed to flush privileges', result['Message'] )
    if exitOnError:
      DIRAC.exit( -1 )
    return result
  # first getting the lines to be executed, and then execute them
  try:
    cmdLines = _createMySQLCMDLines( dbFile )
    # We need to run one SQL cmd at once, mysql is much happier that way.
    # Create a string of commands, ignoring comment lines
    sqlString = '\n'.join( x for x in cmdLines if not x.startswith( "--" ) )
    # Now run each command (They are seperated by ;)
    # Ignore any empty ones
    cmds = [ x.strip() for x in sqlString.split( ";" ) if x.strip() ]
    for cmd in cmds:
      result = execMySQL( cmd, dbName )
      if not result['OK']:
        error = 'Failed to initialize Database'
        gLogger.notice( cmd )
        gLogger.error( error, result['Message'] )
        if exitOnError:
          DIRAC.exit( -1 )
        return S_ERROR( error )
  except Exception as e:
    # Fixed: py2-only 'except Exception, e' syntax, and 'error' could be
    # unbound here when the exception occurred before the loop assigned it
    gLogger.error( str( e ) )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( 'Failed to initialize Database: %s' % str( e ) )
  # dbFile path is .../<ext>DIRAC/<System>/DB/<dbName>.sql
  return S_OK( dbFile.split( '/' )[-4:-2] )
def _createMySQLCMDLines( dbFile ):
""" Creates a list of MYSQL commands to be executed, inspecting the dbFile(s)
"""
cmdLines = []
fd = open( dbFile )
dbLines = fd.readlines()
fd.close()
for line in dbLines:
# Should we first source an SQL file (is this sql file an extension)?
if line.lower().startswith('source'):
sourcedDBbFileName = line.split( ' ' )[1].replace( '\n', '' )
gLogger.info( "Found file to source: %s" % sourcedDBbFileName )
sourcedDBbFile = os.path.join( rootPath, sourcedDBbFileName )
fdSourced = open( sourcedDBbFile )
dbLinesSourced = fdSourced.readlines()
fdSourced.close()
for lineSourced in dbLinesSourced:
if lineSourced.strip():
cmdLines.append( lineSourced.strip() )
# Creating/adding cmdLines
else:
if line.strip():
cmdLines.append( line.strip() )
return cmdLines
def execMySQL( cmd, dbName = 'mysql', localhost=False ):
  """
  Execute MySQL Command

  :param cmd: SQL statement to execute
  :param dbName: database to connect to; one connection per database name is
                 cached in the module-level 'db' dictionary
  :param localhost: when True connect to 'localhost' instead of mysqlHost
  :return: result of MySQL._query, or S_ERROR when no connection can be made;
           may call DIRAC.exit( -1 ) when exitOnError is set
  """
  global db
  from DIRAC.Core.Utilities.MySQL import MySQL
  if not mysqlRootPwd:
    return S_ERROR( 'MySQL root password is not defined' )
  if dbName not in db:
    dbHost = mysqlHost
    if localhost:
      dbHost = 'localhost'
    # Cache the connection for subsequent calls with the same dbName
    db[dbName] = MySQL( dbHost, mysqlRootUser, mysqlRootPwd, dbName, mysqlPort )
  if not db[dbName]._connected:
    error = 'Could not connect to MySQL server'
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  return db[dbName]._query( cmd )
def _addMySQLToDiracCfg():
  """
  Store the MySQL DIRAC user credentials under /Systems/Databases in the
  local configuration.
  """
  if not mysqlPassword:
    return S_ERROR( 'Missing %s in %s' % ( cfgInstallPath( 'Database', 'Password' ), cfgFile ) )
  section = cfgPath( 'Systems', 'Databases' )
  dbCfg = __getCfg( section, 'User', mysqlUser )
  dbCfg.setOption( cfgPath( section, 'Password' ), mysqlPassword )
  return _addCfgToDiracCfg( dbCfg )
def configureCE( ceName = '', ceType = '', cfg = None, currentSectionPath = '' ):
  """
  Produce new dirac.cfg including configuration for new CE

  The CE defaults are retrieved, merged under /LocalSite (replacing any
  pre-existing section of the same name), the merged configuration is
  loaded and every CE is instantiated as a sanity check before the new
  dirac.cfg is written out.

  :param ceName: name of the Computing Element
  :param ceType: type of the Computing Element
  :param cfg: optional CFG object with additional settings
  :param currentSectionPath: configuration path the defaults are read from
  :return: S_OK( ceNameList ) or S_ERROR; may call DIRAC.exit( -1 ) when
           exitOnError is set
  """
  from DIRAC.Resources.Computing.ComputingElementFactory import ComputingElementFactory
  from DIRAC import gConfig
  cesCfg = ResourcesDefaults.getComputingElementDefaults( ceName, ceType, cfg, currentSectionPath )
  ceNameList = cesCfg.listSections()
  if not ceNameList:
    error = 'No CE Name provided'
    gLogger.error( error )
    if exitOnError:
      DIRAC.exit( -1 )
    return S_ERROR( error )
  for ceName in ceNameList:
    if 'CEType' not in cesCfg[ceName]:
      error = 'Missing Type for CE "%s"' % ceName
      gLogger.error( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
  localsiteCfg = localCfg['LocalSite']
  # Replace Configuration under LocalSite with new Configuration
  for ceName in ceNameList:
    if localsiteCfg.existsKey( ceName ):
      gLogger.notice( ' Removing existing CE:', ceName )
      localsiteCfg.deleteKey( ceName )
    gLogger.notice( 'Configuring CE:', ceName )
    localsiteCfg.createNewSection( ceName, contents = cesCfg[ceName] )
  # Apply configuration and try to instantiate the CEs
  gConfig.loadCFG( localCfg )
  for ceName in ceNameList:
    ceFactory = ComputingElementFactory()
    try:
      # NOTE(review): the 'ceType' argument is reused for every CE instead of
      # cesCfg[ceName]['CEType'] -- looks suspicious for multi-CE setups, but
      # left unchanged to preserve behavior; confirm against callers
      ceInstance = ceFactory.getCE( ceType, ceName )
    except Exception:
      error = 'Fail to instantiate CE'
      gLogger.exception( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
    if not ceInstance['OK']:
      error = 'Fail to instantiate CE: %s' % ceInstance['Message']
      gLogger.error( error )
      if exitOnError:
        DIRAC.exit( -1 )
      return S_ERROR( error )
  # Everything is OK, we can save the new cfg
  localCfg.writeToFile( cfgFile )
  # Fixed: typo 'uptdated' in the user-visible log message
  gLogger.always( 'LocalSite section in %s has been updated with new configuration:' % os.path.basename( cfgFile ) )
  gLogger.always( str( localCfg['LocalSite'] ) )
  return S_OK( ceNameList )
def configureLocalDirector( ceNameList = '' ):
  """
  Install a Local DIRAC TaskQueueDirector, basically write the proper
  configuration file.

  :param ceNameList: list of Computing Element names the Director may use;
                     when empty, only the component is (re)started
  """
  if ceNameList:
    result = setupComponent( 'agent', 'WorkloadManagement', 'TaskQueueDirector', [] )
    if not result['OK']:
      return result
    # Now write a local Configuration for the Director
    directorCfg = CFG()
    for option, value in ( ( 'SubmitPools', 'DIRAC' ),
                           ( 'DefaultSubmitPools', 'DIRAC' ),
                           ( 'ComputingElements', ', '.join( ceNameList ) ) ):
      directorCfg.addKey( option, value, 'Added by InstallTools' )
    result = addCfgToComponentCfg( 'agent', 'WorkloadManagement', 'TaskQueueDirector', directorCfg )
    if not result['OK']:
      return result
  return runsvctrlComponent( 'WorkloadManagement', 'TaskQueueDirector', 't' )
def execCommand( timeout, cmd ):
  """
  Execute command tuple and handle Error cases

  :param timeout: seconds passed to systemCall; a falsy value means no timeout
  :param cmd: command given as a list of arguments
  :return: the systemCall result dictionary; on a non-zero exit code an
           S_ERROR is returned with the raw ( status, stdout, stderr ) tuple
           attached under 'Value'. May call DIRAC.exit( -1 ) when the
           module-level exitOnError flag is set.
  """
  result = systemCall( timeout, cmd )
  if not result['OK']:
    # A timeout is returned to the caller as-is, without triggering exitOnError
    if timeout and result['Message'].find( 'Timeout' ) == 0:
      return result
    gLogger.error( 'Failed to execute', '%s: %s' % ( cmd[0], result['Message'] ) )
    if exitOnError:
      DIRAC.exit( -1 )
    return result
  if result['Value'][0]:
    error = 'Failed to execute'
    gLogger.error( error, cmd[0] )
    gLogger.error( 'Exit code:' , ( '%s\n' % result['Value'][0] ) + '\n'.join( result['Value'][1:] ) )
    if exitOnError:
      DIRAC.exit( -1 )
    # Return an S_ERROR but keep the command output available to the caller
    error = S_ERROR( error )
    error['Value'] = result['Value']
    return error
  gLogger.verbose( result['Value'][1] )
  return result
| codeparrot/github-code-clean |
"""
Abstract base classes define the primitives that renderers and
graphics contexts must implement to serve as a matplotlib backend
:class:`RendererBase`
An abstract base class to handle drawing/rendering operations.
:class:`FigureCanvasBase`
The abstraction layer that separates the
:class:`matplotlib.figure.Figure` from the backend specific
details like a user interface drawing area
:class:`GraphicsContextBase`
An abstract base class that provides color, line styles, etc...
:class:`Event`
The base class for all of the matplotlib event
    handling. Derived classes such as :class:`KeyEvent` and
:class:`MouseEvent` store the meta data like keys and buttons
pressed, x and y locations in pixel and
:class:`~matplotlib.axes.Axes` coordinates.
:class:`ShowBase`
The base class for the Show class of each interactive backend;
the 'show' callable is then set to Show.__call__, inherited from
ShowBase.
"""
import os
import warnings
import time
import io
import numpy as np
import matplotlib.cbook as cbook
import matplotlib.colors as colors
import matplotlib.transforms as transforms
import matplotlib.widgets as widgets
#import matplotlib.path as path
from matplotlib import rcParams
from matplotlib import is_interactive
from matplotlib import get_backend
from matplotlib._pylab_helpers import Gcf
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D
import matplotlib.tight_bbox as tight_bbox
import matplotlib.textpath as textpath
from matplotlib.path import Path
from matplotlib.cbook import mplDeprecation
try:
from PIL import Image
_has_pil = True
except ImportError:
_has_pil = False
# Registry of backend canvas classes keyed by format name; filled in by
# register_backend() and consulted when saving figures.
_backend_d = {}
def register_backend(format, backend_class):
    """Register *backend_class* as the canvas class handling *format*."""
    _backend_d[format] = backend_class
class ShowBase(object):
    """
    Simple base class to generate a show() callable in backends.
    Subclass must override mainloop() method.
    """
    def __call__(self, block=None):
        """
        Show all figures. If *block* is not None, then
        it is a boolean that overrides all other factors
        determining whether show blocks by calling mainloop().
        The other factors are:
        it does not block if run inside "ipython --pylab";
        it does not block in interactive mode.
        """
        managers = Gcf.get_all_fig_managers()
        if not managers:
            # Nothing to show.
            return
        for manager in managers:
            manager.show()
        if block is not None:
            # An explicit block argument overrides every heuristic below.
            if block:
                self.mainloop()
                return
            else:
                return
        # Hack: determine at runtime whether we are
        # inside ipython in pylab mode.
        from matplotlib import pyplot
        try:
            ipython_pylab = not pyplot.show._needmain
            # IPython versions >= 0.10 tack the _needmain
            # attribute onto pyplot.show, and always set
            # it to False, when in --pylab mode.
            ipython_pylab = ipython_pylab and get_backend() != 'WebAgg'
            # TODO: The above is a hack to get the WebAgg backend
            # working with `ipython --pylab` until proper integration
            # is implemented.
        except AttributeError:
            ipython_pylab = False
        # Leave the following as a separate step in case we
        # want to control this behavior with an rcParam.
        if ipython_pylab:
            return
        if not is_interactive() or get_backend() == 'WebAgg':
            self.mainloop()
    def mainloop(self):
        # Backend-specific event loop; overridden by subclasses.
        pass
class RendererBase:
    """An abstract base class to handle drawing/rendering operations.
    The following methods *must* be implemented in the backend:
    * :meth:`draw_path`
    * :meth:`draw_image`
    * :meth:`draw_text`
    * :meth:`get_text_width_height_descent`
    The following methods *should* be implemented in the backend for
    optimization reasons:
    * :meth:`draw_markers`
    * :meth:`draw_path_collection`
    * :meth:`draw_quad_mesh`
    """
    def __init__(self):
        # TexManager instance, created lazily in get_texmanager().
        self._texmanager = None
        # Helper used by the path-based text drawing fallbacks.
        self._text2path = textpath.TextToPath()
    def open_group(self, s, gid=None):
        """
        Open a grouping element with label *s*. If *gid* is given, use
        *gid* as the id of the group. Is only currently used by
        :mod:`~matplotlib.backends.backend_svg`.
        """
        pass
    def close_group(self, s):
        """
        Close a grouping element with label *s*
        Is only currently used by :mod:`~matplotlib.backends.backend_svg`
        """
        pass
    def draw_path(self, gc, path, transform, rgbFace=None):
        """
        Draws a :class:`~matplotlib.path.Path` instance using the
        given affine transform.
        """
        raise NotImplementedError
    def draw_markers(self, gc, marker_path, marker_trans, path,
                     trans, rgbFace=None):
        """
        Draws a marker at each of the vertices in path. This includes
        all vertices, including control points on curves. To avoid
        that behavior, those vertices should be removed before calling
        this function.
        *gc*
            the :class:`GraphicsContextBase` instance
        *marker_trans*
            is an affine transform applied to the marker.
        *trans*
            is an affine transform applied to the path.
        This provides a fallback implementation of draw_markers that
        makes multiple calls to :meth:`draw_path`. Some backends may
        want to override this method in order to draw the marker only
        once and reuse it multiple times.
        """
        for vertices, codes in path.iter_segments(trans, simplify=False):
            if len(vertices):
                # The marker is stamped at the last point of the segment.
                x, y = vertices[-2:]
                self.draw_path(gc, marker_path,
                               marker_trans +
                               transforms.Affine2D().translate(x, y),
                               rgbFace)
    def draw_path_collection(self, gc, master_transform, paths, all_transforms,
                             offsets, offsetTrans, facecolors, edgecolors,
                             linewidths, linestyles, antialiaseds, urls,
                             offset_position):
        """
        Draws a collection of paths selecting drawing properties from
        the lists *facecolors*, *edgecolors*, *linewidths*,
        *linestyles* and *antialiaseds*. *offsets* is a list of
        offsets to apply to each of the paths. The offsets in
        *offsets* are first transformed by *offsetTrans* before being
        applied. *offset_position* may be either "screen" or "data"
        depending on the space that the offsets are in.
        This provides a fallback implementation of
        :meth:`draw_path_collection` that makes multiple calls to
        :meth:`draw_path`. Some backends may want to override this in
        order to render each set of path data only once, and then
        reference that path multiple times with the different offsets,
        colors, styles etc. The generator methods
        :meth:`_iter_collection_raw_paths` and
        :meth:`_iter_collection` are provided to help with (and
        standardize) the implementation across backends. It is highly
        recommended to use those generators, so that changes to the
        behavior of :meth:`draw_path_collection` can be made globally.
        """
        path_ids = []
        for path, transform in self._iter_collection_raw_paths(
                master_transform, paths, all_transforms):
            path_ids.append((path, transform))
        for xo, yo, path_id, gc0, rgbFace in self._iter_collection(
                gc, master_transform, all_transforms, path_ids, offsets,
                offsetTrans, facecolors, edgecolors, linewidths, linestyles,
                antialiaseds, urls, offset_position):
            path, transform = path_id
            transform = transforms.Affine2D(
                transform.get_matrix()).translate(xo, yo)
            self.draw_path(gc0, path, transform, rgbFace)
    def draw_quad_mesh(self, gc, master_transform, meshWidth, meshHeight,
                       coordinates, offsets, offsetTrans, facecolors,
                       antialiased, edgecolors):
        """
        This provides a fallback implementation of
        :meth:`draw_quad_mesh` that generates paths and then calls
        :meth:`draw_path_collection`.
        """
        from matplotlib.collections import QuadMesh
        paths = QuadMesh.convert_mesh_to_paths(
            meshWidth, meshHeight, coordinates)
        if edgecolors is None:
            edgecolors = facecolors
        linewidths = np.array([gc.get_linewidth()], np.float_)
        return self.draw_path_collection(
            gc, master_transform, paths, [], offsets, offsetTrans, facecolors,
            edgecolors, linewidths, [], [antialiased], [None], 'screen')
    def draw_gouraud_triangle(self, gc, points, colors, transform):
        """
        Draw a Gouraud-shaded triangle.
        *points* is a 3x2 array of (x, y) points for the triangle.
        *colors* is a 3x4 array of RGBA colors for each point of the
        triangle.
        *transform* is an affine transform to apply to the points.
        """
        raise NotImplementedError
    def draw_gouraud_triangles(self, gc, triangles_array, colors_array,
                               transform):
        """
        Draws a series of Gouraud triangles.
        *points* is a Nx3x2 array of (x, y) points for the triangles.
        *colors* is a Nx3x4 array of RGBA colors for each point of the
        triangles.
        *transform* is an affine transform to apply to the points.
        """
        transform = transform.frozen()
        for tri, col in zip(triangles_array, colors_array):
            self.draw_gouraud_triangle(gc, tri, col, transform)
    def _iter_collection_raw_paths(self, master_transform, paths,
                                   all_transforms):
        """
        This is a helper method (along with :meth:`_iter_collection`) to make
        it easier to write a space-efficient :meth:`draw_path_collection`
        implementation in a backend.
        This method yields all of the base path/transform
        combinations, given a master transform, a list of paths and
        list of transforms.
        The arguments should be exactly what is passed in to
        :meth:`draw_path_collection`.
        The backend should take each yielded path and transform and
        create an object that can be referenced (reused) later.
        """
        Npaths = len(paths)
        Ntransforms = len(all_transforms)
        N = max(Npaths, Ntransforms)
        if Npaths == 0:
            return
        transform = transforms.IdentityTransform()
        for i in range(N):
            # Cycle through paths/transforms when the lists differ in length.
            path = paths[i % Npaths]
            if Ntransforms:
                transform = all_transforms[i % Ntransforms]
            yield path, transform + master_transform
    def _iter_collection(self, gc, master_transform, all_transforms,
                         path_ids, offsets, offsetTrans, facecolors,
                         edgecolors, linewidths, linestyles,
                         antialiaseds, urls, offset_position):
        """
        This is a helper method (along with
        :meth:`_iter_collection_raw_paths`) to make it easier to write
        a space-efficient :meth:`draw_path_collection` implementation in a
        backend.
        This method yields all of the path, offset and graphics
        context combinations to draw the path collection. The caller
        should already have looped over the results of
        :meth:`_iter_collection_raw_paths` to draw this collection.
        The arguments should be the same as that passed into
        :meth:`draw_path_collection`, with the exception of
        *path_ids*, which is a list of arbitrary objects that the
        backend will use to reference one of the paths created in the
        :meth:`_iter_collection_raw_paths` stage.
        Each yielded result is of the form::
           xo, yo, path_id, gc, rgbFace
        where *xo*, *yo* is an offset; *path_id* is one of the elements of
        *path_ids*; *gc* is a graphics context and *rgbFace* is a color to
        use for filling the path.
        """
        Ntransforms = len(all_transforms)
        Npaths = len(path_ids)
        Noffsets = len(offsets)
        N = max(Npaths, Noffsets)
        Nfacecolors = len(facecolors)
        Nedgecolors = len(edgecolors)
        Nlinewidths = len(linewidths)
        Nlinestyles = len(linestyles)
        Naa = len(antialiaseds)
        Nurls = len(urls)
        if (Nfacecolors == 0 and Nedgecolors == 0) or Npaths == 0:
            # Nothing visible to draw.
            return
        if Noffsets:
            toffsets = offsetTrans.transform(offsets)
        # Work on a copy of the gc so the caller's context is untouched.
        gc0 = self.new_gc()
        gc0.copy_properties(gc)
        if Nfacecolors == 0:
            rgbFace = None
        if Nedgecolors == 0:
            gc0.set_linewidth(0.0)
        xo, yo = 0, 0
        for i in range(N):
            # Each property list is cycled independently (modulo its length).
            path_id = path_ids[i % Npaths]
            if Noffsets:
                xo, yo = toffsets[i % Noffsets]
                if offset_position == 'data':
                    if Ntransforms:
                        transform = (all_transforms[i % Ntransforms] +
                                     master_transform)
                    else:
                        transform = master_transform
                    # Convert the data-space offset into a screen-space delta.
                    xo, yo = transform.transform_point((xo, yo))
                    xp, yp = transform.transform_point((0, 0))
                    xo = -(xp - xo)
                    yo = -(yp - yo)
            if not (np.isfinite(xo) and np.isfinite(yo)):
                continue
            if Nfacecolors:
                rgbFace = facecolors[i % Nfacecolors]
            if Nedgecolors:
                if Nlinewidths:
                    gc0.set_linewidth(linewidths[i % Nlinewidths])
                if Nlinestyles:
                    gc0.set_dashes(*linestyles[i % Nlinestyles])
                fg = edgecolors[i % Nedgecolors]
                if len(fg) == 4:
                    if fg[3] == 0.0:
                        # Fully transparent edge: suppress the stroke instead.
                        gc0.set_linewidth(0)
                    else:
                        gc0.set_foreground(fg)
                else:
                    gc0.set_foreground(fg)
            if rgbFace is not None and len(rgbFace) == 4:
                if rgbFace[3] == 0:
                    # Fully transparent face: draw no fill at all.
                    rgbFace = None
            gc0.set_antialiased(antialiaseds[i % Naa])
            if Nurls:
                gc0.set_url(urls[i % Nurls])
            yield xo, yo, path_id, gc0, rgbFace
        gc0.restore()
    def get_image_magnification(self):
        """
        Get the factor by which to magnify images passed to :meth:`draw_image`.
        Allows a backend to have images at a different resolution to other
        artists.
        """
        return 1.0
    def draw_image(self, gc, x, y, im):
        """
        Draw the image instance into the current axes;
        *gc*
            a GraphicsContext containing clipping information
        *x*
            is the distance in pixels from the left hand side of the canvas.
        *y*
            the distance from the origin.  That is, if origin is
            upper, y is the distance from top.  If origin is lower, y
            is the distance from bottom
        *im*
            the :class:`matplotlib._image.Image` instance
        """
        raise NotImplementedError
    def option_image_nocomposite(self):
        """
        override this method for renderers that do not necessarily
        want to rescale and composite raster images. (like SVG)
        """
        return False
    def option_scale_image(self):
        """
        override this method for renderers that support arbitrary
        scaling of image (most of the vector backend).
        """
        return False
    def draw_tex(self, gc, x, y, s, prop, angle, ismath='TeX!', mtext=None):
        """
        Draw a TeX string *s* at (*x*, *y*) by converting it to a path
        with usetex mode; delegates to :meth:`_draw_text_as_path`.
        """
        self._draw_text_as_path(gc, x, y, s, prop, angle, ismath="TeX")
    def draw_text(self, gc, x, y, s, prop, angle, ismath=False, mtext=None):
        """
        Draw the text instance
        *gc*
            the :class:`GraphicsContextBase` instance
        *x*
            the x location of the text in display coords
        *y*
            the y location of the text baseline in display coords
        *s*
            the text string
        *prop*
            a :class:`matplotlib.font_manager.FontProperties` instance
        *angle*
            the rotation angle in degrees
        *mtext*
            a :class:`matplotlib.text.Text` instance
        **backend implementers note**
        When you are trying to determine if you have gotten your bounding box
        right (which is what enables the text layout/alignment to work
        properly), it helps to change the line in text.py::
            if 0: bbox_artist(self, renderer)
        to if 1, and then the actual bounding box will be plotted along with
        your text.
        """
        self._draw_text_as_path(gc, x, y, s, prop, angle, ismath)
    def _get_text_path_transform(self, x, y, s, prop, angle, ismath):
        """
        return the text path and transform
        *prop*
            font property
        *s*
            text to be converted
        *usetex*
            If True, use matplotlib usetex mode.
        *ismath*
            If True, use mathtext parser. If "TeX", use *usetex* mode.
        """
        text2path = self._text2path
        fontsize = self.points_to_pixels(prop.get_size_in_points())
        if ismath == "TeX":
            verts, codes = text2path.get_text_path(prop, s, ismath=False,
                                                   usetex=True)
        else:
            verts, codes = text2path.get_text_path(prop, s, ismath=ismath,
                                                   usetex=False)
        path = Path(verts, codes)
        # NOTE(review): truncated pi literal -- degrees-to-radians conversion
        # is only accurate to ~1e-7 here.
        angle = angle / 180. * 3.141592
        if self.flipy():
            # y axis points down: flip about the canvas height.
            transform = Affine2D().scale(fontsize / text2path.FONT_SCALE,
                                         fontsize / text2path.FONT_SCALE)
            transform = transform.rotate(angle).translate(x, self.height - y)
        else:
            transform = Affine2D().scale(fontsize / text2path.FONT_SCALE,
                                         fontsize / text2path.FONT_SCALE)
            transform = transform.rotate(angle).translate(x, y)
        return path, transform
    def _draw_text_as_path(self, gc, x, y, s, prop, angle, ismath):
        """
        draw the text by converting them to paths using textpath module.
        *prop*
            font property
        *s*
            text to be converted
        *usetex*
            If True, use matplotlib usetex mode.
        *ismath*
            If True, use mathtext parser. If "TeX", use *usetex* mode.
        """
        path, transform = self._get_text_path_transform(
            x, y, s, prop, angle, ismath)
        color = gc.get_rgb()
        # Glyph outlines are filled, not stroked.
        gc.set_linewidth(0.0)
        self.draw_path(gc, path, transform, rgbFace=color)
    def get_text_width_height_descent(self, s, prop, ismath):
        """
        get the width and height, and the offset from the bottom to the
        baseline (descent), in display coords of the string s with
        :class:`~matplotlib.font_manager.FontProperties` prop
        """
        if ismath == 'TeX':
            # todo: handle props
            size = prop.get_size_in_points()
            texmanager = self._text2path.get_texmanager()
            fontsize = prop.get_size_in_points()
            w, h, d = texmanager.get_text_width_height_descent(s, fontsize,
                                                               renderer=self)
            return w, h, d
        dpi = self.points_to_pixels(72)
        if ismath:
            dims = self._text2path.mathtext_parser.parse(s, dpi, prop)
            return dims[0:3]  # return width, height, descent
        flags = self._text2path._get_hinting_flag()
        font = self._text2path._get_font(prop)
        size = prop.get_size_in_points()
        font.set_size(size, dpi)
        # the width and height of unrotated string
        font.set_text(s, 0.0, flags=flags)
        w, h = font.get_width_height()
        d = font.get_descent()
        w /= 64.0  # convert from subpixels
        h /= 64.0
        d /= 64.0
        return w, h, d
    def flipy(self):
        """
        Return true if y small numbers are top for renderer Is used
        for drawing text (:mod:`matplotlib.text`) and images
        (:mod:`matplotlib.image`) only
        """
        return True
    def get_canvas_width_height(self):
        'return the canvas width and height in display coords'
        return 1, 1
    def get_texmanager(self):
        """
        return the :class:`matplotlib.texmanager.TexManager` instance
        """
        if self._texmanager is None:
            # Import lazily; TexManager pulls in heavy dependencies.
            from matplotlib.texmanager import TexManager
            self._texmanager = TexManager()
        return self._texmanager
    def new_gc(self):
        """
        Return an instance of a :class:`GraphicsContextBase`
        """
        return GraphicsContextBase()
    def points_to_pixels(self, points):
        """
        Convert points to display units
        *points*
            a float or a numpy array of float
        return points converted to pixels
        You need to override this function (unless your backend
        doesn't have a dpi, eg, postscript or svg).  Some imaging
        systems assume some value for pixels per inch::
            points to pixels = points * pixels_per_inch/72.0 * dpi/72.0
        """
        return points
    def strip_math(self, s):
        # Delegate to cbook helper that removes mathtext markup.
        return cbook.strip_math(s)
    def start_rasterizing(self):
        """
        Used in MixedModeRenderer. Switch to the raster renderer.
        """
        pass
    def stop_rasterizing(self):
        """
        Used in MixedModeRenderer. Switch back to the vector renderer
        and draw the contents of the raster renderer as an image on
        the vector renderer.
        """
        pass
    def start_filter(self):
        """
        Used in AggRenderer. Switch to a temporary renderer for image
        filtering effects.
        """
        pass
    def stop_filter(self, filter_func):
        """
        Used in AggRenderer. Switch back to the original renderer.
        The contents of the temporary renderer is processed with the
        *filter_func* and is drawn on the original renderer as an
        image.
        """
        pass
class GraphicsContextBase:
    """
    An abstract base class that provides color, line styles, etc...
    """
    # a mapping from dash styles to suggested offset, dash pairs
    dashd = {
        'solid': (None, None),
        'dashed': (0, (6.0, 6.0)),
        'dashdot': (0, (3.0, 5.0, 1.0, 5.0)),
        'dotted': (0, (1.0, 3.0)),
    }
    def __init__(self):
        self._alpha = 1.0
        self._forced_alpha = False  # if True, _alpha overrides A from RGBA
        self._antialiased = 1  # use 0,1 not True, False for extension code
        self._capstyle = 'butt'
        self._cliprect = None
        self._clippath = None
        self._dashes = None, None
        self._joinstyle = 'round'
        self._linestyle = 'solid'
        self._linewidth = 1
        self._rgb = (0.0, 0.0, 0.0, 1.0)
        # Color as last given to set_foreground, before alpha adjustment;
        # replayed by set_alpha().
        self._orig_color = (0.0, 0.0, 0.0, 1.0)
        self._hatch = None
        self._url = None
        self._gid = None
        self._snap = None
        self._sketch = None
    def copy_properties(self, gc):
        'Copy properties from gc to self'
        self._alpha = gc._alpha
        self._forced_alpha = gc._forced_alpha
        self._antialiased = gc._antialiased
        self._capstyle = gc._capstyle
        self._cliprect = gc._cliprect
        self._clippath = gc._clippath
        self._dashes = gc._dashes
        self._joinstyle = gc._joinstyle
        self._linestyle = gc._linestyle
        self._linewidth = gc._linewidth
        self._rgb = gc._rgb
        self._orig_color = gc._orig_color
        self._hatch = gc._hatch
        self._url = gc._url
        self._gid = gc._gid
        self._snap = gc._snap
        self._sketch = gc._sketch
    def restore(self):
        """
        Restore the graphics context from the stack - needed only
        for backends that save graphics contexts on a stack
        """
        pass
    def get_alpha(self):
        """
        Return the alpha value used for blending - not supported on
        all backends
        """
        return self._alpha
    def get_antialiased(self):
        "Return true if the object should try to do antialiased rendering"
        return self._antialiased
    def get_capstyle(self):
        """
        Return the capstyle as a string in ('butt', 'round', 'projecting')
        """
        return self._capstyle
    def get_clip_rectangle(self):
        """
        Return the clip rectangle as a :class:`~matplotlib.transforms.Bbox`
        instance
        """
        return self._cliprect
    def get_clip_path(self):
        """
        Return the clip path in the form (path, transform), where path
        is a :class:`~matplotlib.path.Path` instance, and transform is
        an affine transform to apply to the path before clipping.
        """
        if self._clippath is not None:
            return self._clippath.get_transformed_path_and_affine()
        return None, None
    def get_dashes(self):
        """
        Return the dash information as an offset dashlist tuple.
        The dash list is a even size list that gives the ink on, ink
        off in pixels.
        See p107 of to PostScript `BLUEBOOK
        <http://www-cdf.fnal.gov/offline/PostScript/BLUEBOOK.PDF>`_
        for more info.
        Default value is None
        """
        return self._dashes
    def get_forced_alpha(self):
        """
        Return whether the value given by get_alpha() should be used to
        override any other alpha-channel values.
        """
        return self._forced_alpha
    def get_joinstyle(self):
        """
        Return the line join style as one of ('miter', 'round', 'bevel')
        """
        return self._joinstyle
    def get_linestyle(self, style):
        """
        Return the linestyle: one of ('solid', 'dashed', 'dashdot',
        'dotted').
        """
        # NOTE(review): the *style* parameter is unused; kept for
        # interface compatibility with existing callers.
        return self._linestyle
    def get_linewidth(self):
        """
        Return the line width in points as a scalar
        """
        return self._linewidth
    def get_rgb(self):
        """
        returns a tuple of three or four floats from 0-1.
        """
        return self._rgb
    def get_url(self):
        """
        returns a url if one is set, None otherwise
        """
        return self._url
    def get_gid(self):
        """
        Return the object identifier if one is set, None otherwise.
        """
        return self._gid
    def get_snap(self):
        """
        returns the snap setting which may be:
          * True: snap vertices to the nearest pixel center
          * False: leave vertices as-is
          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        return self._snap
    def set_alpha(self, alpha):
        """
        Set the alpha value used for blending - not supported on all backends.
        If ``alpha=None`` (the default), the alpha components of the
        foreground and fill colors will be used to set their respective
        transparencies (where applicable); otherwise, ``alpha`` will override
        them.
        """
        if alpha is not None:
            self._alpha = alpha
            self._forced_alpha = True
        else:
            self._alpha = 1.0
            self._forced_alpha = False
        # Re-apply the original color so _rgb picks up the new alpha mode.
        self.set_foreground(self._orig_color)
    def set_antialiased(self, b):
        """
        True if object should be drawn with antialiased rendering
        """
        # use 0, 1 to make life easier on extension code trying to read the gc
        if b:
            self._antialiased = 1
        else:
            self._antialiased = 0
    def set_capstyle(self, cs):
        """
        Set the capstyle as a string in ('butt', 'round', 'projecting')
        """
        if cs in ('butt', 'round', 'projecting'):
            self._capstyle = cs
        else:
            raise ValueError('Unrecognized cap style.  Found %s' % cs)
    def set_clip_rectangle(self, rectangle):
        """
        Set the clip rectangle with sequence (left, bottom, width, height)
        """
        self._cliprect = rectangle
    def set_clip_path(self, path):
        """
        Set the clip path and transformation.  Path should be a
        :class:`~matplotlib.transforms.TransformedPath` instance.
        """
        assert path is None or isinstance(path, transforms.TransformedPath)
        self._clippath = path
    def set_dashes(self, dash_offset, dash_list):
        """
        Set the dash style for the gc.
        *dash_offset*
            is the offset (usually 0).
        *dash_list*
            specifies the on-off sequence as points.
            ``(None, None)`` specifies a solid line
        """
        if dash_list is not None:
            dl = np.asarray(dash_list)
            if np.any(dl <= 0.0):
                raise ValueError("All values in the dash list must be positive")
        self._dashes = dash_offset, dash_list
    def set_foreground(self, fg, isRGBA=False):
        """
        Set the foreground color.  fg can be a MATLAB format string, a
        html hex color string, an rgb or rgba unit tuple, or a float between 0
        and 1.  In the latter case, grayscale is used.
        If you know fg is rgba, set ``isRGBA=True`` for efficiency.
        """
        self._orig_color = fg
        if self._forced_alpha:
            # Forced alpha replaces whatever alpha the color carries.
            self._rgb = colors.colorConverter.to_rgba(fg, self._alpha)
        elif isRGBA:
            self._rgb = fg
        else:
            self._rgb = colors.colorConverter.to_rgba(fg)
    def set_graylevel(self, frac):
        """
        Set the foreground color to be a gray level with *frac*
        """
        self._orig_color = frac
        self._rgb = (frac, frac, frac, self._alpha)
    def set_joinstyle(self, js):
        """
        Set the join style to be one of ('miter', 'round', 'bevel')
        """
        if js in ('miter', 'round', 'bevel'):
            self._joinstyle = js
        else:
            raise ValueError('Unrecognized join style.  Found %s' % js)
    def set_linewidth(self, w):
        """
        Set the linewidth in points
        """
        self._linewidth = w
    def set_linestyle(self, style):
        """
        Set the linestyle to be one of ('solid', 'dashed', 'dashdot',
        'dotted'). One may specify customized dash styles by providing
        a tuple of (offset, dash pairs). For example, the predefined
        linestyles have following values.:
        'dashed'  : (0, (6.0, 6.0)),
        'dashdot' : (0, (3.0, 5.0, 1.0, 5.0)),
        'dotted'  : (0, (1.0, 3.0)),
        """
        if style in self.dashd:
            offset, dashes = self.dashd[style]
        elif isinstance(style, tuple):
            offset, dashes = style
        else:
            raise ValueError('Unrecognized linestyle: %s' % str(style))
        self._linestyle = style
        # Keep the dash state in sync with the named style.
        self.set_dashes(offset, dashes)
    def set_url(self, url):
        """
        Sets the url for links in compatible backends
        """
        self._url = url
    def set_gid(self, id):
        """
        Sets the id.
        """
        self._gid = id
    def set_snap(self, snap):
        """
        Sets the snap setting which may be:
          * True: snap vertices to the nearest pixel center
          * False: leave vertices as-is
          * None: (auto) If the path contains only rectilinear line
            segments, round to the nearest pixel center
        """
        self._snap = snap
    def set_hatch(self, hatch):
        """
        Sets the hatch style for filling
        """
        self._hatch = hatch
    def get_hatch(self):
        """
        Gets the current hatch style
        """
        return self._hatch
    def get_hatch_path(self, density=6.0):
        """
        Returns a Path for the current hatch.
        """
        if self._hatch is None:
            return None
        return Path.hatch(self._hatch, density)
    def get_sketch_params(self):
        """
        Returns the sketch parameters for the artist.
        Returns
        -------
        sketch_params : tuple or `None`
        A 3-tuple with the following elements:
          * `scale`: The amplitude of the wiggle perpendicular to the
            source line.
          * `length`: The length of the wiggle along the line.
          * `randomness`: The scale factor by which the length is
            shrunken or expanded.
        May return `None` if no sketch parameters were set.
        """
        return self._sketch
    def set_sketch_params(self, scale=None, length=None, randomness=None):
        """
        Sets the the sketch parameters.
        Parameters
        ----------
        scale : float, optional
            The amplitude of the wiggle perpendicular to the source
            line, in pixels.  If scale is `None`, or not provided, no
            sketch filter will be provided.
        length : float, optional
             The length of the wiggle along the line, in pixels
             (default 128.0)
        randomness : float, optional
            The scale factor by which the length is shrunken or
            expanded (default 16.0)
        """
        if scale is None:
            self._sketch = None
        else:
            self._sketch = (scale, length or 128.0, randomness or 16.0)
class TimerBase(object):
    '''
    A base class for providing timer events, useful for things like
    animations.  Backends need to implement a few specific methods in order
    to use their own timing mechanisms so that the timer events are
    integrated into their event loops.
    Mandatory functions that must be implemented:
        * `_timer_start`: Contains backend-specific code for starting
          the timer
        * `_timer_stop`: Contains backend-specific code for stopping
          the timer
    Optional overrides:
        * `_timer_set_single_shot`: Code for setting the timer to
          single shot operating mode, if supported by the timer
          object. If not, the `Timer` class itself will store the flag
          and the `_on_timer` method should be overridden to support
          such behavior.
        * `_timer_set_interval`: Code for setting the interval on the
          timer, if there is a method for doing so on the timer
          object.
        * `_on_timer`: This is the internal function that any timer
          object should call, which will handle the task of running
          all callbacks that have been set.
    Attributes:
        * `interval`: The time between timer events in
          milliseconds. Default is 1000 ms.
        * `single_shot`: Boolean flag indicating whether this timer
          should operate as single shot (run once and then
          stop). Defaults to `False`.
        * `callbacks`: Stores list of (func, args, kwargs) tuples that
          will be called upon timer events. This list can be manipulated
          directly, or the functions `add_callback` and
          `remove_callback` can be used.
    '''
    def __init__(self, interval=None, callbacks=None):
        # Initialize empty callbacks list and setup default settings if
        # necessary.  A shallow copy of *callbacks* is stored so the
        # caller's list is not mutated by add/remove operations.
        if callbacks is None:
            self.callbacks = []
        else:
            self.callbacks = callbacks[:]  # Create a copy
        if interval is None:
            self._interval = 1000
        else:
            self._interval = interval
        self._single = False
        # Default attribute for holding the GUI-specific timer object
        self._timer = None
    def __del__(self):
        'Need to stop timer and possibly disconnect timer.'
        self._timer_stop()
    def start(self, interval=None):
        '''
        Start the timer object. `interval` is optional and will be used
        to reset the timer interval first if provided.
        '''
        if interval is not None:
            self._set_interval(interval)
        self._timer_start()
    def stop(self):
        '''
        Stop the timer.
        '''
        self._timer_stop()
    def _timer_start(self):
        # Backend-specific; overridden by subclasses.
        pass
    def _timer_stop(self):
        # Backend-specific; overridden by subclasses.
        pass
    def _get_interval(self):
        return self._interval
    def _set_interval(self, interval):
        # Force to int since none of the backends actually support fractional
        # milliseconds, and some error or give warnings.
        interval = int(interval)
        self._interval = interval
        self._timer_set_interval()
    interval = property(_get_interval, _set_interval)
    def _get_single_shot(self):
        return self._single
    def _set_single_shot(self, ss=True):
        self._single = ss
        self._timer_set_single_shot()
    single_shot = property(_get_single_shot, _set_single_shot)
    def add_callback(self, func, *args, **kwargs):
        '''
        Register `func` to be called by timer when the event fires. Any
        additional arguments provided will be passed to `func`.
        '''
        self.callbacks.append((func, args, kwargs))
    def remove_callback(self, func, *args, **kwargs):
        '''
        Remove `func` from list of callbacks. `args` and `kwargs` are optional
        and used to distinguish between copies of the same function registered
        to be called with different arguments.
        '''
        if args or kwargs:
            self.callbacks.remove((func, args, kwargs))
        else:
            # No args given: remove the first registration of *func*,
            # whatever arguments it was registered with.
            funcs = [c[0] for c in self.callbacks]
            if func in funcs:
                self.callbacks.pop(funcs.index(func))
    def _timer_set_interval(self):
        'Used to set interval on underlying timer object.'
        pass
    def _timer_set_single_shot(self):
        'Used to set single shot on underlying timer object.'
        pass
    def _on_timer(self):
        '''
        Runs all function that have been registered as callbacks. Functions
        can return False (or 0) if they should not be called any more. If there
        are no callbacks, the timer is automatically stopped.
        '''
        # Iterate over a snapshot of the list: the previous implementation
        # removed entries from self.callbacks while iterating it directly,
        # which silently skipped the callback following each removed one.
        for func, args, kwargs in list(self.callbacks):
            ret = func(*args, **kwargs)
            # docstring above explains why we use `if ret == False` here,
            # instead of `if not ret`.
            if ret == False:
                self.callbacks.remove((func, args, kwargs))
        if len(self.callbacks) == 0:
            self.stop()
class Event:
    """
    Base class for all matplotlib events.

    Additional attributes may be attached as described in
    :meth:`FigureCanvasBase.mpl_connect`.  The attributes defined here,
    with their defaults:
    *name*
        the event name
    *canvas*
        the FigureCanvas instance generating the event
    *guiEvent*
        the GUI event that triggered the matplotlib event
    """
    def __init__(self, name, canvas, guiEvent=None):
        # Record where the event came from so connected handlers can
        # inspect both the matplotlib-level and native GUI-level sources.
        self.name, self.canvas, self.guiEvent = name, canvas, guiEvent
class IdleEvent(Event):
    """
    An event triggered by the GUI backend when it is idle -- useful
    for passive animation
    """
    # No additional state beyond the base Event attributes.
    pass
class DrawEvent(Event):
    """
    An event triggered by a draw operation on the canvas
    In addition to the :class:`Event` attributes, the following event
    attributes are defined:
    *renderer*
        the :class:`RendererBase` instance for the draw event
    """
    def __init__(self, name, canvas, renderer):
        Event.__init__(self, name, canvas)
        # Renderer that performed (or will perform) the draw.
        self.renderer = renderer
class ResizeEvent(Event):
    """
    An event triggered by a canvas resize
    In addition to the :class:`Event` attributes, the following event
    attributes are defined:
    *width*
        width of the canvas in pixels
    *height*
        height of the canvas in pixels
    """
    def __init__(self, name, canvas):
        Event.__init__(self, name, canvas)
        # Snapshot the new canvas size at the moment the event fires.
        self.width, self.height = canvas.get_width_height()
class CloseEvent(Event):
    """
    An event triggered by a figure being closed
    In addition to the :class:`Event` attributes, the following event
    attributes are defined:
    """
    def __init__(self, name, canvas, guiEvent=None):
        # No extra state; exists so close events have a distinct type.
        Event.__init__(self, name, canvas, guiEvent)
class LocationEvent(Event):
    """
    An event that has a screen location
    The following additional attributes are defined and shown with
    their default values.
    In addition to the :class:`Event` attributes, the following
    event attributes are defined:
    *x*
        x position - pixels from left of canvas
    *y*
        y position - pixels from bottom of canvas
    *inaxes*
        the :class:`~matplotlib.axes.Axes` instance if mouse is over axes
    *xdata*
        x coord of mouse in data coords
    *ydata*
        y coord of mouse in data coords
    """
    x = None       # x position - pixels from left of canvas
    y = None       # y position - pixels from bottom of canvas
    inaxes = None  # the Axes instance if mouse is over axes
    xdata = None   # x coord of mouse in data coords
    ydata = None   # y coord of mouse in data coords
    # the last event that was triggered before this one
    lastevent = None
    def __init__(self, name, canvas, x, y, guiEvent=None):
        """
        *x*, *y* in figure coords, 0,0 = bottom, left
        """
        Event.__init__(self, name, canvas, guiEvent=guiEvent)
        self.x = x
        self.y = y
        if x is None or y is None:
            # cannot check if event was in axes if no x,y info
            self.inaxes = None
            self._update_enter_leave()
            return
        # Find all axes containing the mouse
        if self.canvas.mouse_grabber is None:
            axes_list = [a for a in self.canvas.figure.get_axes()
                         if a.in_axes(self)]
        else:
            # A widget has grabbed the mouse; route the event to it only.
            axes_list = [self.canvas.mouse_grabber]
        if len(axes_list) == 0:  # None found
            self.inaxes = None
            self._update_enter_leave()
            return
        elif (len(axes_list) > 1):  # Overlap, get the highest zorder
            axes_list.sort(key=lambda x: x.zorder)
            self.inaxes = axes_list[-1]  # Use the highest zorder
        else:  # Just found one hit
            self.inaxes = axes_list[0]
        try:
            trans = self.inaxes.transData.inverted()
            xdata, ydata = trans.transform_point((x, y))
        except ValueError:
            # Non-invertible transform: no data coordinates available.
            self.xdata = None
            self.ydata = None
        else:
            self.xdata = xdata
            self.ydata = ydata
        self._update_enter_leave()
    def _update_enter_leave(self):
        'process the figure/axes enter leave events'
        if LocationEvent.lastevent is not None:
            last = LocationEvent.lastevent
            if last.inaxes != self.inaxes:
                # process axes enter/leave events
                try:
                    if last.inaxes is not None:
                        last.canvas.callbacks.process('axes_leave_event', last)
                except:
                    pass
                    # See ticket 2901582.
                    # I think this is a valid exception to the rule
                    # against catching all exceptions; if anything goes
                    # wrong, we simply want to move on and process the
                    # current event.
                if self.inaxes is not None:
                    self.canvas.callbacks.process('axes_enter_event', self)
        else:
            # process a figure enter event
            if self.inaxes is not None:
                self.canvas.callbacks.process('axes_enter_event', self)
        # Remember this event so the next one can detect enter/leave.
        LocationEvent.lastevent = self
    
class MouseEvent(LocationEvent):
    """
    A mouse event ('button_press_event',
                   'button_release_event',
                   'scroll_event',
                   'motion_notify_event').

    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes, the following attributes are defined:

        *button*
            button pressed None, 1, 2, 3, 'up', 'down' (up and down are used
            for scroll events)

        *key*
            the key depressed when the mouse event triggered (see
            :class:`KeyEvent`)

        *step*
            number of scroll steps (positive for 'up', negative for 'down')

    Example usage::

        def on_press(event):
            print('you pressed', event.button, event.xdata, event.ydata)

        cid = fig.canvas.mpl_connect('button_press_event', on_press)
    """
    # Class-level defaults; overwritten per instance in __init__.
    x = None         # x position - pixels from left of canvas
    y = None         # y position - pixels from bottom of canvas
    button = None    # button pressed None, 1, 2, 3
    dblclick = None  # whether or not the event is the result of a double click
    inaxes = None    # the Axes instance if mouse is over an axes
    xdata = None     # x coord of mouse in data coords
    ydata = None     # y coord of mouse in data coords
    step = None      # scroll steps for scroll events

    def __init__(self, name, canvas, x, y, button=None, key=None,
                 step=0, dblclick=False, guiEvent=None):
        """
        x, y in figure coords, 0,0 = bottom, left
        button pressed None, 1, 2, 3, 'up', 'down'
        """
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
        self.button = button
        self.key = key
        self.step = step
        self.dblclick = dblclick

    def __str__(self):
        # Bug fix: the previous "%d" conversions raised TypeError whenever
        # a field was not an int -- x/y are None for events that carry no
        # position, and button is None or the string 'up'/'down' for
        # scroll events.  "%s" renders ints identically, so the output is
        # unchanged for plain button clicks and can no longer raise.
        return ("MPL MouseEvent: xy=(%s,%s) xydata=(%s,%s) button=%s "
                "dblclick=%s inaxes=%s") % (self.x, self.y, self.xdata,
                                            self.ydata, self.button,
                                            self.dblclick, self.inaxes)
class PickEvent(Event):
    """
    A pick event, fired when the user picks a location on the canvas
    sufficiently close to an artist.

    Attrs: all the :class:`Event` attributes plus

        *mouseevent*
            the :class:`MouseEvent` that generated the pick

        *artist*
            the :class:`~matplotlib.artist.Artist` picked

        other
            extra class dependent attrs -- eg a
            :class:`~matplotlib.lines.Line2D` pick may define different
            extra attributes than a
            :class:`~matplotlib.collections.PatchCollection` pick event

    Example usage::

        line, = ax.plot(rand(100), 'o', picker=5)  # 5 points tolerance

        def on_pick(event):
            thisline = event.artist
            xdata, ydata = thisline.get_data()
            ind = event.ind
            print('on pick line:', zip(xdata[ind], ydata[ind]))

        cid = fig.canvas.mpl_connect('pick_event', on_pick)
    """
    def __init__(self, name, canvas, mouseevent, artist,
                 guiEvent=None, **kwargs):
        Event.__init__(self, name, canvas, guiEvent)
        self.mouseevent = mouseevent
        self.artist = artist
        # Attach any extra, artist-specific attributes (e.g. 'ind') last,
        # so explicitly-passed extras take precedence.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class KeyEvent(LocationEvent):
    """
    A key event (key press, key release).

    Attach additional attributes as defined in
    :meth:`FigureCanvasBase.mpl_connect`.

    In addition to the :class:`Event` and :class:`LocationEvent`
    attributes, the following attributes are defined:

        *key*
            the key(s) pressed. Could be **None**, a single case sensitive
            ascii character ("g", "G", "#", etc.), a special key
            ("control", "shift", "f1", "up", etc.) or a
            combination of the above (e.g., "ctrl+alt+g", "ctrl+alt+G").

    .. note::

        Modifier keys will be prefixed to the pressed key and will be in
        the order "ctrl", "alt", "super". The exception to this rule is
        when the pressed key is itself a modifier key, therefore "ctrl+alt"
        and "alt+control" can both be valid key values.

    Example usage::

        def on_key(event):
            print('you pressed', event.key, event.xdata, event.ydata)

        cid = fig.canvas.mpl_connect('key_press_event', on_key)
    """
    def __init__(self, name, canvas, key, x=0, y=0, guiEvent=None):
        self.key = key
        LocationEvent.__init__(self, name, canvas, x, y, guiEvent=guiEvent)
class FigureCanvasBase(object):
    """
    The canvas the figure renders into.

    Public attributes

        *figure*
            A :class:`matplotlib.figure.Figure` instance
    """
    # Names of every event this canvas can fire through its
    # CallbackRegistry (see mpl_connect).
    events = [
        'resize_event',
        'draw_event',
        'key_press_event',
        'key_release_event',
        'button_press_event',
        'button_release_event',
        'scroll_event',
        'motion_notify_event',
        'pick_event',
        'idle_event',
        'figure_enter_event',
        'figure_leave_event',
        'axes_enter_event',
        'axes_leave_event',
        'close_event'
    ]

    supports_blit = True

    def __init__(self, figure):
        figure.set_canvas(self)
        self.figure = figure
        # a dictionary from event name to a dictionary that maps cid->func
        self.callbacks = cbook.CallbackRegistry()
        self.widgetlock = widgets.LockDraw()
        self._button = None  # the button pressed
        self._key = None  # the key pressed
        self._lastx, self._lasty = None, None
        # Route button presses and scrolls into the pick machinery.
        self.button_pick_id = self.mpl_connect('button_press_event', self.pick)
        self.scroll_pick_id = self.mpl_connect('scroll_event', self.pick)
        self.mouse_grabber = None  # the axes currently grabbing mouse
        self.toolbar = None  # NavigationToolbar2 will set me
        self._is_saving = False
        # Debugging hooks, deliberately disabled; flip to True to have
        # artists under the cursor highlighted while developing.
        if False:
            ## highlight the artists that are hit
            self.mpl_connect('motion_notify_event', self.onHilite)
            ## delete the artists that are clicked on
            #self.mpl_disconnect(self.button_pick_id)
            #self.mpl_connect('button_press_event',self.onRemove)

    def is_saving(self):
        """
        Returns `True` when the renderer is in the process of saving
        to a file, rather than rendering for an on-screen buffer.
        """
        return self._is_saving

    def onRemove(self, ev):
        """
        Mouse event processor which removes the top artist
        under the cursor.  Connect this to the 'mouse_press_event'
        using::

            canvas.mpl_connect('mouse_press_event',canvas.onRemove)
        """
        def sort_artists(artists):
            # This depends on stable sort and artists returned
            # from get_children in z order.
            L = [(h.zorder, h) for h in artists]
            L.sort()
            return [h for zorder, h in L]

        # Find the top artist under the cursor
        under = sort_artists(self.figure.hitlist(ev))
        h = None
        if under:
            h = under[-1]

        # Try deleting that artist, or its parent if you
        # can't delete the artist
        while h:
            if h.remove():
                self.draw_idle()
                break
            parent = None
            for p in under:
                if h in p.get_children():
                    parent = p
                    break
            h = parent

    def onHilite(self, ev):
        """
        Mouse event processor which highlights the artists
        under the cursor.  Connect this to the 'motion_notify_event'
        using::

            canvas.mpl_connect('motion_notify_event',canvas.onHilite)
        """
        # _active maps artist -> saved colour(s), so leave events can
        # restore what enter events overwrote.
        if not hasattr(self, '_active'):
            self._active = dict()

        under = self.figure.hitlist(ev)
        enter = [a for a in under if a not in self._active]
        leave = [a for a in self._active if a not in under]
        #print "within:"," ".join([str(x) for x in under])
        #print "entering:",[str(a) for a in enter]
        #print "leaving:",[str(a) for a in leave]
        # On leave restore the captured colour
        for a in leave:
            if hasattr(a, 'get_color'):
                a.set_color(self._active[a])
            elif hasattr(a, 'get_edgecolor'):
                a.set_edgecolor(self._active[a][0])
                a.set_facecolor(self._active[a][1])
            del self._active[a]
        # On enter, capture the color and repaint the artist
        # with the highlight colour.  Capturing colour has to
        # be done first in case the parent recolouring affects
        # the child.
        for a in enter:
            if hasattr(a, 'get_color'):
                self._active[a] = a.get_color()
            elif hasattr(a, 'get_edgecolor'):
                self._active[a] = (a.get_edgecolor(), a.get_facecolor())
            else:
                self._active[a] = None
        for a in enter:
            if hasattr(a, 'get_color'):
                a.set_color('red')
            elif hasattr(a, 'get_edgecolor'):
                a.set_edgecolor('red')
                a.set_facecolor('lightblue')
            else:
                self._active[a] = None
        self.draw_idle()

    def pick(self, mouseevent):
        # Skip picking while a widget holds the lock (e.g. during pan/zoom).
        if not self.widgetlock.locked():
            self.figure.pick(mouseevent)

    def blit(self, bbox=None):
        """
        blit the canvas in bbox (default entire canvas)
        """
        pass

    def resize(self, w, h):
        """
        set the canvas size in pixels
        """
        pass

    def draw_event(self, renderer):
        """
        Call all functions connected to the 'draw_event' with a
        :class:`DrawEvent`.
        """
        s = 'draw_event'
        event = DrawEvent(s, self, renderer)
        self.callbacks.process(s, event)

    def resize_event(self):
        """
        Call all functions connected to the 'resize_event' with a
        :class:`ResizeEvent`.
        """
        s = 'resize_event'
        event = ResizeEvent(s, self)
        self.callbacks.process(s, event)

    def close_event(self, guiEvent=None):
        """
        Call all functions connected to the 'close_event' with a
        :class:`CloseEvent`.
        """
        s = 'close_event'
        try:
            event = CloseEvent(s, self, guiEvent=guiEvent)
            self.callbacks.process(s, event)
        except (TypeError, AttributeError):
            pass
            # Suppress the TypeError when the python session is being killed.
            # It may be that a better solution would be a mechanism to
            # disconnect all callbacks upon shutdown.
            # AttributeError occurs on OSX with qt4agg upon exiting
            # with an open window; 'callbacks' attribute no longer exists.

    def key_press_event(self, key, guiEvent=None):
        """
        Call all functions connected to the 'key_press_event' with a
        :class:`KeyEvent`.
        """
        # Remember the key so concurrent mouse events can report it.
        self._key = key
        s = 'key_press_event'
        event = KeyEvent(
            s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
        self.callbacks.process(s, event)

    def key_release_event(self, key, guiEvent=None):
        """
        Call all functions connected to the 'key_release_event' with a
        :class:`KeyEvent`.
        """
        s = 'key_release_event'
        event = KeyEvent(
            s, self, key, self._lastx, self._lasty, guiEvent=guiEvent)
        self.callbacks.process(s, event)
        self._key = None

    def pick_event(self, mouseevent, artist, **kwargs):
        """
        This method will be called by artists who are picked and will
        fire off :class:`PickEvent` callbacks registered listeners
        """
        s = 'pick_event'
        event = PickEvent(s, self, mouseevent, artist, **kwargs)
        self.callbacks.process(s, event)

    def scroll_event(self, x, y, step, guiEvent=None):
        """
        Backend derived classes should call this function on any
        scroll wheel event.  x,y are the canvas coords: 0,0 is lower,
        left.  button and key are as defined in MouseEvent.

        Call all functions connected to the 'scroll_event' with a
        :class:`MouseEvent` instance.
        """
        # Encode scroll direction as a pseudo-button name.
        if step >= 0:
            self._button = 'up'
        else:
            self._button = 'down'
        s = 'scroll_event'
        mouseevent = MouseEvent(s, self, x, y, self._button, self._key,
                                step=step, guiEvent=guiEvent)
        self.callbacks.process(s, mouseevent)

    def button_press_event(self, x, y, button, dblclick=False, guiEvent=None):
        """
        Backend derived classes should call this function on any mouse
        button press.  x,y are the canvas coords: 0,0 is lower, left.
        button and key are as defined in :class:`MouseEvent`.

        Call all functions connected to the 'button_press_event' with a
        :class:`MouseEvent` instance.
        """
        self._button = button
        s = 'button_press_event'
        mouseevent = MouseEvent(s, self, x, y, button, self._key,
                                dblclick=dblclick, guiEvent=guiEvent)
        self.callbacks.process(s, mouseevent)

    def button_release_event(self, x, y, button, guiEvent=None):
        """
        Backend derived classes should call this function on any mouse
        button release.

        *x*
            the canvas coordinates where 0=left

        *y*
            the canvas coordinates where 0=bottom

        *guiEvent*
            the native UI event that generated the mpl event

        Call all functions connected to the 'button_release_event' with a
        :class:`MouseEvent` instance.
        """
        s = 'button_release_event'
        event = MouseEvent(s, self, x, y, button, self._key, guiEvent=guiEvent)
        self.callbacks.process(s, event)
        self._button = None

    def motion_notify_event(self, x, y, guiEvent=None):
        """
        Backend derived classes should call this function on any
        motion-notify-event.

        *x*
            the canvas coordinates where 0=left

        *y*
            the canvas coordinates where 0=bottom

        *guiEvent*
            the native UI event that generated the mpl event

        Call all functions connected to the 'motion_notify_event' with a
        :class:`MouseEvent` instance.
        """
        # Track the latest position so key events can carry a location.
        self._lastx, self._lasty = x, y
        s = 'motion_notify_event'
        event = MouseEvent(s, self, x, y, self._button, self._key,
                           guiEvent=guiEvent)
        self.callbacks.process(s, event)

    def leave_notify_event(self, guiEvent=None):
        """
        Backend derived classes should call this function when leaving
        canvas

        *guiEvent*
            the native UI event that generated the mpl event
        """
        self.callbacks.process('figure_leave_event', LocationEvent.lastevent)
        # Reset enter/leave tracking state.
        LocationEvent.lastevent = None
        self._lastx, self._lasty = None, None

    def enter_notify_event(self, guiEvent=None, xy=None):
        """
        Backend derived classes should call this function when entering
        canvas

        *guiEvent*
            the native UI event that generated the mpl event

        *xy*
            the coordinate location of the pointer when the canvas is
            entered
        """
        if xy is not None:
            x, y = xy
            self._lastx, self._lasty = x, y

        event = Event('figure_enter_event', self, guiEvent)
        self.callbacks.process('figure_enter_event', event)

    def idle_event(self, guiEvent=None):
        """Called when GUI is idle."""
        s = 'idle_event'
        event = IdleEvent(s, self, guiEvent=guiEvent)
        self.callbacks.process(s, event)

    def grab_mouse(self, ax):
        """
        Set the child axes which are currently grabbing the mouse events.
        Usually called by the widgets themselves.
        It is an error to call this if the mouse is already grabbed by
        another axes.
        """
        if self.mouse_grabber not in (None, ax):
            raise RuntimeError('two different attempted to grab mouse input')
        self.mouse_grabber = ax

    def release_mouse(self, ax):
        """
        Release the mouse grab held by the axes, ax.
        Usually called by the widgets.
        It is ok to call this even if you ax doesn't have the mouse
        grab currently.
        """
        if self.mouse_grabber is ax:
            self.mouse_grabber = None

    def draw(self, *args, **kwargs):
        """
        Render the :class:`~matplotlib.figure.Figure`
        """
        pass

    def draw_idle(self, *args, **kwargs):
        """
        :meth:`draw` only if idle; defaults to draw but backends can overrride
        """
        self.draw(*args, **kwargs)

    def draw_cursor(self, event):
        """
        Draw a cursor in the event.axes if inaxes is not None.  Use
        native GUI drawing for efficiency if possible
        """
        pass

    def get_width_height(self):
        """
        Return the figure width and height in points or pixels
        (depending on the backend), truncated to integers
        """
        return int(self.figure.bbox.width), int(self.figure.bbox.height)

    # Mapping of file extension -> human-readable format name for savefig.
    filetypes = {
        'eps': 'Encapsulated Postscript',
        'pdf': 'Portable Document Format',
        'pgf': 'LaTeX PGF Figure',
        'png': 'Portable Network Graphics',
        'ps': 'Postscript',
        'raw': 'Raw RGBA bitmap',
        'rgba': 'Raw RGBA bitmap',
        'svg': 'Scalable Vector Graphics',
        'svgz': 'Scalable Vector Graphics'}

    # All of these print_* functions do a lazy import because
    #   a) otherwise we'd have cyclical imports, since all of these
    #      classes inherit from FigureCanvasBase
    #   b) so we don't import a bunch of stuff the user may never use
    # TODO: these print_* throw ImportErrror when called from
    # compare_images_decorator (decorators.py line 112)
    # if the backend has not already been loaded earlier on.  Simple trigger:
    # >>> import matplotlib.tests.test_spines
    # >>> list(matplotlib.tests.test_spines.test_spines_axes_positions())[0][0]()

    def print_eps(self, *args, **kwargs):
        from .backends.backend_ps import FigureCanvasPS  # lazy import
        ps = self.switch_backends(FigureCanvasPS)
        return ps.print_eps(*args, **kwargs)

    def print_pdf(self, *args, **kwargs):
        from .backends.backend_pdf import FigureCanvasPdf  # lazy import
        pdf = self.switch_backends(FigureCanvasPdf)
        return pdf.print_pdf(*args, **kwargs)

    def print_pgf(self, *args, **kwargs):
        from .backends.backend_pgf import FigureCanvasPgf  # lazy import
        pgf = self.switch_backends(FigureCanvasPgf)
        return pgf.print_pgf(*args, **kwargs)

    def print_png(self, *args, **kwargs):
        from .backends.backend_agg import FigureCanvasAgg  # lazy import
        agg = self.switch_backends(FigureCanvasAgg)
        return agg.print_png(*args, **kwargs)

    def print_ps(self, *args, **kwargs):
        from .backends.backend_ps import FigureCanvasPS  # lazy import
        ps = self.switch_backends(FigureCanvasPS)
        return ps.print_ps(*args, **kwargs)

    def print_raw(self, *args, **kwargs):
        from .backends.backend_agg import FigureCanvasAgg  # lazy import
        agg = self.switch_backends(FigureCanvasAgg)
        return agg.print_raw(*args, **kwargs)
    print_bmp = print_rgba = print_raw

    def print_svg(self, *args, **kwargs):
        from .backends.backend_svg import FigureCanvasSVG  # lazy import
        svg = self.switch_backends(FigureCanvasSVG)
        return svg.print_svg(*args, **kwargs)

    def print_svgz(self, *args, **kwargs):
        from .backends.backend_svg import FigureCanvasSVG  # lazy import
        svg = self.switch_backends(FigureCanvasSVG)
        return svg.print_svgz(*args, **kwargs)

    # JPEG and TIFF output need PIL, so only register them when it is
    # available (assumes _has_pil is set at module level -- it is not
    # visible in this chunk).
    if _has_pil:
        filetypes['jpg'] = 'Joint Photographic Experts Group'
        filetypes['jpeg'] = filetypes['jpg']

        def print_jpg(self, filename_or_obj, *args, **kwargs):
            """
            Supported kwargs:

            *quality*: The image quality, on a scale from 1 (worst) to
                95 (best). The default is 95, if not given in the
                matplotlibrc file in the savefig.jpeg_quality parameter.
                Values above 95 should be avoided; 100 completely
                disables the JPEG quantization stage.

            *optimize*: If present, indicates that the encoder should
                make an extra pass over the image in order to select
                optimal encoder settings.

            *progressive*: If present, indicates that this image
                should be stored as a progressive JPEG file.
            """
            from .backends.backend_agg import FigureCanvasAgg  # lazy import
            agg = self.switch_backends(FigureCanvasAgg)
            buf, size = agg.print_to_buffer()
            if kwargs.pop("dryrun", False):
                return
            image = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
            options = cbook.restrict_dict(kwargs, ['quality', 'optimize',
                                                   'progressive'])

            if 'quality' not in options:
                options['quality'] = rcParams['savefig.jpeg_quality']

            return image.save(filename_or_obj, format='jpeg', **options)
        print_jpeg = print_jpg

        filetypes['tif'] = filetypes['tiff'] = 'Tagged Image File Format'

        def print_tif(self, filename_or_obj, *args, **kwargs):
            from .backends.backend_agg import FigureCanvasAgg  # lazy import
            agg = self.switch_backends(FigureCanvasAgg)
            buf, size = agg.print_to_buffer()
            if kwargs.pop("dryrun", False):
                return
            image = Image.frombuffer('RGBA', size, buf, 'raw', 'RGBA', 0, 1)
            dpi = (self.figure.dpi, self.figure.dpi)
            return image.save(filename_or_obj, format='tiff',
                              dpi=dpi)
        print_tiff = print_tif

    def get_supported_filetypes(self):
        """Return dict of savefig file formats supported by this backend"""
        return self.filetypes

    def get_supported_filetypes_grouped(self):
        """Return a dict of savefig file formats supported by this backend,
        where the keys are a file type name, such as 'Joint Photographic
        Experts Group', and the values are a list of filename extensions used
        for that filetype, such as ['jpg', 'jpeg']."""
        groupings = {}
        for ext, name in self.filetypes.items():
            groupings.setdefault(name, []).append(ext)
            groupings[name].sort()
        return groupings

    def _get_print_method(self, format):
        # Resolve a format string to the bound print_<format> method,
        # consulting externally-registered backends first.
        method_name = 'print_%s' % format

        # check for registered backends
        if format in _backend_d:
            backend_class = _backend_d[format]

            def _print_method(*args, **kwargs):
                backend = self.switch_backends(backend_class)
                print_method = getattr(backend, method_name)
                return print_method(*args, **kwargs)

            return _print_method

        formats = self.get_supported_filetypes()
        if (format not in formats or not hasattr(self, method_name)):
            formats = sorted(formats)
            raise ValueError(
                'Format "%s" is not supported.\n'
                'Supported formats: '
                '%s.' % (format, ', '.join(formats)))

        return getattr(self, method_name)

    def print_figure(self, filename, dpi=None, facecolor='w', edgecolor='w',
                     orientation='portrait', format=None, **kwargs):
        """
        Render the figure to hardcopy. Set the figure patch face and edge
        colors.  This is useful because some of the GUIs have a gray figure
        face color background and you'll probably want to override this on
        hardcopy.

        Arguments are:

        *filename*
            can also be a file object on image backends

        *orientation*
            only currently applies to PostScript printing.

        *dpi*
            the dots per inch to save the figure in; if None, use savefig.dpi

        *facecolor*
            the facecolor of the figure

        *edgecolor*
            the edgecolor of the figure

        *orientation*
            landscape' | 'portrait' (not supported on all backends)

        *format*
            when set, forcibly set the file format to save to

        *bbox_inches*
            Bbox in inches. Only the given portion of the figure is
            saved. If 'tight', try to figure out the tight bbox of
            the figure. If None, use savefig.bbox

        *pad_inches*
            Amount of padding around the figure when bbox_inches is
            'tight'. If None, use savefig.pad_inches

        *bbox_extra_artists*
            A list of extra artists that will be considered when the
            tight bbox is calculated.
        """
        if format is None:
            # get format from filename, or from backend's default filetype
            if cbook.is_string_like(filename):
                format = os.path.splitext(filename)[1][1:]
            if format is None or format == '':
                format = self.get_default_filetype()
                if cbook.is_string_like(filename):
                    filename = filename.rstrip('.') + '.' + format

        format = format.lower()

        print_method = self._get_print_method(format)

        if dpi is None:
            dpi = rcParams['savefig.dpi']

        # Save state we are about to clobber so it can be restored below.
        origDPI = self.figure.dpi
        origfacecolor = self.figure.get_facecolor()
        origedgecolor = self.figure.get_edgecolor()

        self.figure.dpi = dpi
        self.figure.set_facecolor(facecolor)
        self.figure.set_edgecolor(edgecolor)

        bbox_inches = kwargs.pop("bbox_inches", None)
        if bbox_inches is None:
            bbox_inches = rcParams['savefig.bbox']

        if bbox_inches:
            # call adjust_bbox to save only the given area
            if bbox_inches == "tight":
                # when bbox_inches == "tight", it saves the figure
                # twice.  The first save command is just to estimate
                # the bounding box of the figure. A stringIO object is
                # used as a temporary file object, but it causes a
                # problem for some backends (ps backend with
                # usetex=True) if they expect a filename, not a
                # file-like object. As I think it is best to change
                # the backend to support file-like object, i'm going
                # to leave it as it is. However, a better solution
                # than stringIO seems to be needed. -JJL
                #result = getattr(self, method_name)
                result = print_method(
                    io.BytesIO(),
                    dpi=dpi,
                    facecolor=facecolor,
                    edgecolor=edgecolor,
                    orientation=orientation,
                    dryrun=True,
                    **kwargs)
                renderer = self.figure._cachedRenderer
                bbox_inches = self.figure.get_tightbbox(renderer)

                bbox_artists = kwargs.pop("bbox_extra_artists", None)
                if bbox_artists is None:
                    bbox_artists = self.figure.get_default_bbox_extra_artists()

                # Grow the tight bbox to cover the extra artists,
                # intersected with their clip boxes/paths.
                bbox_filtered = []
                for a in bbox_artists:
                    bbox = a.get_window_extent(renderer)
                    if a.get_clip_on():
                        clip_box = a.get_clip_box()
                        if clip_box is not None:
                            bbox = Bbox.intersection(bbox, clip_box)
                        clip_path = a.get_clip_path()
                        if clip_path is not None and bbox is not None:
                            clip_path = clip_path.get_fully_transformed_path()
                            bbox = Bbox.intersection(bbox,
                                                     clip_path.get_extents())
                    if bbox is not None and (bbox.width != 0 or
                                             bbox.height != 0):
                        bbox_filtered.append(bbox)

                if bbox_filtered:
                    _bbox = Bbox.union(bbox_filtered)
                    trans = Affine2D().scale(1.0 / self.figure.dpi)
                    bbox_extra = TransformedBbox(_bbox, trans)
                    bbox_inches = Bbox.union([bbox_inches, bbox_extra])

                pad = kwargs.pop("pad_inches", None)
                if pad is None:
                    pad = rcParams['savefig.pad_inches']

                bbox_inches = bbox_inches.padded(pad)

            restore_bbox = tight_bbox.adjust_bbox(self.figure, format,
                                                  bbox_inches)

            _bbox_inches_restore = (bbox_inches, restore_bbox)
        else:
            _bbox_inches_restore = None

        self._is_saving = True
        try:
            #result = getattr(self, method_name)(
            result = print_method(
                filename,
                dpi=dpi,
                facecolor=facecolor,
                edgecolor=edgecolor,
                orientation=orientation,
                bbox_inches_restore=_bbox_inches_restore,
                **kwargs)
        finally:
            # Always restore the figure to its pre-save state.
            if bbox_inches and restore_bbox:
                restore_bbox()

            self.figure.dpi = origDPI
            self.figure.set_facecolor(origfacecolor)
            self.figure.set_edgecolor(origedgecolor)
            self.figure.set_canvas(self)
            self._is_saving = False
            #self.figure.canvas.draw() ## seems superfluous
        return result

    def get_default_filetype(self):
        """
        Get the default savefig file format as specified in rcParam
        ``savefig.format``. Returned string excludes period. Overridden
        in backends that only support a single file type.
        """
        return rcParams['savefig.format']

    def get_window_title(self):
        """
        Get the title text of the window containing the figure.
        Return None if there is no window (eg, a PS backend).
        """
        if hasattr(self, "manager"):
            return self.manager.get_window_title()

    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure.  Note that
        this has no effect if there is no window (eg, a PS backend).
        """
        if hasattr(self, "manager"):
            self.manager.set_window_title(title)

    def get_default_filename(self):
        """
        Return a string, which includes extension, suitable for use as
        a default filename.
        """
        default_filename = self.get_window_title() or 'image'
        default_filename = default_filename.lower().replace(' ', '_')
        return default_filename + '.' + self.get_default_filetype()

    def switch_backends(self, FigureCanvasClass):
        """
        Instantiate an instance of FigureCanvasClass

        This is used for backend switching, eg, to instantiate a
        FigureCanvasPS from a FigureCanvasGTK.  Note, deep copying is
        not done, so any changes to one of the instances (eg, setting
        figure size or line props), will be reflected in the other
        """
        newCanvas = FigureCanvasClass(self.figure)
        newCanvas._is_saving = self._is_saving
        return newCanvas

    def mpl_connect(self, s, func):
        """
        Connect event with string *s* to *func*.  The signature of *func* is::

          def func(event)

        where event is a :class:`matplotlib.backend_bases.Event`.  The
        following events are recognized

        - 'button_press_event'
        - 'button_release_event'
        - 'draw_event'
        - 'key_press_event'
        - 'key_release_event'
        - 'motion_notify_event'
        - 'pick_event'
        - 'resize_event'
        - 'scroll_event'
        - 'figure_enter_event',
        - 'figure_leave_event',
        - 'axes_enter_event',
        - 'axes_leave_event'
        - 'close_event'

        For the location events (button and key press/release), if the
        mouse is over the axes, the variable ``event.inaxes`` will be
        set to the :class:`~matplotlib.axes.Axes` the event occurs is
        over, and additionally, the variables ``event.xdata`` and
        ``event.ydata`` will be defined.  This is the mouse location
        in data coords.  See
        :class:`~matplotlib.backend_bases.KeyEvent` and
        :class:`~matplotlib.backend_bases.MouseEvent` for more info.

        Return value is a connection id that can be used with
        :meth:`~matplotlib.backend_bases.Event.mpl_disconnect`.

        Example usage::

            def on_press(event):
                print('you pressed', event.button, event.xdata, event.ydata)

            cid = canvas.mpl_connect('button_press_event', on_press)
        """
        return self.callbacks.connect(s, func)

    def mpl_disconnect(self, cid):
        """
        Disconnect callback id cid

        Example usage::

            cid = canvas.mpl_connect('button_press_event', on_press)
            #...later
            canvas.mpl_disconnect(cid)
        """
        return self.callbacks.disconnect(cid)

    def new_timer(self, *args, **kwargs):
        """
        Creates a new backend-specific subclass of
        :class:`backend_bases.Timer`.  This is useful for getting periodic
        events through the backend's native event loop.  Implemented only
        for backends with GUIs.

        optional arguments:

        *interval*
          Timer interval in milliseconds

        *callbacks*
          Sequence of (func, args, kwargs) where func(*args, **kwargs) will
          be executed by the timer every *interval*.
        """
        return TimerBase(*args, **kwargs)

    def flush_events(self):
        """
        Flush the GUI events for the figure. Implemented only for
        backends with GUIs.
        """
        raise NotImplementedError

    def start_event_loop(self, timeout):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.

        This is implemented only for backends with GUIs.
        """
        raise NotImplementedError

    def stop_event_loop(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.

        This is implemented only for backends with GUIs.
        """
        raise NotImplementedError

    def start_event_loop_default(self, timeout=0):
        """
        Start an event loop.  This is used to start a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.  This should not be
        confused with the main GUI event loop, which is always running
        and has nothing to do with this.

        This function provides default event loop functionality based
        on time.sleep that is meant to be used until event loop
        functions for each of the GUI backends can be written.  As
        such, it throws a deprecated warning.

        Call signature::

            start_event_loop_default(self,timeout=0)

        This call blocks until a callback function triggers
        stop_event_loop() or *timeout* is reached.  If *timeout* is
        <=0, never timeout.
        """
        # NOTE(review): 'str' shadows the builtin name here; harmless in
        # this scope but worth renaming in a future behavior-changing pass.
        str = "Using default event loop until function specific"
        str += " to this GUI is implemented"
        warnings.warn(str, mplDeprecation)

        if timeout <= 0:
            timeout = np.inf
        timestep = 0.01
        counter = 0
        self._looping = True
        # Poll until stop_event_loop_default flips _looping or we time out.
        while self._looping and counter * timestep < timeout:
            self.flush_events()
            time.sleep(timestep)
            counter += 1

    def stop_event_loop_default(self):
        """
        Stop an event loop.  This is used to stop a blocking event
        loop so that interactive functions, such as ginput and
        waitforbuttonpress, can wait for events.

        Call signature::

            stop_event_loop_default(self)
        """
        self._looping = False
def key_press_handler(event, canvas, toolbar=None):
    """
    Implement the default mpl key bindings for the canvas and toolbar
    described at :ref:`key-event-handling`

    *event*
        a :class:`KeyEvent` instance
    *canvas*
        a :class:`FigureCanvasBase` instance
    *toolbar*
        a :class:`NavigationToolbar2` instance
    """
    # these bindings happen whether you are over an axes or not

    if event.key is None:
        return

    # Load key-mappings from your matplotlibrc file.
    fullscreen_keys = rcParams['keymap.fullscreen']
    home_keys = rcParams['keymap.home']
    back_keys = rcParams['keymap.back']
    forward_keys = rcParams['keymap.forward']
    pan_keys = rcParams['keymap.pan']
    zoom_keys = rcParams['keymap.zoom']
    save_keys = rcParams['keymap.save']
    quit_keys = rcParams['keymap.quit']
    grid_keys = rcParams['keymap.grid']
    toggle_yscale_keys = rcParams['keymap.yscale']
    toggle_xscale_keys = rcParams['keymap.xscale']
    # Renamed from 'all' -- the original name shadowed the builtin all().
    all_keys = rcParams['keymap.all_axes']

    # toggle fullscreen mode (default key 'f')
    if event.key in fullscreen_keys:
        canvas.manager.full_screen_toggle()

    # quit the figure (default key 'ctrl+w')
    if event.key in quit_keys:
        Gcf.destroy_fig(canvas.figure)

    if toolbar is not None:
        # home or reset mnemonic  (default key 'h', 'home' and 'r')
        if event.key in home_keys:
            toolbar.home()
        # forward / backward keys to enable left handed quick navigation
        # (default key for backward: 'left', 'backspace' and 'c')
        elif event.key in back_keys:
            toolbar.back()
        # (default key for forward: 'right' and 'v')
        elif event.key in forward_keys:
            toolbar.forward()
        # pan mnemonic (default key 'p')
        elif event.key in pan_keys:
            toolbar.pan()
        # zoom mnemonic (default key 'o')
        elif event.key in zoom_keys:
            toolbar.zoom()
        # saving current figure (default key 's')
        elif event.key in save_keys:
            toolbar.save_figure()

    if event.inaxes is None:
        return

    # these bindings require the mouse to be over an axes to trigger

    # switching on/off a grid in current axes (default key 'g')
    if event.key in grid_keys:
        event.inaxes.grid()
        canvas.draw()
    # toggle scaling of y-axes between 'log and 'linear' (default key 'l')
    elif event.key in toggle_yscale_keys:
        ax = event.inaxes
        scale = ax.get_yscale()
        if scale == 'log':
            ax.set_yscale('linear')
            ax.figure.canvas.draw()
        elif scale == 'linear':
            ax.set_yscale('log')
            ax.figure.canvas.draw()
    # toggle scaling of x-axes between 'log and 'linear' (default key 'k')
    elif event.key in toggle_xscale_keys:
        ax = event.inaxes
        scalex = ax.get_xscale()
        if scalex == 'log':
            ax.set_xscale('linear')
            ax.figure.canvas.draw()
        elif scalex == 'linear':
            ax.set_xscale('log')
            ax.figure.canvas.draw()
    elif (event.key.isdigit() and event.key != '0') or event.key in all_keys:
        # keys in list 'all_keys' enable all axes (default key 'a'),
        # otherwise if key is a number only enable this particular axes
        # if it was the axes, where the event was raised
        if not (event.key in all_keys):
            n = int(event.key) - 1
        for i, a in enumerate(canvas.figure.get_axes()):
            # consider axes, in which the event was raised
            # FIXME: Why only this axes?
            if event.x is not None and event.y is not None \
                    and a.in_axes(event):
                if event.key in all_keys:
                    a.set_navigate(True)
                else:
                    a.set_navigate(i == n)
class NonGuiException(Exception):
    """Raised by FigureManagerBase.show() when the backend is non-GUI and
    cannot display a figure window; callers may catch it to warn instead."""
    pass
class FigureManagerBase:
    """
    Helper class for pyplot mode, wraps everything up into a neat bundle

    Public attributes:

    *canvas*
        A :class:`FigureCanvasBase` instance

    *num*
        The figure number
    """
    def __init__(self, canvas, num):
        self.canvas = canvas
        self.num = num
        canvas.manager = self  # store a pointer to parent
        # Connection id of the default key handler, kept so users can turn
        # default key-press handling off with:
        #     manager, canvas = figure.canvas.manager, figure.canvas
        #     canvas.mpl_disconnect(manager.key_press_handler_id)
        self.key_press_handler_id = self.canvas.mpl_connect(
            'key_press_event', self.key_press)

    def show(self):
        """
        For GUI backends, show the figure window and redraw.
        For non-GUI backends, raise an exception to be caught
        by :meth:`~matplotlib.figure.Figure.show`, for an
        optional warning.
        """
        raise NonGuiException()

    def destroy(self):
        """Destroy the figure window -- no-op here, GUI backends override."""
        pass

    def full_screen_toggle(self):
        """Toggle full-screen mode -- no-op here, GUI backends override."""
        pass

    def resize(self, w, h):
        """For gui backends, resize the window (in pixels)."""
        pass

    def key_press(self, event):
        """
        Implement the default mpl key bindings defined at
        :ref:`key-event-handling`
        """
        key_press_handler(event, self.canvas, self.canvas.toolbar)

    def show_popup(self, msg):
        """
        Display message in a popup -- GUI only
        """
        pass

    def get_window_title(self):
        """
        Get the title text of the window containing the figure.
        Return None for non-GUI backends (eg, a PS backend).
        """
        return 'image'

    def set_window_title(self, title):
        """
        Set the title text of the window containing the figure. Note that
        this has no effect for non-GUI backends (eg, a PS backend).
        """
        pass
class Cursors:
    """Simple namespace of integer cursor identifiers (no behavior)."""
    HAND = 0
    POINTER = 1
    SELECT_REGION = 2
    MOVE = 3

# module-level singleton used by the toolbars below
cursors = Cursors()
class NavigationToolbar2(object):
    """
    Base class for the navigation cursor, version 2

    backends must implement a canvas that handles connections for
    'button_press_event' and 'button_release_event'. See
    :meth:`FigureCanvasBase.mpl_connect` for more information

    They must also define

      :meth:`save_figure`
         save the current figure

      :meth:`set_cursor`
         if you want the pointer icon to change

      :meth:`_init_toolbar`
         create your toolbar widget

      :meth:`draw_rubberband` (optional)
         draw the zoom to rect "rubberband" rectangle

      :meth:`press` (optional)
         whenever a mouse button is pressed, you'll be notified with
         the event

      :meth:`release` (optional)
         whenever a mouse button is released, you'll be notified with
         the event

      :meth:`dynamic_update` (optional)
         dynamically update the window while navigating

      :meth:`set_message` (optional)
         display message

      :meth:`set_history_buttons` (optional)
         you can change the history back / forward buttons to
         indicate disabled / enabled state.

    That's it, we'll do the rest!
    """

    # list of toolitems to add to the toolbar, format is:
    # (
    #   text, # the text of the button (often not visible to users)
    #   tooltip_text, # the tooltip shown on hover (where possible)
    #   image_file, # name of the image for the button (without the extension)
    #   name_of_method, # name of the method in NavigationToolbar2 to call
    # )
    # An all-None entry inserts a separator between button groups.
    toolitems = (
        ('Home', 'Reset original view', 'home', 'home'),
        ('Back', 'Back to previous view', 'back', 'back'),
        ('Forward', 'Forward to next view', 'forward', 'forward'),
        (None, None, None, None),
        ('Pan', 'Pan axes with left mouse, zoom with right', 'move', 'pan'),
        ('Zoom', 'Zoom to rectangle', 'zoom_to_rect', 'zoom'),
        (None, None, None, None),
        ('Subplots', 'Configure subplots', 'subplots', 'configure_subplots'),
        ('Save', 'Save the figure', 'filesave', 'save_figure'),
    )

    def __init__(self, canvas):
        self.canvas = canvas
        canvas.toolbar = self
        # a dict from axes index to a list of view limits
        self._views = cbook.Stack()
        self._positions = cbook.Stack()  # stack of subplot positions
        self._xypress = None  # the location and axis info at the time
                              # of the press
        self._idPress = None
        self._idRelease = None
        self._active = None  # current tool: None, 'PAN' or 'ZOOM'
        self._lastCursor = None
        self._init_toolbar()
        self._idDrag = self.canvas.mpl_connect(
            'motion_notify_event', self.mouse_move)

        # connection ids for the extra callbacks used while a zoom drag
        # is in progress (motion + key press/release for axis constraint)
        self._ids_zoom = []
        self._zoom_mode = None

        self._button_pressed = None  # determined by the button pressed
                                     # at start

        self.mode = ''  # a mode string for the status bar
        self.set_history_buttons()

    def set_message(self, s):
        """Display a message on toolbar or in status bar"""
        pass

    def back(self, *args):
        """move back up the view lim stack"""
        self._views.back()
        self._positions.back()
        self.set_history_buttons()
        self._update_view()

    def dynamic_update(self):
        # Optional hook: backends may redraw incrementally while navigating.
        pass

    def draw_rubberband(self, event, x0, y0, x1, y1):
        """Draw a rectangle rubberband to indicate zoom limits"""
        pass

    def forward(self, *args):
        """Move forward in the view lim stack"""
        self._views.forward()
        self._positions.forward()
        self.set_history_buttons()
        self._update_view()

    def home(self, *args):
        """Restore the original view"""
        self._views.home()
        self._positions.home()
        self.set_history_buttons()
        self._update_view()

    def _init_toolbar(self):
        """
        This is where you actually build the GUI widgets (called by
        __init__).  The icons ``home.xpm``, ``back.xpm``, ``forward.xpm``,
        ``hand.xpm``, ``zoom_to_rect.xpm`` and ``filesave.xpm`` are standard
        across backends (there are ppm versions in CVS also).

        You just need to set the callbacks

        home         : self.home
        back         : self.back
        forward      : self.forward
        hand         : self.pan
        zoom_to_rect : self.zoom
        filesave     : self.save_figure

        You only need to define the last one - the others are in the base
        class implementation.
        """
        raise NotImplementedError

    def mouse_move(self, event):
        # Keep the pointer cursor in sync with the active tool, changing it
        # only when it actually differs from the last one we set.
        if not event.inaxes or not self._active:
            if self._lastCursor != cursors.POINTER:
                self.set_cursor(cursors.POINTER)
                self._lastCursor = cursors.POINTER
        else:
            if self._active == 'ZOOM':
                if self._lastCursor != cursors.SELECT_REGION:
                    self.set_cursor(cursors.SELECT_REGION)
                    self._lastCursor = cursors.SELECT_REGION
            elif (self._active == 'PAN' and
                  self._lastCursor != cursors.MOVE):
                self.set_cursor(cursors.MOVE)
                self._lastCursor = cursors.MOVE

        # Show the data coordinates (prefixed by the mode string, if any)
        # in the status bar; formatting may fail for extreme values.
        if event.inaxes and event.inaxes.get_navigate():
            try:
                s = event.inaxes.format_coord(event.xdata, event.ydata)
            except (ValueError, OverflowError):
                pass
            else:
                if len(self.mode):
                    self.set_message('%s, %s' % (self.mode, s))
                else:
                    self.set_message(s)
        else:
            self.set_message(self.mode)

    def pan(self, *args):
        """Activate the pan/zoom tool. pan with left button, zoom with right"""
        # set the pointer icon and button press funcs to the
        # appropriate callbacks
        # Calling pan() while already active toggles the tool off.
        if self._active == 'PAN':
            self._active = None
        else:
            self._active = 'PAN'
        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''

        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''

        if self._active:
            self._idPress = self.canvas.mpl_connect(
                'button_press_event', self.press_pan)
            self._idRelease = self.canvas.mpl_connect(
                'button_release_event', self.release_pan)
            self.mode = 'pan/zoom'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)
        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)
        self.set_message(self.mode)

    def press(self, event):
        """Called whenver a mouse button is pressed."""
        pass

    def press_pan(self, event):
        """the press mouse button in pan/zoom mode callback"""
        # only left (1) and right (3) buttons start a pan/zoom drag
        if event.button == 1:
            self._button_pressed = 1
        elif event.button == 3:
            self._button_pressed = 3
        else:
            self._button_pressed = None
            return

        x, y = event.x, event.y

        # push the current view to define home if stack is empty
        if self._views.empty():
            self.push_current()

        self._xypress = []
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if (x is not None and y is not None and a.in_axes(event) and
                    a.get_navigate() and a.can_pan()):
                a.start_pan(x, y, event.button)
                self._xypress.append((a, i))
                # while the button is held, motion events drive drag_pan
                # instead of the normal mouse_move handler
                self.canvas.mpl_disconnect(self._idDrag)
                self._idDrag = self.canvas.mpl_connect('motion_notify_event',
                                                       self.drag_pan)

        self.press(event)

    def press_zoom(self, event):
        """the press mouse button in zoom to rect mode callback"""
        # If we're already in the middle of a zoom, pressing another
        # button works to "cancel"
        if self._ids_zoom != []:
            for zoom_id in self._ids_zoom:
                self.canvas.mpl_disconnect(zoom_id)
            self.release(event)
            self.draw()
            self._xypress = None
            self._button_pressed = None
            self._ids_zoom = []
            return

        if event.button == 1:
            self._button_pressed = 1
        elif event.button == 3:
            self._button_pressed = 3
        else:
            self._button_pressed = None
            return

        x, y = event.x, event.y

        # push the current view to define home if stack is empty
        if self._views.empty():
            self.push_current()

        self._xypress = []
        for i, a in enumerate(self.canvas.figure.get_axes()):
            if (x is not None and y is not None and a.in_axes(event) and
                    a.get_navigate() and a.can_zoom()):
                self._xypress.append((x, y, a, i, a.viewLim.frozen(),
                                      a.transData.frozen()))

        id1 = self.canvas.mpl_connect('motion_notify_event', self.drag_zoom)
        id2 = self.canvas.mpl_connect('key_press_event',
                                      self._switch_on_zoom_mode)
        id3 = self.canvas.mpl_connect('key_release_event',
                                      self._switch_off_zoom_mode)

        self._ids_zoom = id1, id2, id3
        # holding 'x' or 'y' while dragging constrains zoom to that axis
        self._zoom_mode = event.key

        self.press(event)

    def _switch_on_zoom_mode(self, event):
        self._zoom_mode = event.key
        self.mouse_move(event)

    def _switch_off_zoom_mode(self, event):
        self._zoom_mode = None
        self.mouse_move(event)

    def push_current(self):
        """push the current view limits and position onto the stack"""
        lims = []
        pos = []
        for a in self.canvas.figure.get_axes():
            xmin, xmax = a.get_xlim()
            ymin, ymax = a.get_ylim()
            lims.append((xmin, xmax, ymin, ymax))
            # Store both the original and modified positions
            pos.append((
                a.get_position(True).frozen(),
                a.get_position().frozen()))
        self._views.push(lims)
        self._positions.push(pos)
        self.set_history_buttons()

    def release(self, event):
        """this will be called whenever mouse button is released"""
        pass

    def release_pan(self, event):
        """the release mouse button callback in pan/zoom mode"""
        if self._button_pressed is None:
            return
        # restore the normal motion handler that was swapped out in press_pan
        self.canvas.mpl_disconnect(self._idDrag)
        self._idDrag = self.canvas.mpl_connect(
            'motion_notify_event', self.mouse_move)
        for a, ind in self._xypress:
            a.end_pan()
        if not self._xypress:
            return
        self._xypress = []
        self._button_pressed = None
        self.push_current()
        self.release(event)
        self.draw()

    def drag_pan(self, event):
        """the drag callback in pan/zoom mode"""
        for a, ind in self._xypress:
            #safer to use the recorded button at the press than current button:
            #multiple button can get pressed during motion...
            a.drag_pan(self._button_pressed, event.key, event.x, event.y)
        self.dynamic_update()

    def drag_zoom(self, event):
        """the drag callback in zoom mode"""
        if self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, lim, trans = self._xypress[0]

            # adjust x, last, y, last
            # clamp the rubberband corners to the axes bounding box
            x1, y1, x2, y2 = a.bbox.extents
            x, lastx = max(min(x, lastx), x1), min(max(x, lastx), x2)
            y, lasty = max(min(y, lasty), y1), min(max(y, lasty), y2)

            # axis-constrained zoom: span the full extent of the other axis
            if self._zoom_mode == "x":
                x1, y1, x2, y2 = a.bbox.extents
                y, lasty = y1, y2
            elif self._zoom_mode == "y":
                x1, y1, x2, y2 = a.bbox.extents
                x, lastx = x1, x2

            self.draw_rubberband(event, x, y, lastx, lasty)

    def release_zoom(self, event):
        """the release mouse button callback in zoom to rect mode"""
        for zoom_id in self._ids_zoom:
            self.canvas.mpl_disconnect(zoom_id)
        self._ids_zoom = []

        if not self._xypress:
            return

        last_a = []

        for cur_xypress in self._xypress:
            x, y = event.x, event.y
            lastx, lasty, a, ind, lim, trans = cur_xypress
            # ignore singular clicks - 5 pixels is a threshold
            if abs(x - lastx) < 5 or abs(y - lasty) < 5:
                self._xypress = None
                self.release(event)
                self.draw()
                return

            x0, y0, x1, y1 = lim.extents

            # zoom to rect
            inverse = a.transData.inverted()
            lastx, lasty = inverse.transform_point((lastx, lasty))
            x, y = inverse.transform_point((x, y))
            Xmin, Xmax = a.get_xlim()
            Ymin, Ymax = a.get_ylim()

            # detect twinx,y axes and avoid double zooming
            twinx, twiny = False, False
            if last_a:
                for la in last_a:
                    if a.get_shared_x_axes().joined(a, la):
                        twinx = True
                    if a.get_shared_y_axes().joined(a, la):
                        twiny = True
            last_a.append(a)

            if twinx:
                x0, x1 = Xmin, Xmax
            else:
                # the inner branches handle normal vs inverted x axes
                if Xmin < Xmax:
                    if x < lastx:
                        x0, x1 = x, lastx
                    else:
                        x0, x1 = lastx, x
                    if x0 < Xmin:
                        x0 = Xmin
                    if x1 > Xmax:
                        x1 = Xmax
                else:
                    if x > lastx:
                        x0, x1 = x, lastx
                    else:
                        x0, x1 = lastx, x
                    if x0 > Xmin:
                        x0 = Xmin
                    if x1 < Xmax:
                        x1 = Xmax

            if twiny:
                y0, y1 = Ymin, Ymax
            else:
                # the inner branches handle normal vs inverted y axes
                if Ymin < Ymax:
                    if y < lasty:
                        y0, y1 = y, lasty
                    else:
                        y0, y1 = lasty, y
                    if y0 < Ymin:
                        y0 = Ymin
                    if y1 > Ymax:
                        y1 = Ymax
                else:
                    if y > lasty:
                        y0, y1 = y, lasty
                    else:
                        y0, y1 = lasty, y
                    if y0 > Ymin:
                        y0 = Ymin
                    if y1 < Ymax:
                        y1 = Ymax

            if self._button_pressed == 1:
                # left button: zoom in to the selected rectangle
                if self._zoom_mode == "x":
                    a.set_xlim((x0, x1))
                elif self._zoom_mode == "y":
                    a.set_ylim((y0, y1))
                else:
                    a.set_xlim((x0, x1))
                    a.set_ylim((y0, y1))
            elif self._button_pressed == 3:
                # right button: zoom out so the current view would map onto
                # the selected rectangle (inverse of zoom-in)
                if a.get_xscale() == 'log':
                    alpha = np.log(Xmax / Xmin) / np.log(x1 / x0)
                    rx1 = pow(Xmin / x0, alpha) * Xmin
                    rx2 = pow(Xmax / x0, alpha) * Xmin
                else:
                    alpha = (Xmax - Xmin) / (x1 - x0)
                    rx1 = alpha * (Xmin - x0) + Xmin
                    rx2 = alpha * (Xmax - x0) + Xmin
                if a.get_yscale() == 'log':
                    alpha = np.log(Ymax / Ymin) / np.log(y1 / y0)
                    ry1 = pow(Ymin / y0, alpha) * Ymin
                    ry2 = pow(Ymax / y0, alpha) * Ymin
                else:
                    alpha = (Ymax - Ymin) / (y1 - y0)
                    ry1 = alpha * (Ymin - y0) + Ymin
                    ry2 = alpha * (Ymax - y0) + Ymin
                if self._zoom_mode == "x":
                    a.set_xlim((rx1, rx2))
                elif self._zoom_mode == "y":
                    a.set_ylim((ry1, ry2))
                else:
                    a.set_xlim((rx1, rx2))
                    a.set_ylim((ry1, ry2))

        self.draw()
        self._xypress = None
        self._button_pressed = None

        self._zoom_mode = None

        self.push_current()
        self.release(event)

    def draw(self):
        """Redraw the canvases, update the locators"""
        for a in self.canvas.figure.get_axes():
            xaxis = getattr(a, 'xaxis', None)
            yaxis = getattr(a, 'yaxis', None)
            locators = []
            if xaxis is not None:
                locators.append(xaxis.get_major_locator())
                locators.append(xaxis.get_minor_locator())
            if yaxis is not None:
                locators.append(yaxis.get_major_locator())
                locators.append(yaxis.get_minor_locator())

            for loc in locators:
                loc.refresh()
        self.canvas.draw_idle()

    def _update_view(self):
        """Update the viewlim and position from the view and
        position stack for each axes
        """
        lims = self._views()
        if lims is None:
            return
        pos = self._positions()
        if pos is None:
            return
        for i, a in enumerate(self.canvas.figure.get_axes()):
            xmin, xmax, ymin, ymax = lims[i]
            a.set_xlim((xmin, xmax))
            a.set_ylim((ymin, ymax))
            # Restore both the original and modified positions
            a.set_position(pos[i][0], 'original')
            a.set_position(pos[i][1], 'active')

        self.canvas.draw_idle()

    def save_figure(self, *args):
        """Save the current figure"""
        raise NotImplementedError

    def set_cursor(self, cursor):
        """
        Set the current cursor to one of the :class:`Cursors`
        enums values
        """
        pass

    def update(self):
        """Reset the axes stack"""
        self._views.clear()
        self._positions.clear()
        self.set_history_buttons()

    def zoom(self, *args):
        """Activate zoom to rect mode"""
        # Calling zoom() while already active toggles the tool off.
        if self._active == 'ZOOM':
            self._active = None
        else:
            self._active = 'ZOOM'

        if self._idPress is not None:
            self._idPress = self.canvas.mpl_disconnect(self._idPress)
            self.mode = ''

        if self._idRelease is not None:
            self._idRelease = self.canvas.mpl_disconnect(self._idRelease)
            self.mode = ''

        if self._active:
            self._idPress = self.canvas.mpl_connect('button_press_event',
                                                    self.press_zoom)
            self._idRelease = self.canvas.mpl_connect('button_release_event',
                                                      self.release_zoom)
            self.mode = 'zoom rect'
            self.canvas.widgetlock(self)
        else:
            self.canvas.widgetlock.release(self)

        for a in self.canvas.figure.get_axes():
            a.set_navigate_mode(self._active)

        self.set_message(self.mode)

    def set_history_buttons(self):
        """Enable or disable back/forward button"""
        pass
| codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Object Relational Mapping
#.apidoc module-mods: member-order: bysource
"""
Object relational mapping to database (postgresql) module
* Hierarchical structure
* Constraints consistency, validations
* Object meta Data depends on its status
* Optimised processing by complex query (multiple actions at once)
* Default fields value
* Permissions optimisation
* Persistent objects: PostgreSQL database
* Data conversions
* Multi-level caching system
* 2 different inheritance mechanisms
* Fields:
- classical (varchar, integer, boolean, ...)
- relations (one2many, many2one, many2many)
- functions
"""
import babel.dates
import calendar
import collections
import copy
import datetime
import itertools
import logging
import operator
import pickle
import re
import simplejson
import time
import traceback
import types
import psycopg2
from lxml import etree
import fields
import openerp
import openerp.netsvc as netsvc
import openerp.tools as tools
from openerp.tools.config import config
from openerp.tools.misc import CountingStream
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from query import Query
# Module-level loggers: general ORM messages, and a dedicated channel for
# database schema (DDL) changes.
_logger = logging.getLogger(__name__)
_schema = logging.getLogger(__name__ + '.schema')

# List of etree._Element subclasses that we choose to ignore when parsing XML.
from openerp.tools import SKIPPED_ELEMENT_TYPES

# Valid ORDER BY clause: comma-separated (optionally double-quoted) lowercase
# identifiers, each optionally followed by asc/desc (case-insensitive).
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
# Valid model (_name) identifier: lowercase letters, digits, '_' and '.'.
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Fill *modifiers* with invisible/readonly/required entries derived
    from a field description dict (as returned by fields_get()).

    An attribute whose value differs from its baseline in some states is
    rendered as a dynamic domain on the 'state' field; otherwise the plain
    boolean baseline is used.
    """
    attrs = ('invisible', 'readonly', 'required')
    # Baseline value of each attribute, outside of any particular state.
    defaults = dict((attr, bool(field.get(attr))) for attr in attrs)
    # States in which an attribute deviates from its baseline.
    exceptions = dict((attr, []) for attr in attrs)

    for state, modifs in field.get("states", {}).items():
        for modif in modifs:
            if defaults[modif[0]] != modif[1]:
                exceptions[modif[0]].append(state)

    for attr in attrs:
        if exceptions[attr]:
            op = "not in" if defaults[attr] else "in"
            modifiers[attr] = [("state", op, exceptions[attr])]
        else:
            modifiers[attr] = defaults[attr]
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Update *modifiers* in place from the attributes of a view *node*.

    The 'attrs' and 'states' attributes produce dynamic (domain-based)
    modifiers; the literal invisible/readonly/required attributes produce
    booleans. The context is only needed to evaluate 'invisible' on tree
    views; for non-tree views it should not be given.
    """
    attrs = node.get('attrs')
    if attrs:
        modifiers.update(eval(attrs))

    states = node.get('states')
    if states:
        state_domain = ('state', 'not in', states.split(','))
        if isinstance(modifiers.get('invisible'), list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(state_domain)
        else:
            modifiers['invisible'] = [state_domain]

    for a in ('invisible', 'readonly', 'required'):
        if node.get(a):
            v = bool(eval(node.get(a), {'context': context or {}}))
            if in_tree_view and a == 'invisible':
                # Invisible in a tree view has a specific meaning, make it a
                # new key in the modifiers attribute.
                modifiers['tree_invisible'] = v
            elif v or not isinstance(modifiers.get(a), list):
                # Don't set the attribute to False if a dynamic value was
                # provided (i.e. a domain from attrs or states).
                modifiers[a] = v
def simplify_modifiers(modifiers):
    """Drop falsy invisible/readonly/required entries from *modifiers*;
    an absent key is implicitly False."""
    for attr in ('invisible', 'readonly', 'required'):
        if not modifiers.get(attr, True):
            del modifiers[attr]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize a non-empty *modifiers* dict (simplified first) into the
    node's ``modifiers`` attribute as JSON; empty dicts are left off."""
    if not modifiers:
        return
    simplify_modifiers(modifiers)
    node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """Compute and attach the ``modifiers`` attribute of *node*, in place.

    Modifiers are derived first from the field descriptor (if given), then
    from the node's own attributes, and finally serialized onto the node.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code
                              path (separate from ``invisible``): in
                              tree view there are two levels of
                              invisibility, cell content (a column is
                              present but the cell itself is not
                              displayed) with ``invisible`` and column
                              invisibility (the whole column is
                              hidden) with ``tree_invisible``.
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(node, modifiers, context=context,
                               in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Assert that *what* (an XML view snippet or a fields_get() dict)
    produces exactly the *expected* JSON-encoded modifiers."""
    modifiers = {}
    if isinstance(what, basestring):
        transfer_node_to_modifiers(etree.fromstring(what), modifiers)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
    else:
        # other input types are silently ignored, like the original
        return
    simplify_modifiers(modifiers)
    json = simplejson.dumps(modifiers)
    assert json == expected, "%s != %s" % (json, expected)
# To use this test:
# import openerp
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Smoke tests for the modifiers helpers above.

    Not run automatically; invoke manually with:
        import openerp
        openerp.osv.orm.modifiers_tests()
    """
    # XML view snippets -> expected JSON modifiers
    test_modifiers('<field name="a"/>', '{}')
    test_modifiers('<field name="a" invisible="1"/>', '{"invisible": true}')
    test_modifiers('<field name="a" readonly="1"/>', '{"readonly": true}')
    test_modifiers('<field name="a" required="1"/>', '{"required": true}')
    test_modifiers('<field name="a" invisible="0"/>', '{}')
    test_modifiers('<field name="a" readonly="0"/>', '{}')
    test_modifiers('<field name="a" required="0"/>', '{}')
    test_modifiers('<field name="a" invisible="1" required="1"/>', '{"invisible": true, "required": true}') # TODO order is not guaranteed
    test_modifiers('<field name="a" invisible="1" required="0"/>', '{"invisible": true}')
    test_modifiers('<field name="a" invisible="0" required="1"/>', '{"required": true}')
    test_modifiers("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""", '{"invisible": [["b", "=", "c"]]}')

    # The dictionary is supposed to be the result of fields_get().
    test_modifiers({}, '{}')
    test_modifiers({"invisible": True}, '{"invisible": true}')
    test_modifiers({"invisible": False}, '{}')
def check_object_name(name):
    """Return True if *name* is a valid openerp object (_name) value.

    The _name attribute in osv and osv_memory object is subject to
    some restrictions: only lowercase letters, digits, '_' and '.'.

    TODO: this is an approximation. The goal in this approximation
    is to disallow uppercase characters (in some places, we quote
    table/column names and in other not, which leads to this kind
    of errors:

        psycopg2.ProgrammingError: relation "xxx" does not exist).

    The same restriction should apply to both osv and osv_memory
    objects for consistency.
    """
    # Idiom fix: return the boolean directly instead of an
    # if-None/return-False/return-True cascade.
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """Log an error and raise except_orm when *name* is not a valid
    openerp object name (see check_object_name)."""
    if check_object_name(name):
        return
    msg = "The _name attribute %s is not valid." % name
    _logger.error(msg)
    raise except_orm('ValueError', msg)
# Mapping from SQL ON DELETE action names to the single-character codes
# PostgreSQL stores in the pg_constraint.confdeltype catalog column.
POSTGRES_CONFDELTYPES = {
    'RESTRICT': 'r',
    'NO ACTION': 'a',
    'CASCADE': 'c',
    'SET NULL': 'n',
    'SET DEFAULT': 'd',
}
def intersect(la, lb):
    """Return the elements of *la* that also occur in *lb*, preserving
    *la*'s order and duplicates."""
    return filter(lb.__contains__, la)
def fix_import_export_id_paths(fieldname):
    """Normalize the id suffixes of an import/export field name and split
    the field path on '/'.

    'foo.id' becomes 'foo/.id' (database id) and 'foo:id' becomes
    'foo/id' (external id) before splitting.

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    with_db_id = re.sub(r'([^/])\.id', r'\1/.id', fieldname)
    with_xml_id = re.sub(r'([^/]):id', r'\1/id', with_db_id)
    return with_xml_id.split('/')
class except_orm(Exception):
    """Base ORM exception carrying a (name, value) pair, e.g. a short
    error title and a detailed message."""

    def __init__(self, name, value):
        # Exception.__init__ sets self.args to (name, value)
        Exception.__init__(self, name, value)
        self.name = name
        self.value = value
class BrowseRecordError(Exception):
    """Error raised for invalid use of a browse_record (e.g. a bad
    record id -- see the commented-out checks in browse_record.__init__)."""
    pass
class browse_null(object):
    """Readonly placeholder for an absent relational value.

    Any attribute or item access yields None, conversions yield empty
    values, and (under Python 2) the object is falsy.
    """

    def __init__(self):
        # mirror browse_record's `id` attribute
        self.id = False

    def __getattr__(self, name):
        return None  # XXX: return self ?

    def __getitem__(self, name):
        return None

    def __nonzero__(self):
        return False

    def __int__(self):
        return False

    def __str__(self):
        return ''

    def __unicode__(self):
        return u''
#
# TODO: execute an object method on browse_record_list
#
class browse_record_list(list):
    """Collection of browse objects.

    Such an instance will be returned when doing a ``browse([ids..])``
    and will be iterable, yielding browse() objects.
    """

    def __init__(self, lst, context=None):
        super(browse_record_list, self).__init__(lst)
        # keep the evaluation context at hand for downstream code
        self.context = context or {}
class browse_record(object):
""" An object that behaves like a row of an object's table.
It has attributes after the columns of the corresponding object.
Examples::
uobj = pool.get('res.users')
user_rec = uobj.browse(cr, uid, 104)
name = user_rec.name
"""
def __init__(self, cr, uid, id, table, cache, context=None,
list_class=browse_record_list, fields_process=None):
"""
:param table: the browsed object (inherited from orm)
:param dict cache: a dictionary of model->field->data to be shared
across browse objects, thus reducing the SQL
read()s. It can speed up things a lot, but also be
disastrous if not discarded after write()/unlink()
operations
:param dict context: dictionary with an optional context
"""
if fields_process is None:
fields_process = {}
if context is None:
context = {}
self._list_class = list_class
self._cr = cr
self._uid = uid
self._id = id
self._table = table # deprecated, use _model!
self._model = table
self._table_name = self._table._name
self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
self._context = context
self._fields_process = fields_process
cache.setdefault(table._name, {})
self._data = cache[table._name]
# if not (id and isinstance(id, (int, long,))):
# raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
# if not table.exists(cr, uid, id, context):
# raise BrowseRecordError(_('Object %s does not exists') % (self,))
if id not in self._data:
self._data[id] = {'id': id}
self._cache = cache
def __getitem__(self, name):
if name == 'id':
return self._id
if name not in self._data[self._id]:
# build the list of fields we will fetch
# fetch the definition of the field which was asked for
if name in self._table._columns:
col = self._table._columns[name]
elif name in self._table._inherit_fields:
col = self._table._inherit_fields[name][2]
elif hasattr(self._table, str(name)):
attr = getattr(self._table, name)
if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
def function_proxy(*args, **kwargs):
if 'context' not in kwargs and self._context:
kwargs.update(context=self._context)
return attr(self._cr, self._uid, [self._id], *args, **kwargs)
return function_proxy
else:
return attr
else:
error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
self.__logger.warning(error_msg)
if self.__logger.isEnabledFor(logging.DEBUG):
self.__logger.debug(''.join(traceback.format_stack()))
raise KeyError(error_msg)
# if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
if col._prefetch and not col.groups:
# gen the list of "local" (ie not inherited) fields which are classic or many2one
field_filter = lambda x: x[1]._classic_write and x[1]._prefetch and not x[1].groups
fields_to_fetch = filter(field_filter, self._table._columns.items())
# gen the list of inherited fields
inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
# complete the field list with the inherited fields which are classic or many2one
fields_to_fetch += filter(field_filter, inherits)
# otherwise we fetch only that field
else:
fields_to_fetch = [(name, col)]
ids = filter(lambda id: name not in self._data[id], self._data.keys())
# read the results
field_names = map(lambda x: x[0], fields_to_fetch)
field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")
# TODO: improve this, very slow for reports
if self._fields_process:
lang = self._context.get('lang', 'en_US') or 'en_US'
lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
if not lang_obj_ids:
raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])
for field_name, field_column in fields_to_fetch:
if field_column._type in self._fields_process:
for result_line in field_values:
result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
if result_line[field_name]:
result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)
if not field_values:
# Where did those ids come from? Perhaps old entries in ir_model_dat?
_logger.warning("No field_values found for ids %s in %s", ids, self)
raise KeyError('Field %s not found in %s'%(name, self))
# create browse records for 'remote' objects
for result_line in field_values:
new_data = {}
for field_name, field_column in fields_to_fetch:
if field_column._type == 'many2one':
if result_line[field_name]:
obj = self._table.pool.get(field_column._obj)
if isinstance(result_line[field_name], (list, tuple)):
value = result_line[field_name][0]
else:
value = result_line[field_name]
if value:
# FIXME: this happen when a _inherits object
# overwrite a field of it parent. Need
# testing to be sure we got the right
# object and not the parent one.
if not isinstance(value, browse_record):
if obj is None:
# In some cases the target model is not available yet, so we must ignore it,
# which is safe in most cases, this value will just be loaded later when needed.
# This situation can be caused by custom fields that connect objects with m2o without
# respecting module dependencies, causing relationships to be connected to soon when
# the target is not loaded yet.
continue
new_data[field_name] = browse_record(self._cr,
self._uid, value, obj, self._cache,
context=self._context,
list_class=self._list_class,
fields_process=self._fields_process)
else:
new_data[field_name] = value
else:
new_data[field_name] = browse_null()
else:
new_data[field_name] = browse_null()
elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
elif field_column._type == 'reference':
if result_line[field_name]:
if isinstance(result_line[field_name], browse_record):
new_data[field_name] = result_line[field_name]
else:
ref_obj, ref_id = result_line[field_name].split(',')
ref_id = long(ref_id)
if ref_id:
obj = self._table.pool.get(ref_obj)
new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
else:
new_data[field_name] = browse_null()
else:
new_data[field_name] = browse_null()
else:
new_data[field_name] = result_line[field_name]
self._data[result_line['id']].update(new_data)
if not name in self._data[self._id]:
# How did this happen? Could be a missing model due to custom fields used too soon, see above.
self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
return self._data[self._id][name]
def __getattr__(self, name):
    """Map attribute access to dictionary-style field lookup.

    Unknown names surface as AttributeError (as expected by hasattr/getattr
    callers) instead of the KeyError raised by __getitem__.
    """
    try:
        return self[name]
    except KeyError, e:
        raise AttributeError(e)
def __contains__(self, name):
    """Return True when `name` is a real column, an inherited field, or any
    other attribute of the underlying model."""
    model = self._table
    if name in model._columns:
        return True
    if name in model._inherit_fields:
        return True
    return hasattr(model, name)
def __iter__(self):
    """Forbid iteration: a browse_record is a single record, not a collection."""
    raise NotImplementedError("Iteration is not allowed on %s" % self)
def __hasattr__(self, name):
    # NOTE(review): __hasattr__ is not an actual Python protocol hook — the
    # interpreter never calls it implicitly for hasattr(); it only serves
    # explicit calls. Kept for backward compatibility.
    return name in self
def __int__(self):
    # int(record) yields the database id of the record.
    return self._id
def __str__(self):
    # e.g. "browse_record(res.partner, 42)"
    return "browse_record(%s, %d)" % (self._table_name, self._id)
def __eq__(self, other):
    """Two browse_records are equal when they reference the same row of the
    same model table; any non-browse_record value compares unequal."""
    if not isinstance(other, browse_record):
        return False
    return self._table_name == other._table_name and self._id == other._id
def __ne__(self, other):
    """Inverse of __eq__: non-browse_record values are always considered
    different from a record."""
    if not isinstance(other, browse_record):
        return True
    return not (self._table_name == other._table_name and self._id == other._id)
# we need to define __unicode__ even though we've already defined __str__
# because we have overridden __getattr__ (which would otherwise try to
# resolve the missing method as a field lookup)
def __unicode__(self):
    """Unicode representation of the record, derived from __str__."""
    return unicode(str(self))
def __hash__(self):
    # Hash on (model name, id) so records stay consistent with __eq__ when
    # used as dict keys or set members.
    return hash((self._table_name, self._id))

__repr__ = __str__
def refresh(self):
    """Force refreshing this browse_record's data and all the data of the
    records that belong to the same cache, by emptying the cache completely,
    preserving only the record identifiers (for prefetching optimizations).
    """
    for model, model_cache in self._cache.iteritems():
        # only preserve the ids of the records that were in the cache
        cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
        self._cache[model].clear()
        self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * If no size (or an empty or negative size) is provided, return an
      'infinite' VARCHAR
    * Otherwise return a VARCHAR(n)

    :type int size: varchar size, optional
    :rtype: str
    """
    # Guard-clause style: falsy size means unbounded VARCHAR.
    if not size:
        return 'VARCHAR'
    if not isinstance(size, int):
        raise TypeError("VARCHAR parameter should be an int, got %s"
                        % type(size))
    if size > 0:
        return 'VARCHAR(%d)' % size
    # Negative sizes also fall back to an unbounded VARCHAR.
    return 'VARCHAR'
# Mapping from simple field classes to their PostgreSQL column type; field
# types with parameters (float, char, selection, function) are handled
# specially in get_pg_type() below.
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.text: 'text',
    fields.html: 'text',
    fields.date: 'date',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
}
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification), or
        None when the field type has no SQL mapping
    :rtype: (str, str)
    """
    field_type = type_override or type(f)

    # Simple types map 1:1 through the lookup table.
    if field_type in FIELDS_TO_PGTYPES:
        base_type = FIELDS_TO_PGTYPES[field_type]
        return (base_type, base_type)

    if issubclass(field_type, fields.float):
        # Explicit digits -> exact NUMERIC, otherwise a plain double.
        if f.digits:
            return ('numeric', 'NUMERIC')
        return ('float8', 'DOUBLE PRECISION')

    if issubclass(field_type, (fields.char, fields.reference)):
        return ('varchar', pg_varchar(f.size))

    if issubclass(field_type, fields.selection):
        # Integer-keyed selections (or size == -1) are stored as INTEGER;
        # everything else as VARCHAR.
        has_int_keys = isinstance(f.selection, list) and isinstance(f.selection[0][0], int)
        if has_int_keys or getattr(f, 'size', None) == -1:
            return ('int4', 'INTEGER')
        return ('varchar', pg_varchar(getattr(f, 'size', None)))

    if issubclass(field_type, fields.function):
        if f._type == 'selection':
            return ('varchar', pg_varchar())
        # Dispatch on the wrapped type of the function field.
        return get_pg_type(f, getattr(fields, f._type))

    _logger.warning('%s type not supported!', field_type)
    return None
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.
    """

    # {openerp_module_name: [model classes defined by that module]}
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        # Classes with _register = False opt out of discovery; flip the flag
        # so subclasses are registered by default, and stop here.
        if not self._register:
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            return

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        parts = self.__module__.split('.')
        if len(parts) > 2 and parts[0] == 'openerp' and parts[1] == 'addons':
            module_name = parts[2]
        else:
            module_name = parts[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module, skipping
        # runtime-defined (custom) models.
        if not self._custom:
            self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True. Maps column name to its SQL declaration;
# the *_uid columns reference res_users and are nulled on user deletion.
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM ('id' plus the log
# access columns above)
MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS.keys()
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vaccuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.
    """
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.
    _name = None                 # technical model name, e.g. 'sale.order' (mandatory unless _inherit is set)
    _columns = {}                # field declarations: {field_name: fields._column instance}
    _constraints = []            # python constraints: (function, message, fields) triples
    _custom = False              # True for models defined at runtime through ir.model
    _defaults = {}               # field default values or callables returning them
    _rec_name = None             # field used by name_get(); falls back to 'name' (see __init__)
    _parent_name = 'parent_id'   # many2one field used for record hierarchies
    _parent_store = False        # enable parent_left/parent_right nested-set storage
    _parent_order = False
    _date_name = 'date'
    _order = 'id'                # default ORDER BY clause for search()
    _sequence = None             # PostgreSQL sequence feeding the id column (see __init__)
    _description = None          # user-facing model label; defaults to _name
    _needaction = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    _group_by_full = {}

    # Transience
    _transient = False # True in a TransientModel

    # structure:
    #  { 'parent_model': 'm2o_field', ... }
    _inherits = {}

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # Example:
    #  { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                   field_column_obj, origina_parent_model), ... }
    _inherit_fields = {}

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.
    _all_columns = {}

    _table = None                # SQL table name; defaults to _name with dots replaced by underscores
    _invalids = set()
    _log_create = False
    _sql_constraints = []        # (constraint_name, sql_def, message) triples

    # method names that must never be exposed as field names
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # pseudo-field used for optimistic concurrency checks on write/unlink
    CONCURRENCY_CHECK_FIELD = '__last_update'
def log(self, cr, uid, id, message, secondary=False, context=None):
    """Deprecated res.log shim: only emits a deprecation warning; the
    arguments are accepted for backward compatibility and ignored."""
    return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
def view_init(self, cr, uid, fields_list, context=None):
    """Override this method to do specific things when a view on the object is opened.

    Default implementation is a no-op hook.
    """
    pass
def _field_create(self, cr, context=None):
    """ Create entries in ir_model_fields for all the model's fields.

    If necessary, also create an entry in ir_model, and if called from the
    modules loading scheme (by receiving 'module' in the context), also
    create entries in ir_model_data (for the model and the fields).

    - create an entry in ir_model (if there is not already one),
    - create an entry in ir_model_data (if there is not already one, and if
      'module' is in the context),
    - update ir_model_fields with the fields found in _columns
      (TODO there is some redundancy as _columns is updated from
      ir_model_fields in __init__).

    :param cr: database cursor
    :param dict context: may carry 'module' (current module name) and the
        manual-field hints 'field_state', 'field_name' and 'select'
    """
    if context is None:
        context = {}
    # Ensure an ir_model row exists for this model, creating one if missing.
    cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
    if not cr.rowcount:
        cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
        model_id = cr.fetchone()[0]
        cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
    else:
        model_id = cr.fetchone()[0]
    if 'module' in context:
        # Register an XML id (ir_model_data) for the model itself.
        name_id = 'model_'+self._name.replace('.', '_')
        cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
        if not cr.rowcount:
            cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                (name_id, context['module'], 'ir.model', model_id)
            )
            cr.commit()

    # Snapshot the existing ir_model_fields rows for diffing below.
    cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
    cols = {}
    for rec in cr.dictfetchall():
        cols[rec['name']] = rec

    ir_model_fields_obj = self.pool.get('ir.model.fields')

    # sparse field should be created at the end, as it depends on its serialized field already existing
    model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
    for (k, f) in model_fields:
        vals = {
            'model_id': model_id,
            'model': self._name,
            'name': k,
            'field_description': f.string,
            'ttype': f._type,
            'relation': f._obj or '',
            'view_load': (f.view_load and 1) or 0,
            'select_level': tools.ustr(f.select or 0),
            'readonly': (f.readonly and 1) or 0,
            'required': (f.required and 1) or 0,
            'selectable': (f.selectable and 1) or 0,
            'translate': (f.translate and 1) or 0,
            'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
            'serialization_field_id': None,
        }
        if getattr(f, 'serialization_field', None):
            # resolve link to serialization_field if specified by name
            serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
            if not serialization_field_id:
                raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
            vals['serialization_field_id'] = serialization_field_id[0]

        # When it is a custom (manual) field, it does not contain f.select
        if context.get('field_state', 'base') == 'manual':
            if context.get('field_name', '') == k:
                vals['select_level'] = context.get('select', '0')
            # setting value to let the problem NOT occur next time
            elif k in cols:
                vals['select_level'] = cols[k]['select_level']

        if k not in cols:
            # New column: insert a fresh ir_model_fields row (and its XML id
            # when loading from a module).
            cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
            id = cr.fetchone()[0]
            vals['id'] = id
            cr.execute("""INSERT INTO ir_model_fields (
                id, model_id, model, name, field_description, ttype,
                relation,view_load,state,select_level,relation_field, translate, serialization_field_id
            ) VALUES (
                %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
            )""", (
                id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                vals['relation'], bool(vals['view_load']), 'base',
                vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
            ))
            if 'module' in context:
                name1 = 'field_' + self._table + '_' + k
                cr.execute("select name from ir_model_data where name=%s", (name1,))
                if cr.fetchone():
                    # avoid XML id collisions by suffixing the row id
                    name1 = name1 + "_" + str(id)
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name1, context['module'], 'ir.model.fields', id)
                )
        else:
            # Existing column: rewrite the row as soon as any value differs.
            for key, val in vals.items():
                if cols[k][key] != vals[key]:
                    cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                    cr.commit()
                    cr.execute("""UPDATE ir_model_fields SET
                        model_id=%s, field_description=%s, ttype=%s, relation=%s,
                        view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                    WHERE
                        model=%s AND name=%s""", (
                            vals['model_id'], vals['field_description'], vals['ttype'],
                            vals['relation'], bool(vals['view_load']),
                            vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
                        ))
                    break
    cr.commit()
#
# Goal: try to apply inheritance at the instanciation level and
# put objects in the pool var
#
@classmethod
def create_instance(cls, pool, cr):
    """ Instanciate a given model.

    This class method instanciates the class of some model (i.e. a class
    deriving from osv or osv_memory). The class might be the class passed
    in argument or, if it inherits from another class, a class constructed
    by combining the two classes.

    The ``attributes`` argument specifies which parent class attributes
    have to be combined.

    TODO: the creation of the combined class is repeated at each call of
    this method. This is probably unnecessary.

    :param pool: the registry the new instance is added to
    :param cr: database cursor
    :returns: the model instance (also registered in `pool`)
    """
    attributes = ['_columns', '_defaults', '_inherits', '_constraints',
        '_sql_constraints']

    parent_names = getattr(cls, '_inherit', None)
    if parent_names:
        if isinstance(parent_names, (str, unicode)):
            name = cls._name or parent_names
            parent_names = [parent_names]
        else:
            name = cls._name
        if not name:
            raise TypeError('_name is mandatory in case of multiple inheritance')

        # Merge each parent's combinable attributes into the new class.
        for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
            parent_model = pool.get(parent_name)
            if not parent_model:
                raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                    'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
            if not getattr(cls, '_original_module', None) and name == parent_model._name:
                cls._original_module = parent_model._original_module
            parent_class = parent_model.__class__
            nattr = {}
            for s in attributes:
                new = copy.copy(getattr(parent_model, s, {}))
                if s == '_columns':
                    # Don't _inherit custom fields.
                    for c in new.keys():
                        if new[c].manual:
                            del new[c]
                    # Duplicate float fields because they have a .digits
                    # cache (which must be per-registry, not server-wide).
                    for c in new.keys():
                        if new[c]._type == 'float':
                            new[c] = copy.copy(new[c])
                if hasattr(new, 'update'):
                    # dict-like attribute: child values win over parent's
                    new.update(cls.__dict__.get(s, {}))
                elif s=='_constraints':
                    for c in cls.__dict__.get(s, []):
                        exist = False
                        for c2 in range(len(new)):
                            #For _constraints, we should check field and methods as well
                            if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                    or getattr(new[c2][0],'__name__', True) == \
                                        getattr(c[0],'__name__', False)):
                                # If new class defines a constraint with
                                # same function name, we let it override
                                # the old one.
                                new[c2] = c
                                exist = True
                                break
                        if not exist:
                            new.append(c)
                else:
                    # plain list attribute: concatenate
                    new.extend(cls.__dict__.get(s, []))
                nattr[s] = new
            # Keep links to non-inherited constraints, e.g. useful when exporting translations
            nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
            nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
            # Build the combined class; _register=False keeps it out of the
            # metaclass' module discovery.
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
    else:
        cls._local_constraints = getattr(cls, '_constraints', [])
        cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])

    if not getattr(cls, '_original_module', None):
        cls._original_module = cls._module
    # Bypass BaseModel.__new__ (which returns None) and call __init__
    # explicitly to register the instance in the pool.
    obj = object.__new__(cls)
    obj.__init__(pool, cr)
    return obj
def __new__(cls):
    """Register this model.

    This doesn't create an instance but simply register the model
    as being part of the module where it is defined.
    """
    # Derive the defining (OpenERP) module from the python module path.
    if not hasattr(cls, '_module'):
        cls._module = cls.__module__.split('.')[0]

    # Remember the class so the metaclass-driven loader can instanciate it
    # for this module later, skipping runtime-defined (custom) models.
    models_of_module = MetaModel.module_to_models.setdefault(cls._module, [])
    if cls not in models_of_module and not cls._custom:
        models_of_module.append(cls)

    # Since we don't return an instance here, the __init__
    # method won't be called.
    return None
def __init__(self, pool, cr):
    """ Initialize a model and make it part of the given registry.

    - copy the stored fields' functions in the osv_pool,
    - update the _columns with the fields found in ir_model_fields,
    - ensure there is a many2one for each _inherits'd parent,
    - update the children's _columns,
    - give a chance to each field to initialize itself.

    :param pool: model registry this instance is added to
    :param cr: database cursor
    :raises except_orm: if neither _name nor _inherit is defined
    """
    pool.add(self._name, self)
    self.pool = pool

    if not self._name and not hasattr(self, '_inherit'):
        name = type(self).__name__.split('.')[0]
        msg = "The class %s has to have a _name attribute" % name
        _logger.error(msg)
        raise except_orm('ValueError', msg)

    # Fill in the defaults derived from _name / _auto.
    if not self._description:
        self._description = self._name
    if not self._table:
        self._table = self._name.replace('.', '_')

    if not hasattr(self, '_log_access'):
        # If _log_access is not specified, it is the same value as _auto.
        self._log_access = getattr(self, "_auto", True)

    # Per-registry copy so registries don't share mutable column state.
    self._columns = self._columns.copy()
    for store_field in self._columns:
        f = self._columns[store_field]
        if hasattr(f, 'digits_change'):
            f.digits_change(cr)

        # Drop any store trigger previously registered for this very field
        # (it will be re-registered below with fresh values).
        def not_this_field(stored_func):
            x, y, z, e, f, l = stored_func
            return x != self._name or y != store_field
        self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))

        if not isinstance(f, fields.function):
            continue
        if not f.store:
            continue
        sm = f.store
        if sm is True:
            # store=True shorthand: recompute on writes to this model only.
            sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
        for object, aa in sm.items():
            if len(aa) == 4:
                (fnct, fields2, order, length) = aa
            elif len(aa) == 3:
                (fnct, fields2, order) = aa
                length = None
            else:
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
            self.pool._store_function.setdefault(object, [])
            # Register the trigger once and keep the list priority-sorted.
            t = (self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length)
            if not t in self.pool._store_function[object]:
                self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))

    for (key, _, msg) in self._sql_constraints:
        self.pool._sql_error[self._table+'_'+key] = msg

    # Load manual fields
    # Check the query is already done for all modules of if we need to
    # do it ourselves.
    if self.pool.fields_by_model is not None:
        manual_fields = self.pool.fields_by_model.get(self._name, [])
    else:
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        manual_fields = cr.dictfetchall()
    for field in manual_fields:
        if field['name'] in self._columns:
            continue
        attrs = {
            'string': field['field_description'],
            'required': bool(field['required']),
            'readonly': bool(field['readonly']),
            # NOTE(review): eval of DB-stored expressions; values come from
            # ir_model_fields (admin-defined), not end-user input — confirm.
            'domain': eval(field['domain']) if field['domain'] else None,
            'size': field['size'] or None,
            'ondelete': field['on_delete'],
            'translate': (field['translate']),
            'manual': True,
            #'select': int(field['select_level'])
        }

        if field['serialization_field_id']:
            cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
            attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
            if field['ttype'] in ['many2one', 'one2many', 'many2many']:
                attrs.update({'relation': field['relation']})
            self._columns[field['name']] = fields.sparse(**attrs)
        elif field['ttype'] == 'selection':
            self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
        elif field['ttype'] == 'reference':
            self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
        elif field['ttype'] == 'many2one':
            self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
        elif field['ttype'] == 'one2many':
            self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
        elif field['ttype'] == 'many2many':
            _rel1 = field['relation'].replace('.', '_')
            _rel2 = field['model'].replace('.', '_')
            _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
            self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
        else:
            self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
    self._inherits_check()
    self._inherits_reload()
    if not self._sequence:
        self._sequence = self._table + '_id_seq'
    for k in self._defaults:
        assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
    for f in self._columns:
        self._columns[f].restart()

    # Transience
    if self.is_transient():
        self._transient_check_count = 0
        self._transient_max_count = config.get('osv_memory_count_limit')
        self._transient_max_hours = config.get('osv_memory_age_limit')
        assert self._log_access, "TransientModels must have log_access turned on, "\
            "in order to implement their access rights policy"

    # Validate rec_name
    if self._rec_name is not None:
        assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
    else:
        self._rec_name = 'name'
def __export_row(self, cr, uid, row, fields, context=None):
    """Export one browse_record as a list of rows of string values.

    :param row: browse_record to export
    :param fields: list of field paths (each a list of path components,
        as produced by fix_import_export_id_paths)
    :returns: [main_row] + extra rows for one2many/many2many sub-records
    """
    if context is None:
        context = {}

    def check_type(field_type):
        # Placeholder value for empty cells, by field type.
        if field_type == 'float':
            return 0.0
        elif field_type == 'integer':
            return 0
        elif field_type == 'boolean':
            return 'False'
        return ''

    def selection_field(in_field):
        # Look up the column object for f[i] through the _inherits chain.
        # NOTE(review): the recursive branch does not `return` its result,
        # so inherited-of-inherited lookups yield None — confirm intended.
        col_obj = self.pool.get(in_field.keys()[0])
        if f[i] in col_obj._columns.keys():
            return col_obj._columns[f[i]]
        elif f[i] in col_obj._inherits.keys():
            selection_field(col_obj._inherits)
        else:
            return False

    def _get_xml_id(self, cr, uid, r):
        # Return an existing XML id for record r, or create a fresh
        # '__export__.<table>_<id>[_<n>]' one.
        model_data = self.pool.get('ir.model.data')
        data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
        if len(data_ids):
            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
            if d['module']:
                r = '%s.%s' % (d['module'], d['name'])
            else:
                r = d['name']
        else:
            postfix = 0
            while True:
                n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
                if not model_data.search(cr, uid, [('name', '=', n)]):
                    break
                postfix += 1
            model_data.create(cr, SUPERUSER_ID, {
                'name': n,
                'model': self._name,
                'res_id': r['id'],
                'module': '__export__',
            })
            r = '__export__.'+n
        return r

    lines = []
    data = map(lambda x: '', range(len(fields)))
    done = []
    for fpos in range(len(fields)):
        f = fields[fpos]
        if f:
            r = row
            i = 0
            # Walk the field path, dereferencing one component per step.
            while i < len(f):
                cols = False
                if f[i] == '.id':
                    r = r['id']
                elif f[i] == 'id':
                    r = _get_xml_id(self, cr, uid, r)
                else:
                    r = r[f[i]]
                    # To display external name of selection field when its exported
                    if f[i] in self._columns.keys():
                        cols = self._columns[f[i]]
                    elif f[i] in self._inherit_fields.keys():
                        cols = selection_field(self._inherits)
                    if cols and cols._type == 'selection':
                        sel_list = cols.selection
                        if r and type(sel_list) == type([]):
                            r = [x[1] for x in sel_list if r==x[0]]
                            r = r and r[0] or False
                if not r:
                    # Empty value: emit a type-appropriate placeholder.
                    if f[i] in self._columns:
                        r = check_type(self._columns[f[i]]._type)
                    elif f[i] in self._inherit_fields:
                        r = check_type(self._inherit_fields[f[i]][2]._type)
                    data[fpos] = r or False
                    break
                if isinstance(r, (browse_record_list, list)):
                    # x2many value: recurse into sub-records, keeping the
                    # first sub-row inline and appending the rest as extra
                    # export lines.
                    first = True
                    fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                            or [], fields)
                    if fields2 in done:
                        if [x for x in fields2 if x]:
                            break
                    done.append(fields2)
                    if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
                        # m2m exported by XML id: comma-joined id list.
                        data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
                        break
                    for row2 in r:
                        lines2 = row2._model.__export_row(cr, uid, row2, fields2,
                                context)
                        if first:
                            for fpos2 in range(len(fields)):
                                if lines2 and lines2[0][fpos2]:
                                    data[fpos2] = lines2[0][fpos2]
                            if not data[fpos]:
                                # No sub-field requested: fall back to a
                                # comma-joined name_get of the sub-records.
                                dt = ''
                                for rr in r:
                                    name_relation = self.pool.get(rr._table_name)._rec_name
                                    if isinstance(rr[name_relation], browse_record):
                                        rr = rr[name_relation]
                                    rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                                    rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                                    dt += tools.ustr(rr_name or '') + ','
                                data[fpos] = dt[:-1]
                                break
                            lines += lines2[1:]
                            first = False
                        else:
                            lines += lines2
                    break
                i += 1
            if i == len(f):
                # Full path consumed: render the final value as text.
                if isinstance(r, browse_record):
                    r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                    r = r and r[0] and r[0][1] or ''
                data[fpos] = tools.ustr(r or '')
    return [data] + lines
def export_data(self, cr, uid, ids, fields_to_export, context=None):
    """
    Export fields for selected objects

    :param cr: database cursor
    :param uid: current user id
    :param ids: list of ids
    :param fields_to_export: list of fields
    :param context: context arguments, like lang, time zone
    :rtype: dictionary with a *datas* matrix

    This method is used when exporting data via client menu
    """
    if context is None:
        context = {}
    # BUGFIX/cleanup: removed the dead local `cols` (a merged copy of
    # _columns and _inherit_fields) that was built and never used.
    fields_to_export = map(fix_import_export_id_paths, fields_to_export)
    datas = []
    for row in self.browse(cr, uid, ids, context):
        datas += self.__export_row(cr, uid, row, fields_to_export, context)
    return {'datas': datas}
def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
"""
.. deprecated:: 7.0
Use :meth:`~load` instead
Import given data in given module
This method is used when importing data via client menu.
Example of fields to import for a sale.order::
.id, (=database_id)
partner_id, (=name_search)
order_line/.id, (=database_id)
order_line/name,
order_line/product_id/id, (=xml id)
order_line/price_unit,
order_line/product_uom_qty,
order_line/product_uom/id (=xml_id)
This method returns a 4-tuple with the following structure::
(return_code, errored_resource, error_message, unused)
* The first item is a return code, it is ``-1`` in case of
import error, or the last imported row number in case of success
* The second item contains the record data dict that failed to import
in case of error, otherwise it's 0
* The third item contains an error message string in case of error,
otherwise it's 0
* The last item is currently unused, with no specific semantics
:param fields: list of fields to import
:param datas: data to import
:param mode: 'init' or 'update' for record creation
:param current_module: module name
:param noupdate: flag for record creation
:param filename: optional file to store partial import state for recovery
:returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
:rtype: (int, dict or 0, str or 0, str or 0)
"""
context = dict(context) if context is not None else {}
context['_import_current_module'] = current_module
fields = map(fix_import_export_id_paths, fields)
ir_model_data_obj = self.pool.get('ir.model.data')
def log(m):
if m['type'] == 'error':
raise Exception(m['message'])
if config.get('import_partial') and filename:
with open(config.get('import_partial'), 'rb') as partial_import_file:
data = pickle.load(partial_import_file)
position = data.get(filename, 0)
position = 0
try:
for res_id, xml_id, res, info in self._convert_records(cr, uid,
self._extract_records(cr, uid, fields, datas,
context=context, log=log),
context=context, log=log):
ir_model_data_obj._update(cr, uid, self._name,
current_module, res, mode=mode, xml_id=xml_id,
noupdate=noupdate, res_id=res_id, context=context)
position = info.get('rows', {}).get('to', 0) + 1
if config.get('import_partial') and filename and (not (position%100)):
with open(config.get('import_partial'), 'rb') as partial_import:
data = pickle.load(partial_import)
data[filename] = position
with open(config.get('import_partial'), 'wb') as partial_import:
pickle.dump(data, partial_import)
if context.get('defer_parent_store_computation'):
self._parent_store_compute(cr)
cr.commit()
except Exception, e:
cr.rollback()
return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''
if context.get('defer_parent_store_computation'):
self._parent_store_compute(cr)
return position, 0, 0, 0
def load(self, cr, uid, fields, data, context=None):
"""
Attempts to load the data matrix, and returns a list of ids (or
``False`` if there was an error and no id could be generated) and a
list of messages.
The ids are those of the records created and saved (in database), in
the same order they were extracted from the file. They can be passed
directly to :meth:`~read`
:param fields: list of fields to import, at the same index as the corresponding data
:type fields: list(str)
:param data: row-major matrix of data to import
:type data: list(list(str))
:param dict context:
:returns: {ids: list(int)|False, messages: [Message]}
"""
cr.execute('SAVEPOINT model_load')
messages = []
fields = map(fix_import_export_id_paths, fields)
ModelData = self.pool['ir.model.data'].clear_caches()
fg = self.fields_get(cr, uid, context=context)
mode = 'init'
current_module = ''
noupdate = False
ids = []
for id, xid, record, info in self._convert_records(cr, uid,
self._extract_records(cr, uid, fields, data,
context=context, log=messages.append),
context=context, log=messages.append):
try:
cr.execute('SAVEPOINT model_load_save')
except psycopg2.InternalError, e:
# broken transaction, exit and hope the source error was
# already logged
if not any(message['type'] == 'error' for message in messages):
messages.append(dict(info, type='error',message=
u"Unknown database error: '%s'" % e))
break
try:
ids.append(ModelData._update(cr, uid, self._name,
current_module, record, mode=mode, xml_id=xid,
noupdate=noupdate, res_id=id, context=context))
cr.execute('RELEASE SAVEPOINT model_load_save')
except psycopg2.Warning, e:
messages.append(dict(info, type='warning', message=str(e)))
cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
except psycopg2.Error, e:
messages.append(dict(
info, type='error',
**PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
# Failed to write, log to messages, rollback savepoint (to
# avoid broken transaction) and keep going
cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
if any(message['type'] == 'error' for message in messages):
cr.execute('ROLLBACK TO SAVEPOINT model_load')
ids = False
return {'ids': ids, 'messages': messages}
def _extract_records(self, cr, uid, fields_, data,
                     context=None, log=lambda a: None):
    """ Generates record dicts from the data sequence.

    The result is a generator of dicts mapping field names to raw
    (unconverted, unvalidated) values.

    For relational fields, if sub-fields were provided the value will be
    a list of sub-records

    The following sub-fields may be set on the record (by key):
    * None is the name_get for the record (to use with name_create/name_search)
    * "id" is the External ID for the record
    * ".id" is the Database ID for the record

    :param fields_: list of field paths (lists of path components)
    :param data: row-major matrix of raw string values
    :returns: generator of (record_dict, {'rows': {'from': i, 'to': j}})
    """
    columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
    # Fake columns to avoid special cases in extractor
    columns[None] = fields.char('rec_name')
    columns['id'] = fields.char('External ID')
    columns['.id'] = fields.integer('Database ID')

    # m2o fields can't be on multiple lines so exclude them from the
    # is_relational field rows filter, but special-case it later on to
    # be handled with relational fields (as it can have subfields)
    is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
    get_o2m_values = itemgetter_tuple(
        [index for index, field in enumerate(fields_)
               if columns[field[0]]._type == 'one2many'])
    get_nono2m_values = itemgetter_tuple(
        [index for index, field in enumerate(fields_)
               if columns[field[0]]._type != 'one2many'])
    # Checks if the provided row has any non-empty non-relational field
    def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
        return any(g(row)) and not any(f(row))

    index = 0
    while True:
        if index >= len(data): return
        row = data[index]
        # copy non-relational fields to record dict
        record = dict((field[0], value)
                      for field, value in itertools.izip(fields_, row)
                      if not is_relational(field[0]))
        # Get all following rows which have relational values attached to
        # the current record (no non-relational values)
        record_span = itertools.takewhile(
            only_o2m_values, itertools.islice(data, index + 1, None))
        # stitch record row back on for relational fields
        record_span = list(itertools.chain([row], record_span))
        for relfield in set(
                field[0] for field in fields_
                         if is_relational(field[0])):
            column = columns[relfield]
            # FIXME: how to not use _obj without relying on fields_get?
            Model = self.pool[column._obj]
            # get only cells for this sub-field, should be strictly
            # non-empty, field path [None] is for name_get column
            indices, subfields = zip(*((index, field[1:] or [None])
                                       for index, field in enumerate(fields_)
                                       if field[0] == relfield))
            # return all rows which have at least one value for the
            # subfields of relfield
            relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
            # recurse to extract the sub-records of this relational field
            record[relfield] = [subrecord
                                for subrecord, _subinfo in Model._extract_records(
                                    cr, uid, subfields, relfield_data,
                                    context=context, log=log)]
        yield record, {'rows': {
            'from': index,
            'to': index + len(record_span) - 1
        }}
        index += len(record_span)
def _convert_records(self, cr, uid, records,
                     context=None, log=lambda a: None):
    """ Converts records from the source iterable (recursive dicts of
    strings) into forms which can be written to the database (via
    self.create or (ir.model.data)._update)

    :returns: a generator of 4-tuples ``(dbid, xid, record, extras)``
              where ``dbid``/``xid`` are the database id / external id
              found in the source record (or False), ``record`` is the
              converted value dict and ``extras`` carries row-span info
              plus the record index
    :param log: callback receiving error/warning dicts for conversion
                failures (conversion itself never raises here)
    """
    if context is None: context = {}
    Converter = self.pool['ir.fields.converter']
    columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
    Translation = self.pool['ir.translation']
    # human-readable (possibly translated) labels, used only in messages
    field_names = dict(
        (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
                                     context.get('lang'))
             or column.string))
        for f, column in columns.iteritems())

    convert = Converter.for_model(cr, uid, self, context=context)

    def _log(base, field, exception):
        type = 'warning' if isinstance(exception, Warning) else 'error'
        # logs the logical (not human-readable) field name for automated
        # processing of response, but injects human readable in message
        record = dict(base, type=type, field=field,
                      message=unicode(exception.args[0]) % base)
        if len(exception.args) > 1 and exception.args[1]:
            record.update(exception.args[1])
        log(record)

    stream = CountingStream(records)
    for record, extras in stream:
        dbid = False
        xid = False
        # name_get/name_create
        if None in record: pass
        # xid
        if 'id' in record:
            xid = record['id']
        # dbid
        if '.id' in record:
            try:
                dbid = int(record['.id'])
            except ValueError:
                # in case of overridden id column
                dbid = record['.id']
            # a database id that matches no record is reported and dropped
            if not self.search(cr, uid, [('id', '=', dbid)], context=context):
                log(dict(extras,
                         type='error',
                         record=stream.index,
                         field='.id',
                         message=_(u"Unknown database identifier '%s'") % dbid))
                dbid = False

        converted = convert(record, lambda field, err:\
            _log(dict(extras, record=stream.index, field=field_names[field]), field, err))

        yield dbid, xid, converted, dict(extras, record=stream.index)
def get_invalid_fields(self, cr, uid):
    """Return the names of the fields that failed the last constraint check.

    ``_invalids`` is a set; callers get an independent list copy.
    """
    return [field_name for field_name in self._invalids]
def _validate(self, cr, uid, ids, context=None):
context = context or {}
lng = context.get('lang')
trans = self.pool.get('ir.translation')
error_msgs = []
for constraint in self._constraints:
fun, msg, fields = constraint
# We don't pass around the context here: validation code
# must always yield the same results.
if not fun(self, cr, uid, ids):
# Check presence of __call__ directly instead of using
# callable() because it will be deprecated as of Python 3.0
if hasattr(msg, '__call__'):
tmp_msg = msg(self, cr, uid, ids, context=context)
if isinstance(tmp_msg, tuple):
tmp_msg, params = tmp_msg
translated_msg = tmp_msg % params
else:
translated_msg = tmp_msg
else:
translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
error_msgs.append(
_("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
)
self._invalids.update(fields)
if error_msgs:
raise except_orm('ValidateError', '\n'.join(error_msgs))
else:
self._invalids.clear()
def default_get(self, cr, uid, fields_list, context=None):
    """
    Returns default values for the fields in fields_list.

    Precedence (lowest to highest): defaults of ``_inherits`` parents,
    the model's own ``_defaults`` (and ``fields.property`` values),
    user-defined defaults stored in ``ir.values``, and finally
    ``default_XXX`` keys found in the context.

    :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
    :type fields_list: list
    :param context: optional context dictionary - it may contains keys for specifying certain options
                    like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                    It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                    or override a default value for a field.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overriden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
    """
    # trigger view init hook
    self.view_init(cr, uid, fields_list, context)

    if not context:
        context = {}
    defaults = {}

    # get the default values for the inherited fields
    for t in self._inherits.keys():
        defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
            context))

    # get the default values defined in the object
    for f in fields_list:
        if f in self._defaults:
            if callable(self._defaults[f]):
                defaults[f] = self._defaults[f](self, cr, uid, context)
            else:
                defaults[f] = self._defaults[f]

        # resolve the column definition, whether own or inherited
        fld_def = ((f in self._columns) and self._columns[f]) \
            or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
            or False

        if isinstance(fld_def, fields.property):
            # property fields get their default from ir.property
            property_obj = self.pool.get('ir.property')
            prop_value = property_obj.get(cr, uid, f, self._name, context=context)
            if prop_value:
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                else:
                    defaults[f] = prop_value
            else:
                if f not in defaults:
                    defaults[f] = False

    # get the default values set by the user and override the default
    # values defined in the object
    ir_values_obj = self.pool.get('ir.values')
    res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
    for id, field, field_value in res:
        if field in fields_list:
            fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
            if fld_def._type == 'many2one':
                # drop the stored default when the target record is gone
                obj = self.pool.get(fld_def._obj)
                if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                    continue
            if fld_def._type == 'many2many':
                # keep only target ids that still exist
                obj = self.pool.get(fld_def._obj)
                field_value2 = []
                for i in range(len(field_value or [])):
                    if not obj.search(cr, uid, [('id', '=',
                        field_value[i])]):
                        continue
                    field_value2.append(field_value[i])
                field_value = field_value2
            if fld_def._type == 'one2many':
                # per sub-record, keep only m2o values whose target exists
                obj = self.pool.get(fld_def._obj)
                field_value2 = []
                for i in range(len(field_value or [])):
                    field_value2.append({})
                    for field2 in field_value[i]:
                        if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
                            obj2 = self.pool.get(obj._columns[field2]._obj)
                            if not obj2.search(cr, uid,
                                    [('id', '=', field_value[i][field2])]):
                                continue
                        elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
                            obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                            if not obj2.search(cr, uid,
                                    [('id', '=', field_value[i][field2])]):
                                continue
                        # TODO add test for many2many and one2many
                        field_value2[i][field2] = field_value[i][field2]
                field_value = field_value2
            defaults[field] = field_value

    # get the default values from the context
    for key in context or {}:
        if key.startswith('default_') and (key[8:] in fields_list):
            defaults[key[8:]] = context[key]
    return defaults
def fields_get_keys(self, cr, user, context=None):
    """Return the names of all fields of this model, including the
    fields contributed by every ``_inherits`` parent (recursively).
    """
    names = list(self._columns)
    # TODO I believe this loop can be replace by
    # res.extend(self._inherit_fields.key())
    for parent_model in self._inherits:
        names.extend(self.pool.get(parent_model).fields_get_keys(cr, user, context))
    return names
def _rec_name_fallback(self, cr, uid, context=None):
rec_name = self._rec_name
if rec_name not in self._columns:
rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
return rec_name
#
# Overload this method if you need a window title which depends on the context
#
def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    """Hook for a context-dependent window title.

    The base implementation provides no custom header; subclasses
    override this to return a title string.
    """
    return False
def user_has_groups(self, cr, uid, groups, context=None):
    """Return True if the user is at least member of one of the groups
    in ``groups``. Typically used to resolve ``groups`` attribute
    in view and model definitions.

    :param str groups: comma-separated list of fully-qualified group
                       external IDs, e.g.: ``base.group_user,base.group_system``
    :return: True if the current user is a member of one of the
             given groups
    """
    users_obj = self.pool.get('res.users')
    # Use a generator so any() short-circuits on the first matching
    # group; the previous list comprehension forced a has_group()
    # lookup for every group even after a match was found.
    return any(users_obj.has_group(cr, uid, group_ext_id)
               for group_ext_id in groups.split(','))
def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
    """Return the description of the fields in the node.

    In a normal call to this method, node is a complete view architecture
    but it is actually possible to give some sub-node (this is used so
    that the method can call itself recursively).

    Originally, the field descriptions are drawn from the node itself.
    But there is now some code calling fields_get() in order to merge some
    of those information in the architecture.

    Side effects on ``node``: group restrictions may remove elements or
    mark them invisible, strings/attributes may be translated, and
    modifiers are serialized back onto the node.

    :param in_tree_view: whether the node is inside a <tree> view
    :param model_fields: fields_get()-style descriptions used to seed
                         the modifiers of <field> nodes
    :return: dict mapping field names found in the node to their attrs
    """
    if context is None:
        context = {}
    result = False
    fields = {}
    children = True
    modifiers = {}

    def encode(s):
        # lxml works on byte strings here; force utf-8 encoding
        if isinstance(s, unicode):
            return s.encode('utf8')
        return s

    def check_group(node):
        """Apply group restrictions, may be set at view level or model level::
           * at view level this means the element should be made invisible to
             people who are not members
           * at model level (exclusively for fields, obviously), this means
             the field should be completely removed from the view, as it is
             completely unavailable for non-members

           :return: True if field should be included in the result of fields_view_get
        """
        if node.tag == 'field' and node.get('name') in self._all_columns:
            column = self._all_columns[node.get('name')].column
            if column.groups and not self.user_has_groups(cr, user,
                                                          groups=column.groups,
                                                          context=context):
                node.getparent().remove(node)
                fields.pop(node.get('name'), None)
                # no point processing view-level ``groups`` anymore, return
                return False
        if node.get('groups'):
            can_see = self.user_has_groups(cr, user,
                                           groups=node.get('groups'),
                                           context=context)
            if not can_see:
                node.set('invisible', '1')
                modifiers['invisible'] = True
                if 'attrs' in node.attrib:
                    del(node.attrib['attrs']) #avoid making field visible later
            del(node.attrib['groups'])
        return True

    if node.tag in ('field', 'node', 'arrow'):
        if node.get('object'):
            # diagram node/arrow pointing at another model: build a
            # synthetic <form> from the child <field> elements and
            # process it against that model
            attrs = {}
            views = {}
            xml = "<form>"
            for f in node:
                if f.tag == 'field':
                    xml += etree.tostring(f, encoding="utf-8")
            xml += "</form>"
            new_xml = etree.fromstring(encode(xml))
            ctx = context.copy()
            ctx['base_model_name'] = self._name
            xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
            views['form'] = {
                'arch': xarch,
                'fields': xfields
            }
            attrs = {'views': views}
            fields = xfields
        if node.get('name'):
            attrs = {}
            try:
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                else:
                    column = self._inherit_fields[node.get('name')][2]
            except Exception:
                column = False

            if column:
                # relational field: recursively process any embedded
                # sub-views (form/tree/graph/kanban) against the co-model
                relation = self.pool.get(column._obj)

                children = False
                views = {}
                for f in node:
                    if f.tag in ('form', 'tree', 'graph', 'kanban'):
                        node.remove(f)
                        ctx = context.copy()
                        ctx['base_model_name'] = self._name
                        xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                        views[str(f.tag)] = {
                            'arch': xarch,
                            'fields': xfields
                        }
                attrs = {'views': views}
                if node.get('widget') and node.get('widget') == 'selection':
                    # Prepare the cached selection list for the client. This needs to be
                    # done even when the field is invisible to the current user, because
                    # other events could need to change its value to any of the selectable ones
                    # (such as on_change events, refreshes, etc.)

                    # If domain and context are strings, we keep them for client-side, otherwise
                    # we evaluate them server-side to consider them when generating the list of
                    # possible values
                    # TODO: find a way to remove this hack, by allow dynamic domains
                    dom = []
                    if column._domain and not isinstance(column._domain, basestring):
                        dom = list(column._domain)
                    dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                    search_context = dict(context)
                    if column._context and not isinstance(column._context, basestring):
                        search_context.update(column._context)
                    attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                    if (node.get('required') and not int(node.get('required'))) or not column.required:
                        # optional field: offer an explicit empty choice
                        attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

            field = model_fields.get(node.get('name'))
            if field:
                transfer_field_to_modifiers(field, modifiers)

    elif node.tag in ('form', 'tree'):
        result = self.view_header_get(cr, user, False, node.tag, context)
        if result:
            node.set('string', result)
        in_tree_view = node.tag == 'tree'

    elif node.tag == 'calendar':
        # calendar attributes reference fields that must be fetched too
        for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
            if node.get(additional_field):
                fields[node.get(additional_field)] = {}

    if not check_group(node):
        # node must be removed, no need to proceed further with its children
        return fields

    # The view architeture overrides the python model.
    # Get the attrs before they are (possibly) deleted by check_group below
    transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

    # TODO remove attrs couterpart in modifiers when invisible is true ?

    # translate view
    if 'lang' in context:
        if node.text and node.text.strip():
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
            if trans:
                node.text = node.text.replace(node.text.strip(), trans)
        if node.tail and node.tail.strip():
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
            if trans:
                node.tail = node.tail.replace(node.tail.strip(), trans)

        if node.get('string') and not result:
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
            if trans == node.get('string') and ('base_model_name' in context):
                # If translation is same as source, perhaps we'd have more luck with the alternative model name
                # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
            if trans:
                node.set('string', trans)

        for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
            attr_value = node.get(attr_name)
            if attr_value:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
                if trans:
                    node.set(attr_name, trans)

    # recurse into children (skipped when an embedded sub-view was
    # already processed above, except for filters/separators of fields)
    for f in node:
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

    transfer_modifiers_to_node(modifiers, node)
    return fields
def _disable_workflow_buttons(self, cr, user, node):
""" Set the buttons in node to readonly if the user can't activate them. """
if user == 1:
# admin user can always activate workflow buttons
return node
# TODO handle the case of more than one workflow for a model or multiple
# transitions with different groups and same signal
usersobj = self.pool.get('res.users')
buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
for button in buttons:
user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
cr.execute("""SELECT DISTINCT t.group_id
FROM wkf
INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
INNER JOIN wkf_transition t ON (t.act_to = a.id)
WHERE wkf.osv = %s
AND t.signal = %s
AND t.group_id is NOT NULL
""", (self._name, button.get('name')))
group_ids = [x[0] for x in cr.fetchall() if x[0]]
can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
button.set('readonly', str(int(not can_click)))
return node
def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
    """ Return an architecture and a description of all the fields.

    The field description combines the result of fields_get() and
    __view_look_dom().

    :param node: the architecture as as an etree
    :return: a tuple (arch, fields) where arch is the given node as a
        string and fields is the description of all the fields.
    """
    fields = {}
    if node.tag == 'diagram':
        # diagram views draw their fields from the node/arrow sub-models
        if node.getchildren()[0].tag == 'node':
            node_model = self.pool.get(node.getchildren()[0].get('object'))
            node_fields = node_model.fields_get(cr, user, None, context)
            fields.update(node_fields)
            if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
                node.set("create", 'false')
        if node.getchildren()[1].tag == 'arrow':
            arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
            fields.update(arrow_fields)
    else:
        fields = self.fields_get(cr, user, None, context)
    fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
    node = self._disable_workflow_buttons(cr, user, node)
    if node.tag in ('kanban', 'tree', 'form', 'gantt'):
        # reflect missing access rights as create/delete/edit="false"
        for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
            if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
                node.set(action, 'false')
    arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
    # keep only descriptions for fields actually present in the arch
    for k in fields.keys():
        if k not in fields_def:
            del fields[k]
    for field in fields_def:
        if field == 'id':
            # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
            fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
        elif field in fields:
            fields[field].update(fields_def[field])
        else:
            # the view references a field unknown to the model: build a
            # diagnostic naming the view(s) that mention it, then fail
            cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
            res = cr.fetchall()[:]
            model = res[0][1]
            res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
            msg = "\n * ".join([r[0] for r in res])
            msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
            _logger.error(msg)
            raise except_orm('View error', msg)
    return arch, fields
def _get_default_form_view(self, cr, user, context=None):
    """ Generates a default single-line form view using all fields
    of the current model except the m2m and o2m ones.

    :param cr: database cursor
    :param int user: user id
    :param dict context: connection context
    :returns: a form view as an lxml document
    :rtype: etree._Element
    """
    # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
    form = etree.Element('form', string=self._description)
    descriptors = self.fields_get(cr, user, context=context)
    for field_name, descriptor in descriptors.iteritems():
        field_type = descriptor['type']
        # x2many fields are skipped entirely in the default form
        if field_type in ('one2many', 'many2many'):
            continue
        etree.SubElement(form, 'field', name=field_name)
        # text fields get their own line
        if field_type == 'text':
            etree.SubElement(form, 'newline')
    return form
def _get_default_search_view(self, cr, user, context=None):
""" Generates a single-field search view, based on _rec_name.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a tree view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('search', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
return view
def _get_default_tree_view(self, cr, user, context=None):
""" Generates a single-field tree view, based on _rec_name.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a tree view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('tree', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
return view
def _get_default_calendar_view(self, cr, user, context=None):
""" Generates a default calendar view by trying to infer
calendar fields from a number of pre-set attribute names
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a calendar view
:rtype: etree._Element
"""
def set_first_of(seq, in_, to):
"""Sets the first value of ``seq`` also found in ``in_`` to
the ``to`` attribute of the view being closed over.
Returns whether it's found a suitable value (and set it on
the attribute) or not
"""
for item in seq:
if item in in_:
view.set(to, item)
return True
return False
view = etree.Element('calendar', string=self._description)
etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))
if self._date_name not in self._columns:
date_found = False
for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
if dt in self._columns:
self._date_name = dt
date_found = True
break
if not date_found:
raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
view.set('date_start', self._date_name)
set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
self._columns, 'color')
if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
self._columns, 'date_stop'):
if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
self._columns, 'date_delay'):
raise except_orm(
_('Invalid Object Architecture!'),
_("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
return view
#
# if view_id, view_type is not required
#
def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
    """
    Get the detailed composition of the requested view like fields, model, view architecture

    :param cr: database cursor
    :param user: current user id
    :param view_id: id of the view or None
    :param view_type: type of the view to return if view_id is None ('form', tree', ...)
    :param context: context arguments, like lang, time zone
    :param toolbar: true to include contextual actions
    :param submenu: deprecated
    :return: dictionary describing the composition of the requested view (including inherited views and extensions)
    :raise AttributeError:
                * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                * if some tag other than 'position' is found in parent view
    :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
    """
    if context is None:
        context = {}

    def encode(s):
        # lxml works on byte strings here; force utf-8 encoding
        if isinstance(s, unicode):
            return s.encode('utf8')
        return s

    def raise_view_error(error_msg, child_view_id):
        # enrich the message with the xml ids of both views involved
        view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
        error_msg = error_msg % {'parent_xml_id': view.xml_id}
        raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                             % (child_view.xml_id, self._name, error_msg))

    def locate(source, spec):
        """ Locate a node in a source (parent) architecture.

        Given a complete source (parent) architecture (i.e. the field
        `arch` in a view), and a 'spec' node (a node in an inheriting
        view that specifies the location in the source view of what
        should be changed), return (if it exists) the node in the
        source view matching the specification.

        :param source: a parent architecture to modify
        :param spec: a modifying node in an inheriting view
        :return: a node in the source matching the spec
        """
        if spec.tag == 'xpath':
            nodes = source.xpath(spec.get('expr'))
            return nodes[0] if nodes else None
        elif spec.tag == 'field':
            # Only compare the field name: a field can be only once in a given view
            # at a given level (and for multilevel expressions, we should use xpath
            # inheritance spec anyway).
            for node in source.getiterator('field'):
                if node.get('name') == spec.get('name'):
                    return node
            return None
        # generic tag match: all spec attributes (except position/version)
        # must be equal on the candidate node
        for node in source.getiterator(spec.tag):
            if isinstance(node, SKIPPED_ELEMENT_TYPES):
                continue
            if all(node.get(attr) == spec.get(attr) \
                    for attr in spec.attrib
                    if attr not in ('position','version')):
                # Version spec should match parent's root element's version
                if spec.get('version') and spec.get('version') != source.get('version'):
                    return None
                return node
        return None

    def apply_inheritance_specs(source, specs_arch, inherit_id=None):
        """ Apply an inheriting view.

        Apply to a source architecture all the spec nodes (i.e. nodes
        describing where and what changes to apply to some parent
        architecture) given by an inheriting view.

        :param source: a parent architecture to modify
        :param specs_arch: a modifying architecture in an inheriting view
        :param inherit_id: the database id of the inheriting view
        :return: a modified source where the specs are applied
        """
        specs_tree = etree.fromstring(encode(specs_arch))
        # Queue of specification nodes (i.e. nodes describing where and
        # changes to apply to some parent architecture).
        specs = [specs_tree]

        while len(specs):
            spec = specs.pop(0)
            if isinstance(spec, SKIPPED_ELEMENT_TYPES):
                continue
            if spec.tag == 'data':
                # NOTE(review): enqueues children of specs_tree (the root),
                # not of ``spec`` — works because <data> only occurs as the
                # root element of an inheriting arch; verify before reuse.
                specs += [ c for c in specs_tree ]
                continue
            node = locate(source, spec)
            if node is not None:
                pos = spec.get('position', 'inside')
                if pos == 'replace':
                    if node.getparent() is None:
                        # replacing the root: the spec's first child
                        # becomes the new architecture
                        source = copy.deepcopy(spec[0])
                    else:
                        for child in spec:
                            node.addprevious(child)
                        node.getparent().remove(node)
                elif pos == 'attributes':
                    # set/delete attributes on the located node
                    for child in spec.getiterator('attribute'):
                        attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                        if attribute[1]:
                            node.set(attribute[0], attribute[1])
                        else:
                            del(node.attrib[attribute[0]])
                else:
                    sib = node.getnext()
                    for child in spec:
                        if pos == 'inside':
                            node.append(child)
                        elif pos == 'after':
                            if sib is None:
                                node.addnext(child)
                                node = child
                            else:
                                sib.addprevious(child)
                        elif pos == 'before':
                            node.addprevious(child)
                        else:
                            raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
            else:
                # spec matched nothing in the parent: build a readable
                # rendition of the spec element for the error message
                attrs = ''.join([
                    ' %s="%s"' % (attr, spec.get(attr))
                    for attr in spec.attrib
                    if attr != 'position'
                ])
                tag = "<%s%s>" % (spec.tag, attrs)
                if spec.get('version') and spec.get('version') != source.get('version'):
                    raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
                                     (tag, spec.get('version'), source.get('version')), inherit_id)
                raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
        return source

    def apply_view_inheritance(cr, user, source, inherit_id):
        """ Apply all the (directly and indirectly) inheriting views.

        :param source: a parent architecture to modify (with parent
            modifications already applied)
        :param inherit_id: the database view_id of the parent view
        :return: a modified source where all the modifying architecture
            are applied
        """
        sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name, context=context)
        for (view_arch, view_id) in sql_inherit:
            source = apply_inheritance_specs(source, view_arch, view_id)
            # depth-first: each child view's own children apply next
            source = apply_view_inheritance(cr, user, source, view_id)
        return source

    result = {'type': view_type, 'model': self._name}

    sql_res = False
    parent_view_model = None
    view_ref = context.get(view_type + '_view_ref')
    # Search for a root (i.e. without any parent) view.
    while True:
        if view_ref and not view_id:
            # resolve a module.xml_id view reference from the context
            if '.' in view_ref:
                module, view_ref = view_ref.split('.', 1)
                cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                view_ref_res = cr.fetchone()
                if view_ref_res:
                    view_id = view_ref_res[0]

        if view_id:
            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          FROM ir_ui_view
                          WHERE id=%s""", (view_id,))
        else:
            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          FROM ir_ui_view
                          WHERE model=%s AND type=%s AND inherit_id IS NULL
                          ORDER BY priority""", (self._name, view_type))
        sql_res = cr.dictfetchone()

        if not sql_res:
            break

        # climb towards the root view; stop once a non-inheriting view
        # is reached
        view_id = sql_res['inherit_id'] or sql_res['id']
        parent_view_model = sql_res['model']
        if not sql_res['inherit_id']:
            break

    # if a view was found
    if sql_res:
        source = etree.fromstring(encode(sql_res['arch']))
        result.update(
            arch=apply_view_inheritance(cr, user, source, sql_res['id']),
            type=sql_res['type'],
            view_id=sql_res['id'],
            name=sql_res['name'],
            field_parent=sql_res['field_parent'] or False)
    else:
        # otherwise, build some kind of default view
        try:
            view = getattr(self, '_get_default_%s_view' % view_type)(
                cr, user, context)
        except AttributeError:
            # what happens here, graph case?
            raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)

        result.update(
            arch=view,
            name='default',
            field_parent=False,
            view_id=0)

    if parent_view_model != self._name:
        # inherited from a different model: remember it so view-string
        # translations can fall back to that model's sources
        ctx = context.copy()
        ctx['base_model_name'] = parent_view_model
    else:
        ctx = context
    xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
    result['arch'] = xarch
    result['fields'] = xfields

    if toolbar:
        # attach the contextual print/action/relate actions
        def clean(x):
            x = x[2]
            # strip heavy report payloads from the returned action dicts
            for key in ('report_sxw_content', 'report_rml_content',
                        'report_sxw', 'report_rml',
                        'report_sxw_content_data', 'report_rml_content_data'):
                if key in x:
                    del x[key]
            return x
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
                                     'client_print_multi', [(self._name, False)], False,
                                     context)
        resaction = ir_values_obj.get(cr, user, 'action',
                                      'client_action_multi', [(self._name, False)], False,
                                      context)
        resrelate = ir_values_obj.get(cr, user, 'action',
                                      'client_action_relate', [(self._name, False)], False,
                                      context)
        resaction = [clean(action) for action in resaction
                     if view_type == 'tree' or not action[2].get('multi')]
        resprint = [clean(print_) for print_ in resprint
                    if view_type == 'tree' or not print_[2].get('multi')]
        #When multi="True" set it will display only in More of the list view
        resrelate = [clean(action) for action in resrelate
                     if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]

        for x in itertools.chain(resprint, resaction, resrelate):
            x['string'] = x['name']

        result['toolbar'] = {
            'print': resprint,
            'action': resaction,
            'relate': resrelate
        }
    return result
# Single-underscore alias: `__view_look_dom_arch` is name-mangled inside
# this class body, so this exposes the implementation under a reachable name.
_view_look_dom_arch = __view_look_dom_arch
def search_count(self, cr, user, args, context=None):
    """Return the number of records matching the ``args`` search domain.

    Delegates to :meth:`search` with ``count=True``; some overrides of
    ``search`` may still return a list of ids, in which case its length
    is returned instead.
    """
    context = context or {}
    result = self.search(cr, user, args, context=context, count=True)
    return len(result) if isinstance(result, list) else result
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
"""
Search for records based on a search domain.
:param cr: database cursor
:param user: current user id
:param args: list of tuples specifying the search domain [('field_name', 'operator', value), ...]. Pass an empty list to match all records.
:param offset: optional number of results to skip in the returned values (default: 0)
:param limit: optional max number of records to return (default: **None**)
:param order: optional columns to sort by (default: self._order=id )
:param context: optional context arguments, like lang, time zone
:type context: dictionary
:param count: optional (default: **False**), if **True**, returns only the number of records matching the criteria, not their ids
:return: id or list of ids of records matching the criteria
:rtype: integer or list of integers
:raise AccessError: * if user tries to bypass access rules for read on the requested object.
**Expressing a search domain (args)**
Each tuple in the search domain needs to have 3 elements, in the form: **('field_name', 'operator', value)**, where:
* **field_name** must be a valid name of field of the object model, possibly following many-to-one relationships using dot-notation, e.g 'street' or 'partner_id.country' are valid values.
* **operator** must be a string with a valid comparison operator from this list: ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of, parent_left, parent_right``
The semantics of most of these operators are obvious.
The ``child_of`` operator will look for records who are children or grand-children of a given record,
according to the semantics of this model (i.e following the relationship field named by
``self._parent_name``, by default ``parent_id``.
* **value** must be a valid value to compare with the values of **field_name**, depending on its type.
Domain criteria can be combined using 3 logical operators than can be added between tuples: '**&**' (logical AND, default), '**|**' (logical OR), '**!**' (logical NOT).
These are **prefix** operators and the arity of the '**&**' and '**|**' operator is 2, while the arity of the '**!**' is just 1.
Be very careful about this when you combine them the first time.
Here is an example of searching for Partners named *ABC* from Belgium and Germany whose language is not english ::
[('name','=','ABC'),'!',('language.code','=','en_US'),'|',('country_id.code','=','be'),('country_id.code','=','de'))
The '&' is omitted as it is the default, and of course we could have used '!=' for the language, but what this domain really represents is::
(name is 'ABC' AND (language is NOT english) AND (country is Belgium OR Germany))
"""
return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
def name_get(self, cr, user, ids, context=None):
"""Returns the preferred display value (text representation) for the records with the
given ``ids``. By default this will be the value of the ``name`` column, unless
the model implements a custom behavior.
Can sometimes be seen as the inverse function of :meth:`~.name_search`, but it is not
guaranteed to be.
:rtype: list(tuple)
:return: list of pairs ``(id,text_repr)`` for all records with the given ``ids``.
"""
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
if self._rec_name in self._all_columns:
rec_name_column = self._all_columns[self._rec_name].column
return [(r['id'], rec_name_column.as_display_name(cr, user, self, r[self._rec_name], context=context))
for r in self.read(cr, user, ids, [self._rec_name],
load='_classic_write', context=context)]
return [(id, "%s,%s" % (self._name, id)) for id in ids]
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
"""Search for records that have a display name matching the given ``name`` pattern if compared
with the given ``operator``, while also matching the optional search domain (``args``).
This is used for example to provide suggestions based on a partial value for a relational
field.
Sometimes be seen as the inverse function of :meth:`~.name_get`, but it is not
guaranteed to be.
This method is equivalent to calling :meth:`~.search` with a search domain based on ``name``
and then :meth:`~.name_get` on the result of the search.
:param list args: optional search domain (see :meth:`~.search` for syntax),
specifying further restrictions
:param str operator: domain operator for matching the ``name`` pattern, such as ``'like'``
or ``'='``.
:param int limit: optional max number of records to return
:rtype: list
:return: list of pairs ``(id,text_repr)`` for all matching records.
"""
return self._name_search(cr, user, name, args, operator, context, limit)
def name_create(self, cr, uid, name, context=None):
"""Creates a new record by calling :meth:`~.create` with only one
value provided: the name of the new record (``_rec_name`` field).
The new record will also be initialized with any default values applicable
to this model, or provided through the context. The usual behavior of
:meth:`~.create` applies.
Similarly, this method may raise an exception if the model has multiple
required fields and some do not have default values.
:param name: name of the record to create
:rtype: tuple
:return: the :meth:`~.name_get` pair value for the newly-created record.
"""
rec_id = self.create(cr, uid, {self._rec_name: name}, context)
return self.name_get(cr, uid, [rec_id], context)[0]
# private implementation of name_search, allows passing a dedicated user for the name_get part to
# solve some access rights issues
def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
if args is None:
args = []
if context is None:
context = {}
args = args[:]
# optimize out the default criterion of ``ilike ''`` that matches everything
if not (name == '' and operator == 'ilike'):
args += [(self._rec_name, operator, name)]
access_rights_uid = name_get_uid or user
ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
res = self.name_get(cr, access_rights_uid, ids, context)
return res
    def read_string(self, cr, uid, id, langs, fields=None, context=None):
        """Read the translated labels (field "string" attributes) of this model.

        For each language code in ``langs``, returns the stored translation of
        each requested field label, falling back to the untranslated
        ``string`` attribute of the column when no translation exists.
        Labels of fields inherited through ``_inherits`` are collected
        recursively from the parent models and merged in.

        :return: dict mapping each language code to a dict of the form
                 ``{'code': lang, field_name: label, ...}``
        """
        res = {}
        res2 = {}
        self.pool.get('ir.translation').check_access_rights(cr, uid, 'read')
        if not fields:
            fields = self._columns.keys() + self._inherit_fields.keys()
        #FIXME: collect all calls to _get_source into one SQL call.
        for lang in langs:
            res[lang] = {'code': lang}
            for f in fields:
                if f in self._columns:
                    res_trans = self.pool.get('ir.translation')._get_source(cr, uid, self._name+','+f, 'field', lang)
                    if res_trans:
                        res[lang][f] = res_trans
                    else:
                        # no translation recorded: fall back to the raw label
                        res[lang][f] = self._columns[f].string
        for table in self._inherits:
            # recurse into each _inherits parent for the inherited field labels
            cols = intersect(self._inherit_fields.keys(), fields)
            res2 = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
            for lang in res2:
                if lang in res:
                    res[lang]['code'] = lang
                for f in res2[lang]:
                    res[lang][f] = res2[lang][f]
        return res
    def write_string(self, cr, uid, id, langs, vals, context=None):
        """Write translated labels (field "string" attributes) for this model.

        For each language in ``langs``, records ``vals[field]`` as the
        'field'-type translation of the corresponding column (the column's
        untranslated ``string`` is stored as the source), then propagates the
        values to the ``_inherits`` parent models owning inherited fields.
        """
        self.pool.get('ir.translation').check_access_rights(cr, uid, 'write')
        #FIXME: try to only call the translation in one SQL
        for lang in langs:
            for field in vals:
                if field in self._columns:
                    src = self._columns[field].string
                    self.pool.get('ir.translation')._set_ids(cr, uid, self._name+','+field, 'field', lang, [0], vals[field], src)
        for table in self._inherits:
            # delegate inherited field labels to the parent model
            cols = intersect(self._inherit_fields.keys(), vals)
            if cols:
                self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
        return True
    def _add_missing_default_values(self, cr, uid, values, context=None):
        """Complete ``values`` with default values for all missing fields.

        Fields inherited through ``_inherits`` are skipped when the link field
        to the parent record is itself provided in ``values`` (the parent
        record already carries those values).  x2many defaults returned as
        plain lists of ids / dicts are normalized to the ``(6, 0, ids)`` /
        ``(0, 0, vals)`` command format expected by create/write.

        :return: dict of values; provided values always override defaults
        """
        missing_defaults = []
        avoid_tables = [] # avoid overriding inherited values when parent is set
        for tables, parent_field in self._inherits.items():
            if parent_field in values:
                avoid_tables.append(tables)
        for field in self._columns.keys():
            if not field in values:
                missing_defaults.append(field)
        for field in self._inherit_fields.keys():
            if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
                missing_defaults.append(field)
        if len(missing_defaults):
            # override defaults with the provided values, never allow the other way around
            defaults = self.default_get(cr, uid, missing_defaults, context)
            for dv in defaults:
                # many2many default given as a plain list of ids -> (6,0,ids) command
                if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                        or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                        and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                    defaults[dv] = [(6, 0, defaults[dv])]
                # one2many default given as a list of dicts -> (0,0,vals) commands
                if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                        or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                        and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                    defaults[dv] = [(0, 0, x) for x in defaults[dv]]
            defaults.update(values)
            values = defaults
        return values
def clear_caches(self):
""" Clear the caches
This clears the caches associated to methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi``.
"""
try:
getattr(self, '_ormcache')
self._ormcache = {}
self.pool._any_cache_cleared = True
except AttributeError:
pass
    def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
                                 read_group_result, read_group_order=None, context=None):
        """Helper method for filling in empty groups for all possible values of
           the field being grouped by.

        :param groupby: name of the field currently grouped on (must be a key
                        of ``self._group_by_full``)
        :param groupby_list: full (multi-level) group_by specification
        :param aggregated_fields: names of the aggregated (numeric) fields
        :param read_group_result: group dicts produced by read_group()
        :return: merged list of group dicts, one per possible group value
        """
        # self._group_by_full should map groupable fields to a method that returns
        # a list of all aggregated values that we want to display for this field,
        # in the form of a m2o-like pair (key,label).
        # This is useful to implement kanban views for instance, where all columns
        # should be displayed even if they don't contain any record.
        # Grab the list of all groups that should be displayed, including all present groups
        present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
        all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
                                                         read_group_order=read_group_order,
                                                         access_rights_uid=openerp.SUPERUSER_ID,
                                                         context=context)
        # template for groups that exist as possible values but have no records
        result_template = dict.fromkeys(aggregated_fields, False)
        result_template[groupby + '_count'] = 0
        if groupby_list and len(groupby_list) > 1:
            result_template['__context'] = {'group_by': groupby_list[1:]}
        # Merge the left_side (current results as dicts) with the right_side (all
        # possible values as m2o pairs). Both lists are supposed to be using the
        # same ordering, and can be merged in one pass.
        result = []
        known_values = {}
        def append_left(left_side):
            # take a group coming from the actual read_group result
            grouped_value = left_side[groupby] and left_side[groupby][0]
            if not grouped_value in known_values:
                result.append(left_side)
                known_values[grouped_value] = left_side
            else:
                count_attr = groupby + '_count'
                known_values[grouped_value].update({count_attr: left_side[count_attr]})
        def append_right(right_side):
            # take a possible group value that has no matching records (yet)
            grouped_value = right_side[0]
            if not grouped_value in known_values:
                line = dict(result_template)
                line[groupby] = right_side
                line['__domain'] = [(groupby,'=',grouped_value)] + domain
                result.append(line)
                known_values[grouped_value] = line
        # classic ordered two-list merge
        while read_group_result or all_groups:
            left_side = read_group_result[0] if read_group_result else None
            right_side = all_groups[0] if all_groups else None
            assert left_side is None or left_side[groupby] is False \
                 or isinstance(left_side[groupby], (tuple,list)), \
                'M2O-like pair expected, got %r' % left_side[groupby]
            assert right_side is None or isinstance(right_side, (tuple,list)), \
                'M2O-like pair expected, got %r' % right_side
            if left_side is None:
                append_right(all_groups.pop(0))
            elif right_side is None:
                append_left(read_group_result.pop(0))
            elif left_side[groupby] == right_side:
                append_left(read_group_result.pop(0))
                all_groups.pop(0) # discard right_side
            elif not left_side[groupby] or not left_side[groupby][0]:
                # left side == "Undefined" entry, not present on right_side
                append_left(read_group_result.pop(0))
            else:
                append_right(all_groups.pop(0))
        if folded:
            # mark groups that should be rendered folded (e.g. kanban columns)
            for r in result:
                r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
        return result
    def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
        """
        Get the list of records in list view grouped by the given ``groupby`` fields
        :param cr: database cursor
        :param uid: current user id
        :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
        :param list fields: list of fields present in the list view specified on the object
        :param list groupby: fields by which the records will be grouped
        :param int offset: optional number of records to skip
        :param int limit: optional max number of records to return
        :param dict context: context arguments, like lang, time zone
        :param list orderby: optional ``order by`` specification, for
                             overriding the natural sort ordering of the
                             groups, see also :py:meth:`~osv.osv.osv.search`
                             (supported only for many2one fields currently)
        :return: list of dictionaries(one dictionary for each record) containing:
                    * the values of fields grouped by the fields in ``groupby`` argument
                    * __domain: list of tuples specifying the search criteria
                    * __context: dictionary with argument like ``groupby``
        :rtype: [{'field_name_1': value, ...]
        :raise AccessError: * if user has no read rights on the requested object
                            * if user tries to bypass access rules for read on the requested object
        """
        context = context or {}
        self.check_access_rights(cr, uid, 'read')
        if not fields:
            fields = self._columns.keys()
        # build the WHERE clause from the domain plus the record rules
        query = self._where_calc(cr, uid, domain, context=context)
        self._apply_ir_rules(cr, uid, query, 'read', context=context)
        # Take care of adding join(s) if groupby is an '_inherits'ed field
        groupby_list = groupby
        qualified_groupby_field = groupby
        if groupby:
            if isinstance(groupby, list):
                # only the first level of grouping is computed here; deeper
                # levels are passed back to the client through __context
                groupby = groupby[0]
            qualified_groupby_field = self._inherits_join_calc(groupby, query)
        if groupby:
            assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
            groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
            assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fget = self.fields_get(cr, uid, fields)
        flist = ''
        group_count = group_by = groupby
        if groupby:
            if fget.get(groupby):
                groupby_type = fget[groupby]['type']
                if groupby_type in ('date', 'datetime'):
                    # dates/datetimes are grouped by month ('yyyy-mm')
                    qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
                    flist = "%s as %s " % (qualified_groupby_field, groupby)
                elif groupby_type == 'boolean':
                    qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
                    flist = "%s as %s " % (qualified_groupby_field, groupby)
                else:
                    flist = qualified_groupby_field
            else:
                # Don't allow arbitrary values, as this would be a SQL injection vector!
                raise except_orm(_('Invalid group_by'),
                                 _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))
        # numeric stored fields are aggregated with their group_operator (sum by default)
        aggregated_fields = [
            f for f in fields
            if f not in ('id', 'sequence')
            if fget[f]['type'] in ('integer', 'float')
            if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
        for f in aggregated_fields:
            group_operator = fget[f].get('group_operator', 'sum')
            if flist:
                flist += ', '
            qualified_field = '"%s"."%s"' % (self._table, f)
            flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)
        gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''
        from_clause, where_clause, where_clause_params = query.get_sql()
        where_clause = where_clause and ' WHERE ' + where_clause
        limit_str = limit and ' limit %d' % limit or ''
        offset_str = offset and ' offset %d' % offset or ''
        if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
            group_count = '_'
        # one row per group, carrying min(id) (used as the group's key), the
        # record count and the aggregated numeric fields
        cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
        alldata = {}
        groupby = group_by
        for r in cr.dictfetchall():
            for fld, val in r.items():
                if val is None: r[fld] = False
            alldata[r['id']] = r
            del r['id']
        order = orderby or groupby
        # re-search on the representative min(id)s to apply the requested ordering
        data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)
        # the IDs of records that have groupby field value = False or '' should be included too
        data_ids += set(alldata.keys()).difference(data_ids)
        if groupby:
            data = self.read(cr, uid, data_ids, [groupby], context=context)
            # restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
            data_dict = dict((d['id'], d[groupby] ) for d in data)
            result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
        else:
            result = [{'id': i} for i in data_ids]
        for d in result:
            if groupby:
                d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
                if not isinstance(groupby_list, (str, unicode)):
                    if groupby or not context.get('group_by_no_leaf', False):
                        d['__context'] = {'group_by': groupby_list[1:]}
            if groupby and groupby in fget:
                if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                    # replace the raw 'yyyy-mm' group key with a localized
                    # "Month Year" label and a whole-month __domain range
                    dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                    days = calendar.monthrange(dt.year, dt.month)[1]
                    date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
                    d[groupby] = babel.dates.format_date(
                        date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
                    d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                                     (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
                del alldata[d['id']][groupby]
            d.update(alldata[d['id']])
            del d['id']
        if groupby and groupby in self._group_by_full:
            # expand with the empty groups for all possible group values
            result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
                                                   aggregated_fields, result, read_group_order=order,
                                                   context=context)
        return result
def _inherits_join_add(self, current_model, parent_model_name, query):
"""
Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
:param current_model: current model object
:param parent_model_name: name of the parent model for which the clauses should be added
:param query: query object on which the JOIN should be added
"""
inherits_field = current_model._inherits[parent_model_name]
parent_model = self.pool.get(parent_model_name)
parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
return parent_alias
def _inherits_join_calc(self, field, query):
"""
Adds missing table select and join clause(s) to ``query`` for reaching
the field coming from an '_inherits' parent table (no duplicates).
:param field: name of inherited field to reach
:param query: query object on which the JOIN should be added
:return: qualified name of field, to be used in SELECT clause
"""
current_table = self
parent_alias = '"%s"' % current_table._table
while field in current_table._inherit_fields and not field in current_table._columns:
parent_model_name = current_table._inherit_fields[field][0]
parent_table = self.pool.get(parent_model_name)
parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
current_table = parent_table
return '%s."%s"' % (parent_alias, field)
    def _parent_store_compute(self, cr):
        """Rebuild the ``parent_left``/``parent_right`` nested-set columns.

        Performs a depth-first traversal of the record hierarchy (following
        ``self._parent_name``, ordered by ``self._parent_order`` when set) and
        assigns each record the left/right bounds of the interval covering its
        subtree.  No-op unless ``_parent_store`` is enabled on the model.
        """
        if not self._parent_store:
            return
        _logger.info('Computing parent left and right for table %s...', self._table)
        def browse_rec(root, pos=0):
            # recursively assign the [pos, pos2] interval to `root`'s subtree
            # TODO: set order
            where = self._parent_name+'='+str(root)
            if not root:
                where = self._parent_name+' IS NULL'
            if self._parent_order:
                where += ' order by '+self._parent_order
            cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
            pos2 = pos + 1
            for id in cr.fetchall():
                pos2 = browse_rec(id[0], pos2)
            cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
            return pos2 + 1
        # start from the root records (those without a parent)
        query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
        if self._parent_order:
            query += ' order by ' + self._parent_order
        pos = 0
        cr.execute(query)
        for (root,) in cr.fetchall():
            pos = browse_rec(root, pos)
        return True
    def _update_store(self, cr, f, k):
        """Backfill the database column of the stored function field ``k``.

        Recomputes the field via ``f.get()`` for every existing record — in
        batches of 40 ids, as superuser — and writes the results back with a
        direct UPDATE (bypassing write()).

        :param f: the fields.function column object to recompute
        :param k: the field/column name
        """
        _logger.info("storing computed values of fields.function '%s'", k)
        # _symbol_set = (sql placeholder, python->sql conversion function)
        ss = self._columns[k]._symbol_set
        update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, ss[0])
        cr.execute('select id from '+self._table)
        ids_lst = map(lambda x: x[0], cr.fetchall())
        while ids_lst:
            iids = ids_lst[:40]
            ids_lst = ids_lst[40:]
            res = f.get(cr, self, iids, k, SUPERUSER_ID, {})
            for key, val in res.items():
                if f._multi:
                    # multi-field function: pick this field's value out of the dict
                    val = val[k]
                # if val is a many2one, just write the ID
                if type(val) == tuple:
                    val = val[0]
                if val is not False:
                    cr.execute(update_query, (ss[1](val), key))
def _check_selection_field_value(self, cr, uid, field, value, context=None):
"""Raise except_orm if value is not among the valid values for the selection field"""
if self._columns[field]._type == 'reference':
val_model, val_id_str = value.split(',', 1)
val_id = False
try:
val_id = long(val_id_str)
except ValueError:
pass
if not val_id:
raise except_orm(_('ValidateError'),
_('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
val = val_model
else:
val = value
if isinstance(self._columns[field].selection, (tuple, list)):
if val in dict(self._columns[field].selection):
return
elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
return
raise except_orm(_('ValidateError'),
_('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
# iterate on the database columns to drop the NOT NULL constraints
# of fields which were required but have been removed (or will be added by another module)
columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
columns += MAGIC_COLUMNS
cr.execute("SELECT a.attname, a.attnotnull"
" FROM pg_class c, pg_attribute a"
" WHERE c.relname=%s"
" AND c.oid=a.attrelid"
" AND a.attisdropped=%s"
" AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
" AND a.attname NOT IN %s", (self._table, False, tuple(columns))),
for column in cr.dictfetchall():
if log:
_logger.debug("column %s is in the table %s but not in the corresponding object %s",
column['attname'], self._table, self._name)
if column['attnotnull']:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
_schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
self._table, column['attname'])
    def _save_constraint(self, cr, constraint_name, type):
        """
        Record the creation of a constraint for this model, to make it possible
        to delete it later when the module is uninstalled. Type can be either
        'f' or 'u' depending on the constraint being a foreign key or not.

        :param constraint_name: SQL name of the constraint
        :param type: 'f' (foreign key) or 'u' (any other constraint)
        """
        if not self._module:
            # no need to save constraints for custom models as they're not part
            # of any module
            return
        assert type in ('f', 'u')
        # record the (module, constraint) pair only once
        cr.execute("""
            SELECT 1 FROM ir_model_constraint, ir_module_module
            WHERE ir_model_constraint.module=ir_module_module.id
                AND ir_model_constraint.name=%s
                AND ir_module_module.name=%s
            """, (constraint_name, self._module))
        if not cr.rowcount:
            cr.execute("""
                INSERT INTO ir_model_constraint
                    (name, date_init, date_update, module, model, type)
                VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                    (SELECT id FROM ir_module_module WHERE name=%s),
                    (SELECT id FROM ir_model WHERE model=%s), %s)""",
                (constraint_name, self._module, self._name, type))
    def _save_relation_table(self, cr, relation_table):
        """
        Record the creation of a many2many for this model, to make it possible
        to delete it later when the module is uninstalled.

        :param relation_table: name of the m2m relation table to track
        """
        # record the (module, relation table) pair only once
        cr.execute("""
            SELECT 1 FROM ir_model_relation, ir_module_module
            WHERE ir_model_relation.module=ir_module_module.id
                AND ir_model_relation.name=%s
                AND ir_module_module.name=%s
            """, (relation_table, self._module))
        if not cr.rowcount:
            cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
                                 VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
                                    (SELECT id FROM ir_module_module WHERE name=%s),
                                    (SELECT id FROM ir_model WHERE model=%s))""",
                       (relation_table, self._module, self._name))
# checked version: for direct m2o starting from `self`
def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
assert self.is_transient() or not dest_model.is_transient(), \
'Many2One relationships from non-transient Model to TransientModel are forbidden'
if self.is_transient() and not dest_model.is_transient():
# TransientModel relationships to regular Models are annoying
# usually because they could block deletion due to the FKs.
# So unless stated otherwise we default them to ondelete=cascade.
ondelete = ondelete or 'cascade'
fk_def = (self._table, source_field, dest_model._table, ondelete or 'set null')
self._foreign_keys.add(fk_def)
_schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
# unchecked version: for custom cases, such as m2m relationships
def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
fk_def = (source_table, source_field, dest_model._table, ondelete or 'set null')
self._foreign_keys.add(fk_def)
_schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *fk_def)
def _drop_constraint(self, cr, source_table, constraint_name):
cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
    def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
        """Bring the FK backing the m2o ``source_field`` in line with its definition.

        Looks up the single-column FK constraint(s) currently declared on
        ``source_table.source_field``; keeps the constraint when its target
        table and ON DELETE rule already match, otherwise drops the stale
        (or duplicate) constraint(s) and queues the correct one for
        re-creation via :meth:`_m2o_add_foreign_key_checked`.
        """
        # Find FK constraint(s) currently established for the m2o field,
        # and see whether they are stale or not
        cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
                             cl2.relname as foreign_table
                      FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                           pg_attribute as att1, pg_attribute as att2
                      WHERE con.conrelid = cl1.oid
                      AND cl1.relname = %s
                      AND con.confrelid = cl2.oid
                      AND array_lower(con.conkey, 1) = 1
                      AND con.conkey[1] = att1.attnum
                      AND att1.attrelid = cl1.oid
                      AND att1.attname = %s
                      AND array_lower(con.confkey, 1) = 1
                      AND con.confkey[1] = att2.attnum
                      AND att2.attrelid = cl2.oid
                      AND att2.attname = %s
                      AND con.contype = 'f'""", (source_table, source_field, 'id'))
        constraints = cr.dictfetchall()
        if constraints:
            if len(constraints) == 1:
                # Is it the right constraint?
                cons, = constraints
                if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
                    or cons['foreign_table'] != dest_model._table:
                    # Wrong FK: drop it and recreate
                    _schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
                                  source_table, cons['constraint_name'])
                    self._drop_constraint(cr, source_table, cons['constraint_name'])
                else:
                    # it's all good, nothing to do!
                    return
            else:
                # Multiple FKs found for the same field, drop them all, and re-create
                for cons in constraints:
                    _schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
                                  source_table, cons['constraint_name'])
                    self._drop_constraint(cr, source_table, cons['constraint_name'])
        # (re-)create the FK
        self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
def _auto_init(self, cr, context=None):
"""
Call _field_create and, unless _auto is False:
- create the corresponding table in database for the model,
- possibly add the parent columns in database,
- possibly add the columns 'create_uid', 'create_date', 'write_uid',
'write_date' in database if _log_access is True (the default),
- report on database columns no more existing in _columns,
- remove no more existing not null constraints,
- alter existing database columns to match _columns,
- create database tables to match _columns,
- add database indices to match _columns,
- save in self._foreign_keys a list a foreign keys to create (see
_auto_end).
"""
self._foreign_keys = set()
raise_on_invalid_object_name(self._name)
if context is None:
context = {}
store_compute = False
todo_end = []
update_custom_fields = context.get('update_custom_fields', False)
self._field_create(cr, context=context)
create = not self._table_exist(cr)
if getattr(self, '_auto', True):
if create:
self._create_table(cr)
cr.commit()
if self._parent_store:
if not self._parent_columns_exist(cr):
self._create_parent_columns(cr)
store_compute = True
# Create the create_uid, create_date, write_uid, write_date, columns if desired.
if self._log_access:
self._add_log_columns(cr)
self._check_removed_columns(cr, log=False)
# iterate on the "object columns"
column_data = self._select_column_data(cr)
for k, f in self._columns.iteritems():
if k in MAGIC_COLUMNS:
continue
# Don't update custom (also called manual) fields
if f.manual and not update_custom_fields:
continue
if isinstance(f, fields.one2many):
self._o2m_raise_on_missing_reference(cr, f)
elif isinstance(f, fields.many2many):
self._m2m_raise_or_create_relation(cr, f)
else:
res = column_data.get(k)
# The field is not found as-is in database, try if it
# exists with an old name.
if not res and hasattr(f, 'oldname'):
res = column_data.get(f.oldname)
if res:
cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
res['attname'] = k
column_data[k] = res
_schema.debug("Table '%s': renamed column '%s' to '%s'",
self._table, f.oldname, k)
# The field already exists in database. Possibly
# change its type, rename it, drop it or change its
# constraints.
if res:
f_pg_type = res['typname']
f_pg_size = res['size']
f_pg_notnull = res['attnotnull']
if isinstance(f, fields.function) and not f.store and\
not getattr(f, 'nodrop', False):
_logger.info('column %s (%s) in table %s removed: converted to a function !\n',
k, f.string, self._table)
cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
cr.commit()
_schema.debug("Table '%s': dropped column '%s' with cascade",
self._table, k)
f_obj_type = None
else:
f_obj_type = get_pg_type(f) and get_pg_type(f)[0]
if f_obj_type:
ok = False
casts = [
('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
('varchar', 'text', 'TEXT', ''),
('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
('timestamp', 'date', 'date', '::date'),
('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
]
if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
cr.commit()
_schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
for c in casts:
if (f_pg_type==c[0]) and (f._type==c[1]):
if f_pg_type != f_obj_type:
ok = True
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
cr.commit()
_schema.debug("Table '%s': column '%s' changed type from %s to %s",
self._table, k, c[0], c[1])
break
if f_pg_type != f_obj_type:
if not ok:
i = 0
while True:
newname = k + '_moved' + str(i)
cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
"WHERE c.relname=%s " \
"AND a.attname=%s " \
"AND c.oid=a.attrelid ", (self._table, newname))
if not cr.fetchone()[0]:
break
i += 1
if f_pg_notnull:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
_schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
self._table, k, f_pg_type, f._type, newname)
# if the field is required and hasn't got a NOT NULL constraint
if f.required and f_pg_notnull == 0:
# set the field to the default value if any
if k in self._defaults:
if callable(self._defaults[k]):
default = self._defaults[k](self, cr, SUPERUSER_ID, context)
else:
default = self._defaults[k]
if default is not None:
ss = self._columns[k]._symbol_set
query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
cr.execute(query, (ss[1](default),))
# add the NOT NULL constraint
cr.commit()
try:
cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
cr.commit()
| codeparrot/github-code-clean |
"""
Acceptance tests for the teams feature.
"""
import json
import random
import time
from dateutil.parser import parse
import ddt
from nose.plugins.attrib import attr
from selenium.common.exceptions import TimeoutException
from uuid import uuid4
from common.test.acceptance.tests.helpers import get_modal_alert, EventsTestMixin, UniqueCourseTest
from common.test.acceptance.fixtures import LMS_BASE_URL
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.fixtures.discussion import (
Thread,
MultipleThreadFixture,
ForumsConfigMixin,
)
from common.test.acceptance.pages.lms.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.course_info import CourseInfoPage
from common.test.acceptance.pages.lms.learner_profile import LearnerProfilePage
from common.test.acceptance.pages.lms.tab_nav import TabNavPage
from common.test.acceptance.pages.lms.teams import (
TeamsPage,
MyTeamsPage,
BrowseTopicsPage,
BrowseTeamsPage,
TeamManagementPage,
EditMembershipPage,
TeamPage
)
from common.test.acceptance.pages.common.utils import confirm_prompt
TOPICS_PER_PAGE = 12
class TeamsTabBase(EventsTestMixin, ForumsConfigMixin, UniqueCourseTest):
    """Base class for Teams Tab tests"""

    def setUp(self):
        super(TeamsTabBase, self).setUp()
        self.tab_nav = TabNavPage(self.browser)
        self.course_info_page = CourseInfoPage(self.browser, self.course_id)
        self.teams_page = TeamsPage(self.browser, self.course_id)
        # TODO: Refactor so resetting events database is not necessary
        self.reset_event_tracking()
        self.enable_forums()

    def create_topics(self, num_topics):
        """Create `num_topics` test topics.

        Each topic uses its stringified index for the id, name, and
        description, which keeps topics unique and easy to reference.
        """
        return [{u"description": i, u"name": i, u"id": i} for i in map(str, xrange(num_topics))]

    def create_teams(self, topic, num_teams, time_between_creation=0):
        """Create `num_teams` teams belonging to `topic`.

        Arguments:
            topic (dict): the topic (must contain an 'id' key) the teams belong to.
            num_teams (int): number of teams to create.
            time_between_creation (int): seconds to sleep between creations so
                that `last_activity_at` timestamps are distinguishable.

        Returns:
            list: the server's JSON representation of each created team.
        """
        teams = []
        for i in xrange(num_teams):
            team = {
                'course_id': self.course_id,
                'topic_id': topic['id'],
                'name': 'Team {}'.format(i),
                'description': 'Description {}'.format(i),
                'language': 'aa',
                'country': 'AF'
            }
            teams.append(self.post_team_data(team))
            # Sadly, this sleep is necessary in order to ensure that
            # sorting by last_activity_at works correctly when running
            # in Jenkins.
            # THIS IS AN ANTI-PATTERN - DO NOT COPY.
            time.sleep(time_between_creation)
        return teams

    def post_team_data(self, team_data):
        """Given a JSON representation of a team, post it to the server.

        Fails the test if the server does not respond with HTTP 200.
        """
        response = self.course_fixture.session.post(
            LMS_BASE_URL + '/api/team/v0/teams/',
            data=json.dumps(team_data),
            headers=self.course_fixture.headers
        )
        self.assertEqual(response.status_code, 200)
        return json.loads(response.text)

    def create_memberships(self, num_memberships, team_id):
        """Create `num_memberships` users and assign them to `team_id`. The
        last user created becomes the current user."""
        memberships = []
        for __ in xrange(num_memberships):
            # Each AutoAuthPage visit creates and signs in a fresh enrolled user.
            user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
            memberships.append(user_info)
            self.create_membership(user_info['username'], team_id)
        #pylint: disable=attribute-defined-outside-init
        self.user_info = memberships[-1]
        return memberships

    def create_membership(self, username, team_id):
        """Assign `username` to `team_id`."""
        response = self.course_fixture.session.post(
            LMS_BASE_URL + '/api/team/v0/team_membership/',
            data=json.dumps({'username': username, 'team_id': team_id}),
            headers=self.course_fixture.headers
        )
        return json.loads(response.text)

    def set_team_configuration(self, configuration, enroll_in_course=True, global_staff=False):
        """
        Sets team configuration on the course and calls auto-auth on the user.
        """
        #pylint: disable=attribute-defined-outside-init
        self.course_fixture = CourseFixture(**self.course_info)
        if configuration:
            self.course_fixture.add_advanced_settings(
                {u"teams_configuration": {u"value": configuration}}
            )
        self.course_fixture.install()
        # Passing None as the course id means the user is created but not enrolled.
        enroll_course_id = self.course_id if enroll_in_course else None
        #pylint: disable=attribute-defined-outside-init
        self.user_info = AutoAuthPage(self.browser, course_id=enroll_course_id, staff=global_staff).visit().user_info
        self.course_info_page.visit()

    def verify_teams_present(self, present):
        """
        Verifies whether or not the teams tab is present. If it should be present, also
        checks the text on the page (to ensure view is working).
        """
        if present:
            self.assertIn("Teams", self.tab_nav.tab_names)
            self.teams_page.visit()
            self.assertEqual(self.teams_page.active_tab(), 'browse')
        else:
            self.assertNotIn("Teams", self.tab_nav.tab_names)

    def verify_teams(self, page, expected_teams):
        """Verify that the list of team cards on the current page match the expected teams in order."""
        def assert_team_equal(expected_team, team_card_name, team_card_description):
            """
            Helper to assert that a single team card has the expected name and
            description.
            """
            self.assertEqual(expected_team['name'], team_card_name)
            self.assertEqual(expected_team['description'], team_card_description)

        team_card_names = page.team_names
        team_card_descriptions = page.team_descriptions
        # Fail with a clear count mismatch up front: Python 2's multi-iterable
        # `map` pads the shorter sequence with None, which would otherwise
        # surface as a confusing error inside assert_team_equal.
        self.assertEqual(len(team_card_names), len(expected_teams))
        self.assertEqual(len(team_card_descriptions), len(expected_teams))
        map(assert_team_equal, expected_teams, team_card_names, team_card_descriptions)

    def verify_my_team_count(self, expected_number_of_teams):
        """ Verify the number of teams shown on "My Team". """
        # We are doing these operations on this top-level page object to avoid reloading the page.
        self.teams_page.verify_my_team_count(expected_number_of_teams)

    def only_team_events(self, event):
        """Filter out all non-team events."""
        return event['event_type'].startswith('edx.team.')
@ddt.ddt
@attr(shard=5)
class TeamsTabTest(TeamsTabBase):
    """
    Tests verifying when the Teams tab is present.
    """
    def test_teams_not_enabled(self):
        """
        Scenario: teams tab should not be present if no team configuration is set
        Given I am enrolled in a course without team configuration
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration(None)
        self.verify_teams_present(False)

    def test_teams_not_enabled_no_topics(self):
        """
        Scenario: teams tab should not be present if team configuration does not specify topics
        Given I am enrolled in a course with no topics in the team configuration
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": []})
        self.verify_teams_present(False)

    def test_teams_not_enabled_not_enrolled(self):
        """
        Scenario: teams tab should not be present if student is not enrolled in the course
        Given there is a course with team configuration and topics
        And I am not enrolled in that course, and am not global staff
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration(
            {u"max_team_size": 10, u"topics": self.create_topics(1)},
            enroll_in_course=False
        )
        self.verify_teams_present(False)

    def test_teams_enabled(self):
        """
        Scenario: teams tab should be present if user is enrolled in the course and it has team configuration
        Given I am enrolled in a course with team configuration and topics
        When I view the course info page
        Then I should see the Teams tab
        And the correct content should be on the page
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(1)})
        self.verify_teams_present(True)

    def test_teams_enabled_global_staff(self):
        """
        Scenario: teams tab should be present if user is not enrolled in the course, but is global staff
        Given there is a course with team configuration
        And I am not enrolled in that course, but am global staff
        When I view the course info page
        Then I should see the Teams tab
        And the correct content should be on the page
        """
        self.set_team_configuration(
            {u"max_team_size": 10, u"topics": self.create_topics(1)},
            enroll_in_course=False,
            global_staff=True
        )
        self.verify_teams_present(True)

    # Each route below triggers an AJAX request when visited.
    @ddt.data(
        'topics/{topic_id}',
        'topics/{topic_id}/search',
        'teams/{topic_id}/{team_id}/edit-team',
        'teams/{topic_id}/{team_id}'
    )
    def test_unauthorized_error_message(self, route):
        """Ensure that an error message is shown to the user if they attempt
        to take an action which makes an AJAX request while not signed
        in.
        """
        topics = self.create_topics(1)
        topic = topics[0]
        self.set_team_configuration(
            {u'max_team_size': 10, u'topics': topics},
            global_staff=True
        )
        team = self.create_teams(topic, 1)[0]
        self.teams_page.visit()
        # Simulate the session expiring mid-visit: the next AJAX request will
        # be unauthenticated and should surface the warning banner.
        self.browser.delete_cookie('sessionid')
        # Strip any existing fragment before appending the route under test.
        url = self.browser.current_url.split('#')[0]
        self.browser.get(
            '{url}#{route}'.format(
                url=url,
                route=route.format(
                    topic_id=topic['id'],
                    team_id=team['id']
                )
            )
        )
        self.teams_page.wait_for_ajax()
        self.assertEqual(
            self.teams_page.warning_message,
            u"Your request could not be completed. Reload the page and try again."
        )

    # (route fragment, CSS selector that proves the route rendered)
    @ddt.data(
        ('browse', '.topics-list'),
        # TODO: find a reliable way to match the "My Teams" tab
        # ('my-teams', 'div.teams-list'),
        ('teams/{topic_id}/{team_id}', 'div.discussion-module'),
        ('topics/{topic_id}/create-team', 'div.create-team-instructions'),
        ('topics/{topic_id}', '.teams-list'),
        ('not-a-real-route', 'div.warning')
    )
    @ddt.unpack
    def test_url_routing(self, route, selector):
        """Ensure that navigating to a URL route correctly updates the page
        content.
        """
        topics = self.create_topics(1)
        topic = topics[0]
        self.set_team_configuration({
            u'max_team_size': 10,
            u'topics': topics
        })
        team = self.create_teams(topic, 1)[0]
        self.teams_page.visit()
        # Get the base URL (the URL without any trailing fragment)
        url = self.browser.current_url
        fragment_index = url.find('#')
        if fragment_index >= 0:
            url = url[0:fragment_index]
        self.browser.get(
            '{url}#{route}'.format(
                url=url,
                route=route.format(
                    topic_id=topic['id'],
                    team_id=team['id']
                ))
        )
        self.teams_page.wait_for_page()
        self.teams_page.wait_for_ajax()
        # The route is considered handled when its characteristic element is
        # both present in the DOM and visible.
        self.assertTrue(self.teams_page.q(css=selector).present)
        self.assertTrue(self.teams_page.q(css=selector).visible)
@attr(shard=5)
class MyTeamsTest(TeamsTabBase):
    """
    Tests for the "My Teams" tab of the Teams page.
    """
    def setUp(self):
        super(MyTeamsTest, self).setUp()
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
        self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
        self.my_teams_page = MyTeamsPage(self.browser, self.course_id)
        # Analytics event that should fire every time the "My Teams" page is viewed.
        self.page_viewed_event = {
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'my-teams',
                'topic_id': None,
                'team_id': None
            }
        }

    def test_not_member_of_any_teams(self):
        """
        Scenario: Visiting the My Teams page when user is not a member of any team should not display any teams.
        Given I am enrolled in a course with a team configuration and a topic but am not a member of a team
        When I visit the My Teams page
        And I should see no teams
        And I should see a message that I belong to no teams.
        """
        with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
            self.my_teams_page.visit()
        self.assertEqual(len(self.my_teams_page.team_cards), 0, msg='Expected to see no team cards')
        self.assertEqual(
            self.my_teams_page.q(css='.page-content-main').text,
            [u'You are not currently a member of any team.']
        )

    def test_member_of_a_team(self):
        """
        Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
        Given I am enrolled in a course with a team configuration and a topic and am a member of a team
        When I visit the My Teams page
        Then I should see a pagination header showing the number of teams
        And I should see all the expected team cards
        And I should not see a pagination footer
        """
        teams = self.create_teams(self.topic, 1)
        self.create_membership(self.user_info['username'], teams[0]['id'])
        with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
            self.my_teams_page.visit()
        self.verify_teams(self.my_teams_page, teams)

    def test_multiple_team_members(self):
        """
        Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
        Given I am a member of a team with multiple members
        When I visit the My Teams page
        Then I should see the correct number of team members on my membership
        """
        teams = self.create_teams(self.topic, 1)
        # Creates 4 users/memberships; the last created user becomes the current user.
        self.create_memberships(4, teams[0]['id'])
        self.my_teams_page.visit()
        # 4 members out of the max_team_size of 10 configured in setUp.
        self.assertEqual(self.my_teams_page.team_memberships[0], '4 / 10 Members')
@attr(shard=5)
@ddt.ddt
class BrowseTopicsTest(TeamsTabBase):
    """
    Tests for the Browse tab of the Teams page.
    """
    def setUp(self):
        super(BrowseTopicsTest, self).setUp()
        self.topics_page = BrowseTopicsPage(self.browser, self.course_id)

    # (sort field, whether the UI sorts that field in descending order)
    @ddt.data(('name', False), ('team_count', True))
    @ddt.unpack
    def test_sort_topics(self, sort_order, reverse):
        """
        Scenario: the user should be able to sort the list of topics by name or team count
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        Then I should see a list of topics for the course
        When I choose a sort order
        Then I should see the paginated list of topics in that order
        """
        topics = self.create_topics(TOPICS_PER_PAGE + 1)
        self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
        # Assign team counts in shuffled topic order so that sorting by
        # team_count is genuinely exercised (not just creation order).
        for i, topic in enumerate(random.sample(topics, len(topics))):
            self.create_teams(topic, i)
            topic['team_count'] = i
        self.topics_page.visit()
        self.topics_page.sort_topics_by(sort_order)
        topic_names = self.topics_page.topic_names
        self.assertEqual(len(topic_names), TOPICS_PER_PAGE)
        self.assertEqual(
            topic_names,
            [t['name'] for t in sorted(topics, key=lambda t: t[sort_order], reverse=reverse)][:TOPICS_PER_PAGE]
        )

    def test_sort_topics_update(self):
        """
        Scenario: the list of topics should remain sorted after updates
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics and choose a sort order
        Then I should see the paginated list of topics in that order
        When I create a team in one of those topics
        And I return to the topics list
        Then I should see the topics in the correct sorted order
        """
        topics = self.create_topics(3)
        self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
        self.topics_page.visit()
        self.topics_page.sort_topics_by('team_count')
        # Pick the topic currently listed last (fewest teams) and add a team
        # to it; it should then sort to the front of the list.
        topic_name = self.topics_page.topic_names[-1]
        topic = [t for t in topics if t['name'] == topic_name][0]
        self.topics_page.browse_teams_for_topic(topic_name)
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
        browse_teams_page.wait_for_page()
        browse_teams_page.click_create_team_link()
        create_team_page = TeamManagementPage(self.browser, self.course_id, topic)
        create_team_page.create_team()
        team_page = TeamPage(self.browser, self.course_id)
        team_page.wait_for_page()
        team_page.click_all_topics()
        self.topics_page.wait_for_page()
        self.topics_page.wait_for_ajax()
        self.assertEqual(topic_name, self.topics_page.topic_names[0])

    def test_list_topics(self):
        """
        Scenario: a list of topics should be visible in the "Browse" tab
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        Then I should see a list of topics for the course
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(2)})
        self.topics_page.visit()
        self.assertEqual(len(self.topics_page.topic_cards), 2)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-2 out of 2 total'))
        self.assertFalse(self.topics_page.pagination_controls_visible())
        self.assertFalse(self.topics_page.is_previous_page_button_enabled())
        self.assertFalse(self.topics_page.is_next_page_button_enabled())

    def test_topic_pagination(self):
        """
        Scenario: a list of topics should be visible in the "Browse" tab, paginated 12 per page
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        Then I should see only the first 12 topics
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(20)})
        self.topics_page.visit()
        self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 20 total'))
        self.assertTrue(self.topics_page.pagination_controls_visible())
        self.assertFalse(self.topics_page.is_previous_page_button_enabled())
        self.assertTrue(self.topics_page.is_next_page_button_enabled())

    def test_go_to_numbered_page(self):
        """
        Scenario: topics should be able to be navigated by page number
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        And I enter a valid page number in the page number input
        Then I should see that page of topics
        """
        # 25 topics = 3 pages (12 + 12 + 1); page 3 holds the single remainder.
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(25)})
        self.topics_page.visit()
        self.topics_page.go_to_page(3)
        self.assertEqual(len(self.topics_page.topic_cards), 1)
        self.assertTrue(self.topics_page.is_previous_page_button_enabled())
        self.assertFalse(self.topics_page.is_next_page_button_enabled())

    def test_go_to_invalid_page(self):
        """
        Scenario: browsing topics should not respond to invalid page numbers
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        And I enter an invalid page number in the page number input
        Then I should stay on the current page
        """
        # 13 topics = only 2 pages, so page 3 is out of range.
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
        self.topics_page.visit()
        self.topics_page.go_to_page(3)
        self.assertEqual(self.topics_page.get_current_page_number(), 1)

    def test_page_navigation_buttons(self):
        """
        Scenario: browsing topics should not respond to invalid page numbers
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        When I press the next page button
        Then I should move to the next page
        When I press the previous page button
        Then I should move to the previous page
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
        self.topics_page.visit()
        self.topics_page.press_next_page_button()
        self.assertEqual(len(self.topics_page.topic_cards), 1)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 13-13 out of 13 total'))
        self.topics_page.press_previous_page_button()
        self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 13 total'))

    def test_topic_pagination_one_page(self):
        """
        Scenario: Browsing topics when there are fewer topics than the page size i.e. 12
        all topics should show on one page
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        And I should see corrected number of topic cards
        And I should see the correct page header
        And I should not see a pagination footer
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(10)})
        self.topics_page.visit()
        self.assertEqual(len(self.topics_page.topic_cards), 10)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
        self.assertFalse(self.topics_page.pagination_controls_visible())

    def test_topic_description_truncation(self):
        """
        Scenario: excessively long topic descriptions should be truncated so
        as to fit within a topic card.
        Given I am enrolled in a course with a team configuration and a topic
        with a long description
        When I visit the Teams page
        And I browse topics
        Then I should see a truncated topic description
        """
        initial_description = "A" + " really" * 50 + " long description"
        self.set_team_configuration(
            {u"max_team_size": 1, u"topics": [{"name": "", "id": "", "description": initial_description}]}
        )
        self.topics_page.visit()
        truncated_description = self.topics_page.topic_descriptions[0]
        self.assertLess(len(truncated_description), len(initial_description))
        self.assertTrue(truncated_description.endswith('...'))
        # The visible text (minus the ellipsis) must be a prefix of the original.
        self.assertIn(truncated_description.split('...')[0], initial_description)

    def test_go_to_teams_list(self):
        """
        Scenario: Clicking on a Topic Card should take you to the
        teams list for that Topic.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page
        And I browse topics
        And I click on the arrow link to view teams for the first topic
        Then I should be on the browse teams page
        """
        topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
        self.set_team_configuration(
            {u"max_team_size": 1, u"topics": [topic]}
        )
        self.topics_page.visit()
        self.topics_page.browse_teams_for_topic('Example Topic')
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
        browse_teams_page.wait_for_page()
        self.assertEqual(browse_teams_page.header_name, 'Example Topic')
        self.assertEqual(browse_teams_page.header_description, 'Description')

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the browse topics page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the browse topics page
        Then my browser should post a page viewed event
        """
        topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
        self.set_team_configuration(
            {u"max_team_size": 1, u"topics": [topic]}
        )
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'browse',
                'topic_id': None,
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.topics_page.visit()
@attr(shard=5)
@ddt.ddt
class BrowseTeamsWithinTopicTest(TeamsTabBase):
"""
Tests for browsing Teams within a Topic on the Teams page.
"""
TEAMS_PAGE_SIZE = 10
def setUp(self):
super(BrowseTeamsWithinTopicTest, self).setUp()
self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
self.max_team_size = 10
self.set_team_configuration({
'course_id': self.course_id,
'max_team_size': self.max_team_size,
'topics': [self.topic]
})
self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
self.topics_page = BrowseTopicsPage(self.browser, self.course_id)
def teams_with_default_sort_order(self, teams):
"""Return a list of teams sorted according to the default ordering
(last_activity_at, with a secondary sort by open slots).
"""
return sorted(
sorted(teams, key=lambda t: len(t['membership']), reverse=True),
key=lambda t: parse(t['last_activity_at']).replace(microsecond=0),
reverse=True
)
def verify_page_header(self):
"""Verify that the page header correctly reflects the current topic's name and description."""
self.assertEqual(self.browse_teams_page.header_name, self.topic['name'])
self.assertEqual(self.browse_teams_page.header_description, self.topic['description'])
def verify_search_header(self, search_results_page, search_query):
"""Verify that the page header correctly reflects the current topic's name and description."""
self.assertEqual(search_results_page.header_name, 'Team Search')
self.assertEqual(
search_results_page.header_description,
'Showing results for "{search_query}"'.format(search_query=search_query)
)
def verify_on_page(self, teams_page, page_num, total_teams, pagination_header_text, footer_visible):
"""
Verify that we are on the correct team list page.
Arguments:
teams_page (BaseTeamsPage): The teams page object that should be the current page.
page_num (int): The one-indexed page number that we expect to be on
total_teams (list): An unsorted list of all the teams for the
current topic
pagination_header_text (str): Text we expect to see in the
pagination header.
footer_visible (bool): Whether we expect to see the pagination
footer controls.
"""
sorted_teams = self.teams_with_default_sort_order(total_teams)
self.assertTrue(teams_page.get_pagination_header_text().startswith(pagination_header_text))
self.verify_teams(
teams_page,
sorted_teams[(page_num - 1) * self.TEAMS_PAGE_SIZE:page_num * self.TEAMS_PAGE_SIZE]
)
self.assertEqual(
teams_page.pagination_controls_visible(),
footer_visible,
msg='Expected paging footer to be ' + 'visible' if footer_visible else 'invisible'
)
@ddt.data(
('open_slots', 'last_activity_at', True),
('last_activity_at', 'open_slots', True)
)
@ddt.unpack
def test_sort_teams(self, sort_order, secondary_sort_order, reverse):
"""
Scenario: the user should be able to sort the list of teams by open slots or last activity
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse teams within a topic
Then I should see a list of teams for that topic
When I choose a sort order
Then I should see the paginated list of teams in that order
"""
teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
for i, team in enumerate(random.sample(teams, len(teams))):
for _ in range(i):
user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
self.create_membership(user_info['username'], team['id'])
team['open_slots'] = self.max_team_size - i
# Re-authenticate as staff after creating users
AutoAuthPage(
self.browser,
course_id=self.course_id,
staff=True
).visit()
self.browse_teams_page.visit()
self.browse_teams_page.sort_teams_by(sort_order)
team_names = self.browse_teams_page.team_names
self.assertEqual(len(team_names), self.TEAMS_PAGE_SIZE)
sorted_teams = [
team['name']
for team in sorted(
sorted(teams, key=lambda t: t[secondary_sort_order], reverse=reverse),
key=lambda t: t[sort_order],
reverse=reverse
)
][:self.TEAMS_PAGE_SIZE]
self.assertEqual(team_names, sorted_teams)
def test_default_sort_order(self):
"""
Scenario: the list of teams should be sorted by last activity by default
Given I am enrolled in a course with team configuration and topics
When I visit the Teams page
And I browse teams within a topic
Then I should see a list of teams for that topic, sorted by last activity
"""
self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
self.browse_teams_page.visit()
self.assertEqual(self.browse_teams_page.sort_order, 'last activity')
def test_no_teams(self):
"""
Scenario: Visiting a topic with no teams should not display any teams.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see a pagination header showing no teams
And I should see no teams
And I should see a button to add a team
And I should not see a pagination footer
"""
self.browse_teams_page.visit()
self.verify_page_header()
self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
self.assertEqual(len(self.browse_teams_page.team_cards), 0, msg='Expected to see no team cards')
self.assertFalse(
self.browse_teams_page.pagination_controls_visible(),
msg='Expected paging footer to be invisible'
)
def test_teams_one_page(self):
"""
Scenario: Visiting a topic with fewer teams than the page size should
all those teams on one page.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see a pagination header showing the number of teams
And I should see all the expected team cards
And I should see a button to add a team
And I should not see a pagination footer
"""
teams = self.teams_with_default_sort_order(
self.create_teams(self.topic, self.TEAMS_PAGE_SIZE, time_between_creation=1)
)
self.browse_teams_page.visit()
self.verify_page_header()
self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
self.verify_teams(self.browse_teams_page, teams)
self.assertFalse(
self.browse_teams_page.pagination_controls_visible(),
msg='Expected paging footer to be invisible'
)
def test_teams_navigation_buttons(self):
"""
Scenario: The user should be able to page through a topic's team list
using navigation buttons when it is longer than the page size.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see that I am on the first page of results
When I click on the next page button
Then I should see that I am on the second page of results
And when I click on the previous page button
Then I should see that I am on the first page of results
"""
teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1, time_between_creation=1)
self.browse_teams_page.visit()
self.verify_page_header()
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)
self.browse_teams_page.press_next_page_button()
self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-11 out of 11 total', True)
self.browse_teams_page.press_previous_page_button()
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)
def test_teams_page_input(self):
"""
Scenario: The user should be able to page through a topic's team list
using the page input when it is longer than the page size.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page for that topic
Then I should see the correct page header
And I should see that I am on the first page of results
When I input the second page
Then I should see that I am on the second page of results
When I input the first page
Then I should see that I am on the first page of results
"""
teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 10, time_between_creation=1)
self.browse_teams_page.visit()
self.verify_page_header()
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)
self.browse_teams_page.go_to_page(2)
self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-20 out of 20 total', True)
self.browse_teams_page.go_to_page(1)
self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)
def test_browse_team_topics(self):
"""
Scenario: User should be able to navigate to "browse all teams" and "search team description" links.
Given I am enrolled in a course with teams enabled
When I visit the Teams page for a topic
Then I should see the correct page header
And I should see the link to "browse teams in other topics"
When I should navigate to that link
Then I should see the topic browse page
"""
self.browse_teams_page.visit()
self.verify_page_header()
self.browse_teams_page.click_browse_all_teams_link()
self.topics_page.wait_for_page()
def test_search(self):
"""
Scenario: User should be able to search for a team
Given I am enrolled in a course with teams enabled
When I visit the Teams page for that topic
And I search for 'banana'
Then I should see the search result page
And the search header should be shown
And 0 results should be shown
And my browser should fire a page viewed event for the search page
And a searched event should have been fired
"""
# Note: all searches will return 0 results with the mock search server
# used by Bok Choy.
search_text = 'banana'
self.create_teams(self.topic, 5)
self.browse_teams_page.visit()
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'search-teams',
'topic_id': self.topic['id'],
'team_id': None
}
}, {
'event_type': 'edx.team.searched',
'event': {
'search_text': search_text,
'topic_id': self.topic['id'],
'number_of_results': 0
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events, in_order=False):
search_results_page = self.browse_teams_page.search(search_text)
self.verify_search_header(search_results_page, search_text)
self.assertTrue(search_results_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
def test_page_viewed_event(self):
"""
Scenario: Visiting the browse page should fire a page viewed event.
Given I am enrolled in a course with a team configuration and a topic
When I visit the Teams page
Then my browser should post a page viewed event for the teams page
"""
self.create_teams(self.topic, 5)
events = [{
'event_type': 'edx.team.page_viewed',
'event': {
'page_name': 'single-topic',
'topic_id': self.topic['id'],
'team_id': None
}
}]
with self.assert_events_match_during(self.only_team_events, expected_events=events):
self.browse_teams_page.visit()
def test_team_name_xss(self):
    """
    Scenario: Team names should be HTML-escaped on the teams page
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for a topic, with a team name containing JS code
        Then I should not see any alerts
    """
    malicious_payload = {
        'course_id': self.course_id,
        'topic_id': self.topic['id'],
        'name': '<script>alert("XSS")</script>',
        'description': 'Description',
        'language': 'aa',
        'country': 'AF'
    }
    self.post_team_data(malicious_payload)
    # If the name were rendered unescaped, the injected script would raise
    # an alert and get_modal_alert would find it. With proper escaping no
    # alert ever appears, so waiting for one must time out.
    with self.assertRaises(TimeoutException):
        self.browser.get(self.browse_teams_page.url)
        alert = get_modal_alert(self.browser)
        alert.accept()
@attr(shard=5)
class TeamFormActions(TeamsTabBase):
    """
    Base class with shared helpers for the create, edit, and delete team tests.
    """
    # Values written into the create/edit form by fill_create_or_edit_form.
    TEAM_DESCRIPTION = 'The Avengers are a fictional team of superheroes.'

    # Single topic shared by all subclasses' team configurations.
    topic = {'name': 'Example Topic', 'id': 'example_topic', 'description': 'Description'}
    TEAMS_NAME = 'Avengers'

    def setUp(self):
        super(TeamFormActions, self).setUp()
        self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)

    def verify_page_header(self, title, description, breadcrumbs):
        """
        Verify that the page header correctly reflects the
        create team header, description and breadcrumb.
        """
        self.assertEqual(self.team_management_page.header_page_name, title)
        self.assertEqual(self.team_management_page.header_page_description, description)
        self.assertEqual(self.team_management_page.header_page_breadcrumbs, breadcrumbs)

    def verify_and_navigate_to_create_team_page(self):
        """Navigates to the create team page and verifies."""
        self.browse_teams_page.click_create_team_link()
        self.verify_page_header(
            title='Create a New Team',
            description='Create a new team if you can\'t find an existing team to join, '
                        'or if you would like to learn with friends you know.',
            breadcrumbs='All Topics {topic_name}'.format(topic_name=self.topic['name'])
        )

    def verify_and_navigate_to_edit_team_page(self):
        """Navigates to the edit team page and verifies the header.

        Requires `self.team_page` and `self.team` to be set by the subclass.
        """
        # pylint: disable=no-member
        self.assertEqual(self.team_page.team_name, self.team['name'])
        self.assertTrue(self.team_page.edit_team_button_present)
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        # Edit page header.
        self.verify_page_header(
            title='Edit Team',
            description='If you make significant changes, make sure you notify '
                        'members of the team before making these changes.',
            breadcrumbs='All Topics {topic_name} {team_name}'.format(
                topic_name=self.topic['name'],
                team_name=self.team['name']
            )
        )

    def verify_team_info(self, name, description, location, language):
        """Verify the team information on team page."""
        # pylint: disable=no-member
        self.assertEqual(self.team_page.team_name, name)
        self.assertEqual(self.team_page.team_description, description)
        self.assertEqual(self.team_page.team_location, location)
        self.assertEqual(self.team_page.team_language, language)

    def fill_create_or_edit_form(self):
        """Fill the create/edit team form fields with appropriate values."""
        self.team_management_page.value_for_text_field(
            field_id='name',
            value=self.TEAMS_NAME,
            press_enter=False
        )
        self.team_management_page.set_value_for_textarea_field(
            field_id='description',
            value=self.TEAM_DESCRIPTION
        )
        # Dropdowns take display names; they map to codes ('en', 'PK') server-side.
        self.team_management_page.value_for_dropdown_field(field_id='language', value='English')
        self.team_management_page.value_for_dropdown_field(field_id='country', value='Pakistan')

    def verify_all_fields_exist(self):
        """
        Verify the help messages shown under each create/edit form field.
        """
        self.assertEqual(
            self.team_management_page.message_for_field('name'),
            'A name that identifies your team (maximum 255 characters).'
        )
        self.assertEqual(
            self.team_management_page.message_for_textarea_field('description'),
            'A short description of the team to help other learners understand '
            'the goals or direction of the team (maximum 300 characters).'
        )
        self.assertEqual(
            self.team_management_page.message_for_field('country'),
            'The country that team members primarily identify with.'
        )
        self.assertEqual(
            self.team_management_page.message_for_field('language'),
            'The language that team members primarily use to communicate with each other.'
        )
@ddt.ddt
class CreateTeamTest(TeamFormActions):
    """
    Tests for creating a new Team within a Topic on the Teams page.
    """

    def setUp(self):
        super(CreateTeamTest, self).setUp()
        self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
        self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        self.browse_teams_page.visit()

    def test_user_can_see_create_team_page(self):
        """
        Scenario: The user should be able to see the create team page via teams list page.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the Teams page for that topic
            Then I should see the Create Team page link on bottom
            And When I click create team link
            Then I should see the create team page.
            And I should see the create team header
            And I should also see the help messages for fields.
        """
        self.verify_and_navigate_to_create_team_page()
        self.verify_all_fields_exist()

    def test_user_can_see_error_message_for_missing_data(self):
        """
        Scenario: The user should be able to see error message in case of missing required field.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the Create Team page for that topic
            Then I should see the Create Team header and form
            And When I click create team button without filling required fields
            Then I should see the error message and highlighted fields.
        """
        self.verify_and_navigate_to_create_team_page()

        # `submit_form` clicks on a button, but that button doesn't always
        # have the click event handler registered on it in time. That's why
        # this test is flaky. Unfortunately, I don't know of a straightforward
        # way to write something that waits for that event handler to be bound
        # to the button element. So I used time.sleep as well, even though
        # the bok choy docs explicitly ask us not to:
        # http://bok-choy.readthedocs.io/en/latest/guidelines.html
        # Sorry! For the story to address this anti-pattern, see TNL-5820
        time.sleep(0.5)
        self.team_management_page.submit_form()
        self.team_management_page.wait_for(
            lambda: self.team_management_page.validation_message_text,
            "Validation message text never loaded."
        )
        self.assertEqual(
            self.team_management_page.validation_message_text,
            'Check the highlighted fields below and try again.'
        )
        # Both required fields should be flagged.
        self.assertTrue(self.team_management_page.error_for_field(field_id='name'))
        self.assertTrue(self.team_management_page.error_for_field(field_id='description'))

    def test_user_can_see_error_message_for_incorrect_data(self):
        """
        Scenario: The user should be able to see error message in case of increasing length for required fields.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the Create Team page for that topic
            Then I should see the Create Team header and form
            When I add text > than 255 characters for name field
            And I click Create button
            Then I should see the error message for exceeding length.
        """
        self.verify_and_navigate_to_create_team_page()

        # Fill the name field with >255 characters to see validation message.
        self.team_management_page.value_for_text_field(
            field_id='name',
            value='EdX is a massive open online course (MOOC) provider and online learning platform. '
                  'It hosts online university-level courses in a wide range of disciplines to a worldwide '
                  'audience, some at no charge. It also conducts research into learning based on how '
                  'people use its platform. EdX was created for students and institutions that seek to'
                  'transform themselves through cutting-edge technologies, innovative pedagogy, and '
                  'rigorous courses. More than 70 schools, nonprofits, corporations, and international'
                  'organizations offer or plan to offer courses on the edX website. As of 22 October 2014,'
                  'edX has more than 4 million users taking more than 500 courses online.',
            press_enter=False
        )
        self.team_management_page.submit_form()
        self.assertEqual(
            self.team_management_page.validation_message_text,
            'Check the highlighted fields below and try again.'
        )
        self.assertTrue(self.team_management_page.error_for_field(field_id='name'))

    def test_user_can_create_new_team_successfully(self):
        """
        Scenario: The user should be able to create new team.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the Create Team page for that topic
            Then I should see the Create Team header and form
            When I fill all the fields present with appropriate data
            And I click Create button
            Then I expect analytics events to be emitted
            And I should see the page for my team
            And I should see the message that says "You are a member of this team"
            And the new team should be added to the list of teams within the topic
            And the number of teams should be updated on the topic card
            And if I switch to "My Team", the newly created team is displayed
        """
        # Re-authenticate so the test runs as a fresh (non-staff) learner.
        AutoAuthPage(self.browser, course_id=self.course_id).visit()

        self.browse_teams_page.visit()
        self.verify_and_navigate_to_create_team_page()
        self.fill_create_or_edit_form()

        expected_events = [
            {
                'event_type': 'edx.team.created'
            },
            {
                'event_type': 'edx.team.learner_added',
                'event': {
                    'add_method': 'added_on_create',
                }
            }
        ]
        with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
            self.team_management_page.submit_form()

        # Verify that the page is shown for the new team
        team_page = TeamPage(self.browser, self.course_id)
        team_page.wait_for_page()
        self.assertEqual(team_page.team_name, self.TEAMS_NAME)
        self.assertEqual(team_page.team_description, self.TEAM_DESCRIPTION)
        self.assertEqual(team_page.team_user_membership_text, 'You are a member of this team.')

        # Verify the new team was added to the topic list
        self.teams_page.click_specific_topic("Example Topic")
        self.teams_page.verify_topic_team_count(1)

        self.teams_page.click_all_topics()
        self.teams_page.verify_team_count_in_first_topic(1)

        # Verify that if one switches to "My Team" without reloading the page, the newly created team is shown.
        self.verify_my_team_count(1)

    def test_user_can_cancel_the_team_creation(self):
        """
        Scenario: The user should be able to cancel the creation of new team.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the Create Team page for that topic
            Then I should see the Create Team header and form
            When I click Cancel button
            Then I should see teams list page without any new team.
            And if I switch to "My Team", it shows no teams
        """
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))

        self.verify_and_navigate_to_create_team_page()

        # We add a sleep here to allow time for the click event handler to bind
        # to the cancel button. Using time.sleep in bok-choy tests is,
        # generally, an anti-pattern. So don't copy this :).
        # For the story to address this anti-pattern, see TNL-5820
        time.sleep(0.5)
        self.team_management_page.cancel_team()
        self.browse_teams_page.wait_for_page()
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))

        self.teams_page.click_all_topics()
        self.teams_page.verify_team_count_in_first_topic(0)
        self.verify_my_team_count(0)

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the create team page should fire a page viewed event.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the create team page
            Then my browser should post a page viewed event
        """
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'new-team',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.verify_and_navigate_to_create_team_page()
@ddt.ddt
class DeleteTeamTest(TeamFormActions):
    """
    Tests for deleting teams.
    """

    def setUp(self):
        super(DeleteTeamTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        # Need to have a membership so we can confirm it gets deleted as well.
        self.create_membership(self.user_info['username'], self.team['id'])
        self.team_page.visit()

    def test_cancel_delete(self):
        """
        Scenario: The user should be able to cancel the Delete Team dialog
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the Delete Team button
            When I click the delete team button
            And I cancel the prompt
            And I refresh the page
            Then I should still see the team
        """
        self.delete_team(cancel=True)
        self.team_management_page.wait_for_page()
        self.browser.refresh()
        self.team_management_page.wait_for_page()
        # The breadcrumb still ends with the (undeleted) team's name.
        self.assertEqual(
            ' '.join(('All Topics', self.topic['name'], self.team['name'])),
            self.team_management_page.header_page_breadcrumbs
        )

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_delete_team(self, role):
        """
        Scenario: The user should be able to see and navigate to the delete team page.
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the Delete Team button
            When I click the delete team button
            And I confirm the prompt
            Then I should see the browse teams page
            And the team should not be present
        """
        # If role is None, remain logged in as global staff
        if role is not None:
            AutoAuthPage(
                self.browser,
                course_id=self.course_id,
                staff=False,
                roles=role
            ).visit()
            self.team_page.visit()
        self.delete_team(require_notification=False)
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        browse_teams_page.wait_for_page()
        self.assertNotIn(self.team['name'], browse_teams_page.team_names)

    def delete_team(self, **kwargs):
        """
        Delete a team. Passes `kwargs` to `confirm_prompt`.

        Unless `cancel=True` is passed, expects edx.team.deleted event to be
        emitted, with correct course_id. Also expects edx.team.learner_removed
        event to be emitted for the membership that is removed as a part of
        the delete operation.
        """
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        self.team_management_page.delete_team_button.click()

        # Idiomatic replacement for `'cancel' in kwargs and kwargs['cancel'] is True`.
        if kwargs.get('cancel') is True:
            # A cancelled deletion emits no analytics events.
            confirm_prompt(self.team_management_page, **kwargs)
        else:
            expected_events = [
                {
                    'event_type': 'edx.team.deleted',
                    'event': {
                        'team_id': self.team['id']
                    }
                },
                {
                    'event_type': 'edx.team.learner_removed',
                    'event': {
                        'team_id': self.team['id'],
                        'remove_method': 'team_deleted',
                        'user_id': self.user_info['user_id']
                    }
                }
            ]
            with self.assert_events_match_during(
                event_filter=self.only_team_events, expected_events=expected_events
            ):
                confirm_prompt(self.team_management_page, **kwargs)

    def test_delete_team_updates_topics(self):
        """
        Scenario: Deleting a team should update the team count on the topics page
            Given I am staff user for a course with a team
            And I delete a team
            When I navigate to the browse topics page
            Then the team count for the deleted team's topic should be updated
        """
        self.delete_team(require_notification=False)
        BrowseTeamsPage(self.browser, self.course_id, self.topic).click_all_topics()
        topics_page = BrowseTopicsPage(self.browser, self.course_id)
        topics_page.wait_for_page()
        self.teams_page.verify_topic_team_count(0)
@ddt.ddt
class EditTeamTest(TeamFormActions):
    """
    Tests for editing the team.
    """

    def setUp(self):
        super(EditTeamTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        self.team_page.visit()

    def test_staff_can_navigate_to_edit_team_page(self):
        """
        Scenario: The user should be able to see and navigate to the edit team page.
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the edit team page
            And I should see the edit team header
            And I should also see the help messages for fields
        """
        self.verify_and_navigate_to_edit_team_page()
        self.verify_all_fields_exist()

    def test_staff_can_edit_team_successfully(self):
        """
        Scenario: The staff should be able to edit team successfully.
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the edit team page
            And an analytics event should be fired
            When I edit all the fields with appropriate data
            And I click Update button
            Then I should see the page for my team with updated data
        """
        # Newly-created teams default to country 'AF' / language 'aa'
        # (asserted here as the display names Afghanistan / Afar).
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()

        self.fill_create_or_edit_form()

        # One edx.team.changed event per modified field.
        expected_events = [
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'country',
                    'old': 'AF',
                    'new': 'PK',
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'name',
                    'old': self.team['name'],
                    'new': self.TEAMS_NAME,
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'language',
                    'old': 'aa',
                    'new': 'en',
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'description',
                    'old': self.team['description'],
                    'new': self.TEAM_DESCRIPTION,
                    'truncated': [],
                }
            },
        ]

        with self.assert_events_match_during(
            event_filter=self.only_team_events,
            expected_events=expected_events,
        ):
            self.team_management_page.submit_form()

        self.team_page.wait_for_page()

        self.verify_team_info(
            name=self.TEAMS_NAME,
            description=self.TEAM_DESCRIPTION,
            location='Pakistan',
            language='English'
        )

    def test_staff_can_cancel_the_team_edit(self):
        """
        Scenario: The user should be able to cancel the editing of team.
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the edit team page
            Then I should see the Edit Team header
            When I click Cancel button
            Then I should see team page without changes.
        """
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()

        self.fill_create_or_edit_form()
        self.team_management_page.cancel_team()

        self.team_page.wait_for_page()

        # Cancelling must discard the form edits made above.
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )

    def test_student_cannot_see_edit_button(self):
        """
        Scenario: The student should not see the edit team button.
            Given I am student for a course with a team
            When I visit the Team profile page
            Then I should not see the Edit Team button
        """
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.team_page.visit()
        self.assertFalse(self.team_page.edit_team_button_present)

    @ddt.data('Moderator', 'Community TA', 'Administrator')
    def test_discussion_privileged_user_can_edit_team(self, role):
        """
        Scenario: The user with specified role should see the edit team button.
            Given I am user with privileged role for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
        """
        kwargs = {
            'course_id': self.course_id,
            'staff': False
        }
        # NOTE(review): role can never be None here given the @ddt.data above;
        # this guard mirrors similar helpers elsewhere in this file.
        if role is not None:
            kwargs['roles'] = role

        AutoAuthPage(self.browser, **kwargs).visit()
        self.team_page.visit()
        self.teams_page.wait_for_page()
        self.assertTrue(self.team_page.edit_team_button_present)

        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()

        self.fill_create_or_edit_form()
        self.team_management_page.submit_form()

        self.team_page.wait_for_page()

        self.verify_team_info(
            name=self.TEAMS_NAME,
            description=self.TEAM_DESCRIPTION,
            location='Pakistan',
            language='English'
        )

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the edit team page should fire a page viewed event.
            Given I am enrolled in a course with a team configuration and a topic
            When I visit the edit team page
            Then my browser should post a page viewed event
        """
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'edit-team',
                'topic_id': self.topic['id'],
                'team_id': self.team['id']
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.verify_and_navigate_to_edit_team_page()
@ddt.ddt
class EditMembershipTest(TeamFormActions):
    """
    Tests for administrating from the team membership page
    """

    def setUp(self):
        super(EditMembershipTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        # Make sure a user exists on this team so we can edit the membership.
        self.create_membership(self.user_info['username'], self.team['id'])
        self.edit_membership_page = EditMembershipPage(self.browser, self.course_id, self.team)
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)

    def edit_membership_helper(self, role, cancel=False):
        """
        Helper for common functionality in edit membership tests.
        Checks for all relevant assertions about membership being removed,
        including verify edx.team.learner_removed events are emitted.

        Arguments:
            role (str or None): discussion role to log in with; None keeps
                the global-staff login from setUp.
            cancel (bool): if True, dismiss the removal dialog and assert
                the membership is kept instead of removed.
        """
        if role is not None:
            AutoAuthPage(
                self.browser,
                course_id=self.course_id,
                staff=False,
                roles=role
            ).visit()
        self.team_page.visit()
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        self.assertTrue(
            self.team_management_page.membership_button_present
        )
        self.team_management_page.click_membership_button()
        self.edit_membership_page.wait_for_page()
        self.edit_membership_page.click_first_remove()
        if cancel:
            self.edit_membership_page.cancel_delete_membership_dialog()
            self.assertEqual(self.edit_membership_page.team_members, 1)
        else:
            expected_events = [
                {
                    'event_type': 'edx.team.learner_removed',
                    'event': {
                        'team_id': self.team['id'],
                        'remove_method': 'removed_by_admin',
                        'user_id': self.user_info['user_id']
                    }
                }
            ]
            with self.assert_events_match_during(
                event_filter=self.only_team_events, expected_events=expected_events
            ):
                self.edit_membership_page.confirm_delete_membership_dialog()
            self.assertEqual(self.edit_membership_page.team_members, 0)
        self.edit_membership_page.wait_for_page()

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_remove_membership(self, role):
        """
        Scenario: The user should be able to remove a membership
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the Edit Membership button
            And When I click the edit membership button
            Then I should see the edit membership page
            And When I click the remove button and confirm the dialog
            Then my membership should be removed, and I should remain on the page
        """
        self.edit_membership_helper(role, cancel=False)

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_cancel_remove_membership(self, role):
        """
        Scenario: The user should be able to cancel removing a membership
            Given I am staff user for a course with a team
            When I visit the Team profile page
            Then I should see the Edit Team button
            And When I click edit team button
            Then I should see the Edit Membership button
            And When I click the edit membership button
            Then I should see the edit membership page
            And When I click the remove button and cancel the dialog
            Then my membership should not be removed, and I should remain on the page
        """
        self.edit_membership_helper(role, cancel=True)
@attr(shard=5)
@ddt.ddt
class TeamPageTest(TeamsTabBase):
"""Tests for viewing a specific team"""
SEND_INVITE_TEXT = 'Send this link to friends so that they can join too.'
def setUp(self):
    super(TeamPageTest, self).setUp()
    # Single topic used by every test in this class.
    self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
def _set_team_configuration_and_membership(
        self,
        max_team_size=10,
        membership_team_index=0,
        visit_team_index=0,
        create_membership=True,
        another_user=False):
    """
    Set team configuration.

    Arguments:
        max_team_size (int): number of users a team can have
        membership_team_index (int): index of team user will join
        visit_team_index (int): index of team user will visit
        create_membership (bool): whether to create membership or not
        another_user (bool): another user to visit a team
    """
    #pylint: disable=attribute-defined-outside-init
    self.set_team_configuration(
        {'course_id': self.course_id, 'max_team_size': max_team_size, 'topics': [self.topic]}
    )
    # Always create two teams; the index arguments select among them.
    self.teams = self.create_teams(self.topic, 2)
    if create_membership:
        self.create_membership(self.user_info['username'], self.teams[membership_team_index]['id'])
    if another_user:
        # Log in as a fresh, different user before visiting the team.
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
    self.team_page = TeamPage(self.browser, self.course_id, self.teams[visit_team_index])
def setup_thread(self):
    """
    Create and return a thread for this test's discussion topic.
    """
    new_thread = Thread(
        id="test_thread_{}".format(uuid4().hex),
        commentable_id=self.teams[0]['discussion_topic_id'],
        body="Dummy text body."
    )
    MultipleThreadFixture([new_thread]).push()
    return new_thread
def setup_discussion_user(self, role=None, staff=False):
    """Set this test's user to have the given role in its
    discussions. Role is one of 'Community TA', 'Moderator',
    'Administrator', or 'Student'.
    """
    auth_kwargs = {'course_id': self.course_id, 'staff': staff}
    if role is not None:
        auth_kwargs['roles'] = role
    #pylint: disable=attribute-defined-outside-init
    self.user_info = AutoAuthPage(self.browser, **auth_kwargs).visit().user_info
def verify_teams_discussion_permissions(self, should_have_permission):
    """Verify that the teams discussion component is in the correct state
    for the test user. If `should_have_permission` is True, assert that
    the user can see controls for posting replies, voting, editing, and
    deleting. Otherwise, assert that those controls are hidden.
    """
    thread = self.setup_thread()
    self.team_page.visit()
    self.assertEqual(self.team_page.discussion_id, self.teams[0]['discussion_topic_id'])
    discussion = self.team_page.discussion_page
    discussion.wait_for_page()
    self.assertTrue(discussion.is_discussion_expanded())
    self.assertEqual(discussion.get_num_displayed_threads(), 1)
    self.assertTrue(discussion.has_thread(thread['id']))
    # Pick the assertion matching the expected permission state and apply it
    # to both the thread action menu and the reply form.
    assertion = self.assertTrue if should_have_permission else self.assertFalse
    assertion(discussion.q(css='.post-header-actions').present)
    assertion(discussion.q(css='.add-response').present)
def test_discussion_on_my_team_page(self):
    """
    Scenario: Team Page renders a discussion for a team to which I belong.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When the team has a discussion with a thread
        And I visit the Team page for that team
        Then I should see a discussion with the correct discussion_id
        And I should see the existing thread
        And I should see controls to change the state of the discussion
    """
    self._set_team_configuration_and_membership()
    self.verify_teams_discussion_permissions(should_have_permission=True)
@ddt.data(True, False)
def test_discussion_on_other_team_page(self, is_staff):
    """
    Scenario: Team Page renders a team discussion for a team to which I do
        not belong.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am not a member
        When the team has a discussion with a thread
        And I visit the Team page for that team
        Then I should see a discussion with the correct discussion_id
        And I should see the team's thread
        And I should not see controls to change the state of the discussion
    """
    self._set_team_configuration_and_membership(create_membership=False)
    self.setup_discussion_user(staff=is_staff)
    self.verify_teams_discussion_permissions(should_have_permission=False)
@ddt.data('Moderator', 'Community TA', 'Administrator')
def test_discussion_privileged(self, role):
    """
    Users with a privileged discussion role can moderate a team discussion
    even when they are not a member of the team.
    """
    self._set_team_configuration_and_membership(create_membership=False)
    self.setup_discussion_user(role=role)
    self.verify_teams_discussion_permissions(True)
def assert_team_details(self, num_members, is_member=True, max_size=10):
    """
    Verifies that user can see all the information, present on detail page according to their membership status.

    Arguments:
        num_members (int): number of users in a team
        is_member (bool) default True: True if request user is member else False
        max_size (int): number of users a team can have
    """
    self.assertEqual(
        self.team_page.team_capacity_text,
        self.team_page.format_capacity_text(num_members, max_size)
    )
    self.assertEqual(self.team_page.team_location, 'Afghanistan')
    self.assertEqual(self.team_page.team_language, 'Afar')
    self.assertEqual(self.team_page.team_members, num_members)
    # The member list only renders when the team is non-empty.
    members_assertion = self.assertTrue if num_members > 0 else self.assertFalse
    members_assertion(self.team_page.team_members_present)
    # Membership text and member-only controls track membership status.
    expected_membership_text = 'You are a member of this team.' if is_member else ''
    self.assertEqual(self.team_page.team_user_membership_text, expected_membership_text)
    controls_assertion = self.assertTrue if is_member else self.assertFalse
    controls_assertion(self.team_page.team_leave_link_present)
    controls_assertion(self.team_page.new_post_button_present)
def test_team_member_can_see_full_team_details(self):
    """
    Scenario: Team member can see full info for team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When I visit the Team page for that team
        Then I should see the full team detail
        And I should see the team members
        And I should see my team membership text
        And I should see the language & country
        And I should see the Leave Team and Invite Team
    """
    self._set_team_configuration_and_membership()
    self.team_page.visit()

    self.assert_team_details(
        num_members=1,
    )
def test_other_users_can_see_limited_team_details(self):
    """
    Scenario: Users who are not member of this team can only see limited info for this team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am not a member
        When I visit the Team page for that team
        Then I should not see full team detail
        And I should see the team members
        And I should not see my team membership text
        And I should not see the Leave Team and Invite Team links
    """
    self._set_team_configuration_and_membership(create_membership=False)
    self.team_page.visit()
    self.assert_team_details(is_member=False, num_members=0)
def test_user_can_navigate_to_members_profile_page(self):
    """
    Scenario: User can navigate to profile page via team member profile image.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When I visit the Team page for that team
        Then I should see profile images for the team members
        When I click on the first profile image
        Then I should be taken to the user's profile page
        And I should see the username on profile page
    """
    self._set_team_configuration_and_membership()
    self.team_page.visit()
    # Capture the username before navigating away from the team page.
    learner_name = self.team_page.first_member_username
    self.team_page.click_first_profile_image()
    learner_profile_page = LearnerProfilePage(self.browser, learner_name)
    learner_profile_page.wait_for_page()
    learner_profile_page.wait_for_field('username')
    self.assertTrue(learner_profile_page.field_is_visible('username'))
def test_join_team(self):
    """
    Scenario: A non-member can join a team.
    Given I am enrolled in a course with a team configuration, a topic,
    and a team belonging to that topic
    And I visit the Team page for that team
    Then I should see Join Team button
    And I should not see New Post button
    When I click on Join Team button
    Then there should be no Join Team button and no message
    And an analytics event should be emitted
    And I should see the updated information under Team Details
    And I should see New Post button
    And if I switch to "My Team", the team I have joined is displayed
    """
    self._set_team_configuration_and_membership(create_membership=False)
    browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
    browse_teams_page.visit()
    browse_teams_page.view_first_team()
    self.assertTrue(self.team_page.join_team_button_present)
    expected_events = [{
        'event_type': 'edx.team.learner_added',
        'event': {'add_method': 'joined_from_team_view'},
    }]
    with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
        self.team_page.click_join_team_button()
    self.assertFalse(self.team_page.join_team_button_present)
    self.assertFalse(self.team_page.join_team_message_present)
    self.assert_team_details(num_members=1, is_member=True)
    # Switching to "My Team" without reloading should already show the newly joined team.
    self.teams_page.click_all_topics()
    self.verify_my_team_count(1)
def test_already_member_message(self):
    """
    Scenario: Visiting another team while already on a team shows the
    "already in a team" message.
    Given I am enrolled in a course with a team configuration, a topic,
    and a team belonging to that topic
    And I am already a member of a team
    And I visit a team other than mine
    Then I should see `You are already in a team` message
    """
    self._set_team_configuration_and_membership(membership_team_index=0, visit_team_index=1)
    self.team_page.visit()
    expected_message = 'You already belong to another team.'
    self.assertEqual(self.team_page.join_team_message, expected_message)
    self.assert_team_details(is_member=False, num_members=0)
def test_team_full_message(self):
    """
    Scenario: A full team shows the `Team is full` message to non-members.
    Given I am enrolled in a course with a team configuration, a topic,
    and a team belonging to that topic
    And team has no space left
    And I am not a member of any team
    And I visit the team
    Then I should see `Team is full` message
    """
    # max_team_size=1 plus another_user=True fills the team before we visit it.
    self._set_team_configuration_and_membership(
        create_membership=True,
        max_team_size=1,
        membership_team_index=0,
        visit_team_index=0,
        another_user=True,
    )
    self.team_page.visit()
    expected_message = 'This team is full.'
    self.assertEqual(self.team_page.join_team_message, expected_message)
    self.assert_team_details(is_member=False, num_members=1, max_size=1)
def test_leave_team(self):
    """
    Scenario: User can leave a team.
    Given I am enrolled in a course with a team configuration, a topic,
    and a team belonging to that topic
    And I am a member of team
    And I visit the team
    And I should not see Join Team button
    And I should see New Post button
    Then I should see Leave Team link
    When I click on Leave Team link
    Then user should be removed from team
    And an analytics event should be emitted
    And I should see Join Team button
    And I should not see New Post button
    And if I switch to "My Team", the team I have left is not displayed
    """
    self._set_team_configuration_and_membership()
    self.team_page.visit()
    # Already a member, so the join affordance must be absent on arrival.
    self.assertFalse(self.team_page.join_team_button_present)
    self.assert_team_details(num_members=1)
    expected_events = [
        {
            'event_type': 'edx.team.learner_removed',
            'event': {
                'remove_method': 'self_removal'
            }
        }
    ]
    with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
        # I think we're seeing the same problem that we're seeing in
        # CreateTeamTest.test_user_can_see_error_message_for_missing_data.
        # We click on the "leave team" link after it's loaded, but before
        # its JavaScript event handler is added. Adding this sleep gives
        # enough time for that event handler to bind to the link. Sorry!
        # For the story to address this anti-pattern, see TNL-5820
        time.sleep(0.5)
        self.team_page.click_leave_team_link()
    # Leaving must flip the page back to the non-member state.
    self.assert_team_details(num_members=0, is_member=False)
    self.assertTrue(self.team_page.join_team_button_present)
    # Verify that if one switches to "My Team" without reloading the page, the old team no longer shows.
    self.teams_page.click_all_topics()
    self.verify_my_team_count(0)
def test_page_viewed_event(self):
    """
    Scenario: Visiting the team profile page fires a page-viewed analytics event.
    Given I am enrolled in a course with a team configuration and a topic
    When I visit the team profile page
    Then my browser should post a page viewed event
    """
    self._set_team_configuration_and_membership()
    expected_events = [{
        'event_type': 'edx.team.page_viewed',
        'event': {
            'page_name': 'single-team',
            'topic_id': self.topic['id'],
            'team_id': self.teams[0]['id'],
        },
    }]
    with self.assert_events_match_during(self.only_team_events, expected_events=expected_events):
        self.team_page.visit()
| codeparrot/github-code-clean |
# -*- Mode: Python; py-indent-offset: 4 -*-
# coding=utf-8
# vim: tabstop=4 shiftwidth=4 expandtab
import sys
import unittest
import tempfile
import shutil
import os
import locale
import subprocess
from gi.repository import GObject
import gobject
from gi.repository import GIMarshallingTests
from compathelper import _bytes
# Per-interpreter test constants: the heart glyph is spelled as raw UTF-8
# bytes on Python 2 and as a real str on Python 3.
if sys.version_info >= (3, 0):
    CONSTANT_UTF8 = "const ♥ utf8"
    CHAR_255 = bytes([255])
else:
    CONSTANT_UTF8 = "const \xe2\x99\xa5 utf8"
    PY2_UNICODE_UTF8 = unicode(CONSTANT_UTF8, 'UTF-8')
    CHAR_255 = '\xff'
CONSTANT_NUMBER = 42
class Number(object):
    """A numeric stand-in whose int()/float() conversions delegate to ``value``."""

    def __init__(self, value):
        self.value = value

    def __int__(self):
        return int(self.value)

    def __float__(self):
        return float(self.value)
class Sequence(object):
    """A minimal sequence wrapper: supports len() and item access only."""

    def __init__(self, sequence):
        self.sequence = sequence

    def __len__(self):
        return len(self.sequence)

    def __getitem__(self, key):
        return self.sequence[key]
class TestConstant(unittest.TestCase):
    """Constants exposed through introspection match their local definitions."""

    # Blocked by https://bugzilla.gnome.org/show_bug.cgi?id=595773
    # def test_constant_utf8(self):
    #     self.assertEqual(CONSTANT_UTF8, GIMarshallingTests.CONSTANT_UTF8)

    def test_constant_number(self):
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(CONSTANT_NUMBER, GIMarshallingTests.CONSTANT_NUMBER)
class TestBoolean(unittest.TestCase):
    """gboolean marshalling: return/in/out/inout directions."""

    def test_boolean_return(self):
        self.assertEqual(True, GIMarshallingTests.boolean_return_true())
        self.assertEqual(False, GIMarshallingTests.boolean_return_false())

    def test_boolean_in(self):
        GIMarshallingTests.boolean_in_true(True)
        GIMarshallingTests.boolean_in_false(False)
        # Truthy/falsy ints are accepted as booleans too.
        GIMarshallingTests.boolean_in_true(1)
        GIMarshallingTests.boolean_in_false(0)

    def test_boolean_out(self):
        self.assertEqual(True, GIMarshallingTests.boolean_out_true())
        self.assertEqual(False, GIMarshallingTests.boolean_out_false())

    def test_boolean_inout(self):
        self.assertEqual(False, GIMarshallingTests.boolean_inout_true_false(True))
        self.assertEqual(True, GIMarshallingTests.boolean_inout_false_true(False))
class TestInt8(unittest.TestCase):
    """gint8 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.G_MAXINT8
    MIN = GObject.G_MININT8

    def test_int8_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int8_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int8_return_min())

    def test_int8_in(self):
        # Avoid shadowing the max()/min() builtins.
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.int8_in_max(max_number)
        GIMarshallingTests.int8_in_min(min_number)
        # One step past each limit must overflow.
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.int8_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.int8_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.int8_in_max, "self.MAX")

    def test_int8_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int8_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int8_out_min())

    def test_int8_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.int8_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.int8_inout_min_max(Number(self.MIN)))
class TestUInt8(unittest.TestCase):
    """guint8 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.G_MAXUINT8

    def test_uint8_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint8_return())

    def test_uint8_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.uint8_in(number)
        # A single byte is also accepted.
        GIMarshallingTests.uint8_in(CHAR_255)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.uint8_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.uint8_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.uint8_in, "self.MAX")

    def test_uint8_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint8_out())

    def test_uint8_inout(self):
        self.assertEqual(0, GIMarshallingTests.uint8_inout(Number(self.MAX)))
class TestInt16(unittest.TestCase):
    """gint16 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.G_MAXINT16
    MIN = GObject.G_MININT16

    def test_int16_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int16_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int16_return_min())

    def test_int16_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.int16_in_max(max_number)
        GIMarshallingTests.int16_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.int16_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.int16_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.int16_in_max, "self.MAX")

    def test_int16_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int16_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int16_out_min())

    def test_int16_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.int16_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.int16_inout_min_max(Number(self.MIN)))
class TestUInt16(unittest.TestCase):
    """guint16 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.G_MAXUINT16

    def test_uint16_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint16_return())

    def test_uint16_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.uint16_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.uint16_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.uint16_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.uint16_in, "self.MAX")

    def test_uint16_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint16_out())

    def test_uint16_inout(self):
        self.assertEqual(0, GIMarshallingTests.uint16_inout(Number(self.MAX)))
class TestInt32(unittest.TestCase):
    """gint32 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.G_MAXINT32
    MIN = GObject.G_MININT32

    def test_int32_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int32_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int32_return_min())

    def test_int32_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.int32_in_max(max_number)
        GIMarshallingTests.int32_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.int32_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.int32_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.int32_in_max, "self.MAX")

    def test_int32_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int32_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int32_out_min())

    def test_int32_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.int32_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.int32_inout_min_max(Number(self.MIN)))
class TestUInt32(unittest.TestCase):
    """guint32 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.G_MAXUINT32

    def test_uint32_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint32_return())

    def test_uint32_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.uint32_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.uint32_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.uint32_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.uint32_in, "self.MAX")

    def test_uint32_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint32_out())

    def test_uint32_inout(self):
        self.assertEqual(0, GIMarshallingTests.uint32_inout(Number(self.MAX)))
class TestInt64(unittest.TestCase):
    """gint64 marshalling: range limits, overflow, and wrong-type rejection."""

    # No GObject constant for 64-bit limits here; compute them directly.
    MAX = 2 ** 63 - 1
    MIN = - (2 ** 63)

    def test_int64_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int64_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int64_return_min())

    def test_int64_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.int64_in_max(max_number)
        GIMarshallingTests.int64_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.int64_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.int64_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.int64_in_max, "self.MAX")

    def test_int64_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int64_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int64_out_min())

    def test_int64_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.int64_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.int64_inout_min_max(Number(self.MIN)))
class TestUInt64(unittest.TestCase):
    """guint64 marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = 2 ** 64 - 1

    def test_uint64_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint64_return())

    def test_uint64_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.uint64_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.uint64_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.uint64_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.uint64_in, "self.MAX")

    def test_uint64_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint64_out())

    def test_uint64_inout(self):
        self.assertEqual(0, GIMarshallingTests.uint64_inout(Number(self.MAX)))
class TestShort(unittest.TestCase):
    """gshort marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.constants.G_MAXSHORT
    MIN = GObject.constants.G_MINSHORT

    def test_short_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.short_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.short_return_min())

    def test_short_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.short_in_max(max_number)
        GIMarshallingTests.short_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.short_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.short_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.short_in_max, "self.MAX")

    def test_short_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.short_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.short_out_min())

    def test_short_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.short_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.short_inout_min_max(Number(self.MIN)))
class TestUShort(unittest.TestCase):
    """gushort marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.constants.G_MAXUSHORT

    def test_ushort_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.ushort_return())

    def test_ushort_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.ushort_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.ushort_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.ushort_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.ushort_in, "self.MAX")

    def test_ushort_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.ushort_out())

    def test_ushort_inout(self):
        self.assertEqual(0, GIMarshallingTests.ushort_inout(Number(self.MAX)))
class TestInt(unittest.TestCase):
    """gint marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.constants.G_MAXINT
    MIN = GObject.constants.G_MININT

    def test_int_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int_return_min())

    def test_int_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.int_in_max(max_number)
        GIMarshallingTests.int_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.int_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.int_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.int_in_max, "self.MAX")

    def test_int_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.int_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.int_out_min())

    def test_int_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.int_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.int_inout_min_max(Number(self.MIN)))
        # An extra positional argument must be rejected.
        self.assertRaises(TypeError, GIMarshallingTests.int_inout_min_max, Number(self.MIN), CONSTANT_NUMBER)
class TestUInt(unittest.TestCase):
    """guint marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.constants.G_MAXUINT

    def test_uint_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint_return())

    def test_uint_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.uint_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.uint_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.uint_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.uint_in, "self.MAX")

    def test_uint_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.uint_out())

    def test_uint_inout(self):
        self.assertEqual(0, GIMarshallingTests.uint_inout(Number(self.MAX)))
class TestLong(unittest.TestCase):
    """glong marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.constants.G_MAXLONG
    MIN = GObject.constants.G_MINLONG

    def test_long_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.long_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.long_return_min())

    def test_long_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.long_in_max(max_number)
        GIMarshallingTests.long_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.long_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.long_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.long_in_max, "self.MAX")

    def test_long_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.long_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.long_out_min())

    def test_long_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.long_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.long_inout_min_max(Number(self.MIN)))
class TestULong(unittest.TestCase):
    """gulong marshalling: range limits, overflow, and wrong-type rejection."""

    MAX = GObject.constants.G_MAXULONG

    def test_ulong_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.ulong_return())

    def test_ulong_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.ulong_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.ulong_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.ulong_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.ulong_in, "self.MAX")

    def test_ulong_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.ulong_out())

    def test_ulong_inout(self):
        self.assertEqual(0, GIMarshallingTests.ulong_inout(Number(self.MAX)))
class TestSSize(unittest.TestCase):
    """gssize marshalling; limits mirror glong on this platform."""

    MAX = GObject.constants.G_MAXLONG
    MIN = GObject.constants.G_MINLONG

    def test_ssize_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.ssize_return_max())
        self.assertEqual(self.MIN, GIMarshallingTests.ssize_return_min())

    def test_ssize_in(self):
        max_number = Number(self.MAX)
        min_number = Number(self.MIN)
        GIMarshallingTests.ssize_in_max(max_number)
        GIMarshallingTests.ssize_in_min(min_number)
        max_number.value += 1
        min_number.value -= 1
        self.assertRaises(ValueError, GIMarshallingTests.ssize_in_max, max_number)
        self.assertRaises(ValueError, GIMarshallingTests.ssize_in_min, min_number)
        self.assertRaises(TypeError, GIMarshallingTests.ssize_in_max, "self.MAX")

    def test_ssize_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.ssize_out_max())
        self.assertEqual(self.MIN, GIMarshallingTests.ssize_out_min())

    def test_ssize_inout(self):
        self.assertEqual(self.MIN, GIMarshallingTests.ssize_inout_max_min(Number(self.MAX)))
        self.assertEqual(self.MAX, GIMarshallingTests.ssize_inout_min_max(Number(self.MIN)))
class TestSize(unittest.TestCase):
    """gsize marshalling; limits mirror gulong on this platform."""

    MAX = GObject.constants.G_MAXULONG

    def test_size_return(self):
        self.assertEqual(self.MAX, GIMarshallingTests.size_return())

    def test_size_in(self):
        number = Number(self.MAX)
        GIMarshallingTests.size_in(number)
        number.value += 1
        self.assertRaises(ValueError, GIMarshallingTests.size_in, number)
        self.assertRaises(ValueError, GIMarshallingTests.size_in, Number(-1))
        self.assertRaises(TypeError, GIMarshallingTests.size_in, "self.MAX")

    def test_size_out(self):
        self.assertEqual(self.MAX, GIMarshallingTests.size_out())

    def test_size_inout(self):
        self.assertEqual(0, GIMarshallingTests.size_inout(Number(self.MAX)))
class TestFloat(unittest.TestCase):
    """gfloat marshalling; comparisons are approximate (single precision)."""

    MAX = GObject.constants.G_MAXFLOAT
    MIN = GObject.constants.G_MINFLOAT

    def test_float_return(self):
        # assertAlmostEquals is a deprecated alias (removed in Python 3.12).
        self.assertAlmostEqual(self.MAX, GIMarshallingTests.float_return())

    def test_float_in(self):
        GIMarshallingTests.float_in(Number(self.MAX))
        self.assertRaises(TypeError, GIMarshallingTests.float_in, "self.MAX")

    def test_float_out(self):
        self.assertAlmostEqual(self.MAX, GIMarshallingTests.float_out())

    def test_float_inout(self):
        self.assertAlmostEqual(self.MIN, GIMarshallingTests.float_inout(Number(self.MAX)))
class TestDouble(unittest.TestCase):
    """gdouble marshalling; comparisons are approximate (double precision)."""

    MAX = GObject.constants.G_MAXDOUBLE
    MIN = GObject.constants.G_MINDOUBLE

    def test_double_return(self):
        self.assertAlmostEqual(self.MAX, GIMarshallingTests.double_return())

    def test_double_in(self):
        GIMarshallingTests.double_in(Number(self.MAX))
        self.assertRaises(TypeError, GIMarshallingTests.double_in, "self.MAX")

    def test_double_out(self):
        self.assertAlmostEqual(self.MAX, GIMarshallingTests.double_out())

    def test_double_inout(self):
        self.assertAlmostEqual(self.MIN, GIMarshallingTests.double_inout(Number(self.MAX)))
class TestGType(unittest.TestCase):
    """GType marshalling: return/in/out/inout directions."""

    def test_gtype_return(self):
        self.assertEqual(GObject.TYPE_NONE, GIMarshallingTests.gtype_return())

    def test_gtype_in(self):
        GIMarshallingTests.gtype_in(GObject.TYPE_NONE)
        self.assertRaises(TypeError, GIMarshallingTests.gtype_in, "GObject.TYPE_NONE")

    def test_gtype_out(self):
        self.assertEqual(GObject.TYPE_NONE, GIMarshallingTests.gtype_out())

    def test_gtype_inout(self):
        self.assertEqual(GObject.TYPE_INT, GIMarshallingTests.gtype_inout(GObject.TYPE_NONE))
class TestUtf8(unittest.TestCase):
    """utf8 string marshalling with none/full transfer modes."""

    def test_utf8_none_return(self):
        self.assertEqual(CONSTANT_UTF8, GIMarshallingTests.utf8_none_return())

    def test_utf8_full_return(self):
        self.assertEqual(CONSTANT_UTF8, GIMarshallingTests.utf8_full_return())

    def test_utf8_none_in(self):
        GIMarshallingTests.utf8_none_in(CONSTANT_UTF8)
        if sys.version_info < (3, 0):
            # On Python 2 a unicode object is accepted as well as a str.
            GIMarshallingTests.utf8_none_in(PY2_UNICODE_UTF8)
        self.assertRaises(TypeError, GIMarshallingTests.utf8_none_in, CONSTANT_NUMBER)
        self.assertRaises(TypeError, GIMarshallingTests.utf8_none_in, None)

    def test_utf8_none_out(self):
        self.assertEqual(CONSTANT_UTF8, GIMarshallingTests.utf8_none_out())

    def test_utf8_full_out(self):
        self.assertEqual(CONSTANT_UTF8, GIMarshallingTests.utf8_full_out())

    def test_utf8_dangling_out(self):
        # Only checks the call does not crash; the out value is dangling.
        GIMarshallingTests.utf8_dangling_out()

    def test_utf8_none_inout(self):
        self.assertEqual("", GIMarshallingTests.utf8_none_inout(CONSTANT_UTF8))

    def test_utf8_full_inout(self):
        self.assertEqual("", GIMarshallingTests.utf8_full_inout(CONSTANT_UTF8))
class TestArray(unittest.TestCase):
    """C-array, zero-terminated-array and GStrv marshalling.

    Fixes two exact duplicate method definitions from the original
    (``test_array_zero_terminated_out`` and ``test_gstrv_out`` were each
    defined twice; the first definition was silently shadowed).
    """

    def test_array_fixed_int_return(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.array_fixed_int_return())

    def test_array_fixed_short_return(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.array_fixed_short_return())

    def test_array_fixed_int_in(self):
        GIMarshallingTests.array_fixed_int_in(Sequence([-1, 0, 1, 2]))
        self.assertRaises(TypeError, GIMarshallingTests.array_fixed_int_in, Sequence([-1, '0', 1, 2]))
        self.assertRaises(TypeError, GIMarshallingTests.array_fixed_int_in, 42)
        self.assertRaises(TypeError, GIMarshallingTests.array_fixed_int_in, None)

    def test_array_fixed_short_in(self):
        GIMarshallingTests.array_fixed_short_in(Sequence([-1, 0, 1, 2]))

    def test_array_fixed_out(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.array_fixed_out())

    def test_array_fixed_inout(self):
        self.assertEqual([2, 1, 0, -1], GIMarshallingTests.array_fixed_inout([-1, 0, 1, 2]))

    def test_array_return(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.array_return())

    def test_array_in(self):
        GIMarshallingTests.array_in(Sequence([-1, 0, 1, 2]))

    def test_array_uint8_in(self):
        GIMarshallingTests.array_uint8_in(Sequence([97, 98, 99, 100]))
        GIMarshallingTests.array_uint8_in(_bytes("abcd"))

    def test_array_out(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.array_out())

    def test_array_inout(self):
        self.assertEqual([-2, -1, 0, 1, 2], GIMarshallingTests.array_inout(Sequence([-1, 0, 1, 2])))

    def test_method_array_in(self):
        object_ = GIMarshallingTests.Object()
        object_.method_array_in(Sequence([-1, 0, 1, 2]))

    def test_method_array_out(self):
        object_ = GIMarshallingTests.Object()
        self.assertEqual([-1, 0, 1, 2], object_.method_array_out())

    def test_method_array_inout(self):
        object_ = GIMarshallingTests.Object()
        self.assertEqual([-2, -1, 0, 1, 2], object_.method_array_inout(Sequence([-1, 0, 1, 2])))

    def test_method_array_return(self):
        object_ = GIMarshallingTests.Object()
        self.assertEqual([-1, 0, 1, 2], object_.method_array_return())

    def test_array_fixed_out_struct(self):
        struct1, struct2 = GIMarshallingTests.array_fixed_out_struct()
        self.assertEqual(7, struct1.long_)
        self.assertEqual(6, struct1.int8)
        self.assertEqual(6, struct2.long_)
        self.assertEqual(7, struct2.int8)

    def test_array_zero_terminated_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.array_zero_terminated_return())

    def test_array_zero_terminated_in(self):
        GIMarshallingTests.array_zero_terminated_in(Sequence(['0', '1', '2']))

    def test_array_zero_terminated_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.array_zero_terminated_out())

    def test_array_zero_terminated_inout(self):
        self.assertEqual(['-1', '0', '1', '2'], GIMarshallingTests.array_zero_terminated_inout(['0', '1', '2']))

    def test_gstrv_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gstrv_return())

    def test_gstrv_in(self):
        GIMarshallingTests.gstrv_in(Sequence(['0', '1', '2']))

    def test_gstrv_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gstrv_out())

    def test_gstrv_inout(self):
        self.assertEqual(['-1', '0', '1', '2'], GIMarshallingTests.gstrv_inout(['0', '1', '2']))
class TestGArray(unittest.TestCase):
    """GArray marshalling with none/container/full transfer modes."""

    def test_garray_int_none_return(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.garray_int_none_return())

    def test_garray_utf8_none_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.garray_utf8_none_return())

    def test_garray_utf8_container_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.garray_utf8_container_return())

    def test_garray_utf8_full_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.garray_utf8_full_return())

    def test_garray_int_none_in(self):
        GIMarshallingTests.garray_int_none_in(Sequence([-1, 0, 1, 2]))
        self.assertRaises(TypeError, GIMarshallingTests.garray_int_none_in, Sequence([-1, '0', 1, 2]))
        self.assertRaises(TypeError, GIMarshallingTests.garray_int_none_in, 42)
        self.assertRaises(TypeError, GIMarshallingTests.garray_int_none_in, None)

    def test_garray_utf8_none_in(self):
        GIMarshallingTests.garray_utf8_none_in(Sequence(['0', '1', '2']))

    def test_garray_utf8_none_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.garray_utf8_none_out())

    def test_garray_utf8_container_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.garray_utf8_container_out())

    def test_garray_utf8_full_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.garray_utf8_full_out())

    def test_garray_utf8_none_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.garray_utf8_none_inout(Sequence(('0', '1', '2'))))

    def test_garray_utf8_container_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.garray_utf8_container_inout(['0', '1', '2']))

    def test_garray_utf8_full_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.garray_utf8_full_inout(['0', '1', '2']))
class TestGPtrArray(unittest.TestCase):
    """GPtrArray marshalling with none/container/full transfer modes."""

    def test_gptrarray_int_none_return(self):
        self.assertEqual([0, 1, 2, 3], GIMarshallingTests.gptrarray_int_none_return())

    def test_gptrarray_utf8_none_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gptrarray_utf8_none_return())

    def test_gptrarray_utf8_container_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gptrarray_utf8_container_return())

    def test_gptrarray_utf8_full_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gptrarray_utf8_full_return())

    def test_gptrarray_int_none_in(self):
        GIMarshallingTests.gptrarray_int_none_in(Sequence([0, 1, 2, 3]))
        self.assertRaises(TypeError, GIMarshallingTests.gptrarray_int_none_in, Sequence([-1, '0', 1, 2]))
        self.assertRaises(TypeError, GIMarshallingTests.gptrarray_int_none_in, 42)
        self.assertRaises(TypeError, GIMarshallingTests.gptrarray_int_none_in, None)

    def test_gptrarray_utf8_none_in(self):
        GIMarshallingTests.gptrarray_utf8_none_in(Sequence(['0', '1', '2']))

    def test_gptrarray_utf8_none_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gptrarray_utf8_none_out())

    def test_gptrarray_utf8_container_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gptrarray_utf8_container_out())

    def test_gptrarray_utf8_full_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gptrarray_utf8_full_out())

    def test_gptrarray_utf8_none_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.gptrarray_utf8_none_inout(Sequence(('0', '1', '2'))))

    def test_gptrarray_utf8_container_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.gptrarray_utf8_container_inout(['0', '1', '2']))

    def test_gptrarray_utf8_full_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.gptrarray_utf8_full_inout(['0', '1', '2']))
class TestGList(unittest.TestCase):
    """GList marshalling with none/container/full transfer modes."""

    def test_glist_int_none_return(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.glist_int_none_return())

    def test_glist_utf8_none_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.glist_utf8_none_return())

    def test_glist_utf8_container_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.glist_utf8_container_return())

    def test_glist_utf8_full_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.glist_utf8_full_return())

    def test_glist_int_none_in(self):
        GIMarshallingTests.glist_int_none_in(Sequence((-1, 0, 1, 2)))
        self.assertRaises(TypeError, GIMarshallingTests.glist_int_none_in, Sequence((-1, '0', 1, 2)))
        self.assertRaises(TypeError, GIMarshallingTests.glist_int_none_in, 42)
        self.assertRaises(TypeError, GIMarshallingTests.glist_int_none_in, None)

    def test_glist_utf8_none_in(self):
        GIMarshallingTests.glist_utf8_none_in(Sequence(('0', '1', '2')))

    def test_glist_utf8_none_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.glist_utf8_none_out())

    def test_glist_utf8_container_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.glist_utf8_container_out())

    def test_glist_utf8_full_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.glist_utf8_full_out())

    def test_glist_utf8_none_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.glist_utf8_none_inout(Sequence(('0', '1', '2'))))

    def test_glist_utf8_container_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.glist_utf8_container_inout(('0', '1', '2')))

    def test_glist_utf8_full_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.glist_utf8_full_inout(('0', '1', '2')))
class TestGSList(unittest.TestCase):
    """GSList marshalling with none/container/full transfer modes."""

    def test_gslist_int_none_return(self):
        self.assertEqual([-1, 0, 1, 2], GIMarshallingTests.gslist_int_none_return())

    def test_gslist_utf8_none_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gslist_utf8_none_return())

    def test_gslist_utf8_container_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gslist_utf8_container_return())

    def test_gslist_utf8_full_return(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gslist_utf8_full_return())

    def test_gslist_int_none_in(self):
        GIMarshallingTests.gslist_int_none_in(Sequence((-1, 0, 1, 2)))
        self.assertRaises(TypeError, GIMarshallingTests.gslist_int_none_in, Sequence((-1, '0', 1, 2)))
        self.assertRaises(TypeError, GIMarshallingTests.gslist_int_none_in, 42)
        self.assertRaises(TypeError, GIMarshallingTests.gslist_int_none_in, None)

    def test_gslist_utf8_none_in(self):
        GIMarshallingTests.gslist_utf8_none_in(Sequence(('0', '1', '2')))

    def test_gslist_utf8_none_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gslist_utf8_none_out())

    def test_gslist_utf8_container_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gslist_utf8_container_out())

    def test_gslist_utf8_full_out(self):
        self.assertEqual(['0', '1', '2'], GIMarshallingTests.gslist_utf8_full_out())

    def test_gslist_utf8_none_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.gslist_utf8_none_inout(Sequence(('0', '1', '2'))))

    def test_gslist_utf8_container_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.gslist_utf8_container_inout(('0', '1', '2')))

    def test_gslist_utf8_full_inout(self):
        self.assertEqual(['-2', '-1', '0', '1'], GIMarshallingTests.gslist_utf8_full_inout(('0', '1', '2')))
class TestGHashTable(unittest.TestCase):
    """Marshalling tests for GHashTable arguments and returns.

    Fixes: ``test_ghashtable_int_none_return`` was defined twice — the second
    definition (which actually exercised the *utf8* variant) silently shadowed
    the first, so the int test never ran. The three utf8 return tests are now
    named correctly. Also replaced the deprecated ``assertEquals`` alias
    (removed in Python 3.12) with ``assertEqual``.
    """

    def test_ghashtable_int_none_return(self):
        self.assertEqual({-1: 1, 0: 0, 1: -1, 2: -2}, GIMarshallingTests.ghashtable_int_none_return())

    def test_ghashtable_utf8_none_return(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '-1', '2': '-2'}, GIMarshallingTests.ghashtable_utf8_none_return())

    def test_ghashtable_utf8_container_return(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '-1', '2': '-2'}, GIMarshallingTests.ghashtable_utf8_container_return())

    def test_ghashtable_utf8_full_return(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '-1', '2': '-2'}, GIMarshallingTests.ghashtable_utf8_full_return())

    def test_ghashtable_int_none_in(self):
        GIMarshallingTests.ghashtable_int_none_in({-1: 1, 0: 0, 1: -1, 2: -2})
        # Wrong key type, wrong value type, non-dict and None must be rejected.
        self.assertRaises(TypeError, GIMarshallingTests.ghashtable_int_none_in, {-1: 1, '0': 0, 1: -1, 2: -2})
        self.assertRaises(TypeError, GIMarshallingTests.ghashtable_int_none_in, {-1: 1, 0: '0', 1: -1, 2: -2})
        self.assertRaises(TypeError, GIMarshallingTests.ghashtable_int_none_in, '{-1: 1, 0: 0, 1: -1, 2: -2}')
        self.assertRaises(TypeError, GIMarshallingTests.ghashtable_int_none_in, None)

    def test_ghashtable_utf8_none_in(self):
        GIMarshallingTests.ghashtable_utf8_none_in({'-1': '1', '0': '0', '1': '-1', '2': '-2'})

    def test_ghashtable_utf8_none_out(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '-1', '2': '-2'}, GIMarshallingTests.ghashtable_utf8_none_out())

    def test_ghashtable_utf8_container_out(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '-1', '2': '-2'}, GIMarshallingTests.ghashtable_utf8_container_out())

    def test_ghashtable_utf8_full_out(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '-1', '2': '-2'}, GIMarshallingTests.ghashtable_utf8_full_out())

    def test_ghashtable_utf8_none_inout(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '1'},
                         GIMarshallingTests.ghashtable_utf8_none_inout({'-1': '1', '0': '0', '1': '-1', '2': '-2'}))

    def test_ghashtable_utf8_container_inout(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '1'},
                         GIMarshallingTests.ghashtable_utf8_container_inout({'-1': '1', '0': '0', '1': '-1', '2': '-2'}))

    def test_ghashtable_utf8_full_inout(self):
        self.assertEqual({'-1': '1', '0': '0', '1': '1'},
                         GIMarshallingTests.ghashtable_utf8_full_inout({'-1': '1', '0': '0', '1': '-1', '2': '-2'}))
class TestGValue(unittest.TestCase):
    """Marshalling tests for GValue: plain Python values and explicit GObject.Value both work.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_gvalue_return(self):
        self.assertEqual(42, GIMarshallingTests.gvalue_return())

    def test_gvalue_in(self):
        # A plain int is implicitly wrapped; an explicit Value is also accepted.
        GIMarshallingTests.gvalue_in(42)
        value = GObject.Value()
        value.init(GObject.TYPE_INT)
        value.set_int(42)
        GIMarshallingTests.gvalue_in(value)

    def test_gvalue_out(self):
        self.assertEqual(42, GIMarshallingTests.gvalue_out())

    def test_gvalue_inout(self):
        self.assertEqual('42', GIMarshallingTests.gvalue_inout(42))
        value = GObject.Value()
        value.init(GObject.TYPE_INT)
        value.set_int(42)
        self.assertEqual('42', GIMarshallingTests.gvalue_inout(value))
class TestGClosure(unittest.TestCase):
    """Marshalling tests for GClosure arguments."""

    def test_gclosure_in(self):
        # A Python callable is marshalled into a GClosure.
        GIMarshallingTests.gclosure_in(lambda: 42)
        # A closure obtained from C must round-trip back into a C call.
        returned_closure = GIMarshallingTests.gclosure_return()
        GIMarshallingTests.gclosure_in(returned_closure)
        # Non-callables are rejected.
        for bad_input in (42, None):
            self.assertRaises(TypeError, GIMarshallingTests.gclosure_in, bad_input)
class TestPointer(unittest.TestCase):
    """Marshalling tests for raw gpointer values.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_pointer_in_return(self):
        self.assertEqual(GIMarshallingTests.pointer_in_return(42), 42)
class TestEnum(unittest.TestCase):
    """Marshalling tests for plain (non-GType) enums.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    @classmethod
    def setUpClass(cls):
        '''Run tests under a test locale.
        Upper case conversion of member names should not be locale specific;
        e. g. in Turkish, "i".upper() == "i", which gives results like "iNVALiD"
        Run test under a locale which defines toupper('a') == 'a'
        '''
        cls.locale_dir = tempfile.mkdtemp()
        subprocess.check_call(['localedef', '-i',
                               os.path.join(os.path.dirname(os.path.realpath(__file__)), 'te_ST@nouppera'),
                               '-c', '-f', 'UTF-8', os.path.join(cls.locale_dir, 'te_ST.UTF-8@nouppera')])
        os.environ['LOCPATH'] = cls.locale_dir
        locale.setlocale(locale.LC_ALL, 'te_ST.UTF-8@nouppera')

    @classmethod
    def tearDownClass(cls):
        # Restore the default locale and clean up the generated locale data.
        locale.setlocale(locale.LC_ALL, 'C')
        shutil.rmtree(cls.locale_dir)
        try:
            del os.environ['LOCPATH']
        except KeyError:
            pass

    def test_enum(self):
        self.assertTrue(issubclass(GIMarshallingTests.Enum, int))
        self.assertTrue(isinstance(GIMarshallingTests.Enum.VALUE1, GIMarshallingTests.Enum))
        self.assertTrue(isinstance(GIMarshallingTests.Enum.VALUE2, GIMarshallingTests.Enum))
        self.assertTrue(isinstance(GIMarshallingTests.Enum.VALUE3, GIMarshallingTests.Enum))
        self.assertEqual(42, GIMarshallingTests.Enum.VALUE3)

    def test_value_nick_and_name(self):
        self.assertEqual(GIMarshallingTests.Enum.VALUE1.value_nick, 'value1')
        self.assertEqual(GIMarshallingTests.Enum.VALUE2.value_nick, 'value2')
        self.assertEqual(GIMarshallingTests.Enum.VALUE3.value_nick, 'value3')
        self.assertEqual(GIMarshallingTests.Enum.VALUE1.value_name, 'GI_MARSHALLING_TESTS_ENUM_VALUE1')
        self.assertEqual(GIMarshallingTests.Enum.VALUE2.value_name, 'GI_MARSHALLING_TESTS_ENUM_VALUE2')
        self.assertEqual(GIMarshallingTests.Enum.VALUE3.value_name, 'GI_MARSHALLING_TESTS_ENUM_VALUE3')

    def test_enum_in(self):
        GIMarshallingTests.enum_in(GIMarshallingTests.Enum.VALUE3)
        # The raw int value of a member is accepted; other ints are not.
        GIMarshallingTests.enum_in(42)
        self.assertRaises(TypeError, GIMarshallingTests.enum_in, 43)
        self.assertRaises(TypeError, GIMarshallingTests.enum_in, 'GIMarshallingTests.Enum.VALUE3')

    def test_enum_out(self):
        enum = GIMarshallingTests.enum_out()
        self.assertTrue(isinstance(enum, GIMarshallingTests.Enum))
        self.assertEqual(enum, GIMarshallingTests.Enum.VALUE3)

    def test_enum_inout(self):
        enum = GIMarshallingTests.enum_inout(GIMarshallingTests.Enum.VALUE3)
        self.assertTrue(isinstance(enum, GIMarshallingTests.Enum))
        self.assertEqual(enum, GIMarshallingTests.Enum.VALUE1)

    def test_enum_second(self):
        # check for the bug where different non-gtype enums share the same class
        self.assertNotEqual(GIMarshallingTests.Enum, GIMarshallingTests.SecondEnum)
        # check that values are not being shared between different enums
        self.assertTrue(hasattr(GIMarshallingTests.SecondEnum, "SECONDVALUE1"))
        self.assertRaises(AttributeError, getattr, GIMarshallingTests.Enum, "SECONDVALUE1")
        self.assertTrue(hasattr(GIMarshallingTests.Enum, "VALUE1"))
        self.assertRaises(AttributeError, getattr, GIMarshallingTests.SecondEnum, "VALUE1")
class TestGEnum(unittest.TestCase):
    """Marshalling tests for GType-registered enums.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_genum(self):
        self.assertTrue(issubclass(GIMarshallingTests.GEnum, GObject.GEnum))
        self.assertTrue(isinstance(GIMarshallingTests.GEnum.VALUE1, GIMarshallingTests.GEnum))
        self.assertTrue(isinstance(GIMarshallingTests.GEnum.VALUE2, GIMarshallingTests.GEnum))
        self.assertTrue(isinstance(GIMarshallingTests.GEnum.VALUE3, GIMarshallingTests.GEnum))
        self.assertEqual(42, GIMarshallingTests.GEnum.VALUE3)

    def test_value_nick_and_name(self):
        self.assertEqual(GIMarshallingTests.GEnum.VALUE1.value_nick, 'value1')
        self.assertEqual(GIMarshallingTests.GEnum.VALUE2.value_nick, 'value2')
        self.assertEqual(GIMarshallingTests.GEnum.VALUE3.value_nick, 'value3')
        self.assertEqual(GIMarshallingTests.GEnum.VALUE1.value_name, 'GI_MARSHALLING_TESTS_GENUM_VALUE1')
        self.assertEqual(GIMarshallingTests.GEnum.VALUE2.value_name, 'GI_MARSHALLING_TESTS_GENUM_VALUE2')
        self.assertEqual(GIMarshallingTests.GEnum.VALUE3.value_name, 'GI_MARSHALLING_TESTS_GENUM_VALUE3')

    def test_genum_in(self):
        GIMarshallingTests.genum_in(GIMarshallingTests.GEnum.VALUE3)
        # The raw int value of a member is accepted; other ints are not.
        GIMarshallingTests.genum_in(42)
        self.assertRaises(TypeError, GIMarshallingTests.genum_in, 43)
        self.assertRaises(TypeError, GIMarshallingTests.genum_in, 'GIMarshallingTests.GEnum.VALUE3')

    def test_genum_out(self):
        genum = GIMarshallingTests.genum_out()
        self.assertTrue(isinstance(genum, GIMarshallingTests.GEnum))
        self.assertEqual(genum, GIMarshallingTests.GEnum.VALUE3)

    def test_genum_inout(self):
        genum = GIMarshallingTests.genum_inout(GIMarshallingTests.GEnum.VALUE3)
        self.assertTrue(isinstance(genum, GIMarshallingTests.GEnum))
        self.assertEqual(genum, GIMarshallingTests.GEnum.VALUE1)
class TestGFlags(unittest.TestCase):
    """Marshalling tests for GType-registered flags.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_flags(self):
        self.assertTrue(issubclass(GIMarshallingTests.Flags, GObject.GFlags))
        self.assertTrue(isinstance(GIMarshallingTests.Flags.VALUE1, GIMarshallingTests.Flags))
        self.assertTrue(isinstance(GIMarshallingTests.Flags.VALUE2, GIMarshallingTests.Flags))
        self.assertTrue(isinstance(GIMarshallingTests.Flags.VALUE3, GIMarshallingTests.Flags))
        # __or__() operation should still return an instance, not an int.
        self.assertTrue(isinstance(GIMarshallingTests.Flags.VALUE1 | GIMarshallingTests.Flags.VALUE2,
                                   GIMarshallingTests.Flags))
        self.assertEqual(1 << 1, GIMarshallingTests.Flags.VALUE2)

    def test_value_nick_and_name(self):
        self.assertEqual(GIMarshallingTests.Flags.VALUE1.first_value_nick, 'value1')
        self.assertEqual(GIMarshallingTests.Flags.VALUE2.first_value_nick, 'value2')
        self.assertEqual(GIMarshallingTests.Flags.VALUE3.first_value_nick, 'value3')
        self.assertEqual(GIMarshallingTests.Flags.VALUE1.first_value_name, 'GI_MARSHALLING_TESTS_FLAGS_VALUE1')
        self.assertEqual(GIMarshallingTests.Flags.VALUE2.first_value_name, 'GI_MARSHALLING_TESTS_FLAGS_VALUE2')
        self.assertEqual(GIMarshallingTests.Flags.VALUE3.first_value_name, 'GI_MARSHALLING_TESTS_FLAGS_VALUE3')

    def test_flags_in(self):
        GIMarshallingTests.flags_in(GIMarshallingTests.Flags.VALUE2)
        # result of __or__() operation should still be valid instance, not an int.
        GIMarshallingTests.flags_in(GIMarshallingTests.Flags.VALUE2 | GIMarshallingTests.Flags.VALUE2)
        GIMarshallingTests.flags_in_zero(Number(0))
        # Raw ints and strings are rejected for flags parameters.
        self.assertRaises(TypeError, GIMarshallingTests.flags_in, 1 << 1)
        self.assertRaises(TypeError, GIMarshallingTests.flags_in, 'GIMarshallingTests.Flags.VALUE2')

    def test_flags_out(self):
        flags = GIMarshallingTests.flags_out()
        self.assertTrue(isinstance(flags, GIMarshallingTests.Flags))
        self.assertEqual(flags, GIMarshallingTests.Flags.VALUE2)

    def test_flags_inout(self):
        flags = GIMarshallingTests.flags_inout(GIMarshallingTests.Flags.VALUE2)
        self.assertTrue(isinstance(flags, GIMarshallingTests.Flags))
        self.assertEqual(flags, GIMarshallingTests.Flags.VALUE1)
class TestNoTypeFlags(unittest.TestCase):
    """Marshalling tests for flags that have no registered GType.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_flags(self):
        self.assertTrue(issubclass(GIMarshallingTests.NoTypeFlags, GObject.GFlags))
        self.assertTrue(isinstance(GIMarshallingTests.NoTypeFlags.VALUE1, GIMarshallingTests.NoTypeFlags))
        self.assertTrue(isinstance(GIMarshallingTests.NoTypeFlags.VALUE2, GIMarshallingTests.NoTypeFlags))
        self.assertTrue(isinstance(GIMarshallingTests.NoTypeFlags.VALUE3, GIMarshallingTests.NoTypeFlags))
        # __or__() operation should still return an instance, not an int.
        self.assertTrue(isinstance(GIMarshallingTests.NoTypeFlags.VALUE1 | GIMarshallingTests.NoTypeFlags.VALUE2,
                                   GIMarshallingTests.NoTypeFlags))
        self.assertEqual(1 << 1, GIMarshallingTests.NoTypeFlags.VALUE2)

    def test_value_nick_and_name(self):
        self.assertEqual(GIMarshallingTests.NoTypeFlags.VALUE1.first_value_nick, 'value1')
        self.assertEqual(GIMarshallingTests.NoTypeFlags.VALUE2.first_value_nick, 'value2')
        self.assertEqual(GIMarshallingTests.NoTypeFlags.VALUE3.first_value_nick, 'value3')
        self.assertEqual(GIMarshallingTests.NoTypeFlags.VALUE1.first_value_name, 'GI_MARSHALLING_TESTS_NO_TYPE_FLAGS_VALUE1')
        self.assertEqual(GIMarshallingTests.NoTypeFlags.VALUE2.first_value_name, 'GI_MARSHALLING_TESTS_NO_TYPE_FLAGS_VALUE2')
        self.assertEqual(GIMarshallingTests.NoTypeFlags.VALUE3.first_value_name, 'GI_MARSHALLING_TESTS_NO_TYPE_FLAGS_VALUE3')

    def test_flags_in(self):
        GIMarshallingTests.no_type_flags_in(GIMarshallingTests.NoTypeFlags.VALUE2)
        # result of __or__() operation should still be valid instance, not an int.
        GIMarshallingTests.no_type_flags_in(GIMarshallingTests.NoTypeFlags.VALUE2 | GIMarshallingTests.NoTypeFlags.VALUE2)
        GIMarshallingTests.no_type_flags_in_zero(Number(0))
        self.assertRaises(TypeError, GIMarshallingTests.no_type_flags_in, 1 << 1)
        self.assertRaises(TypeError, GIMarshallingTests.no_type_flags_in, 'GIMarshallingTests.NoTypeFlags.VALUE2')

    def test_flags_out(self):
        flags = GIMarshallingTests.no_type_flags_out()
        self.assertTrue(isinstance(flags, GIMarshallingTests.NoTypeFlags))
        self.assertEqual(flags, GIMarshallingTests.NoTypeFlags.VALUE2)

    def test_flags_inout(self):
        flags = GIMarshallingTests.no_type_flags_inout(GIMarshallingTests.NoTypeFlags.VALUE2)
        self.assertTrue(isinstance(flags, GIMarshallingTests.NoTypeFlags))
        self.assertEqual(flags, GIMarshallingTests.NoTypeFlags.VALUE1)
class TestStructure(unittest.TestCase):
    """Marshalling tests for C structs: simple, nested, pointer, boxed and unions.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual`` throughout.
    """

    def test_simple_struct(self):
        self.assertTrue(issubclass(GIMarshallingTests.SimpleStruct, GObject.GPointer))
        struct = GIMarshallingTests.SimpleStruct()
        self.assertTrue(isinstance(struct, GIMarshallingTests.SimpleStruct))
        # Fields default to zero and are read/write.
        self.assertEqual(0, struct.long_)
        self.assertEqual(0, struct.int8)
        struct.long_ = 6
        struct.int8 = 7
        self.assertEqual(6, struct.long_)
        self.assertEqual(7, struct.int8)
        del struct

    def test_nested_struct(self):
        struct = GIMarshallingTests.NestedStruct()
        self.assertTrue(isinstance(struct.simple_struct, GIMarshallingTests.SimpleStruct))
        struct.simple_struct.long_ = 42
        self.assertEqual(42, struct.simple_struct.long_)
        del struct

    def test_not_simple_struct(self):
        struct = GIMarshallingTests.NotSimpleStruct()
        self.assertEqual(None, struct.pointer)

    def test_simple_struct_return(self):
        struct = GIMarshallingTests.simple_struct_returnv()
        self.assertTrue(isinstance(struct, GIMarshallingTests.SimpleStruct))
        self.assertEqual(6, struct.long_)
        self.assertEqual(7, struct.int8)
        del struct

    def test_simple_struct_in(self):
        struct = GIMarshallingTests.SimpleStruct()
        struct.long_ = 6
        struct.int8 = 7
        GIMarshallingTests.SimpleStruct.inv(struct)
        del struct
        # Passing a struct of the wrong type, or None, must raise.
        struct = GIMarshallingTests.NestedStruct()
        self.assertRaises(TypeError, GIMarshallingTests.SimpleStruct.inv, struct)
        del struct
        self.assertRaises(TypeError, GIMarshallingTests.SimpleStruct.inv, None)

    def test_simple_struct_method(self):
        struct = GIMarshallingTests.SimpleStruct()
        struct.long_ = 6
        struct.int8 = 7
        struct.method()
        del struct
        # An unbound call without an instance must raise.
        self.assertRaises(TypeError, GIMarshallingTests.SimpleStruct.method)

    def test_pointer_struct(self):
        self.assertTrue(issubclass(GIMarshallingTests.PointerStruct, GObject.GPointer))
        struct = GIMarshallingTests.PointerStruct()
        self.assertTrue(isinstance(struct, GIMarshallingTests.PointerStruct))
        del struct

    def test_pointer_struct_return(self):
        struct = GIMarshallingTests.pointer_struct_returnv()
        self.assertTrue(isinstance(struct, GIMarshallingTests.PointerStruct))
        self.assertEqual(42, struct.long_)
        del struct

    def test_pointer_struct_in(self):
        struct = GIMarshallingTests.PointerStruct()
        struct.long_ = 42
        struct.inv()
        del struct

    def test_boxed_struct(self):
        self.assertTrue(issubclass(GIMarshallingTests.BoxedStruct, GObject.GBoxed))
        struct = GIMarshallingTests.BoxedStruct()
        self.assertTrue(isinstance(struct, GIMarshallingTests.BoxedStruct))
        self.assertEqual(0, struct.long_)
        self.assertEqual([], struct.g_strv)
        del struct

    def test_boxed_struct_new(self):
        struct = GIMarshallingTests.BoxedStruct.new()
        self.assertTrue(isinstance(struct, GIMarshallingTests.BoxedStruct))
        del struct

    def test_boxed_struct_copy(self):
        struct = GIMarshallingTests.BoxedStruct()
        new_struct = struct.copy()
        self.assertTrue(isinstance(new_struct, GIMarshallingTests.BoxedStruct))
        del new_struct
        del struct

    def test_boxed_struct_return(self):
        struct = GIMarshallingTests.boxed_struct_returnv()
        self.assertTrue(isinstance(struct, GIMarshallingTests.BoxedStruct))
        self.assertEqual(42, struct.long_)
        self.assertEqual(['0', '1', '2'], struct.g_strv)
        del struct

    def test_boxed_struct_in(self):
        struct = GIMarshallingTests.BoxedStruct()
        struct.long_ = 42
        struct.inv()
        del struct

    def test_boxed_struct_out(self):
        struct = GIMarshallingTests.boxed_struct_out()
        self.assertTrue(isinstance(struct, GIMarshallingTests.BoxedStruct))
        self.assertEqual(42, struct.long_)
        del struct

    def test_boxed_struct_inout(self):
        in_struct = GIMarshallingTests.BoxedStruct()
        in_struct.long_ = 42
        out_struct = GIMarshallingTests.boxed_struct_inout(in_struct)
        self.assertTrue(isinstance(out_struct, GIMarshallingTests.BoxedStruct))
        self.assertEqual(0, out_struct.long_)
        del in_struct
        del out_struct

    def test_union(self):
        union = GIMarshallingTests.Union()
        self.assertTrue(isinstance(union, GIMarshallingTests.Union))
        new_union = union.copy()
        self.assertTrue(isinstance(new_union, GIMarshallingTests.Union))
        del union
        del new_union

    def test_union_return(self):
        union = GIMarshallingTests.union_returnv()
        self.assertTrue(isinstance(union, GIMarshallingTests.Union))
        self.assertEqual(42, union.long_)
        del union

    def test_union_in(self):
        union = GIMarshallingTests.Union()
        union.long_ = 42
        union.inv()
        del union

    def test_union_method(self):
        union = GIMarshallingTests.Union()
        union.long_ = 42
        union.method()
        del union
        # An unbound call without an instance must raise.
        self.assertRaises(TypeError, GIMarshallingTests.Union.method)
class TestGObject(unittest.TestCase):
    """Marshalling and reference-counting tests for GObject instances.

    Fix: replaced the deprecated ``assertEquals`` alias (removed in
    Python 3.12) with ``assertEqual`` throughout.
    """

    def test_object(self):
        self.assertTrue(issubclass(GIMarshallingTests.Object, GObject.GObject))
        object_ = GIMarshallingTests.Object()
        self.assertTrue(isinstance(object_, GIMarshallingTests.Object))
        self.assertEqual(object_.__grefcount__, 1)

    def test_object_new(self):
        object_ = GIMarshallingTests.Object.new(42)
        self.assertTrue(isinstance(object_, GIMarshallingTests.Object))
        self.assertEqual(object_.__grefcount__, 1)

    def test_object_int(self):
        object_ = GIMarshallingTests.Object(int=42)
        self.assertEqual(object_.int_, 42)
        # FIXME: Don't work yet.
        # object_.int_ = 0
        # self.assertEqual(object_.int_, 0)

    def test_object_static_method(self):
        GIMarshallingTests.Object.static_method()

    def test_object_method(self):
        GIMarshallingTests.Object(int=42).method()
        # Wrong receiver type or missing receiver must raise.
        self.assertRaises(TypeError, GIMarshallingTests.Object.method, GObject.GObject())
        self.assertRaises(TypeError, GIMarshallingTests.Object.method)

    def test_sub_object(self):
        self.assertTrue(issubclass(GIMarshallingTests.SubObject, GIMarshallingTests.Object))
        object_ = GIMarshallingTests.SubObject()
        self.assertTrue(isinstance(object_, GIMarshallingTests.SubObject))

    def test_sub_object_new(self):
        self.assertRaises(TypeError, GIMarshallingTests.SubObject.new, 42)

    def test_sub_object_static_method(self):
        object_ = GIMarshallingTests.SubObject()
        object_.static_method()

    def test_sub_object_method(self):
        object_ = GIMarshallingTests.SubObject(int=42)
        object_.method()

    def test_sub_object_sub_method(self):
        object_ = GIMarshallingTests.SubObject()
        object_.sub_method()

    def test_sub_object_overwritten_method(self):
        object_ = GIMarshallingTests.SubObject()
        object_.overwritten_method()
        self.assertRaises(TypeError, GIMarshallingTests.SubObject.overwritten_method, GIMarshallingTests.Object())

    def test_sub_object_int(self):
        object_ = GIMarshallingTests.SubObject()
        self.assertEqual(object_.int_, 0)
        # FIXME: Don't work yet.
        # object_.int_ = 42
        # self.assertEqual(object_.int_, 42)

    def test_object_none_return(self):
        # (transfer none) return: C side keeps a ref, so refcount is 2.
        object_ = GIMarshallingTests.Object.none_return()
        self.assertTrue(isinstance(object_, GIMarshallingTests.Object))
        self.assertEqual(object_.__grefcount__, 2)

    def test_object_full_return(self):
        # (transfer full) return: Python owns the only ref.
        object_ = GIMarshallingTests.Object.full_return()
        self.assertTrue(isinstance(object_, GIMarshallingTests.Object))
        self.assertEqual(object_.__grefcount__, 1)

    def test_object_none_in(self):
        object_ = GIMarshallingTests.Object(int=42)
        GIMarshallingTests.Object.none_in(object_)
        self.assertEqual(object_.__grefcount__, 1)
        object_ = GIMarshallingTests.SubObject(int=42)
        GIMarshallingTests.Object.none_in(object_)
        object_ = GObject.GObject()
        self.assertRaises(TypeError, GIMarshallingTests.Object.none_in, object_)
        self.assertRaises(TypeError, GIMarshallingTests.Object.none_in, None)

    def test_object_none_out(self):
        object_ = GIMarshallingTests.Object.none_out()
        self.assertTrue(isinstance(object_, GIMarshallingTests.Object))
        self.assertEqual(object_.__grefcount__, 2)
        # The C side hands back the same singleton instance every time.
        new_object = GIMarshallingTests.Object.none_out()
        self.assertTrue(new_object is object_)

    def test_object_full_out(self):
        object_ = GIMarshallingTests.Object.full_out()
        self.assertTrue(isinstance(object_, GIMarshallingTests.Object))
        self.assertEqual(object_.__grefcount__, 1)

    def test_object_none_inout(self):
        object_ = GIMarshallingTests.Object(int=42)
        new_object = GIMarshallingTests.Object.none_inout(object_)
        self.assertTrue(isinstance(new_object, GIMarshallingTests.Object))
        self.assertFalse(object_ is new_object)
        self.assertEqual(object_.__grefcount__, 1)
        self.assertEqual(new_object.__grefcount__, 2)
        new_new_object = GIMarshallingTests.Object.none_inout(object_)
        self.assertTrue(new_new_object is new_object)
        GIMarshallingTests.Object.none_inout(GIMarshallingTests.SubObject(int=42))

    def test_object_full_inout(self):
        object_ = GIMarshallingTests.Object(int=42)
        new_object = GIMarshallingTests.Object.full_inout(object_)
        self.assertTrue(isinstance(new_object, GIMarshallingTests.Object))
        self.assertFalse(object_ is new_object)
        self.assertEqual(object_.__grefcount__, 2)
        self.assertEqual(new_object.__grefcount__, 1)

    # FIXME: Doesn't actually return the same object.
    # def test_object_inout_same(self):
    #     object_ = GIMarshallingTests.Object()
    #     new_object = GIMarshallingTests.object_full_inout(object_)
    #     self.assertTrue(object_ is new_object)
    #     self.assertEqual(object_.__grefcount__, 1)
class TestPythonGObject(unittest.TestCase):
    """Tests for Python subclasses of introspected GObjects (vfunc dispatch).

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    class Object(GIMarshallingTests.Object):
        # Parameter is named 'int' to mirror the GObject property name;
        # callers pass it as a keyword (int=...).
        def __init__(self, int):
            GIMarshallingTests.Object.__init__(self)
            self.val = None

        def method(self):
            # Don't call super, which asserts that self.int == 42.
            pass

        def do_method_int8_in(self, int8):
            self.val = int8

        def do_method_int8_out(self):
            return 42

        def do_method_with_default_implementation(self, int8):
            GIMarshallingTests.Object.do_method_with_default_implementation(self, int8)
            self.props.int += int8

    class SubObject(GIMarshallingTests.SubObject):
        def __init__(self, int):
            GIMarshallingTests.SubObject.__init__(self)
            self.val = None

        def do_method_with_default_implementation(self, int8):
            self.val = int8

    def test_object(self):
        self.assertTrue(issubclass(self.Object, GIMarshallingTests.Object))
        object_ = self.Object(int=42)
        self.assertTrue(isinstance(object_, self.Object))

    def test_object_method(self):
        self.Object(int=0).method()

    def test_object_vfuncs(self):
        object_ = self.Object(int=42)
        object_.method_int8_in(84)
        self.assertEqual(object_.val, 84)
        self.assertEqual(object_.method_int8_out(), 42)
        object_.method_with_default_implementation(42)
        self.assertEqual(object_.props.int, 84)

        # A subclass that does not override the vfunc falls back to the
        # default C implementation.
        class ObjectWithoutVFunc(GIMarshallingTests.Object):
            def __init__(self, int):
                GIMarshallingTests.Object.__init__(self)

        object_ = ObjectWithoutVFunc(int=42)
        object_.method_with_default_implementation(84)
        self.assertEqual(object_.props.int, 84)

    def test_subobject_parent_vfunc(self):
        object_ = self.SubObject(int=81)
        object_.method_with_default_implementation(87)
        self.assertEqual(object_.val, 87)

    def test_dynamic_module(self):
        from gi.module import DynamicGObjectModule
        self.assertTrue(isinstance(GObject, DynamicGObjectModule))
        # compare the same enum from both the pygobject attrs and gi GObject attrs
        self.assertEqual(GObject.SIGNAL_ACTION, GObject.SignalFlags.ACTION)
        # compare a static gobject attr with a dynamic GObject attr
        self.assertEqual(GObject.GObject, gobject.GObject)

    def test_subobject_non_vfunc_do_method(self):
        class PythonObjectWithNonVFuncDoMethod:
            def do_not_a_vfunc(self):
                return 5

        class ObjectOverrideNonVFuncDoMethod(GIMarshallingTests.Object, PythonObjectWithNonVFuncDoMethod):
            def do_not_a_vfunc(self):
                value = super(ObjectOverrideNonVFuncDoMethod, self).do_not_a_vfunc()
                return 13 + value

        object_ = ObjectOverrideNonVFuncDoMethod()
        self.assertEqual(18, object_.do_not_a_vfunc())

    def test_native_function_not_set_in_subclass_dict(self):
        # Previously, GI was setting virtual functions on the class as well
        # as any *native* class that subclasses it. Here we check that it is only
        # set on the class that the method is originally from.
        self.assertTrue('do_method_with_default_implementation' in GIMarshallingTests.Object.__dict__)
        self.assertTrue('do_method_with_default_implementation' not in GIMarshallingTests.SubObject.__dict__)

        # Here we check that accessing a vfunc from the subclass returns the same wrapper object,
        # meaning that multiple wrapper objects have not been created for the same vfunc.
        func1 = GIMarshallingTests.Object.do_method_with_default_implementation
        func2 = GIMarshallingTests.SubObject.do_method_with_default_implementation
        if sys.version_info < (3, 0):
            func1 = func1.im_func
            func2 = func2.im_func
        self.assertTrue(func1 is func2)

    def test_subobject_with_interface_and_non_vfunc_do_method(self):
        # There was a bug for searching for vfuncs in interfaces. It was
        # triggered by having a do_* method that wasn't overriding
        # a native vfunc, as well as inheriting from an interface.
        class GObjectSubclassWithInterface(GObject.GObject, GIMarshallingTests.Interface):
            def do_method_not_a_vfunc(self):
                pass
class TestMultiOutputArgs(unittest.TestCase):
    """Multiple out-arguments are returned to Python as a tuple.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_int_out_out(self):
        self.assertEqual((6, 7), GIMarshallingTests.int_out_out())

    def test_int_return_out(self):
        self.assertEqual((6, 7), GIMarshallingTests.int_return_out())
class TestGErrorException(unittest.TestCase):
    """A set GError is raised as GObject.GError with domain/code/message populated.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_gerror_exception(self):
        self.assertRaises(GObject.GError, GIMarshallingTests.gerror)
        try:
            GIMarshallingTests.gerror()
        except Exception:
            # sys.exc_info() is used instead of 'except ... as e' for
            # compatibility with the older Python versions this suite targets.
            etype, e = sys.exc_info()[:2]
            self.assertEqual(e.domain, GIMarshallingTests.CONSTANT_GERROR_DOMAIN)
            self.assertEqual(e.code, GIMarshallingTests.CONSTANT_GERROR_CODE)
            self.assertEqual(e.message, GIMarshallingTests.CONSTANT_GERROR_MESSAGE)
# Interface
class TestInterfaces(unittest.TestCase):
    """Tests for GInterface wrappers, Python implementations and MRO handling.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_wrapper(self):
        self.assertTrue(issubclass(GIMarshallingTests.Interface, GObject.GInterface))
        self.assertEqual(GIMarshallingTests.Interface.__gtype__.name, 'GIMarshallingTestsInterface')
        # Interfaces cannot be instantiated directly.
        self.assertRaises(NotImplementedError, GIMarshallingTests.Interface)

    def test_implementation(self):
        class TestInterfaceImpl(GObject.GObject, GIMarshallingTests.Interface):
            def __init__(self):
                GObject.GObject.__init__(self)
                self.val = None

            def do_test_int8_in(self, int8):
                self.val = int8

        self.assertTrue(issubclass(TestInterfaceImpl, GIMarshallingTests.Interface))

        instance = TestInterfaceImpl()
        self.assertTrue(isinstance(instance, GIMarshallingTests.Interface))
        GIMarshallingTests.test_interface_test_int8_in(instance, 42)
        self.assertEqual(instance.val, 42)

        # vfunc dispatch must also work through further subclassing.
        class TestInterfaceImplA(TestInterfaceImpl):
            pass

        class TestInterfaceImplB(TestInterfaceImplA):
            pass

        instance = TestInterfaceImplA()
        GIMarshallingTests.test_interface_test_int8_in(instance, 42)
        self.assertEqual(instance.val, 42)

    def test_mro(self):
        # there was a problem with Python bailing out because of
        # http://en.wikipedia.org/wiki/Diamond_problem with interfaces,
        # which shouldn't really be a problem.
        class TestInterfaceImpl(GObject.GObject, GIMarshallingTests.Interface):
            pass

        class TestInterfaceImpl2(GIMarshallingTests.Interface,
                                 TestInterfaceImpl):
            pass

        class TestInterfaceImpl3(TestInterfaceImpl,
                                 GIMarshallingTests.Interface2):
            pass
class TestInterfaceClash(unittest.TestCase):
    """Implementing two interfaces whose vfuncs clash must be rejected."""

    def test_clash(self):
        def create_clash():
            # Both Interface and Interface2 declare test_int8_in, so a single
            # do_test_int8_in override is ambiguous.
            class TestClash(GObject.GObject, GIMarshallingTests.Interface, GIMarshallingTests.Interface2):
                def do_test_int8_in(self, int8):
                    pass

            TestClash()

        self.assertRaises(TypeError, create_clash)
class TestOverrides(unittest.TestCase):
    """Tests that gi.overrides wrappers replace the introspected classes.

    Fix: replaced the deprecated ``assertEquals`` alias with ``assertEqual``.
    """

    def test_constant(self):
        self.assertEqual(GIMarshallingTests.OVERRIDES_CONSTANT, 7)

    def test_struct(self):
        # Test that the constructor has been overridden.
        struct = GIMarshallingTests.OverridesStruct(42)
        self.assertTrue(isinstance(struct, GIMarshallingTests.OverridesStruct))
        # Test that the method has been overridden.
        self.assertEqual(6, struct.method())
        del struct
        # Test that the overrides wrapper has been registered.
        struct = GIMarshallingTests.overrides_struct_returnv()
        self.assertTrue(isinstance(struct, GIMarshallingTests.OverridesStruct))
        del struct

    def test_object(self):
        # Test that the constructor has been overridden.
        object_ = GIMarshallingTests.OverridesObject(42)
        self.assertTrue(isinstance(object_, GIMarshallingTests.OverridesObject))
        # Test that the alternate constructor has been overridden.
        object_ = GIMarshallingTests.OverridesObject.new(42)
        self.assertTrue(isinstance(object_, GIMarshallingTests.OverridesObject))
        # Test that the method has been overridden.
        self.assertEqual(6, object_.method())
        # Test that the overrides wrapper has been registered.
        object_ = GIMarshallingTests.OverridesObject.returnv()
        self.assertTrue(isinstance(object_, GIMarshallingTests.OverridesObject))

    def test_module_name(self):
        self.assertEqual(GIMarshallingTests.OverridesStruct.__module__, 'gi.overrides.GIMarshallingTests')
        self.assertEqual(GObject.InitiallyUnowned.__module__, 'gi.repository.GObject')
class TestDir(unittest.TestCase):
    """dir() on a dynamic module must expose its members.

    Fixes: the local variable 'list' shadowed the builtin; renamed to
    'members'/'modules'. Membership checks use assertIn for clearer
    failure messages.
    """

    def test_members_list(self):
        members = dir(GIMarshallingTests)
        self.assertIn('OverridesStruct', members)
        self.assertIn('BoxedStruct', members)
        self.assertIn('OVERRIDES_CONSTANT', members)
        self.assertIn('GEnum', members)
        self.assertIn('int32_return_max', members)

    def test_modules_list(self):
        import gi.repository
        modules = dir(gi.repository)
        self.assertIn('GIMarshallingTests', modules)

        # FIXME: test to see if a module which was not imported is in the list
        # we should be listing every typelib we find, not just the ones
        # which are imported
        #
        # to test this I recommend we compile a fake module which
        # our tests would never import and check to see if it is
        # in the list:
        #
        # self.assertIn('DoNotImportDummyTests', modules)
class TestGErrorArrayInCrash(unittest.TestCase):
    # Previously there was a bug in invoke, in which C arrays were unwrapped
    # from inside GArrays to be passed to the C function. But when a GError was
    # set, invoke would attempt to free the C array as if it were a GArray.
    # This crash is only for C arrays. It does not happen for C functions which
    # take in GArrays. See https://bugzilla.gnome.org/show_bug.cgi?id=642708
    def test_gerror_array_in_crash(self):
        # Regression test: merely surviving this call (raising GError instead
        # of crashing) is the point.
        self.assertRaises(GObject.GError, GIMarshallingTests.gerror_array_in, [1, 2, 3])
| codeparrot/github-code-clean |
# Copyright 2022 The T5 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preprocessors for T5 Tasks."""
# TODO(adarob): Move some of the more general preprocessors to seqio.
import collections
import functools
import math
import re
from typing import Callable, Mapping, Optional, Sequence, Union
import uuid
from absl import logging
import babel
import gin
import seqio
import tensorflow.compat.v2 as tf
# We disable no-value-for-parameter since the seqio.map_over_dataset leads to
# a false positive when seeds are provided.
# pylint:disable=no-value-for-parameter
# Parallelism level used for all tf.data map/filter calls in this module.
AUTOTUNE = tf.data.experimental.AUTOTUNE

# Type alias for a single example: feature name -> tensor.
FeatureType = Mapping[str, tf.Tensor]

# Re-export common seqio preprocessors so tasks can reference them from here.
rekey = seqio.preprocessors.rekey
tokenize = seqio.preprocessors.tokenize
@seqio.map_over_dataset
def translate(x, source_language, target_language):
  """Map a translation example to a text2text 'inputs'/'targets' pair.

  Given an example such as:
    {'de': 'Das ist gut.', 'en': 'That is good.'}
  with source_language='de' and target_language='en', produces:
    {'inputs': 'translate German to English: Das ist gut.',
     'targets': 'That is good.'}

  Args:
    x: an example to process.
    source_language: source language code (e.g. 'en') to translate from.
    target_language: target language code (e.g. 'de') to translate to.

  Returns:
    A preprocessed example with the format listed above.
  """
  # Only 2-letter codes are supported (e.g. zh-cn falls back to zh); warn
  # whenever an extended code is truncated.
  for lang_code in (source_language, target_language):
    if lang_code[:2] != lang_code:
      logging.warning(
          'Extended language code %s not supported. Falling back on %s.',
          lang_code, lang_code[:2]
      )
  source_name = babel.Locale(source_language[:2]).english_name
  target_name = babel.Locale(target_language[:2]).english_name
  # e.g. 'translate German to English: '
  prompt = 'translate {} to {}: '.format(source_name, target_name)
  return {
      'inputs': tf.strings.join([prompt, x[source_language]]),
      'targets': x[target_language],
  }
@seqio.map_over_dataset
def summarize(x, article_key, summary_key):
  """Map a summarization example to a text2text 'inputs'/'targets' pair.

  Given an example such as:
    {'article': <article>, 'highlights': <summary>}
  with article_key='article' and summary_key='highlights', produces:
    {'inputs': 'summarize: <article>', 'targets': <summary>}

  Args:
    x: an example to process.
    article_key: the feature key for the article to summarize.
    summary_key: the feature key for the target summary.

  Returns:
    A preprocessed example with the format listed above.
  """
  inputs = tf.strings.join(['summarize:', x[article_key]], separator=' ')
  return {
      'inputs': inputs,
      'targets': x[summary_key],
  }
# Unicode ranges for characters in non-spaced languages, used by
# pad_nonspaced_languages below to insert spaces around each such character.
# https://en.wikipedia.org/wiki/Category:Writing_systems_without_word_boundaries
# https://en.wikipedia.org/wiki/Han_unification#Unicode_ranges
# https://linguistics.stackexchange.com/questions/6131
NON_SPACED_LANGUAGE_RANGES = (
    '\u1000-\u104f',  # Burmese
    '\u4e00-\u9fff',  # CJK Unified Ideographs
    '\u3400-\u4dbf',  # CJK Unified Ideographs Extension A
    '\uf900-\ufaff',  # CJK Compatibility Ideographs
    '\u2e80-\u2eff',  # CJK Radicals Supplement
    '\u31c0-\u31ef',  # CJK Strokes
    '\u3000-\u303f',  # CJK Symbols and Punctuation
    '\u3040-\u309f',  # Japanese Hiragana
    '\u30a0-\u30ff',  # Japanese Katakana
    '\ua980-\ua9df',  # Javanese
    '\u1780-\u17ff',  # Khmer
    '\u19e0-\u19ff',  # Khmer Symbols
    '\u0e80-\u0eff',  # Lao
    '\u1980-\u19df',  # Tai Lue
    '\u1a20-\u1aaf',  # Tai Tham
    '\u0e00-\u0e7f',  # Thai
    '\u0f00-\u0fff',  # Tibetan
)
@seqio.map_over_dataset
def pad_nonspaced_languages(x, text_key='text'):
  """Surround each character from a non-spaced language with spaces.

  Args:
    x: an example to process.
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.

  Returns:
    A preprocessed example.
  """
  output = dict(x)
  # One character class covering every non-spaced-language range.
  char_class = u'([{}])'.format(''.join(NON_SPACED_LANGUAGE_RANGES))
  padded = tf.strings.regex_replace(output[text_key], char_class, r' \1 ')
  # Collapse the runs of whitespace this introduces down to single spaces.
  output[text_key] = tf.strings.regex_replace(padded, r'\s+', ' ')
  return output
def _pad_punctuation(text):
  """Return `text` with every non-word character set off by single spaces."""
  spaced = tf.strings.regex_replace(text, r'(\W)', r' \1 ')
  # Collapse any resulting runs of whitespace into one space.
  return tf.strings.regex_replace(spaced, r'\s+', ' ')
def _string_join(lst):
  """Space-join a list of strings, collapsing repeated whitespace."""
  joined = tf.strings.join(lst, separator=' ')
  return tf.strings.regex_replace(joined, r'\s+', ' ')
def trivia_qa(dataset):
  """Convert a TriviaQA example to multiple flattened examples.

  TriviaQA produces examples with this form:
    {'entity_pages': {dict of wiki entities},
     'search_results': <dict of web search results>,
     'answer': {dict of all answers}, 'question': <question>,
     'question_id': <question_id>, 'question_source': <question_source>}

  This function will return flattened examples of the format:
    {'inputs': 'question: <question> context: <article>'
     'targets': 'answer: <sampled answer>'}

  Args:
    dataset: a tf.data.Dataset to process.

  Returns:
    A preprocessed tf.data.Dataset with the format listed above.
  """

  def triviaqa_question_answer_context(x):
    """Extracts matched contexts and answers.

    Returns all matched (question-context, answer) pairs.

    Args:
      x: A tfds sample.

    Returns:
      Flattened samples: (question-context, answer).
    """
    contexts = []
    if 'entity_pages' in x:
      contexts.append(x['entity_pages']['wiki_context'])
    if 'search_results' in x:
      contexts.append(x['search_results']['search_context'])
    contexts = tf.concat(contexts, 0)

    q = _pad_punctuation(x['question'])
    answers = x['answer']['normalized_aliases']

    # Every (context, answer) combination is checked; index i below maps to
    # context i // |answers| and answer i % |answers|.
    combination_size = tf.size(answers)*tf.size(contexts)
    find_answers = tf.TensorArray(
        tf.bool, size=combination_size, dynamic_size=True)
    selected_answers = tf.TensorArray(
        tf.string, size=combination_size, dynamic_size=True)
    join_q_c = tf.TensorArray(
        tf.string, size=combination_size, dynamic_size=True)

    def cond_fn(i, find_answers, selected_answers, join_q_c):
      del find_answers, selected_answers, join_q_c  # Unused
      return tf.less(i, combination_size)

    def body_fn(i, find_answers, selected_answers, join_q_c):
      """Find answers from contexts and join."""
      context_idx = tf.math.floordiv(i, tf.size(answers))
      answer_idx = tf.math.mod(i, tf.size(answers))

      a = _pad_punctuation(answers[answer_idx])
      # '.*<answer>.*' — a full match against this pattern is a substring
      # test for the answer inside the context (case-insensitive).
      a_ = tf.strings.join(['.*', a, '.*'])
      c = _pad_punctuation(contexts[context_idx])
      find_a = tf.strings.regex_full_match(
          tf.strings.lower(c),
          tf.strings.lower(a_))
      find_answers = find_answers.write(i, find_a)
      selected_answers = selected_answers.write(i, a)

      join_q_c_str = _string_join(['question:', q, 'context:', c])
      join_q_c = join_q_c.write(i, join_q_c_str)
      return (i + 1, find_answers, selected_answers, join_q_c)

    _, find_answers, selected_answers, join_q_c = tf.while_loop(
        cond_fn,
        body_fn,
        loop_vars=[
            tf.constant(0), find_answers, selected_answers,
            join_q_c
        ])
    find_answers = find_answers.stack()
    selected_answers = selected_answers.stack()
    join_q_c = join_q_c.stack()

    # Keep only the combinations where the answer actually occurs in the
    # context.
    selected_answers = tf.boolean_mask(selected_answers, find_answers)
    selected_join_q_c = tf.boolean_mask(join_q_c, find_answers)
    return selected_join_q_c, selected_answers

  def my_fn(x):
    """Create TriviaQA example."""
    join_q_c, a = triviaqa_question_answer_context(x)
    return {
        'inputs': join_q_c,
        'targets': a
    }

  dataset = dataset.map(my_fn, num_parallel_calls=AUTOTUNE)
  # Each input example became a batch of matched pairs; flatten them back
  # into individual examples.
  return dataset.unbatch()
@seqio.map_over_dataset
def squad(x, include_context=True):
  """Map a SQuAD example to a text2text pair.

  SQuAD produces examples with this form:
    {'id': <id>, context': <article>, 'question': <question>,
     'answers': { 'text': [<n answers>] }}
  This function will return examples of the format:
    {'inputs': 'question: <question> context: <article>',
     'targets': '<answer_0>',
     'id': <id>, 'question': <question>, 'context': <context>,
     'answers': [<n answers>]},

  Args:
    x: an example to process.
    include_context: a boolean

  Returns:
    A preprocessed example with the format listed above.
  """
  answers = _pad_punctuation(x['answers']['text'])
  question = _pad_punctuation(x['question'])
  context = _pad_punctuation(x['context'])
  if include_context:
    inputs = _string_join(['question:', question, 'context:', context])
  else:
    inputs = _string_join(['squad trivia question:', question])
  return {
      'inputs': inputs,
      # The first answer is used as the training target; the full list is
      # passed through for evaluation.
      'targets': answers[0],
      'id': x['id'],
      'context': context,
      'question': question,
      'answers': answers
  }
def _span_answer(context, answer_text):
  """Finds start/end indices of answer_text in context after space tokenization.

  If answer_tokens is not a sublist of context_tokens, returns empty string.

  Args:
    context: 0-d string tensor
    answer_text: 0-d string

  Returns:
    A string tensor.
  """
  def space_tok(s):
    """Replace non-word chars with space then split on space."""
    s = tf.strings.regex_replace(s, r'\W', ' ')
    return tf.strings.split(input=[s], sep=' ').values

  def find_subseq(n, h):
    """Finds index of needle subsequence inside haystack.

    Args:
      n: 1-d tensor
      h: 1-d tensor same type as n

    Returns:
      Index of start of n if found; otherwise -1.
    """
    l_n = tf.size(n)
    l_h = tf.size(h)
    found = -1
    # The last valid start index is l_h - l_n (inclusive), so the exclusive
    # bound must be l_h - l_n + 1; the previous bound of l_h - l_n missed
    # answers that end exactly at the end of the context.
    for i in tf.range(0, l_h - l_n + 1):
      if tf.reduce_all(tf.equal(h[i:i+l_n], n)):
        found = i
        break
    return found

  answer_tokens = space_tok(answer_text)
  context_tokens = space_tok(context)
  start = find_subseq(answer_tokens, context_tokens)
  end = start + tf.size(answer_tokens) - 1
  # Just take the first candidate that matches exactly.
  if tf.equal(start, -1):
    return ''
  return tf.strings.format('start: {} end: {}', [start, end])
def squad_span_space_tokenized(dataset):
  """Convert SQuAD examples to a text2text pair with span output.

  SQuAD produces examples with this form:
    {'context': <article>, 'question': <question>,
     'answers': { 'text': [<all answers>] }}

  This function returns examples with the format
    {'inputs': 'context: <article> question: <question>',
     'targets': 'start: <start_index> end: <end_index>'}
  where <start_index> and <end_index> are inclusive space-tokenized span
  indices. Examples whose tokenized answer cannot be located in the
  tokenized context are dropped.

  Args:
    dataset: a tf.data.Dataset to process.

  Returns:
    A preprocessed tf.data.Dataset with the format listed above.
  """
  def to_span_target(x):
    """Replace the answer-text target with a 'start: .. end: ..' span."""
    example = dict(x)
    example['targets'] = _span_answer(x['context'], x['targets'])
    return example

  # First build the standard question/context inputs, then rewrite targets.
  dataset = squad(dataset)
  dataset = dataset.map(to_span_target, num_parallel_calls=AUTOTUNE)
  # An empty target means the answer was not found; drop those examples.
  return dataset.filter(lambda x: tf.strings.length(x['targets']) > 0)
def random_split_text(dataset,
                      text_key='text',
                      min_words_per_segment=16,
                      max_words_per_segment=512,
                      max_words_total=8192):
  """Randomly split single-string examples into multiple examples each.

  Segment lengths are chosen according to a log-uniform distribution.
  Each incoming string is chopped into multiple equal-length examples
  with the last one possibly being shorter.

  If the input string is longer than max_words_total, then we use one random
  chunk and discard the rest. This may help with model stability.

  The intended use case is to break up long text examples for use in
  unsupervised transfer-learning.

  We don't really want to use this preprocessor for any dataset which has a
  well-defined evaluation procedure. If we apply this preprocessor e.g. in an
  MT component, then the evaluation job will randomly split text when
  evaluating and the BLEU will get funky.

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the key text_key
    text_key: a string
    min_words_per_segment: an integer
    max_words_per_segment: an integer
    max_words_total: an integer

  Returns:
    a dataset
  """
  def random_chunk(x, chunk_size, seed):
    """Pick a random chunk of a 1d Tensor.

    The tensor is divided into chunks of length chunk_size, with the last
    chunk being potentially smaller. A random chunk is returned.

    Args:
      x: a 1d tf.Tensor.
      chunk_size: an integer.
      seed: int32 [2]-Tensor, the random seed.

    Returns:
      a 1d tf.Tensor with length <= chunk_size.
    """
    size = tf.size(x)
    # Ceiling division, kept at >= 1 so empty input still yields one chunk.
    num_chunks = tf.maximum(1, (size - 1) // chunk_size + 1)
    chunk_num = tf.random.stateless_uniform(
        [],
        seed=seed,
        minval=0,
        maxval=num_chunks,
        dtype=tf.int32)
    return x[chunk_size * chunk_num:chunk_size * (chunk_num + 1)]

  @seqio.map_over_dataset(num_seeds=2)
  def my_fn(x, seeds):
    """Split one string into multiple strings.

    Args:
      x: a feature dictionary
      seeds: an int32 Tensor, shaped (2, 2), the random seeds.

    Returns:
      a feature dictionary
    """
    text = x[text_key]
    words = tf.strings.split([text]).values
    if max_words_total:
      words = random_chunk(words, max_words_total, seed=seeds[0])
    n_words = tf.size(words)
    # first pick a length (number of words per segment) — log-uniform in
    # [min_words_per_segment, max_words_per_segment).
    length = tf.cast(
        tf.exp(
            tf.random.stateless_uniform(
                [],
                minval=math.log(min_words_per_segment),
                maxval=math.log(max_words_per_segment),
                seed=seeds[1],
            )
        ),
        tf.int32)
    # Pad to a multiple of length, then use tf.reshape to split up the words
    # into num_segments segments each of the given length.
    num_segments = tf.cast(
        tf.math.ceil(
            tf.cast(n_words, tf.float32) / tf.cast(length, tf.float32)
        ),
        tf.int32)
    padding = num_segments * length - n_words
    words = tf.pad(words, [[0, padding]])
    words = tf.reshape(words, [-1, length])
    # Finally, join with spaces and strip. The padding turns into a bunch of
    # spaces that get stripped out.
    words = tf.strings.reduce_join(words, axis=1, separator=' ')
    return {text_key: tf.strings.strip(words)}

  # my_fn produces a batch of segments per input; unbatch to one example
  # per segment.
  return my_fn(dataset).unbatch()
def split_text_to_words(dataset, text_key='text', min_num_words=2):
  """Add a 'words' feature and drop examples with too few words."""
  def add_words(example):
    out = dict(example)
    out['words'] = tf.strings.split([example[text_key]]).values
    return out

  return dataset.map(add_words, num_parallel_calls=AUTOTUNE).filter(
      lambda x: tf.size(x['words']) >= min_num_words)
def fill_in_the_blank(dataset,
                      text_key='text',
                      label='fill: '):
  """Create a dataset consisting of fill-in-the-blank text examples.

  The input examples should have a key text_key associated with a tf.string
  value.

  The output examples have keys 'inputs' and 'targets'.

  The input string is split on whitespace to form a sequence of words.
  This sequence is chopped randomly into segments of one or more words.
  Alternate segments are included in the inputs and targets, with a special
  word 'X' marking a missing segment.

  The given label is prepended to the inputs. Each input string produces two
  examples - one the inverse of the other. Inputs with less than two words
  are dropped.

  EXAMPLE:

  input:
  {
    'text': 'The fat cat sat on the mat.'
  }
  outputs:
  {
    'inputs': 'fill: The fat X the X'
    'targets': 'X cat sat on X mat.'
  }
  {
    'inputs': 'fill: X cat sat on X mat.'
    'targets': 'The fat X the X'
  }

  Args:
    dataset: a tf.data.Dataset
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    label: a string, the label to prepend to the inputs.

  Returns:
    a tf.data.Dataset
  """
  @seqio.map_over_dataset(num_seeds=3)
  def my_fn(x, seeds):
    """Generates two preprocessed examples that are roughly inverses.

    Args:
      x: an example dict with text pre-split in `words` feature.
      seeds: an int32 Tensor, shaped (3, 2), the random seeds.

    Returns:
      an example dict with two inputs and two targets, one for each resulting
      preprocessed example.
    """
    words = x['words']
    n_words = tf.size(words)

    # First select the break probability. We pick this on a log-uniform
    # distribution between 1/(n_words + 2) and 1/2. This means that some
    # sequences will be chopped roughly and others finely.
    min_log_p_break = -tf.math.log(tf.cast(n_words, tf.float32) + 2.0)
    max_log_p_break = -tf.math.log(2.0)
    p_break = tf.exp(
        tf.random.stateless_uniform(
            [],
            minval=min_log_p_break,
            maxval=max_log_p_break,
            seed=seeds[0])
    )
    # craffel@ says that there may be bugs in random.uniform making it not
    # really uniform. This doesn't seem horribly important here, but may
    # need another look.
    breaks = tf.less(
        tf.random.stateless_uniform([n_words - 1], seed=seeds[1]),
        p_break)

    def one_random_break():
      # Used when no break was sampled at all: force exactly one break at a
      # uniformly random position so both output segments are non-empty.
      pos = tf.random.stateless_uniform(
          [],
          minval=0,
          maxval=n_words - 1,
          dtype=tf.int32,
          seed=seeds[2])
      return tf.one_hot(pos, n_words - 1,
                        dtype=tf.bool, on_value=True, off_value=False)

    breaks = tf.cond(
        tf.math.reduce_any(breaks), lambda: breaks, one_random_break)
    breaks = tf.concat([[True], breaks], axis=0)
    # Alternating segment ids (0/1): cumulative break count, mod 2.
    word_to_seq_id = tf.math.mod(tf.math.cumsum(tf.cast(breaks, tf.int32)), 2)

    # separators:
    #   if in your segment: ' '
    #   if break to other segment: ' X'
    #   else: ''
    results = []
    for seq_id in [0, 1]:
      in_my_seq = tf.equal(word_to_seq_id, seq_id)
      separator_strings = tf.where(
          in_my_seq,
          ' ',
          tf.where(breaks, ' X', '')
      )
      word_strings = tf.where(in_my_seq, words, '')
      all_strings = tf.stack([separator_strings, word_strings], axis=1)
      # substr(…, 1, max) drops the single leading space added by the first
      # separator.
      results.append(tf.strings.substr(
          tf.strings.reduce_join(all_strings), 1, tf.int32.max))

    inputs = tf.stack([tf.strings.join([label, results[0]]),
                       tf.strings.join([label, results[1]])])
    targets = tf.stack([results[1], results[0]])
    return {'inputs': inputs, 'targets': targets}

  dataset = split_text_to_words(dataset, text_key, min_num_words=2)
  # Each example produced a pair (example and its inverse); unbatch them.
  return my_fn(dataset).unbatch()
def fill_in_the_blank_sized(
    dataset,
    size_bins=(1, 2, 4, 8, 16, 32, 64, 128, 256, 512),
    text_key='text',
    label='fill: '):
  """Fill in the blank preprocessor that labels blank with a binned size.

  The actual blank size is sampled uniformly from the inclusive range of the
  min and max bin. The blank is then filled in with the closest bin size to
  the actual blank size.

  Args:
    dataset: a tf.data.Dataset, the dataset to preprocess.
    size_bins: a list, a list of blank sizes to select from when labelling the
      blank.
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    label: a string, the label to prepend to the inputs.

  Returns:
    a tf.data.Dataset
  """
  bins = sorted(size_bins)

  @seqio.map_over_dataset(num_seeds=2)
  def my_fn(x, seeds):
    """Apply transformation."""
    words = x['words']
    n_words = tf.size(words)

    # Sample the true blank size in [bins[0], min(n_words, bins[-1])).
    # NOTE(review): when n_words == bins[0], minval == maxval here — confirm
    # tf.random.stateless_uniform tolerates an empty integer range.
    blank_size = tf.random.stateless_uniform(
        [],
        minval=bins[0],
        maxval=tf.math.minimum(n_words, bins[-1]),
        dtype=tf.dtypes.int32,
        seed=seeds[0])
    # Label the blank with the bin size closest to the actual size.
    bin_delta = tf.math.abs(bins - blank_size)
    bin_ = tf.gather(bins, tf.argmin(bin_delta))

    # Uniformly choose where the blank starts.
    blank_start = tf.random.stateless_uniform(
        [],
        minval=0,
        maxval=tf.math.maximum(0, n_words-blank_size) + 1,
        dtype=tf.dtypes.int32,
        seed=seeds[1])

    pre_blank = tf.strings.reduce_join(words[0:blank_start], separator=' ')
    post_blank = tf.strings.reduce_join(
        words[blank_start+blank_size:], separator=' ')
    blank = tf.strings.format('_{}_', bin_)
    # We strip to handle cases where blank is at beginning or end.
    input_ = tf.strings.strip(
        tf.strings.join([pre_blank, blank, post_blank], ' '))
    input_ = tf.strings.join([label, input_])
    target = tf.strings.reduce_join(
        words[blank_start:blank_start+blank_size], separator=' ')

    return {
        'inputs': tf.strings.strip(input_),
        'targets': tf.strings.strip(target)}

  dataset = split_text_to_words(dataset, text_key, min_num_words=2)
  # Only keep examples with at least as many words as the smallest bin.
  dataset = dataset.filter(lambda x: tf.size(x['words']) >= bins[0])
  return my_fn(dataset)
def neighboring_pairs(dataset, text_key='text', reuse_sentences=True):
  """Create a dataset consisting of neighboring sentence pairs.

  The input examples should have a key text_key associated with a tf.string
  value.

  The output examples have keys 'first' and 'second'.

  We only take sentence pairs from within the same line since lines seem to
  represent paragraph-like structures in our text datasets. Empty lines and
  1-sentence lines will thus be ignored.

  The argument reuse_sentences determines whether a sentence can be used as
  both the first and last element in the pair. For example, the input with
  sentences A,B,C,D will return (A,B),(B,C),(C,D) if reuse_sentences is True
  and (A,B),(C,D) if reuse_sentences is False.

  Args:
    dataset: a tf.data.Dataset
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    reuse_sentences: a boolean

  Returns:
    a tf.data.Dataset
  """
  def split_by_lines(dataset):
    """Splits text in dataset by line, removing empty lines."""
    def my_fn(text):
      lines = tf.strings.split([text], sep='\n').values
      return tf.strings.strip(lines)

    dataset = dataset.map(my_fn, num_parallel_calls=AUTOTUNE)
    dataset = dataset.unbatch()
    return dataset.filter(lambda x: tf.strings.length(x) > 0)

  def split_into_pairs(line):
    """Split a given text example into pairs of neighboring sentences."""
    # TODO(mmatena): Use better sentence segmentation.
    # A fresh UUID is used as the sentence separator so it cannot collide
    # with text already present in the line.
    sep = str(uuid.uuid4())
    sentences = tf.strings.regex_replace(line, r'((?:\.|\!|\?)+)', r'\1' + sep)
    sentences = tf.strings.strip(tf.strings.split([sentences], sep).values)
    if reuse_sentences:
      # (A,B),(B,C),(C,D): each interior sentence appears in two pairs.
      firsts = sentences[:-1]
      seconds = sentences[1:]
    else:
      # (A,B),(C,D): disjoint pairs.
      firsts = sentences[:-1:2]
      seconds = sentences[1::2]
    return {
        'first': firsts,
        'second': seconds,
    }

  def example_len(x):
    return tf.math.minimum(
        tf.strings.length(x['first']), tf.strings.length(x['second']))

  # Split by lines.
  dataset = dataset.map(lambda x: x[text_key], num_parallel_calls=AUTOTUNE)
  dataset = split_by_lines(dataset)

  # Get pairs of neighboring sentences.
  dataset = dataset.map(split_into_pairs, num_parallel_calls=AUTOTUNE)
  dataset = dataset.unbatch()

  # Remove examples with empty strings.
  dataset = dataset.filter(lambda x: example_len(x) > 0)
  return dataset
@seqio.map_over_dataset
def glue(x, benchmark_name, label_names, feature_names=None, id_key='idx'):
  """Flatten a GLUE example into a text2text 'inputs'/'targets' pair.

  The inputs string has the form
    "<benchmark_name> <key1>: <text1> <key2>: <text2> ..."
  and the targets string is the label's name, or '<unk>' when the example is
  unlabeled (label == -1). For example, a QQP example

  {
    "question1": "Why do I easily get bored of my friends?",
    "question2": "Why do I get bored of friends so quickly?",
    "label": 1,
    "idx": 10,
  }

  becomes

  {
    "inputs": (
        "qqp question1: Why do I easily get bored of my friends? question2: "
        "Why do I get bored of friends so quickly?"
    ),
    "targets": "duplicate",
    "idx": 10,
  }

  Args:
    x: an example to process.
    benchmark_name: the name of the GLUE benchmark for this dataset.
    label_names: a list of label names corresponding to class index.
    feature_names: an optional ordered list of feature names. If provided,
      features will be ordered in this way in the output. If not provided, all
      features (except 'idx' and 'label') will be used, sorted by name.
    id_key: str, key for id in the dataset. If not provided, 'idx' will be
      used. If None, no id will be added to the dataset.

  Returns:
    A preprocessed example.
  """
  # Use the caller-provided ordering when given; otherwise sort the feature
  # keys for a deterministic order.
  ordered_keys = (
      feature_names or sorted(set(x.keys()).difference(['label', 'idx'])))
  # Assemble "<benchmark> key1: <text1> key2: <text2> ..." pieces.
  pieces = [benchmark_name]
  for feature_key in ordered_keys:
    pieces.append('{}:'.format(feature_key))
    pieces.append(x[feature_key])
  label_name = tf.cond(
      # label == -1 marks an unlabeled example; emit "<unk>".
      tf.equal(x['label'], -1),
      lambda: tf.constant('<unk>'),
      lambda: tf.gather(label_names, x['label']),
  )
  joined = tf.strings.join(pieces, separator=' ')
  ex = {}
  if benchmark_name == 'multirc':
    # Remove HTML markup.
    joined = tf.strings.regex_replace(joined, '<br>', ' ')
    joined = tf.strings.regex_replace(joined, '<(/)?b>', '')
    # Store the data index in the returned example (used by eval)
    ex['idx/paragraph'] = x['idx']['paragraph']
    ex['idx/question'] = x['idx']['question']
    ex['idx/answer'] = x['idx']['answer']
  elif id_key:
    # Store the data index in the returned example (used by eval)
    ex['idx'] = x[id_key]
  ex['inputs'] = joined
  ex['targets'] = label_name
  return ex
@seqio.map_over_dataset
def stsb(x):
  """Map an STSB example to a text2text pair.

  STSB labels are similarity scores in [0, 5], almost always multiples of
  0.2. We round the score to the nearest 0.2 and render it as a string with
  one decimal place (e.g. "1.8"), which turns STSB into roughly a 26-class
  classification task. For example,

  {
    "sentence1": "Three more US soldiers killed in Afghanistan",
    "sentence2": "NATO Soldier Killed in Afghanistan",
    "label": 1.8,
  }

  becomes

  {
    "inputs": (
        "stsb sentence1: Three more US soldiers killed in Afghanistan "
        "sentence2: NATO Soldier Killed in Afghanistan"
    ),
    "targets": "1.8",
  }

  Args:
    x: an example to process.

  Returns:
    A preprocessed example.
  """
  inputs = tf.strings.join(
      ['stsb sentence1:', x['sentence1'], 'sentence2:', x['sentence2']],
      separator=' ')
  # Round to the nearest multiple of 0.2, then format with one decimal.
  rounded_label = tf.round(x['label'] * 5) / 5
  return {
      'inputs': inputs,
      'targets': tf.as_string(rounded_label, precision=1),
      'idx': x['idx'],
  }
@seqio.map_over_dataset
def wsc(x):
  """Convert WSC examples to text2text format.

  WSC includes a sentence along with 2 'spans': the first denoting a noun and
  the other a pronoun. The 'label' specifies whether or not the pronoun is
  referencing the noun. This preprocessor puts ' * ' around the noun and ' # '
  around the pronoun.

  For example, a typical example from WSC might look like
  {
      'text': 'This is a test sentence .',
      'span1_text': 'test',
      'span1_index': 3,
      'span2_text': 'This',
      'span2_index': 0,
      'label': 0
  }

  This example would be transformed to
  {
      'inputs': 'wsc text: # This # is a * test * sentence .',
      'targets': 'False'
  }

  Args:
    x: an example to process.

  Returns:
    A preprocessed example.
  """
  def _mark_span(text, span_str, span_idx, mark):
    # Build a regex that skips span_idx space-delimited words and then
    # captures span_str, e.g. N=3, W='test' -> r'^((?:\S+\s){3})(test)'.
    pattern_tmpl = r'^((?:\S+\s){N})(W)'
    pattern = tf.strings.regex_replace(pattern_tmpl, 'N',
                                       tf.as_string(span_idx))
    pattern = tf.strings.regex_replace(pattern, 'W', span_str)
    return tf.strings.regex_replace(text, pattern, r'\1{0} \2 {0}'.format(mark))

  text = x['text']
  text = _mark_span(text, x['span1_text'], x['span1_index'], '*')

  # Compensate for 2 added "words" added in previous step: marking span1
  # inserted two '*' tokens, shifting span2's word index when it follows.
  span2_index = x['span2_index'] + 2 * tf.cast(
      x['span1_index'] < x['span2_index'], tf.int32)
  text = _mark_span(text, x['span2_text'], span2_index, '#')

  # Add benchmark name at the start
  strs_to_join = ['wsc', 'text:', text]

  label_name = tf.cond(
      # When no label is provided (label == -1), use "<unk>"
      tf.equal(x['label'], -1),
      lambda: tf.constant('<unk>'),
      # Otherwise use False/True.
      lambda: tf.gather(['False', 'True'], x['label']))

  joined = tf.strings.join(strs_to_join, separator=' ')
  return {'inputs': joined, 'targets': label_name, 'idx': x['idx']}
@gin.configurable
def record(dataset):
  """Convert ReCoRD examples to text2text examples.

  ReCoRD contains a passage, query containing a '@placeholder' string, and a
  set of entities that are the possible values of the placeholder. Each train
  and validation example will have a list of answers, any of which would be
  considered correct.

  For example, a typical example from ReCoRD might look like
  {
      'passage': 'This is the passage.',
      'query': 'A @placeholder is a bird.',
      'entities': ['penguin', 'potato', 'pigeon'],
      'answers': ['penguin', 'pigeon'],
  }
  which this preprocessor would turn into one example per answer:
  {
      'inputs': 'record query: A @placeholder is a bird. entities: penguin, '
                'potato, pigeon passage: This is the passage.',
      'targets': 'penguin',
  }
  and
  {
      'inputs': 'record query: A @placeholder is a bird. entities: penguin, '
                'potato, pigeon passage: This is the passage.',
      'targets': 'pigeon',
  }

  Args:
    dataset: a tf.data.Dataset to process.

  Returns:
    a tf.data.Dataset
  """
  def process_answers(x):
    """Helper fn to get one example per answer."""
    ex = x.copy()
    num_answers = tf.size(ex['answers'])

    def duplicate_along_first_dim(t):
      # Replicate t once per answer (at least once, so unanswerable
      # examples still yield one output row).
      n_duplicates = tf.math.maximum(num_answers, 1)
      return tf.broadcast_to(
          t, shape=tf.concat([[n_duplicates], tf.shape(t)], axis=0))

    for k, v in x.items():
      if k != 'idx':
        ex[k] = duplicate_along_first_dim(v)
    # Unanswerable examples get a single '<unk>' target.
    ex['targets'] = tf.cond(
        tf.greater(num_answers, 0), lambda: x['answers'],
        lambda: tf.constant(['<unk>']))
    # 'idx' is a nested dict, so its leaves are duplicated individually.
    ex['idx'] = {
        'passage': duplicate_along_first_dim(x['idx']['passage']),
        'query': duplicate_along_first_dim(x['idx']['query']),
    }
    return ex

  def my_fn(x):
    """Converts the processed example to text2text strings."""
    passage = x['passage']
    # Turn '@highlight' markers into sentence boundaries.
    passage = tf.strings.regex_replace(passage,
                                       r'(\.|\?|\!|\"|\')\n@highlight\n',
                                       r'\1 ')
    passage = tf.strings.regex_replace(passage, r'\n@highlight\n', '. ')

    strs_to_join = [
        'record query:', x['query'], 'entities:',
        tf.strings.reduce_join(x['entities'], separator=', '), 'passage:',
        passage
    ]
    joined = tf.strings.join(strs_to_join, separator=' ')

    ex = {}

    # Store the data index in the returned example (used by eval)
    ex['idx/passage'] = x['idx']['passage']
    ex['idx/query'] = x['idx']['query']

    ex['inputs'] = joined
    # Note that "answers" has been converted to a single string by the
    # process_answers function.
    ex['targets'] = x['targets']
    # Pass-through full list of answers for eval
    ex['answers'] = x['answers']
    return ex

  dataset = dataset.map(process_answers, num_parallel_calls=AUTOTUNE)
  # process_answers produced a batch per example (one row per answer);
  # flatten back into individual examples.
  dataset = dataset.unbatch()
  return dataset.map(my_fn, num_parallel_calls=AUTOTUNE)
def multi_translate(dataset, source_language, target_language):
  """Convert a multi-translate dataset to a text2text pair.

  For example, say the dataset returns examples which have a 'translations'
  feature key so that examples have the following format:

  {
    ...
    'translations': {
        'language': ['de', 'fr', 'en'],
        'translation': ['Das ist gut.', 'Ca c'est bon', 'That is good.']
    },
    ...
  }

  If source_language = 'de', target_language = 'en', then this function will
  return examples of the format:

  {'inputs': 'translate German to English: Das is gut.',
   'targets': 'That is good.'}

  Any other languages present in the dataset will be filtered out.

  Args:
    dataset: a tf.data.Dataset to process.
    source_language: source language code (e.g. 'en') to translate from.
    target_language: target language code (e.g. 'de') to translate to.

  Returns:
    A preprocessed tf.data.Dataset with the format listed above.
  """
  def has_both_languages(x):
    """True iff both source and target languages appear in the example."""
    langs = x['translations']['language']
    return tf.logical_and(
        tf.reduce_any(tf.equal(source_language, langs)),
        tf.reduce_any(tf.equal(target_language, langs)))

  def extract_pair(x):
    """Pull out just the source/target translations, keyed by language."""
    langs = x['translations']['language']
    texts = x['translations']['translation']
    src_idx = tf.squeeze(tf.where(tf.equal(langs, source_language)))
    tgt_idx = tf.squeeze(tf.where(tf.equal(langs, target_language)))
    return {
        source_language: texts[src_idx],
        target_language: texts[tgt_idx],
    }

  dataset = dataset.filter(has_both_languages)
  dataset = dataset.map(extract_pair, num_parallel_calls=AUTOTUNE)
  # Delegate the final text2text formatting to the plain translate().
  return translate(dataset, source_language, target_language)
@seqio.map_over_dataset
def definite_pronoun_resolution_simple(x, label='wsc:'):
  """Map a DPR example to a simple text2text pair.

  A typical example from the definite pronoun resolution dataset:

  {
    'sentence': 'Bob asked Tom if he can lend some money.',
    'pronoun': 'he',
    'candidates': ['Bob', 'Tom'],
    'label': 1,
  }

  becomes

  {
    'inputs': 'wsc: Bob asked Tom if *he* can lend some money.'
    'targets': 'Tom',
  }

  Args:
    x: an example to process.
    label: a string, the label to prepend to the inputs.

  Returns:
    A preprocessed example.
  """
  # Surround the pronoun with asterisks; replace_global=False marks only the
  # first occurrence, which is the one to be resolved.
  pronoun_pattern = tf.strings.join([r' (', x['pronoun'], r')( |\.|,)'])
  marked_sentence = tf.strings.regex_replace(
      x['sentence'],
      pronoun_pattern,
      r' *\1*\2',
      replace_global=False,
  )
  return {
      'inputs': tf.strings.join([label, marked_sentence], separator=' '),
      'targets': x['candidates'][x['label']],
  }
def next_sentence_prediction(dataset,
                             text_key='text',
                             reuse_sentences=True,
                             label_sentences=False,
                             p_neighbors=0.5,
                             label='nsp: ',
                             buffer_size=50000):
  """Create a dataset containing a next sentence prediction objective.

  The input examples should have a key text_key associated with a tf.string
  value.
  The output examples have keys 'inputs' and 'targets'.

  EXAMPLE OUTPUTS:
  {
    input: "nsp: sentence1: The man went to the store. sentence2: Penguins are "
           "flightless birds.",
    target: "not_next"
  }

  The "sentence1:" and "sentence2:" labels will be omitted if label_sentences
  is False.

  Args:
    dataset: a tf.data.Dataset
    text_key: a string, the key for the text feature to preprocess in the
      dataset examples.
    reuse_sentences: a boolean, see docs for `neighboring_pairs` for more info.
    label_sentences: a boolean
    p_neighbors: a float between 0 and 1, the probability that a sentence pair
      will be neighbors.
    label: a string, the label to prepend to the inputs.
    buffer_size: an int, the size of the shuffle buffer used to get
      non-neighboring sentences.

  Returns:
    a tf.data.Dataset
  """
  sentence1_label, sentence2_label = '', ''
  if label_sentences:
    sentence1_label, sentence2_label = 'sentence1: ', 'sentence2: '
  # Sentinel produced when either side of a pair is empty; such examples are
  # filtered out at the end of this function.
  empty = tf.constant('', dtype=tf.string, shape=[1])
  dataset = neighboring_pairs(
      dataset, text_key=text_key, reuse_sentences=reuse_sentences)
  # Batch two pairs at a time so that a "not_next" example can be made by
  # crossing the second sentences of the two pairs within a batch.
  dataset = dataset.shuffle(buffer_size).batch(2, drop_remainder=True)

  def some_are_empty(*tensors):
    """See if at least one tensor has shape [0]."""
    empty = [tf.equal(tf.size(t), 0) for t in tensors]
    return tf.reduce_any(empty)

  @seqio.map_over_dataset(num_seeds=1)
  def my_fn(x, seed):
    """Function to be applied to each example in dataset."""
    # With probability p_neighbors keep the true pairing; otherwise swap the
    # two 'second' sentences across the batch to build non-neighboring pairs.
    use_neighbors = (
        tf.random.stateless_uniform(shape=[], seed=seed) < p_neighbors
    )
    firsts, seconds = tf.cond(
        use_neighbors,
        lambda: (x['first'], x['second']),
        lambda: (x['first'], tf.stack([x['second'][1], x['second'][0]])),
    )
    relation_label = tf.cond(
        use_neighbors,
        lambda: 'next',
        lambda: 'not_next',
    )
    inputs = []
    for i in range(2):
      first_inputs = firsts[i]
      second_inputs = seconds[i]

      # Bind the current tensors as default arguments so the closure does not
      # capture the loop variables by reference (Python late binding).
      def create_examples(first_i=first_inputs, second_i=second_inputs):
        return tf.strings.join([
            label,
            sentence1_label,
            first_i,
            ' ',
            sentence2_label,
            second_i,
        ])
      # Emit the empty sentinel when either sentence is missing so the
      # example is dropped by the final filter.
      inpt = tf.cond(
          some_are_empty(first_inputs, second_inputs),
          lambda: empty,
          create_examples,
      )
      inputs.append(tf.strings.strip(inpt))
    inputs = tf.reshape(inputs, [-1])
    # Both examples built from the batch share the same next/not_next label.
    targets = tf.reshape(2 * [relation_label], [-1])
    return {'inputs': inputs, 'targets': targets}
  dataset = my_fn(dataset).unbatch()

  def example_len(x):
    return tf.math.minimum(
        tf.strings.length(x['inputs']), tf.strings.length(x['targets']))

  # Remove examples with empty strings.
  return dataset.filter(lambda x: example_len(x) > 0)
@seqio.map_over_dataset
def lm(x):
  """Basic language modeling objective for text - empty inputs.

  Given inputs with the format:
  {"text": "Here is some text."}
  This preprocessor produces examples with the format
  {"inputs": "", "targets": "Here is some text."}

  Args:
    x: an example to process.

  Returns:
    A preprocessed example.
  """
  example = {'inputs': ''}
  example['targets'] = x['text']
  return example
def _wsc_inputs(x):
  """Given an example from SuperGLUE WSC, compute the 'inputs' value.

  The output will look like a fill in the blank with the pronoun blanked out.
  For example, the text
    'Mitchell asked Tom if he could lend some money.'
  would be transformed to
    'Mitchell asked Tom if X could lend some money.'

  Args:
    x: A dict that is an example from the WSC task of SuperGLUE.

  Returns:
    A scalar string tensor.
  """
  words = tf.strings.split([x['text']], sep=' ').values
  # We would need some special logic to handle the case where the pronoun is the
  # first or last word in the text. None of the examples in WSC seem to have
  # this, so we are ignoring these cases.
  with tf.control_dependencies([
      tf.assert_greater(x['span2_index'], 0),
      tf.assert_less(x['span2_index'], tf.size(words)),
  ]):
    pronoun_index = tf.identity(x['span2_index'])

  def create_input():
    # Sanity check: the word at span2_index must actually be the pronoun.
    with tf.control_dependencies(
        [tf.assert_equal(words[pronoun_index], x['span2_text'])]):
      # Rebuild the text with 'X' substituted for the pronoun word.
      return tf.strings.join(
          [
              tf.strings.reduce_join(words[:pronoun_index], separator=' '),
              'X',
              tf.strings.reduce_join(
                  words[pronoun_index + 1:], separator=' '),
          ],
          separator=' ',
      )

  # Handle some special cases.
  # NOTE(review): these Python `if` statements branch on a tf.equal Tensor,
  # which relies on eager execution or AutoGraph conversion — confirm callers
  # only invoke this inside converted dataset map functions.
  if tf.equal(
      x['text'],
      'The boy continued to whip the pony , and eventually the pony threw him over. John laughed out quite loud. \"Good for him,\" he said. '
  ):
    return (
        'The boy continued to whip the pony , and eventually the pony threw '
        'him over. John laughed out quite loud. "Good for X ," he said.'
    )

  # Using the span2_index, we get 'use' instead of 'it'.
  if tf.equal(
      x['text'],
      'When they had eventually calmed down a bit , and had gotten home, Mr. Farley put the magic pebble in an iron safe . Some day they might want to use it , but really for now, what more could they wish for?'
  ):
    return (
        'When they had eventually calmed down a bit , and had gotten home, '
        'Mr. Farley put the magic pebble in an iron safe . Some day they might '
        'want to use X , but really for now, what more could they wish for?'
    )
  return create_input()
def wsc_simple(dataset,
               label='wsc:',
               correct_referent_only=False):
  """Converts SuperGLUE WSC examples to a simple text to text format.

  A typical example from SuperGLUE WSC might look like
  {
    'text': 'Mitchell asked Tom if he could lend some money.',
    'span1_text': 'Tom',
    'span2_text': 'he',
    'span2_index': 4,
  }

  This will be transformed to
  {
    'inputs': 'wsc: Mitchell asked Tom if *he* could lend some money.',
    'targets': 'Tom',
  }

  The targets are always the text of the referent, whether or not it is the
  correct referent of the pronoun. For training, therefore, set
  `correct_referent_only` to True.

  Args:
    dataset: a tf.data.Dataset
    label: a string, the label to prepend to the inputs.
    correct_referent_only: a bool, whether to filter out examples for which the
      targets is not the correct referent of the pronoun.

  Returns:
    a tf.data.Dataset
  """

  def _to_text(ex):
    """Render one WSC example as inputs/targets strings."""
    # Highlight the blanked-out pronoun produced by _wsc_inputs.
    highlighted = tf.strings.regex_replace(
        _wsc_inputs(ex), r' X ', ' *' + ex['span2_text'] + '* ')
    return {
        'inputs': tf.strings.join([label, highlighted], separator=' '),
        # The reshape is necessary as otherwise the tensor has unknown rank.
        'targets': tf.reshape(ex['span1_text'], shape=[]),
        'label': ex.get('label', 0),
        'idx': ex['idx'],
    }

  if correct_referent_only:
    keep_correct = lambda ex: tf.cast(ex.get('label', False), tf.bool)
    dataset = dataset.filter(keep_correct)
  return dataset.map(_to_text, num_parallel_calls=AUTOTUNE)
@seqio.map_over_dataset
def wnli_simple(x, label='wsc:'):
  """Converts GLUE WNLI examples to a simple text to text format.

  A typical example from WNLI might look like:
  {
    'sentence1': 'The fish ate the worm. It was tasty.',
    'sentence2': 'The worm was tasty.',
    'label': 1,
  }

  This will be transformed to:
  {
    'inputs': 'wsc: The fish ate the worm. *It* was tasty.',
    'targets': 'The worm',
    'premise': 'The fish ate the worm. It was tasty.',
    'hypothesis': 'The worm was tasty.',
    'label': 1,
  }

  This preprocessor has been manually verified to produce reasonable WSC
  examples for the dev and test sets. Tasks using this preprocessor should only
  be used for eval and not train.

  Args:
    x: an example to process.
    label: a string, the label to prepend to the inputs.

  Returns:
    A preprocessed example.
  """
  pronouns = ['he', 'she', 'they', 'it', 'her', 'his', 'their', 'them', 'him']
  PronounMatch = collections.namedtuple(  # pylint: disable=invalid-name
      'PronounMatch', ['score', 'index_in_premise', 'candidate'])

  def split_clean(s):
    """Returns array of words with punctuation and capitalization removed."""
    words = [
        re.sub(r'(\.|,|\?|\!)$', '', w) for w in s.strip().lower().split(' ')
    ]
    return [w for w in words if w]

  def get_all_pronoun_indices(s):
    """Returns indices of all words in `s` that are candidate pronouns."""
    return [i for i, w in enumerate(s) if w in pronouns]

  def get_post_match_size(hypothesis, words):
    """Returns len of largest prefix of words that is substr of hypothesis."""
    hypothesis = ' '.join(hypothesis)
    for i in range(len(words)):
      if ' '.join(words[:i + 1]) not in hypothesis:
        return i
    return len(words)

  def get_pre_match_size(hypothesis, words):
    """Returns len of largest suffix of words that is substr of hypothesis."""
    return get_post_match_size(hypothesis[::-1], words[::-1])

  def get_pronoun_match(premise, hypothesis, index):
    """Return the PronounMatch for the pronoun at `index` in premise."""
    pre, post = premise[:index], premise[index + 1:]
    pre_match_size = get_pre_match_size(hypothesis, pre)
    post_match_size = get_post_match_size(hypothesis, post)
    score = pre_match_size + post_match_size
    candidate = ''
    if score:
      # Take the last pre_match_size words. When pre_match_size is 0,
      # `-0 or len(pre)` evaluates to len(pre), yielding an empty slice
      # (a plain pre[-0:] would wrongly return the whole list).
      pre_match = pre[-pre_match_size or len(pre):]
      post_match = post[:post_match_size]
      # The candidate is whatever sits between the matched context words in
      # the hypothesis.
      m = re.search(' '.join(pre_match + [r'(.+)'] + post_match),
                    ' '.join(hypothesis))
      if not m:
        # Handle cases where the candidate is at the start of the hypothesis.
        m = re.search(' '.join([r'^(.+)'] + post_match), ' '.join(hypothesis))
      if not m:
        # Handle cases where the candidate is at the end of the hypothesis.
        m = re.search(' '.join(pre_match + [r'(.+)$']), ' '.join(hypothesis))
      if m:
        candidate = m.group(1)
    return PronounMatch(
        score=score, index_in_premise=index, candidate=candidate)

  def get_best_pronoun_match(premise, hypothesis):
    """Returns the match for the pronoun in the premise to disambiguate."""
    pronoun_indices = get_all_pronoun_indices(premise)
    scoredpronouns = [
        get_pronoun_match(premise, hypothesis, index)
        for index in pronoun_indices
    ]
    return max(scoredpronouns, key=lambda x: x.score)

  def highlight(sentence, index):
    """Wraps the word at `index` in '*'s, keeping trailing punctuation out."""
    words = sentence.split(' ')
    word = words[index]
    if word[-1] in ['.', ',', '!', '?']:
      highlighted = '*{}* {}'.format(word[:-1], word[-1])
    else:
      highlighted = '*{}*'.format(word)
    return ' '.join(words[:index] + [highlighted] + words[index + 1:])

  def make_nonpossessive(word):
    # WSC simple targets will never be possessive, even when the pronoun is
    # possessive.
    if word.endswith("'"):
      return word[:-1]
    elif word.endswith("'s"):
      return word[:-2]
    else:
      return word

  def clean_up(candidate):
    """Trims a too-long candidate back to its last article, if any."""
    words = candidate.split(' ')
    # Sometimes the candidate extraction messes up, and the candidate will start
    # with the start of the hypothesis and extend to the correct candidate. We
    # can try to clean up the candidate in some cases by removing everything up
    # to the last article in the sentence.
    article_index = max(
        [words.index(art) for art in {'a', 'an', 'the'} if art in words] or [0])
    return ' '.join(words[article_index:])

  def process_candidate(candidate, hypothesis):
    """Handles special cases and adds proper punctuation/capitalization."""
    candidate = clean_up(candidate)
    # Re-find the candidate in the original (cased, punctuated) hypothesis.
    pattern = '({})'.format(' '.join([
        r'{}(?:\.|,|\?|\!)?'.format(re.escape(c)) for c in candidate.split(' ')
    ]))
    m = re.search(pattern, hypothesis, re.IGNORECASE)
    if not m:
      raise ValueError(
          'Unable to find candidate "{}" in hypothesis "{}".'.format(
              candidate, hypothesis))
    candidate = m.group(1)
    if candidate and candidate[-1] in ['.', ',', '!', '?']:
      candidate = candidate[:-1]
    return make_nonpossessive(candidate)

  def compute_inputs_and_targets(premise, hypothesis):
    """Compute inputs and targets for WNLI simple."""
    premise = tf.compat.as_text(premise.numpy())
    hypothesis = tf.compat.as_text(hypothesis.numpy())
    match = get_best_pronoun_match(
        split_clean(premise), split_clean(hypothesis))
    targets = process_candidate(match.candidate, hypothesis)
    inputs = '{} {}'.format(label, highlight(premise, match.index_in_premise))
    return inputs, targets

  # The matching logic is pure Python (regexes on decoded strings), so it runs
  # via py_function rather than as TF graph ops.
  inputs, targets = tf.py_function(
      compute_inputs_and_targets,
      inp=[x['sentence1'], x['sentence2']],
      Tout=[tf.string, tf.string])
  return {
      # The reshape is necessary as otherwise the tensor has unknown rank.
      'inputs': tf.reshape(inputs, shape=[]),
      'targets': tf.reshape(targets, shape=[]),
      'premise': x['sentence1'],
      'hypothesis': x['sentence2'],
      'label': x.get('label', 0),
      'idx': x['idx'],
  }
def rank_classification(
    ds: tf.data.Dataset,
    inputs_fn: Callable[[FeatureType], tf.Tensor],
    targets_fn: Callable[[FeatureType], tf.Tensor],
    is_correct_fn: Callable[[FeatureType], tf.Tensor],
    weight_fn: Optional[Callable[[FeatureType], tf.Tensor]] = None,
    mode: str = 'eval',
    passthrough_feature_keys: Optional[Sequence[str]] = None,
) -> tf.data.Dataset:
  """Prepare dataset for rank classification scoring.

  Intended to be used with `rank_classification` postprocessor and metric.

  `inputs_fn` and `targets_fn` must return the 'inputs' and 'targets' features,
  respectively, for each possible class label given the raw example features.
  'is_correct_fn' must return the 'is_correct' feature, a boolean for whether
  each label is correct.

  In 'train' mode, only the inputs / targets marked correct will be produced.
  In 'eval' mode, all inputs / targets will be produced.
  In 'fewshot_eval', all inputs / targets will be produced as a single batch.

  Each output example will also be given a unique 'idx' feature. The first dim
  is a sequential index for the input example and the second is the index of
  the generated output for it. E.g., the second output example from the fourth
  input example would be `[3, 1]`.

  To be clear, consider the following arguments:

  inputs_fn=lambda ex: ex['prefix'],
  targets_fn=lambda ex: ex['suffix'],
  is_correct_fn=lambda ex: tf.one_hot(ex['label'], num_classes)
  weight_fn=lambda ex: ex['weight']

  Given the following example:

  {
    'prefix': ['The farmland needed ', 'The farmland wanted '],
    'suffix': ['water', 'cows'],
    'label': 0,
    'weight': 1.0,
  }

  the preprocessor would return:

  [{
     'idx': [0, 0],
     'inputs': 'The farmland needed ',
     'targets': 'water',
     'is_correct': True,
     'weight': 1.0
   },
   {
     'idx': [0, 1],
     'inputs': 'The farmland wanted ',
     'targets': 'cows',
     'is_correct': False,
     'weight': 1.0
   }]

  With mode set to 'train', it would return only the first example,
  since it uses the correct label. With mode set to 'fewshot_eval', it would
  return both examples in a single batch.

  Args:
    ds: a tf.data.Dataset to preprocess.
    inputs_fn: a callable that returns the 'inputs' features for each label
      given the input example.
    targets_fn: a callable that returns the 'targets' features for each label
      given the input example.
    is_correct_fn: a callable that returns the 'label' feature. May be an int32
      scalar or 1-D Tensor.
    weight_fn: a callable that returns the 'weight' feature (float32 scalar).
    mode: A string, one of 'train', 'eval', or 'fewshot_eval'. 'train' produces
      only the correct example(s) based on the label value(s). 'eval' produces
      an example for every possible class value, sequentially. 'fewshot_eval'
      produces an example for every possible class value, batched together for
      each input example.
    passthrough_feature_keys: a sequence of feature names that should be passed
      through to the output of this preprocessor. eg: ["starburst", "tokens"]

  Returns:
    A tf.data.Dataset containing 'idx', 'inputs', 'targets', and 'is_correct'.
  """
  if mode not in ('train', 'eval', 'fewshot_eval'):
    raise ValueError(
        "Mode must be one of 'train', 'eval', or 'fewshot_eval'. "
        f"Got '{mode}'.")

  def make_examples(idx, ex):
    # Expand one input example into one output example per class label.
    inputs = inputs_fn(ex)
    targets = targets_fn(ex)
    is_correct = tf.cast(is_correct_fn(ex), tf.bool)
    # The scalar size of is_correct is broadcast against both sizes, ensuring
    # all three callbacks agree on the number of class labels.
    tf.debugging.assert_equal(
        tf.size(is_correct), [tf.size(inputs), tf.size(targets)],
        '`inputs_fn`, `targets_fn`, and `is_correct_fn` must return the same '
        'size tensors.')
    num_out = tf.size(is_correct)
    # 'idx' pairs the sequential input-example index with the class index.
    in_idx = tf.fill([num_out], tf.cast(idx, tf.int32))
    out_idx = tf.range(num_out)
    output = {
        'idx': tf.stack([in_idx, out_idx], 1),
        'inputs': inputs,
        'targets': targets,
        'is_correct': is_correct,
    }
    if passthrough_feature_keys is not None:
      for feature_name in passthrough_feature_keys:
        # NOTE(review): `len(targets)` requires targets to be a Python list or
        # a tensor with a statically known leading dimension — confirm this
        # holds for graph-mode callers.
        output[feature_name] = [ex[feature_name]] * len(targets)
    if weight_fn is not None:
      output['weight'] = tf.fill(tf.shape(is_correct), weight_fn(ex))
      output['weight'] = tf.cast(output['weight'], tf.float32)
    return output

  ds = ds.enumerate()
  ds = ds.map(make_examples, num_parallel_calls=AUTOTUNE)
  if mode != 'fewshot_eval':
    # One output example per class label; in 'fewshot_eval' the class labels
    # stay batched together.
    ds = ds.unbatch()
  if mode == 'train':
    ds = ds.filter(lambda ex: ex['is_correct'])
  return ds
def rank_classification_formatter(
    ds: tf.data.Dataset,
    inputs_formats: Union[str, Sequence[str]],
    targets_formats: Union[str, Sequence[str]],
    mode: str = 'eval',
    label_key: str = 'label',
    weight_key: Optional[str] = None) -> tf.data.Dataset:
  """Create 'inputs' and 'targets' strings for ranking classification.

  Intended to be used with `rank_classification` postprocessor and metric.

  Inputs will be formatted by filling in the feature values in the
  `inputs_formats` and `targets_formats` strings.

  Nested features can be accessed by concatenating the features using forward
  slash. For eg: if sub-sub-key is nested under sub-key, which is nested under
  key, then sub-sub-key can be accessed using key/sub-key/sub-sub-key.

  In 'eval' mode, a separate example will be produced for each targets / inputs
  format string. These can then be scored to find the one with the highest
  likelihood. The `rank_classification` postprocessor and metric allow you to
  evaluate with this technique.

  In 'train' mode, only the targets / inputs format string indexed by the
  label(s) will be produced. In 'eval' mode, all inputs / targets will be
  produced.

  Each input example will also be given a unique, sequential index called
  'idx'.

  For example, with arguments:

  ```
  inputs_format='{premise} What is the {question}? X',
  targets_formats=[
      'I think {choice1}.',
      'I think {choice2}.'
  ],
  mode='eval'
  ```

  given the input:

  {
    'premise': 'The farmland needed irrigation.',
    'question': 'effect',
    'choice1' : 'a canal was constructed',
    'choice2': 'the crops grew tall',
    'label': 0,
  }

  the preprocessor would return:

  [{
     'idx': 0,
     'inputs': 'The farmland needed irrigation. What is the effect? X',
     'targets': 'I think a canal was constructed.',
     'is_correct': True
   },
   {
     'idx': 0,
     'inputs': 'The farmland needed irrigation. What is the effect? X',
     'targets': 'I think the crops grew tall.',
     'is_correct': False
  }]

  With `mode='train'`, it would return only the first example,
  since it uses the correct label.

  With `mode='fewshot_eval'`, it would return both examples in a single batch.

  Args:
    ds: a tf.data.Dataset to preprocess.
    inputs_formats: A string or a list of strings to format with feature values
      to produce 'inputs'. Feature keys should be surrounded by curly braces to
      be replaced.
    targets_formats: A string or a list of strings to format with feature
      values to produce 'targets', one for each possible class value. Feature
      keys should be surrounded by curly braces to be replaced.
    mode: A string, one of 'train', 'eval', or 'fewshot_eval'. 'train' produces
      only the correct example(s) based on the label value(s). 'eval' produces
      an example for every possible class value, sequentially.
      'fewshot_eval': produces an example for every possible class value,
      batched together for each input example.
    label_key: A string, the feature key for the integer label value(s).
    weight_key: A string, the feature key for the float example weight.

  Returns:
    A tf.data.Dataset containing 'idx', 'inputs', 'targets', and 'is_correct'.
  """
  if (isinstance(inputs_formats, (list, tuple)) and
      isinstance(targets_formats, (list, tuple))):
    if len(inputs_formats) != len(targets_formats):
      raise ValueError(
          f'The inputs_formats ({len(inputs_formats)}) and '
          f'targets_formats ({len(targets_formats)}) are both instances '
          'of list or tuple, but do not have matching lengths.')
    # BUG FIX: num_classes was previously never assigned on this path, which
    # raised a NameError in _is_correct_fn when both arguments were lists of
    # matching length.
    num_classes = len(inputs_formats)
  elif isinstance(inputs_formats, (list, tuple)):
    num_classes = len(inputs_formats)
    targets_formats = [targets_formats] * num_classes
  elif isinstance(targets_formats, (list, tuple)):
    num_classes = len(targets_formats)
    inputs_formats = [inputs_formats] * num_classes
  else:
    raise ValueError(
        'One of the inputs_formats and targets_formats has to '
        f'be a list or tuple, inputs_formats: {inputs_formats}, '
        f'target_formats: {targets_formats}.')

  def _format_str(features, fmt):
    """Fills `{key}` (or `{key/subkey}`) placeholders in `fmt` from features."""
    keys = set(re.findall(r'{(\S+)}', fmt))
    s = fmt
    for k in keys:
      value = features
      # Walk nested feature dicts using '/'-separated key paths.
      for subkey in k.split('/'):
        value = value[subkey]
      if not isinstance(value, tf.Tensor):
        raise ValueError(
            f'Final value of key \'{k}\' must be a tf.string. '
            f'Got: {type(value).__name__}')
      tf.debugging.assert_type(
          value, tf.string,
          f'Final value of key \'{k}\' must be a tf.string. '
          f'Got: {value.dtype.name}')
      s = tf.strings.regex_replace(s, '{%s}' % k, value)
    return s

  def _apply_formats(features, fmts):
    """Applies every format string to the features, one output per class."""
    return [_format_str(features, fmt) for fmt in fmts]

  def _is_correct_fn(ex):
    """One-hot 'is_correct' vector from the (scalar or 1-D) label feature."""
    labels = ex[label_key]
    is_correct = tf.one_hot(labels, num_classes, on_value=True, off_value=False)
    if labels.shape.rank:
      # Multiple correct labels: mark each of them.
      is_correct = tf.math.reduce_any(is_correct, axis=0)
    return is_correct

  def _weight_fn(ex):
    return ex[weight_key]

  return rank_classification(
      ds,
      inputs_fn=functools.partial(_apply_formats, fmts=inputs_formats),
      targets_fn=functools.partial(_apply_formats, fmts=targets_formats),
      is_correct_fn=_is_correct_fn,
      weight_fn=None if weight_key is None else _weight_fn,
      mode=mode)
@seqio.map_over_dataset
def parse_tsv(line, field_names=None, field_delim='\t'):
  """Splits TSV lines into dict examples mapping field name to string value.

  Args:
    line: an example containing a comma/tab-delimited string.
    field_names: a list of strings, the ordered names of the TSV fields.
      Defaults to "inputs" and "targets".
    field_delim: a string, the delimiter to split on e.g. ',' for csv.

  Returns:
    A feature dict mapping field name to string value.
  """
  names = field_names if field_names else ['inputs', 'targets']
  # Every field defaults to the empty string when missing.
  values = tf.io.decode_csv(
      line,
      record_defaults=[''] * len(names),
      field_delim=field_delim,
      use_quote_delim=False)
  return dict(zip(names, values))
@seqio.map_over_dataset
def preprocess_tsv(line,
                   field_delim='\t',
                   num_fields=2,
                   inputs_format='{0}',
                   targets_format='{1}',
                   field_names=None):
  r"""Parse tab-delimited strings into inputs and targets.

  This function takes a tf.data.Dataset of strings, each of which contains
  tab-delimited fields. The function returns a tf.data.Dataset of feature
  dictionaries of the form {"inputs": string, "targets": string}.

  inputs_format contains a template string and field numbers or names used to
  produce the "inputs" string.
  targets_format contains a template string and field numbers or names used to
  produce the "targets" string.

  Example (field numbers):
    The input dataset contains the lines:
    "6,7,42"
    "2,9,18"
    preprocess_tsv(dataset,
                   field_delim=',',
                   inputs_format='numerator: {2} denominator: {1}',
                   targets_format='quotient: {0}')
    would produce a dataset containing the dictionaries:
    {"inputs": "numerator: 42 denominator: 7", "targets": "quotient: 6"}
    {"inputs": "numerator: 18 denominator: 9", "targets": "quotient: 2"}

  Example (field names):
    The input dataset contains the lines:
    "6,7,42"
    "2,9,18"
    preprocess_tsv(dataset,
                   field_delim=',',
                   field_names=['quot', 'denom', 'numer'],
                   inputs_format='numerator: {numer} denominator: {denom}',
                   targets_format='quotient: {quot}')
    would produce a dataset containing the dictionaries:
    {"inputs": "numerator: 42 denominator: 7", "targets": "quotient: 6"}
    {"inputs": "numerator: 18 denominator: 9", "targets": "quotient: 2"}

  Args:
    line: an example containing comma/tab-delimited string.
    field_delim: a string, the delimiter to split on e.g. ',' for csv.
    num_fields: an integer
    inputs_format: a string, the desired output format with placeholders for
      field values.
    targets_format: a string, the desired output format with placeholders for
      field values.
    field_names: a list of strings, the ordered names of the TSV fields.
      defaults to None (i.e. use field number in *_format)

  Returns:
    A feature dict with 'inputs' and 'targets' features.
  """

  def _format_part_with_field_numbers(part, field_values):
    # Note: the `\d` pattern supports only single-digit field numbers
    # ({0}-{9}).
    found = re.findall(r'{(\d)}', part)
    if found:
      return field_values[int(found[0])]
    else:
      return part

  def _format_part_with_field_names(part, field_names, field_values):
    # Match any of the named placeholders, e.g. '{quot}|{denom}|{numer}'.
    field_names_re = '|'.join(['{{({})}}'.format(x) for x in field_names])
    found = re.findall(field_names_re, part)
    if found:
      pos = field_names.index(''.join(found[0]))
      return field_values[int(pos)]
    else:
      return part

  def _format(format_string, field_names, field_values):
    # Split the template so that placeholders become their own parts, then
    # substitute each placeholder part with its field value.
    if field_names is None:
      parts = [
          _format_part_with_field_numbers(p, field_values)
          for p in re.split(r'({\d})', format_string)
      ]
    else:
      field_names_re = '(' + '|'.join(['{{{}}}'.format(x) for x in field_names
                                      ]) + ')'
      parts = [
          _format_part_with_field_names(p, field_names, field_values)
          for p in re.split(field_names_re, format_string)
      ]
    return tf.strings.join(parts)

  # Missing fields default to the empty string.
  field_values = tf.io.decode_csv(
      line,
      record_defaults=[''] *
      (num_fields if field_names is None else len(field_names)),
      field_delim=field_delim,
      use_quote_delim=False)
  return {
      'inputs': _format(inputs_format, field_names, field_values),
      'targets': _format(targets_format, field_names, field_values)
  }
# ======================Token Preprocessors=====================================
# TODO(adarob): Add a test.
def span_corruption(dataset,
                    sequence_length,
                    output_features,
                    mean_noise_span_length=3.0,
                    noise_density=0.15,
                    input_feature_key='inputs',
                    merge_examples_to_reduce_padding=True,
                    reserved_for_packing=None):
  """Final pretraining objective used in Raffel et al., 2019.

  Args:
    dataset: A tf.data.Dataset with dictionaries containing the key
      `input_feature_key`.
    sequence_length: dict mapping of feature key to int length for that
      feature.
    output_features: mapping of keys to features.
    mean_noise_span_length: the mean number of tokens per masked span per
      example.
    noise_density: what fraction of the tokens to mask.
    input_feature_key: which feature to use from the dataset as the input text
      tokens.
    merge_examples_to_reduce_padding: if True, combines multiple input examples
      to reduce padding.
    reserved_for_packing: if specified, reduces the desired inputs length by
      the specified amount to enable multiple examples to be packed together
      downstream.

  Returns:
    a dataset
  """
  inputs_length = sequence_length[input_feature_key]
  if reserved_for_packing:
    inputs_length -= reserved_for_packing
  # Determine how many raw tokens to feed each example so that, after
  # sentinel insertion, inputs and targets land on the desired lengths.
  input_length, targets_length = random_spans_helper(
      extra_tokens_per_span_inputs=1,
      extra_tokens_per_span_targets=1,
      inputs_length=inputs_length,
      mean_noise_span_length=mean_noise_span_length,
      noise_density=noise_density)
  if sequence_length['targets'] < targets_length:
    raise ValueError(
        f'Expected targets length for span corruption ({targets_length}) is '
        f'greater than configured targets length '
        f"({sequence_length['targets']})")
  # Sample a long chunk from each document, optionally pack chunks together
  # to cut down on padding, then split into segments of the raw input length.
  ds = select_random_chunk(
      dataset,
      output_features=output_features,
      feature_key='targets',
      max_length=65536)
  if merge_examples_to_reduce_padding:
    ds = reduce_concat_tokens(ds, feature_key='targets', batch_size=128)
  ds = split_tokens(
      ds,
      feature_key='targets',
      min_tokens_per_segment=None,
      max_tokens_per_segment=input_length)
  # Mask random spans: noise spans become sentinels in the inputs and are
  # reproduced (with sentinels) in the targets.
  return denoise(
      ds,
      output_features,
      inputs_fn=noise_span_to_unique_sentinel,
      targets_fn=nonnoise_span_to_unique_sentinel,
      noise_density=noise_density,
      noise_mask_fn=functools.partial(
          random_spans_noise_mask,
          mean_noise_span_length=mean_noise_span_length),
      input_feature_key=input_feature_key)
# TODO(adarob): Add a test.
def iid_denoising(dataset, sequence_length, output_features):
  """Baseline pretraining objective used in Raffel et al., 2019."""
  # Sample a long chunk from each document, then pack chunks together so that
  # examples fill the inputs length with minimal padding.
  chunked = select_random_chunk(
      dataset, output_features=output_features, feature_key='targets',
      max_length=65536)
  packed = reduce_concat_tokens(chunked, feature_key='targets', batch_size=128)
  sized = split_tokens_to_inputs_length(
      packed, output_features=output_features,
      sequence_length=sequence_length)
  # Corrupt 15% of tokens independently (i.i.d. mask), replacing noise spans
  # with sentinels in the inputs and keeping them in the targets.
  return denoise(
      sized,
      output_features,
      inputs_fn=noise_span_to_unique_sentinel,
      targets_fn=nonnoise_span_to_unique_sentinel,
      noise_density=0.15,
      noise_mask_fn=iid_noise_mask)
def prefix_lm(dataset, sequence_length, output_features):
  """Prefix language modeling objective used in Raffel et al. 2019."""
  chunked = select_random_chunk(
      dataset, output_features=output_features, feature_key='targets',
      max_length=65536)
  sized = split_tokens_to_inputs_length(
      chunked, output_features=output_features,
      sequence_length=sequence_length)
  # A single random split point: tokens before it become the inputs (the
  # "prefix"), tokens after it become the targets.
  return denoise(
      sized,
      output_features,
      inputs_fn=drop_nonnoise_tokens,
      targets_fn=drop_noise_tokens,
      noise_density=0.5,
      noise_mask_fn=random_prefix_noise_mask)
def full_lm(dataset, sequence_length, output_features):
  """Full language modeling objective with EOS only at document boundaries."""
  chunked = select_random_chunk(
      dataset, output_features=output_features, feature_key='targets',
      max_length=65536)
  # EOS is appended per document *before* concatenation, so within the packed
  # token stream it marks only document boundaries.
  with_eos = seqio.preprocessors.append_eos(chunked, output_features)
  packed = reduce_concat_tokens(with_eos, feature_key='targets',
                                batch_size=128)
  # Don't use `split_tokens_to_targets_length` since EOS was already added.
  return split_tokens(packed,
                      max_tokens_per_segment=sequence_length['targets'])
@gin.configurable
def select_random_chunk(dataset: tf.data.Dataset,
                        output_features: Mapping[str, seqio.Feature],
                        max_length: Optional[int] = None,
                        feature_key: str = 'targets',
                        additional_feature_keys: Optional[Sequence[str]] = None,
                        passthrough_feature_keys: Optional[
                            Sequence[str]] = None,
                        sequence_length: Optional[Mapping[str, int]] = None,
                        uniform_random_start: bool = False,
                        min_length: Optional[int] = None,
                        **unused_kwargs) -> tf.data.Dataset:
  """Token-preprocessor to extract one span of at most `max_length` tokens.

  If the token sequence is longer than `max_length`, then we return a random
  subsequence. Otherwise, we return the full sequence.

  This is generally followed by split_tokens.

  Args:
    dataset: A tf.data.Dataset with dictionaries containing the key
      feature_key.
    output_features: Mapping of keys to features.
    max_length: Typically specified in gin configs, takes priority over
      sequence_length.
    feature_key: Which feature to use from the dataset.
    additional_feature_keys: Additional features to use. The same chunk will be
      selected from these features as from the one specified in feature_key,
      so they should all have the same length.
    passthrough_feature_keys: Additional keys to pass through unchanged.
    sequence_length: Used if max_length is not specified. Typically passed in
      by the data pipeline. feature_key will be used to select the length.
    uniform_random_start: If True, will select a starting point in
      [-max_length + 1, n_tokens). If False, will select one of a set of chunks
      offset by max_length. Both of these starting points try to ensure each
      token has an equal probability of being included.
    min_length: If specified, lengths of chunks will be selected uniformly at
      random from [min_length, max_length]. Note that chunks can end up shorter
      than min_length if at the beginning or end of the sequence.

  Returns:
    a dataset
  """
  if passthrough_feature_keys:
    # The features being chunked and the features being passed through must be
    # disjoint, otherwise a key would be both sliced and left untouched.
    chunk_keys = set([feature_key] + (additional_feature_keys or []))
    overlap_keys = chunk_keys & set(passthrough_feature_keys)
    if overlap_keys:
      raise ValueError(
          f'chunk keys {overlap_keys} also included in passthrough keys')

  if max_length is None and sequence_length is not None:
    max_length = sequence_length[feature_key]
    if output_features[feature_key].add_eos:
      # Leave room to insert an EOS token.
      max_length -= 1
  if max_length is None:
    raise ValueError('Must specify max_length or sequence_length.')

  @seqio.map_over_dataset(num_seeds=2)
  def _my_fn(x, seeds):
    """Select a random chunk of tokens.

    Args:
      x: a 1d Tensor
      seeds: an int32 Tensor, shaped (2, 2), the random seeds.

    Returns:
      a 1d Tensor
    """
    tokens = x[feature_key]
    n_tokens = tf.shape(tokens)[0]
    if min_length is not None:
      # NOTE(review): stateless_uniform's maxval is exclusive, so the sampled
      # length is in [min_length, max_length), while the docstring says the
      # inclusive [min_length, max_length] — confirm which is intended.
      length = tf.random.stateless_uniform(
          [],
          minval=min_length,
          maxval=max_length,
          dtype=tf.int32,
          seed=seeds[0])
    else:
      length = max_length
    if uniform_random_start:
      # Negative starts are clamped to 0 below; allowing them gives tokens
      # near the beginning the same inclusion probability as interior tokens.
      start = tf.random.stateless_uniform(
          [],
          minval=-length + 1,  # pylint:disable=invalid-unary-operand-type
          maxval=n_tokens,
          dtype=tf.int32,
          seed=seeds[1])
      end = tf.minimum(start + length, n_tokens)
      start = tf.maximum(start, 0)
    else:
      # Partition the sequence into ceil(n_tokens / length) chunks and pick
      # one uniformly at random.
      num_segments = tf.cast(
          tf.math.ceil(
              tf.cast(n_tokens, tf.float32) / tf.cast(length, tf.float32)
          ),
          tf.int32)
      start = length * tf.random.stateless_uniform(
          [],
          maxval=num_segments,
          dtype=tf.int32,
          seed=seeds[1])
      end = tf.minimum(start + length, n_tokens)
    chunk = {feature_key: tokens[start:end]}
    if additional_feature_keys is not None:
      for k in additional_feature_keys:
        # Slice additional features with the same [start, end) window; they
        # must be the same length as the main feature along axis 0.
        with tf.control_dependencies([
            tf.assert_equal(
                tf.shape(tokens)[0],
                tf.shape(x[k])[0],
                message=(f'Additional feature {k} is not the same size as '
                         f'{feature_key} along axis 0 in select_random_chunk().'
                        )
            )
        ]):
          chunk[k] = x[k][start:end]
    if passthrough_feature_keys is not None:
      for k in passthrough_feature_keys:
        chunk[k] = x[k]
    return chunk
  # Filter empty examples.
  dataset = dataset.filter(lambda x: tf.not_equal(tf.size(x[feature_key]), 0))
  return _my_fn(dataset)
@gin.configurable
def reduce_concat_tokens(dataset,
                         feature_key='targets',
                         batch_size=128,
                         **unused_kwargs):
  """Token-preprocessor to concatenate multiple unrelated documents.

  If we want to generate examples of exactly the right length (to avoid
  wasting space on padding), then we use this function, followed by
  split_tokens.

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the key
      feature_key.
    feature_key: a string
    batch_size: an integer - how many documents to concatenate into one

  Returns:
    a dataset
  """
  # Keep only the feature being concatenated; other features would be
  # meaningless once unrelated documents are merged.
  dataset = dataset.map(
      lambda x: {feature_key: x[feature_key]}, num_parallel_calls=AUTOTUNE)
  # Pad-batch so each element becomes a [batch_size, max_len] matrix.
  dataset = dataset.padded_batch(batch_size, padded_shapes={feature_key: [-1]})

  def _concat_and_strip(batch):
    # Flatten the padded matrix into one long sequence, then drop the
    # zero padding tokens introduced by padded_batch.
    flat = tf.reshape(batch[feature_key], [-1])
    flat = tf.boolean_mask(flat, tf.cast(flat, tf.bool))
    return {feature_key: flat}

  return dataset.map(_concat_and_strip, num_parallel_calls=AUTOTUNE)
@seqio.map_over_dataset
def trim_tokens_at_front(x,
                         sequence_length,
                         keys_to_trim=None,
                         **unused_kwargs):
  """Token-preprocessor to trim sequence at the beginning.

  Args:
    x: an example with dictionaries containing keys_to_trim.
    sequence_length: a dict of ints.
    keys_to_trim: a list of feature keys.

  Returns:
    A preprocessed example.
  """
  keys = keys_to_trim or sequence_length.keys()
  for key in keys:
    if key not in x:
      continue
    # Keep only the last (sequence_length[key] - 1) tokens, leaving room
    # for the EOS token which gets added later.
    x[key] = x[key][-(sequence_length[key] - 1):]
  return x
def trivia_qa_truncate_inputs(dataset, output_features, sequence_length):
  """Token preprocessor for the trivia QA dataset to truncate inputs.

  This function takes a dataset containing "targets" and "inputs". It searches
  for the "targets" in the "inputs" and truncates the "inputs" to
  `sequence_length` while ensuring that the "targets" are present in the
  "inputs". The function will randomly select a subset of "inputs".
  If "targets" are not found in the "inputs", then the example
  is dropped from the dataset.

  E.g.
  Input dataset
  {
    "inputs": [0, 3, 5, 7, 9, 11, 13, 15, 17, 18]
    "targets": [5, 7, 9]
  }

  Output dataset (assuming sequence_length['inputs'] = 4)
  {
    "inputs": [3, 5, 7, 9]
    "targets": [5, 7, 9]
  }

  or
  {
    "inputs": [5, 7, 9, 11]
    "targets": [5, 7, 9]
  }

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the "inputs" and
      "targets".
    output_features: unused by this function.
    sequence_length: a dict, with keys as "inputs" and "targets" indicating the
      maximum number of tokens in each of the sequences.

  Returns:
    a dataset
  """
  del output_features

  @seqio.map_over_dataset(num_seeds=1)
  def my_fn(features, seed):
    """Function to map original dataset to the new dataset."""
    inputs = features['inputs']
    targets = features['targets']
    ans_len = tf.shape(targets)[0]
    max_input_tokens = sequence_length['inputs']

    def truncate_inputs():
      """Helper function to truncate the inputs."""

      def answer_in_context(context, answer):
        """Helper function that checks if the answer is present in the context.

        Args:
          context: Tensor, tokenized representation of the context
          answer: Tensor, tokenized representation of the answer

        Returns:
          result: boolean, indicates if the answer was present in the context.
          pos_mask: boolean mask, a mask for every possible start position of
            the answer in the context. Indicates whether the answer starts at
            the particular position.
        """
        conv_inp = tf.reshape(tf.cast(context, tf.float32), [1, -1, 1])
        ans_len = tf.shape(answer)[0]
        filters = tf.eye(ans_len, dtype=tf.float32)

        # Assume context len is N and answer len is M.
        # Use a convolution to create a matrix of (N-M) x M elements where
        # each row of the matrix is a sequence of len M. This matrix contains
        # all possible contiguous sequences of length M from the context.
        # Every row of this matrix is compared with the answer to check if the
        # answer exists in the context.
        strided = tf.nn.conv1d(conv_inp,
                               tf.reshape(filters, [ans_len, 1, ans_len]), 1,
                               'VALID')
        strided = tf.cast(strided[0], answer.dtype)
        pos_mask = tf.reduce_all(
            tf.equal(strided, tf.reshape(answer, [1, -1])), 1)
        result = tf.reduce_any(pos_mask)
        return result, pos_mask

      def slice_inputs(inputs, answer_len, pos_mask, seed=None):
        """Helper function to slice inputs while keeping the answer."""
        # First start position at which the answer occurs.
        ans_start_pos = tf.cast(tf.where(pos_mask)[0][0], tf.int32)
        inputs_len = tf.shape(inputs)[0]
        # Window start range chosen so the whole answer stays inside the
        # max_input_tokens-sized window.
        start_range_min = tf.maximum(
            0, ans_start_pos - (max_input_tokens - answer_len))
        start_range_max = tf.minimum(ans_start_pos,
                                     inputs_len - max_input_tokens) + 1

        start_pos = tf.random.stateless_uniform(
            [],
            minval=start_range_min,
            maxval=start_range_max,
            dtype=tf.int32,
            seed=seed)
        return inputs[start_pos:start_pos + max_input_tokens]

      result, pos_mask = answer_in_context(inputs, targets)

      if result:
        return slice_inputs(inputs, ans_len, pos_mask, seed=seed)
      else:
        # Answer not found: return an empty inputs tensor so the example is
        # dropped by the filter below.
        return tf.constant([], dtype=inputs.dtype)

    # Only truncate when the inputs exceed the token budget.
    if tf.greater(tf.shape(inputs)[0], max_input_tokens):
      inputs = truncate_inputs()
    return {'inputs': inputs, 'targets': features['targets']}

  dataset = my_fn(dataset)
  # Drop examples whose answer was not found in the context.
  return dataset.filter(lambda x: tf.size(x['inputs']) > 0)
@gin.configurable()
def unsupervised(dataset,
                 preprocessors=None,
                 output_features=None,
                 sequence_length=None):
  """Configure this to point at unsupervised preprocessors.

  This function creates an extra level of indirection in case we want
  different unsupervised pretraining functions in the future which do not
  fit into the denoise() framework.

  This function should be used as a post-cache preprocessing function.

  Args:
    dataset: A tf.data.Dataset to process.
    preprocessors: a list of token-preprocessor functions. These functions
      should take unused kwargs if output_features or sequence_length is not
      used.
    output_features: dict(str, Feature), output features of the Task to be
      passed to the model.
    sequence_length: dict mapping feature key to int length for that feature.

  Returns:
    A preprocessed tf.data.Dataset.
  """
  if preprocessors is None:
    logging.warning(
        'unsupervised preprocessor got preprocessors=None; no preprocessing '
        'will be applied.'
    )
    return dataset

  # Only forward the kwargs that were actually supplied; preprocessors
  # accept unused kwargs otherwise.
  kwargs = {
      key: value
      for key, value in (('output_features', output_features),
                         ('sequence_length', sequence_length))
      if value
  }

  for preprocessor in preprocessors:
    dataset = preprocessor(dataset, **kwargs)
  return dataset
# ======================== split_tokens and helpers ============================
@gin.configurable
def split_tokens(dataset: tf.data.Dataset,
                 min_tokens_per_segment: Optional[int] = None,
                 max_tokens_per_segment: int = gin.REQUIRED,
                 feature_key: str = 'targets',
                 additional_feature_keys: Optional[Sequence[str]] = None,
                 passthrough_feature_keys: Optional[Sequence[str]] = None,
                 num_parallel_calls: int = AUTOTUNE,
                 **unused_kwargs) -> tf.data.Dataset:
  """Split examples into multiple examples each.

  The intended use case is to break up long examples for use in unsupervised
  transfer-learning.
  This function is generally preceded by select_random_chunk.

  If min_tokens_per_segment is provided, the segment length is chosen randomly
  per document from a log-uniform distribution. If min_tokens_per_segment is
  None, then the segment length is max_tokens_per_segment (except for a possibly
  shorter last segment in each document).

  Args:
    dataset: a tf.data.Dataset with dictionaries containing the key feature_key.
    min_tokens_per_segment: an optional integer
    max_tokens_per_segment: an integer, the maximum number of tokens in each
      segment. Only the final segment may be shorter.
    feature_key: a string, the feature to split
    additional_feature_keys: Additional features to split. The same chunk size
      will be used, so they should be the same size as feature_key.
    passthrough_feature_keys: Features to pass through without any splitting.
    num_parallel_calls: num_parallel_calls value to pass to map_over_dataset

  Returns:
    a dataset
  """
  if passthrough_feature_keys:
    # A key cannot be both split and passed through unchanged.
    split_keys = set([feature_key] + (additional_feature_keys or []))
    overlap_keys = split_keys & set(passthrough_feature_keys)
    if overlap_keys:
      raise ValueError(
          f'split keys {overlap_keys} also included in passthrough keys')

  @seqio.map_over_dataset(num_seeds=1, num_parallel_calls=num_parallel_calls)
  def _split_tokens(x, seed):
    """Split one token sequence into multiple sequences."""
    tokens = x[feature_key]
    n_tokens = tf.shape(tokens)[0]
    if min_tokens_per_segment is None:
      length = max_tokens_per_segment
    else:
      # pick a length - log-uniformly distributed
      length = tf.cast(
          tf.exp(
              tf.random.stateless_uniform(
                  [],
                  minval=math.log(min_tokens_per_segment),
                  maxval=math.log(max_tokens_per_segment),
                  seed=seed
              )
          ),
          tf.int32)

    # Pad to a multiple of length, then use tf.reshape to split up the tokens
    # into num_segments segments each of the given length.
    num_segments = tf.cast(
        tf.math.ceil(
            tf.cast(n_tokens, tf.float32) / tf.cast(length, tf.float32))
        ,
        tf.int32)
    padding = num_segments * length - tf.shape(tokens)[0]
    feature_keys_to_split = [feature_key]
    orig_lengths = {}
    outputs = {}
    if additional_feature_keys is not None:
      feature_keys_to_split.extend(additional_feature_keys)
    for k in feature_keys_to_split:
      with tf.control_dependencies([
          tf.assert_equal(
              tf.shape(tokens)[0],
              tf.shape(x[k])[0],
              message=(f'Additional feature {k} is not the same size as '
                       f'{feature_key} along axis 0 in split_tokens().')
          )
      ]):
        shape = tf.shape(x[k])[1:]
        shape_list = x[k].shape[1:]
        # Pad only along axis 0; trailing dims (if any) are left untouched.
        padded = tf.pad(
            x[k],
            tf.concat([[[0, padding]],
                       tf.zeros([len(shape_list), 2], dtype=tf.int32)],
                      axis=0))
        # Remember each segment's true (unpadded) length so the padding can
        # be stripped after unbatching; only the last segment is shorter.
        orig_lengths[k] = tf.concat(
            [tf.repeat(length, num_segments - 1), [length - padding]], axis=0)
        outputs[k] = tf.reshape(
            padded, tf.concat([[-1, length], shape], axis=0))
    if passthrough_feature_keys:
      for k in passthrough_feature_keys:
        # Replicate passthrough features once per produced segment.
        outputs[k] = tf.tile(
            tf.expand_dims(x[k], axis=0),
            tf.concat([[num_segments], tf.tile([1], [tf.rank(x[k])])], axis=0))
    return outputs, orig_lengths

  def _strip_padding(inputs, orig_lengths):
    output = {}
    for k, v in inputs.items():
      if passthrough_feature_keys and k in passthrough_feature_keys:
        output[k] = v
      else:
        output[k] = v[:orig_lengths[k]]
    return output

  # Filter empty examples.
  dataset = dataset.filter(lambda x: tf.not_equal(tf.size(x[feature_key]), 0))
  dataset = _split_tokens(dataset)
  dataset = dataset.unbatch()
  dataset = dataset.map(_strip_padding, num_parallel_calls=AUTOTUNE)
  return dataset
@gin.configurable
def split_tokens_to_inputs_length(dataset, sequence_length,
                                  output_features, **kwargs):
  """Split tokens into segments sized to the 'inputs' sequence length."""
  segment_length = sequence_length['inputs']
  if output_features['inputs'].add_eos:
    # Leave room to insert an EOS token.
    segment_length -= 1
  return split_tokens(
      dataset, max_tokens_per_segment=segment_length, **kwargs)
@gin.configurable
def split_tokens_to_targets_length(dataset, sequence_length,
                                   output_features, **kwargs):
  """Split tokens into segments sized to the 'targets' sequence length."""
  segment_length = sequence_length['targets']
  if output_features['targets'].add_eos:
    # Leave room to insert an EOS token.
    segment_length -= 1
  return split_tokens(
      dataset, max_tokens_per_segment=segment_length, **kwargs)
@gin.configurable
def split_tokens_to_random_length(dataset, sequence_length,
                                  output_features, **kwargs):
  """Split tokens into log-uniform random-length segments (8 to inputs len)."""
  upper_bound = sequence_length['inputs']
  if output_features['inputs'].add_eos:
    # Leave room to insert an EOS token.
    upper_bound -= 1
  return split_tokens(dataset,
                      min_tokens_per_segment=8,
                      max_tokens_per_segment=upper_bound,
                      **kwargs)
@gin.configurable
def concatenate_and_split_to_fixed_length(dataset,
                                          sequence_length,
                                          output_features,
                                          feature_key='targets',
                                          **unused_kwargs):
  """Concatenate tokens across examples, then split to fixed-size chunks.

  Chunk length is determined by sequence_length[feature_key].

  Args:
    dataset: a tf.data.Dataset
    sequence_length: a dict of ints.
    output_features: a dict mapping feature name to t5.data.Feature.
    feature_key: a string

  Returns:
    a tf.data.Dataset
  """
  # Keep only the feature being chunked.
  dataset = dataset.map(lambda x: {feature_key: x[feature_key]})
  chunk_size = sequence_length[feature_key]
  if output_features[feature_key].add_eos:
    # Leave room to insert an EOS token.
    chunk_size -= 1
  # Flatten all examples into one token stream, then regroup the stream
  # into fixed-size chunks.
  return dataset.unbatch().batch(chunk_size)
@gin.configurable
def filter_by_string_length(dataset,
                            feature_key='targets',
                            min_length=1,
                            max_length=1000000,
                            **unused_kwargs):
  """Filter examples by string length.

  Args:
    dataset: a tf.data.Dataset (not tokenized)
    feature_key: a string
    min_length: an integer
    max_length: an integer

  Returns:
    a tf.data.Dataset
  """

  def _within_bounds(x):
    # Byte length of the string feature, inclusive bounds on both ends.
    n = tf.strings.length(x[feature_key])
    return tf.logical_and(tf.greater_equal(n, min_length),
                          tf.less_equal(n, max_length))

  return dataset.filter(_within_bounds)
@gin.configurable
def random_spans_helper(inputs_length=gin.REQUIRED,
                        noise_density=gin.REQUIRED,
                        mean_noise_span_length=gin.REQUIRED,
                        extra_tokens_per_span_inputs=gin.REQUIRED,
                        extra_tokens_per_span_targets=gin.REQUIRED,
                        verbose=False):
  """Training parameters to avoid padding with random_spans_noise_mask.

  When training a model with random_spans_noise_mask, we would like to set the
  other training hyperparmeters in a way that avoids padding. This function
  helps us compute these hyperparameters.

  We assume that each noise span in the input is replaced by
  extra_tokens_per_span_inputs sentinel tokens, and each non-noise span in the
  targets is replaced by extra_tokens_per_span_targets sentinel tokens.

  This function tells us the required number of tokens in the raw example (for
  split_tokens()) as well as the length of the encoded targets.

  Note that this function assumes the inputs and targets will have EOS appended
  and includes that in the reported length.

  Args:
    inputs_length: an integer - desired length of the tokenized inputs sequence
    noise_density: a float
    mean_noise_span_length: a float
    extra_tokens_per_span_inputs: an integer
    extra_tokens_per_span_targets: an integer
    verbose: a bool indicating whether to log sequence lengths

  Returns:
    tokens_length: length of original text in tokens
    targets_length: an integer - length in tokens of encoded targets sequence
  """

  def _encoded_lengths(tokens_length):
    """Map a raw token count to (inputs_length, targets_length) after noising."""
    num_noise_tokens = int(round(tokens_length * noise_density))
    num_nonnoise_tokens = tokens_length - num_noise_tokens
    num_noise_spans = int(round(num_noise_tokens / mean_noise_span_length))
    # inputs contain all nonnoise tokens, sentinels for all noise spans
    # and one EOS token.
    encoded_inputs = (
        num_nonnoise_tokens + num_noise_spans * extra_tokens_per_span_inputs
        + 1)
    encoded_targets = (
        num_noise_tokens + num_noise_spans * extra_tokens_per_span_targets
        + 1)
    return encoded_inputs, encoded_targets

  # Grow the raw length as far as possible without the encoded inputs
  # exceeding the desired inputs_length.
  tokens_length = inputs_length - 1
  while _encoded_lengths(tokens_length + 1)[0] <= inputs_length:
    tokens_length += 1
  inputs_length, targets_length = _encoded_lengths(tokens_length)

  # minor hack to get the targets length to be equal to inputs length
  # which is more likely to have been set to a nice round number.
  if noise_density == 0.5 and targets_length > inputs_length:
    tokens_length -= 1
    targets_length -= 1
  if verbose:
    logging.info(
        'tokens_length=%s inputs_length=%s targets_length=%s '
        'noise_density=%s mean_noise_span_length=%s ',
        tokens_length, inputs_length, targets_length,
        noise_density, mean_noise_span_length)
  return tokens_length, targets_length
@gin.configurable
def random_spans_tokens_length():
  """Helper for gin-configuring split_tokens with random_spans_noise_mask."""
  tokens_length, _ = random_spans_helper()
  return tokens_length
@gin.configurable
def random_spans_targets_length():
  """Helper for gin-configuring the targets sequence length."""
  _, targets_length = random_spans_helper()
  return targets_length
# ========================== denoise and helpers ===============================
@gin.configurable()
def denoise(dataset,
            output_features,
            noise_density=gin.REQUIRED,
            noise_mask_fn=gin.REQUIRED,
            inputs_fn=gin.REQUIRED,
            targets_fn=None,
            passthrough_feature_keys: Optional[Sequence[str]] = None,
            input_feature_key='inputs',
            **unused_kwargs):
  """Gin-configurable token preprocessor for self-supervised denoising tasks.

  This function takes a dataset containing "targets" sequences,
  and turns each sequence into a dictionary containing:
  {
     "inputs": noisy version of the original sequence
     "targets": the full original sequence or missing parts of original sequence
  }

  In particular, for each sequence, we choose a boolean noise_mask identifying
  which tokens in the sequence to corrupt, as defined by the given
  noise_mask_fn.
  Given the sequence and the noise mask, we generate the inputs and targets
  using the given inputs_fn and targets_fn respectively.

  The self-supervised tasks vary along these axes:
    - noise_density: What fraction of the tokens to select as noise
    - noise_mask_fn: What pattern should the noise mask follow
        (iid, regular segments, etc.)
    - inputs_fn: How to apply the noise
        (drop noise tokens, replace with sentinels, etc.)
    - targets_fn: How to represent the output
        (full sequence, only non-noise tokens, etc.)

  Note: Some functionality has been deleted, which we may or may not want to
  restore at a later date. The code for this functionality can be found in
  the deleted code for this CL. In particular:
    - mixture of masking and random replacement
    - task labels prepended to the inputs

  Args:
    dataset: A tf.data.Dataset to process.
    output_features: a dict mapping feature name to t5.data.Feature.
    noise_density: a float
    noise_mask_fn: a function from (length, noise_density) -> boolean mask
    inputs_fn: a function from (tokens, noise_mask, vocabulary) -> tokens
    targets_fn: a function from (tokens, noise_mask, vocabulary) -> tokens
    passthrough_feature_keys: names of additional features to include in output
    input_feature_key: name of feature to use as inputs

  Returns:
    A preprocessed tf.data.Dataset.
  """
  # The inputs/targets features must be produced by this function, so they
  # cannot also be passed through unchanged.
  if passthrough_feature_keys and (input_feature_key in passthrough_feature_keys
                                   or 'targets' in passthrough_feature_keys):
    raise ValueError(
        f"passthrough keys cannot contain '{input_feature_key}' or 'targets'")

  @seqio.map_over_dataset(num_seeds=6)
  def my_fn(features, seeds):
    """Map function."""
    tokens = features['targets']
    vocabulary = output_features['targets'].vocabulary
    if (input_feature_key in output_features and
        vocabulary != output_features[input_feature_key].vocabulary):
      raise ValueError(
          'denoise creates inputs based on tokenized targets but was applied '
          'to a task that uses different vocabularies for inputs and targets.')
    # Six seeds are split two-per-stage: mask, inputs, targets.
    noise_mask = noise_mask_fn(tf.size(tokens), noise_density, seeds=seeds[:2])
    inputs = inputs_fn(tokens, noise_mask, vocabulary, seeds=seeds[2:4])
    if targets_fn:
      targets = targets_fn(tokens, noise_mask, vocabulary, seeds=seeds[4:6])
    else:
      # Default: the targets are the full original sequence.
      targets = tokens
    return {
        input_feature_key: inputs,
        'targets': targets,
        **{
            k: features[k]
            for k in features
            if passthrough_feature_keys and k in passthrough_feature_keys
        }
    }

  return my_fn(dataset)
@gin.configurable()
def iid_noise_mask(length, noise_density, seeds):
  """Independent and identically distributed token noise.

  Args:
    length: an int32 scalar.
    noise_density: a float - approximate density of output mask.
    seeds: an int32 Tensor, shaped (1, 2), the random seed.

  Returns:
    a boolean tensor with shape [length].
  """
  # One independent uniform draw per position; a position becomes noise
  # when its draw falls below noise_density.
  uniforms = tf.random.stateless_uniform([length], seed=seeds[0])
  return uniforms < noise_density
@gin.configurable()
def regular_noise_mask(length,
                       noise_density,
                       seeds,
                       min_span_length=1,
                       max_span_length=5):
  """Noise mask consisting of equally spaced spans of equal length.

  The span length and the offset are chosen randomly per-example.
  The beginning and end of the sequence may be part of shorter spans of noise.
  For example, if noise_density=0.25 and a span length of 2 is chosen,
  then the output might be:
  [T F F F F F F T T F F F F F F T T F F F F F F T T F F]

  Args:
    length: an int32 scalar.
    noise_density: a float - approximate density of output mask.
    seeds: an int32 Tensor, shaped (2, 2), the random seeds.
    min_span_length: an integer.
    max_span_length: an integer.

  Returns:
    a boolean tensor with shape [length].
  """
  # Per-example span length, uniform over [min_span_length, max_span_length].
  span_length = tf.random.stateless_uniform(
      [],
      minval=min_span_length,
      maxval=max_span_length + 1,
      dtype=tf.int32,
      seed=seeds[0])
  # Spacing chosen so span_length / period approximates noise_density.
  period = tf.cast(
      tf.round(tf.cast(span_length, tf.float32) / noise_density), tf.int32)
  # Random phase so spans do not always start at position 0.
  offset = tf.random.stateless_uniform(
      [],
      maxval=period,
      dtype=tf.int32,
      seed=seeds[1])
  positions = tf.range(length, dtype=tf.int32)
  return (positions + offset) % period < span_length
@gin.configurable()
def random_spans_noise_mask(length,
                            noise_density,
                            seeds,
                            mean_noise_span_length=3.0):
  """Noise mask consisting of random spans of noise tokens.

  The number of noise tokens and the number of noise spans and non-noise spans
  are determined deterministically as follows:
    num_noise_tokens = round(length * noise_density)
    num_nonnoise_spans = num_noise_spans = round(
       num_noise_tokens / mean_noise_span_length)
  Spans alternate between non-noise and noise, beginning with non-noise.
  Subject to the above restrictions, all masks are equally likely.

  Args:
    length: an int32 scalar (length of the incoming token sequence)
    noise_density: a float - approximate density of output mask
    seeds: an int32 Tensor, shaped (2, 2)
    mean_noise_span_length: a number

  Returns:
    a boolean tensor with shape [length]
  """
  orig_length = length
  # increase length to avoid degeneracy
  length = tf.maximum(length, 2)

  def to_int(x):
    return tf.cast(x, tf.int32)

  def to_float(x):
    return tf.cast(x, tf.float32)

  num_noise_tokens = to_int(tf.round(to_float(length) * noise_density))
  # avoid degeneracy by ensuring positive numbers of noise and nonnoise tokens.
  num_noise_tokens = tf.minimum(tf.maximum(num_noise_tokens, 1), length - 1)
  num_noise_spans = to_int(
      tf.round(to_float(num_noise_tokens) / mean_noise_span_length))
  # avoid degeneracy by ensuring positive number of noise spans
  num_noise_spans = tf.maximum(num_noise_spans, 1)
  num_nonnoise_tokens = length - num_noise_tokens

  # pick the lengths of the noise spans and the non-noise spans
  def _random_segmentation(num_items, num_segments, seed):
    """Partition a sequence of items randomly into non-empty segments.

    Args:
      num_items: an integer scalar > 0
      num_segments: an integer scalar in [1, num_items]
      seed: an integer seed

    Returns:
      a Tensor with shape [num_segments] containing positive integers that add
      up to num_items
    """
    # Shuffle (num_segments - 1) ones among (num_items - 1) slots; a one
    # marks the first item of a new segment. The leading pad forces the
    # first item to start segment 0, guaranteeing non-empty segments.
    first_in_segment = tf.pad(
        seqio.stateless_shuffle(
            to_int(tf.range(num_items - 1) < num_segments - 1),
            seed),
        [[1, 0]])
    segment_id = tf.cumsum(first_in_segment)
    segment_length = tf.math.segment_sum(tf.ones_like(segment_id), segment_id)
    return segment_length

  noise_span_lengths = _random_segmentation(
      num_noise_tokens, num_noise_spans, seeds[0])
  nonnoise_span_lengths = _random_segmentation(
      num_nonnoise_tokens, num_noise_spans, seeds[1])
  # Alternate nonnoise/noise spans, beginning with nonnoise.
  interleaved_span_lengths = tf.reshape(
      tf.stack([nonnoise_span_lengths, noise_span_lengths], axis=1),
      [num_noise_spans * 2])
  span_starts = tf.cumsum(interleaved_span_lengths)[:-1]
  # Scatter a 1 at each span start, then cumsum to get per-position span
  # indices; odd span indices are the noise spans.
  span_start_indicator = tf.math.unsorted_segment_sum(
      tf.ones_like(span_starts), span_starts, length)
  span_num = tf.cumsum(span_start_indicator)
  is_noise = tf.equal(span_num % 2, 1)
  # Trim back to the caller's original length (length may have been bumped
  # to 2 above to avoid degeneracy).
  return is_noise[:orig_length]
@gin.configurable()
def random_prefix_noise_mask(length, noise_density, seeds):
  """First part of the sequence is noise (for prefix_lm).

  The length of the prefix is chosen uniformly between [1, length)
  noise_density must be 0.5.
  TODO(noam): figure out some distribution to use if noise_density != 0.5.

  Args:
    length: an int32 scalar.
    noise_density: a float - must equal 0.5.
    seeds: an int32 Tensor, shaped (1, 2), the random seed.

  Returns:
    a boolean tensor with shape [length].
  """
  if noise_density != 0.5:
    raise NotImplementedError(
        'noise density must equal 0.5 for random_prefix_noise_mask')
  # Prefix length is uniform over [1, length - 1]; the lower bound is
  # clamped so a length-1 sequence still yields a valid range.
  upper = length - 1
  lower = tf.minimum(upper, 1)
  prefix_len = tf.random.stateless_uniform(
      [],
      minval=lower,
      maxval=upper + 1,
      dtype=tf.int32,
      seed=seeds[0])
  return tf.range(length, dtype=tf.int32) < prefix_len
@gin.configurable()
def sentinel_id(vocabulary, return_value=None):
  """Token ID to use as a sentinel.

  By default, we use the last token in the vocabulary.

  Args:
    vocabulary: a t5.data.vocabularies.Vocabulary
    return_value: an optional integer

  Returns:
    an integer
  """
  if return_value is None:
    # Default: appropriate the last token id in the vocabulary.
    return vocabulary.vocab_size - 1
  return return_value
@gin.configurable()
def noise_token_to_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each noise token with the given sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del seeds
  sentinel = tf.cast(sentinel_id(vocabulary), tokens.dtype)
  return tf.where(noise_mask, sentinel, tokens)
@gin.configurable()
def noise_span_to_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive noise tokens with a single sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del seeds
  # A noise token is "subsequent" when its predecessor is also noise; only
  # the first token of each noise span survives (as the sentinel).
  prev_token_is_noise = tf.pad(noise_mask[:-1], [[1, 0]])
  subsequent_noise_tokens = tf.logical_and(noise_mask, prev_token_is_noise)
  sentinel = tf.cast(sentinel_id(vocabulary), tokens.dtype)
  replaced = tf.where(noise_mask, sentinel, tokens)
  return tf.boolean_mask(replaced, tf.logical_not(subsequent_noise_tokens))
@gin.configurable()
def nonnoise_span_to_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive non-noise tokens with a single sentinel."""
  # Invert the mask and reuse the noise-span implementation.
  inverted_mask = tf.logical_not(noise_mask)
  return noise_span_to_sentinel(tokens, inverted_mask, vocabulary, seeds)
@gin.configurable()
def noise_span_to_unique_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive noise tokens with a different sentinel.

  The idea here is to be able to align the dropped spans in the inputs
  with the markers in the targets.

  We want to generate training examples like
  "We hold X to be Y that" -> "X these truths Y self evident Z"

  Sentinels assigned in decreasing order within the sequence starting at
  vocabulary.size - 1.  That is, we appropriate the last tokens in the
  vocabulary for additional use as sentinels.

  TODO(noam): we may want to try enlarging the vocabulary and leaving room
  for the sentinels instead.  However, this requires enlarging the embedding
  tables in the model, so that is a bigger change.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del seeds
  # Identify the first token of each noise span (predecessor is not noise)
  # and the remaining tokens of each span (predecessor is noise).
  prev_token_is_noise = tf.pad(noise_mask[:-1], [[1, 0]])
  first_noise_tokens = tf.logical_and(
      noise_mask, tf.logical_not(prev_token_is_noise))
  subsequent_noise_tokens = tf.logical_and(noise_mask, prev_token_is_noise)
  # The cumsum counts spans seen so far, so the k-th span (1-based) gets
  # sentinel id (sentinel_id(vocabulary) + 1 - k), i.e. ids descend from
  # the last vocabulary token.
  sentinel = sentinel_id(vocabulary) + 1 - tf.cumsum(
      tf.cast(first_noise_tokens, tokens.dtype))
  # Put the sentinel at each span's first token, then drop the rest of
  # each span.
  tokens = tf.where(first_noise_tokens, sentinel, tokens)
  return tf.boolean_mask(tokens, tf.logical_not(subsequent_noise_tokens))
@gin.configurable()
def nonnoise_span_to_unique_sentinel(tokens, noise_mask, vocabulary, seeds):
  """Replace each run of consecutive non-noise tokens with a unique sentinel."""
  # Invert the mask and reuse the noise-span implementation.
  inverted_mask = tf.logical_not(noise_mask)
  return noise_span_to_unique_sentinel(tokens, inverted_mask, vocabulary, seeds)
@gin.configurable()
def drop_noise_tokens(tokens, noise_mask, vocabulary, seeds):
  """Drop noise tokens without inserting a sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del vocabulary
  del seeds
  # Keep exactly the tokens the mask marks as non-noise.
  keep = tf.logical_not(noise_mask)
  return tf.boolean_mask(tokens, keep)
@gin.configurable()
def drop_nonnoise_tokens(tokens, noise_mask, vocabulary, seeds):
  """Drop non-noise tokens without inserting a sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an unused int32 Tensor

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del vocabulary
  del seeds
  # Keep exactly the tokens the mask marks as noise.
  return tf.boolean_mask(tokens, noise_mask)
@gin.configurable()
def permute_noise_tokens(tokens, noise_mask, vocabulary, seeds):
  """Permute the noise tokens, keeping the non-noise tokens where they are.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an int32 Tensor, sized (1, 2)

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del vocabulary
  # Gather the noise tokens, shuffle them, then scatter them back into the
  # positions the noise mask marks.
  noise_tokens = tf.boolean_mask(tokens, noise_mask)
  shuffled = seqio.stateless_shuffle(noise_tokens, seeds[0])
  # pad to avoid errors when it has size 0
  shuffled = tf.pad(shuffled, [[0, 1]])
  # The running count of noise positions maps each noise position to its
  # index within the shuffled sequence.
  gather_indices = tf.cumsum(tf.cast(noise_mask, tf.int32), exclusive=True)
  return tf.where(noise_mask,
                  tf.gather(shuffled, gather_indices),
                  tokens)
@gin.configurable()
def noise_token_to_gathered_token(tokens, noise_mask, vocabulary, seeds):
  """Replace each noise token with a random token from the sequence.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: an unused vocabulary.Vocabulary
    seeds: an int32 Tensor, sized (1, 2)

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  del vocabulary
  # For every position, pick a random source index within the sequence;
  # the gathered value is used only where the mask marks noise.
  source_indices = tf.random.stateless_uniform(
      shape=tf.shape(tokens),
      maxval=tf.size(tokens),
      dtype=tf.int32,
      seed=seeds[0])
  gathered = tf.gather(tokens, source_indices)
  return tf.where(noise_mask, gathered, tokens)
@gin.configurable()
def noise_token_to_random_token(
    tokens,
    noise_mask,
    vocabulary,
    seeds,
    num_reserved_tokens=3):
  """Replace each noise token with a random token from the vocabulary.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an int32 Tensor, shaped (1, 2)
    num_reserved_tokens: an integer

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  # Sample uniform vocabulary ids, skipping the reserved range at the
  # start of the vocabulary.
  random_tokens = tf.random.stateless_uniform(
      tf.shape(tokens),
      minval=num_reserved_tokens,
      maxval=vocabulary.vocab_size,
      dtype=tokens.dtype,
      seed=seeds[0])
  return tf.where(noise_mask, random_tokens, tokens)
@gin.configurable()
def noise_token_to_random_token_or_sentinel(
    tokens,
    noise_mask,
    vocabulary,
    seeds,
    random_prob=0.1):
  """Replace each noise token with a random token or a sentinel.

  For each masked token, with probability random_prob, we replace it by a
  random token from the vocabulary. Otherwise, we replace it with a sentinel.

  Args:
    tokens: a 1d integer Tensor
    noise_mask: a boolean Tensor with the same shape as tokens
    vocabulary: a vocabulary.Vocabulary
    seeds: an int32 Tensor, shaped (2, 2).
    random_prob: a float

  Returns:
    a Tensor with the same shape and dtype as tokens
  """
  # Per-position coin flip deciding random-token vs. sentinel replacement.
  coin_flips = tf.random.stateless_uniform(tf.shape(tokens), seed=seeds[0])
  use_random = coin_flips < random_prob
  random_replaced = noise_token_to_random_token(
      tokens, noise_mask, vocabulary, seeds=seeds[1:])
  sentinel_replaced = noise_token_to_sentinel(
      tokens, noise_mask, vocabulary, seeds=())
  return tf.where(use_random, random_replaced, sentinel_replaced)
# =============== EXPERIMENTAL preprocessors (not used for the T5 paper) =======
def trim_and_pad_dataset(dataset, sequence_length):
  """A wrapper to use `seqio.utils.trim_and_pad_dataset` as a preprocessor."""
  # Adapts the (dataset, sequence_length) preprocessor signature to seqio's
  # feature_lengths keyword.
  return seqio.utils.trim_and_pad_dataset(
      dataset, feature_lengths=sequence_length)
def targets_for_prefix_lm_objective(dataset, sequence_length, output_features):
  """Prepares targets to be used for prefix LM objective."""
  # Take a (large) random chunk of each document so that extremely long
  # documents do not dominate, then mark document ends with EOS.
  dataset = select_random_chunk(
      dataset, output_features, max_length=65536, feature_key='targets')
  dataset = seqio.preprocessors.append_eos(dataset, output_features)
  # Concatenate unrelated documents, then re-split into segments of exactly
  # sequence_length['targets'] tokens to minimize padding waste.
  dataset = reduce_concat_tokens(dataset, batch_size=128)
  dataset = split_tokens(
      dataset, max_tokens_per_segment=sequence_length['targets'])
  dataset = trim_and_pad_dataset(dataset, sequence_length)
  return dataset
def pack_prefix_lm_encoder_decoder(ds, sequence_length, pad_id=0):
  """Pack two examples into one with the prefix LM objective."""
  # All features must share one even packed length so the two halves of each
  # pair line up exactly.
  packed_length = next(iter(sequence_length.values()))
  assert packed_length % 2 == 0
  assert all(l == packed_length for l in sequence_length.values())

  @seqio.utils.map_over_dataset(num_seeds=1)
  def pack_examples(example_pair, seed):
    # Random boundary between the encoder "prefix" and the decoder suffix.
    split_point = tf.random.stateless_uniform((),
                                              minval=1,
                                              maxval=packed_length,
                                              seed=seed,
                                              dtype=tf.int32)
    # Encoder input: prefix of example 0 followed by the complementary-sized
    # prefix of example 1, totalling packed_length tokens.
    inputs = tf.concat([
        example_pair['targets'][0][:split_point],
        example_pair['targets'][1][:packed_length - split_point]
    ],
                       axis=0)
    inputs = tf.reshape(inputs, (packed_length,))
    # Decoder targets: the remaining suffixes of the two examples.
    targets = tf.concat([
        example_pair['targets'][0][split_point:],
        example_pair['targets'][1][packed_length - split_point:]
    ],
                        axis=0)
    targets = tf.reshape(targets, (packed_length,))
    # Segment ids 1 and 2 distinguish the two packed examples.
    encoder_segment_ids = tf.cast(
        tf.range(packed_length) >= split_point, tf.int32) + 1
    decoder_segment_ids = tf.cast(
        tf.range(packed_length) >= (packed_length - split_point), tf.int32) + 1
    decoder_input_tokens = seqio.utils.make_autoregressive_inputs(
        targets, sequence_id=decoder_segment_ids)
    # Positions restart at 0 within each packed segment.
    encoder_positions = tf.concat(
        [tf.range(split_point),
         tf.range(packed_length - split_point)], axis=0)
    encoder_positions = tf.reshape(encoder_positions, (packed_length,))
    decoder_positions = tf.concat(
        [tf.range(packed_length - split_point),
         tf.range(split_point)], axis=0)
    decoder_positions = tf.reshape(decoder_positions, (packed_length,))
    # No loss on padding positions.
    decoder_loss_weights = tf.cast(
        tf.not_equal(targets, pad_id), dtype=tf.int32)
    return {
        'encoder_input_tokens': inputs,
        'decoder_target_tokens': targets,
        'decoder_input_tokens': decoder_input_tokens,
        'encoder_segment_ids': encoder_segment_ids,
        'encoder_positions': encoder_positions,
        'decoder_segment_ids': decoder_segment_ids,
        'decoder_positions': decoder_positions,
        'decoder_loss_weights': decoder_loss_weights,
    }

  # Note that the batch requires the lengths to be the same.
  return pack_examples(ds.batch(2))
def pack_prefix_lm_decoder_only(ds,
                                sequence_length,
                                loss_on_targets_only=True,
                                pad_id=0):
  """Randomly split the tokens for the prefix LM objective.

  For each example a random split point is drawn; positions up to and
  including the split form the bidirectionally-visible "prefix"
  (`decoder_causal_attention` = 1) and, when `loss_on_targets_only` is True,
  only positions at or after the split contribute to the loss.

  Args:
    ds: a tf.data.Dataset with a 'targets' feature of length `packed_length`.
    sequence_length: dict of feature name -> length; all values must equal
      the same even `packed_length`.
    loss_on_targets_only: if True, restrict the loss to the suffix
      (positions >= the random split point).
    pad_id: int token id treated as padding; such positions get loss weight 0.

  Returns:
    a tf.data.Dataset of decoder-only prefix-LM examples.
  """
  packed_length = next(iter(sequence_length.values()))
  assert packed_length % 2 == 0
  assert all(l == packed_length for l in sequence_length.values())

  @seqio.utils.map_over_dataset(num_seeds=1)
  def pack_examples(example, seed):
    # Boundary between visible prefix and predicted suffix, uniform in
    # [1, packed_length).
    split_point = tf.random.stateless_uniform((),
                                              minval=1,
                                              maxval=packed_length,
                                              seed=seed,
                                              dtype=tf.int32)
    decoder_target_tokens = example['targets']
    decoder_input_tokens = seqio.utils.make_autoregressive_inputs(
        decoder_target_tokens)
    if loss_on_targets_only:
      # Loss only on the suffix portion.
      decoder_loss_weights = tf.cast(
          tf.range(packed_length) >= split_point, tf.int32)
    else:
      decoder_loss_weights = tf.ones((packed_length,), dtype=tf.int32)
    # Zero out loss on padding regardless of the split.
    padding_mask = tf.cast(
        tf.not_equal(decoder_target_tokens, pad_id), dtype=tf.int32)
    decoder_loss_weights *= padding_mask
    # Positions <= split_point may attend non-causally (prefix region).
    decoder_causal_attention = tf.cast(
        tf.range(packed_length) <= split_point, tf.int32)
    return {
        'decoder_target_tokens': decoder_target_tokens,
        'decoder_input_tokens': decoder_input_tokens,
        'decoder_loss_weights': decoder_loss_weights,
        'decoder_causal_attention': decoder_causal_attention,
    }

  return pack_examples(ds)
| codeparrot/github-code-clean |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A class to store named variables and a scope operator to manage sharing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections as collections_lib
import copy
import enum  # pylint: disable=g-bad-import-order
import functools
import sys
import threading
import traceback

# The container ABCs moved to `collections.abc` in Python 3.3 and the old
# `collections.*` aliases were removed in Python 3.10; fall back for Python 2.
try:
  import collections.abc as collections_abc  # pylint: disable=g-import-not-at-top
except ImportError:  # Python 2
  collections_abc = collections_lib

import six
from six import iteritems
from six.moves import xrange  # pylint: disable=redefined-builtin

from tensorflow.python import tf2
from tensorflow.python.eager import context
from tensorflow.python.eager import monitoring
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import deprecation
from tensorflow.python.util import function_utils
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import tf_export
# Public surface of this module (tf.compat.v1 variable scoping utilities).
__all__ = [
    "AUTO_REUSE", "VariableScope", "get_variable_scope", "get_variable",
    "get_local_variable", "variable_scope", "variable_op_scope",
    "no_regularizer", "VariableSynchronization", "VariableAggregation"
]

# Records whether resource variables were enabled via
# enable_resource_variables(), for API usage monitoring.
_api_usage_gauge = monitoring.BoolGauge(
    "/tensorflow/api/resource_variables",
    "Whether variable_scope.enable_resource_variables() is called.")
class _PartitionInfo(object):
  """Holds partition info used by initializer functions.

  Records the full (unpartitioned) shape of a variable and the offset of one
  partition within it, and answers questions about which single dimension is
  partitioned.
  """

  def __init__(self, full_shape, var_offset):
    """Constructor.

    Args:
      full_shape: Tuple or list of `int` indicating the full combined shape of
        the partitioned variables.
      var_offset: Tuple or list of `int` specifying offset of this partition
        with respect to the full variable for each dimension.

    Raises:
      TypeError: If `full_shape` or `var_offset` is not a sequence.
      ValueError: If `full_shape` or `var_offset` differ in length. If
        `var_offset` exceeds `full_shape` in any dimension.
    """
    # `collections_abc.Sequence` (collections.abc on Python 3): the old
    # `collections.Sequence` alias was removed in Python 3.10. Strings are
    # sequences too, hence the explicit exclusion.
    if not isinstance(full_shape, collections_abc.Sequence) or isinstance(
        full_shape, six.string_types):
      raise TypeError(
          "`full_shape` must be a sequence (like tuple or list) instead of " +
          type(full_shape).__name__)

    if not isinstance(var_offset, collections_abc.Sequence) or isinstance(
        var_offset, six.string_types):
      raise TypeError(
          "`var_offset` must be a sequence (like tuple or list) instead of " +
          type(var_offset).__name__)

    if len(var_offset) != len(full_shape):
      raise ValueError(
          "Expected equal length, but `var_offset` is of length {} while "
          "full_shape is of length {}.".format(
              len(var_offset), len(full_shape)))

    # Every per-dimension offset must lie strictly inside the full shape.
    for i in xrange(len(full_shape)):
      offset = var_offset[i]
      shape = full_shape[i]
      if offset < 0 or offset >= shape:
        raise ValueError(
            "Expected 0 <= offset < shape but found offset={}, shape={} for "
            "var_offset={}, full_shape={}".format(offset, shape, var_offset,
                                                  full_shape))

    self._full_shape = full_shape
    self._var_offset = var_offset

  @property
  def full_shape(self):
    return self._full_shape

  @property
  def var_offset(self):
    return self._var_offset

  def single_offset(self, shape):
    """Returns the offset when the variable is partitioned in at most one dim.

    Args:
      shape: Tuple or list of `int` indicating the shape of one specific
        variable partition.

    Returns:
      `int` representing the offset in the dimension along which the variable is
       partitioned. Returns 0 if the variable is not being partitioned.

    Raises:
      ValueError: Depending on self.single_slice_dim().
    """

    single_slice_dim = self.single_slice_dim(shape)
    # If this variable is not being partitioned at all, single_slice_dim() could
    # return None.
    if single_slice_dim is None:
      return 0
    return self.var_offset[single_slice_dim]

  def single_slice_dim(self, shape):
    """Returns the slice dim when the variable is partitioned only in one dim.

    Args:
      shape: Tuple or list of `int` indicating the shape of one specific
        variable partition.

    Returns:
      `int` representing the dimension that the variable is partitioned in, or
      `None` if the variable doesn't seem to be partitioned at all.

    Raises:
      TypeError: If `shape` is not a sequence.
      ValueError: If `shape` is not the same length as `self.full_shape`. If
        the variable is partitioned in more than one dimension.
    """
    # See __init__: collections.abc-based check for Python 3.10 compatibility.
    if not isinstance(shape, collections_abc.Sequence) or isinstance(
        shape, six.string_types):
      raise TypeError(
          "`shape` must be a sequence (like tuple or list) instead of " +
          type(shape).__name__)

    if len(shape) != len(self.full_shape):
      raise ValueError(
          "Expected equal length, but received shape={} of length {} while "
          "self.full_shape={} is of length {}.".format(shape, len(shape),
                                                       self.full_shape,
                                                       len(self.full_shape)))

    # The partition must fit inside the full variable in every dimension.
    for i in xrange(len(shape)):
      if self.var_offset[i] + shape[i] > self.full_shape[i]:
        raise ValueError(
            "With self.var_offset={}, a partition of shape={} would exceed "
            "self.full_shape={} in dimension {}.".format(
                self.var_offset, shape, self.full_shape, i))

    # A dimension is sliced iff the partition is narrower than the full
    # variable there; more than one such dimension is an error.
    slice_dim = None
    for i in xrange(len(shape)):
      if shape[i] == self.full_shape[i]:
        continue
      if slice_dim is not None:
        raise ValueError(
            "Cannot use single_slice_dim() with shape={} and "
            "self.full_shape={} since slice dim could be either dimension {} "
            "or {}.".format(shape, self.full_shape, i, slice_dim))
      slice_dim = i
    return slice_dim
class _ReuseMode(enum.Enum):
  """Mode for variable access within a variable scope.

  Used as the sentinel value behind the public `AUTO_REUSE` constant; other
  reuse modes are currently expressed via the legacy True/False/None API.
  """

  # Indicates that variables are to be fetched if they already exist or
  # otherwise created.
  AUTO_REUSE = 1

  # TODO(alive): For TensorFlow 2.0, Deprecate True/False/None API in favor of
  # enum values.
  #   REUSE_FALSE = 2
  #   REUSE_TRUE = 3
# TODO(apassos) remove these forwarding symbols.
VariableSynchronization = variables.VariableSynchronization  # pylint: disable=invalid-name
VariableAggregation = variables.VariableAggregation  # pylint: disable=invalid-name

# Public alias so users can pass `reuse=tf.compat.v1.AUTO_REUSE`.
AUTO_REUSE = _ReuseMode.AUTO_REUSE
tf_export(v1=["AUTO_REUSE"]).export_constant(__name__, "AUTO_REUSE")
AUTO_REUSE.__doc__ = """
When passed in as the value for the `reuse` flag, AUTO_REUSE indicates that
get_variable() should create the requested variable if it doesn't exist or, if
it does exist, simply return it.
"""

# Module-level default for `use_resource`: on iff TF2 behavior is enabled at
# import time; mutated by enable/disable_resource_variables() below.
_DEFAULT_USE_RESOURCE = tf2.enabled()
@tf_export(v1=["enable_resource_variables"])
def enable_resource_variables():
"""Creates resource variables by default.
Resource variables are improved versions of TensorFlow variables with a
well-defined memory model. Accessing a resource variable reads its value, and
all ops which access a specific read value of the variable are guaranteed to
see the same value for that tensor. Writes which happen after a read (by
having a control or data dependency on the read) are guaranteed not to affect
the value of the read tensor, and similarly writes which happen before a read
are guaranteed to affect the value. No guarantees are made about unordered
read/write pairs.
Calling tf.enable_resource_variables() lets you opt-in to this TensorFlow 2.0
feature.
"""
global _DEFAULT_USE_RESOURCE
_DEFAULT_USE_RESOURCE = True
_api_usage_gauge.get_cell().set(True)
@tf_export(v1=["resource_variables_enabled"])
def resource_variables_enabled():
"""Returns `True` if resource variables are enabled.
Resource variables are improved versions of TensorFlow variables with a
well-defined memory model. Accessing a resource variable reads its value, and
all ops which access a specific read value of the variable are guaranteed to
see the same value for that tensor. Writes which happen after a read (by
having a control or data dependency on the read) are guaranteed not to affect
the value of the read tensor, and similarly writes which happen before a read
are guaranteed to affect the value. No guarantees are made about unordered
read/write pairs.
Calling tf.enable_resource_variables() lets you opt-in to this TensorFlow 2.0
feature.
"""
global _DEFAULT_USE_RESOURCE
return _DEFAULT_USE_RESOURCE
@deprecation.deprecated(
    None, "non-resource variables are not supported in the long term")
@tf_export(v1=["disable_resource_variables"])
def disable_resource_variables():
  """Opts out of resource variables.

  If your code needs tf.disable_resource_variables() to be called to work
  properly please file a bug.
  """
  # Revert the module-level default consulted by get_variable() and friends.
  global _DEFAULT_USE_RESOURCE
  _DEFAULT_USE_RESOURCE = False
  # Record the opt-out for API usage monitoring.
  _api_usage_gauge.get_cell().set(False)
class _VariableStore(object):
"""Variable store that carries a number of named Variables.
New variable names and new variables can be created; all stored
variables are initialized with the initializer passed to __init__.
Attributes:
vars: a dictionary with string names (same as passed in GetVar) as keys and
the corresponding TensorFlow Variables as values.
"""
  def __init__(self):
    """Create a variable store."""
    self._vars = {}  # A dictionary of the stored TensorFlow variables.
    self._partitioned_vars = {}  # A dict of the stored PartitionedVariables.
    # When True (set by EagerVariableStore), variables created under eager
    # execution are also recorded in the dicts above so they can be reused.
    self._store_eager_variables = False
  def get_variable(self,
                   name,
                   shape=None,
                   dtype=dtypes.float32,
                   initializer=None,
                   regularizer=None,
                   reuse=None,
                   trainable=None,
                   collections=None,
                   caching_device=None,
                   partitioner=None,
                   validate_shape=True,
                   use_resource=None,
                   custom_getter=None,
                   constraint=None,
                   synchronization=VariableSynchronization.AUTO,
                   aggregation=VariableAggregation.NONE):
    """Gets an existing variable with these parameters or create a new one.

    If a variable with the given name is already stored, we return the stored
    variable. Otherwise, we create a new one.

    Set `reuse` to `True` when you only want to reuse existing Variables.
    Set `reuse` to `False` when you only want to create new Variables.
    Set `reuse` to None (the default) or tf.compat.v1.AUTO_REUSE when you want
    variables to be created if they don't exist or returned if they do.

    If initializer is `None` (the default), the default initializer passed in
    the constructor is used. If that one is `None` too, we use a new
    `glorot_uniform_initializer`. If initializer is a Tensor, we use
    it as a value and derive the shape from the initializer.

    If a partitioner is provided, a `PartitionedVariable` is returned.
    Accessing this object as a `Tensor` returns the shards concatenated along
    the partition axis.

    Some useful partitioners are available. See, e.g.,
    `variable_axis_size_partitioner` and `min_max_variable_partitioner`.

    Args:
      name: The name of the new or existing variable.
      shape: Shape of the new or existing variable.
      dtype: Type of the new or existing variable (defaults to `DT_FLOAT`).
      initializer: Initializer for the variable.
      regularizer: A (Tensor -> Tensor or None) function; the result of applying
        it on a newly created variable will be added to the collection
        GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
      reuse: a Boolean, None, or tf.AUTO_REUSE. Controls reuse or creation of
        variables. When eager execution is enabled this argument is always
        forced to be False.
      trainable: If `True` also add the variable to the graph collection
        `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). `trainable`
        defaults to `True` unless `synchronization` is set to `ON_READ`.
      collections: List of graph collections keys to add the `Variable` to.
        Defaults to `[GraphKeys.GLOBAL_VARIABLES]` (see `tf.Variable`).
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading. Defaults to the Variable's
        device. If not `None`, caches on another device. Typical use is to
        cache on the device where the Ops using the `Variable` reside, to
        deduplicate copying through `Switch` and other conditional statements.
      partitioner: Optional callable that accepts a fully defined `TensorShape`
        and dtype of the `Variable` to be created, and returns a list of
        partitions for each axis (currently only one axis can be partitioned).
      validate_shape: If False, allows the variable to be initialized with a
        value of unknown shape. If True, the default, the shape of initial_value
        must be known.
      use_resource: If False, creates a regular Variable. If True, creates
        instead an experimental ResourceVariable which has well-defined
        semantics. Defaults to False (will later change to True). When eager
        execution is enabled this argument is always forced to be true.
      custom_getter: Callable that takes as a first argument the true getter,
        and allows overwriting the internal get_variable method. The signature
        of `custom_getter` should match that of this method,
        but the most future-proof version will allow for changes: `def
        custom_getter(getter, *args, **kwargs)`. Direct access to
        all `get_variable` parameters is also allowed: `def
        custom_getter(getter, name, *args, **kwargs)`. A simple identity
        custom getter that simply creates variables with modified names is:
        ```python
        def custom_getter(getter, name, *args, **kwargs): return getter(name +
        '_suffix', *args, **kwargs) ```
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      synchronization: Indicates when a distributed variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize. If `synchronization` is set to `ON_READ`, `trainable` must
        not be set to `True`.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.

    Returns:
      The created or existing `Variable` (or `PartitionedVariable`, if a
      partitioner was used).

    Raises:
      ValueError: when creating a new variable and shape is not declared,
        when reusing a variable and specifying a conflicting shape,
        or when violating reuse during variable creation.
      RuntimeError: when eager execution is enabled and not called from an
        EagerVariableStore.
    """
    if custom_getter is not None and not callable(custom_getter):
      raise ValueError("Passed a custom_getter which is not callable: %s" %
                       custom_getter)

    with ops.init_scope():
      if context.executing_eagerly():
        # Variable creation and initialization takes place in `init_scope`s;
        # as such, if an `init_scope` lifts us into the eager context, then we
        # need to use `ResourceVariable`s.
        use_resource = True

    # Note that it's fine to reuse eager variables whose initialization was
    # lifted from a function-building graph into the eager context (that's why
    # the following clause is not wrapped in an `init_scope`); lifted variables
    # are tracked by the graph's `VariableStore`.
    if context.executing_eagerly():
      if not self._store_eager_variables and reuse:
        raise RuntimeError(
            "When eager execution is enabled variable reuse is only supported"
            " when an EagerVariableStore is active. See the documentation on"
            " EagerVariableStore for example usage.")
      if self._store_eager_variables:
        reuse = AUTO_REUSE

    # If a *_ref type is passed in an error would be triggered further down the
    # stack. We prevent this using base_dtype to get a non-ref version of the
    # type, before doing anything else. When _ref types are removed in favor of
    # resources, this line can be removed.
    try:
      dtype = dtype.base_dtype
    except AttributeError:
      # .base_dtype not existing means that we will try and use the raw dtype
      # which was passed in - this might be a NumPy type which is valid.
      pass

    # This is the main logic of get_variable.  However, custom_getter
    # may override this logic.  So we save it as a callable and pass
    # it to custom_getter.
    # Note: the parameters of _true_getter, and their documentation, match
    # *exactly* item-for-item with the docstring of this method.
    def _true_getter(  # pylint: disable=missing-docstring
        name,
        shape=None,
        dtype=dtypes.float32,
        initializer=None,
        regularizer=None,
        reuse=None,
        trainable=None,
        collections=None,
        caching_device=None,
        partitioner=None,
        validate_shape=True,
        use_resource=None,
        constraint=None,
        synchronization=VariableSynchronization.AUTO,
        aggregation=VariableAggregation.NONE):
      # NOTE(review): `collections_lib.Sequence` was removed from the
      # `collections` module in Python 3.10; this should use
      # `collections.abc.Sequence` -- confirm supported Python versions.
      is_scalar = (
          shape is not None and isinstance(shape, collections_lib.Sequence) and
          not shape)
      # Partitioned variable case
      if partitioner is not None and not is_scalar:
        if not callable(partitioner):
          raise ValueError("Partitioner must be callable, but received: %s" %
                           partitioner)
        with ops.name_scope(None):
          return self._get_partitioned_variable(
              name=name,
              shape=shape,
              dtype=dtype,
              initializer=initializer,
              regularizer=regularizer,
              reuse=reuse,
              trainable=trainable,
              collections=collections,
              caching_device=caching_device,
              partitioner=partitioner,
              validate_shape=validate_shape,
              use_resource=use_resource,
              constraint=constraint,
              synchronization=synchronization,
              aggregation=aggregation)

      # Special case for partitioned variable to allow reuse without having to
      # specify partitioner.
      if (reuse is True and partitioner is None
          and name in self._partitioned_vars):
        return self._get_partitioned_variable(
            name=name,
            shape=shape,
            dtype=dtype,
            initializer=initializer,
            regularizer=regularizer,
            reuse=reuse,
            trainable=trainable,
            collections=collections,
            caching_device=caching_device,
            partitioner=None,
            validate_shape=validate_shape,
            use_resource=use_resource,
            constraint=constraint,
            synchronization=synchronization,
            aggregation=aggregation)

      # Single variable case
      if "%s/part_0" % name in self._vars:
        raise ValueError(
            "No partitioner was provided, but a partitioned version of the "
            "variable was found: %s/part_0. Perhaps a variable of the same "
            "name was already created with partitioning?" % name)

      return self._get_single_variable(
          name=name,
          shape=shape,
          dtype=dtype,
          initializer=initializer,
          regularizer=regularizer,
          reuse=reuse,
          trainable=trainable,
          collections=collections,
          caching_device=caching_device,
          validate_shape=validate_shape,
          use_resource=use_resource,
          constraint=constraint,
          synchronization=synchronization,
          aggregation=aggregation)

    # Resolve the effective trainable flag from the synchronization mode
    # before handing arguments to any getter.
    synchronization, aggregation, trainable = (
        variables.validate_synchronization_aggregation_trainable(
            synchronization, aggregation, trainable, name))

    if custom_getter is not None:
      # Handle backwards compatibility with getter arguments that were added
      # to the API after users started writing custom getters.
      custom_getter_kwargs = {
          "getter": _true_getter,
          "name": name,
          "shape": shape,
          "dtype": dtype,
          "initializer": initializer,
          "regularizer": regularizer,
          "reuse": reuse,
          "trainable": trainable,
          "collections": collections,
          "caching_device": caching_device,
          "partitioner": partitioner,
          "validate_shape": validate_shape,
          "use_resource": use_resource,
          "synchronization": synchronization,
          "aggregation": aggregation,
      }
      # `fn_args` and `has_kwargs` can handle functions, `functools.partial`,
      # `lambda`.
      if ("constraint" in function_utils.fn_args(custom_getter) or
          function_utils.has_kwargs(custom_getter)):
        custom_getter_kwargs["constraint"] = constraint
      return custom_getter(**custom_getter_kwargs)
    else:
      return _true_getter(
          name,
          shape=shape,
          dtype=dtype,
          initializer=initializer,
          regularizer=regularizer,
          reuse=reuse,
          trainable=trainable,
          collections=collections,
          caching_device=caching_device,
          partitioner=partitioner,
          validate_shape=validate_shape,
          use_resource=use_resource,
          constraint=constraint,
          synchronization=synchronization,
          aggregation=aggregation)
  def _get_partitioned_variable(self,
                                name,
                                partitioner,
                                shape=None,
                                dtype=dtypes.float32,
                                initializer=None,
                                regularizer=None,
                                reuse=None,
                                trainable=None,
                                collections=None,
                                caching_device=None,
                                validate_shape=True,
                                use_resource=None,
                                constraint=None,
                                synchronization=VariableSynchronization.AUTO,
                                aggregation=VariableAggregation.NONE):
    """Gets or creates a sharded variable list with these parameters.

    The `partitioner` must be a callable that accepts a fully defined
    `TensorShape` and returns a sequence of integers (the `partitions`).
    These integers describe how to partition the given sharded `Variable`
    along the given dimension. That is, `partitions[1] = 3` means split
    the `Variable` into 3 shards along dimension 1. Currently, sharding along
    only one axis is supported.

    If the list of variables with the given name (prefix) is already stored,
    we return the stored variables. Otherwise, we create a new one.

    Set `reuse` to `True` when you only want to reuse existing Variables.
    Set `reuse` to `False` when you only want to create new Variables.
    Set `reuse` to None (the default) or tf.compat.v1.AUTO_REUSE when you want
    variables to be created if they don't exist or returned if they do.

    If initializer is `None` (the default), the default initializer passed in
    the constructor is used. If that one is `None` too, we use a new
    `glorot_uniform_initializer`. If initializer is a Tensor, we use
    it as a value and derive the shape from the initializer.

    If the initializer is a callable, then it will be called for each
    shard. Otherwise the initializer should match the shape of the entire
    sharded Variable, and it will be sliced accordingly for each shard.

    Some useful partitioners are available. See, e.g.,
    `variable_axis_size_partitioner` and `min_max_variable_partitioner`.

    Args:
      name: the name of the new or existing sharded variable.
      partitioner: Optional callable that accepts a fully defined `TensorShape`
        and `dtype` of the Variable to be created, and returns a list of
        partitions for each axis (currently only one axis can be partitioned).
      shape: shape of the new or existing sharded variable.
      dtype: type of the new or existing sharded variable (defaults to
        `DT_FLOAT`).
      initializer: initializer for the sharded variable.
      regularizer: a (Tensor -> Tensor or None) function; the result of applying
        it on a newly created variable will be added to the collection
        GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
      reuse: a Boolean, None, or tf.AUTO_REUSE. Controls reuse or creation of
        variables.
      trainable: If `True` also add the variable to the graph collection
        `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
      collections: List of graph collections keys to add the Variable to.
        Defaults to `[GraphKeys.GLOBAL_VARIABLES]` (see `tf.Variable`).
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading. Defaults to the Variable's
        device. If not `None`, caches on another device. Typical use is to
        cache on the device where the Ops using the Variable reside, to
        deduplicate copying through `Switch` and other conditional statements.
      validate_shape: If False, allows the variable to be initialized with a
        value of unknown shape. If True, the default, the shape of initial_value
        must be known.
      use_resource: If False, creates a regular Variable. If True, creates an
        experimental ResourceVariable which has well-defined semantics. Defaults
        to False (will later change to True).
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      synchronization: Indicates when a distributed variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize. If `synchronization` is set to `ON_READ`, `trainable` must
        not be set to `True`.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.

    Returns:
      A `PartitionedVariable` object.

    Raises:
      ValueError: when creating a new variable and shape is not declared,
        when reusing a variable and specifying a conflicting shape,
        when violating reuse during variable creation, or if an existing
        sharded variable exists for the given name but with different sharding.
    """
    initializing_from_value = initializer is not None and isinstance(
        initializer, ops.Tensor)

    if name in self._vars:
      raise ValueError(
          "A partitioner was provided, but an unpartitioned version of the "
          "variable was found: %s. Perhaps a variable of the same name was "
          "already created without partitioning?" % name)

    shape = tensor_shape.as_shape(shape)
    if initializing_from_value:
      # Derive any missing shape information from the initializing Tensor.
      shape = shape.merge_with(initializer.get_shape())

    # Only compute a partitioning when creating (or when a partitioner is
    # explicitly supplied); pure reuse looks the variable up by name below.
    partitions = None
    if not reuse or partitioner:
      partitions = _call_partitioner(partitioner, shape, dtype)

    if name in self._partitioned_vars:
      if reuse is False:
        raise ValueError(
            "Partitioned variable with name %s already exists. Did you mean to "
            "set reuse=True or reuse=tf.AUTO_REUSE in VarScope?" % name)

      existing_var = self._partitioned_vars[name]
      if not shape.is_compatible_with(existing_var.get_shape()):
        raise ValueError(
            "Trying to reuse partitioned variable %s, but specified shape %s "
            "and found shape %s." % (name, shape, existing_var.get_shape()))
      if not dtype.is_compatible_with(existing_var.dtype):
        raise ValueError(
            "Trying to reuse partitioned variable %s, but specified dtype %s "
            "and found dtype %s." % (name, dtype.name, existing_var.dtype.name))

      # pylint: disable=protected-access
      if (partitions is not None and
          existing_var._get_partitions() != partitions):
        raise ValueError(
            "Trying to reuse partitioned variable %s, but specified partitions "
            "%s and found partitions %s." %
            (name, partitions, existing_var._get_partitions()))
      # pylint: enable=protected-access

      return existing_var

    if reuse is True:
      raise ValueError("PartitionedVariable %s does not exist, or was not "
                       "created with tf.get_variable(). Did you mean to set "
                       "reuse=False or reuse=tf.AUTO_REUSE in VarScope?" % name)

    slice_dim, num_slices = _get_slice_dim_and_num_slices(partitions)

    # Guard against a partitioner that disagrees with previously created
    # shards of the same variable name.
    if "%s/part_0" % name in self._vars:
      if "%s/part_%d" % (name, num_slices - 1) not in self._vars:
        raise ValueError(
            "Partitioner returned a different partitioning than what was "
            "already found. Partitioner returned %d shards, and shard "
            "%s/part_0 was found, but %s/part_%d was not." %
            (num_slices, name, name, num_slices - 1))
      if "%s/part_%d" % (name, num_slices) in self._vars:
        raise ValueError(
            "Partitioner returned a different partitioning than what was "
            "already found. Partitioner returned %d shards, and shard "
            "%s/part_0 was found, but so was the extra shard %s/part_%d." %
            (num_slices, name, name, num_slices))

    vs = []
    for i, (var_offset, var_shape) in enumerate(
        _iter_slices(shape.as_list(), num_slices, slice_dim)):
      partition_info = _PartitionInfo(
          full_shape=shape.as_list(), var_offset=var_offset)
      var_full_name = "%s/part_%d" % (name, i)
      with ops.name_scope(var_full_name + "/PartitionedInitializer"):
        # Create the tensor to initialize the variable with default value.
        if initializer is None:
          init, initializing_from_value = self._get_default_initializer(
              name=name, shape=shape, dtype=dtype)
          if initializing_from_value:
            init_shape = None
          else:
            init_shape = var_shape
        elif callable(initializer):
          # Callable initializers are invoked per-shard with the shard shape.
          init = initializer
          init_shape = var_shape
        elif isinstance(initializer, ops.Tensor):
          # Tensor initializers cover the full variable; slice out this shard.
          init = array_ops.slice(initializer, var_offset, var_shape)
          # Use the dtype of the given tensor.
          dtype = init.dtype.base_dtype
          init_shape = None
        else:
          init = ops.convert_to_tensor(initializer, dtype=dtype)
          init = array_ops.slice(init, var_offset, var_shape)
          init_shape = None

      with ops.name_scope(None):
        var = self._get_single_variable(
            name=var_full_name,
            shape=init_shape,
            dtype=dtype,
            initializer=init,
            partition_info=partition_info,
            regularizer=regularizer,
            reuse=reuse,
            trainable=trainable,
            collections=collections,
            caching_device=caching_device,
            validate_shape=validate_shape,
            use_resource=use_resource,
            constraint=constraint,
            synchronization=synchronization,
            aggregation=aggregation)

      # pylint: disable=protected-access
      var._set_save_slice_info(
          variables.Variable.SaveSliceInfo(name, shape.as_list(), var_offset,
                                           var_shape))
      vs.append(var)
      # pylint: enable=protected-access

    partitioned_var = variables.PartitionedVariable(
        name=name,
        shape=shape,
        dtype=dtype,
        variable_list=vs,
        partitions=partitions)
    if not context.executing_eagerly() or self._store_eager_variables:
      self._partitioned_vars[name] = partitioned_var
    return partitioned_var
def _get_single_variable(self,
                         name,
                         shape=None,
                         dtype=dtypes.float32,
                         initializer=None,
                         regularizer=None,
                         partition_info=None,
                         reuse=None,
                         trainable=None,
                         collections=None,
                         caching_device=None,
                         validate_shape=True,
                         use_resource=None,
                         constraint=None,
                         synchronization=VariableSynchronization.AUTO,
                         aggregation=VariableAggregation.NONE):
  """Get or create a single Variable (e.g. a shard or entire variable).

  See the documentation of get_variable above (ignore partitioning components)
  for details.

  Args:
    name: see get_variable.
    shape: see get_variable.
    dtype: see get_variable.
    initializer: see get_variable.
    regularizer: see get_variable.
    partition_info: _PartitionInfo object.
    reuse: see get_variable.
    trainable: see get_variable.
    collections: see get_variable.
    caching_device: see get_variable.
    validate_shape: see get_variable.
    use_resource: see get_variable.
    constraint: see get_variable.
    synchronization: see get_variable.
    aggregation: see get_variable.

  Returns:
    A Variable. See documentation of get_variable above.

  Raises:
    ValueError: See documentation of get_variable above.
  """
  # Set to true if initializer is a constant (i.e. a concrete value rather
  # than a callable initializer object/function).
  initializing_from_value = False
  if initializer is not None and not callable(initializer):
    initializing_from_value = True
  # A constant initializer already carries its own shape, so an explicit
  # `shape` would be redundant/contradictory.
  if shape is not None and initializing_from_value:
    raise ValueError("If initializer is a constant, do not specify shape.")
  dtype = dtypes.as_dtype(dtype)
  shape = tensor_shape.as_shape(shape)
  if name in self._vars:
    # Here we handle the case when returning an existing variable.
    if reuse is False:
      # The variable exists but the caller asked for a fresh one: build an
      # error message that includes where the variable was first defined.
      var = self._vars[name]
      err_msg = ("Variable %s already exists, disallowed."
                 " Did you mean to set reuse=True or "
                 "reuse=tf.AUTO_REUSE in VarScope?" % name)
      # ResourceVariables don't have an op associated with so no traceback
      if isinstance(var, resource_variable_ops.ResourceVariable):
        raise ValueError(err_msg)
      tb = var.op.traceback[::-1]
      # Throw away internal tf entries and only take a few lines. In some
      # cases the traceback can be longer (e.g. if someone uses factory
      # functions to create variables) so we take more than needed in the
      # default case.
      tb = [x for x in tb if "tensorflow/python" not in x[0]][:5]
      raise ValueError("%s Originally defined at:\n\n%s" %
                       (err_msg, "".join(traceback.format_list(tb))))
    found_var = self._vars[name]
    # When sharing, the requested shape/dtype must be compatible with what
    # was originally created.
    if not shape.is_compatible_with(found_var.get_shape()):
      raise ValueError("Trying to share variable %s, but specified shape %s"
                       " and found shape %s." %
                       (name, shape, found_var.get_shape()))
    if not dtype.is_compatible_with(found_var.dtype):
      dtype_str = dtype.name
      found_type_str = found_var.dtype.name
      raise ValueError("Trying to share variable %s, but specified dtype %s"
                       " and found dtype %s." %
                       (name, dtype_str, found_type_str))
    return found_var

  # The code below handles only the case of creating a new variable.
  if reuse is True:
    raise ValueError("Variable %s does not exist, or was not created with "
                     "tf.get_variable(). Did you mean to set "
                     "reuse=tf.AUTO_REUSE in VarScope?" % name)

  # Create the tensor to initialize the variable with default value.
  if initializer is None:
    initializer, initializing_from_value = self._get_default_initializer(
        name=name, shape=shape, dtype=dtype)
  # Enter an init scope when creating the initializer.
  with ops.init_scope():
    if initializing_from_value:
      # Constant value: hand it to VariableV1 directly and let it infer the
      # dtype (variable_dtype stays None).
      init_val = initializer
      variable_dtype = None
    else:
      # Instantiate initializer if provided initializer is a type object.
      if tf_inspect.isclass(initializer):
        initializer = initializer()
      if shape is not None and shape.is_fully_defined():
        # Defer calling the initializer until VariableV1 needs the value.
        init_val = lambda: initializer(  # pylint: disable=g-long-lambda
            shape.as_list(),
            dtype=dtype,
            partition_info=partition_info)
        variable_dtype = dtype.base_dtype
      elif len(tf_inspect.getargspec(initializer).args) == len(
          tf_inspect.getargspec(initializer).defaults or []):
        # No shape given, but the callable takes no required arguments, so
        # it can be invoked as-is to produce the initial value.
        init_val = initializer
        variable_dtype = None
      else:
        raise ValueError("The initializer passed is not valid. It should "
                         "be a callable with no arguments and the "
                         "shape should not be provided or an instance of "
                         "`tf.keras.initializers.*' and `shape` should be "
                         "fully defined.")

  # Create the variable.
  if use_resource is None:
    # Set the default value if unspecified.
    use_resource = _DEFAULT_USE_RESOURCE
  v = variables.VariableV1(
      initial_value=init_val,
      name=name,
      trainable=trainable,
      collections=collections,
      caching_device=caching_device,
      dtype=variable_dtype,
      validate_shape=validate_shape,
      constraint=constraint,
      use_resource=use_resource,
      synchronization=synchronization,
      aggregation=aggregation)
  if context.executing_eagerly() and self._store_eager_variables:
    # Eager variables are not auto-added to collections; mirror the graph
    # behavior here so that `EagerVariableStore` users can find them.
    if collections:
      ops.add_to_collections(collections, v)
    else:
      ops.add_to_collection(ops.GraphKeys.GLOBAL_VARIABLES, v)
    if trainable:
      ops.add_to_collection(ops.GraphKeys.TRAINABLE_VARIABLES, v)
  if not context.executing_eagerly() or self._store_eager_variables:
    # In eager mode we do not want to keep default references to Variable
    # objects as this will prevent their memory from being released.
    self._vars[name] = v
  logging.vlog(1, "Created variable %s with shape %s and init %s", v.name,
               format(shape), initializer)

  # Run the regularizer if requested and save the resulting loss.
  if regularizer:
    with ops.colocate_with(v):
      with ops.name_scope(name + "/Regularizer/"):
        with ops.init_scope():
          loss = regularizer(v)
        if loss is not None:
          if context.executing_eagerly():
            # Eager tensors/variables have no stable graph names to log.
            v_name = "v_%s" % type(v)
            loss_name = "loss_%s" % type(loss)
          else:
            v_name = v.name
            loss_name = loss.name
          logging.vlog(
              1, "Applied regularizer to %s and added the result %s "
              "to REGULARIZATION_LOSSES.", v_name, loss_name)
          ops.add_to_collection(ops.GraphKeys.REGULARIZATION_LOSSES, loss)
  return v
# Initialize variable when no initializer provided
def _get_default_initializer(self, name, shape=None, dtype=dtypes.float32):
  """Provide a default initializer and a corresponding value.

  Args:
    name: see get_variable.
    shape: see get_variable.
    dtype: see get_variable.

  Returns:
    initializer and initializing_from_value. See get_variable above.

  Raises:
    ValueError: When giving unsupported dtype.
  """
  del shape  # Unused: the defaults chosen here do not depend on the shape.
  # Floating-point dtypes default to Glorot (uniform unit scaling).
  if dtype.is_floating:
    return init_ops.glorot_uniform_initializer(), False
  # Integer/unsigned dtypes default to zero, bool to `False`, and string to
  # the empty string, all via the zeros initializer.
  if (dtype.is_integer or dtype.is_unsigned or dtype.is_bool or
      dtype == dtypes.string):
    return init_ops.zeros_initializer(), False
  # NOTES:Do we need to support for handling DT_STRING and DT_COMPLEX here?
  raise ValueError("An initializer for variable %s of %s is required" %
                   (name, dtype.base_dtype))
# To stop regularization, use this regularizer
@tf_export(v1=["no_regularizer"])
def no_regularizer(_):
  """Use this function to prevent regularization of variables."""
  # Returning None signals "no regularization loss" to the variable store.
  return None
# TODO(alive): support caching devices and partitioned variables in Eager mode.
@tf_export(v1=["VariableScope"])
class VariableScope(object):
  """Variable scope object to carry defaults to provide to `get_variable`.

  Many of the arguments we need for `get_variable` in a variable store are most
  easily handled with a context. This object is used for the defaults.

  Attributes:
    name: name of the current scope, used as prefix in get_variable.
    initializer: default initializer passed to get_variable.
    regularizer: default regularizer passed to get_variable.
    reuse: Boolean, None, or tf.compat.v1.AUTO_REUSE, setting the reuse in
      get_variable. When eager execution is enabled this argument is always
      forced to be False.
    caching_device: string, callable, or None: the caching device passed to
      get_variable.
    partitioner: callable or `None`: the partitioner passed to `get_variable`.
    custom_getter: default custom getter passed to get_variable.
    name_scope: The name passed to `tf.name_scope`.
    dtype: default type passed to get_variable (defaults to DT_FLOAT).
    use_resource: if False, create a normal Variable; if True create an
      experimental ResourceVariable with well-defined semantics. Defaults to
      False (will later change to True). When eager execution is enabled this
      argument is always forced to be True.
    constraint: An optional projection function to be applied to the variable
      after being updated by an `Optimizer` (e.g. used to implement norm
      constraints or value constraints for layer weights). The function must
      take as input the unprojected Tensor representing the value of the
      variable and return the Tensor for the projected value (which must have
      the same shape). Constraints are not safe to use when doing asynchronous
      distributed training.
  """

  def __init__(self,
               reuse,
               name="",
               initializer=None,
               regularizer=None,
               caching_device=None,
               partitioner=None,
               custom_getter=None,
               name_scope="",
               dtype=dtypes.float32,
               use_resource=None,
               constraint=None):
    """Creates a new VariableScope with the given properties."""
    self._name = name
    self._initializer = initializer
    self._regularizer = regularizer
    self._reuse = reuse
    self._caching_device = caching_device
    self._partitioner = partitioner
    self._custom_getter = custom_getter
    self._name_scope = name_scope
    self._dtype = dtype
    self._use_resource = use_resource
    self._constraint = constraint
    if context.executing_eagerly():
      if self._caching_device is not None:
        raise NotImplementedError("Caching devices is not yet supported "
                                  "when eager execution is enabled.")
      # Eager execution forces auto-reuse and resource variables regardless
      # of the constructor arguments (see the class docstring).
      self._reuse = AUTO_REUSE
      self._use_resource = True

  @property
  def name(self):
    """Name of this scope, used as the prefix in `get_variable`."""
    return self._name

  @property
  def original_name_scope(self):
    """The name scope captured when this variable scope was entered."""
    return self._name_scope

  @property
  def reuse(self):
    """Reuse mode: `True`, `False`, `None`, or `AUTO_REUSE`."""
    return self._reuse

  @property
  def initializer(self):
    """Default initializer for variables created in this scope."""
    return self._initializer

  @property
  def dtype(self):
    """Default dtype for variables created in this scope."""
    return self._dtype

  @property
  def use_resource(self):
    """Whether variables in this scope default to `ResourceVariable`."""
    return self._use_resource

  @property
  def regularizer(self):
    """Default regularizer for variables created in this scope."""
    return self._regularizer

  @property
  def caching_device(self):
    """Default caching device for variables created in this scope."""
    return self._caching_device

  @property
  def partitioner(self):
    """Default partitioner for variables created in this scope."""
    return self._partitioner

  @property
  def custom_getter(self):
    """Default custom getter for variables created in this scope."""
    return self._custom_getter

  @property
  def constraint(self):
    """Default constraint function for variables created in this scope."""
    return self._constraint

  def reuse_variables(self):
    """Reuse variables in this scope."""
    self._reuse = True

  def set_initializer(self, initializer):
    """Set initializer for this scope."""
    self._initializer = initializer

  def set_dtype(self, dtype):
    """Set data type for this scope."""
    self._dtype = dtype

  def set_use_resource(self, use_resource):
    """Sets whether to use ResourceVariables for this scope."""
    # Eager execution only supports resource variables, so disabling them
    # here would produce unusable variables later.
    if context.executing_eagerly() and not use_resource:
      raise ValueError("When eager execution is enabled, "
                       "use_resource cannot be set to false.")
    self._use_resource = use_resource

  def set_regularizer(self, regularizer):
    """Set regularizer for this scope."""
    self._regularizer = regularizer

  def set_caching_device(self, caching_device):
    """Set caching_device for this scope."""
    if context.executing_eagerly():
      raise NotImplementedError("Caching devices are not yet supported "
                                "when eager execution is enabled.")
    self._caching_device = caching_device

  def set_partitioner(self, partitioner):
    """Set partitioner for this scope."""
    self._partitioner = partitioner

  def set_custom_getter(self, custom_getter):
    """Set custom getter for this scope."""
    self._custom_getter = custom_getter

  def get_collection(self, name):
    """Get this scope's variables."""
    # Query the collection restricted to names under this scope's prefix.
    scope = self._name + "/" if self._name else ""
    return ops.get_collection(name, scope)

  def trainable_variables(self):
    """Get this scope's trainable variables."""
    return self.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)

  def global_variables(self):
    """Get this scope's global variables."""
    return self.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)

  def local_variables(self):
    """Get this scope's local variables."""
    return self.get_collection(ops.GraphKeys.LOCAL_VARIABLES)

  def get_variable(self,
                   var_store,
                   name,
                   shape=None,
                   dtype=None,
                   initializer=None,
                   regularizer=None,
                   reuse=None,
                   trainable=None,
                   collections=None,
                   caching_device=None,
                   partitioner=None,
                   validate_shape=True,
                   use_resource=None,
                   custom_getter=None,
                   constraint=None,
                   synchronization=VariableSynchronization.AUTO,
                   aggregation=VariableAggregation.NONE):
    """Gets an existing variable with this name or create a new one."""
    # Fall back to this scope's defaults for any argument left unspecified.
    if regularizer is None:
      regularizer = self._regularizer
    if caching_device is None:
      caching_device = self._caching_device
    if partitioner is None:
      partitioner = self._partitioner
    if custom_getter is None:
      custom_getter = self._custom_getter
    if context.executing_eagerly():
      # Eager mode forces these regardless of scope defaults.
      reuse = False
      use_resource = True
    else:
      if reuse is None:
        reuse = self._reuse
      if use_resource is None:
        use_resource = self._use_resource

    full_name = self.name + "/" + name if self.name else name
    # Variable names only depend on variable_scope (full_name here),
    # not name_scope, so we reset it below for the time of variable creation.
    with ops.name_scope(None):
      # Check that `initializer` dtype and `dtype` are consistent before
      # replacing them with defaults.
      if (dtype is not None and initializer is not None and
          not callable(initializer)):
        init_dtype = ops.convert_to_tensor(initializer).dtype.base_dtype
        if init_dtype != dtype:
          raise ValueError("Initializer type '%s' and explicit dtype '%s' "
                           "don't match." % (init_dtype, dtype))
      if initializer is None:
        initializer = self._initializer
      if constraint is None:
        constraint = self._constraint
      if dtype is None:
        dtype = self._dtype
      return var_store.get_variable(
          full_name,
          shape=shape,
          dtype=dtype,
          initializer=initializer,
          regularizer=regularizer,
          reuse=reuse,
          trainable=trainable,
          collections=collections,
          caching_device=caching_device,
          partitioner=partitioner,
          validate_shape=validate_shape,
          use_resource=use_resource,
          custom_getter=custom_getter,
          constraint=constraint,
          synchronization=synchronization,
          aggregation=aggregation)

  def _get_partitioned_variable(self,
                                var_store,
                                name,
                                shape=None,
                                dtype=None,
                                initializer=None,
                                regularizer=None,
                                trainable=None,
                                collections=None,
                                caching_device=None,
                                partitioner=None,
                                validate_shape=True,
                                use_resource=None,
                                constraint=None,
                                synchronization=VariableSynchronization.AUTO,
                                aggregation=VariableAggregation.NONE):
    """Gets an existing variable with this name or create a new one."""
    # Fall back to this scope's defaults for any argument left unspecified.
    if initializer is None:
      initializer = self._initializer
    if regularizer is None:
      regularizer = self._regularizer
    if constraint is None:
      constraint = self._constraint
    if caching_device is None:
      caching_device = self._caching_device
    if partitioner is None:
      partitioner = self._partitioner
    if dtype is None:
      dtype = self._dtype
    if use_resource is None:
      use_resource = self._use_resource

    if self._custom_getter is not None:
      raise ValueError(
          "Private access to _get_partitioned_variable is not allowed when "
          "a custom getter is set. Current custom getter: %s. "
          "It is likely that you're using create_partitioned_variables. "
          "If so, consider instead using get_variable with a non-empty "
          "partitioner parameter instead." % self._custom_getter)

    if partitioner is None:
      raise ValueError("No partitioner was specified")

    # This allows the variable scope name to be used as the variable name if
    # this function is invoked with an empty name arg, for backward
    # compatibility with create_partitioned_variables().
    full_name_list = []
    if self.name:
      full_name_list.append(self.name)
    if name:
      full_name_list.append(name)
    full_name = "/".join(full_name_list)

    # Variable names only depend on variable_scope (full_name here),
    # not name_scope, so we reset it below for the time of variable creation.
    with ops.name_scope(None):
      # pylint: disable=protected-access
      return var_store._get_partitioned_variable(
          full_name,
          shape=shape,
          dtype=dtype,
          initializer=initializer,
          regularizer=regularizer,
          reuse=self.reuse,
          trainable=trainable,
          collections=collections,
          caching_device=caching_device,
          partitioner=partitioner,
          validate_shape=validate_shape,
          use_resource=use_resource,
          constraint=constraint,
          synchronization=synchronization,
          aggregation=aggregation)
      # pylint: enable=protected-access
# Graph-collection keys under which the shared variable store and the
# per-thread variable scope store are kept. Tuples are used so these keys
# cannot collide with ordinary string-valued collection names.
_VARSTORE_KEY = ("__variable_store",)
_VARSCOPESTORE_KEY = ("__varscope",)
class _VariableScopeStore(threading.local):
"""A thread local store for the current variable scope and scope counts."""
def __init__(self):
super(_VariableScopeStore, self).__init__()
self.current_scope = VariableScope(False)
self.variable_scopes_count = {}
def open_variable_scope(self, scope_name):
if scope_name in self.variable_scopes_count:
self.variable_scopes_count[scope_name] += 1
else:
self.variable_scopes_count[scope_name] = 1
def close_variable_subscopes(self, scope_name):
for k in list(self.variable_scopes_count.keys()):
if scope_name is None or k.startswith(scope_name + "/"):
self.variable_scopes_count[k] = 0
def variable_scope_count(self, scope_name):
return self.variable_scopes_count.get(scope_name, 0)
def get_variable_scope_store():
  """Returns the variable scope store for current thread."""
  stores = ops.get_collection(_VARSCOPESTORE_KEY)
  if stores:
    return stores[0]
  # First access on this thread/graph: create a store and register it.
  scope_store = _VariableScopeStore()
  ops.add_to_collection(_VARSCOPESTORE_KEY, scope_store)
  return scope_store
@tf_export(v1=["get_variable_scope"])
def get_variable_scope():
  """Returns the current variable scope."""
  scope_store = get_variable_scope_store()
  return scope_store.current_scope
def _get_default_variable_store():
  """Returns the default `_VariableStore`, creating and registering it once."""
  existing = ops.get_collection(_VARSTORE_KEY)
  if not existing:
    store = _VariableStore()
    ops.add_to_collection(_VARSTORE_KEY, store)
    return store
  return existing[0]
@tf_contextlib.contextmanager
def with_variable_store(store):
  """Context manager that temporarily installs `store` as the default store."""
  store_collection = ops.get_collection_ref(_VARSTORE_KEY)
  saved = store_collection[:]  # Snapshot so the previous state can be restored.
  store_collection[:] = [store]
  try:
    yield
  finally:
    store_collection[:] = saved
class EagerVariableStore(object):
  """Wrapper allowing functional layers to be used with eager execution.

  When eager execution is enabled Variables get deleted when they go out of
  scope, and are not stored in global collections by default. A lot of code
  (mostly the functional layers in tf.layers) assumes that variables are kept in
  a global list.

  EagerVariableStore can be used in conjunction with this code to make it
  eager-friendly. For example, to create a dense layer, use:

  ```
    container = tfe.EagerVariableStore()
    for input in dataset_iterator:
      with container.as_default():
        x = tf.compat.v1.layers.dense(input, name="l1")
    print(container.variables)  # Should print the variables used in the layer.
  ```
  """

  def __init__(self, store=None):
    # Either adopt an existing eager-holding store, or create a fresh one
    # flagged to hold eager variables.
    # pylint: disable=protected-access
    if store is None:
      store = _VariableStore()
      store._store_eager_variables = True
    elif not store._store_eager_variables:
      raise ValueError("Cannot construct EagerVariableStore from a "
                       "VariableStore object that does not hold eager "
                       "variables.")
    # pylint: enable=protected-access
    self._store = store

  def as_default(self):
    """Context manager making this store the default for variable creation."""
    return with_variable_store(self._store)

  def variables(self):
    """All variables in the store, sorted by name."""
    # pylint: disable=protected-access
    return sorted(self._store._vars.values(), key=lambda v: v.name)
    # pylint: enable=protected-access

  def trainable_variables(self):
    """Trainable variables in the store, sorted by name."""
    # pylint: disable=protected-access
    return sorted((v for v in self._store._vars.values() if v.trainable),
                  key=lambda v: v.name)
    # pylint: enable=protected-access

  def non_trainable_variables(self):
    """Non-trainable variables in the store, sorted by name."""
    # pylint: disable=protected-access
    return sorted((v for v in self._store._vars.values() if not v.trainable),
                  key=lambda v: v.name)
    # pylint: enable=protected-access

  def copy(self):
    """Copy this variable store and all of its contents.

    Variables contained in this store will be copied over to the new variable
    store, meaning that they can be modified without affecting the variables in
    this store.

    Returns:
      A new EagerVariableStore instance containing copied variables.
    """
    # pylint: disable=protected-access
    new_store = EagerVariableStore()
    for key, var in iteritems(self._store._vars):
      # Strip the ":<output_index>" (device/output) suffix out of the name;
      # partition() returns the whole name when no ":" is present.
      stripped_var_name = var.name.partition(":")[0]
      # Create new variable with same value, name, and "trainable" flag.
      new_var = resource_variable_ops.ResourceVariable(
          var.read_value(), name=stripped_var_name, trainable=var.trainable)
      new_store._store._vars[key] = new_var
    return new_store
    # pylint: enable=protected-access
# The argument list for get_variable must match arguments to get_local_variable.
# So, if you are updating the arguments, also update arguments to
# get_local_variable below.
@tf_export(v1=["get_variable"])
def get_variable(name,
                 shape=None,
                 dtype=None,
                 initializer=None,
                 regularizer=None,
                 trainable=None,
                 collections=None,
                 caching_device=None,
                 partitioner=None,
                 validate_shape=True,
                 use_resource=None,
                 custom_getter=None,
                 constraint=None,
                 synchronization=VariableSynchronization.AUTO,
                 aggregation=VariableAggregation.NONE):
  # Thin wrapper: delegate to the current VariableScope with the default
  # variable store. The public docstring is assigned separately below from
  # `get_variable_or_local_docstring`.
  return get_variable_scope().get_variable(
      _get_default_variable_store(),
      name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      regularizer=regularizer,
      trainable=trainable,
      collections=collections,
      caching_device=caching_device,
      partitioner=partitioner,
      validate_shape=validate_shape,
      use_resource=use_resource,
      custom_getter=custom_getter,
      constraint=constraint,
      synchronization=synchronization,
      aggregation=aggregation)
# %-format template shared by `get_variable` and `get_local_variable` docs.
# Slots: summary line, extra preamble, `trainable:` arg doc, and the default
# collections key. Fixed typo: "when a distributed a variable" -> "when a
# distributed variable".
get_variable_or_local_docstring = ("""%s

%sThis function prefixes the name with the current variable scope
and performs reuse checks. See the
[Variable Scope How To](https://tensorflow.org/guide/variables)
for an extensive description of how reusing works. Here is a basic example:

```python
def foo():
  with tf.variable_scope("foo", reuse=tf.AUTO_REUSE):
    v = tf.get_variable("v", [1])
  return v

v1 = foo()  # Creates v.
v2 = foo()  # Gets the same, existing v.
assert v1 == v2
```

If initializer is `None` (the default), the default initializer passed in
the variable scope will be used. If that one is `None` too, a
`glorot_uniform_initializer` will be used. The initializer can also be
a Tensor, in which case the variable is initialized to this value and shape.

Similarly, if the regularizer is `None` (the default), the default regularizer
passed in the variable scope will be used (if that is `None` too,
then by default no regularization is performed).

If a partitioner is provided, a `PartitionedVariable` is returned.
Accessing this object as a `Tensor` returns the shards concatenated along
the partition axis.

Some useful partitioners are available.  See, e.g.,
`variable_axis_size_partitioner` and `min_max_variable_partitioner`.

Args:
  name: The name of the new or existing variable.
  shape: Shape of the new or existing variable.
  dtype: Type of the new or existing variable (defaults to `DT_FLOAT`).
  initializer: Initializer for the variable if one is created. Can either be
    an initializer object or a Tensor. If it's a Tensor, its shape must be known
    unless validate_shape is False.
  regularizer: A (Tensor -> Tensor or None) function; the result of
    applying it on a newly created variable will be added to the collection
    `tf.GraphKeys.REGULARIZATION_LOSSES` and can be used for regularization.
  %scollections: List of graph collections keys to add the Variable to.
    Defaults to `[%s]` (see `tf.Variable`).
  caching_device: Optional device string or function describing where the
    Variable should be cached for reading.  Defaults to the Variable's
    device.  If not `None`, caches on another device.  Typical use is to
    cache on the device where the Ops using the Variable reside, to
    deduplicate copying through `Switch` and other conditional statements.
  partitioner: Optional callable that accepts a fully defined `TensorShape`
    and `dtype` of the Variable to be created, and returns a list of
    partitions for each axis (currently only one axis can be partitioned).
  validate_shape: If False, allows the variable to be initialized with a
    value of unknown shape. If True, the default, the shape of initial_value
    must be known. For this to be used the initializer must be a Tensor and
    not an initializer object.
  use_resource: If False, creates a regular Variable. If true, creates an
    experimental ResourceVariable instead with well-defined semantics.
    Defaults to False (will later change to True). When eager execution is
    enabled this argument is always forced to be True.
  custom_getter: Callable that takes as a first argument the true getter, and
    allows overwriting the internal get_variable method.
    The signature of `custom_getter` should match that of this method,
    but the most future-proof version will allow for changes:
    `def custom_getter(getter, *args, **kwargs)`.  Direct access to
    all `get_variable` parameters is also allowed:
    `def custom_getter(getter, name, *args, **kwargs)`.  A simple identity
    custom getter that simply creates variables with modified names is:

    ```python
    def custom_getter(getter, name, *args, **kwargs):
      return getter(name + '_suffix', *args, **kwargs)
    ```
  constraint: An optional projection function to be applied to the variable
    after being updated by an `Optimizer` (e.g. used to implement norm
    constraints or value constraints for layer weights). The function must
    take as input the unprojected Tensor representing the value of the
    variable and return the Tensor for the projected value
    (which must have the same shape). Constraints are not safe to
    use when doing asynchronous distributed training.
  synchronization: Indicates when a distributed variable will be
    aggregated. Accepted values are constants defined in the class
    `tf.VariableSynchronization`. By default the synchronization is set to
    `AUTO` and the current `DistributionStrategy` chooses
    when to synchronize. If `synchronization` is set to `ON_READ`,
    `trainable` must not be set to `True`.
  aggregation: Indicates how a distributed variable will be aggregated.
    Accepted values are constants defined in the class
    `tf.VariableAggregation`.

Returns:
  The created or existing `Variable` (or `PartitionedVariable`, if a
  partitioner was used).

Raises:
  ValueError: when creating a new variable and shape is not declared,
    when violating reuse during variable creation, or when `initializer` dtype
    and `dtype` don't match. Reuse is set inside `variable_scope`.
""")
get_variable.__doc__ = get_variable_or_local_docstring % (
"Gets an existing variable with these parameters or create a new one.", "",
"trainable: If `True` also add the variable to the graph collection\n"
" `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).\n ",
"GraphKeys.GLOBAL_VARIABLES")
# The argument list for get_local_variable must match arguments to get_variable.
# So, if you are updating the arguments, also update arguments to get_variable.
@tf_export(v1=["get_local_variable"])
def get_local_variable(  # pylint: disable=missing-docstring
    name,
    shape=None,
    dtype=None,
    initializer=None,
    regularizer=None,
    trainable=False,  # pylint: disable=unused-argument
    collections=None,
    caching_device=None,
    partitioner=None,
    validate_shape=True,
    use_resource=None,
    custom_getter=None,
    constraint=None,
    synchronization=VariableSynchronization.AUTO,
    aggregation=VariableAggregation.NONE):
  # Always tag the variable as local. Build a fresh list rather than using
  # `+=`, which would mutate a caller-supplied `collections` list in place.
  if collections:
    collections = list(collections) + [ops.GraphKeys.LOCAL_VARIABLES]
  else:
    collections = [ops.GraphKeys.LOCAL_VARIABLES]
  # Delegate to get_variable with `trainable` hard-coded to False: local
  # variables are never trainable (the `trainable` parameter above exists
  # only to keep the signature aligned with get_variable).
  return get_variable(
      name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      regularizer=regularizer,
      trainable=False,
      collections=collections,
      caching_device=caching_device,
      partitioner=partitioner,
      validate_shape=validate_shape,
      use_resource=use_resource,
      synchronization=synchronization,
      aggregation=aggregation,
      custom_getter=custom_getter,
      constraint=constraint)
# Fill in get_local_variable's docstring from the shared template: summary
# line, a preamble noting the LOCAL_VARIABLES/trainable=False behavior, no
# `trainable:` argument documentation, and the local collections key.
get_local_variable.__doc__ = get_variable_or_local_docstring % (
    "Gets an existing *local* variable or creates a new one.",
    "Behavior is the same as in `get_variable`, except that variables are\n"
    "added to the `LOCAL_VARIABLES` collection and `trainable` is set to\n"
    "`False`.\n", "", "GraphKeys.LOCAL_VARIABLES")
def _get_partitioned_variable(name,
                              shape=None,
                              dtype=None,
                              initializer=None,
                              regularizer=None,
                              trainable=True,
                              collections=None,
                              caching_device=None,
                              partitioner=None,
                              validate_shape=True,
                              use_resource=None,
                              constraint=None,
                              synchronization=VariableSynchronization.AUTO,
                              aggregation=VariableAggregation.NONE):
  """Gets or creates a sharded variable list with these parameters.

  The `partitioner` must be a callable that accepts a fully defined
  `TensorShape` and returns a sequence of integers (the `partitions`).
  These integers describe how to partition the given sharded `Variable`
  along the given dimension.  That is, `partitions[1] = 3` means split
  the `Variable` into 3 shards along dimension 1.  Currently, sharding along
  only one axis is supported.

  If the list of variables with the given name (prefix) is already stored,
  we return the stored variables. Otherwise, we create a new one.

  If initializer is `None` (the default), the default initializer passed in
  the constructor is used. If that one is `None` too, we use a new
  `glorot_uniform_initializer`. If initializer is a Tensor, we use
  it as a value and derive the shape from the initializer.

  If the initializer is a callable, then it will be called for each
  shard.  Otherwise the initializer should match the shape of the entire
  sharded Variable, and it will be sliced accordingly for each shard.

  Some useful partitioners are available.  See, e.g.,
  `variable_axis_size_partitioner` and `min_max_variable_partitioner`.

  Args:
    name: The name of the new or existing variable.
    shape: Shape of the new or existing variable.
    dtype: Type of the new or existing variable (defaults to `DT_FLOAT`).
    initializer: Initializer for the variable if one is created.
    regularizer: A (Tensor -> Tensor or None) function; the result of applying
      it on a newly created variable will be added to the collection
      GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
    trainable: If `True` also add the variable to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    collections: List of graph collections keys to add the Variable to. Defaults
      to `[GraphKeys.GLOBAL_VARIABLES]` (see `tf.Variable`).
    caching_device: Optional device string or function describing where the
      Variable should be cached for reading.  Defaults to the Variable's device.
      If not `None`, caches on another device.  Typical use is to cache on the
      device where the Ops using the Variable reside, to deduplicate copying
      through `Switch` and other conditional statements.
    partitioner: Optional callable that accepts a fully defined `TensorShape`
      and `dtype` of the Variable to be created, and returns a list of
      partitions for each axis (currently only one axis can be partitioned).
    validate_shape: If False, allows the variable to be initialized with a value
      of unknown shape. If True, the default, the shape of initial_value must be
      known.
    use_resource: If False, creates a regular Variable. If True, creates an
      experimental ResourceVariable instead which has well-defined semantics.
      Defaults to False (will later change to True).
    constraint: An optional projection function to be applied to the variable
      after being updated by an `Optimizer` (e.g. used to implement norm
      constraints or value constraints for layer weights). The function must
      take as input the unprojected Tensor representing the value of the
      variable and return the Tensor for the projected value (which must have
      the same shape). Constraints are not safe to use when doing asynchronous
      distributed training.
    synchronization: Indicates when a distributed variable will be aggregated.
      Accepted values are constants defined in the class
      `tf.VariableSynchronization`. By default the synchronization is set to
      `AUTO` and the current `DistributionStrategy` chooses when to synchronize.
      If `synchronization` is set to `ON_READ`, `trainable` must not be set to
      `True`.
    aggregation: Indicates how a distributed variable will be aggregated.
      Accepted values are constants defined in the class
      `tf.VariableAggregation`.

  Returns:
    A tuple `(shards, partitions)` where `shards` is the list of `Variable`
    shards and `partitions` is the output of the partitioner on the input
    shape.

  Raises:
    ValueError: when creating a new variable and shape is not declared,
      or when violating reuse during variable creation. Reuse is set inside
      `variable_scope`.
  """
  # pylint: disable=protected-access
  scope = get_variable_scope()
  # Custom getters only see get_variable's interface; this private entry
  # point would bypass them, so disallow the combination outright.
  if scope.custom_getter is not None:
    raise ValueError(
        "Private access to _get_partitioned_variable is not allowed when "
        "a custom getter is set. Current custom getter: %s. "
        "It is likely that you're using create_partitioned_variables. "
        "If so, consider instead using get_variable with a non-empty "
        "partitioner parameter instead." % scope.custom_getter)
  return scope._get_partitioned_variable(
      _get_default_variable_store(),
      name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      regularizer=regularizer,
      trainable=trainable,
      collections=collections,
      caching_device=caching_device,
      partitioner=partitioner,
      validate_shape=validate_shape,
      use_resource=use_resource,
      constraint=constraint,
      synchronization=synchronization,
      aggregation=aggregation)
  # pylint: enable=protected-access
# Named like a function for compatibility with the previous
# @tf_contextlib.contextmanager definition.
class _pure_variable_scope(object):  # pylint: disable=invalid-name
  """A context for the variable_scope, see `variable_scope` for docs."""

  def __init__(self,
               name_or_scope,
               reuse=None,
               initializer=None,
               regularizer=None,
               caching_device=None,
               partitioner=None,
               custom_getter=None,
               old_name_scope=None,
               dtype=dtypes.float32,
               use_resource=None,
               constraint=None):
    """Creates a context for the variable_scope, see `variable_scope` for docs.

    Note: this does not create a name scope.

    Args:
      name_or_scope: `string` or `VariableScope`: the scope to open.
      reuse: `True` or None, or tf.compat.v1.AUTO_REUSE; if `None`, we inherit
        the parent scope's reuse flag.
      initializer: default initializer for variables within this scope.
      regularizer: default regularizer for variables within this scope.
      caching_device: default caching device for variables within this scope.
      partitioner: default partitioner for variables within this scope.
      custom_getter: default custom getter for variables within this scope.
      old_name_scope: the original name scope when re-entering a variable scope.
      dtype: type of the variables within this scope (defaults to `DT_FLOAT`).
      use_resource: If False, variables in this scope will be regular Variables.
        If True, experimental ResourceVariables will be created instead, with
        well-defined semantics. Defaults to False (will later change to True).
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
    """
    self._name_or_scope = name_or_scope
    self._reuse = reuse
    self._initializer = initializer
    self._regularizer = regularizer
    self._caching_device = caching_device
    self._partitioner = partitioner
    self._custom_getter = custom_getter
    self._old_name_scope = old_name_scope
    self._dtype = dtype
    self._use_resource = use_resource
    self._constraint = constraint
    # Shared stores: the variable store maps full variable names to variables;
    # the scope store tracks the currently-open scope and sub-scope counts.
    self._var_store = _get_default_variable_store()
    self._var_scope_store = get_variable_scope_store()
    # Set on a successful __enter__; checked in __exit__ to detect improper
    # nesting of scope blocks.
    self._last_variable_scope_object = None
    if isinstance(self._name_or_scope, VariableScope):
      self._new_name = self._name_or_scope.name
      name_scope = self._name_or_scope._name_scope  # pylint: disable=protected-access
      # Handler for the case when we jump to a shared scope. We create a new
      # VariableScope (self._var_scope_object) that contains a copy of the
      # provided shared scope, possibly with changed reuse and initializer, if
      # the user requested this.
      variable_scope_object = VariableScope(
          self._name_or_scope.reuse if not self._reuse else self._reuse,
          name=self._new_name,
          initializer=self._name_or_scope.initializer,
          regularizer=self._name_or_scope.regularizer,
          caching_device=self._name_or_scope.caching_device,
          partitioner=self._name_or_scope.partitioner,
          dtype=self._name_or_scope.dtype,
          custom_getter=self._name_or_scope.custom_getter,
          name_scope=name_scope,
          use_resource=self._name_or_scope.use_resource,
          constraint=self._constraint)
      # Explicitly-provided defaults override the values copied from the
      # shared scope.
      if self._initializer is not None:
        variable_scope_object.set_initializer(self._initializer)
      if self._regularizer is not None:
        variable_scope_object.set_regularizer(self._regularizer)
      if self._caching_device is not None:
        variable_scope_object.set_caching_device(self._caching_device)
      if self._partitioner is not None:
        variable_scope_object.set_partitioner(self._partitioner)
      if self._custom_getter is not None:
        # Chain the new custom getter with the shared scope's existing one.
        variable_scope_object.set_custom_getter(
            _maybe_wrap_custom_getter(self._custom_getter,
                                      self._name_or_scope.custom_getter))
      if self._dtype is not None:
        variable_scope_object.set_dtype(self._dtype)
      if self._use_resource is not None:
        variable_scope_object.set_use_resource(self._use_resource)
      # Cached so repeated __enter__ calls can reuse the same scope object.
      self._cached_variable_scope_object = variable_scope_object

  def __enter__(self):
    """Begins the scope block.

    Returns:
      A VariableScope.
    Raises:
      ValueError: when trying to reuse within a create scope, or create within
        a reuse scope, or if reuse is not `None` or `True`.
      TypeError: when the types of some arguments are not appropriate.
    """
    self._old = self._var_scope_store.current_scope
    if isinstance(self._name_or_scope, VariableScope):
      self._var_scope_store.open_variable_scope(self._new_name)
      # Snapshot the sub-scope counts so __exit__ can restore them when
      # jumping out of this non-prolonged scope.
      self._old_subscopes = copy.copy(
          self._var_scope_store.variable_scopes_count)
      variable_scope_object = self._cached_variable_scope_object
    else:
      # Handler for the case when we just prolong current variable scope.
      # VariableScope with name extended by the provided one, and inherited
      # reuse and initializer (except if the user provided values to set).
      self._new_name = (
          self._old.name + "/" +
          self._name_or_scope if self._old.name else self._name_or_scope)
      self._reuse = (self._reuse or
                     self._old.reuse)  # Re-using is inherited by sub-scopes.
      if self._old_name_scope is None:
        name_scope = self._name_or_scope
      else:
        name_scope = self._old_name_scope
      variable_scope_object = VariableScope(
          self._reuse,
          name=self._new_name,
          initializer=self._old.initializer,
          regularizer=self._old.regularizer,
          caching_device=self._old.caching_device,
          partitioner=self._old.partitioner,
          dtype=self._old.dtype,
          use_resource=self._old.use_resource,
          custom_getter=self._old.custom_getter,
          name_scope=name_scope,
          constraint=self._constraint)
      if self._initializer is not None:
        variable_scope_object.set_initializer(self._initializer)
      if self._regularizer is not None:
        variable_scope_object.set_regularizer(self._regularizer)
      if self._caching_device is not None:
        variable_scope_object.set_caching_device(self._caching_device)
      if self._partitioner is not None:
        variable_scope_object.set_partitioner(self._partitioner)
      if self._custom_getter is not None:
        # Chain the custom getter with the parent scope's custom getter.
        variable_scope_object.set_custom_getter(
            _maybe_wrap_custom_getter(self._custom_getter,
                                      self._old.custom_getter))
      if self._dtype is not None:
        variable_scope_object.set_dtype(self._dtype)
      if self._use_resource is not None:
        variable_scope_object.set_use_resource(self._use_resource)
      self._var_scope_store.open_variable_scope(self._new_name)
    self._var_scope_store.current_scope = variable_scope_object
    self._last_variable_scope_object = variable_scope_object
    return variable_scope_object

  def __exit__(self, type_arg, value_arg, traceback_arg):
    """Exits the scope block, restoring the previously-current scope."""
    if (self._var_scope_store.current_scope is
        not self._last_variable_scope_object):
      raise RuntimeError("Improper nesting of variable_scope.")
    # If jumping out from a non-prolonged scope, restore counts.
    if isinstance(self._name_or_scope, VariableScope):
      self._var_scope_store.variable_scopes_count = self._old_subscopes
    else:
      self._var_scope_store.close_variable_subscopes(self._new_name)
    self._var_scope_store.current_scope = self._old
def _maybe_wrap_custom_getter(custom_getter, old_getter):
"""Wrap a call to a custom_getter to use the old_getter internally."""
if old_getter is None:
return custom_getter
# The new custom_getter should call the old one
def wrapped_custom_getter(getter, *args, **kwargs):
# Call:
# custom_getter(
# lambda: old_getter(true_getter, ...), *args, **kwargs)
# which means custom_getter will call old_getter, which
# will call the true_getter, perform any intermediate
# processing, and return the results to the current
# getter, which will also perform additional processing.
return custom_getter(functools.partial(old_getter, getter), *args, **kwargs)
return wrapped_custom_getter
def _get_unique_variable_scope(prefix):
  """Get a name with the given prefix unique in the current variable scope."""
  store = get_variable_scope_store()
  scope_name = get_variable_scope().name
  # The store counts scopes by their full path, so qualify the prefix with
  # the current scope's name before probing.
  full_name = scope_name + "/" + prefix if scope_name else prefix
  if not store.variable_scope_count(full_name):
    return prefix
  suffix_idx = 1
  while store.variable_scope_count("%s_%d" % (full_name, suffix_idx)):
    suffix_idx += 1
  return "%s_%d" % (prefix, suffix_idx)
# Named like a function for backwards compatibility with the
# @tf_contextlib.contextmanager version, which was switched to a class to avoid
# some object creation overhead.
@tf_export(v1=["variable_scope"])  # pylint: disable=invalid-name
class variable_scope(object):
  """A context manager for defining ops that creates variables (layers).

  This context manager validates that the (optional) `values` are from the same
  graph, ensures that graph is the default graph, and pushes a name scope and a
  variable scope.

  If `name_or_scope` is not None, it is used as is. If `name_or_scope` is None,
  then `default_name` is used. In that case, if the same name has been
  previously used in the same scope, it will be made unique by appending `_N`
  to it.

  Variable scope allows you to create new variables and to share already created
  ones while providing checks to not create or share by accident. For details,
  see the [Variable Scope How To](https://tensorflow.org/guide/variables), here
  we present only a few basic examples.

  Simple example of how to create a new variable:

  ```python
  with tf.compat.v1.variable_scope("foo"):
    with tf.compat.v1.variable_scope("bar"):
      v = tf.compat.v1.get_variable("v", [1])
      assert v.name == "foo/bar/v:0"
  ```

  Simple example of how to reenter a premade variable scope safely:

  ```python
  with tf.compat.v1.variable_scope("foo") as vs:
    pass

  # Re-enter the variable scope.
  with tf.compat.v1.variable_scope(vs,
                                   auxiliary_name_scope=False) as vs1:
    # Restore the original name_scope.
    with tf.name_scope(vs1.original_name_scope):
      v = tf.compat.v1.get_variable("v", [1])
      assert v.name == "foo/v:0"
      c = tf.constant([1], name="c")
      assert c.name == "foo/c:0"
  ```

  Basic example of sharing a variable AUTO_REUSE:

  ```python
  def foo():
    with tf.compat.v1.variable_scope("foo", reuse=tf.compat.v1.AUTO_REUSE):
      v = tf.compat.v1.get_variable("v", [1])
    return v

  v1 = foo()  # Creates v.
  v2 = foo()  # Gets the same, existing v.
  assert v1 == v2
  ```

  Basic example of sharing a variable with reuse=True:

  ```python
  with tf.compat.v1.variable_scope("foo"):
    v = tf.compat.v1.get_variable("v", [1])
  with tf.compat.v1.variable_scope("foo", reuse=True):
    v1 = tf.compat.v1.get_variable("v", [1])
  assert v1 == v
  ```

  Sharing a variable by capturing a scope and setting reuse:

  ```python
  with tf.compat.v1.variable_scope("foo") as scope:
    v = tf.compat.v1.get_variable("v", [1])
    scope.reuse_variables()
    v1 = tf.compat.v1.get_variable("v", [1])
  assert v1 == v
  ```

  To prevent accidental sharing of variables, we raise an exception when getting
  an existing variable in a non-reusing scope.

  ```python
  with tf.compat.v1.variable_scope("foo"):
    v = tf.compat.v1.get_variable("v", [1])
    v1 = tf.compat.v1.get_variable("v", [1])
    #  Raises ValueError("... v already exists ...").
  ```

  Similarly, we raise an exception when trying to get a variable that does not
  exist in reuse mode.

  ```python
  with tf.compat.v1.variable_scope("foo", reuse=True):
    v = tf.compat.v1.get_variable("v", [1])
    #  Raises ValueError("... v does not exists ...").
  ```

  Note that the `reuse` flag is inherited: if we open a reusing scope, then all
  its sub-scopes become reusing as well.

  A note about name scoping: Setting `reuse` does not impact the naming of other
  ops such as mult. See related discussion on
  [github#6189](https://github.com/tensorflow/tensorflow/issues/6189)

  Note that up to and including version 1.0, it was allowed (though explicitly
  discouraged) to pass False to the reuse argument, yielding undocumented
  behaviour slightly different from None. Starting at 1.1.0 passing None and
  False as reuse has exactly the same effect.

  A note about using variable scopes in multi-threaded environment: Variable
  scopes are thread local, so one thread will not see another thread's current
  scope. Also, when using `default_name`, unique scopes names are also generated
  only on a per thread basis. If the same name was used within a different
  thread, that doesn't prevent a new thread from creating the same scope.
  However, the underlying variable store is shared across threads (within the
  same graph). As such, if another thread tries to create a new variable with
  the same name as a variable created by a previous thread, it will fail unless
  reuse is True.

  Further, each thread starts with an empty variable scope. So if you wish to
  preserve name prefixes from a scope from the main thread, you should capture
  the main thread's scope and re-enter it in each thread. For e.g.

  ```
  main_thread_scope = variable_scope.get_variable_scope()

  # Thread's target function:
  def thread_target_fn(captured_scope):
    with variable_scope.variable_scope(captured_scope):
      # .... regular code for this thread


  thread = threading.Thread(target=thread_target_fn, args=(main_thread_scope,))
  ```
  """

  def __init__(self,
               name_or_scope,
               default_name=None,
               values=None,
               initializer=None,
               regularizer=None,
               caching_device=None,
               partitioner=None,
               custom_getter=None,
               reuse=None,
               dtype=None,
               use_resource=None,
               constraint=None,
               auxiliary_name_scope=True):
    """Initialize the context manager.

    Args:
      name_or_scope: `string` or `VariableScope`: the scope to open.
      default_name: The default name to use if the `name_or_scope` argument is
        `None`, this name will be uniquified. If name_or_scope is provided it
        won't be used and therefore it is not required and can be None.
      values: The list of `Tensor` arguments that are passed to the op function.
      initializer: default initializer for variables within this scope.
      regularizer: default regularizer for variables within this scope.
      caching_device: default caching device for variables within this scope.
      partitioner: default partitioner for variables within this scope.
      custom_getter: default custom getter for variables within this scope.
      reuse: `True`, None, or tf.compat.v1.AUTO_REUSE; if `True`, we go into
        reuse mode for this scope as well as all sub-scopes; if
        tf.compat.v1.AUTO_REUSE, we create variables if they do not exist, and
        return them otherwise; if None, we inherit the parent scope's reuse
        flag. When eager execution is enabled, new variables are always created
        unless an EagerVariableStore or template is currently active.
      dtype: type of variables created in this scope (defaults to the type in
        the passed scope, or inherited from parent scope).
      use_resource: If False, all variables will be regular Variables. If True,
        experimental ResourceVariables with well-defined semantics will be used
        instead. Defaults to False (will later change to True). When eager
        execution is enabled this argument is always forced to be True.
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      auxiliary_name_scope: If `True`, we create an auxiliary name scope with
        the scope. If `False`, we don't create it. Note that the argument is not
        inherited, and it only takes effect for once when creating. You should
        only use it for re-entering a premade variable scope.

    Returns:
      A scope that can be captured and reused.

    Raises:
      ValueError: when trying to reuse within a create scope, or create within
        a reuse scope.
      TypeError: when the types of some arguments are not appropriate.
    """
    self._name_or_scope = name_or_scope
    self._default_name = default_name
    self._values = values
    self._initializer = initializer
    self._regularizer = regularizer
    self._caching_device = caching_device
    self._partitioner = partitioner
    self._custom_getter = custom_getter
    self._reuse = reuse
    self._dtype = dtype
    self._use_resource = use_resource
    self._constraint = constraint
    if self._default_name is None and self._name_or_scope is None:
      raise TypeError("If default_name is None then name_or_scope is required")
    if self._reuse is False:
      # We don't allow non-inheriting scopes, False = None here.
      self._reuse = None
    if not (self._reuse is True
            or self._reuse is None
            or self._reuse is AUTO_REUSE):
      raise ValueError("The reuse parameter must be True or False or None.")
    if self._values is None:
      self._values = []
    self._in_graph_mode = not context.executing_eagerly()
    if self._in_graph_mode:
      self._graph = ops._get_graph_from_inputs(self._values)  # pylint: disable=protected-access
    self._cached_pure_variable_scope = None
    self._current_name_scope = None
    if not isinstance(auxiliary_name_scope, bool):
      raise TypeError("The auxiliary_name_scope must be `True` or `False`, "
                      "while get {}".format(auxiliary_name_scope))
    self._auxiliary_name_scope = auxiliary_name_scope

  def __enter__(self):
    """Enters the graph context (if any) and the variable scope."""
    # If the default graph is building a function, then we should not replace it
    # with the cached graph.
    if ops.get_default_graph().building_function:
      self._building_function = True
    else:
      self._building_function = False
    if self._in_graph_mode and not self._building_function:
      self._graph_context_manager = self._graph.as_default()
      self._graph_context_manager.__enter__()
    if self._cached_pure_variable_scope is not None:
      # Fast path for re-entering variable_scopes. We've held on to the pure
      # variable scope from a previous successful __enter__, so we avoid some
      # overhead by re-using that object.
      if self._current_name_scope is not None:
        self._current_name_scope.__enter__()
      return self._cached_pure_variable_scope.__enter__()

    try:
      return self._enter_scope_uncached()
    except:
      # BUG FIX: this used to be a `finally` clause, which exited the graph
      # context manager even on the success path (immediately undoing the
      # enter above and causing a second __exit__ later).  Only unwind on
      # failure; on success the graph scope must stay entered until __exit__.
      if (self._in_graph_mode and not self._building_function and
          self._graph_context_manager is not None):
        self._graph_context_manager.__exit__(*sys.exc_info())
      raise

  def _enter_scope_uncached(self):
    """Enters the context manager when there is no cached scope yet.

    Returns:
      The entered variable scope.

    Raises:
      TypeError: A wrong type is passed as `scope` at __init__().
      ValueError: `reuse` is incorrectly set at __init__().
    """
    if self._auxiliary_name_scope:
      # Create a new name scope later
      current_name_scope = None
    else:
      # Reenter the current name scope
      name_scope = ops.get_name_scope()
      if name_scope:
        # Hack to reenter
        name_scope += "/"
        current_name_scope = ops.name_scope(name_scope)
      else:
        # Root scope
        current_name_scope = ops.name_scope(name_scope)

    # IMPORTANT: Only assign to self._cached_pure_variable_scope and
    # self._current_name_scope after successful __enter__() calls.
    if self._name_or_scope is not None:
      if not isinstance(self._name_or_scope,
                        (VariableScope,) + six.string_types):
        raise TypeError("VariableScope: name_or_scope must be a string or "
                        "VariableScope.")
      if isinstance(self._name_or_scope, six.string_types):
        name_scope = self._name_or_scope
      else:
        name_scope = self._name_or_scope.name.split("/")[-1]
      if name_scope or current_name_scope:
        current_name_scope = current_name_scope or ops.name_scope(name_scope)
        try:
          current_name_scope_name = current_name_scope.__enter__()
        except:
          current_name_scope.__exit__(*sys.exc_info())
          raise
        self._current_name_scope = current_name_scope
        if isinstance(self._name_or_scope, six.string_types):
          old_name_scope = current_name_scope_name
        else:
          old_name_scope = self._name_or_scope.original_name_scope
        pure_variable_scope = _pure_variable_scope(
            self._name_or_scope,
            reuse=self._reuse,
            initializer=self._initializer,
            regularizer=self._regularizer,
            caching_device=self._caching_device,
            partitioner=self._partitioner,
            custom_getter=self._custom_getter,
            old_name_scope=old_name_scope,
            dtype=self._dtype,
            use_resource=self._use_resource,
            constraint=self._constraint)
        try:
          entered_pure_variable_scope = pure_variable_scope.__enter__()
        except:
          pure_variable_scope.__exit__(*sys.exc_info())
          raise
        self._cached_pure_variable_scope = pure_variable_scope
        return entered_pure_variable_scope
      else:
        self._current_name_scope = None
        # This can only happen if someone is entering the root variable scope.
        pure_variable_scope = _pure_variable_scope(
            self._name_or_scope,
            reuse=self._reuse,
            initializer=self._initializer,
            regularizer=self._regularizer,
            caching_device=self._caching_device,
            partitioner=self._partitioner,
            custom_getter=self._custom_getter,
            dtype=self._dtype,
            use_resource=self._use_resource,
            constraint=self._constraint)
        try:
          entered_pure_variable_scope = pure_variable_scope.__enter__()
        except:
          pure_variable_scope.__exit__(*sys.exc_info())
          raise
        self._cached_pure_variable_scope = pure_variable_scope
        return entered_pure_variable_scope

    else:  # Here name_or_scope is None. Using default name, but made unique.
      if self._reuse:
        raise ValueError("reuse=True cannot be used without a name_or_scope")
      current_name_scope = current_name_scope or ops.name_scope(
          self._default_name)
      try:
        current_name_scope_name = current_name_scope.__enter__()
      except:
        current_name_scope.__exit__(*sys.exc_info())
        raise
      self._current_name_scope = current_name_scope
      unique_default_name = _get_unique_variable_scope(self._default_name)
      pure_variable_scope = _pure_variable_scope(
          unique_default_name,
          initializer=self._initializer,
          regularizer=self._regularizer,
          caching_device=self._caching_device,
          partitioner=self._partitioner,
          custom_getter=self._custom_getter,
          old_name_scope=current_name_scope_name,
          dtype=self._dtype,
          use_resource=self._use_resource,
          constraint=self._constraint)
      try:
        entered_pure_variable_scope = pure_variable_scope.__enter__()
      except:
        pure_variable_scope.__exit__(*sys.exc_info())
        raise
      self._cached_pure_variable_scope = pure_variable_scope
      return entered_pure_variable_scope

  def __exit__(self, type_arg, value_arg, traceback_arg):
    """Exits the variable scope, the name scope, and the graph context."""
    self._cached_pure_variable_scope.__exit__(type_arg, value_arg,
                                              traceback_arg)
    if self._current_name_scope:
      self._current_name_scope.__exit__(type_arg, value_arg, traceback_arg)
    if self._in_graph_mode and not self._building_function:
      self._graph_context_manager.__exit__(type_arg, value_arg, traceback_arg)
# pylint: disable=g-doc-return-or-yield
@tf_export(v1=["variable_op_scope"])
@tf_contextlib.contextmanager
def variable_op_scope(values,
                      name_or_scope,
                      default_name=None,
                      initializer=None,
                      regularizer=None,
                      caching_device=None,
                      partitioner=None,
                      custom_getter=None,
                      reuse=None,
                      dtype=None,
                      use_resource=None,
                      constraint=None):
  """Deprecated: context manager for defining an op that creates variables."""
  logging.warn("tf.variable_op_scope(values, name, default_name) is deprecated,"
               " use tf.variable_scope(name, default_name, values)")
  # Forward everything to variable_scope; only the argument order of this
  # deprecated API differs.
  scope_kwargs = dict(
      default_name=default_name,
      values=values,
      initializer=initializer,
      regularizer=regularizer,
      caching_device=caching_device,
      partitioner=partitioner,
      custom_getter=custom_getter,
      reuse=reuse,
      dtype=dtype,
      use_resource=use_resource,
      constraint=constraint)
  with variable_scope(name_or_scope, **scope_kwargs) as scope:
    yield scope
def _call_partitioner(partitioner, shape, dtype):
  """Call partitioner validating its inputs/output.

  Args:
    partitioner: a function mapping `Tensor` shape and dtype to a list of
      partitions.
    shape: shape of the `Tensor` to partition, must be fully defined and have
      at least one dimension (rank >= 1 is what the code below enforces).
    dtype: dtype of the elements in the `Tensor`.

  Returns:
    A list with elements >=1 and at most one element >1 (all-ones means no
    partitioning was applied). The index of the element >1, if any,
    corresponds to the partitioning axis.

  Raises:
    ValueError: if `shape` is not fully defined or has rank < 1, or if the
      partitioner output is not a sequence, does not match the rank of
      `shape`, contains a partition count < 1, or partitions along more than
      one axis.
  """
  if not shape.is_fully_defined():
    raise ValueError("Shape of a new partitioned variable must be "
                     "fully defined, but instead was %s." % (shape,))
  if shape.ndims < 1:
    raise ValueError("A partitioned Variable must have rank at least 1, "
                     "shape: %s" % shape)

  slicing = partitioner(shape=shape, dtype=dtype)
  if not isinstance(slicing, collections_lib.Sequence):
    raise ValueError("Partitioner must return a sequence, but saw: %s" %
                     slicing)
  if len(slicing) != shape.ndims:
    raise ValueError(
        "Partitioner returned a partition list that does not match the "
        "Variable's rank: %s vs. %s" % (slicing, shape))
  if any(p < 1 for p in slicing):
    raise ValueError("Partitioner returned zero partitions for some axes: %s" %
                     slicing)
  if sum(p > 1 for p in slicing) > 1:
    raise ValueError("Can only slice a variable along one dimension: "
                     "shape: %s, partitioning: %s" % (shape, slicing))
  return slicing
# TODO(slebedev): could be inlined, but
# `_VariableStore._get_partitioned_variable` is too complex even
# without this logic.
def _get_slice_dim_and_num_slices(slicing):
"""Get slicing dimension and number of slices from the partitioner output."""
for slice_dim, num_slices in enumerate(slicing):
if num_slices > 1:
break
else:
# Degenerate case: no partitioning applied.
slice_dim = 0
num_slices = 1
return slice_dim, num_slices
def _iter_slices(full_shape, num_slices, slice_dim):
"""Slices a given a shape along the specified dimension."""
num_slices_with_excess = full_shape[slice_dim] % num_slices
offset = [0] * len(full_shape)
min_slice_len = full_shape[slice_dim] // num_slices
for i in xrange(num_slices):
shape = full_shape[:]
shape[slice_dim] = min_slice_len + bool(i < num_slices_with_excess)
yield offset[:], shape
offset[slice_dim] += shape[slice_dim]
def default_variable_creator(next_creator=None, **kwargs):
  """Default variable creator."""
  assert next_creator is None
  get = kwargs.get
  initial_value = get("initial_value")
  trainable = get("trainable")
  collections = get("collections")
  validate_shape = get("validate_shape", True)
  caching_device = get("caching_device")
  name = get("name")
  variable_def = get("variable_def")
  dtype = get("dtype")
  expected_shape = get("expected_shape")
  import_scope = get("import_scope")
  constraint = get("constraint")
  use_resource = get("use_resource")
  synchronization = get("synchronization")
  aggregation = get("aggregation")
  shape = get("shape")

  # Resolve the resource-variable preference: the explicit argument wins,
  # then the current variable scope's setting, then the process default;
  # eager execution always forces resource variables.
  if use_resource is None:
    use_resource = get_variable_scope().use_resource
  if use_resource is None:
    use_resource = _DEFAULT_USE_RESOURCE
  use_resource = use_resource or context.executing_eagerly()

  if not use_resource:
    return variables.RefVariable(
        initial_value=initial_value,
        trainable=trainable,
        collections=collections,
        validate_shape=validate_shape,
        caching_device=caching_device,
        name=name,
        dtype=dtype,
        constraint=constraint,
        variable_def=variable_def,
        expected_shape=expected_shape,
        import_scope=import_scope,
        synchronization=synchronization,
        aggregation=aggregation,
        shape=shape)

  distribute_strategy = get("distribute_strategy")
  return resource_variable_ops.ResourceVariable(
      initial_value=initial_value,
      trainable=trainable,
      collections=collections,
      validate_shape=validate_shape,
      caching_device=caching_device,
      name=name,
      dtype=dtype,
      constraint=constraint,
      variable_def=variable_def,
      import_scope=import_scope,
      distribute_strategy=distribute_strategy,
      synchronization=synchronization,
      aggregation=aggregation,
      shape=shape)
def default_variable_creator_v2(next_creator=None, **kwargs):
  """Default variable creator."""
  assert next_creator is None
  # V2 always builds a ResourceVariable; forward the recognized keyword
  # arguments directly to the constructor.
  return resource_variable_ops.ResourceVariable(
      initial_value=kwargs.get("initial_value"),
      trainable=kwargs.get("trainable"),
      validate_shape=kwargs.get("validate_shape", True),
      caching_device=kwargs.get("caching_device"),
      name=kwargs.get("name"),
      dtype=kwargs.get("dtype"),
      constraint=kwargs.get("constraint"),
      variable_def=kwargs.get("variable_def"),
      import_scope=kwargs.get("import_scope"),
      distribute_strategy=kwargs.get("distribute_strategy"),
      synchronization=kwargs.get("synchronization"),
      aggregation=kwargs.get("aggregation"),
      shape=kwargs.get("shape"))
# Install these creators as the defaults used by the `variables` module
# (presumably a late binding to avoid a circular import — verify against
# variables.py).
variables.default_variable_creator = default_variable_creator
variables.default_variable_creator_v2 = default_variable_creator_v2
def _make_getter(captured_getter, captured_previous):
"""Gets around capturing loop variables in python being broken."""
return lambda **kwargs: captured_getter(captured_previous, **kwargs)
# TODO(apassos) remove forwarding symbol
# Backwards-compatibility alias: `variable` forwards to the V1 Variable class.
variable = variables.VariableV1
@tf_export(v1=["variable_creator_scope"])
@tf_contextlib.contextmanager
def variable_creator_scope_v1(variable_creator):
  """Scope which defines a variable creation function to be used by variable().

  `variable_creator` is expected to be a function with the following signature:

  ```
  def variable_creator(next_creator, **kwargs)
  ```

  A creator should eventually delegate to `next_creator` when it wants a
  variable built, rather than calling `Variable` or `ResourceVariable`
  directly — this is what keeps creators composable. A creator may create
  multiple variables, return already existing variables, or simply record
  that a variable was created and defer to the next creators in line.
  Creators may also modify the keyword arguments seen by the next creators.

  Custom getters in the variable scope will eventually resolve down to these
  custom creators when they do create variables.

  The valid keyword arguments in `kwargs` are:

      initial_value: A `Tensor`, or Python object convertible to a `Tensor`,
        which is the initial value for the Variable. The initial value must
        have a shape specified unless `validate_shape` is set to False. Can
        also be a callable with no argument that returns the initial value
        when called. In that case, `dtype` must be specified. (Note that
        initializer functions from init_ops.py must first be bound to a shape
        before being used here.)
      trainable: If `True`, the default, also adds the variable to the graph
        collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used
        as the default list of variables to use by the `Optimizer` classes.
        `trainable` defaults to `True` unless `synchronization` is set to
        `ON_READ`.
      collections: List of graph collections keys. The new variable is added
        to these collections. Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
      validate_shape: If `False`, allows the variable to be initialized with a
        value of unknown shape. If `True`, the default, the shape of
        `initial_value` must be known.
      caching_device: Optional device string describing where the Variable
        should be cached for reading. Defaults to the Variable's device. If
        not `None`, caches on another device. Typical use is to cache on the
        device where the Ops using the Variable reside, to deduplicate copying
        through `Switch` and other conditional statements.
      name: Optional name for the variable. Defaults to `'Variable'` and gets
        uniquified automatically.
      dtype: If set, initial_value will be converted to the given type. If
        `None`, either the datatype will be kept (if `initial_value` is a
        Tensor), or `convert_to_tensor` will decide.
      constraint: A constraint function to be applied to the variable after
        updates by some algorithms.
      use_resource: if True, a ResourceVariable is always created.
      synchronization: Indicates when a distributed a variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize. If `synchronization` is set to `ON_READ`, `trainable`
        must not be set to `True`.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.

  This set may grow over time, so it's important the signature of creators is
  as mentioned above.

  Args:
    variable_creator: the passed creator

  Yields:
    A scope in which the creator is active
  """
  graph = ops.get_default_graph()
  # pylint: disable=protected-access
  with graph._variable_creator_scope(variable_creator):
    yield
  # pylint: enable=protected-access
# Note: only the docstrings differ between this and v1.
@tf_export("variable_creator_scope", v1=[])
@tf_contextlib.contextmanager
def variable_creator_scope(variable_creator):
  """Scope which defines a variable creation function to be used by variable().

  variable_creator is expected to be a function with the following signature:

  ```
  def variable_creator(next_creator, **kwargs)
  ```

  The creator is supposed to eventually call the next_creator to create a
  variable if it does want to create a variable and not call Variable or
  ResourceVariable directly. This helps make creators composable. A creator may
  choose to create multiple variables, return already existing variables, or
  simply register that a variable was created and defer to the next creators in
  line. Creators can also modify the keyword arguments seen by the next
  creators.

  Custom getters in the variable scope will eventually resolve down to these
  custom creators when they do create variables.

  The valid keyword arguments in kwds are:
    initial_value: A `Tensor`, or Python object convertible to a `Tensor`,
      which is the initial value for the Variable. The initial value must have
      a shape specified unless `validate_shape` is set to False. Can also be a
      callable with no argument that returns the initial value when called. In
      that case, `dtype` must be specified. (Note that initializer functions
      from init_ops.py must first be bound to a shape before being used here.)
    trainable: If `True`, the default, GradientTapes automatically watch
      uses of this Variable.
    validate_shape: If `False`, allows the variable to be initialized with a
      value of unknown shape. If `True`, the default, the shape of
      `initial_value` must be known.
    caching_device: Optional device string describing where the Variable
      should be cached for reading. Defaults to the Variable's device.
      If not `None`, caches on another device. Typical use is to cache
      on the device where the Ops using the Variable reside, to deduplicate
      copying through `Switch` and other conditional statements.
    name: Optional name for the variable. Defaults to `'Variable'` and gets
      uniquified automatically.
    dtype: If set, initial_value will be converted to the given type.
      If `None`, either the datatype will be kept (if `initial_value` is
      a Tensor), or `convert_to_tensor` will decide.
    constraint: A constraint function to be applied to the variable after
      updates by some algorithms.
    synchronization: Indicates when a distributed variable will be
      aggregated. Accepted values are constants defined in the class
      `tf.VariableSynchronization`. By default the synchronization is set to
      `AUTO` and the current `DistributionStrategy` chooses
      when to synchronize. If `synchronization` is set to `ON_READ`,
      `trainable` must not be set to `True`.
    aggregation: Indicates how a distributed variable will be aggregated.
      Accepted values are constants defined in the class
      `tf.VariableAggregation`.

  This set may grow over time, so it's important the signature of creators is as
  mentioned above.

  Args:
    variable_creator: the passed creator

  Yields:
    A scope in which the creator is active
  """
  # Creator scopes are stored on the graph itself; this is a graph-internal
  # API, hence the protected-access suppression.
  with ops.get_default_graph()._variable_creator_scope(variable_creator):  # pylint: disable=protected-access
    yield
| codeparrot/github-code-clean |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles all processes relating to instances (guest vms).
The :py:class:`ComputeManager` class is a :py:class:`nova.manager.Manager` that
handles RPC calls relating to creating instances. It is responsible for
building a disk image, launching it via the underlying virtualization driver,
responding to calls to check its state, attaching persistent storage, and
terminating it.
"""
import base64
import contextlib
import functools
import socket
import sys
import time
import traceback
import uuid
from cinderclient import exceptions as cinder_exception
import eventlet.event
from eventlet import greenthread
import eventlet.semaphore
import eventlet.timeout
from keystoneclient import exceptions as keystone_exception
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import strutils
from oslo_utils import timeutils
import six
from nova import block_device
from nova.cells import rpcapi as cells_rpcapi
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import build_results
from nova.compute import power_state
from nova.compute import resource_tracker
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova import conductor
from nova import consoleauth
import nova.context
from nova import exception
from nova import hooks
from nova.i18n import _
from nova.i18n import _LE
from nova.i18n import _LI
from nova.i18n import _LW
from nova import image
from nova.image import glance
from nova import manager
from nova import network
from nova.network import model as network_model
from nova.network.security_group import openstack_driver
from nova import objects
from nova.objects import base as obj_base
from nova.openstack.common import log as logging
from nova.openstack.common import periodic_task
from nova import paths
from nova import rpc
from nova import safe_utils
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova import utils
from nova.virt import block_device as driver_block_device
from nova.virt import driver
from nova.virt import event as virtevent
from nova.virt import storage_users
from nova.virt import virtapi
from nova import volume
from nova.volume import encryptors
# Core compute-host options: console proxying, instance storage location,
# build concurrency and various allocation retry counts.
compute_opts = [
    cfg.StrOpt('console_host',
               default=socket.gethostname(),
               help='Console proxy host to use to connect '
                    'to instances on this host.'),
    cfg.StrOpt('default_access_ip_network_name',
               help='Name of network to use to set access IPs for instances'),
    cfg.BoolOpt('defer_iptables_apply',
                default=False,
                help='Whether to batch up the application of IPTables rules'
                     ' during a host restart and apply all at the end of the'
                     ' init phase'),
    cfg.StrOpt('instances_path',
               default=paths.state_path_def('instances'),
               help='Where instances are stored on disk'),
    cfg.BoolOpt('instance_usage_audit',
                default=False,
                help="Generate periodic compute.instance.exists"
                     " notifications"),
    cfg.IntOpt('live_migration_retry_count',
               default=30,
               help="Number of 1 second retries needed in live_migration"),
    cfg.BoolOpt('resume_guests_state_on_host_boot',
                default=False,
                help='Whether to start guests that were running before the '
                     'host rebooted'),
    cfg.IntOpt('network_allocate_retries',
               default=0,
               help="Number of times to retry network allocation on failures"),
    cfg.IntOpt('max_concurrent_builds',
               default=10,
               help='Maximum number of instance builds to run concurrently'),
    cfg.IntOpt('block_device_allocate_retries',
               default=60,
               help='Number of times to retry block device'
                    ' allocation on failures')
    ]
# Periodic-task intervals (seconds). By convention in this file: -1 disables
# the task and 0 means "run at the default rate".
interval_opts = [
    cfg.IntOpt('bandwidth_poll_interval',
               default=600,
               help='Interval to pull network bandwidth usage info. Not '
                    'supported on all hypervisors. Set to -1 to disable. '
                    'Setting this to 0 will run at the default rate.'),
    cfg.IntOpt('sync_power_state_interval',
               default=600,
               help='Interval to sync power states between the database and '
                    'the hypervisor. Set to -1 to disable. '
                    'Setting this to 0 will run at the default rate.'),
    cfg.IntOpt("heal_instance_info_cache_interval",
               default=60,
               help="Number of seconds between instance info_cache self "
                    "healing updates"),
    cfg.IntOpt('reclaim_instance_interval',
               default=0,
               help='Interval in seconds for reclaiming deleted instances'),
    cfg.IntOpt('volume_usage_poll_interval',
               default=0,
               help='Interval in seconds for gathering volume usages'),
    cfg.IntOpt('shelved_poll_interval',
               default=3600,
               help='Interval in seconds for polling shelved instances to '
                    'offload. Set to -1 to disable.'
                    'Setting this to 0 will run at the default rate.'),
    cfg.IntOpt('shelved_offload_time',
               default=0,
               help='Time in seconds before a shelved instance is eligible '
                    'for removing from a host. -1 never offload, 0 offload '
                    'when shelved'),
    cfg.IntOpt('instance_delete_interval',
               default=300,
               help='Interval in seconds for retrying failed instance file '
                    'deletes. Set to -1 to disable. '
                    'Setting this to 0 will run at the default rate.'),
    cfg.IntOpt('block_device_allocate_retries_interval',
               default=3,
               help='Waiting time interval (seconds) between block'
                    ' device allocation retries on failures')
    ]
# Timeouts (seconds) after which stuck instances are automatically acted on.
# 0 disables the corresponding timeout, except shutdown_timeout which is a
# plain duration.
timeout_opts = [
    cfg.IntOpt("reboot_timeout",
               default=0,
               help="Automatically hard reboot an instance if it has been "
                    "stuck in a rebooting state longer than N seconds. "
                    "Set to 0 to disable."),
    cfg.IntOpt("instance_build_timeout",
               default=0,
               help="Amount of time in seconds an instance can be in BUILD "
                    "before going into ERROR status. "
                    "Set to 0 to disable."),
    cfg.IntOpt("rescue_timeout",
               default=0,
               help="Automatically unrescue an instance after N seconds. "
                    "Set to 0 to disable."),
    cfg.IntOpt("resize_confirm_window",
               default=0,
               help="Automatically confirm resizes after N seconds. "
                    "Set to 0 to disable."),
    cfg.IntOpt("shutdown_timeout",
               default=60,
               help="Total amount of time to wait in seconds for an instance "
                    "to perform a clean shutdown."),
    ]
# Policy for instances that are still running on the hypervisor but have
# already been deleted from the database.
running_deleted_opts = [
    cfg.StrOpt("running_deleted_instance_action",
               default="reap",
               help="Action to take if a running deleted instance is detected."
                    " Valid options are 'noop', 'log', 'shutdown', or 'reap'. "
                    "Set to 'noop' to take no action."),
    cfg.IntOpt("running_deleted_instance_poll_interval",
               default=1800,
               help="Number of seconds to wait between runs of the cleanup "
                    "task."),
    cfg.IntOpt("running_deleted_instance_timeout",
               default=0,
               help="Number of seconds after being deleted when a running "
                    "instance should be considered eligible for cleanup."),
    ]
# How persistently to retry reaping an instance's on-disk files.
instance_cleaning_opts = [
    cfg.IntOpt('maximum_instance_delete_attempts',
               default=5,
               help='The number of times to attempt to reap an instance\'s '
                    'files.'),
    ]
CONF = cfg.CONF
# Register the option groups defined above on the global config object.
CONF.register_opts(compute_opts)
CONF.register_opts(interval_opts)
CONF.register_opts(timeout_opts)
CONF.register_opts(running_deleted_opts)
CONF.register_opts(instance_cleaning_opts)
# Pull in options owned by other nova modules that this manager reads.
CONF.import_opt('allow_resize_to_same_host', 'nova.compute.api')
CONF.import_opt('console_topic', 'nova.console.rpcapi')
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('vnc_enabled', 'nova.vnc')
CONF.import_opt('enabled', 'nova.spice', group='spice')
CONF.import_opt('enable', 'nova.cells.opts', group='cells')
CONF.import_opt('image_cache_subdirectory_name', 'nova.virt.imagecache')
CONF.import_opt('image_cache_manager_interval', 'nova.virt.imagecache')
CONF.import_opt('enabled', 'nova.rdp', group='rdp')
CONF.import_opt('html5_proxy_base_url', 'nova.rdp', group='rdp')
CONF.import_opt('enabled', 'nova.console.serial', group='serial_console')
CONF.import_opt('base_url', 'nova.console.serial', group='serial_console')

LOG = logging.getLogger(__name__)

# Partials pre-bound to the 'compute' service so decorated methods emit
# notifications on the right topic.
get_notifier = functools.partial(rpc.get_notifier, service='compute')
wrap_exception = functools.partial(exception.wrap_exception,
                                   get_notifier=get_notifier)
@utils.expects_func_args('migration')
def errors_out_migration(function):
    """Decorator to error out migration on failure.

    If the wrapped method raises, the migration passed via the 'migration'
    keyword argument is moved to 'error' status (only from the in-flight
    'migrating'/'post-migrating' statuses) and the original exception is
    re-raised.
    """
    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except Exception:
            # save_and_reraise_exception re-raises the original exception
            # when this with-block exits, including via the early return.
            with excutils.save_and_reraise_exception():
                # NOTE(review): 'migration' must be passed as a keyword
                # argument; a positional call would raise KeyError here.
                migration = kwargs['migration']
                status = migration.status
                if status not in ['migrating', 'post-migrating']:
                    # Migration already finished or errored; leave it alone.
                    return
                migration.status = 'error'
                try:
                    # Saving a migration record requires admin context.
                    with migration.obj_as_admin():
                        migration.save()
                except Exception:
                    # Best effort: never mask the original failure with a
                    # failure to record the error status.
                    LOG.debug('Error setting migration status '
                              'for instance %s.',
                              migration.instance_uuid, exc_info=True)
    return decorated_function
@utils.expects_func_args('instance')
def reverts_task_state(function):
    """Decorator to revert task_state on failure.

    On any unexpected exception the instance's task_state is reset to None
    before the exception is re-raised. UnexpectedTaskStateError is treated
    as preemption and leaves the task_state untouched.
    """
    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except exception.UnexpectedTaskStateError as e:
            # Note(maoy): unexpected task state means the current
            # task is preempted. Do not clear task state in this
            # case.
            with excutils.save_and_reraise_exception():
                LOG.info(_LI("Task possibly preempted: %s"),
                         e.format_message())
        except Exception:
            with excutils.save_and_reraise_exception():
                try:
                    # NOTE(review): relies on 'instance' arriving as a
                    # keyword argument; a positional 'instance' raises
                    # KeyError, which is swallowed below.
                    self._instance_update(context,
                                          kwargs['instance']['uuid'],
                                          task_state=None)
                except Exception:
                    # Best effort only; the original exception is what
                    # the caller needs to see.
                    pass
    return decorated_function
@utils.expects_func_args('instance')
def wrap_instance_fault(function):
    """Wraps a method to catch exceptions related to instances.

    This decorator wraps a method to catch any exceptions having to do with
    an instance that may get thrown. It then logs an instance fault in the db.
    """
    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except exception.InstanceNotFound:
            # No instance left to record a fault against.
            raise
        except Exception as e:
            # NOTE(gtt): If argument 'instance' is in args rather than kwargs,
            # we will get a KeyError exception which will cover up the real
            # exception. So, we update kwargs with the values from args first.
            # then, we can get 'instance' from kwargs easily.
            # NOTE(review): func_code is the Python 2 spelling (py3 uses
            # __code__); co_varnames[2:] skips the self/context parameters.
            kwargs.update(dict(zip(function.func_code.co_varnames[2:], args)))
            with excutils.save_and_reraise_exception():
                compute_utils.add_instance_fault_from_exc(context,
                        kwargs['instance'], e, sys.exc_info())
    return decorated_function
@utils.expects_func_args('instance')
def wrap_instance_event(function):
    """Wraps a method to log the event taken on the instance, and result.

    This decorator wraps a method to log the start and result of an event, as
    part of an action taken on an instance.
    """
    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        # Resolve the innermost wrapped function so getcallargs sees the
        # real signature even when several decorators are stacked.
        wrapped_func = utils.get_wrapped_function(function)
        keyed_args = safe_utils.getcallargs(wrapped_func, context, *args,
                                            **kwargs)
        instance_uuid = keyed_args['instance']['uuid']
        # NOTE(review): func_name is the Python 2 spelling (py3: __name__).
        event_name = 'compute_{0}'.format(function.func_name)
        # EventReporter records start/finish (or error) of the named event
        # against the instance's action log.
        with compute_utils.EventReporter(context, event_name, instance_uuid):
            return function(self, context, *args, **kwargs)
    return decorated_function
@utils.expects_func_args('image_id', 'instance')
def delete_image_on_error(function):
    """Ensure a snapshot image is removed when the wrapped method fails.

    compute.api creates the image before calling the snapshot method; if
    the method raises, delete that image and then re-raise the original
    exception.
    """
    @functools.wraps(function)
    def wrapper(self, context, image_id, instance, *args, **kwargs):
        try:
            return function(self, context, image_id, instance,
                            *args, **kwargs)
        except Exception:
            # Re-raise the original failure once cleanup has been attempted.
            with excutils.save_and_reraise_exception():
                LOG.debug("Cleaning up image %s", image_id,
                          exc_info=True, instance=instance)
                try:
                    self.image_api.delete(context, image_id)
                except Exception:
                    # Cleanup is best-effort; the original exception is the
                    # one that matters to the caller.
                    LOG.exception(_LE("Error while trying to clean up "
                                      "image %s"), image_id,
                                  instance=instance)
    return wrapper
# TODO(danms): Remove me after Icehouse
# NOTE(mikal): if the method being decorated has more than one decorator, then
# put this one first. Otherwise the various exception handling decorators do
# not function correctly.
def object_compat(function):
    """Wraps a method that expects a new-world instance

    This provides compatibility for callers passing old-style dict
    instances.
    """
    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        def _load_instance(instance_or_dict):
            # Convert a legacy dict into an objects.Instance; pass through
            # anything that is already an object.
            if isinstance(instance_or_dict, dict):
                instance = objects.Instance._from_db_object(
                    context, objects.Instance(), instance_or_dict,
                    expected_attrs=metas)
                instance._context = context
                return instance
            return instance_or_dict

        metas = ['metadata', 'system_metadata']
        try:
            # EAFP: 'instance' usually arrives as a keyword argument...
            kwargs['instance'] = _load_instance(kwargs['instance'])
        except KeyError:
            # ...otherwise it is the first positional argument.
            args = (_load_instance(args[0]),) + args[1:]

        # A legacy dict 'migration' gets the same object conversion.
        migration = kwargs.get('migration')
        if isinstance(migration, dict):
            migration = objects.Migration._from_db_object(
                context.elevated(), objects.Migration(),
                migration)
            kwargs['migration'] = migration

        return function(self, context, *args, **kwargs)
    return decorated_function
# TODO(danms): Remove me after Icehouse
def aggregate_object_compat(function):
    """Wraps a method that expects a new-world aggregate.

    Callers still passing a legacy dict get it converted to an
    objects.Aggregate before the wrapped method runs.
    """
    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        agg = kwargs.get('aggregate')
        if isinstance(agg, dict):
            kwargs['aggregate'] = objects.Aggregate._from_db_object(
                context.elevated(), objects.Aggregate(), agg)
        return function(self, context, *args, **kwargs)
    return decorated_function
class InstanceEvents(object):
    """Registry of pending external events for instances on this host.

    _events maps instance uuid -> {event name: eventlet.event.Event}.
    All access to the registry is serialized per-instance through
    utils.synchronized locks derived from _lock_name().
    """
    def __init__(self):
        # instance uuid -> {event_name: eventlet.event.Event}
        self._events = {}

    @staticmethod
    def _lock_name(instance):
        # Per-instance lock name; different instances never contend.
        return '%s-%s' % (instance.uuid, 'events')

    def prepare_for_instance_event(self, instance, event_name):
        """Prepare to receive an event for an instance.

        This will register an event for the given instance that we will
        wait on later. This should be called before initiating whatever
        action will trigger the event. The resulting eventlet.event.Event
        object should be wait()'d on to ensure completion.

        :param instance: the instance for which the event will be generated
        :param event_name: the name of the event we're expecting
        :returns: an event object that should be wait()'d on
        """
        @utils.synchronized(self._lock_name(instance))
        def _create_or_get_event():
            # Return the existing Event for this (instance, name) pair if
            # one was already registered, else create and store a new one.
            if instance.uuid not in self._events:
                self._events.setdefault(instance.uuid, {})
            return self._events[instance.uuid].setdefault(
                event_name, eventlet.event.Event())
        LOG.debug('Preparing to wait for external event %(event)s',
                  {'event': event_name}, instance=instance)
        return _create_or_get_event()

    def pop_instance_event(self, instance, event):
        """Remove a pending event from the wait list.

        This will remove a pending event from the wait list so that it
        can be used to signal the waiters to wake up.

        :param instance: the instance for which the event was generated
        :param event: the nova.objects.external_event.InstanceExternalEvent
                      that describes the event
        :returns: the eventlet.event.Event object on which the waiters
                  are blocked
        """
        # Sentinels distinguish "no events at all" from "no matching event"
        # so the right debug message can be logged outside the lock.
        no_events_sentinel = object()
        no_matching_event_sentinel = object()

        @utils.synchronized(self._lock_name(instance))
        def _pop_event():
            events = self._events.get(instance.uuid)
            if not events:
                return no_events_sentinel
            _event = events.pop(event.key, None)
            if not events:
                # Last pending event for this instance: drop its dict.
                del self._events[instance.uuid]
            if _event is None:
                return no_matching_event_sentinel
            return _event

        result = _pop_event()
        if result == no_events_sentinel:
            LOG.debug('No waiting events found dispatching %(event)s',
                      {'event': event.key},
                      instance=instance)
            return None
        elif result == no_matching_event_sentinel:
            LOG.debug('No event matching %(event)s in %(events)s',
                      {'event': event.key,
                       'events': self._events.get(instance.uuid, {}).keys()},
                      instance=instance)
            return None
        else:
            return result

    def clear_events_for_instance(self, instance):
        """Remove all pending events for an instance.

        This will remove all events currently pending for an instance
        and return them (indexed by event name).

        :param instance: the instance for which events should be purged
        :returns: a dictionary of {event_name: eventlet.event.Event}
        """
        @utils.synchronized(self._lock_name(instance))
        def _clear_events():
            # NOTE(danms): Use getitem syntax for the instance until
            # all the callers are using objects
            return self._events.pop(instance['uuid'], {})
        return _clear_events()
class ComputeVirtAPI(virtapi.VirtAPI):
    """VirtAPI implementation backed by the compute manager.

    Gives virt drivers restricted access to compute-manager facilities
    (conductor calls, instance event waiting) without exposing the whole
    manager.
    """
    def __init__(self, compute):
        super(ComputeVirtAPI, self).__init__()
        # The owning ComputeManager instance.
        self._compute = compute

    def provider_fw_rule_get_all(self, context):
        # Delegates straight to conductor.
        return self._compute.conductor_api.provider_fw_rule_get_all(context)

    def _default_error_callback(self, event_name, instance):
        # Used when wait_for_instance_event() is given no error_callback.
        raise exception.NovaException(_('Instance event failed'))

    @contextlib.contextmanager
    def wait_for_instance_event(self, instance, event_names, deadline=300,
                                error_callback=None):
        """Plan to wait for some events, run some code, then wait.

        This context manager will first create plans to wait for the
        provided event_names, yield, and then wait for all the scheduled
        events to complete.

        Note that this uses an eventlet.timeout.Timeout to bound the
        operation, so callers should be prepared to catch that
        failure and handle that situation appropriately.

        If the event is not received by the specified timeout deadline,
        eventlet.timeout.Timeout is raised.

        If the event is received but did not have a 'completed'
        status, a NovaException is raised. If an error_callback is
        provided, instead of raising an exception as detailed above
        for the failure case, the callback will be called with the
        event_name and instance, and can return True to continue
        waiting for the rest of the events, False to stop processing,
        or raise an exception which will bubble up to the waiter.

        :param instance: The instance for which an event is expected
        :param event_names: A list of event names. Each element can be a
                            string event name or tuple of strings to
                            indicate (name, tag).
        :param deadline: Maximum number of seconds we should wait for all
                         of the specified events to arrive.
        :param error_callback: A function to be called if an event arrives
        """
        if error_callback is None:
            error_callback = self._default_error_callback
        events = {}
        for event_name in event_names:
            if isinstance(event_name, tuple):
                # Normalize (name, tag) tuples into the canonical key form.
                name, tag = event_name
                event_name = objects.InstanceExternalEvent.make_key(
                    name, tag)
            events[event_name] = (
                self._compute.instance_events.prepare_for_instance_event(
                    instance, event_name))
        # Run the caller's body; the registered events may arrive meanwhile.
        yield
        # Now block (bounded by deadline) until every event has fired.
        with eventlet.timeout.Timeout(deadline):
            for event_name, event in events.items():
                actual_event = event.wait()
                if actual_event.status == 'completed':
                    continue
                # Non-completed event: let the callback decide whether to
                # keep waiting (truthy), stop (False), or raise.
                decision = error_callback(event_name, instance)
                if decision is False:
                    break
class ComputeManager(manager.Manager):
"""Manages the running instances from creation to destruction."""
target = messaging.Target(version='3.38')
# How long to wait in seconds before re-issuing a shutdown
# signal to a instance during power off. The overall
# time to wait is set by CONF.shutdown_timeout.
SHUTDOWN_RETRY_INTERVAL = 10
    def __init__(self, compute_driver=None, *args, **kwargs):
        """Load configuration options and connect to the hypervisor."""
        self.virtapi = ComputeVirtAPI(self)
        self.network_api = network.API()
        self.volume_api = volume.API()
        self.image_api = image.API()
        # Timestamps/flags for the bandwidth-usage periodic task.
        self._last_host_check = 0
        self._last_bw_usage_poll = 0
        self._bw_usage_supported = True
        self._last_bw_usage_cell_update = 0
        self.compute_api = compute.API()
        self.compute_rpcapi = compute_rpcapi.ComputeAPI()
        self.conductor_api = conductor.API()
        self.compute_task_api = conductor.ComputeTaskAPI()
        self.is_neutron_security_groups = (
            openstack_driver.is_neutron_security_groups())
        self.consoleauth_rpcapi = consoleauth.rpcapi.ConsoleAuthAPI()
        self.cells_rpcapi = cells_rpcapi.CellsAPI()
        self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
        # nodename -> ResourceTracker, populated lazily.
        self._resource_tracker_dict = {}
        self.instance_events = InstanceEvents()
        # Pool and bookkeeping for concurrent power-state syncs.
        self._sync_power_pool = eventlet.GreenPool()
        self._syncs_in_progress = {}

        # Bound concurrent instance builds; 0 means unlimited.
        if CONF.max_concurrent_builds != 0:
            self._build_semaphore = eventlet.semaphore.Semaphore(
                CONF.max_concurrent_builds)
        else:
            self._build_semaphore = compute_utils.UnlimitedSemaphore()

        super(ComputeManager, self).__init__(service_name="compute",
                                             *args, **kwargs)

        # NOTE(russellb) Load the driver last.  It may call back into the
        # compute manager via the virtapi, so we want it to be fully
        # initialized before that happens.
        self.driver = driver.load_compute_driver(self.virtapi, compute_driver)
        self.use_legacy_block_device_info = \
            self.driver.need_legacy_block_device_info
def _get_resource_tracker(self, nodename):
rt = self._resource_tracker_dict.get(nodename)
if not rt:
if not self.driver.node_is_available(nodename):
raise exception.NovaException(
_("%s is not a valid node managed by this "
"compute host.") % nodename)
rt = resource_tracker.ResourceTracker(self.host,
self.driver,
nodename)
self._resource_tracker_dict[nodename] = rt
return rt
def _update_resource_tracker(self, context, instance):
"""Let the resource tracker know that an instance has changed state."""
if (instance['host'] == self.host and
self.driver.node_is_available(instance['node'])):
rt = self._get_resource_tracker(instance.get('node'))
rt.update_usage(context, instance)
def _instance_update(self, context, instance_uuid, **kwargs):
"""Update an instance in the database using kwargs as value."""
instance_ref = self.conductor_api.instance_update(context,
instance_uuid,
**kwargs)
self._update_resource_tracker(context, instance_ref)
return instance_ref
def _set_instance_error_state(self, context, instance):
instance_uuid = instance['uuid']
try:
self._instance_update(context, instance_uuid,
vm_state=vm_states.ERROR)
except exception.InstanceNotFound:
LOG.debug('Instance has been destroyed from under us while '
'trying to set it to ERROR',
instance_uuid=instance_uuid)
    def _set_instance_obj_error_state(self, context, instance):
        """Set an Instance *object* to ERROR and save it, tolerating a
        concurrent delete (object-based twin of _set_instance_error_state).
        """
        try:
            instance.vm_state = vm_states.ERROR
            instance.save()
        except exception.InstanceNotFound:
            # The instance was deleted underneath us; nothing to save.
            LOG.debug('Instance has been destroyed from under us while '
                      'trying to set it to ERROR', instance=instance)
def _get_instances_on_driver(self, context, filters=None):
"""Return a list of instance records for the instances found
on the hypervisor which satisfy the specified filters. If filters=None
return a list of instance records for all the instances found on the
hypervisor.
"""
if not filters:
filters = {}
try:
driver_uuids = self.driver.list_instance_uuids()
if len(driver_uuids) == 0:
# Short circuit, don't waste a DB call
return objects.InstanceList()
filters['uuid'] = driver_uuids
local_instances = objects.InstanceList.get_by_filters(
context, filters, use_slave=True)
return local_instances
except NotImplementedError:
pass
# The driver doesn't support uuids listing, so we'll have
# to brute force.
driver_instances = self.driver.list_instances()
instances = objects.InstanceList.get_by_filters(context, filters,
use_slave=True)
name_map = {instance.name: instance for instance in instances}
local_instances = []
for driver_instance in driver_instances:
instance = name_map.get(driver_instance)
if not instance:
continue
local_instances.append(instance)
return local_instances
    def _destroy_evacuated_instances(self, context):
        """Destroys evacuated instances.

        While nova-compute was down, the instances running on it could be
        evacuated to another host. Check that the instances reported
        by the driver are still associated with this host. If they are
        not, destroy them, with the exception of instances which are in
        the MIGRATING, RESIZE_MIGRATING, RESIZE_MIGRATED, RESIZE_FINISH
        task state or RESIZED vm state.
        """
        our_host = self.host
        filters = {'deleted': False}
        local_instances = self._get_instances_on_driver(context, filters)
        for instance in local_instances:
            if instance.host != our_host:
                # Skip instances that are mid-migration/resize: their host
                # field legitimately points elsewhere while they still run
                # here.
                if (instance.task_state in [task_states.MIGRATING,
                                            task_states.RESIZE_MIGRATING,
                                            task_states.RESIZE_MIGRATED,
                                            task_states.RESIZE_FINISH]
                        or instance.vm_state in [vm_states.RESIZED]):
                    LOG.debug('Will not delete instance as its host ('
                              '%(instance_host)s) is not equal to our '
                              'host (%(our_host)s) but its task state is '
                              '(%(task_state)s) and vm state is '
                              '(%(vm_state)s)',
                              {'instance_host': instance.host,
                               'our_host': our_host,
                               'task_state': instance.task_state,
                               'vm_state': instance.vm_state},
                              instance=instance)
                    continue
                LOG.info(_LI('Deleting instance as its host ('
                             '%(instance_host)s) is not equal to our '
                             'host (%(our_host)s).'),
                         {'instance_host': instance.host,
                          'our_host': our_host}, instance=instance)
                try:
                    # Gather what the driver needs for a clean destroy.
                    network_info = self._get_instance_nw_info(context,
                                                              instance)
                    bdi = self._get_instance_block_device_info(context,
                                                               instance)
                    # Keep disks when they live on storage shared with the
                    # host the instance was evacuated to.
                    destroy_disks = not (self._is_instance_storage_shared(
                        context, instance))
                except exception.InstanceNotFound:
                    network_info = network_model.NetworkInfo()
                    bdi = {}
                    LOG.info(_LI('Instance has been marked deleted already, '
                                 'removing it from the hypervisor.'),
                             instance=instance)
                    # always destroy disks if the instance was deleted
                    destroy_disks = True
                self.driver.destroy(context, instance,
                                    network_info,
                                    bdi, destroy_disks)
    def _is_instance_storage_shared(self, context, instance):
        """Return True if the instance's storage is shared with another host.

        Asks the local driver for check data, then asks the instance's host
        (over RPC) whether it can see the same storage. Drivers without the
        check, or check failures, are treated per the branches below.
        """
        shared_storage = True
        data = None
        try:
            data = self.driver.check_instance_shared_storage_local(context,
                                                       instance)
            if data:
                shared_storage = (self.compute_rpcapi.
                                  check_instance_shared_storage(context,
                                  instance, data))
        except NotImplementedError:
            # Driver can't tell: assume not shared (safer to keep disks
            # handling conservative).
            LOG.warning(_LW('Hypervisor driver does not support '
                            'instance shared storage check, '
                            'assuming it\'s not on shared storage'),
                        instance=instance)
            shared_storage = False
        except Exception:
            # Unexpected failure: log it and fall through with the default
            # (shared_storage stays True).
            LOG.exception(_LE('Failed to check if instance shared'),
                      instance=instance)
        finally:
            # Always let the driver clean up any temp check artifacts.
            if data:
                self.driver.check_instance_shared_storage_cleanup(context,
                                                                  data)
        return shared_storage
    def _complete_partial_deletion(self, context, instance):
        """Complete deletion for instances in DELETED status but not marked as
        deleted in the DB
        """
        instance.destroy()
        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
                context, instance.uuid)
        quotas = objects.Quotas(context)
        project_id, user_id = objects.quotas.ids_from_instance(context,
                                                               instance)
        # Negative deltas hand the instance's quota (count, vcpus, ram)
        # back to the project/user.
        quotas.reserve(context, project_id=project_id, user_id=user_id,
                       instances=-1, cores=-instance.vcpus,
                       ram=-instance.memory_mb)
        self._complete_deletion(context,
                                instance,
                                bdms,
                                quotas,
                                instance.system_metadata)
    def _complete_deletion(self, context, instance, bdms,
                           quotas, system_meta):
        """Finish an instance deletion: commit quota changes, drop block
        device mappings, emit the delete.end notification and revoke any
        console auth tokens.
        """
        if quotas:
            quotas.commit()

        # ensure block device mappings are not leaked
        for bdm in bdms:
            bdm.destroy()

        self._notify_about_instance_usage(context, instance, "delete.end",
                system_metadata=system_meta)

        # Console tokens are held by consoleauth (or the cells service when
        # cells are enabled); invalidate them for the deleted instance.
        if CONF.vnc_enabled or CONF.spice.enabled:
            if CONF.cells.enable:
                self.cells_rpcapi.consoleauth_delete_tokens(context,
                        instance.uuid)
            else:
                self.consoleauth_rpcapi.delete_tokens_for_instance(context,
                        instance.uuid)
def _init_instance(self, context, instance):
    '''Initialize this instance during service init.

    Called once per instance from init_host(). Inspects the persisted
    vm_state/task_state left behind by a previous run of the service and
    tries to bring the instance back to a consistent state: completing
    interrupted deletions, clearing transitional task states, retrying
    reboots/stops/starts, rolling back crashed migrations and optionally
    resuming guests after a host reboot.

    NOTE(review): the order of these checks matters -- several branches
    return early and later branches assume the earlier states have been
    ruled out. Do not reorder.
    '''
    # Instances that are shut down, or in an error state can not be
    # initialized and are not attempted to be recovered. The exception
    # to this are instances that are in RESIZE_MIGRATING or DELETING,
    # which are dealt with further down.
    if (instance.vm_state == vm_states.SOFT_DELETED or
        (instance.vm_state == vm_states.ERROR and
        instance.task_state not in
        (task_states.RESIZE_MIGRATING, task_states.DELETING))):
        LOG.debug("Instance is in %s state.",
                  instance.vm_state, instance=instance)
        return

    if instance.vm_state == vm_states.DELETED:
        # DB row still exists but the instance was already deleted on
        # the hypervisor side; finish the bookkeeping.
        try:
            self._complete_partial_deletion(context, instance)
        except Exception:
            # we don't want that an exception blocks the init_host
            msg = _LE('Failed to complete a deletion')
            LOG.exception(msg, instance=instance)
        return

    if (instance.vm_state == vm_states.BUILDING or
        instance.task_state in [task_states.SCHEDULING,
                                task_states.BLOCK_DEVICE_MAPPING,
                                task_states.NETWORKING,
                                task_states.SPAWNING]):
        # NOTE(dave-mcnally) compute stopped before instance was fully
        # spawned so set to ERROR state. This is safe to do as the state
        # may be set by the api but the host is not so if we get here the
        # instance has already been scheduled to this particular host.
        LOG.debug("Instance failed to spawn correctly, "
                  "setting to ERROR state", instance=instance)
        instance.task_state = None
        instance.vm_state = vm_states.ERROR
        instance.save()
        return

    if (instance.vm_state in [vm_states.ACTIVE, vm_states.STOPPED] and
        instance.task_state in [task_states.REBUILDING,
                                task_states.REBUILD_BLOCK_DEVICE_MAPPING,
                                task_states.REBUILD_SPAWNING]):
        # NOTE(jichenjc) compute stopped before instance was fully
        # spawned so set to ERROR state. This is consistent to BUILD
        LOG.debug("Instance failed to rebuild correctly, "
                  "setting to ERROR state", instance=instance)
        instance.task_state = None
        instance.vm_state = vm_states.ERROR
        instance.save()
        return

    if (instance.vm_state != vm_states.ERROR and
        instance.task_state in [task_states.IMAGE_SNAPSHOT_PENDING,
                                task_states.IMAGE_PENDING_UPLOAD,
                                task_states.IMAGE_UPLOADING,
                                task_states.IMAGE_SNAPSHOT]):
        # An interrupted snapshot: clean up and clear the task state so
        # the instance becomes usable again (no early return -- later
        # checks may still apply).
        LOG.debug("Instance in transitional state %s at start-up "
                  "clearing task state",
                  instance.task_state, instance=instance)
        try:
            self._post_interrupted_snapshot_cleanup(context, instance)
        except Exception:
            # we don't want that an exception blocks the init_host
            msg = _LE('Failed to cleanup snapshot.')
            LOG.exception(msg, instance=instance)
        instance.task_state = None
        instance.save()

    if instance.task_state == task_states.DELETING:
        try:
            LOG.info(_LI('Service started deleting the instance during '
                         'the previous run, but did not finish. Restarting'
                         ' the deletion now.'), instance=instance)
            instance.obj_load_attr('metadata')
            instance.obj_load_attr('system_metadata')
            bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
                context, instance.uuid)
            # FIXME(comstud): This needs fixed. We should be creating
            # reservations and updating quotas, because quotas
            # wouldn't have been updated for this instance since it is
            # still in DELETING. See bug 1296414.
            #
            # Create a dummy quota object for now.
            quotas = objects.Quotas.from_reservations(
                context, None, instance=instance)
            self._delete_instance(context, instance, bdms, quotas)
        except Exception:
            # we don't want that an exception blocks the init_host
            msg = _LE('Failed to complete a deletion')
            LOG.exception(msg, instance=instance)
            self._set_instance_error_state(context, instance)
        return

    try_reboot, reboot_type = self._retry_reboot(context, instance)
    current_power_state = self._get_power_state(context, instance)

    if try_reboot:
        LOG.debug("Instance in transitional state (%(task_state)s) at "
                  "start-up and power state is (%(power_state)s), "
                  "triggering reboot",
                  {'task_state': instance.task_state,
                   'power_state': current_power_state},
                  instance=instance)
        # Re-issue the reboot over RPC so it goes through the normal path.
        self.compute_rpcapi.reboot_instance(context, instance,
                                            block_device_info=None,
                                            reboot_type=reboot_type)
        return

    elif (current_power_state == power_state.RUNNING and
          instance.task_state in [task_states.REBOOT_STARTED,
                                  task_states.REBOOT_STARTED_HARD,
                                  task_states.PAUSING,
                                  task_states.UNPAUSING]):
        # The operation evidently completed on the hypervisor; just
        # reconcile the DB state.
        LOG.warning(_LW("Instance in transitional state "
                        "(%(task_state)s) at start-up and power state "
                        "is (%(power_state)s), clearing task state"),
                    {'task_state': instance.task_state,
                     'power_state': current_power_state},
                    instance=instance)
        instance.task_state = None
        instance.vm_state = vm_states.ACTIVE
        instance.save()
    elif (current_power_state == power_state.PAUSED and
          instance.task_state == task_states.UNPAUSING):
        LOG.warning(_LW("Instance in transitional state "
                        "(%(task_state)s) at start-up and power state "
                        "is (%(power_state)s), clearing task state "
                        "and unpausing the instance"),
                    {'task_state': instance.task_state,
                     'power_state': current_power_state},
                    instance=instance)
        try:
            self.unpause_instance(context, instance)
        except NotImplementedError:
            # Some virt driver didn't support pause and unpause
            pass
        except Exception:
            LOG.exception(_LE('Failed to unpause instance'),
                          instance=instance)
        return

    if instance.task_state == task_states.POWERING_OFF:
        try:
            LOG.debug("Instance in transitional state %s at start-up "
                      "retrying stop request",
                      instance.task_state, instance=instance)
            self.stop_instance(context, instance)
        except Exception:
            # we don't want that an exception blocks the init_host
            msg = _LE('Failed to stop instance')
            LOG.exception(msg, instance=instance)
        return

    if instance.task_state == task_states.POWERING_ON:
        try:
            LOG.debug("Instance in transitional state %s at start-up "
                      "retrying start request",
                      instance.task_state, instance=instance)
            self.start_instance(context, instance)
        except Exception:
            # we don't want that an exception blocks the init_host
            msg = _LE('Failed to start instance')
            LOG.exception(msg, instance=instance)
        return

    net_info = compute_utils.get_nw_info_for_instance(instance)
    try:
        # Re-plug VIFs: a host reboot tears down the plumbing.
        self.driver.plug_vifs(instance, net_info)
    except NotImplementedError as e:
        LOG.debug(e, instance=instance)
    except exception.VirtualInterfacePlugException:
        # we don't want an exception to block the init_host
        LOG.exception(_LE("Vifs plug failed"), instance=instance)
        self._set_instance_error_state(context, instance)
        return

    if instance.task_state == task_states.RESIZE_MIGRATING:
        # We crashed during resize/migration, so roll back for safety
        try:
            # NOTE(mriedem): check old_vm_state for STOPPED here, if it's
            # not in system_metadata we default to True for backwards
            # compatibility
            power_on = (instance.system_metadata.get('old_vm_state') !=
                        vm_states.STOPPED)

            block_dev_info = self._get_instance_block_device_info(context,
                                                                  instance)

            self.driver.finish_revert_migration(context,
                instance, net_info, block_dev_info, power_on)

        except Exception as e:
            LOG.exception(_LE('Failed to revert crashed migration'),
                          instance=instance)
        finally:
            LOG.info(_LI('Instance found in migrating state during '
                         'startup. Resetting task_state'),
                     instance=instance)
            instance.task_state = None
            instance.save()
    if instance.task_state == task_states.MIGRATING:
        # Live migration did not complete, but instance is on this
        # host, so reset the state.
        instance.task_state = None
        instance.save(expected_task_state=[task_states.MIGRATING])

    db_state = instance.power_state
    drv_state = self._get_power_state(context, instance)
    # expect_running: the DB says RUNNING but the hypervisor disagrees,
    # i.e. the guest was up before the host went down.
    expect_running = (db_state == power_state.RUNNING and
                      drv_state != db_state)

    LOG.debug('Current state is %(drv_state)s, state in DB is '
              '%(db_state)s.',
              {'drv_state': drv_state, 'db_state': db_state},
              instance=instance)

    if expect_running and CONF.resume_guests_state_on_host_boot:
        LOG.info(_LI('Rebooting instance after nova-compute restart.'),
                 instance=instance)

        block_device_info = \
            self._get_instance_block_device_info(context, instance)

        try:
            self.driver.resume_state_on_host_boot(
                context, instance, net_info, block_device_info)
        except NotImplementedError:
            LOG.warning(_LW('Hypervisor driver does not support '
                            'resume guests'), instance=instance)
        except Exception:
            # NOTE(vish): The instance failed to resume, so we set the
            #             instance to error and attempt to continue.
            LOG.warning(_LW('Failed to resume instance'),
                        instance=instance)
            self._set_instance_error_state(context, instance)

    elif drv_state == power_state.RUNNING:
        # VMwareAPI drivers will raise an exception
        try:
            self.driver.ensure_filtering_rules_for_instance(
                instance, net_info)
        except NotImplementedError:
            LOG.warning(_LW('Hypervisor driver does not support '
                            'firewall rules'), instance=instance)
def _retry_reboot(self, context, instance):
    """Decide whether a previously interrupted reboot should be retried.

    Returns a (retry, reboot_type) tuple; ``retry`` is True when a
    reboot was pending (and the vm_state allows it) or was started but
    the guest never came back up.
    """
    power = self._get_power_state(context, instance)
    task = instance.task_state
    reboot_type = compute_utils.get_reboot_type(task, power)

    soft_pending = (task == task_states.REBOOT_PENDING and
                    instance.vm_state in vm_states.ALLOW_SOFT_REBOOT)
    hard_pending = (task == task_states.REBOOT_PENDING_HARD and
                    instance.vm_state in vm_states.ALLOW_HARD_REBOOT)
    stalled = (task in [task_states.REBOOT_STARTED,
                        task_states.REBOOT_STARTED_HARD] and
               power != power_state.RUNNING)

    return (soft_pending or hard_pending or stalled), reboot_type
def handle_lifecycle_event(self, event):
    """Map a virt lifecycle event onto a power state and synchronize
    the instance's recorded power state with it.
    """
    LOG.info(_LI("VM %(state)s (Lifecycle Event)"),
             {'state': event.get_name()},
             instance_uuid=event.get_instance_uuid())
    context = nova.context.get_admin_context(read_deleted='yes')
    instance = objects.Instance.get_by_uuid(context,
                                            event.get_instance_uuid(),
                                            expected_attrs=[])

    # Translate the driver transition into a nova power state.
    transition_to_power_state = {
        virtevent.EVENT_LIFECYCLE_STOPPED: power_state.SHUTDOWN,
        virtevent.EVENT_LIFECYCLE_STARTED: power_state.RUNNING,
        virtevent.EVENT_LIFECYCLE_PAUSED: power_state.PAUSED,
        virtevent.EVENT_LIFECYCLE_RESUMED: power_state.RUNNING,
    }
    transition = event.get_transition()
    vm_power_state = transition_to_power_state.get(transition)
    if vm_power_state is None:
        LOG.warning(_LW("Unexpected power state %d"), transition)

    if vm_power_state is not None:
        LOG.debug('Synchronizing instance power state after lifecycle '
                  'event "%(event)s"; current vm_state: %(vm_state)s, '
                  'current task_state: %(task_state)s, current DB '
                  'power_state: %(db_power_state)s, VM power_state: '
                  '%(vm_power_state)s',
                  dict(event=event.get_name(),
                       vm_state=instance.vm_state,
                       task_state=instance.task_state,
                       db_power_state=instance.power_state,
                       vm_power_state=vm_power_state),
                  instance_uuid=instance.uuid)
        self._sync_instance_power_state(context,
                                        instance,
                                        vm_power_state)
def handle_events(self, event):
    """Entry point for driver events; only lifecycle events are acted on."""
    if not isinstance(event, virtevent.LifecycleEvent):
        LOG.debug("Ignoring event %s", event)
        return
    try:
        self.handle_lifecycle_event(event)
    except exception.InstanceNotFound:
        # Event raced with a delete; nothing to synchronize.
        LOG.debug("Event %s arrived for non-existent instance. The "
                  "instance was probably deleted.", event)
def init_virt_events(self):
    # Wire the driver's asynchronous lifecycle-event stream into this
    # manager's dispatcher (handle_events).
    self.driver.register_event_listener(self.handle_events)
def init_host(self):
    """Initialization for a standalone compute service.

    Initializes the virt driver, cleans up instances evacuated away from
    this host, and runs per-instance recovery (_init_instance) for every
    instance the DB says lives here.
    """
    self.driver.init_host(host=self.host)
    context = nova.context.get_admin_context()
    instances = objects.InstanceList.get_by_host(
        context, self.host, expected_attrs=['info_cache'])

    if CONF.defer_iptables_apply:
        # Batch iptables changes while we churn through every instance,
        # applied once in the finally below.
        self.driver.filter_defer_apply_on()

    self.init_virt_events()

    try:
        # checking that instance was not already evacuated to other host
        self._destroy_evacuated_instances(context)
        for instance in instances:
            self._init_instance(context, instance)
    finally:
        if CONF.defer_iptables_apply:
            self.driver.filter_defer_apply_off()
def cleanup_host(self):
    # Give the virt driver a chance to release host-level resources when
    # the service shuts down.
    self.driver.cleanup_host(host=self.host)
def pre_start_hook(self):
    """After the service is initialized, but before we fully bring
    the service up by listening on RPC queues, make sure to update
    our available resources (and indirectly our available nodes).
    """
    # Refresh the resource tracker before accepting any RPC work.
    self.update_available_resource(nova.context.get_admin_context())
def _get_power_state(self, context, instance):
    """Retrieve the power state for the given instance."""
    LOG.debug('Checking state', instance=instance)
    try:
        info = self.driver.get_info(instance)
    except exception.InstanceNotFound:
        # Hypervisor no longer knows about the instance.
        return power_state.NOSTATE
    return info.state
def get_console_topic(self, context):
    """Retrieves the console host for a project on this host.
    Currently this is just set in the flags for each compute host.
    """
    # TODO(mdragon): perhaps make this variable by console_type?
    return '.'.join([CONF.console_topic, CONF.console_host])
def get_console_pool_info(self, context, console_type):
    # Straight pass-through to the virt driver's console pool lookup.
    return self.driver.get_console_pool_info(console_type)
@wrap_exception()
def refresh_security_group_rules(self, context, security_group_id):
    """Tell the virtualization driver to refresh security group rules.

    Passes straight through to the virtualization driver.
    """
    return self.driver.refresh_security_group_rules(security_group_id)
@wrap_exception()
def refresh_security_group_members(self, context, security_group_id):
    """Tell the virtualization driver to refresh security group members.

    Passes straight through to the virtualization driver.
    """
    return self.driver.refresh_security_group_members(security_group_id)
@wrap_exception()
def refresh_instance_security_rules(self, context, instance):
    """Tell the virtualization driver to refresh security rules for
    an instance.

    Passes straight through to the virtualization driver.

    Synchronise the call because we may still be in the middle of
    creating the instance.
    """
    # Serialized per-instance so a concurrent build can finish first.
    @utils.synchronized(instance['uuid'])
    def _sync_refresh():
        try:
            return self.driver.refresh_instance_security_rules(instance)
        except NotImplementedError:
            LOG.warning(_LW('Hypervisor driver does not support '
                            'security groups.'), instance=instance)

    return _sync_refresh()
@wrap_exception()
def refresh_provider_fw_rules(self, context):
    """This call passes straight through to the virtualization driver."""
    return self.driver.refresh_provider_fw_rules()
def _get_instance_nw_info(self, context, instance, use_slave=False):
    """Get a list of dictionaries of network data of an instance."""
    # NOTE(danms): Several places in the code look up instances without
    # pulling system_metadata for performance, and call this function.
    # If we get an instance without it, re-fetch so that the call
    # to network_api (which requires it for instance_type) will
    # succeed.
    missing_sys_meta = (not hasattr(instance, 'system_metadata') or
                        len(instance['system_metadata']) == 0)
    if missing_sys_meta:
        instance = objects.Instance.get_by_uuid(
            context, instance['uuid'],
            expected_attrs=['system_metadata'],
            use_slave=use_slave)
    return self.network_api.get_instance_nw_info(context, instance)
def _await_block_device_map_created(self, context, vol_id):
    """Poll the volume service until *vol_id* finishes being created.

    :param vol_id: the volume to wait on
    :returns: the number of polling attempts that were made
    :raises exception.VolumeNotCreated: if the volume is still in
        'creating'/'downloading' after the configured number of attempts
    """
    # TODO(yamahata): creating volume simultaneously
    #                 reduces creation time?
    # TODO(yamahata): eliminate dumb polling
    start = time.time()
    retries = CONF.block_device_allocate_retries
    if retries < 0:
        # Bug fix: the message previously referred to a nonexistent
        # option name ('block_device_retries'); use the real one.
        LOG.warning(_LW("Treating negative config value (%(retries)s) for "
                        "'block_device_allocate_retries' as 0."),
                    {'retries': retries})
    # (1) treat negative config value as 0
    # (2) the configured value is 0, one attempt should be made
    # (3) the configured value is > 0, then the total number attempts
    #     is (retries + 1)
    attempts = max(retries, 0) + 1
    for attempt in range(1, attempts + 1):
        volume = self.volume_api.get(context, vol_id)
        volume_status = volume['status']
        if volume_status not in ['creating', 'downloading']:
            if volume_status != 'available':
                LOG.warning(_LW("Volume id: %s finished being created but "
                                "was not set as 'available'"), vol_id)
            # Done waiting; report how many polls it took.
            return attempt
        greenthread.sleep(CONF.block_device_allocate_retries_interval)
    # NOTE(harlowja): Should only happen if we ran out of attempts
    raise exception.VolumeNotCreated(volume_id=vol_id,
                                     seconds=int(time.time() - start),
                                     attempts=attempts)
def _decode_files(self, injected_files):
"""Base64 decode the list of files to inject."""
if not injected_files:
return []
def _decode(f):
path, contents = f
try:
decoded = base64.b64decode(contents)
return path, decoded
except TypeError:
raise exception.Base64Exception(path=path)
return [_decode(f) for f in injected_files]
def _run_instance(self, context, request_spec,
                  filter_properties, requested_networks, injected_files,
                  admin_password, is_first_time, node, instance,
                  legacy_bdm_in_spec):
    """Launch a new instance with specified options.

    Wraps _build_instance with create.{start,error,end} notifications.
    RescheduledException means the build was handed back to the
    scheduler; BuildAbortException means it was abandoned; anything
    else puts the instance into ERROR and re-raises.
    """
    extra_usage_info = {}

    def notify(status, msg="", fault=None, **kwargs):
        """Send a create.{start,error,end} notification."""
        type_ = "create.%(status)s" % dict(status=status)
        info = extra_usage_info.copy()
        info['message'] = msg
        self._notify_about_instance_usage(context, instance, type_,
                                          extra_usage_info=info,
                                          fault=fault, **kwargs)

    try:
        self._prebuild_instance(context, instance)

        if request_spec and request_spec.get('image'):
            image_meta = request_spec['image']
        else:
            image_meta = {}

        extra_usage_info = {"image_name": image_meta.get('name', '')}

        notify("start")  # notify that build is starting

        instance, network_info = self._build_instance(context,
            request_spec, filter_properties, requested_networks,
            injected_files, admin_password, is_first_time, node,
            instance, image_meta, legacy_bdm_in_spec)
        notify("end", msg=_("Success"), network_info=network_info)

    except exception.RescheduledException as e:
        # Instance build encountered an error, and has been rescheduled.
        notify("error", fault=e)

    except exception.BuildAbortException as e:
        # Instance build aborted due to a non-failure
        LOG.info(e)
        notify("end", msg=e.format_message())  # notify that build is done

    except Exception as e:
        # Instance build encountered a non-recoverable error:
        with excutils.save_and_reraise_exception():
            self._set_instance_error_state(context, instance)
            notify("error", fault=e)  # notify that build failed
def _prebuild_instance(self, context, instance):
    """Run pre-build sanity checks and transition into BUILDING state."""
    self._check_instance_exists(context, instance)

    try:
        self._start_building(context, instance)
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError):
        # Quickly bail out of here
        raise exception.BuildAbortException(
            instance_uuid=instance.uuid,
            reason=_("Instance disappeared before we could start it"))
def _validate_instance_group_policy(self, context, instance,
                                    filter_properties):
    """Re-check the instance group's anti-affinity policy on this host.

    The scheduler enforces group policy, but two anti-affinity members
    can be scheduled concurrently and both land here. The check is
    serialized on the group hint so the race is resolved by raising a
    RescheduledException for the loser.
    """
    hints = filter_properties.get('scheduler_hints') or {}
    group_hint = hints.get('group')
    if not group_hint:
        return

    @utils.synchronized(group_hint)
    def _check_anti_affinity(context, instance, group_hint):
        group = objects.InstanceGroup.get_by_hint(context, group_hint)
        if 'anti-affinity' not in group.policies:
            return
        other_hosts = group.get_hosts(context, exclude=[instance.uuid])
        if self.host in other_hosts:
            raise exception.RescheduledException(
                instance_uuid=instance.uuid,
                reason=_("Anti-affinity instance group policy "
                         "was violated."))

    _check_anti_affinity(context, instance, group_hint)
def _build_instance(self, context, request_spec, filter_properties,
        requested_networks, injected_files, admin_password, is_first_time,
        node, instance, image_meta, legacy_bdm_in_spec):
    """Claim resources, allocate networks/block devices and spawn.

    Deprecated build path (see NOTE(danms) below). On failure the
    exception type determines the outcome: instance-gone errors abort
    the build, task-state/VIF errors re-raise as-is, InvalidBDM cleans
    up networking and re-raises, and any other error attempts a
    reschedule before erroring out the instance.

    :returns: (instance, network_info) on success
    """
    original_context = context
    context = context.elevated()

    # NOTE(danms): This method is deprecated, but could be called,
    # and if it is, it will have an old megatuple for requested_networks.
    if requested_networks is not None:
        requested_networks_obj = objects.NetworkRequestList(
            objects=[objects.NetworkRequest.from_tuple(t)
                     for t in requested_networks])
    else:
        requested_networks_obj = None

    # If neutron security groups pass requested security
    # groups to allocate_for_instance()
    if request_spec and self.is_neutron_security_groups:
        security_groups = request_spec.get('security_group')
    else:
        security_groups = []

    if node is None:
        node = self.driver.get_available_nodes(refresh=True)[0]
        LOG.debug("No node specified, defaulting to %s", node)

    network_info = None
    bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
        context, instance.uuid)

    # b64 decode the files to inject:
    injected_files_orig = injected_files
    injected_files = self._decode_files(injected_files)

    rt = self._get_resource_tracker(node)
    try:
        limits = filter_properties.get('limits', {})
        with rt.instance_claim(context, instance, limits) as inst_claim:
            # NOTE(russellb) It's important that this validation be done
            # *after* the resource tracker instance claim, as that is where
            # the host is set on the instance.
            self._validate_instance_group_policy(context, instance,
                    filter_properties)
            macs = self.driver.macs_for_instance(instance)
            dhcp_options = self.driver.dhcp_options_for_instance(instance)

            # Asynchronous allocation; network_info is a wrapper that is
            # waited on in the except branches below.
            network_info = self._allocate_network(original_context,
                    instance, requested_networks_obj, macs,
                    security_groups, dhcp_options)

            # Verify that all the BDMs have a device_name set and assign a
            # default to the ones missing it with the help of the driver.
            self._default_block_device_names(context, instance, image_meta,
                                             bdms)

            instance.vm_state = vm_states.BUILDING
            instance.task_state = task_states.BLOCK_DEVICE_MAPPING
            instance.numa_topology = inst_claim.claimed_numa_topology
            instance.save()

            block_device_info = self._prep_block_device(
                    context, instance, bdms)

            set_access_ip = (is_first_time and
                             not instance.access_ip_v4 and
                             not instance.access_ip_v6)

            flavor = None
            if filter_properties is not None:
                flavor = filter_properties.get('instance_type')
            instance = self._spawn(context, instance, image_meta,
                                   network_info, block_device_info,
                                   injected_files, admin_password,
                                   set_access_ip=set_access_ip,
                                   flavor=flavor)
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError):
        # the instance got deleted during the spawn
        # Make sure the async call finishes
        if network_info is not None:
            network_info.wait(do_raise=False)
        try:
            self._deallocate_network(context, instance)
        except Exception:
            msg = _LE('Failed to dealloc network '
                      'for deleted instance')
            LOG.exception(msg, instance=instance)
        raise exception.BuildAbortException(
            instance_uuid=instance.uuid,
            reason=_("Instance disappeared during build"))
    except (exception.UnexpectedTaskStateError,
            exception.VirtualInterfaceCreateException) as e:
        # Don't try to reschedule, just log and reraise.
        with excutils.save_and_reraise_exception():
            LOG.debug(e.format_message(), instance=instance)
            # Make sure the async call finishes
            if network_info is not None:
                network_info.wait(do_raise=False)
    except exception.InvalidBDM:
        with excutils.save_and_reraise_exception():
            if network_info is not None:
                network_info.wait(do_raise=False)
            try:
                self._deallocate_network(context, instance)
            except Exception:
                msg = _LE('Failed to dealloc network '
                          'for failed instance')
                LOG.exception(msg, instance=instance)
    except Exception:
        exc_info = sys.exc_info()
        # try to re-schedule instance:
        # Make sure the async call finishes
        if network_info is not None:
            network_info.wait(do_raise=False)
        rescheduled = self._reschedule_or_error(original_context, instance,
                exc_info, requested_networks, admin_password,
                injected_files_orig, is_first_time, request_spec,
                filter_properties, bdms, legacy_bdm_in_spec)
        if rescheduled:
            # log the original build error
            self._log_original_error(exc_info, instance.uuid)
            raise exception.RescheduledException(
                    instance_uuid=instance.uuid,
                    reason=six.text_type(exc_info[1]))
        else:
            # not re-scheduling, go to error:
            # NOTE(review): Python 2 three-argument raise, preserving the
            # original traceback.
            raise exc_info[0], exc_info[1], exc_info[2]

    # spawn success
    return instance, network_info
def _log_original_error(self, exc_info, instance_uuid):
    """Log the original build failure together with its traceback."""
    _exc_type, exc_value, _tb = exc_info
    LOG.error(_LE('Error: %s'), exc_value, instance_uuid=instance_uuid,
              exc_info=exc_info)
def _reschedule_or_error(self, context, instance, exc_info,
        requested_networks, admin_password, injected_files, is_first_time,
        request_spec, filter_properties, bdms=None,
        legacy_bdm_in_spec=True):
    """Try to re-schedule the build or re-raise the original build error to
    error out the instance.

    :returns: True if the build was handed back to the scheduler, False
        otherwise (including when the reschedule attempt itself failed).
    """
    original_context = context
    context = context.elevated()

    instance_uuid = instance.uuid
    rescheduled = False

    compute_utils.add_instance_fault_from_exc(context,
            instance, exc_info[1], exc_info=exc_info)
    self._notify_about_instance_usage(context, instance,
            'instance.create.error', fault=exc_info[1])

    try:
        LOG.debug("Clean up resource before rescheduling.",
                  instance=instance)
        if bdms is None:
            bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
                    context, instance.uuid)

        # Tear down everything the failed build allocated so a retry on
        # another host starts from a clean slate.
        self._shutdown_instance(context, instance,
                                bdms, requested_networks)
        self._cleanup_volumes(context, instance.uuid, bdms)
    except Exception:
        # do not attempt retry if clean up failed:
        with excutils.save_and_reraise_exception():
            self._log_original_error(exc_info, instance_uuid)

    try:
        method_args = (request_spec, admin_password, injected_files,
                       requested_networks, is_first_time, filter_properties,
                       legacy_bdm_in_spec)
        task_state = task_states.SCHEDULING

        rescheduled = self._reschedule(original_context, request_spec,
                filter_properties, instance,
                self.scheduler_rpcapi.run_instance, method_args,
                task_state, exc_info)

    except Exception:
        rescheduled = False
        LOG.exception(_LE("Error trying to reschedule"),
                      instance_uuid=instance_uuid)

    return rescheduled
def _reschedule(self, context, request_spec, filter_properties,
        instance, reschedule_method, method_args, task_state,
        exc_info=None):
    """Attempt to re-schedule a compute operation.

    :returns: True when the reschedule RPC was issued; None (falsy) when
        there is no retry info or no request spec to reschedule with.
    """
    instance_uuid = instance.uuid
    retry = filter_properties.get('retry', None)
    if not retry:
        # no retry information, do not reschedule.
        LOG.debug("Retry info not present, will not reschedule",
                  instance_uuid=instance_uuid)
        return

    if not request_spec:
        LOG.debug("No request spec, will not reschedule",
                  instance_uuid=instance_uuid)
        return

    # NOTE(review): func_name is the Python 2 function attribute
    # (__name__ on Python 3).
    LOG.debug("Re-scheduling %(method)s: attempt %(num)d",
              {'method': reschedule_method.func_name,
               'num': retry['num_attempts']}, instance_uuid=instance_uuid)

    # reset the task state:
    self._instance_update(context, instance_uuid, task_state=task_state)

    if exc_info:
        # stringify to avoid circular ref problem in json serialization:
        retry['exc'] = traceback.format_exception_only(exc_info[0],
                                                       exc_info[1])

    reschedule_method(context, *method_args)
    return True
@periodic_task.periodic_task
def _check_instance_build_time(self, context):
    """Ensure that instances are not stuck in build."""
    timeout = CONF.instance_build_timeout
    if timeout == 0:
        # Feature disabled.
        return

    filters = {'vm_state': vm_states.BUILDING, 'host': self.host}
    building = objects.InstanceList.get_by_filters(
        context, filters, expected_attrs=[], use_slave=True)

    # Error out every instance whose build has exceeded the timeout.
    timed_out = [inst for inst in building
                 if timeutils.is_older_than(inst['created_at'], timeout)]
    for instance in timed_out:
        self._set_instance_error_state(context, instance)
        LOG.warning(_LW("Instance build timed out. Set to error "
                        "state."), instance=instance)
def _check_instance_exists(self, context, instance):
    """Ensure an instance with the same name is not already present."""
    if not self.driver.instance_exists(instance):
        return
    raise exception.InstanceExists(name=instance.name)
def _start_building(self, context, instance):
    """Save the host and launched_on fields and log appropriately."""
    LOG.audit(_('Starting instance...'), context=context,
              instance=instance)
    # Clear the task state; only valid if we are still in SCHEDULING (or
    # have no task state) -- anything else means another operation raced
    # us and the update raises UnexpectedTaskStateError.
    self._instance_update(context, instance.uuid,
                          vm_state=vm_states.BUILDING,
                          task_state=None,
                          expected_task_state=(task_states.SCHEDULING,
                                               None))
def _allocate_network_async(self, context, instance, requested_networks,
                            macs, security_groups, is_vpn, dhcp_options):
    """Method used to allocate networks in the background.

    Broken out for testing.

    Retries network_allocate_retries times with exponential backoff
    (capped at 30 seconds between attempts); the last failure is
    re-raised with its original traceback.
    """
    LOG.debug("Allocating IP information in the background.",
              instance=instance)
    retries = CONF.network_allocate_retries
    if retries < 0:
        LOG.warning(_LW("Treating negative config value (%(retries)s) for "
                        "'network_allocate_retries' as 0."),
                    {'retries': retries})
        retries = 0
    attempts = retries + 1
    retry_time = 1
    for attempt in range(1, attempts + 1):
        try:
            nwinfo = self.network_api.allocate_for_instance(
                    context, instance, vpn=is_vpn,
                    requested_networks=requested_networks,
                    macs=macs,
                    security_groups=security_groups,
                    dhcp_options=dhcp_options)
            LOG.debug('Instance network_info: |%s|', nwinfo,
                      instance=instance)
            # Record success so a reschedule can reuse the allocation.
            sys_meta = instance.system_metadata
            sys_meta['network_allocated'] = 'True'
            self._instance_update(context, instance.uuid,
                    system_metadata=sys_meta)
            return nwinfo
        except Exception:
            exc_info = sys.exc_info()
            log_info = {'attempt': attempt,
                        'attempts': attempts}
            if attempt == attempts:
                LOG.exception(_LE('Instance failed network setup '
                                  'after %(attempts)d attempt(s)'),
                              log_info)
                # NOTE(review): Python 2 three-argument raise, preserving
                # the original traceback.
                raise exc_info[0], exc_info[1], exc_info[2]
            LOG.warning(_LW('Instance failed network setup '
                            '(attempt %(attempt)d of %(attempts)d)'),
                        log_info, instance=instance)
            time.sleep(retry_time)
            retry_time *= 2
            if retry_time > 30:
                retry_time = 30
    # Not reached.
def _build_networks_for_instance(self, context, instance,
                                 requested_networks, security_groups):
    """Allocate (or reuse) network resources for an instance build.

    Returns either the cached network info (when a previous scheduling
    attempt already allocated it) or a NetworkInfoAsyncWrapper from a
    fresh asynchronous allocation. May also set the instance's access
    IPs from the default access-ip network.
    """
    # If we're here from a reschedule the network may already be allocated.
    if strutils.bool_from_string(
            instance.system_metadata.get('network_allocated', 'False')):
        # NOTE(alex_xu): The network_allocated is True means the network
        # resource already allocated at previous scheduling, and the
        # network setup is cleanup at previous. After rescheduling, the
        # network resource need setup on the new host.
        self.network_api.setup_instance_network_on_host(
            context, instance, instance.host)
        return self._get_instance_nw_info(context, instance)

    if not self.is_neutron_security_groups:
        security_groups = []

    macs = self.driver.macs_for_instance(instance)
    dhcp_options = self.driver.dhcp_options_for_instance(instance)
    network_info = self._allocate_network(context, instance,
            requested_networks, macs, security_groups, dhcp_options)

    if not instance.access_ip_v4 and not instance.access_ip_v6:
        # If CONF.default_access_ip_network_name is set, grab the
        # corresponding network and set the access ip values accordingly.
        # Note that when there are multiple ips to choose from, an
        # arbitrary one will be chosen.
        network_name = CONF.default_access_ip_network_name
        if not network_name:
            return network_info

        # NOTE(review): iterating a NetworkInfoAsyncWrapper blocks here
        # until the async allocation completes; the break below stops at
        # the first vif on the named network.
        for vif in network_info:
            if vif['network']['label'] == network_name:
                for ip in vif.fixed_ips():
                    if ip['version'] == 4:
                        instance.access_ip_v4 = ip['address']
                    if ip['version'] == 6:
                        instance.access_ip_v6 = ip['address']
                instance.save()
                break

    return network_info
def _allocate_network(self, context, instance, requested_networks, macs,
                      security_groups, dhcp_options):
    """Start network allocation asynchronously. Return an instance
    of NetworkInfoAsyncWrapper that can be used to retrieve the
    allocated networks when the operation has finished.
    """
    # NOTE(comstud): Since we're allocating networks asynchronously,
    # this task state has little meaning, as we won't be in this
    # state for very long.
    instance.vm_state = vm_states.BUILDING
    instance.task_state = task_states.NETWORKING
    instance.save(expected_task_state=[None])
    self._update_resource_tracker(context, instance)

    return network_model.NetworkInfoAsyncWrapper(
        self._allocate_network_async, context, instance,
        requested_networks, macs, security_groups,
        pipelib.is_vpn_image(instance.image_ref), dhcp_options)
def _default_root_device_name(self, instance, image_meta, root_bdm):
    """Ask the driver for a root device name, falling back to the
    generic next-device-name computation when unsupported.
    """
    try:
        return self.driver.default_root_device_name(
            instance, image_meta, root_bdm)
    except NotImplementedError:
        return compute_utils.get_next_device_name(instance, [])
def _default_device_names_for_instance(self, instance,
                                       root_device_name,
                                       *block_device_lists):
    """Let the driver assign default device names, falling back to the
    generic implementation when the driver does not support it.
    """
    try:
        self.driver.default_device_names_for_instance(
            instance, root_device_name, *block_device_lists)
    except NotImplementedError:
        compute_utils.default_device_names_for_instance(
            instance, root_device_name, *block_device_lists)
def _default_block_device_names(self, context, instance,
                                image_meta, block_devices):
    """Verify that all the devices have the device_name set. If not,
    provide a default name.
    It also ensures that there is a root_device_name and is set to the
    first block device in the boot sequence (boot_index=0).
    """
    root_bdm = block_device.get_root_bdm(block_devices)
    if not root_bdm:
        # Nothing to do if there is no root device (no boot_index=0).
        return
    # Get the root_device_name from the root BDM or the instance
    root_device_name = None
    update_root_bdm = False
    if root_bdm.device_name:
        # The BDM's name wins; propagate it onto the instance.
        root_device_name = root_bdm.device_name
        instance.root_device_name = root_device_name
    elif instance.root_device_name:
        # The instance has a name but the BDM doesn't; copy it over.
        root_device_name = instance.root_device_name
        root_bdm.device_name = root_device_name
        update_root_bdm = True
    else:
        # Neither side has a name; ask the virt driver for a default.
        root_device_name = self._default_root_device_name(instance,
                                                          image_meta,
                                                          root_bdm)
        instance.root_device_name = root_device_name
        root_bdm.device_name = root_device_name
        update_root_bdm = True
    if update_root_bdm:
        root_bdm.save()
    # Partition the remaining devices by type so default names can be
    # assigned per category.
    ephemerals = filter(block_device.new_format_is_ephemeral,
                        block_devices)
    swap = filter(block_device.new_format_is_swap,
                  block_devices)
    block_device_mapping = filter(
        driver_block_device.is_block_device_mapping, block_devices)
    self._default_device_names_for_instance(instance,
                                            root_device_name,
                                            ephemerals,
                                            swap,
                                            block_device_mapping)
def _prep_block_device(self, context, instance, bdms,
                       do_check_attach=True):
    """Set up the block device for an instance with error logging.

    Converts the BDM list into the driver block_device format and
    attaches all volume-backed devices; devices that need a volume
    created first (snapshots, images, blanks) also wait for the
    volume to become available.

    :param context: security context
    :param instance: the instance the devices belong to
    :param bdms: BlockDeviceMappingList for the instance
    :param do_check_attach: verify attach state with the volume API
    :returns: a block_device_info dict for the virt driver
    :raises: InvalidBDM if block device setup fails for any reason
    """
    try:
        block_device_info = {
            'root_device_name': instance['root_device_name'],
            'swap': driver_block_device.convert_swap(bdms),
            'ephemerals': driver_block_device.convert_ephemerals(bdms),
            'block_device_mapping': (
                # Pre-existing volumes attach directly; the remaining
                # categories pass _await_block_device_map_created so we
                # wait for the newly created volume first.
                driver_block_device.attach_block_devices(
                    driver_block_device.convert_volumes(bdms),
                    context, instance, self.volume_api,
                    self.driver, do_check_attach=do_check_attach) +
                driver_block_device.attach_block_devices(
                    driver_block_device.convert_snapshots(bdms),
                    context, instance, self.volume_api,
                    self.driver, self._await_block_device_map_created,
                    do_check_attach=do_check_attach) +
                driver_block_device.attach_block_devices(
                    driver_block_device.convert_images(bdms),
                    context, instance, self.volume_api,
                    self.driver, self._await_block_device_map_created,
                    do_check_attach=do_check_attach) +
                driver_block_device.attach_block_devices(
                    driver_block_device.convert_blanks(bdms),
                    context, instance, self.volume_api,
                    self.driver, self._await_block_device_map_created,
                    do_check_attach=do_check_attach))
        }
        if self.use_legacy_block_device_info:
            # Older drivers expect the legacy (dict-based) format.
            for bdm_type in ('swap', 'ephemerals', 'block_device_mapping'):
                block_device_info[bdm_type] = \
                    driver_block_device.legacy_block_devices(
                        block_device_info[bdm_type])
        # Get swap out of the list
        block_device_info['swap'] = driver_block_device.get_swap(
            block_device_info['swap'])
        return block_device_info
    except exception.OverQuota:
        msg = _LW('Failed to create block device for instance due to '
                  'being over volume resource quota')
        # LOG.warn is a deprecated alias of LOG.warning; use the
        # canonical name.
        LOG.warning(msg, instance=instance)
        raise exception.InvalidBDM()
    except Exception:
        LOG.exception(_LE('Instance failed block device setup'),
                      instance=instance)
        raise exception.InvalidBDM()
@object_compat
def _spawn(self, context, instance, image_meta, network_info,
           block_device_info, injected_files, admin_password,
           set_access_ip=False, flavor=None):
    """Spawn an instance with error logging and update its power state.

    Transitions the instance BLOCK_DEVICE_MAPPING -> SPAWNING ->
    ACTIVE, optionally recording access IPs from the allocated
    network, and returns the saved instance.
    """
    instance.vm_state = vm_states.BUILDING
    instance.task_state = task_states.SPAWNING
    instance.save(expected_task_state=task_states.BLOCK_DEVICE_MAPPING)
    try:
        self.driver.spawn(context, instance, image_meta,
                          injected_files, admin_password,
                          network_info,
                          block_device_info,
                          flavor=flavor)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Instance failed to spawn'),
                          instance=instance)
    # Spawn succeeded: record the real power state and mark ACTIVE.
    instance.power_state = self._get_power_state(context, instance)
    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    instance.launched_at = timeutils.utcnow()
    def _set_access_ip_values():
        """Add access ip values for a given instance.
        If CONF.default_access_ip_network_name is set, this method will
        grab the corresponding network and set the access ip values
        accordingly. Note that when there are multiple ips to choose
        from, an arbitrary one will be chosen.
        """
        network_name = CONF.default_access_ip_network_name
        if not network_name:
            return
        for vif in network_info:
            if vif['network']['label'] == network_name:
                for ip in vif.fixed_ips():
                    if ip['version'] == 4:
                        instance.access_ip_v4 = ip['address']
                    if ip['version'] == 6:
                        instance.access_ip_v6 = ip['address']
                return
    if set_access_ip:
        _set_access_ip_values()
    # Block until the async network allocation finishes (re-raising any
    # allocation error here) before caching it on the instance.
    network_info.wait(do_raise=True)
    instance.info_cache.network_info = network_info
    instance.save(expected_task_state=task_states.SPAWNING)
    return instance
def _notify_about_instance_usage(self, context, instance, event_suffix,
                                 network_info=None, system_metadata=None,
                                 extra_usage_info=None, fault=None):
    """Emit an instance usage notification via compute_utils."""
    notify_kwargs = {
        'network_info': network_info,
        'system_metadata': system_metadata,
        'extra_usage_info': extra_usage_info,
        'fault': fault,
    }
    compute_utils.notify_about_instance_usage(
        self.notifier, context, instance, event_suffix, **notify_kwargs)
def _deallocate_network(self, context, instance,
                        requested_networks=None):
    """Release all network resources allocated to the instance."""
    LOG.debug('Deallocating network for instance', instance=instance)
    self.network_api.deallocate_for_instance(
        context, instance, requested_networks=requested_networks)
def _get_instance_block_device_info(self, context, instance,
                                    refresh_conn_info=False,
                                    bdms=None):
    """Transform block devices to the driver block_device format.

    :param refresh_conn_info: when True, refresh connection_info for
        every volume-backed device via the volume API; when False,
        devices without connection_info are dropped from the mapping
    :param bdms: optional pre-fetched BlockDeviceMappingList; loaded
        from the database when not supplied
    :returns: dict with swap, root_device_name, ephemerals and
        block_device_mapping keys, in legacy format if configured
    """
    if not bdms:
        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
            context, instance['uuid'])
    swap = driver_block_device.convert_swap(bdms)
    ephemerals = driver_block_device.convert_ephemerals(bdms)
    block_device_mapping = (
        driver_block_device.convert_volumes(bdms) +
        driver_block_device.convert_snapshots(bdms) +
        driver_block_device.convert_images(bdms))
    if not refresh_conn_info:
        # if the block_device_mapping has no value in connection_info
        # (returned as None), don't include in the mapping
        block_device_mapping = [
            bdm for bdm in block_device_mapping
            if bdm.get('connection_info')]
    else:
        block_device_mapping = driver_block_device.refresh_conn_infos(
            block_device_mapping, context, instance, self.volume_api,
            self.driver)
    if self.use_legacy_block_device_info:
        # Older drivers expect the legacy (dict-based) format.
        swap = driver_block_device.legacy_block_devices(swap)
        ephemerals = driver_block_device.legacy_block_devices(ephemerals)
        block_device_mapping = driver_block_device.legacy_block_devices(
            block_device_mapping)
    # Get swap out of the list
    swap = driver_block_device.get_swap(swap)
    root_device_name = instance.get('root_device_name')
    return {'swap': swap,
            'root_device_name': root_device_name,
            'ephemerals': ephemerals,
            'block_device_mapping': block_device_mapping}
# NOTE(mikal): No object_compat wrapper on this method because its
# callers all pass objects already
@wrap_exception()
@reverts_task_state
@wrap_instance_fault
def build_and_run_instance(self, context, instance, image, request_spec,
                           filter_properties, admin_password=None,
                           injected_files=None, requested_networks=None,
                           security_groups=None, block_device_mapping=None,
                           node=None, limits=None):
    """RPC entry point: build and run an instance on this host.

    Normalizes legacy RPC argument formats, then hands the actual
    build off to a spawned greenthread so the RPC worker thread is
    freed immediately.
    """
    # NOTE(danms): Remove this in v4.0 of the RPC API
    if (requested_networks and
            not isinstance(requested_networks,
                           objects.NetworkRequestList)):
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest.from_tuple(t)
                     for t in requested_networks])
    # NOTE(melwitt): Remove this in v4.0 of the RPC API
    flavor = filter_properties.get('instance_type')
    if flavor and not isinstance(flavor, objects.Flavor):
        # Code downstream may expect extra_specs to be populated since it
        # is receiving an object, so lookup the flavor to ensure this.
        flavor = objects.Flavor.get_by_id(context, flavor['id'])
        filter_properties = dict(filter_properties, instance_type=flavor)
    @utils.synchronized(instance.uuid)
    def _locked_do_build_and_run_instance(*args, **kwargs):
        # NOTE(danms): We grab the semaphore with the instance uuid
        # locked because we could wait in line to build this instance
        # for a while and we want to make sure that nothing else tries
        # to do anything with this instance while we wait.
        with self._build_semaphore:
            self._do_build_and_run_instance(*args, **kwargs)
    # NOTE(danms): We spawn here to return the RPC worker thread back to
    # the pool. Since what follows could take a really long time, we don't
    # want to tie up RPC workers.
    utils.spawn_n(_locked_do_build_and_run_instance,
                  context, instance, image, request_spec,
                  filter_properties, admin_password, injected_files,
                  requested_networks, security_groups,
                  block_device_mapping, node, limits)
@hooks.add_hook('build_instance')
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def _do_build_and_run_instance(self, context, instance, image,
        request_spec, filter_properties, admin_password, injected_files,
        requested_networks, security_groups, block_device_mapping,
        node=None, limits=None):
    """Build and run the instance, reporting the outcome.

    :returns: build_results.ACTIVE on success,
        build_results.RESCHEDULED when the build was handed back to the
        scheduler, build_results.FAILED on an unrecoverable failure.
    """
    try:
        LOG.audit(_('Starting instance...'), context=context,
                  instance=instance)
        instance.vm_state = vm_states.BUILDING
        instance.task_state = None
        instance.save(expected_task_state=
                (task_states.SCHEDULING, None))
    except exception.InstanceNotFound:
        msg = 'Instance disappeared before build.'
        LOG.debug(msg, instance=instance)
        return build_results.FAILED
    except exception.UnexpectedTaskStateError as e:
        LOG.debug(e.format_message(), instance=instance)
        return build_results.FAILED
    # b64 decode the files to inject:
    decoded_files = self._decode_files(injected_files)
    if limits is None:
        limits = {}
    if node is None:
        # No scheduler-provided node: fall back to the first node this
        # driver reports.
        node = self.driver.get_available_nodes(refresh=True)[0]
        LOG.debug('No node specified, defaulting to %s', node,
                  instance=instance)
    try:
        self._build_and_run_instance(context, instance, image,
                decoded_files, admin_password, requested_networks,
                security_groups, block_device_mapping, node, limits,
                filter_properties)
        return build_results.ACTIVE
    except exception.RescheduledException as e:
        # The build failed in a way that may succeed on another host.
        LOG.debug(e.format_message(), instance=instance)
        retry = filter_properties.get('retry', None)
        if not retry:
            # no retry information, do not reschedule.
            LOG.debug("Retry info not present, will not reschedule",
                instance=instance)
            self._cleanup_allocated_networks(context, instance,
                requested_networks)
            compute_utils.add_instance_fault_from_exc(context,
                    instance, e, sys.exc_info())
            self._set_instance_error_state(context, instance)
            return build_results.FAILED
        retry['exc'] = traceback.format_exception(*sys.exc_info())
        # NOTE(comstud): Deallocate networks if the driver wants
        # us to do so.
        if self.driver.deallocate_networks_on_reschedule(instance):
            self._cleanup_allocated_networks(context, instance,
                    requested_networks)
        else:
            # NOTE(alex_xu): Network already allocated and we don't
            # want to deallocate them before rescheduling. But we need
            # cleanup those network resource setup on this host before
            # rescheduling.
            self.network_api.cleanup_instance_network_on_host(
                context, instance, self.host)
        instance.task_state = task_states.SCHEDULING
        instance.save()
        # Hand the build back to conductor/scheduler for another host.
        self.compute_task_api.build_instances(context, [instance],
                image, filter_properties, admin_password,
                injected_files, requested_networks, security_groups,
                block_device_mapping)
        return build_results.RESCHEDULED
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError):
        msg = 'Instance disappeared during build.'
        LOG.debug(msg, instance=instance)
        self._cleanup_allocated_networks(context, instance,
                requested_networks)
        return build_results.FAILED
    except exception.BuildAbortException as e:
        LOG.exception(e.format_message(), instance=instance)
        self._cleanup_allocated_networks(context, instance,
                requested_networks)
        self._cleanup_volumes(context, instance.uuid,
                block_device_mapping, raise_exc=False)
        compute_utils.add_instance_fault_from_exc(context, instance,
                e, sys.exc_info())
        self._set_instance_error_state(context, instance)
        return build_results.FAILED
    except Exception as e:
        # Should not reach here.
        msg = _LE('Unexpected build failure, not rescheduling build.')
        LOG.exception(msg, instance=instance)
        self._cleanup_allocated_networks(context, instance,
                requested_networks)
        self._cleanup_volumes(context, instance.uuid,
                block_device_mapping, raise_exc=False)
        compute_utils.add_instance_fault_from_exc(context, instance,
                e, sys.exc_info())
        self._set_instance_error_state(context, instance)
        return build_results.FAILED
def _build_and_run_instance(self, context, instance, image, injected_files,
        admin_password, requested_networks, security_groups,
        block_device_mapping, node, limits, filter_properties):
    """Claim resources, build networking/block devices and spawn.

    :raises: RescheduledException when the failure might succeed on a
        different host; BuildAbortException for fatal failures that
        should not be retried.
    """
    image_name = image.get('name')
    self._notify_about_instance_usage(context, instance, 'create.start',
            extra_usage_info={'image_name': image_name})
    try:
        rt = self._get_resource_tracker(node)
        with rt.instance_claim(context, instance, limits) as inst_claim:
            # NOTE(russellb) It's important that this validation be done
            # *after* the resource tracker instance claim, as that is where
            # the host is set on the instance.
            self._validate_instance_group_policy(context, instance,
                    filter_properties)
            with self._build_resources(context, instance,
                    requested_networks, security_groups, image,
                    block_device_mapping) as resources:
                instance.vm_state = vm_states.BUILDING
                instance.task_state = task_states.SPAWNING
                instance.numa_topology = inst_claim.claimed_numa_topology
                instance.save(expected_task_state=
                        task_states.BLOCK_DEVICE_MAPPING)
                block_device_info = resources['block_device_info']
                network_info = resources['network_info']
                flavor = None
                if filter_properties is not None:
                    flavor = filter_properties.get('instance_type')
                self.driver.spawn(context, instance, image,
                                  injected_files, admin_password,
                                  network_info=network_info,
                                  block_device_info=block_device_info,
                                  flavor=flavor)
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError) as e:
        # Instance was deleted under us: notify and re-raise.
        with excutils.save_and_reraise_exception():
            self._notify_about_instance_usage(context, instance,
                'create.end', fault=e)
    except exception.ComputeResourcesUnavailable as e:
        # Claim failed here; another host may have room.
        LOG.debug(e.format_message(), instance=instance)
        self._notify_about_instance_usage(context, instance,
                'create.error', fault=e)
        raise exception.RescheduledException(
                instance_uuid=instance.uuid, reason=e.format_message())
    except exception.BuildAbortException as e:
        with excutils.save_and_reraise_exception():
            LOG.debug(e.format_message(), instance=instance)
            self._notify_about_instance_usage(context, instance,
                    'create.error', fault=e)
    except (exception.FixedIpLimitExceeded,
            exception.NoMoreNetworks, exception.NoMoreFixedIps) as e:
        # Network exhaustion is global, so rescheduling won't help.
        LOG.warning(_LW('No more network or fixed IP to be allocated'),
                    instance=instance)
        self._notify_about_instance_usage(context, instance,
                'create.error', fault=e)
        msg = _('Failed to allocate the network(s) with error %s, '
                'not rescheduling.') % e.format_message()
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=msg)
    except (exception.VirtualInterfaceCreateException,
            exception.VirtualInterfaceMacAddressException) as e:
        LOG.exception(_LE('Failed to allocate network(s)'),
                      instance=instance)
        self._notify_about_instance_usage(context, instance,
                'create.error', fault=e)
        msg = _('Failed to allocate the network(s), not rescheduling.')
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=msg)
    except (exception.FlavorDiskTooSmall,
            exception.FlavorMemoryTooSmall,
            exception.ImageNotActive,
            exception.ImageUnacceptable) as e:
        # Flavor/image mismatches are fatal everywhere: abort.
        self._notify_about_instance_usage(context, instance,
                'create.error', fault=e)
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=e.format_message())
    except Exception as e:
        # Unknown failure: give another host a chance.
        self._notify_about_instance_usage(context, instance,
                'create.error', fault=e)
        raise exception.RescheduledException(
                instance_uuid=instance.uuid, reason=six.text_type(e))
    # NOTE(alaski): This is only useful during reschedules, remove it now.
    instance.system_metadata.pop('network_allocated', None)
    instance.power_state = self._get_power_state(context, instance)
    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    instance.launched_at = timeutils.utcnow()
    try:
        instance.save(expected_task_state=task_states.SPAWNING)
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError) as e:
        with excutils.save_and_reraise_exception():
            self._notify_about_instance_usage(context, instance,
                'create.end', fault=e)
    self._notify_about_instance_usage(context, instance, 'create.end',
            extra_usage_info={'message': _('Success')},
            network_info=network_info)
@contextlib.contextmanager
def _build_resources(self, context, instance, requested_networks,
                     security_groups, image, block_device_mapping):
    """Context manager that sets up and yields build resources.

    Yields a dict with 'network_info' and 'block_device_info' keys.
    On failure inside the managed block, waits for the async network
    allocation to settle and shuts the instance down before
    re-raising.
    """
    resources = {}
    network_info = None
    try:
        network_info = self._build_networks_for_instance(context, instance,
                requested_networks, security_groups)
        resources['network_info'] = network_info
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError):
        raise
    except exception.UnexpectedTaskStateError as e:
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=e.format_message())
    except Exception:
        # Because this allocation is async any failures are likely to occur
        # when the driver accesses network_info during spawn().
        LOG.exception(_LE('Failed to allocate network(s)'),
                      instance=instance)
        msg = _('Failed to allocate the network(s), not rescheduling.')
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=msg)
    try:
        # Verify that all the BDMs have a device_name set and assign a
        # default to the ones missing it with the help of the driver.
        self._default_block_device_names(context, instance, image,
                block_device_mapping)
        instance.vm_state = vm_states.BUILDING
        instance.task_state = task_states.BLOCK_DEVICE_MAPPING
        instance.save()
        block_device_info = self._prep_block_device(context, instance,
                block_device_mapping)
        resources['block_device_info'] = block_device_info
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError):
        with excutils.save_and_reraise_exception() as ctxt:
            # Make sure the async call finishes
            if network_info is not None:
                network_info.wait(do_raise=False)
    except exception.UnexpectedTaskStateError as e:
        # Make sure the async call finishes
        if network_info is not None:
            network_info.wait(do_raise=False)
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=e.format_message())
    except Exception:
        LOG.exception(_LE('Failure prepping block device'),
                instance=instance)
        # Make sure the async call finishes
        if network_info is not None:
            network_info.wait(do_raise=False)
        msg = _('Failure prepping block device.')
        raise exception.BuildAbortException(instance_uuid=instance.uuid,
                reason=msg)
    try:
        # Hand the prepared resources to the caller (the spawn path).
        yield resources
    except Exception as exc:
        with excutils.save_and_reraise_exception() as ctxt:
            if not isinstance(exc, (exception.InstanceNotFound,
                exception.UnexpectedDeletingTaskStateError)):
                    LOG.exception(_LE('Instance failed to spawn'),
                            instance=instance)
            # Make sure the async call finishes
            if network_info is not None:
                network_info.wait(do_raise=False)
            try:
                self._shutdown_instance(context, instance,
                        block_device_mapping, requested_networks,
                        try_deallocate_networks=False)
            except Exception:
                # Cleanup itself failed: abort instead of re-raising the
                # original error so we don't reschedule a dirty host.
                ctxt.reraise = False
                msg = _('Could not clean up failed build,'
                        ' not rescheduling')
                raise exception.BuildAbortException(
                        instance_uuid=instance.uuid, reason=msg)
def _cleanup_allocated_networks(self, context, instance,
requested_networks):
try:
self._deallocate_network(context, instance, requested_networks)
except Exception:
msg = _LE('Failed to deallocate networks')
LOG.exception(msg, instance=instance)
return
instance.system_metadata['network_allocated'] = 'False'
try:
instance.save()
except exception.InstanceNotFound:
# NOTE(alaski): It's possible that we're cleaning up the networks
# because the instance was deleted. If that's the case then this
# exception will be raised by instance.save()
pass
@object_compat
@messaging.expected_exceptions(exception.BuildAbortException,
                               exception.UnexpectedTaskStateError,
                               exception.VirtualInterfaceCreateException,
                               exception.RescheduledException)
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def run_instance(self, context, instance, request_spec,
                 filter_properties, requested_networks,
                 injected_files, admin_password,
                 is_first_time, node, legacy_bdm_in_spec):
    """Legacy RPC entry point to build and run an instance,
    serialized per instance via a uuid-keyed lock.
    """
    # NOTE(alaski) This method should be deprecated when the scheduler and
    # compute rpc interfaces are bumped to 4.x, and slated for removal in
    # 5.x as it is no longer used.
    if filter_properties is None:
        filter_properties = {}
    @utils.synchronized(instance.uuid)
    def do_run_instance():
        self._run_instance(context, request_spec,
                filter_properties, requested_networks, injected_files,
                admin_password, is_first_time, node, instance,
                legacy_bdm_in_spec)
    do_run_instance()
def _try_deallocate_network(self, context, instance,
                            requested_networks=None):
    """Deallocate networking for the instance, putting it into ERROR
    state (and re-raising) if deallocation fails.
    """
    try:
        # tear down allocated network structure
        self._deallocate_network(context, instance, requested_networks)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.error(_LE('Failed to deallocate network for instance.'),
                      instance=instance)
            self._set_instance_error_state(context, instance)
def _get_power_off_values(self, context, instance, clean_shutdown):
"""Get the timing configuration for powering down this instance."""
if clean_shutdown:
timeout = compute_utils.get_value_from_system_metadata(instance,
key='image_os_shutdown_timeout', type=int,
default=CONF.shutdown_timeout)
retry_interval = self.SHUTDOWN_RETRY_INTERVAL
else:
timeout = 0
retry_interval = 0
return timeout, retry_interval
def _power_off_instance(self, context, instance, clean_shutdown=True):
"""Power off an instance on this host."""
timeout, retry_interval = self._get_power_off_values(context,
instance, clean_shutdown)
self.driver.power_off(instance, timeout, retry_interval)
def _shutdown_instance(self, context, instance,
                       bdms, requested_networks=None, notify=True,
                       try_deallocate_networks=True):
    """Shutdown an instance on this host.

    Destroys the guest via the virt driver, optionally tears down
    networking, and releases volume connections for volume-backed
    block devices.

    :param:context: security context
    :param:instance: a nova.objects.Instance object
    :param:bdms: the block devices for the instance to be torn
                 down
    :param:requested_networks: the networks on which the instance
                               has ports
    :param:notify: true if a final usage notification should be
                   emitted
    :param:try_deallocate_networks: false if we should avoid
                                    trying to teardown networking
    """
    context = context.elevated()
    LOG.audit(_('%(action_str)s instance') % {'action_str': 'Terminating'},
              context=context, instance=instance)
    if notify:
        self._notify_about_instance_usage(context, instance,
                                          "shutdown.start")
    network_info = compute_utils.get_nw_info_for_instance(instance)
    # NOTE(vish) get bdms before destroying the instance
    vol_bdms = [bdm for bdm in bdms if bdm.is_volume]
    block_device_info = self._get_instance_block_device_info(
        context, instance, bdms=bdms)
    # NOTE(melwitt): attempt driver destroy before releasing ip, may
    # want to keep ip allocated for certain failures
    try:
        self.driver.destroy(context, instance, network_info,
                            block_device_info)
    except exception.InstancePowerOffFailure:
        # if the instance can't power off, don't release the ip
        with excutils.save_and_reraise_exception():
            pass
    except Exception:
        with excutils.save_and_reraise_exception():
            # deallocate ip and fail without proceeding to
            # volume api calls, preserving current behavior
            if try_deallocate_networks:
                self._try_deallocate_network(context, instance,
                                             requested_networks)
    if try_deallocate_networks:
        self._try_deallocate_network(context, instance, requested_networks)
    for bdm in vol_bdms:
        try:
            # NOTE(vish): actual driver detach done in driver.destroy, so
            # just tell cinder that we are done with it.
            connector = self.driver.get_volume_connector(instance)
            self.volume_api.terminate_connection(context,
                                                 bdm.volume_id,
                                                 connector)
            self.volume_api.detach(context, bdm.volume_id)
        except exception.DiskNotFound as exc:
            # Already gone - nothing left to detach.
            LOG.debug('Ignoring DiskNotFound: %s', exc,
                      instance=instance)
        except exception.VolumeNotFound as exc:
            LOG.debug('Ignoring VolumeNotFound: %s', exc,
                      instance=instance)
        except (cinder_exception.EndpointNotFound,
                keystone_exception.EndpointNotFound) as exc:
            LOG.warning(_LW('Ignoring EndpointNotFound: %s'), exc,
                        instance=instance)
    if notify:
        self._notify_about_instance_usage(context, instance,
                                          "shutdown.end")
def _cleanup_volumes(self, context, instance_uuid, bdms, raise_exc=True):
exc_info = None
for bdm in bdms:
LOG.debug("terminating bdm %s", bdm,
instance_uuid=instance_uuid)
if bdm.volume_id and bdm.delete_on_termination:
try:
self.volume_api.delete(context, bdm.volume_id)
except Exception as exc:
exc_info = sys.exc_info()
LOG.warning(_LW('Failed to delete volume: %(volume_id)s '
'due to %(exc)s'),
{'volume_id': bdm.volume_id, 'exc': exc})
if exc_info is not None and raise_exc:
six.reraise(exc_info[0], exc_info[1], exc_info[2])
@hooks.add_hook("delete_instance")
def _delete_instance(self, context, instance, bdms, quotas):
    """Delete an instance on this host. Commit or rollback quotas
    as necessary.
    :param context: nova request context
    :param instance: nova.objects.instance.Instance object
    :param bdms: nova.objects.block_device.BlockDeviceMappingList object
    :param quotas: nova.objects.quotas.Quotas object
    """
    was_soft_deleted = instance.vm_state == vm_states.SOFT_DELETED
    if was_soft_deleted:
        # Instances in SOFT_DELETED vm_state have already had quotas
        # decremented.
        try:
            quotas.rollback()
        except Exception:
            pass
    try:
        # Cancel any in-flight external events for this instance and log
        # whatever was still pending for debugging.
        events = self.instance_events.clear_events_for_instance(instance)
        if events:
            LOG.debug('Events pending at deletion: %(events)s',
                      {'events': ','.join(events.keys())},
                      instance=instance)
        instance.info_cache.delete()
        self._notify_about_instance_usage(context, instance,
                                          "delete.start")
        self._shutdown_instance(context, instance, bdms)
        # NOTE(vish): We have already deleted the instance, so we have
        # to ignore problems cleaning up the volumes. It
        # would be nice to let the user know somehow that
        # the volume deletion failed, but it is not
        # acceptable to have an instance that can not be
        # deleted. Perhaps this could be reworked in the
        # future to set an instance fault the first time
        # and to only ignore the failure if the instance
        # is already in ERROR.
        self._cleanup_volumes(context, instance.uuid, bdms,
                raise_exc=False)
        # if a delete task succeeded, always update vm state and task
        # state without expecting task state to be DELETING
        instance.vm_state = vm_states.DELETED
        instance.task_state = None
        instance.power_state = power_state.NOSTATE
        instance.terminated_at = timeutils.utcnow()
        instance.save()
        self._update_resource_tracker(context, instance)
        # Capture system_metadata before destroy() wipes the record; it
        # is still needed by _complete_deletion below.
        system_meta = instance.system_metadata
        instance.destroy()
    except Exception:
        with excutils.save_and_reraise_exception():
            quotas.rollback()
    self._complete_deletion(context,
                            instance,
                            bdms,
                            quotas,
                            system_meta)
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def terminate_instance(self, context, instance, bdms, reservations):
    """Terminate an instance on this host."""
    # NOTE (ndipanov): If we get non-object BDMs, just get them from the
    # db again, as this means they are sent in the old format and we want
    # to avoid converting them back when we can just get them.
    # Remove this when we bump the RPC major version to 4.0
    if (bdms and
        any(not isinstance(bdm, obj_base.NovaObject)
            for bdm in bdms)):
        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
            context, instance.uuid)
    quotas = objects.Quotas.from_reservations(context,
                                              reservations,
                                              instance=instance)
    @utils.synchronized(instance.uuid)
    def do_terminate_instance(instance, bdms):
        # Serialized per-instance so concurrent operations can't race
        # with the deletion.
        try:
            self._delete_instance(context, instance, bdms, quotas)
        except exception.InstanceNotFound:
            LOG.info(_LI("Instance disappeared during terminate"),
                     instance=instance)
        except Exception:
            # As we're trying to delete always go to Error if something
            # goes wrong that _delete_instance can't handle.
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE('Setting instance vm_state to ERROR'),
                              instance=instance)
                self._set_instance_error_state(context, instance)
    do_terminate_instance(instance, bdms)
# NOTE(johannes): This is probably better named power_off_instance
# so it matches the driver method, but because of other issues, we
# can't use that name in grizzly.
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def stop_instance(self, context, instance, clean_shutdown=True):
    """Stopping an instance on this host."""
    @utils.synchronized(instance.uuid)
    def do_stop_instance():
        current_power_state = self._get_power_state(context, instance)
        LOG.debug('Stopping instance; current vm_state: %(vm_state)s, '
                  'current task_state: %(task_state)s, current DB '
                  'power_state: %(db_power_state)s, current VM '
                  'power_state: %(current_power_state)s',
                  dict(vm_state=instance.vm_state,
                       task_state=instance.task_state,
                       db_power_state=instance.power_state,
                       current_power_state=current_power_state),
                  instance_uuid=instance.uuid)
        # NOTE(mriedem): If the instance is already powered off, we are
        # possibly tearing down and racing with other operations, so we can
        # expect the task_state to be None if something else updates the
        # instance and we're not locking it.
        expected_task_state = [task_states.POWERING_OFF]
        # The list of power states is from _sync_instance_power_state.
        if current_power_state in (power_state.NOSTATE,
                                   power_state.SHUTDOWN,
                                   power_state.CRASHED):
            LOG.info(_LI('Instance is already powered off in the '
                         'hypervisor when stop is called.'),
                     instance=instance)
            expected_task_state.append(None)
        self._notify_about_instance_usage(context, instance,
                                          "power_off.start")
        self._power_off_instance(context, instance, clean_shutdown)
        instance.power_state = self._get_power_state(context, instance)
        instance.vm_state = vm_states.STOPPED
        instance.task_state = None
        instance.save(expected_task_state=expected_task_state)
        self._notify_about_instance_usage(context, instance,
                                          "power_off.end")
    do_stop_instance()
def _power_on(self, context, instance):
network_info = self._get_instance_nw_info(context, instance)
block_device_info = self._get_instance_block_device_info(context,
instance)
self.driver.power_on(context, instance,
network_info,
block_device_info)
# NOTE(johannes): This is probably better named power_on_instance
# so it matches the driver method, but because of other issues, we
# can't use that name in grizzly.
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def start_instance(self, context, instance):
    """Starting an instance on this host."""
    self._notify_about_instance_usage(context, instance, "power_on.start")
    self._power_on(context, instance)
    # Record the real post-power-on state and mark the instance ACTIVE.
    instance.power_state = self._get_power_state(context, instance)
    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    instance.save(expected_task_state=task_states.POWERING_ON)
    self._notify_about_instance_usage(context, instance, "power_on.end")
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def soft_delete_instance(self, context, instance, reservations):
    """Soft delete an instance on this host.

    Commits the quota reservations on success and rolls them back on
    any failure.
    """
    quotas = objects.Quotas.from_reservations(context,
                                              reservations,
                                              instance=instance)
    try:
        self._notify_about_instance_usage(context, instance,
                                          "soft_delete.start")
        try:
            self.driver.soft_delete(instance)
        except NotImplementedError:
            # Fallback to just powering off the instance if the
            # hypervisor doesn't implement the soft_delete method
            self.driver.power_off(instance)
        instance.power_state = self._get_power_state(context, instance)
        instance.vm_state = vm_states.SOFT_DELETED
        instance.task_state = None
        instance.save(expected_task_state=[task_states.SOFT_DELETING])
    except Exception:
        with excutils.save_and_reraise_exception():
            quotas.rollback()
    quotas.commit()
    self._notify_about_instance_usage(context, instance, "soft_delete.end")
@object_compat
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def restore_instance(self, context, instance):
    """Restore a soft-deleted instance on this host."""
    self._notify_about_instance_usage(context, instance, "restore.start")
    try:
        self.driver.restore(instance)
    except NotImplementedError:
        # Fallback to just powering on the instance if the hypervisor
        # doesn't implement the restore method
        self._power_on(context, instance)
    # Record the real post-restore state and mark the instance ACTIVE.
    instance.power_state = self._get_power_state(context, instance)
    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    instance.save(expected_task_state=task_states.RESTORING)
    self._notify_about_instance_usage(context, instance, "restore.end")
def _rebuild_default_impl(self, context, instance, image_meta,
                          injected_files, admin_password, bdms,
                          detach_block_devices, attach_block_devices,
                          network_info=None,
                          recreate=False, block_device_info=None,
                          preserve_ephemeral=False):
    """Default rebuild used when the driver has no specialized rebuild.

    Detaches the instance's block devices, destroys the existing guest
    (skipped on recreate/evacuate, where the source host is gone),
    re-attaches the block devices and spawns a fresh guest from
    image_meta.

    :param detach_block_devices: callable(context, bdms) that detaches
        the instance's volumes
    :param attach_block_devices: callable(context, instance, bdms) that
        re-attaches them and returns the new block_device_info
    :raises: exception.PreserveEphemeralNotSupported if
        preserve_ephemeral is requested -- this code path cannot honor it
    """
    if preserve_ephemeral:
        # The default code path does not support preserving ephemeral
        # partitions.
        raise exception.PreserveEphemeralNotSupported()

    detach_block_devices(context, bdms)

    if not recreate:
        self.driver.destroy(context, instance, network_info,
                            block_device_info=block_device_info)

    instance.task_state = task_states.REBUILD_BLOCK_DEVICE_MAPPING
    instance.save(expected_task_state=[task_states.REBUILDING])

    new_block_device_info = attach_block_devices(context, instance, bdms)

    instance.task_state = task_states.REBUILD_SPAWNING
    instance.save(
        expected_task_state=[task_states.REBUILD_BLOCK_DEVICE_MAPPING])

    self.driver.spawn(context, instance, image_meta, injected_files,
                      admin_password, network_info=network_info,
                      block_device_info=new_block_device_info)
@object_compat
@messaging.expected_exceptions(exception.PreserveEphemeralNotSupported)
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def rebuild_instance(self, context, instance, orig_image_ref, image_ref,
                     injected_files, new_pass, orig_sys_metadata,
                     bdms, recreate, on_shared_storage,
                     preserve_ephemeral=False):
    """Destroy and re-make this instance.

    A 'rebuild' effectively purges all existing data from the system and
    remakes the VM with given 'metadata' and 'personalities'.

    :param context: `nova.RequestContext` object
    :param instance: Instance object
    :param orig_image_ref: Original image_ref before rebuild
    :param image_ref: New image_ref for rebuild
    :param injected_files: Files to inject
    :param new_pass: password to set on rebuilt instance
    :param orig_sys_metadata: instance system metadata from pre-rebuild
    :param bdms: block-device-mappings to use for rebuild
    :param recreate: True if the instance is being recreated (e.g. the
        hypervisor it was on failed) - cleanup of old state will be
        skipped.
    :param on_shared_storage: True if instance files on shared storage
    :param preserve_ephemeral: True if the default ephemeral storage
        partition must be preserved on rebuild
    """
    context = context.elevated()
    # NOTE (ndipanov): If we get non-object BDMs, just get them from the
    # db again, as this means they are sent in the old format and we want
    # to avoid converting them back when we can just get them.
    # Remove this on the next major RPC version bump
    if (bdms and
        any(not isinstance(bdm, obj_base.NovaObject)
            for bdm in bdms)):
        bdms = None

    # Remember the pre-rebuild vm_state so a STOPPED instance can be
    # returned to STOPPED once the rebuild finishes (see bottom).
    orig_vm_state = instance.vm_state
    with self._error_out_instance_on_exception(context, instance):
        LOG.audit(_("Rebuilding instance"), context=context,
                  instance=instance)

        if recreate:
            if not self.driver.capabilities["supports_recreate"]:
                raise exception.InstanceRecreateNotSupported

            self._check_instance_exists(context, instance)

            # To cover case when admin expects that instance files are on
            # shared storage, but not accessible and vice versa
            if on_shared_storage != self.driver.instance_on_disk(instance):
                raise exception.InvalidSharedStorage(
                        _("Invalid state of instance files on shared"
                          " storage"))

            if on_shared_storage:
                LOG.info(_LI('disk on shared storage, recreating using'
                             ' existing disk'))
            else:
                # No shared disk: rebuild from the instance's own image.
                image_ref = orig_image_ref = instance.image_ref
                LOG.info(_LI("disk not on shared storage, rebuilding from:"
                             " '%s'"), str(image_ref))

            # NOTE(mriedem): On a recreate (evacuate), we need to update
            # the instance's host and node properties to reflect it's
            # destination node for the recreate.
            node_name = None
            try:
                compute_node = self._get_compute_info(context, self.host)
                node_name = compute_node.hypervisor_hostname
            except exception.ComputeHostNotFound:
                LOG.exception(_LE('Failed to get compute_info for %s'),
                              self.host)
            finally:
                # Always repoint the instance at this host, even if the
                # node name lookup failed.
                instance.host = self.host
                instance.node = node_name
                instance.save()

        if image_ref:
            image_meta = self.image_api.get(context, image_ref)
        else:
            image_meta = {}

        # This instance.exists message should contain the original
        # image_ref, not the new one.  Since the DB has been updated
        # to point to the new one... we have to override it.
        # TODO(jaypipes): Move generate_image_url() into the nova.image.api
        orig_image_ref_url = glance.generate_image_url(orig_image_ref)
        extra_usage_info = {'image_ref_url': orig_image_ref_url}
        self.conductor_api.notify_usage_exists(context, instance,
                current_period=True, system_metadata=orig_sys_metadata,
                extra_usage_info=extra_usage_info)

        # This message should contain the new image_ref
        extra_usage_info = {'image_name': image_meta.get('name', '')}
        self._notify_about_instance_usage(context, instance,
                "rebuild.start", extra_usage_info=extra_usage_info)

        instance.power_state = self._get_power_state(context, instance)
        instance.task_state = task_states.REBUILDING
        instance.save(expected_task_state=[task_states.REBUILDING])

        if recreate:
            self.network_api.setup_networks_on_host(
                    context, instance, self.host)

        network_info = compute_utils.get_nw_info_for_instance(instance)
        if bdms is None:
            bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
                    context, instance.uuid)

        block_device_info = \
            self._get_instance_block_device_info(
                    context, instance, bdms=bdms)

        def detach_block_devices(context, bdms):
            # Passed to the driver (or default impl) as a callback; only
            # volume-backed mappings are detached.
            for bdm in bdms:
                if bdm.is_volume:
                    self.volume_api.detach(context, bdm.volume_id)

        files = self._decode_files(injected_files)

        kwargs = dict(
            context=context,
            instance=instance,
            image_meta=image_meta,
            injected_files=files,
            admin_password=new_pass,
            bdms=bdms,
            detach_block_devices=detach_block_devices,
            attach_block_devices=self._prep_block_device,
            block_device_info=block_device_info,
            network_info=network_info,
            preserve_ephemeral=preserve_ephemeral,
            recreate=recreate)
        try:
            self.driver.rebuild(**kwargs)
        except NotImplementedError:
            # NOTE(rpodolyaka): driver doesn't provide specialized version
            # of rebuild, fall back to the default implementation
            self._rebuild_default_impl(**kwargs)
        instance.power_state = self._get_power_state(context, instance)
        instance.vm_state = vm_states.ACTIVE
        instance.task_state = None
        instance.launched_at = timeutils.utcnow()
        instance.save(expected_task_state=[task_states.REBUILD_SPAWNING])

        if orig_vm_state == vm_states.STOPPED:
            # The instance was stopped before the rebuild: mark it ACTIVE
            # first so stop_instance() can transition it back to STOPPED.
            LOG.info(_LI("bringing vm to original state: '%s'"),
                     orig_vm_state, instance=instance)
            instance.vm_state = vm_states.ACTIVE
            instance.task_state = task_states.POWERING_OFF
            instance.progress = 0
            instance.save()
            self.stop_instance(context, instance)

        self._notify_about_instance_usage(
                context, instance, "rebuild.end",
                network_info=network_info,
                extra_usage_info=extra_usage_info)
def _handle_bad_volumes_detached(self, context, instance, bad_devices,
                                 block_device_info):
    """Handle cases where the virt-layer had to detach non-working volumes
    in order to complete an operation.
    """
    for mapping in block_device_info['block_device_mapping']:
        # Skip mappings the virt layer did not flag as bad.
        if mapping.get('mount_device') not in bad_devices:
            continue
        try:
            volume_id = mapping['connection_info']['data']['volume_id']
        except KeyError:
            # No volume id available for this mapping; nothing to do.
            continue

        # NOTE(sirp): ideally we'd just call
        # `compute_api.detach_volume` here but since that hits the
        # DB directly, that's off limits from within the
        # compute-manager.
        #
        # API-detach
        LOG.info(_LI("Detaching from volume api: %s"), volume_id)
        volume = self.volume_api.get(context, volume_id)
        self.volume_api.check_detach(context, volume)
        self.volume_api.begin_detaching(context, volume_id)

        # Manager-detach
        self.detach_volume(context, volume_id, instance)
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def reboot_instance(self, context, instance, block_device_info,
                    reboot_type):
    """Reboot an instance on this host.

    :param reboot_type: "SOFT" for a guest-cooperative reboot, anything
        else is treated as a hard reboot
    """
    # acknowledge the request made it to the manager
    if reboot_type == "SOFT":
        instance.task_state = task_states.REBOOT_PENDING
        expected_states = (task_states.REBOOTING,
                           task_states.REBOOT_PENDING,
                           task_states.REBOOT_STARTED)
    else:
        instance.task_state = task_states.REBOOT_PENDING_HARD
        expected_states = (task_states.REBOOTING_HARD,
                           task_states.REBOOT_PENDING_HARD,
                           task_states.REBOOT_STARTED_HARD)

    context = context.elevated()
    LOG.audit(_("Rebooting instance"), context=context, instance=instance)

    # NOTE(review): the block_device_info parameter is immediately
    # overwritten with a fresh lookup here -- the passed-in value is
    # effectively unused; presumably kept for RPC compatibility.
    block_device_info = self._get_instance_block_device_info(context,
                                                             instance)

    network_info = self._get_instance_nw_info(context, instance)

    self._notify_about_instance_usage(context, instance, "reboot.start")

    instance.power_state = self._get_power_state(context, instance)
    instance.save(expected_task_state=expected_states)

    if instance.power_state != power_state.RUNNING:
        state = instance.power_state
        running = power_state.RUNNING
        LOG.warning(_LW('trying to reboot a non-running instance:'
                        ' (state: %(state)s expected: %(running)s)'),
                    {'state': state, 'running': running},
                    context=context, instance=instance)

    def bad_volumes_callback(bad_devices):
        # Invoked by the driver if it had to detach broken volumes.
        self._handle_bad_volumes_detached(
                context, instance, bad_devices, block_device_info)

    try:
        # Don't change it out of rescue mode
        if instance.vm_state == vm_states.RESCUED:
            new_vm_state = vm_states.RESCUED
        else:
            new_vm_state = vm_states.ACTIVE
        new_power_state = None
        if reboot_type == "SOFT":
            instance.task_state = task_states.REBOOT_STARTED
            expected_state = task_states.REBOOT_PENDING
        else:
            instance.task_state = task_states.REBOOT_STARTED_HARD
            expected_state = task_states.REBOOT_PENDING_HARD
        instance.save(expected_task_state=expected_state)
        self.driver.reboot(context, instance,
                           network_info,
                           reboot_type,
                           block_device_info=block_device_info,
                           bad_volumes_callback=bad_volumes_callback)

    except Exception as error:
        with excutils.save_and_reraise_exception() as ctxt:
            exc_info = sys.exc_info()
            # if the reboot failed but the VM is running don't
            # put it into an error state
            new_power_state = self._get_power_state(context, instance)
            if new_power_state == power_state.RUNNING:
                LOG.warning(_LW('Reboot failed but instance is running'),
                            context=context, instance=instance)
                compute_utils.add_instance_fault_from_exc(context,
                        instance, error, exc_info)
                self._notify_about_instance_usage(context, instance,
                        'reboot.error', fault=error)
                # Swallow the exception: the guest survived the failure.
                ctxt.reraise = False
            else:
                LOG.error(_LE('Cannot reboot instance: %s'), error,
                          context=context, instance=instance)
                self._set_instance_obj_error_state(context, instance)

    # new_power_state is still None on the success path (or falsy on an
    # odd driver value); refresh it from the driver before saving.
    if not new_power_state:
        new_power_state = self._get_power_state(context, instance)

    try:
        instance.power_state = new_power_state
        instance.vm_state = new_vm_state
        instance.task_state = None
        instance.save()
    except exception.InstanceNotFound:
        LOG.warning(_LW("Instance disappeared during reboot"),
                    context=context, instance=instance)

    self._notify_about_instance_usage(context, instance, "reboot.end")
@delete_image_on_error
def _do_snapshot_instance(self, context, image_id, instance, rotation):
    """Snapshot the instance as a backup image.

    :raises: exception.RotationRequiredForBackup when rotation is
        negative (i.e. the caller did not supply a valid rotation count)
    """
    if rotation < 0:
        raise exception.RotationRequiredForBackup()
    self._snapshot_instance(context, image_id, instance,
                            task_states.IMAGE_BACKUP)
@wrap_exception()
@reverts_task_state
@wrap_instance_fault
def backup_instance(self, context, image_id, instance, backup_type,
                    rotation):
    """Backup an instance on this host.

    Takes a snapshot tagged as a backup, then rotates out backups
    exceeding the retention count.

    :param backup_type: daily | weekly
    :param rotation: int representing how many backups to keep around
    """
    self._do_snapshot_instance(context, image_id, instance, rotation)
    self._rotate_backups(context, instance, backup_type, rotation)
@wrap_exception()
@reverts_task_state
@wrap_instance_fault
@delete_image_on_error
def snapshot_instance(self, context, image_id, instance):
    """Snapshot an instance on this host.

    :param context: security context
    :param instance: a nova.objects.instance.Instance object
    :param image_id: glance.db.sqlalchemy.models.Image.Id
    """
    # NOTE(dave-mcnally) the task state will already be set by the api
    # but if the compute manager has crashed/been restarted prior to the
    # request getting here the task state may have been cleared so we set
    # it again and things continue normally
    try:
        instance.task_state = task_states.IMAGE_SNAPSHOT
        instance.save(
                    expected_task_state=task_states.IMAGE_SNAPSHOT_PENDING)
    except exception.InstanceNotFound:
        # possibility instance no longer exists, no point in continuing
        LOG.debug("Instance not found, could not set state %s "
                  "for instance.",
                  task_states.IMAGE_SNAPSHOT, instance=instance)
        return

    except exception.UnexpectedDeletingTaskStateError:
        # A delete raced with us; bail out quietly.
        LOG.debug("Instance being deleted, snapshot cannot continue",
                  instance=instance)
        return

    self._snapshot_instance(context, image_id, instance,
                            task_states.IMAGE_SNAPSHOT)
def _snapshot_instance(self, context, image_id, instance,
                       expected_task_state):
    """Drive the actual snapshot via the virt driver.

    :param expected_task_state: task state the instance must be in when
        the driver advances it (IMAGE_SNAPSHOT or IMAGE_BACKUP)
    """
    context = context.elevated()

    instance.power_state = self._get_power_state(context, instance)
    try:
        instance.save()

        LOG.audit(_('instance snapshotting'), context=context,
                  instance=instance)

        if instance.power_state != power_state.RUNNING:
            state = instance.power_state
            running = power_state.RUNNING
            LOG.warning(_LW('trying to snapshot a non-running instance: '
                            '(state: %(state)s expected: %(running)s)'),
                        {'state': state, 'running': running},
                        instance=instance)

        self._notify_about_instance_usage(
                context, instance, "snapshot.start")

        def update_task_state(task_state,
                              expected_state=expected_task_state):
            # Callback handed to the driver so it can advance the task
            # state (e.g. to IMAGE_UPLOADING) as the snapshot progresses.
            instance.task_state = task_state
            instance.save(expected_task_state=expected_state)

        self.driver.snapshot(context, instance, image_id,
                             update_task_state)

        instance.task_state = None
        instance.save(expected_task_state=task_states.IMAGE_UPLOADING)

        self._notify_about_instance_usage(context, instance,
                                          "snapshot.end")
    except (exception.InstanceNotFound,
            exception.UnexpectedDeletingTaskStateError):
        # the instance got deleted during the snapshot
        # Quickly bail out of here
        msg = 'Instance disappeared during snapshot'
        LOG.debug(msg, instance=instance)
        try:
            # Best effort: delete the partially-built snapshot image.
            image_service = glance.get_default_image_service()
            image = image_service.show(context, image_id)
            if image['status'] != 'active':
                image_service.delete(context, image_id)
        except Exception:
            LOG.warning(_LW("Error while trying to clean up image %s"),
                        image_id, instance=instance)
    except exception.ImageNotFound:
        instance.task_state = None
        instance.save()
        msg = _("Image not found during snapshot")
        LOG.warn(msg, instance=instance)
def _post_interrupted_snapshot_cleanup(self, context, instance):
    """Let the virt driver clean up after an interrupted snapshot."""
    self.driver.post_interrupted_snapshot_cleanup(context, instance)
@object_compat
@messaging.expected_exceptions(NotImplementedError)
def volume_snapshot_create(self, context, instance, volume_id,
                           create_info):
    """Create a snapshot of an attached volume via the virt driver."""
    self.driver.volume_snapshot_create(context, instance, volume_id,
                                       create_info)
@object_compat
@messaging.expected_exceptions(NotImplementedError)
def volume_snapshot_delete(self, context, instance, volume_id,
                           snapshot_id, delete_info):
    """Delete a snapshot of an attached volume via the virt driver."""
    self.driver.volume_snapshot_delete(context, instance, volume_id,
                                       snapshot_id, delete_info)
@wrap_instance_fault
def _rotate_backups(self, context, instance, backup_type, rotation):
    """Delete excess backups associated to an instance.

    Instances are allowed a fixed number of backups (the rotation number);
    this method deletes the oldest backups that exceed the rotation
    threshold.

    :param context: security context
    :param instance: Instance dict
    :param backup_type: daily | weekly
    :param rotation: int representing how many backups to keep around;
        None if rotation shouldn't be used (as in the case of snapshots)
    """
    filters = {'property-image_type': 'backup',
               'property-backup_type': backup_type,
               'property-instance_uuid': instance.uuid}

    # Sorted newest-first, so the oldest backups sit at the tail.
    images = self.image_api.get_all(context, filters=filters,
                                    sort_key='created_at', sort_dir='desc')
    num_images = len(images)
    LOG.debug("Found %(num_images)d images (rotation: %(rotation)d)",
              {'num_images': num_images, 'rotation': rotation},
              instance=instance)

    if num_images > rotation:
        # NOTE(sirp): this deletes all backups that exceed the rotation
        # limit
        excess = num_images - rotation
        LOG.debug("Rotating out %d backups", excess,
                  instance=instance)
        for _ in xrange(excess):
            # pop() removes the last (oldest remaining) entry.
            image = images.pop()
            image_id = image['id']
            LOG.debug("Deleting image %s", image_id,
                      instance=instance)
            self.image_api.delete(context, image_id)
@object_compat
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def set_admin_password(self, context, instance, new_pass):
    """Set the root/admin password for an instance on this host.

    This is generally only called by API password resets after an
    image has been built.

    @param context: Nova auth context.
    @param instance: Nova instance object.
    @param new_pass: The admin password for the instance.
    """
    context = context.elevated()
    if new_pass is None:
        # Generate a random password
        new_pass = utils.generate_password()

    current_power_state = self._get_power_state(context, instance)
    expected_state = power_state.RUNNING

    if current_power_state != expected_state:
        # Clear the task state before failing so the instance does not
        # stay stuck in UPDATING_PASSWORD.
        instance.task_state = None
        instance.save(expected_task_state=task_states.UPDATING_PASSWORD)
        _msg = _('instance %s is not running') % instance.uuid
        raise exception.InstancePasswordSetFailed(
                instance=instance.uuid, reason=_msg)

    try:
        self.driver.set_admin_password(instance, new_pass)
        LOG.audit(_("Root password set"), instance=instance)
        instance.task_state = None
        instance.save(
                expected_task_state=task_states.UPDATING_PASSWORD)
    except NotImplementedError:
        LOG.warning(_LW('set_admin_password is not implemented '
                        'by this driver or guest instance.'),
                    instance=instance)
        instance.task_state = None
        instance.save(
                expected_task_state=task_states.UPDATING_PASSWORD)
        raise NotImplementedError(_('set_admin_password is not '
                                    'implemented by this driver or guest '
                                    'instance.'))
    except exception.UnexpectedTaskStateError:
        # interrupted by another (most likely delete) task
        # do not retry
        raise
    except Exception as e:
        # Catch all here because this could be anything.
        LOG.exception(_LE('set_admin_password failed: %s'), e,
                      instance=instance)
        self._set_instance_obj_error_state(context, instance)
        # We create a new exception here so that we won't
        # potentially reveal password information to the
        # API caller.  The real exception is logged above
        _msg = _('error setting admin password')
        raise exception.InstancePasswordSetFailed(
                instance=instance.uuid, reason=_msg)
@wrap_exception()
@reverts_task_state
@wrap_instance_fault
def inject_file(self, context, path, file_contents, instance):
    """Write a file to the specified path in an instance on this host."""
    # NOTE(russellb) Remove this method, as well as the underlying virt
    # driver methods, when the compute rpc interface is bumped to 4.x
    # as it is no longer used.
    context = context.elevated()
    running = power_state.RUNNING
    current = self._get_power_state(context, instance)
    if current != running:
        # Warn, but still attempt the injection below.
        LOG.warning(_LW('trying to inject a file into a non-running '
                        '(state: %(current_state)s expected: '
                        '%(expected_state)s)'),
                    {'current_state': current,
                     'expected_state': running},
                    instance=instance)
    LOG.audit(_('injecting file to %s'), path,
              instance=instance)
    self.driver.inject_file(instance, path, file_contents)
def _get_rescue_image(self, context, instance, rescue_image_ref=None):
    """Determine what image should be used to boot the rescue VM.

    Preference order:
      1. An explicitly supplied rescue_image_ref.
      2. The base image the instance was built from, so the customer
         gets a rescue environment they are familiar with (a Debian
         instance rescues into Debian).
      3. The instance's current image, as a last resort.
    """
    if not rescue_image_ref:
        # Fall back to the base image recorded in system metadata.
        sys_meta = utils.instance_sys_meta(instance)
        rescue_image_ref = sys_meta.get('image_base_image_ref')

    if not rescue_image_ref:
        LOG.warning(_LW('Unable to find a different image to use for '
                        'rescue VM, using instance\'s current image'),
                    instance=instance)
        rescue_image_ref = instance.image_ref

    image_meta = compute_utils.get_image_metadata(context, self.image_api,
                                                  rescue_image_ref,
                                                  instance)
    # NOTE(belliott) bug #1227350 - xenapi needs the actual image id
    image_meta['id'] = rescue_image_ref
    return image_meta
@object_compat
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def rescue_instance(self, context, instance, rescue_password,
                    rescue_image_ref=None, clean_shutdown=True):
    """Power off the instance and boot a rescue VM in its place.

    :param rescue_password: admin password for the rescue VM; a random
        one is generated when not supplied
    :param rescue_image_ref: optional image to boot the rescue VM from
    :param clean_shutdown: attempt a graceful power-off first
    :raises: exception.InstanceNotRescuable on any driver failure
    """
    context = context.elevated()
    LOG.audit(_('Rescuing'), context=context, instance=instance)

    admin_password = (rescue_password if rescue_password else
                      utils.generate_password())

    network_info = self._get_instance_nw_info(context, instance)

    rescue_image_meta = self._get_rescue_image(context, instance,
                                               rescue_image_ref)

    extra_usage_info = {'rescue_image_name':
                        rescue_image_meta.get('name', '')}
    self._notify_about_instance_usage(context, instance,
            "rescue.start", extra_usage_info=extra_usage_info,
            network_info=network_info)

    try:
        self._power_off_instance(context, instance, clean_shutdown)

        self.driver.rescue(context, instance,
                           network_info,
                           rescue_image_meta, admin_password)
    except Exception as e:
        LOG.exception(_LE("Error trying to Rescue Instance"),
                      instance=instance)
        raise exception.InstanceNotRescuable(
            instance_id=instance.uuid,
            reason=_("Driver Error: %s") % e)

    self.conductor_api.notify_usage_exists(context, instance,
                                           current_period=True)

    instance.vm_state = vm_states.RESCUED
    instance.task_state = None
    instance.power_state = self._get_power_state(context, instance)
    instance.launched_at = timeutils.utcnow()
    instance.save(expected_task_state=task_states.RESCUING)

    self._notify_about_instance_usage(context, instance,
            "rescue.end", extra_usage_info=extra_usage_info,
            network_info=network_info)
@object_compat
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def unrescue_instance(self, context, instance):
    """Tear down the rescue VM and return the instance to ACTIVE."""
    context = context.elevated()
    LOG.audit(_('Unrescuing'), context=context, instance=instance)

    network_info = self._get_instance_nw_info(context, instance)
    self._notify_about_instance_usage(context, instance,
            "unrescue.start", network_info=network_info)

    with self._error_out_instance_on_exception(context, instance):
        self.driver.unrescue(instance,
                             network_info)

    instance.vm_state = vm_states.ACTIVE
    instance.task_state = None
    instance.power_state = self._get_power_state(context, instance)
    instance.save(expected_task_state=task_states.UNRESCUING)

    self._notify_about_instance_usage(context,
                                      instance,
                                      "unrescue.end",
                                      network_info=network_info)
@object_compat
@wrap_exception()
@wrap_instance_fault
def change_instance_metadata(self, context, diff, instance):
    """Update the metadata published to the instance.

    :param diff: dict of metadata changes to apply, passed through to
        the virt driver
    """
    LOG.debug("Changing instance metadata according to %r",
              diff, instance=instance)
    self.driver.change_instance_metadata(context, instance, diff)
def _cleanup_stored_instance_types(self, instance, restore_old=False):
    """Clean up "old" and "new" instance_type information stored in
    instance's system_metadata. Optionally update the "current"
    instance_type to the saved old one first.

    Returns the updated system_metadata as a dict, the
    post-cleanup current instance type and the to-be dropped
    instance type.
    """
    metadata = instance.system_metadata

    if not restore_old:
        current_type = instance.get_flavor()
        dropped_type = instance.get_flavor('old')
    else:
        # Put the stashed old flavor back as the current one before the
        # stashed copies are removed below.
        current_type = instance.get_flavor('old')
        dropped_type = instance.get_flavor()
        instance.set_flavor(current_type)

    instance.delete_flavor('old')
    instance.delete_flavor('new')

    return metadata, current_type, dropped_type
@wrap_exception()
@wrap_instance_event
@wrap_instance_fault
def confirm_resize(self, context, instance, reservations, migration):
    """Confirm a resize/migration, destroying the source instance.

    Every early-return path rolls the quota reservations back; the
    commit happens inside _confirm_resize.
    """
    quotas = objects.Quotas.from_reservations(context,
                                              reservations,
                                              instance=instance)

    # Serialize per-instance so two confirmations cannot race.
    @utils.synchronized(instance['uuid'])
    def do_confirm_resize(context, instance, migration_id):
        # NOTE(wangpan): Get the migration status from db, if it has been
        # confirmed, we do nothing and return here
        LOG.debug("Going to confirm migration %s", migration_id,
                  context=context, instance=instance)
        try:
            # TODO(russellb) Why are we sending the migration object just
            # to turn around and look it up from the db again?
            migration = objects.Migration.get_by_id(
                    context.elevated(), migration_id)
        except exception.MigrationNotFound:
            LOG.error(_LE("Migration %s is not found during confirmation"),
                      migration_id, context=context, instance=instance)
            quotas.rollback()
            return

        if migration.status == 'confirmed':
            LOG.info(_LI("Migration %s is already confirmed"),
                     migration_id, context=context, instance=instance)
            quotas.rollback()
            return
        elif migration.status not in ('finished', 'confirming'):
            LOG.warning(_LW("Unexpected confirmation status '%(status)s' "
                            "of migration %(id)s, exit confirmation "
                            "process"),
                        {"status": migration.status, "id": migration_id},
                        context=context, instance=instance)
            quotas.rollback()
            return

        # NOTE(wangpan): Get the instance from db, if it has been
        # deleted, we do nothing and return here
        expected_attrs = ['metadata', 'system_metadata', 'flavor']
        try:
            instance = objects.Instance.get_by_uuid(
                    context, instance.uuid,
                    expected_attrs=expected_attrs)
        except exception.InstanceNotFound:
            LOG.info(_LI("Instance is not found during confirmation"),
                     context=context, instance=instance)
            quotas.rollback()
            return

        self._confirm_resize(context, instance, quotas,
                             migration=migration)

    do_confirm_resize(context, instance, migration.id)
def _confirm_resize(self, context, instance, quotas,
                    migration=None):
    """Destroys the source instance."""
    self._notify_about_instance_usage(context, instance,
                                      "resize.confirm.start")

    with self._error_out_instance_on_exception(context, instance,
                                               quotas=quotas):
        # NOTE(danms): delete stashed migration information
        sys_meta, instance_type, old_instance_type = (
            self._cleanup_stored_instance_types(instance))
        sys_meta.pop('old_vm_state', None)

        instance.system_metadata = sys_meta
        instance.save()

        # NOTE(tr3buchet): tear down networks on source host
        self.network_api.setup_networks_on_host(context, instance,
                           migration.source_compute, teardown=True)

        network_info = self._get_instance_nw_info(context, instance)
        # Let the driver drop the source guest's disks/state.
        self.driver.confirm_migration(migration, instance,
                                      network_info)

        migration.status = 'confirmed'
        with migration.obj_as_admin():
            migration.save()

        # Free the resize claim held for the old flavor on the source.
        rt = self._get_resource_tracker(migration.source_node)
        rt.drop_resize_claim(context, instance, old_instance_type)

        # NOTE(mriedem): The old_vm_state could be STOPPED but the user
        # might have manually powered up the instance to confirm the
        # resize/migrate, so we need to check the current power state
        # on the instance and set the vm_state appropriately. We default
        # to ACTIVE because if the power state is not SHUTDOWN, we
        # assume _sync_instance_power_state will clean it up.
        p_state = instance.power_state
        vm_state = None
        if p_state == power_state.SHUTDOWN:
            vm_state = vm_states.STOPPED
            LOG.debug("Resized/migrated instance is powered off. "
                      "Setting vm_state to '%s'.", vm_state,
                      instance=instance)
        else:
            vm_state = vm_states.ACTIVE

        instance.vm_state = vm_state
        instance.task_state = None
        instance.save(expected_task_state=[None, task_states.DELETING])

        self._notify_about_instance_usage(
            context, instance, "resize.confirm.end",
            network_info=network_info)

        quotas.commit()
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def revert_resize(self, context, instance, migration, reservations):
    """Destroys the new instance on the destination machine.

    Reverts the model changes, and powers on the old instance on the
    source machine.
    """
    quotas = objects.Quotas.from_reservations(context,
                                              reservations,
                                              instance=instance)

    # NOTE(comstud): A revert_resize is essentially a resize back to
    # the old size, so we need to send a usage event here.
    self.conductor_api.notify_usage_exists(
            context, instance, current_period=True)

    with self._error_out_instance_on_exception(context, instance,
                                               quotas=quotas):
        # NOTE(tr3buchet): tear down networks on destination host
        self.network_api.setup_networks_on_host(context, instance,
                                                teardown=True)

        migration_p = obj_base.obj_to_primitive(migration)
        self.network_api.migrate_instance_start(context,
                                                instance,
                                                migration_p)

        network_info = self._get_instance_nw_info(context, instance)
        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
                context, instance.uuid)
        block_device_info = self._get_instance_block_device_info(
                            context, instance, bdms=bdms)

        # Only wipe local disks when storage is not shared with the
        # source host.
        destroy_disks = not self._is_instance_storage_shared(context,
                                                             instance)
        self.driver.destroy(context, instance, network_info,
                            block_device_info, destroy_disks)

        self._terminate_volume_connections(context, instance, bdms)

        migration.status = 'reverted'
        with migration.obj_as_admin():
            migration.save()

        # Release the destination's resize claim for this instance.
        rt = self._get_resource_tracker(instance.node)
        rt.drop_resize_claim(context, instance)

        # Hand off to the source compute to finish the revert.
        self.compute_rpcapi.finish_revert_resize(context, instance,
                migration, migration.source_compute,
                quotas.reservations)
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def finish_revert_resize(self, context, instance, reservations, migration):
    """Finishes the second half of reverting a resize.

    Bring the original source instance state back (active/shutoff) and
    revert the resized attributes in the database.
    """
    quotas = objects.Quotas.from_reservations(context,
                                              reservations,
                                              instance=instance)

    with self._error_out_instance_on_exception(context, instance,
                                               quotas=quotas):
        network_info = self._get_instance_nw_info(context, instance)

        self._notify_about_instance_usage(
                context, instance, "resize.revert.start")

        # Restore the old flavor and drop the stashed old/new copies.
        sys_meta, instance_type, drop_instance_type = (
            self._cleanup_stored_instance_types(instance, True))

        # NOTE(mriedem): delete stashed old_vm_state information; we
        # default to ACTIVE for backwards compatibility if old_vm_state
        # is not set
        old_vm_state = sys_meta.pop('old_vm_state', vm_states.ACTIVE)

        instance.system_metadata = sys_meta
        instance.memory_mb = instance_type['memory_mb']
        instance.vcpus = instance_type['vcpus']
        instance.root_gb = instance_type['root_gb']
        instance.ephemeral_gb = instance_type['ephemeral_gb']
        instance.instance_type_id = instance_type['id']
        instance.host = migration.source_compute
        instance.node = migration.source_node
        instance.save()

        migration.dest_compute = migration.source_compute
        with migration.obj_as_admin():
            migration.save()

        self.network_api.setup_networks_on_host(context, instance,
                                                migration.source_compute)

        block_device_info = self._get_instance_block_device_info(
                context, instance, refresh_conn_info=True)

        # Only power the guest back on if it was running pre-resize.
        power_on = old_vm_state != vm_states.STOPPED
        self.driver.finish_revert_migration(context, instance,
                                            network_info,
                                            block_device_info, power_on)

        instance.launched_at = timeutils.utcnow()
        instance.save(expected_task_state=task_states.RESIZE_REVERTING)

        migration_p = obj_base.obj_to_primitive(migration)
        self.network_api.migrate_instance_finish(context,
                                                 instance,
                                                 migration_p)

        # if the original vm state was STOPPED, set it back to STOPPED
        LOG.info(_LI("Updating instance to original state: '%s'"),
                 old_vm_state)
        if power_on:
            instance.vm_state = vm_states.ACTIVE
            instance.task_state = None
            instance.save()
        else:
            instance.task_state = task_states.POWERING_OFF
            instance.save()
            self.stop_instance(context, instance=instance)

        self._notify_about_instance_usage(
                context, instance, "resize.revert.end")
        quotas.commit()
def _prep_resize(self, context, image, instance, instance_type,
                 quotas, request_spec, filter_properties, node,
                 clean_shutdown=True):
    """Validate the resize request, claim resources on the target node
    and kick off resize_instance via RPC.

    :raises: exception.MigrationError when the instance has no source
        host or when resizing to the same host is disallowed by config
    """

    if not filter_properties:
        filter_properties = {}

    if not instance['host']:
        self._set_instance_error_state(context, instance)
        msg = _('Instance has no source host')
        raise exception.MigrationError(reason=msg)

    same_host = instance['host'] == self.host
    if same_host and not CONF.allow_resize_to_same_host:
        self._set_instance_error_state(context, instance)
        msg = _('destination same as source!')
        raise exception.MigrationError(reason=msg)

    # NOTE(danms): Stash the new instance_type to avoid having to
    # look it up in the database later
    instance.set_flavor(instance_type, 'new')
    # NOTE(mriedem): Stash the old vm_state so we can set the
    # resized/reverted instance back to the same state later.
    vm_state = instance['vm_state']
    LOG.debug('Stashing vm_state: %s', vm_state, instance=instance)
    instance.system_metadata['old_vm_state'] = vm_state
    instance.save()

    limits = filter_properties.get('limits', {})
    # Claim resources for the new flavor; the claim's migration record
    # is handed straight to resize_instance.
    rt = self._get_resource_tracker(node)
    with rt.resize_claim(context, instance, instance_type,
                         image_meta=image, limits=limits) as claim:
        LOG.audit(_('Migrating'), context=context, instance=instance)
        self.compute_rpcapi.resize_instance(
                context, instance, claim.migration, image,
                instance_type, quotas.reservations,
                clean_shutdown)
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@wrap_instance_fault
def prep_resize(self, context, image, instance, instance_type,
reservations, request_spec, filter_properties, node,
clean_shutdown=True):
"""Initiates the process of moving a running instance to another host.

Possibly changes the RAM and disk size in the process.

MigrationPreCheckError is re-raised as-is; any other failure is
handed to _reschedule_resize_or_reraise so the resize can be retried
on a different host. The resize.prep.start/end usage notifications
are emitted around the attempt regardless of outcome.
"""
if node is None:
# Scheduler did not pick a node; fall back to the first node the
# virt driver reports as available.
node = self.driver.get_available_nodes(refresh=True)[0]
LOG.debug("No node specified, defaulting to %s", node,
instance=instance)
quotas = objects.Quotas.from_reservations(context,
reservations,
instance=instance)
with self._error_out_instance_on_exception(context, instance,
quotas=quotas):
self.conductor_api.notify_usage_exists(
context, instance, current_period=True)
self._notify_about_instance_usage(
context, instance, "resize.prep.start")
try:
self._prep_resize(context, image, instance,
instance_type, quotas,
request_spec, filter_properties,
node, clean_shutdown)
# NOTE(dgenin): This is thrown in LibvirtDriver when the
# instance to be migrated is backed by LVM.
# Remove when LVM migration is implemented.
except exception.MigrationPreCheckError:
raise
except Exception:
# try to re-schedule the resize elsewhere:
exc_info = sys.exc_info()
self._reschedule_resize_or_reraise(context, image, instance,
exc_info, instance_type, quotas, request_spec,
filter_properties)
finally:
extra_usage_info = dict(
new_instance_type=instance_type['name'],
new_instance_type_id=instance_type['id'])
self._notify_about_instance_usage(
context, instance, "resize.prep.end",
extra_usage_info=extra_usage_info)
def _reschedule_resize_or_reraise(self, context, image, instance, exc_info,
instance_type, quotas, request_spec, filter_properties):
"""Try to re-schedule the resize or re-raise the original error to
error out the instance.

:param exc_info: sys.exc_info() tuple of the original failure; it is
re-raised with its original traceback when re-scheduling
is not possible.
"""
if not request_spec:
request_spec = {}
if not filter_properties:
filter_properties = {}
rescheduled = False
instance_uuid = instance['uuid']
try:
reschedule_method = self.compute_task_api.resize_instance
scheduler_hint = dict(filter_properties=filter_properties)
method_args = (instance, None, scheduler_hint, instance_type,
quotas.reservations)
task_state = task_states.RESIZE_PREP
rescheduled = self._reschedule(context, request_spec,
filter_properties, instance, reschedule_method,
method_args, task_state, exc_info)
except Exception as error:
# The re-schedule attempt itself failed; record that fault and fall
# through to the else branch below to re-raise the original error.
rescheduled = False
LOG.exception(_LE("Error trying to reschedule"),
instance_uuid=instance_uuid)
compute_utils.add_instance_fault_from_exc(context,
instance, error,
exc_info=sys.exc_info())
self._notify_about_instance_usage(context, instance,
'resize.error', fault=error)
if rescheduled:
# Record the original error as an instance fault, but let the
# re-scheduled resize proceed on the new host.
self._log_original_error(exc_info, instance_uuid)
compute_utils.add_instance_fault_from_exc(context,
instance, exc_info[1], exc_info=exc_info)
self._notify_about_instance_usage(context, instance,
'resize.error', fault=exc_info[1])
else:
# not re-scheduling: re-raise the original exception with its
# original traceback (Python 2 three-argument raise).
raise exc_info[0], exc_info[1], exc_info[2]
@wrap_exception()
@reverts_task_state
@wrap_instance_event
@errors_out_migration
@wrap_instance_fault
def resize_instance(self, context, instance, image,
reservations, migration, instance_type,
clean_shutdown=True):
"""Starts the migration of a running instance to another host."""
quotas = objects.Quotas.from_reservations(context,
reservations,
instance=instance)
with self._error_out_instance_on_exception(context, instance,
quotas=quotas):
if not instance_type:
instance_type = objects.Flavor.get_by_id(
context, migration['new_instance_type_id'])
network_info = self._get_instance_nw_info(context, instance)
migration.status = 'migrating'
with migration.obj_as_admin():
migration.save()
instance.task_state = task_states.RESIZE_MIGRATING
instance.save(expected_task_state=task_states.RESIZE_PREP)
self._notify_about_instance_usage(
context, instance, "resize.start", network_info=network_info)
bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
context, instance.uuid)
block_device_info = self._get_instance_block_device_info(
context, instance, bdms=bdms)
timeout, retry_interval = self._get_power_off_values(context,
instance, clean_shutdown)
disk_info = self.driver.migrate_disk_and_power_off(
context, instance, migration.dest_host,
instance_type, network_info,
block_device_info,
timeout, retry_interval)
self._terminate_volume_connections(context, instance, bdms)
migration_p = obj_base.obj_to_primitive(migration)
self.network_api.migrate_instance_start(context,
instance,
migration_p)
migration.status = 'post-migrating'
with migration.obj_as_admin():
migration.save()
instance.host = migration.dest_compute
instance.node = migration.dest_node
instance.task_state = task_states.RESIZE_MIGRATED
instance.save(expected_task_state=task_states.RESIZE_MIGRATING)
self.compute_rpcapi.finish_resize(context, instance,
migration, image, disk_info,
migration.dest_compute, reservations=quotas.reservations)
self._notify_about | codeparrot/github-code-clean |
#!/usr/bin/env python
import os
import sys
import time
sys.path.append(os.path.join(os.path.dirname(__file__), "../../pox"))
import argparse
from collections import defaultdict
import networkx as nx
from pox.lib.packet.ethernet import ethernet
from pox.openflow.libopenflow_01 import ofp_flow_mod_command_rev_map
from pox.openflow.libopenflow_01 import OFPT_HELLO
from pox.openflow.libopenflow_01 import OFPT_FEATURES_REQUEST
from pox.openflow.libopenflow_01 import OFPT_FEATURES_REPLY
from pox.openflow.libopenflow_01 import OFPT_SET_CONFIG
from pox.openflow.libopenflow_01 import OFPFC_DELETE_STRICT
from pox.openflow.libopenflow_01 import OFPT_STATS_REQUEST
from pox.openflow.libopenflow_01 import OFPT_VENDOR
from pox.openflow.libopenflow_01 import OFPT_GET_CONFIG_REQUEST
from pox.openflow.libopenflow_01 import OFPT_GET_CONFIG_REPLY
from pox.openflow.libopenflow_01 import OFPT_STATS_REPLY
from hb_utils import pkt_info
from hb_shadow_table import ShadowFlowTable
from hb_race_detector import RaceDetector
from hb_race_detector import predecessor_types
# To make sure all events are registered
from hb_json_event import *
from hb_events import *
from hb_sts_events import *
from hb_utils import dfs_edge_filter
from hb_utils import just_mid_iter
from hb_utils import pretty_match
#
# Do not import any STS types! We would like to be able to run this offline
# from a trace file without having to depend on STS.
#
# Reverse lookup: OpenFlow flow_mod command code -> symbolic name.
OFP_COMMANDS = dict((v, k) for k, v in ofp_flow_mod_command_rev_map.iteritems())
# OpenFlow message types that carry no HB-relevant information; events with
# these message types are dropped while reading the trace.
SKIP_MSGS = [
    OFPT_HELLO,
    OFPT_VENDOR,
    OFPT_FEATURES_REQUEST,
    OFPT_FEATURES_REPLY,
    OFPT_SET_CONFIG,
    OFPT_GET_CONFIG_REQUEST,
    OFPT_GET_CONFIG_REPLY,
    OFPT_STATS_REQUEST,
    OFPT_STATS_REPLY,
]
class HappensBeforeGraph(object):
def __init__(self, results_dir=None, add_hb_time=False, rw_delta=5,
             ww_delta=1, filter_rw=False, ignore_ethertypes=None,
             no_race=False, alt_barr=False, disable_path_cache=True, data_deps=False,
             verify_and_minimize_only=False, is_minimized=False):
    """Set up an empty happens-before graph plus all bookkeeping tables."""
    self.results_dir = results_dir
    self.g = nx.DiGraph()
    # Reachability cache (all-pairs shortest path lengths) for has_path().
    self.disable_path_cache = disable_path_cache
    self._cached_paths = None
    self._cached_reverse_paths = None
    # Primary event indexes.
    self.events_by_id = {}
    self.events_with_reads_writes = []
    self.events_by_pid_out = defaultdict(list)
    self.events_by_mid_out = defaultdict(list)
    # Events carrying a pid_in/mid_in that still await a matching out id.
    self.events_pending_pid_in = defaultdict(list)
    self.events_pending_mid_in = defaultdict(list)
    # Bookkeeping for the barrier pre rule.
    self.events_before_next_barrier = defaultdict(list)
    # Bookkeeping for the barrier post rule.
    self.most_recent_barrier = {}
    # Race detection machinery.
    self.race_detector = RaceDetector(
        self, filter_rw=filter_rw, add_hb_time=add_hb_time, ww_delta=ww_delta,
        rw_delta=rw_delta)
    self.ww_delta = ww_delta
    self.rw_delta = rw_delta
    # Deliberately forced off: time edges are only marked inside the
    # RaceDetector, never added to the HB graph itself.
    self.add_hb_time = False
    # Counters for HB edges added purely because of time deltas.
    self._time_hb_rw_edges_counter = 0
    self._time_hb_ww_edges_counter = 0
    self.ignore_ethertypes = check_list(ignore_ethertypes)
    self.no_race = no_race
    self.packet_traces = None
    self.host_sends = {}
    # Controller->switch messages handled by a switch.
    self.msg_handles = {}
    # Switch->controller messages.
    self.msgs = {}
    self.alt_barr = alt_barr
    self.versions = {}
    # Whether to add read-after-write dependency edges (shadow tables).
    self.data_deps = data_deps
    self.shadow_tables = {}
    self.covered_races = {}
    self.verify_and_minimize_only = verify_and_minimize_only
    self.is_minimized = is_minimized
@property
def events(self):
    """Iterate over the event objects attached to every graph node."""
    return (node_data['event'] for _, node_data in self.g.nodes_iter(True))
@property
def predecessors(self):
    """Yield (event, set of predecessor events) for every node."""
    for eid, node_data in self.g.nodes(data=True):
        preds = set(self.g.node[p]['event']
                    for p in self.g.predecessors_iter(eid))
        yield (node_data['event'], preds)
def _add_to_lookup_tables(self, event):
    """Index the event by its out-ids and queue it for pid_in/mid_in matching."""
    for pid in getattr(event, 'pid_out', []):
        self.events_by_pid_out[pid].append(event)
    for mid in getattr(event, 'mid_out', []):
        self.events_by_mid_out[mid].append(event)
    # (pending table, include-predicate, search-key extractor) triples.
    # Rebuilt on every call, mirroring the original behavior of always
    # refreshing self.lookup_tables.
    self.lookup_tables = [
        (self.events_pending_pid_in,
         lambda x: hasattr(x, 'pid_in'),
         lambda x: x.pid_in),
        (self.events_pending_mid_in,
         lambda x: hasattr(x, 'mid_in'),
         lambda x: x.mid_in),
    ]
    for table, include, key_of in self.lookup_tables:
        if include(event):
            table[key_of(event)].append(event)
def _update_event_is_linked_pid_in(self, event):
    """Drop the event from the pid_in pending list once it has been linked."""
    waiting = self.events_pending_pid_in[event.pid_in]
    if event in waiting:
        waiting.remove(event)
def _update_event_is_linked_mid_in(self, event):
    """Drop the event from the mid_in pending list once it has been linked."""
    waiting = self.events_pending_mid_in[event.mid_in]
    if event in waiting:
        waiting.remove(event)
def update_path_cache(self):
"""(Re)build the all-pairs shortest-path-length cache used by has_path()."""
print "Updating has_path path cache..."
self._cached_paths = nx.all_pairs_shortest_path_length(self.g)
def has_path(self, src_eid, dst_eid, bidirectional=True, use_path_cache=True):
    """Return True if one event is reachable from the other in the HB graph.

    With bidirectional=True (the default) reachability in either direction
    counts. The all-pairs cache is consulted unless disabled globally or
    bypassed via use_path_cache=False.
    """
    if self.disable_path_cache or not use_path_cache:
        if nx.has_path(self.g, src_eid, dst_eid):
            return True
        return bidirectional and nx.has_path(self.g, dst_eid, src_eid)
    if self._cached_paths is None:
        self.update_path_cache()
    if dst_eid in self._cached_paths[src_eid]:
        return True
    if bidirectional and src_eid in self._cached_paths[dst_eid]:
        return True
    return False
def _add_edge(self, before, after, sanity_check=True, update_path_cache=True, **attrs):
if sanity_check and before.type not in predecessor_types[after.type]:
print "Warning: Not a valid HB edge: "+before.typestr+" ("+str(before.eid)+") < "+after.typestr+" ("+str(after.eid)+")"
assert False
src, dst = before.eid, after.eid
if self.g.has_edge(src, dst):
rel = self.g.edge[src][dst]['rel']
# Allow edge to be added multiple times because of the same relation
# This is useful for time based edges
if rel != attrs['rel']:
raise ValueError(
"Edge already added %d->%d and relation: %s" % (src, dst, rel))
self.g.add_edge(before.eid, after.eid, attrs)
if update_path_cache:
# TODO(jm): do incremental update later. But for now, this is sufficient.
self._cached_paths = None
def _rule_01_pid(self, event):
    """HB rule 1: link pid_out events to the pid_in events they caused."""
    if hasattr(event, 'pid_in'):
        senders = self.events_by_pid_out.get(event.pid_in)
        if senders is not None:
            for sender in senders:
                self._add_edge(sender, event, rel='pid')
            self._update_event_is_linked_pid_in(event)
    # TODO(jm): remove by reordering first
    # Re-check events that were added out of trace order and are still
    # waiting for this event's pid_out.
    if hasattr(event, 'pid_out'):
        for pid_out in event.pid_out:
            if pid_out in self.events_pending_pid_in:
                # Iterate over a copy; the helper mutates the list.
                for waiter in list(self.events_pending_pid_in[pid_out]):
                    self._add_edge(event, waiter, rel='pid')
                    self._update_event_is_linked_pid_in(waiter)
def _rule_02_mid(self, event):
    """HB rule 2: link mid_out events to the mid_in events they caused."""
    if hasattr(event, 'mid_in'):
        senders = self.events_by_mid_out.get(event.mid_in)
        if senders is not None:
            for sender in senders:
                self._add_edge(sender, event, rel='mid')
            self._update_event_is_linked_mid_in(event)
    # TODO(jm): remove by reordering first
    # Re-check events added out of trace order (mainly controller events,
    # which are logged asynchronously) still waiting for this mid_out.
    if hasattr(event, 'mid_out'):
        for mid_out in event.mid_out:
            if mid_out in self.events_pending_mid_in:
                # Iterate over a copy; the helper mutates the list.
                for waiter in list(self.events_pending_mid_in[mid_out]):
                    self._add_edge(event, waiter, rel='mid')
                    self._update_event_is_linked_mid_in(waiter)
def _rule_03_barrier_pre(self, event):
    """HB rule 3: every switch event precedes the next barrier on its switch."""
    if event.type != 'HbMessageHandle':
        return
    if event.msg_type_str == "OFPT_BARRIER_REQUEST":
        for earlier in self.events_before_next_barrier[event.dpid]:
            self._add_edge(earlier, event, rel='barrier_pre')
        del self.events_before_next_barrier[event.dpid]
    else:
        self.events_before_next_barrier[event.dpid].append(event)
def _rule_04_barrier_post(self, event):
    """HB rule 4: a barrier request precedes all later events on its switch."""
    if event.type != 'HbMessageHandle':
        return
    if event.msg_type_str == "OFPT_BARRIER_REQUEST":
        self.most_recent_barrier[event.dpid] = event
    elif event.dpid in self.most_recent_barrier:
        barrier = self.most_recent_barrier[event.dpid]
        self._add_edge(barrier, event, rel='barrier_post')
def _find_triggering_HbControllerHandle_for_alternative_barrier(self, event):
    """Return the eid of the HbControllerHandle that triggered this event.

    Walks the chain
      event (HbMessageHandle) <- HbControllerSend <- HbControllerHandle
    and returns None when the chain does not exist.
    """
    preds = self.g.predecessors(event.eid)
    if len(preds) == 0:
        return None
    sends = [p for p in preds
             if self.g.node[p]['event'].type == "HbControllerSend"]
    assert len(sends) <= 1  # at most one HbControllerSend exists
    if len(sends) != 1:
        return None
    send_eid = sends[0]
    assert self.g.node[send_eid]['event'].type == "HbControllerSend"
    handles = [p for p in self.g.predecessors(send_eid)
               if self.g.node[p]['event'].type == "HbControllerHandle"]
    assert len(handles) <= 1  # at most one HbControllerHandle exists
    if len(handles) == 1:
        handle_eid = handles[0]
        assert self.g.node[handle_eid]['event'].type == "HbControllerHandle"
        return handle_eid
    return None
def _rule_03b_alternative_barrier_pre(self, event):
    """Barrier-pre rule keyed on the triggering HbControllerHandle eid.

    Instead of grouping by dpid, events are grouped by the eid of the
    HbControllerHandle that (via HbControllerSend) triggered them.
    """
    if event.type == 'HbMessageHandle':
        trigger_eid = self._find_triggering_HbControllerHandle_for_alternative_barrier(event)
        if trigger_eid is None:
            return
        if event.msg_type_str == "OFPT_BARRIER_REQUEST":
            for earlier in self.events_before_next_barrier[trigger_eid]:
                self._add_edge(earlier, event, rel='barrier_pre')
            del self.events_before_next_barrier[trigger_eid]
        else:
            self.events_before_next_barrier[trigger_eid].append(event)
    elif event.type == 'HbControllerSend':
        # The send may arrive after its successors; re-run both alternative
        # barrier rules on each successor now that the chain is complete.
        for succ_eid in self.g.successors(event.eid):
            succ_event = self.g.node[succ_eid]['event']
            self._rule_03b_alternative_barrier_pre(succ_event)
            self._rule_04b_alternative_barrier_post(succ_event)
def _rule_04b_alternative_barrier_post(self, event):
    """Barrier-post rule keyed on the triggering HbControllerHandle eid.

    Instead of grouping by dpid, events are grouped by the eid of the
    HbControllerHandle that (via HbControllerSend) triggered them.
    """
    if event.type == 'HbMessageHandle':
        trigger_eid = self._find_triggering_HbControllerHandle_for_alternative_barrier(event)
        if trigger_eid is None:
            return
        if event.msg_type_str == "OFPT_BARRIER_REQUEST":
            self.most_recent_barrier[trigger_eid] = event
        elif trigger_eid in self.most_recent_barrier:
            barrier = self.most_recent_barrier[trigger_eid]
            self._add_edge(barrier, event, rel='barrier_post')
    elif event.type == 'HbControllerSend':
        # The send may arrive after its successors; re-run both alternative
        # barrier rules on each successor now that the chain is complete.
        for succ_eid in self.g.successors(event.eid):
            succ_event = self.g.node[succ_eid]['event']
            self._rule_03b_alternative_barrier_pre(succ_event)
            self._rule_04b_alternative_barrier_post(succ_event)
def _rule_05_flow_removed(self, event):
    """HB rule 5: order flow-expiry events after related reads/writes.

    For an async flow expiry, scan all switch HbMessageHandle events for
    flow-table read/write operations that do not commute with a strict
    delete of the expired entry, and add a time-based HB edge from that
    event to the expiry when they are far enough apart in time.

    Bug fix: the duration was computed with ``10^9`` which is bitwise XOR
    (== 3) in Python, not ``10**9``; nanosecond conversion now uses
    exponentiation.
    """
    if not isinstance(event, HbAsyncFlowExpiry):
        return
    assert len(event.operations) == 1
    expiry = event.operations[0]
    flow_table = expiry.flow_table  # the flow table before the removal
    flow_mod = expiry.flow_mod      # the removed entry
    reason = expiry.reason          # idle or hard timeout; deletes are not handled
    duration = expiry.duration_sec * 10**9 + expiry.duration_nsec
    # TODO(JM): Handle deletes a different way? Currently deletes are recorded
    #           to the trace as async switch events, same as timeouts. This means
    #           that the instrumentation does NOT add a HB edge between the delete
    #           operation itself and the async delete notification to the controller.
    #           We might want to add such an edge; the hb_logger would have to link
    #           the two events during instrumentation, as it is almost impossible
    #           here (not enough information, events may be out of order).
    # TODO(jm): Implement read-after-write data dependency edges for flow expiry
    #           messages as well (flows expire *after* they were written); the
    #           information is partially available in hb_shadow_table.
    # Create a "dummy" operation that acts as a strict delete of the expired
    # entry, so the commutativity checker can compare it to real operations.
    class DummyObject(object):
        pass
    dummy_event = DummyObject()
    dummy_op = DummyObject()
    dummy_event.eid = event.eid
    dummy_op.flow_mod = ofp_flow_mod(match=flow_mod.match, priority=flow_mod.priority, command=OFPFC_DELETE_STRICT)
    # Find other read/write events in the graph.
    for e in self.events:
        if e == event:
            continue
        # Skip non-switch events.
        if type(e) != HbMessageHandle:
            continue
        kw_ops = []
        kr_ops = []
        for op in e.operations:
            if type(op) == TraceSwitchFlowTableWrite:
                kw_ops.append(op)
            elif type(op) == TraceSwitchFlowTableRead:
                kr_ops.append(op)
        if (not kw_ops) and (not kr_ops):
            continue
        # write vs. expiry: add a w/w time edge for non-commuting writes.
        for kw_op in kw_ops:
            if self.race_detector.commutativity_checker.check_commutativity_ww(
                e, kw_op, dummy_event, dummy_op):
                continue
            delta = abs(expiry.t - kw_op.t)
            if delta > self.ww_delta:
                self._time_hb_ww_edges_counter += 1
                self._add_edge(e, event, sanity_check=False, rel='time')
                break
        # read vs. expiry: add an r/w time edge for non-commuting reads.
        for kr_op in kr_ops:
            if self.race_detector.commutativity_checker.check_commutativity_rw(
                e, kr_op, dummy_event, dummy_op):
                continue
            delta = abs(expiry.t - kr_op.t)
            if delta > self.rw_delta:
                self._time_hb_rw_edges_counter += 1
                self._add_edge(e, event, sanity_check=False, rel='time')
                break
def _rule_06_time_rw(self, event):
    """HB rule 6: time-based edges from flow-table writes to packet reads.

    For a packet-handle event, add a 'time' HB edge from any message-handle
    event whose flow-table write matches the packet, when the two operations
    are more than rw_delta apart in time.

    Robustness fix: the original indexed ``operations[0]`` unconditionally
    and raised IndexError for a packet handle without any table read; such
    events are now skipped.
    """
    if type(event) not in [HbPacketHandle]:
        return
    packet_match = ofp_match.from_packet(event.packet, event.in_port)
    # Collect the read operations in the event. For OF 1.0 there should be
    # only one; OF 1.3 may have more.
    read_ops = [op for op in event.operations
                if type(op) == TraceSwitchFlowTableRead]
    if not read_ops:
        return
    for e in self.events:
        if type(e) != HbMessageHandle:
            continue
        for op in e.operations:
            if type(op) != TraceSwitchFlowTableWrite:
                continue
            if not op.flow_mod.match.matches_with_wildcards(packet_match, consider_other_wildcards=False):
                continue
            delta = abs(op.t - read_ops[0].t)
            if delta > self.rw_delta:
                self._time_hb_rw_edges_counter += 1
                self._add_edge(e, event, sanity_check=False, rel='time')
                break
def _rule_07_time_ww(self, event):
    """HB rule 7: time-based edges between non-commuting flow-table writes."""
    if type(event) not in [HbMessageHandle]:
        return
    # Write operations of this event. For OF 1.0 there should be only one;
    # OF 1.3 may have more.
    my_writes = [op for op in event.operations
                 if type(op) == TraceSwitchFlowTableWrite]
    # Nothing to order against if this event performs no writes.
    if not my_writes:
        return
    checker = self.race_detector.commutativity_checker
    # Compare against every other write event in the graph.
    for other in self.events:
        if other == event:
            continue
        # Skip non-switch events.
        if type(other) != HbMessageHandle:
            continue
        other_writes = [op for op in other.operations
                        if type(op) == TraceSwitchFlowTableWrite]
        if not other_writes:
            continue
        for my_op in my_writes:
            for other_op in other_writes:
                # Commuting writes need no ordering edge.
                if checker.check_commutativity_ww(event, my_op, other, other_op):
                    continue
                if abs(my_op.t - other_op.t) > self.ww_delta:
                    self._time_hb_ww_edges_counter += 1
                    self._add_edge(other, event, sanity_check=False, rel='time')
                    break
def _update_edges(self, event):
"""Run every applicable HB rule for a newly added event."""
self._rule_01_pid(event)
self._rule_02_mid(event)
# Either the dpid-keyed barrier rules or the alternative rules keyed on
# the triggering controller handle -- never both.
if self.alt_barr:
self._rule_03b_alternative_barrier_pre(event)
self._rule_04b_alternative_barrier_post(event)
else:
self._rule_03_barrier_pre(event)
self._rule_04_barrier_post(event)
self._rule_05_flow_removed(event)
# NOTE: __init__ forces self.add_hb_time to False, so the time-based
# rules below are currently disabled.
if self.add_hb_time:
self._rule_06_time_rw(event)
self._rule_07_time_ww(event)
def _update_shadow_tables(self, event):
    """Lazily create the per-switch shadow flow table and apply the event."""
    table = self.shadow_tables.get(event.dpid)
    if table is None:
        table = ShadowFlowTable(event.dpid, self.is_minimized)
        self.shadow_tables[event.dpid] = table
    table.apply_event(event)
def unpack_line(self, line):
    """Deserialize one JSON trace line into an event.

    Returns None for empty lines and '#' comment lines.
    """
    if not line or line.startswith('#'):
        return None
    # TODO(jm): Profiling showed JsonEvent.from_json dominates load time;
    #           everything else (including json.loads) is fast. Worth
    #           speeding up at some point.
    return JsonEvent.from_json(json.loads(line))
def add_line(self, line):
    """Parse a single trace line and, if it yields an event, add it."""
    parsed = self.unpack_line(line)
    if parsed:
        self.add_event(parsed)
def add_event(self, event):
"""Insert a parsed event into the HB graph and apply the HB rules.

Packets whose ethertype is in ignore_ethertypes and OF control messages
in SKIP_MSGS are silently dropped. Each event type dispatches to a small
handler that updates the shadow tables (when data_deps is enabled),
applies the edge rules, and records the event in the relevant index.
"""
assert event.eid not in self.events_by_id
if self.ignore_ethertypes:
packet = None
if hasattr(event, 'packet'):
packet = event.packet
# Message-handle events carry the packet inside the OF message payload.
if type(event) == HbMessageHandle and getattr(event.msg, 'data', None):
packet = ethernet(event.msg.data)
if packet and packet.type in self.ignore_ethertypes:
# print "Filtered PKT in ignore_ethertypes"
return
msg_type = getattr(event, 'msg_type', None)
if msg_type in SKIP_MSGS:
return
self.g.add_node(event.eid, event=event)
self.events_by_id[event.eid] = event
self._add_to_lookup_tables(event)
# Remember events that touch the flow table for later race analysis.
if hasattr(event, 'operations'):
for op in event.operations:
if type(op) in [TraceSwitchFlowTableRead, TraceSwitchFlowTableWrite]:
# TODO(jm): Add TraceSwitchFlowTableEntryExpiry events here as well.
# But before we can do that, we need to assign monotonicially increasing
# eids to the expiry events as well in hb_logger
self.events_with_reads_writes.append(event.eid)
break
# Per-type handlers; closures over self.
def _handle_HbAsyncFlowExpiry(event):
if self.data_deps:
self._update_shadow_tables(event)
self._update_edges(event)
def _handle_HbPacketHandle(event):
if self.data_deps:
self._update_shadow_tables(event)
self._update_edges(event)
def _handle_HbPacketSend(event):
self._update_edges(event)
def _handle_HbMessageHandle(event):
if self.data_deps:
self._update_shadow_tables(event)
self._update_edges(event)
self.msg_handles[event.eid] = event
def _handle_HbMessageSend(event):
self._update_edges(event)
self.msgs[event.eid] = event
def _handle_HbHostHandle(event):
self._update_edges(event)
def _handle_HbHostSend(event):
self._update_edges(event)
self.host_sends[event.eid] = event
def _handle_HbControllerHandle(event):
self._update_edges(event)
def _handle_HbControllerSend(event):
self._update_edges(event)
def _handle_default(event):
# Unknown event type: fail loudly rather than silently ignore it.
assert False
pass
handlers = {'HbAsyncFlowExpiry': _handle_HbAsyncFlowExpiry,
'HbPacketHandle': _handle_HbPacketHandle,
'HbPacketSend': _handle_HbPacketSend,
'HbMessageHandle': _handle_HbMessageHandle,
'HbMessageSend': _handle_HbMessageSend,
'HbHostHandle': _handle_HbHostHandle,
'HbHostSend': _handle_HbHostSend,
'HbControllerHandle': _handle_HbControllerHandle,
'HbControllerSend': _handle_HbControllerSend,
}
handlers.get(event.type, _handle_default)(event)
def load_trace(self, filename):
self.g = nx.DiGraph()
self.events_by_id = dict()
unpacked_events = list()
with open(filename) as f:
for line in f:
event = self.unpack_line(line)
if event:
unpacked_events.append(event)
print "Read " + str(len(unpacked_events)) + " events."
for event in unpacked_events:
self.add_event(event)
print "Added " + str(len(list(self.events))) + " events."
def verify_and_minimize_trace(self, filename):
unpacked_events = 0
outfilename = filename + ".min"
with open(filename + ".min", 'w') as fout:
with open(filename) as f:
for line in f:
event = self.unpack_line(line)
if event:
unpacked_events += 1
has_reads_writes = False
if hasattr(event, 'operations'):
for op in event.operations:
if type(op) in [TraceSwitchFlowTableRead, TraceSwitchFlowTableWrite, TraceSwitchFlowTableEntryExpiry]:
has_reads_writes = True
break
if type(event) in [HbAsyncFlowExpiry, HbPacketHandle, HbMessageHandle]:
self._update_shadow_tables(event)
# cleanup operations
if hasattr(event, 'operations'):
for op in event.operations:
if hasattr(op, "flow_table"):
delattr(op, "flow_table")
# cleanup attributes
fout.write(str(event.to_json()) + '\n')
fout.flush()
print "Verified, minimized, and wrote " + str(unpacked_events) + " events to "+str(outfilename)
def store_graph(self, filename="hb.dot", print_packets=False):
    """Annotate the HB graph for drawing and write it out as a DOT file.

    Bug fix: the original prepended self.results_dir to the filename twice
    (once directly and once inside the write_dot call), producing paths
    like 'results/results/hb.dot'. The directory is now joined exactly
    once.
    """
    if self.results_dir is not None:
        filename = os.path.join(self.results_dir, filename)
    self.prep_draw(self.g, print_packets)
    nx.write_dot(self.g, filename)
@staticmethod
def prep_draw(g, print_packets, allow_none_event=False):
"""
Adds proper annotation for the graph to make drawing it more pleasant.

Sets a 'label' and 'shape' attribute on every node (and edge styling for
race/covered relations). With print_packets, packet summaries are
included in node labels. With allow_none_event, nodes without an
attached event get a placeholder label instead of raising.
"""
for eid, data in g.nodes_iter(data=True):
event = data.get('event', None)
if not event and allow_none_event:
label = "N %s" % eid
shape = "oval"
g.node[eid]['label'] = label
g.node[eid]['shape'] = shape
continue
label = "ID %d \\n %s" % (eid, event.type)
if hasattr(event, 'hid'):
label += "\\nHID: " + str(event.hid)
if hasattr(event, 'dpid'):
label += "\\nDPID: " + str(event.dpid)
shape = "oval"
op = None
# Summarize the first flow-table operation found; writes are drawn bold.
if hasattr(event, 'operations'):
for x in event.operations:
if x.type == 'TraceSwitchFlowTableWrite':
op = "FlowTableWrite"
op += "\\nCMD: " + OFP_COMMANDS[x.flow_mod.command]
op += "\\nMatch: " + pretty_match(x.flow_mod.match)
op += "\\nActions: " + str(x.flow_mod.actions)
label += "\\nt: " + repr(x.t)
shape = 'box'
g.node[eid]['style'] = 'bold'
break
if x.type == 'TraceSwitchFlowTableRead':
op = "FlowTableRead"
label += "\\nt: " + repr(x.t)
shape = 'box'
break
if hasattr(event, 'msg') and getattr(event.msg, 'actions', None):
op = "\\nActions: " + str(event.msg.actions)
cmd_type = data.get('cmd_type')
if cmd_type:
label += "\\n%s" % cmd_type
if op:
label += "\\nOp: %s" % op
if hasattr(event, 'msg_type'):
label += "\\nMsgType: " + event.msg_type_str
if getattr(event, 'msg', None):
label += "\\nXID: %d" % event.msg.xid
if hasattr(event, 'in_port'):
label += "\\nInPort: " + str(event.in_port)
# out_port may be a symbolic string (e.g. 'flood'); only print numbers.
if hasattr(event, 'out_port') and not isinstance(event.out_port, basestring):
label += "\\nOut Port: " + str(event.out_port)
if hasattr(event, 'buffer_id'):
label += "\\nBufferId: " + str(event.buffer_id)
if print_packets and hasattr(event, 'packet'):
pkt = pkt_info(event.packet)
label += "\\nPkt: " + pkt
if print_packets and getattr(event, 'msg', None):
if getattr(event.msg, 'data', None):
pkt = pkt_info(ethernet(event.msg.data))
label += "\\nPkt: " + pkt
g.node[eid]['label'] = label
g.node[eid]['shape'] = shape
# Edge styling: harmful races bold red, benign races dotted,
# covered races bold blue.
for src, dst, data in g.edges_iter(data=True):
g.edge[src][dst]['label'] = data['rel']
if data['rel'] == 'race':
if data['harmful']:
g.edge[src][dst]['color'] = 'red'
g.edge[src][dst]['style'] = 'bold'
else:
g.edge[src][dst]['style'] = 'dotted'
elif data['rel'] == 'covered':
g.edge[src][dst]['color'] = 'blue'
g.edge[src][dst]['style'] = 'bold'
def extract_traces(self, g):
    """
    Given HB graph g, return a list of subgraphs, one per HbHostSend event,
    each containing the send and all nodes that happened after it. Nodes
    connected only via time edges and nodes connected after HostHandle are
    excluded.

    Bug fix: the original removed other HbHostSend nodes from the node
    list while iterating that same list, which skips the element directly
    after each removal and could leave stray host sends in the subgraph.
    The list is now filtered without in-place mutation.
    """
    traces = []
    # Sort host sends by eid so the output follows the trace order.
    for eid in sorted(self.host_sends.keys()):
        nodes = list(nx.dfs_preorder_nodes(g, eid))
        # Keep the root send; drop every other HbHostSend.
        nodes = [n for n in nodes
                 if n == eid or not isinstance(g.node[n]['event'], HbHostSend)]
        subg = nx.DiGraph(g.subgraph(nodes), host_send=g.node[eid]['event'])
        traces.append(subg)
    for i, subg in enumerate(traces):
        for src, dst, data in subg.edges(data=True):
            if data['rel'] in ['time', 'race']:
                subg.remove_edge(src, dst)
        # Drop anything no longer reachable from the host send.
        host_send = subg.graph['host_send']
        nodes = nx.dfs_preorder_nodes(subg, host_send.eid)
        traces[i] = nx.DiGraph(subg.subgraph(nodes), host_send=host_send)
    self.packet_traces = traces
    return traces
def store_traces(self, results_dir, print_packets=True, subgraphs=None):
    """Render each packet-trace subgraph to a DOT file under results_dir."""
    if not subgraphs:
        subgraphs = self.extract_traces(self.g)
    for subg in subgraphs:
        send = subg.graph['host_send']
        HappensBeforeGraph.prep_draw(subg, print_packets)
        out_path = "%s/trace_%s_%s_%04d.dot" % (results_dir,
                                                str(send.packet.src),
                                                str(send.packet.dst), send.eid)
        nx.write_dot(subg, out_path)
def get_racing_events(self, trace, ignore_other_traces=True):
    """Return the sorted harmful r/w races that involve events of a trace.

    With ignore_other_traces=True, only races whose read side lies on this
    packet trace are returned; writes racing with reads on other traces
    are reported when those traces are processed.
    """
    # Event ids that take part in any harmful race.
    harmful_eids = set(ev.eid for ev in
                       self.race_detector.racing_events_harmful)
    # Event ids belonging to this packet trace.
    trace_eids = set(trace.nodes())
    # Trace events that are also part of a race.
    racing_eids = sorted(list(trace_eids.intersection(harmful_eids)))
    # Collect the reported races so the partner event of each race is known.
    matching_races = list()
    for race in self.race_detector.races_harmful_with_covered:
        if race.rtype != 'r/w':
            continue
        # i_event is the read, k_event is the write.
        if race.i_event.eid not in racing_eids and race.k_event.eid not in racing_eids:
            continue
        # Logical implication: ignore_other_traces ==> read is on this trace.
        if (not ignore_other_traces) or (race.i_event.eid in racing_eids):
            matching_races.append(race)
    # Races sort first by read then by write; the namedtuple's default
    # ordering already provides that.
    return sorted(matching_races)
def get_all_packet_traces_with_races(self):
    """Return (trace, racing_events) pairs for traces that have races."""
    results = list()
    for trace in self.packet_traces:
        racing = self.get_racing_events(trace, True)
        if len(racing) > 0:
            results.append((trace, racing,))
    return results
def summarize_per_packet_inconsistent(self, traces_races):
    """
    Collapse inconsistent packets that race with the same set of writes,
    reporting only one representative per write set.
    """
    # TODO(jm): This does not take into account the order of the writes or the path the packets took. Do we care?
    kept = {}
    removed = defaultdict(list)
    for trace, races, versions in traces_races:
        # Gather the eids of the write operations involved in the races.
        write_eids = []
        for race in races:
            if isinstance(race.i_op, TraceSwitchFlowTableWrite):
                write_eids.append(race.i_op.eid)
            if isinstance(race.k_op, TraceSwitchFlowTableWrite):
                write_eids.append(race.k_op.eid)
        key = tuple(sorted(write_eids))
        if key in kept:
            removed[key].append((trace, races, versions))
        else:
            kept[key] = (trace, races, versions)
    return kept.values()
def print_racing_packet_trace(self, trace, races, label, show_covered=True):
"""
Write a DOT file showing a packet trace together with its races.

trace is the packet-trace subgraph; races is the list of races to splice
in. For covered races (when show_covered), the HB paths between the two
racing events are copied from the full graph and the race edge is drawn
as 'covered'; otherwise a plain 'race' edge is added.
"""
host_send = trace.graph['host_send']
g = nx.DiGraph(trace, host_send= host_send)
for race in races:
if not g.has_node(race.i_event.eid):
g.add_node(race.i_event.eid, event=race.i_event)
if not g.has_node(race.k_event.eid):
g.add_node(race.k_event.eid, event=race.k_event)
if show_covered and race in self.covered_races:
# Copy every HB path between the two racing events (in both
# directions) from the full graph so the covering is visible.
for path in nx.all_simple_paths(self.g, race.i_event.eid, race.k_event.eid):
for src, dst in zip(path, path[1:]):
g.node[src] = self.g.node[src]
g.node[dst] = self.g.node[dst]
g.add_edge(src, dst, self.g.edge[src][dst])
for path in nx.all_simple_paths(self.g, race.k_event.eid, race.i_event.eid):
for src, dst in zip(path, path[1:]):
g.node[src] = self.g.node[src]
g.node[dst] = self.g.node[dst]
g.add_edge(src, dst, self.g.edge[src][dst])
g.add_edge(race.i_event.eid, race.k_event.eid, rel='covered', harmful=True)
else:
#if not g.has_edge(race.i_event.eid, race.k_event.eid) and not \
#  g.has_edge(race.k_event.eid, race.i_event.eid):
g.add_edge(race.i_event.eid, race.k_event.eid, rel='race', harmful=True)
# NOTE(review): the second argument of prep_draw is the print_packets
# flag; passing the TraceSwitchPacketUpdateBegin class works only because
# it is truthy -- presumably `True` was intended. Confirm and simplify.
self.prep_draw(g, TraceSwitchPacketUpdateBegin)
src = str(host_send.packet.src)
dst = str(host_send.packet.dst)
name = "%s_%s_%s_%s.dot" %(label, src, dst, host_send.eid)
name = os.path.join(self.results_dir, name)
print "Storing packet %s for %s->%s in %s " % (label, src, dst, name)
nx.write_dot(g, name)
def races_graph(self):
  """Build and return a digraph containing only the harmful races."""
  g = nx.DiGraph()
  for rtype, i_event, i_op, k_event, k_op in self.race_detector.races_harmful:
    g.add_node(i_event.eid, event=i_event)
    g.add_node(k_event.eid, event=k_event)
    g.add_edge(i_event.eid, k_event.eid, rel='race', harmful=True)
  return g
def save_races_graph(self, print_pkts=True, name=None):
if not name:
name = "just_races.dot"
graph = self.races_graph()
self.prep_draw(graph, print_pkts)
print "Saving all races graph in", name
nx.write_dot(graph, os.path.join(self.results_dir, name))
def find_covered_races(self):
  """
  Go through events in trace order, add a RaW dependency and then check if
  there are any races that are part of:
  - the set of predecessors of W, and
  - the set of successors of R
  These are now ordered so we can add them to the list.

  Returns a dict mapping each covered race to the (read_eid, write_eid)
  data dependency that covered it. Side effects: adds 'dep_raw' edges to
  self.g and moves covered races from the race detector's harmful list to
  its covered list. Result is memoized in self.covered_races.
  """
  if self.covered_races:
    return self.covered_races
  covered_races = dict()
  data_dep_races = set()       # races removed directly by the RaW edge itself
  time_races = set()           # races already ordered by time-based HB edges
  remaining_harmful_races = set()
  # remove all races that were already removed due to time based rules
  for r in self.race_detector.races_harmful_with_covered:
    if self.has_path(r.i_event.eid, r.k_event.eid, bidirectional=True):
      # race is not a race anymore
      time_races.add(r)
    else:
      # race is still a race and can become covered when adding data deps
      remaining_harmful_races.add(r)
  # check for monotonically increasing eids, i.e. the list must be sorted
  assert all(x <= y for x, y in zip(self.events_with_reads_writes,
                                    self.events_with_reads_writes[1:]))
  for eid in self.events_with_reads_writes:
    event = self.events_by_id[eid]
    dpid = event.dpid
    shadow_table = self.shadow_tables[dpid]
    if hasattr(event, 'operations'):
      # Only events that contain at least one flow-table read can be the
      # "R" side of a RaW dependency.
      has_reads = False
      for op in event.operations:
        if type(op) in [TraceSwitchFlowTableRead]:
          has_reads = True
      if has_reads:
        # add RaW dependencies (HB edge from event containing W -> event containing R)
        for write_eid in shadow_table.data_deps[event.eid]:
          write_event = self.events_by_id[write_eid]
          if self.g.has_edge(write_event.eid, event.eid):
            # An edge may already exist, but only a 'time' edge is expected.
            assert self.g.get_edge_data(write_event.eid, event.eid)['rel'] == 'time'
          else:
            self._add_edge(write_event, event, sanity_check=False, rel='dep_raw')
          # Should we check this after adding *all* dependencies or after each. E.g. for events with a read and a write.
          # includes write_eid itself
          write_succs = set(nx.dfs_preorder_nodes(self.g, write_eid))
          for r in remaining_harmful_races:  # TODO(jm): get rid of this loop here, lots of unnecessary looping
            # is there a path from our write to the the race
            if r.i_event.eid in write_succs or r.k_event.eid in write_succs:
              # ignore races that we just removed using the data dep edge.
              if (r.i_event == event and r.k_event == write_event) or (r.i_event == write_event and r.k_event == event):
                data_dep_races.add(r)
              else:
                # only add a covered race the first time
                if r not in covered_races and r not in data_dep_races:
                  # Bypass the path cache: the graph was just mutated above.
                  if self.has_path(r.i_event.eid, r.k_event.eid, bidirectional=True, use_path_cache=False):
                    # race is not a race anymore
                    self.race_detector._races_harmful.remove(r)
                    self.race_detector.covered_races.append(r)
                    covered_races[r] = (eid, write_eid)
  self.covered_races = covered_races
  return self.covered_races
def _get_versions_for_races(self, races):
  """
  Map each race to the set of versions whose commands contain either of
  the race's two events. ``races`` must be sorted (asserted below).
  """
  assert all(races[i] < races[i+1] for i in xrange(len(races)-1))
  result = defaultdict(set)
  for race in races:
    i_eid = race.i_event.eid
    k_eid = race.k_event.eid
    for version, cmds in self.versions.iteritems():
      if i_eid in cmds or k_eid in cmds:
        result[race].add(version)
  return result
def _is_inconsistent_packet_entry_version(self, trace, race, dpids_affected):
  """
  Return True iff the packet traverses no update-affected switch before
  reaching the racing switch — i.e. the race happens only at the entry
  switch of the update.
  """
  ordered_nodes = nx.dfs_preorder_nodes(trace, trace.graph['host_send'].eid)
  dpids = [getattr(self.g.node[n]['event'], 'dpid', None) for n in ordered_nodes]
  # which switches/nodes does the packet traverse before hitting this 1 uncovered race?
  before_race = dpids[:dpids.index(race.i_event.dpid)]
  visited_dpids = set(d for d in before_race if d is not None)
  return not dpids_affected.intersection(visited_dpids)
def find_per_packet_inconsistent(self, covered_races=None, summarize=True):
  """
  Returns the following sets of packet traces.
  1) all packet traces that race with a write event
  2) all per-packet TRUE inconsistent traces
  3) Covered packet traces (trace with races cannot happen because of HB)
  4) Packet traces with races with first switch on version update
  5) Summarized traces after removing covered traces and trimming traces
     that race with the same writes
  all packet traces = TRUE inconsistent traces + covered + entry switch races
  summarized = all per-packet inconsistent traces - repeated per-packet inconsistent traces

  covered_races: dict of races covered by data dependencies (may be None,
                 meaning no covered races).
  summarize:     if True also deduplicate traces racing with the same writes.
  """
  # Fix: the original crashed with "argument of type 'NoneType' is not
  # iterable" when called with the default covered_races=None.
  if covered_races is None:
    covered_races = {}
  # list of (trace, races), ordered by trace order
  packet_races = self.get_all_packet_traces_with_races()
  inconsistent_packet_traces = []
  consistent_packet_traces_covered = []
  consistent_packet_entry_version = []
  summarized = []
  # Pre-compute the set of dpids each version touches.
  dpids_for_version = {}
  for version, cmds in self.versions.iteritems():
    dpids_for_version[version] = set([getattr(self.g.node[cmd]['event'], 'dpid', None) for cmd in cmds])
  for trace, races in packet_races:
    uncovered_races = [race for race in races if race not in covered_races]
    uncovered_races_dpids = list(set([race.i_event.dpid for race in uncovered_races]))
    versions_for_race = self._get_versions_for_races(uncovered_races)
    racing_versions = sorted(list(set(versions_for_race.keys())))
    # check if all the races are actually covered
    if not uncovered_races:
      consistent_packet_traces_covered.append((trace, races, racing_versions))
    elif len(uncovered_races_dpids) == 1:
      # All uncovered races are on a single switch; check whether that
      # switch is the entry switch of the update for every race.
      is_entry = True
      for race in uncovered_races:
        version = list(versions_for_race[race])[0]
        affected_dpids = dpids_for_version[version]
        is_entry = self._is_inconsistent_packet_entry_version(trace, race, affected_dpids)
        # If only one of the races is not entry then even though the races
        # are one switch, one of them makes this trace inconsistent.
        if not is_entry:
          break
      has_covered = len(races) > len(uncovered_races)
      if is_entry:
        if has_covered:
          consistent_packet_traces_covered.append((trace, races, racing_versions))
        else:
          consistent_packet_entry_version.append((trace, races, racing_versions))
      else:
        inconsistent_packet_traces.append((trace, races, racing_versions))
    else:
      inconsistent_packet_traces.append((trace, races, racing_versions))
  if summarize:
    summarized = self.summarize_per_packet_inconsistent(inconsistent_packet_traces)
  # Sanity check: the three categories partition all racing traces.
  assert len(packet_races) == len(inconsistent_packet_traces) + \
                              len(consistent_packet_entry_version) + \
                              len(consistent_packet_traces_covered)
  return packet_races, inconsistent_packet_traces, \
         consistent_packet_traces_covered, \
         consistent_packet_entry_version, summarized
def find_barrier_replies(self):
  """
  For every OFPT_BARRIER_REPLY message, collect the HbMessageHandle
  events reachable from it. Returns [(reply_msg, sorted_handles), ...]
  with handles de-duplicated and sorted by first-operation time.
  """
  barrier_replies = []
  for eid in self.msgs:
    msg = self.msgs[eid]
    if msg.msg_type_str != 'OFPT_BARRIER_REPLY':
      continue
    # TODO(jm): Are we sure just_mid_iter is correct? What about packets sent
    # out by a PACKET_OUT that then trigger a PACKET_IN -> ... -> BARRIER_REPLY?
    handles = set()
    for src, dst in dfs_edge_filter(self.g, eid, just_mid_iter):
      for node in (src, dst):
        event = self.g.node[node]['event']
        if isinstance(event, HbMessageHandle):
          handles.add(event)
          #self.g.node[node]['cmd_type'] = "Reactive to %d" % eid
    # Get unique and sort by time
    ordered = sorted(handles,
                     key=lambda n: n.operations[0].t if n.operations else 0)
    barrier_replies.append((msg, ordered))
  return barrier_replies
def find_reactive_versions2(self):
  """
  Alternative reactive grouping: walk messages in eid order and attribute
  each reachable HbMessageHandle to the first message that reaches it;
  events already considered are skipped by later messages, making the
  resulting groups pairwise disjoint by construction.

  Returns a list of (message, [HbMessageHandle, ...]) pairs.
  """
  considered = []
  cmds = []
  ordered_msgs = OrderedDict()
  #sorted_msgs = sorted(self.msgs.values(), key=lambda m: m.operations[0].t if getattr(m, 'operations', None) else 0)
  sorted_msgs = sorted(self.msgs.values(), key=lambda m: m.eid)
  for m in sorted_msgs:
    ordered_msgs[m.eid] = m
  for eid in ordered_msgs:
    if self.msgs[eid].msg_type_str == 'OFPT_BARRIER_REPLY':
      continue
    if eid in considered:
      continue
    else:
      considered.append(eid)
    nodes = []
    # TODO(jm): Are we sure just_mid_iter is correct? What about packets sent
    # out by a PACKET_OUT that then trigger a PACKET_IN -> ... -> BARRIER_REPLY?
    #edges = dfs_edge_filter(self.g, eid, just_mid_iter, filter_msg_type='OFPT_PACKET_IN')
    edges = dfs_edge_filter(self.g, eid, just_mid_iter)
    for src, dst in edges:
      src_event = self.g.node[src]['event']
      dst_event = self.g.node[dst]['event']
      if isinstance(dst_event, HbMessageSend):
        considered.append(dst_event.eid)
      if isinstance(src_event, HbMessageHandle) and src_event.eid not in considered:
        nodes.append(src_event)
        self.g.node[src]['cmd_type'] = "Reactive to %d" % eid
      if isinstance(dst_event, HbMessageHandle) and dst_event.eid not in considered:
        nodes.append(dst_event)
        self.g.node[dst]['cmd_type'] = "Reactive to %d" % eid
    # Get unique and sort by time
    nodes = sorted(list(set(nodes)),
                   key=lambda n: n.operations[0].t if n.operations else 0)
    for n in nodes:
      considered.append(n.eid)
    cmds.append((self.msgs[eid], nodes))
  # Sanity check: the groups must be pairwise DISJOINT.
  # Fix: the original assert was missing the ``not`` (cf. the identical
  # check in find_reactive_versions) and fired for any pair of disjoint
  # groups — i.e. whenever there were two or more non-barrier messages.
  for l, (x, i) in enumerate(cmds):
    for k, (y, j) in enumerate(cmds):
      if l == k:
        continue
      assert not set(i).intersection(j), "l %s and k %s" % (l, k)
  return cmds
def find_reactive_versions(self):
  """
  Group reactive commands: for every non-barrier-reply controller message,
  collect the HbMessageHandle events reachable from it (the commands sent
  in reaction to that message).

  Returns a list of (message, [HbMessageHandle, ...]) with handles
  de-duplicated and sorted by first-operation time. Asserts that no
  handle is attributed to more than one message (groups are disjoint).
  """
  cmds = []
  # Fix: ``considered`` was a list with O(n) membership tests and each eid
  # was appended twice; a set with a single add is equivalent and faster.
  considered = set()
  cv = dict()  # eid -> message eid it was attributed to (for the assert message)
  for eid in self.msgs:
    if self.msgs[eid].msg_type_str == 'OFPT_BARRIER_REPLY':
      continue
    nodes = []
    # TODO(jm): Are we sure just_mid_iter is correct? What about packets sent
    # out by a PACKET_OUT that then trigger a PACKET_IN -> ... -> BARRIER_REPLY?
    edges = dfs_edge_filter(self.g, eid, just_mid_iter, filter_msg_type=HbMessageSend)
    for src, dst in edges:
      src_event = self.g.node[src]['event']
      dst_event = self.g.node[dst]['event']
      if isinstance(src_event, HbMessageHandle):
        nodes.append(src_event)
        self.g.node[src]['cmd_type'] = "Reactive to %d" % eid
      if isinstance(dst_event, HbMessageHandle):
        nodes.append(dst_event)
        self.g.node[dst]['cmd_type'] = "Reactive to %d" % eid
    # Get unique and sort by time
    nodes = sorted(list(set(nodes)),
                   key=lambda n: n.operations[0].t if n.operations else 0)
    for n in nodes:
      assert n.eid not in considered, "For event %d at eid %d it was considered at %d" % (n.eid, eid, cv[n.eid])
      considered.add(n.eid)
      cv[n.eid] = eid
    cmds.append((self.msgs[eid], nodes))
  # Sanity check: the reactive groups must be pairwise disjoint.
  for l, (x, i) in enumerate(cmds):
    for k, (y, j) in enumerate(cmds):
      if l == k:
        continue
      assert not set(i).intersection(j), "l %s and k%s" % (l, k)
  return cmds
def find_proactive_cmds(self, reactive_versions=None):
  """
  Proactive commands are all message handles that are NOT part of any
  reactive set; returned sorted by first-operation time.
  """
  # TODO(jm): At the end of the trace, some of the controller instrumentation might not be there, so some of the commands at the very end could be reactive. Cut them off somehow?
  if not reactive_versions:
    reactive_versions = self.find_reactive_versions()
  reactive_eids = set()
  for _msg, cmds in reactive_versions:
    reactive_eids.update(cmd.eid for cmd in cmds)
  proactive_eids = set(self.msg_handles.keys()) - reactive_eids
  proactive = [self.g.node[eid]['event'] for eid in proactive_eids]
  for cmd in proactive:
    self.g.node[cmd.eid]['cmd_type'] = 'Proactive'
  proactive.sort(key=lambda n: n.operations[0].t)
  return proactive
def cluster_cmds(self, cmds):
  """
  Cluster the update commands by the time of their first operation and
  renumber the clusters 0..n-1 in order of their earliest command.
  Stores the result in self.clustered_cmds and returns it.
  """
  # Cluster by time
  from scipy.cluster.hierarchy import fclusterdata
  # TODO(jm): Should we add a setting for the threshold, or use STS rounds instead of time?
  times = [[c.operations[0].t] for c in cmds]
  labels = fclusterdata(times, 0.8, criterion="distance")
  by_label = defaultdict(list)
  for label, cmd in zip(labels, cmds):
    by_label[label].append(cmd)
  # just trying to order the versions
  order = sorted(by_label.keys(), key=lambda l: by_label[l][0].operations[0].t)
  renumbered = dict()
  for idx, label in enumerate(order):
    renumbered[idx] = by_label[label]
  self.clustered_cmds = renumbered
  return renumbered
def find_versions(self):
  """
  Find all versions, reactive or proactive.

  Proactive commands are clustered by time (cluster index -> cmds);
  reactive versions are keyed by the triggering message object. Versions
  linked by a barrier request/reply pair are merged. Result is memoized
  in self.versions and maps version key -> list of command eids.
  """
  if self.versions:
    return self.versions
  reactive = self.find_reactive_versions()
  proactive = self.find_proactive_cmds(reactive)
  self.cluster_cmds(proactive)
  # Consider all proactive and reactive versions
  versions = {}
  for version, events in self.clustered_cmds.iteritems():
    versions[version] = list(set([event.eid for event in events]))
  for pktin, events in reactive:
    versions[pktin] = list(set([event.eid for event in events]))
  # Now merge versions if one contains a response to a barrier request
  # from previous version
  # TODO(jm): Perhaps we should not just consider barrier replies, but also flow removed messages for explicit deletes? Are there more such replies?
  barrier_replies = self.find_barrier_replies()
  replies_by_xid = {}  # (dpid, xid) -> cmds
  replies_by_xid_versions = {}  # (dpid, xid) -> versions
  requests_by_xid = {}  # (dpid, xid) -> version
  # Sort replies by dpid and xid
  for rep, cmds in barrier_replies:
    key = (rep.dpid, rep.msg.xid)
    replies_by_xid[key] = [event.eid for event in cmds]
    replies_by_xid_versions[key] = []
    reactive_cmds = set(replies_by_xid[key])
    # Record every version that shares a command with this reply.
    for v, v_cmds in versions.iteritems():
      if reactive_cmds.intersection(v_cmds):
        replies_by_xid_versions[key].append(v)
  # Sort requests by dpid and xid
  for v, v_cmds in versions.iteritems():
    for v_cmd in v_cmds:
      event = self.g.node[v_cmd]['event']
      if event.msg_type_str == 'OFPT_BARRIER_REQUEST':
        requests_by_xid[(event.dpid, event.msg.xid)] = v
  # Merge: fold every version that answered a barrier request into the
  # version that issued the request.
  for key, version in requests_by_xid.iteritems():
    if version not in versions:
      continue  # already merged
    if key not in replies_by_xid:
      continue
    # NOTE: += mutates versions[version] in place.
    new_cmds = versions[version]
    for v in replies_by_xid_versions[key]:
      if v == version:
        continue  # we already considered the first version
      if v not in versions:
        continue  # already merged
      new_cmds += versions[v]
      del versions[v]
  # Sort cmds by time, just to make it nicer
  for version in versions:
    versions[version].sort(key=lambda x: self.g.node[x]['event'].operations[0].t)
  # Drop versions that ended up empty.
  versions = dict([k, v] for k, v in versions.iteritems() if v)
  self.versions = versions
  return versions
def find_inconsistent_updates(self):
  """
  Try to find if two versions race with each other.

  Returns (racing_versions, racing_versions_tuples, racing_versions_dict):
  - racing_versions: [(v1, v2, (eid1, eid2), (cmds1, cmds2)), ...]
  - racing_versions_tuples: unique {v1, v2} pairs
  - racing_versions_dict: (v1, v2) -> [eids_of_v1, eids_of_v2]
  """
  versions = self.find_versions()
  # TODO(jm): Could we check the races directly instead of creating the ww_races variable?
  racing_versions_tuples = []
  racing_versions_dict = {}
  # Map each event to the events it has a write/write race with.
  ww_races = defaultdict(list)
  for race in self.race_detector.races_harmful_with_covered:
    if race.rtype == 'w/w':
      ww_races[race.i_event.eid].append(race.k_event.eid)
      ww_races[race.k_event.eid].append(race.i_event.eid)
  # Pairs of racing eids where the two sides belong to different versions.
  racing_events = []
  for version, cmds in versions.iteritems():
    for cmd in cmds:
      if cmd in ww_races:
        for other in ww_races[cmd]:
          if other not in cmds:
            racing_events.append((cmd, other))
  racing_versions = []
  for eid1, eid2 in racing_events:
    v1 = None
    v2 = None
    for version, cmds in versions.iteritems():
      if eid1 in cmds:
        v1 = version
      if eid2 in cmds:
        v2 = version
      # Fix: proactive version keys start at 0 (see cluster_cmds), so the
      # old truthiness test ``if v1 and v2`` never broke early for
      # version 0; compare against None explicitly instead.
      if v1 is not None and v2 is not None and v1 != v2:
        break
    racing_versions.append((v1, v2, (eid1, eid2), (versions[v1], versions[v2])))
    if set([v1, v2]) not in racing_versions_tuples:
      racing_versions_tuples.append(set([v1, v2]))
    # Normalize the (v1, v2) key orientation so both directions share one
    # dict entry, then record each racing eid pair once per side.
    ordered_versions = (v1, v2)
    er1 = eid1
    er2 = eid2
    if ordered_versions not in racing_versions_dict:
      ordered_versions = (v2, v1)
      er1 = eid2
      er2 = eid1
    if ordered_versions not in racing_versions_dict:
      racing_versions_dict[ordered_versions] = [[], []]
    if er1 not in racing_versions_dict[ordered_versions][0] and\
        er2 not in racing_versions_dict[ordered_versions][1]:
      racing_versions_dict[ordered_versions][0].append(er1)
      racing_versions_dict[ordered_versions][1].append(er2)
  return racing_versions, racing_versions_tuples, racing_versions_dict
def print_versions(self, versions, selected_versions=[]):
  """
  Pretty-print the commands of each version.

  versions:          version key -> list of command eids
  selected_versions: optional subset of version keys to print (all if
                     empty). NOTE(review): the mutable default [] is only
                     read here, so it is harmless.
  """
  # Printing versions
  if not selected_versions:
    selected_versions = versions.keys()
  for v, cmds in versions.iteritems():
    if v not in selected_versions:
      continue
    print "IN Version", v
    # Reactive versions are keyed by the triggering message object.
    if isinstance(v, HbMessageSend):
      print "React to Msg: ", v.msg_type_str
    for cmd in cmds:
      node = self.g.node[cmd]['event']
      match = ''
      if getattr(node.msg, 'match', None):
        match = node.msg.show().replace('\n', ' ')
      of_cmd = ''
      if hasattr(node.msg, 'command'):
        of_cmd = OFP_COMMANDS[node.msg.command]
      print "\t eid", node.eid, " dpid:", node.dpid, " xid:", node.msg.xid ,\
            " cmd:", node.msg_type_str, of_cmd, ' ',\
            pretty_match(getattr(node.msg, 'match', None)),\
            getattr(node.msg, 'actions', None)
def print_covered_races(self):
  """
  Print every covered race and write a 'covered_races.dot' subgraph that
  contains the racing events, the covering data-dependency events, and
  all HB paths between the racing events.
  """
  print "Covered races:"
  eids = []
  race_edges = []
  nodes_on_path = []
  for r,v in self.covered_races.iteritems():
    # v is the (read_eid, write_eid) pair that covered the race.
    print "Race (r/w): ", r.rtype, r.i_event.eid, r.k_event.eid, ", covered by data dep w -> r: ", v
    eids.append(r.i_event.eid)
    eids.append(r.k_event.eid)
    race_edges.append((r.i_event.eid, r.k_event.eid))
    eids.append(v[0])
    eids.append(v[1])
    # Collect every node on any HB path between the two racing events,
    # in either direction.
    for path in nx.all_simple_paths(self.g, r.i_event.eid, r.k_event.eid):
      nodes_on_path.extend(path)
    for path in nx.all_simple_paths(self.g, r.k_event.eid, r.i_event.eid):
      nodes_on_path.extend(path)
  nodes_on_path = list(set(nodes_on_path))
  sub_nodes = nodes_on_path + eids
  subg = self.g.subgraph(list(set(sub_nodes)))
  # Draw the covered race edges in reverse (k -> i) with rel='covered'.
  for i, k in race_edges:
    subg.add_edge(k, i, rel='covered')
  self.prep_draw(subg, True)
  nx.write_dot(subg, os.path.join(self.results_dir, 'covered_races.dot'))
def racing_versions_graph(self, v1, cmd1, v2, cmd2):
  """
  Build a subgraph showing two racing versions: their commands, a synthetic
  version node per version (real event node for reactive versions, a
  'Proactive<N>' label node for proactive ones), and the harmful race
  edges between commands of the two versions (drawn in both directions).
  """
  nodes = []
  extra_nodes = []
  extra_edges = []
  nodes.extend(cmd1)
  nodes.extend(cmd2)
  # Process v1 first, then v2 (order preserved from the original code).
  for version, cmds in ((v1, cmd1), (v2, cmd2)):
    if hasattr(version, 'eid') and self.g.has_node(version.eid):
      # Reactive version: anchor edges at the triggering event node.
      nodes.append(version.eid)
      for eid in cmds:
        nodes.append(eid)
        extra_edges.append((version.eid, eid))
    else:
      # Proactive version: synthesize a label node.
      vid = 'Proactive%d' % version
      extra_nodes.append(vid)
      for eid in cmds:
        extra_edges.append((vid, eid))
  vg = self.g.subgraph(nodes)
  for n in extra_nodes:
    vg.add_node(n)
  for src, dst in extra_edges:
    vg.add_edge(src, dst, rel='version')
  for rtype, i_event, i_op, k_event, k_op in self.race_detector.races_harmful:
    if i_event.eid in nodes and k_event.eid in nodes:
      vg.add_edge(i_event.eid, k_event.eid, rel='race', harmful=True)
      vg.add_edge(k_event.eid, i_event.eid, rel='race', harmful=True)
  self.prep_draw(vg, True, allow_none_event=True)
  return vg
class Main(object):
  """
  Command-line driver: loads a happens-before trace, runs the race and
  consistency analyses, prints a summary, and writes dot files plus
  results/timings .dat files next to the input trace.
  """
  def __init__(self, filename, print_pkt,
               add_hb_time=True, rw_delta=5, ww_delta=5, filter_rw=False,
               ignore_ethertypes=None, no_race=False, alt_barr=False,
               verbose=True, ignore_first=False, disable_path_cache=False, data_deps=False,
               no_dot_files=False, verify_and_minimize_only=False,
               is_minimized=False):
    # Resolve the trace file; all outputs go into the same directory.
    self.filename = os.path.realpath(filename)
    self.results_dir = os.path.dirname(self.filename)
    self.output_filename = self.results_dir + "/" + "hb.dot"
    self.print_pkt = print_pkt
    self.add_hb_time = add_hb_time
    self.rw_delta = rw_delta
    self.ww_delta = ww_delta
    self.filter_rw = filter_rw
    self.ignore_ethertypes = ignore_ethertypes
    self.no_race = no_race
    self.alt_barr = alt_barr
    self.verbose = verbose
    self.ignore_first = ignore_first
    self.disable_path_cache = disable_path_cache
    self.data_deps = data_deps
    self.no_dot_files = no_dot_files
    self.verify_and_minimize_only = verify_and_minimize_only
    self.is_minimized = is_minimized
  def run(self):
    """Execute the full pipeline: load, detect races, find versions,
    check per-packet/update consistency, then print and persist results."""
    self.graph = HappensBeforeGraph(results_dir=self.results_dir,
                                    add_hb_time=self.add_hb_time,
                                    rw_delta=self.rw_delta,
                                    ww_delta=self.ww_delta,
                                    filter_rw=self.filter_rw,
                                    ignore_ethertypes=self.ignore_ethertypes,
                                    no_race=self.no_race,
                                    alt_barr=self.alt_barr,
                                    disable_path_cache=self.disable_path_cache,
                                    data_deps=self.data_deps,
                                    verify_and_minimize_only=self.verify_and_minimize_only,
                                    is_minimized=self.is_minimized)
    import resource
    # from guppy import hpy
    # import objgraph
    import gc
    #gc.collect()
    #print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    t0 = time.time()
    if self.verify_and_minimize_only:
      self.graph.verify_and_minimize_trace(self.filename)
      #gc.collect()
      #print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    else:
      self.graph.load_trace(self.filename)
      #gc.collect()
      #print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    t1 = time.time()
    self.graph.race_detector.detect_races(verbose=True)
    #gc.collect()
    #print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    self.graph.update_path_cache() # the race detector doesn't do it, so we do it ourself.
    #gc.collect()
    #print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    self.graph.race_detector.print_races(self.verbose)
    #gc.collect()
    #print 'Memory usage: %s (kb)' % resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    t2 = time.time()
    packet_traces = self.graph.extract_traces(self.graph.g)
    t3 = time.time()
    reactive_cmds = self.graph.find_reactive_versions()
    t4 = time.time()
    proactive_cmds = self.graph.find_proactive_cmds(reactive_cmds)
    versions = self.graph.find_versions()
    t5 = time.time()
    # Covered-race detection requires the data-dependency machinery.
    if self.data_deps:
      covered_races = self.graph.find_covered_races()
    else:
      covered_races = dict()
    t6 = time.time()
    packet_races, inconsistent_packet_traces, \
        inconsistent_packet_traces_covered, \
        inconsistent_packet_entry_version, summarized = \
        self.graph.find_per_packet_inconsistent(covered_races, True)
    t7 = time.time()
    racing_versions, racing_versions_tuples, racing_versions_tuples_dict = self.graph.find_inconsistent_updates()
    t8 = time.time()
    if not self.no_dot_files:
      self.graph.store_traces(self.results_dir, print_packets=True, subgraphs=packet_traces)
      print "Saving HB graph to:", self.output_filename
      self.graph.store_graph(self.output_filename, self.print_pkt)
      # Print traces
      for trace, races in packet_races:
        self.graph.print_racing_packet_trace(trace, races, label='incoherent', show_covered=False)
      for trace, races, _ in inconsistent_packet_traces:
        self.graph.print_racing_packet_trace(trace, races, label='incoherent_remaining')
      for trace, races, _ in inconsistent_packet_traces_covered:
        self.graph.print_racing_packet_trace(trace, races, label='covered')
      for trace, races, _ in inconsistent_packet_entry_version:
        self.graph.print_racing_packet_trace(trace, races, label='entry')
      for trace, races, _ in summarized:
        #self.graph.print_racing_packet_trace(trace, races, label='summarized')
        pass
      self.graph.save_races_graph(self.print_pkt)
      # self.graph.print_versions(versions)
      # self.graph.print_covered_races()
    # Gather all counters for the summary and the .dat files.
    num_writes = len(self.graph.race_detector.write_operations)
    num_read = len(self.graph.race_detector.read_operations)
    num_ops = num_writes + num_read
    num_harmful = self.graph.race_detector.total_harmful
    num_commute = self.graph.race_detector.total_commute
    num_races = self.graph.race_detector.total_races
    num_time_filtered_races = self.graph.race_detector.total_time_filtered_races
    num_covered = self.graph.race_detector.total_covered
    num_time_edges = self.graph.race_detector.time_edges_counter
    num_per_pkt_races = len(packet_races)
    num_per_pkt_inconsistent = len(inconsistent_packet_traces)
    num_per_pkt_inconsistent_covered = len(inconsistent_packet_traces_covered)
    num_per_pkt_entry_version_race = len(inconsistent_packet_entry_version)
    num_per_pkt_inconsistent_no_repeat = len(summarized)
    load_time = t1 - t0
    detect_races_time = t2 - t1
    extract_traces_time = t3 - t2
    find_reactive_cmds_time = t4 - t3
    find_proactive_cmds_time = t5 - t4
    find_covered_races_time = t6 - t5
    per_packet_inconsistent_time = t7 - t6
    find_inconsistent_update_time = t8 - t7
    ##### Final time, everything else is just print statements
    t_final = time.time()
    total_time = t_final - t0
    print "\n######## Update isolation violations ########"
    # Iterating the dict yields the (v1, v2) key pairs.
    for counter, (v1, v2) in enumerate(racing_versions_tuples_dict):
      if not self.no_dot_files:
        rvg = self.graph.racing_versions_graph(v1, racing_versions_tuples_dict[(v1, v2)][0], v2, racing_versions_tuples_dict[(v1, v2)][1])
        rvg_path = os.path.join(self.results_dir, 'isolation_violation_%d.dot' % counter)
        print "Saving update isolation violation graph to %s" % rvg_path
        nx.write_dot(rvg, rvg_path)
      # Reactive versions are event objects (have .eid); proactive ones
      # are plain integer cluster ids.
      if hasattr(v1, 'eid'):
        pv1 = "React to event %s, %s" % (v1.eid , getattr(v1, 'msg_type_str', ''))
      else:
        pv1 = "Practive version %d" % v1
      if hasattr(v2, 'eid'):
        pv2 = "React to event %d" % v2.eid
      else:
        pv2 = "Practive version %d" % v2
      print "V1:{}".format(pv1)
      print "\tEventing racing: {}".format(racing_versions_tuples_dict[(v1, v2)][0])
      print "V2:{}".format(pv2)
      print "\tEventing racing: {}".format(racing_versions_tuples_dict[(v1, v2)][1])
      print ""
    print "\n########## Summary ###########"
    print "* Race analysis *"
    print "\tTotal number of events in the trace:", self.graph.g.number_of_nodes()
    print "\tTotal number of events with read operations:", num_read
    print "\tTotal number of events with write operations:", num_writes
    print "\tTotal number of events with read or write operations:", num_ops
    print "\tTotal number of observed races without any filters:", num_races
    print "\tTotal number of commuting races:", num_commute
    print "\tTotal number of races filtered by Time HB edges:", num_time_filtered_races
    print "\tTotal number of races covered by data dependency:", num_covered
    # NOTE(review): divides by num_races — a trace with zero races would
    # raise ZeroDivisionError here.
    print "\tRemaining number of races after applying all enabled filters: %d (%.02f%%)" % (num_harmful, (num_harmful / float(num_races) * 100))
    print "\n\n"
    print "* Properties analysis *"
    print "\tNumber of observed network updates:", len(versions)
    print "\tNumber of update isolation violations:", len(racing_versions_tuples)
    print ""
    print "\tTotal number of packets in the traces:", len(self.graph.host_sends)
    print "\tNumber of packet coherence violations:", len(packet_races)
    print "\tNumber of packet coherence violations filtered due covered races: ", len(inconsistent_packet_traces_covered)
    print "\tNumber of packet coherence but only on the first switch in the update: ", len(inconsistent_packet_entry_version)
    print "\tNumber of packet coherence violations after filtering covered races: ", len(inconsistent_packet_traces)
    #print "\tNumber of packet inconsistencies after trimming repeated races: ", len(summarized)
    #print "\tNumber of packet inconsistent updates: ", len(racing_versions)
    #print "\tNumber of races: ", self.graph.race_detector.total_races
    #print "\tNumber of races filtered by time: ", self.graph.race_detector.total_time_filtered_races
    #print "\tNumber of commuting races: ", len(self.graph.race_detector.races_commute)
    #print "\tNumber of harmful races: ", len(self.graph.race_detector.races_harmful)
    #print "\tNumber of covered races: ", self.graph.race_detector.total_covered
    #print "Number of versions:", len(versions)
    print "* Timing information *"
    print "\tDone. Time elapsed:",total_time,"s"
    print "\tload_trace:", load_time, "s"
    print "\tdetect_races:", detect_races_time, "s"
    print "\textract_traces_time:", extract_traces_time, "s"
    print "\tfind_reactive_cmds_time:", find_reactive_cmds_time, "s"
    print "\tfind_proactive_cmds_time:", find_proactive_cmds_time, "s"
    print "\tfind_covered_races_time:", find_covered_races_time, "s"
    print "\tper_packet_inconsistent_time:", per_packet_inconsistent_time, "s"
    print "\tfind_inconsistent_update_time:", find_inconsistent_update_time, "s"
    #print "print_races:"+(str(t3-t2))+"s"
    #print "store_graph:"+(str(t4-t3))+"s"
    #print "Extracting Packet traces time: "+ (str(t5 - t4)) + "s"
    #print "Finding inconsistent traces time: "+ (str(t6 - t5)) + "s"
    # Printing dat file
    hbt = self.add_hb_time
    rw_delta = self.rw_delta if self.add_hb_time else 'inf'
    ww_delta = self.ww_delta if self.add_hb_time else 'inf'
    # The .dat file names encode the analysis parameters.
    file_name = "results_hbt_%s_altbarr_%s_dep_%s_rw_%s_ww_%s.dat" % (hbt, self.alt_barr, self.data_deps, rw_delta, ww_delta)
    file_name = os.path.join(self.results_dir, file_name)
    timings_file_name = "timings_hbt_%s_altbarr_%s_dep_%s_rw_%s_ww_%s.dat" % (hbt, self.alt_barr, self.data_deps, rw_delta, ww_delta)
    timings_file_name = os.path.join(self.results_dir, timings_file_name)
    def write_general_info_to_file(f):
      # General info (shared CSV header for both .dat files)
      f.write('key,value\n')
      f.write('rw_delta,%s\n' % rw_delta)
      f.write('ww_delta,%s\n' % ww_delta)
      f.write('alt_barr,%s\n' % self.alt_barr)
      f.write('data_deps,%s\n' % self.data_deps)
    with open(file_name, 'w') as f:
      write_general_info_to_file(f)
      # Operations
      f.write('num_events,%d\n' % self.graph.g.number_of_nodes())
      f.write('num_edges,%d\n' % self.graph.g.number_of_edges())
      f.write('num_read,%d\n' % num_read)
      f.write('num_writes,%d\n' % num_writes)
      f.write('num_ops,%d\n' % num_ops)
      # HB time edges
      f.write('num_time_edges,%d\n' % num_time_edges)
      # Races info
      # One last check
      assert num_races == num_commute + num_covered + num_harmful + num_time_filtered_races
      f.write('num_races,%d\n' % num_races)
      f.write('num_harmful,%d\n' % num_harmful)
      f.write('num_commute,%d\n' % num_commute)
      f.write('num_time_filtered_races,%d\n' % num_time_filtered_races)
      f.write('num_covered,%d\n' % num_covered)
      # Inconsistency
      f.write('num_pkts,%d\n' % len(self.graph.host_sends))
      assert len(self.graph.host_sends) >= num_per_pkt_races
      assert num_per_pkt_races == num_per_pkt_inconsistent + num_per_pkt_inconsistent_covered + num_per_pkt_entry_version_race
      f.write('num_per_pkt_races,%d\n' % num_per_pkt_races)
      f.write('num_per_pkt_inconsistent,%d\n' % num_per_pkt_inconsistent)
      f.write('num_per_pkt_inconsistent_covered,%d\n' % num_per_pkt_inconsistent_covered)
      f.write('num_per_pkt_entry_version_race,%d\n' % num_per_pkt_entry_version_race)
      f.write('num_per_pkt_inconsistent_no_repeat,%d\n' % num_per_pkt_inconsistent_no_repeat)
      f.write('num_versions,%d\n' % len(versions))
      f.write('num_racing_versions,%d\n' % len(racing_versions_tuples))
    with open(timings_file_name, 'w') as f:
      write_general_info_to_file(f)
      # Times
      f.write('total_time_sec,%f\n'% total_time)
      f.write('load_time_sec,%f\n' % load_time )
      f.write('detect_races_time_sec,%f\n' % detect_races_time )
      f.write('extract_traces_time_sec,%f\n' % extract_traces_time )
      f.write('find_reactive_cmds_time_sec,%f\n' % find_reactive_cmds_time )
      f.write('find_proactive_cmds_time_sec,%f\n' % find_proactive_cmds_time )
      f.write('find_covered_races_time,%f\n' % find_covered_races_time )
      f.write('per_packet_inconsistent_time_sec,%f\n' % per_packet_inconsistent_time )
      f.write('find_inconsistent_update_time_sec,%f\n' % find_inconsistent_update_time )
def auto_int(x):
    """Parse an integer literal, inferring the base from its prefix.

    Base 0 lets int() accept plain decimal as well as 0x/0o/0b forms,
    which is handy for ether-type arguments such as 0x88cc.
    """
    inferred_base = 0
    return int(x, inferred_base)
if __name__ == '__main__':
    # Sentinel meaning "--time-delta was not supplied".
    # NOTE(review): --time-delta defaults to 2, not empty_delta, so the
    # sentinel comparison below only fires when the user passes 1000000
    # explicitly -- confirm the argparse default was meant to be empty_delta.
    empty_delta = 1000000
    parser = argparse.ArgumentParser()
    parser.add_argument('trace_file',
                        help='Trace file produced by the instrumented sts, usually "hb.json"')
    parser.add_argument('--no-hbt', dest='no_hbt', action='store_true', default=False,
                        help="Don't add HB edges based on time")
    parser.add_argument('--time-delta', dest='delta', default=2, type=int,
                        help="delta time (in secs) for adding HB edges based on time")
    parser.add_argument('--pkt', dest='print_pkt', action='store_true', default=False,
                        help="Print packet headers in the produced dot files")
    parser.add_argument('--rw_delta', dest='rw_delta', default=2, type=int,
                        help="delta time (in secs) for adding HB edges based on time")
    parser.add_argument('--ww_delta', dest='ww_delta', default=2, type=int,
                        help="delta time (in secs) for adding HB edges based on time")
    parser.add_argument('--filter_rw', dest='filter_rw', action='store_true', default=False,
                        help="Filter Read/Write operations with HB relations")
    parser.add_argument('--ignore-ethertypes', dest='ignore_ethertypes', nargs='*',
                        type=auto_int, default=0,
                        help='Ether types to ignore from the graph')
    parser.add_argument('--no-race', dest='no_race', action='store_true', default=False,
                        help="Don't add edge between racing events in the visualized graph")
    parser.add_argument('--alt-barr', dest='alt_barr', action='store_true', default=False,
                        help="Use alternative barrier rules for purely reactive controllers")
    parser.add_argument('-v', dest='verbose', action='store_true', default=False,
                        help="Print all commute and harmful races")
    parser.add_argument('--ignore-first', dest='ignore_first', action='store_true',
                        default=False, help="Ignore the first race for per-packet consistency check")
    parser.add_argument('--disable-path-cache', dest='disable_path_cache', action='store_true',
                        default=False, help="Disable using all_pairs_shortest_path_length() preprocessing.")
    parser.add_argument('--data-deps', dest='data_deps', action='store_true',
                        default=False, help="Use shadow tables for adding data dependency edges between reads/writes.")
    parser.add_argument('--no-dot-files', dest='no_dot_files', action='store_true',
                        default=False, help="Do not write any .dot files to the disk.")
    parser.add_argument('--verify-and-minimize-only', dest='verify_and_minimize_only', action='store_true',
                        default=False, help="Verify the input trace, then write out a minimized version.")
    parser.add_argument('--is-minimized', dest='is_minimized', action='store_true',
                        default=False, help="Process a minimized trace.")
    # TODO(jm): Make option naming consistent (use _ everywhere, not a mixture of - and _).
    args = parser.parse_args()
    # When time-based HB edges are enabled, an explicit generic --time-delta
    # overrides the specific read/write and write/write deltas.
    if not args.no_hbt:
        if args.delta == empty_delta:
            assert args.rw_delta == args.ww_delta
        else:
            args.rw_delta = args.ww_delta = args.delta
    main = Main(args.trace_file, print_pkt=args.print_pkt,
                add_hb_time=not args.no_hbt, rw_delta=args.rw_delta, ww_delta=args.ww_delta,
                filter_rw=args.filter_rw, ignore_ethertypes=args.ignore_ethertypes,
                no_race=args.no_race, alt_barr=args.alt_barr, verbose=args.verbose,
                ignore_first=args.ignore_first, disable_path_cache=args.disable_path_cache,
                data_deps=args.data_deps, no_dot_files=args.no_dot_files,
                verify_and_minimize_only=args.verify_and_minimize_only,
                is_minimized=args.is_minimized)
    main.run()
| codeparrot/github-code-clean |
# Copyright 2010 OpenStack Foundation
# Copyright 2012 University Of Minho
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import copy
import datetime
import errno
import glob
import os
import random
import re
import shutil
import signal
import threading
import time
import uuid
import eventlet
from eventlet import greenthread
import fixtures
from lxml import etree
import mock
from mox3 import mox
from os_brick.initiator import connector
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_service import loopingcall
from oslo_utils import encodeutils
from oslo_utils import fileutils
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import units
from oslo_utils import uuidutils
import six
from six.moves import builtins
from six.moves import range
from nova.api.metadata import base as instance_metadata
from nova.compute import arch
from nova.compute import cpumodel
from nova.compute import manager
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import vm_mode
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova.network import model as network_model
from nova import objects
from nova.objects import fields
from nova.pci import manager as pci_manager
from nova import test
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network
import nova.tests.unit.image.fake
from nova.tests.unit import matchers
from nova.tests.unit.objects import test_pci_device
from nova.tests.unit.objects import test_vcpu_model
from nova.tests.unit.virt.libvirt import fake_imagebackend
from nova.tests.unit.virt.libvirt import fake_libvirt_utils
from nova.tests.unit.virt.libvirt import fakelibvirt
from nova import utils
from nova import version
from nova.virt import block_device as driver_block_device
from nova.virt import configdrive
from nova.virt.disk import api as disk
from nova.virt import driver
from nova.virt import fake
from nova.virt import firewall as base_firewall
from nova.virt import hardware
from nova.virt.image import model as imgmodel
from nova.virt.libvirt import blockinfo
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt import driver as libvirt_driver
from nova.virt.libvirt import firewall
from nova.virt.libvirt import guest as libvirt_guest
from nova.virt.libvirt import host
from nova.virt.libvirt import imagebackend
from nova.virt.libvirt.storage import dmcrypt
from nova.virt.libvirt.storage import lvm
from nova.virt.libvirt.storage import rbd_utils
from nova.virt.libvirt import utils as libvirt_utils
from nova.virt.libvirt.volume import volume as volume_drivers
# Point the driver, host and guest modules at the in-tree fake libvirt
# implementation so no real libvirtd is needed to run these tests.
libvirt_driver.libvirt = fakelibvirt
host.libvirt = fakelibvirt
libvirt_guest.libvirt = fakelibvirt
CONF = cfg.CONF
# Pull in option definitions registered by other modules that these tests
# read via CONF.
CONF.import_opt('compute_manager', 'nova.service')
CONF.import_opt('host', 'nova.netconf')
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('image_cache_subdirectory_name', 'nova.virt.imagecache')
CONF.import_opt('instances_path', 'nova.compute.manager')
# Shorthand used by individual tests to build fake network info.
_fake_network_info = fake_network.fake_get_instance_nw_info
_fake_NodeDevXml = \
{"pci_0000_04_00_3": """
<device>
<name>pci_0000_04_00_3</name>
<parent>pci_0000_00_01_1</parent>
<driver>
<name>igb</name>
</driver>
<capability type='pci'>
<domain>0</domain>
<bus>4</bus>
<slot>0</slot>
<function>3</function>
<product id='0x1521'>I350 Gigabit Network Connection</product>
<vendor id='0x8086'>Intel Corporation</vendor>
<capability type='virt_functions'>
<address domain='0x0000' bus='0x04' slot='0x10' function='0x3'/>
<address domain='0x0000' bus='0x04' slot='0x10' function='0x7'/>
<address domain='0x0000' bus='0x04' slot='0x11' function='0x3'/>
<address domain='0x0000' bus='0x04' slot='0x11' function='0x7'/>
</capability>
</capability>
</device>""",
"pci_0000_04_10_7": """
<device>
<name>pci_0000_04_10_7</name>
<parent>pci_0000_00_01_1</parent>
<driver>
<name>igbvf</name>
</driver>
<capability type='pci'>
<domain>0</domain>
<bus>4</bus>
<slot>16</slot>
<function>7</function>
<product id='0x1520'>I350 Ethernet Controller Virtual Function
</product>
<vendor id='0x8086'>Intel Corporation</vendor>
<capability type='phys_function'>
<address domain='0x0000' bus='0x04' slot='0x00' function='0x3'/>
</capability>
<capability type='virt_functions'>
</capability>
</capability>
</device>""",
"pci_0000_04_11_7": """
<device>
<name>pci_0000_04_11_7</name>
<parent>pci_0000_00_01_1</parent>
<driver>
<name>igbvf</name>
</driver>
<capability type='pci'>
<domain>0</domain>
<bus>4</bus>
<slot>17</slot>
<function>7</function>
<product id='0x1520'>I350 Ethernet Controller Virtual Function
</product>
<vendor id='0x8086'>Intel Corporation</vendor>
<numa node='0'/>
<capability type='phys_function'>
<address domain='0x0000' bus='0x04' slot='0x00' function='0x3'/>
</capability>
<capability type='virt_functions'>
</capability>
</capability>
</device>"""}
# Minimal host CPU description in the dict form tests feed to the driver.
_fake_cpu_info = {
    "arch": "test_arch",
    "model": "test_model",
    "vendor": "test_vendor",
    "topology": {
        "sockets": 1,
        "cores": 8,
        "threads": 16
    },
    "features": ["feature1", "feature2"]
}
def _concurrency(signal, wait, done, target, is_block_dev=False):
    """Fake fetch function used to probe image-cache lock behaviour.

    Fires *signal* on entry, blocks on *wait*, then fires *done*, letting
    the test control exactly when each concurrent fetch proceeds.
    *target* and *is_block_dev* are unused here; presumably they mirror the
    signature expected by the imagebackend cache callback -- confirm.
    """
    signal.send()
    wait.wait()
    done.send()
class FakeVirDomainSnapshot(object):
    """Minimal stand-in for a libvirt virDomainSnapshot handle."""

    def __init__(self, dom=None):
        # Remember the (fake) domain this snapshot belongs to.
        self.dom = dom

    def delete(self, flags):
        """No-op: tests only need the delete call to succeed."""
class FakeVirtDomain(object):
    """Stand-in for a libvirt virDomain with inert lifecycle methods.

    Only the accessors (name/ID/info/XMLDesc/UUIDString) return data; all
    mutating calls are no-ops so tests can drive the driver without a
    hypervisor.
    """

    def __init__(self, fake_xml=None, uuidstr=None, id=None, name=None):
        if uuidstr is None:
            uuidstr = str(uuid.uuid4())
        self.uuidstr = uuidstr
        self.id = id
        self.domname = name
        # Mirrors virDomain.info(): [state, maxMem, memory, ...]; the last
        # two slots are unused by these tests.
        self._info = [power_state.RUNNING, 2048 * units.Mi, 1234 * units.Mi,
                     None, None]
        if fake_xml:
            self._fake_dom_xml = fake_xml
        else:
            # Default single-disk domain description.
            self._fake_dom_xml = """
                <domain type='kvm'>
                    <devices>
                        <disk type='file'>
                            <source file='filename'/>
                        </disk>
                    </devices>
                </domain>
            """

    def name(self):
        if self.domname is None:
            return "fake-domain %s" % self
        else:
            return self.domname

    def ID(self):
        return self.id

    def info(self):
        return self._info

    def create(self):
        pass

    def managedSave(self, *args):
        pass

    def createWithFlags(self, launch_flags):
        pass

    def XMLDesc(self, flags):
        # flags accepted for API compatibility but ignored.
        return self._fake_dom_xml

    def UUIDString(self):
        return self.uuidstr

    def attachDeviceFlags(self, xml, flags):
        pass

    def attachDevice(self, xml):
        pass

    def detachDeviceFlags(self, xml, flags):
        pass

    def snapshotCreateXML(self, xml, flags):
        pass

    def blockCommit(self, disk, base, top, bandwidth=0, flags=0):
        pass

    def blockRebase(self, disk, base, bandwidth=0, flags=0):
        pass

    def blockJobInfo(self, path, flags):
        pass

    def resume(self):
        pass

    def destroy(self):
        pass

    def fsFreeze(self, disks=None, flags=0):
        pass

    def fsThaw(self, disks=None, flags=0):
        pass
class CacheConcurrencyTestCase(test.NoDBTestCase):
    """Verify image-cache fetch serialization under eventlet concurrency."""

    def setUp(self):
        super(CacheConcurrencyTestCase, self).setUp()
        self.flags(instances_path=self.useFixture(fixtures.TempDir()).path)
        # utils.synchronized() will create the lock_path for us if it
        # doesn't already exist. It will also delete it when it's done,
        # which can cause race conditions with the multiple threads we
        # use for tests. So, create the path here so utils.synchronized()
        # won't delete it out from under one of the threads.
        self.lock_path = os.path.join(CONF.instances_path, 'locks')
        fileutils.ensure_tree(self.lock_path)

        def fake_exists(fname):
            # Only the cache base dir and the lock dir "exist".
            basedir = os.path.join(CONF.instances_path,
                                   CONF.image_cache_subdirectory_name)
            if fname == basedir or fname == self.lock_path:
                return True
            return False

        def fake_execute(*args, **kwargs):
            pass

        def fake_extend(image, size, use_cow=False):
            pass

        self.stubs.Set(os.path, 'exists', fake_exists)
        self.stubs.Set(utils, 'execute', fake_execute)
        self.stubs.Set(imagebackend.disk, 'extend', fake_extend)
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.imagebackend.libvirt_utils',
            fake_libvirt_utils))

    def _fake_instance(self, uuid):
        # Lightweight Instance; the `uuid` parameter shadows the stdlib
        # uuid module inside this method.
        return objects.Instance(id=1, uuid=uuid)

    def test_same_fname_concurrency(self):
        # Ensures that fetches of the same fname are serialized.
        uuid = uuidutils.generate_uuid()
        backend = imagebackend.Backend(False)
        wait1 = eventlet.event.Event()
        done1 = eventlet.event.Event()
        sig1 = eventlet.event.Event()
        thr1 = eventlet.spawn(backend.image(self._fake_instance(uuid),
                                            'name').cache,
                              _concurrency, 'fname', None,
                              signal=sig1, wait=wait1, done=done1)
        eventlet.sleep(0)
        # Thread 1 should run before thread 2.
        sig1.wait()
        wait2 = eventlet.event.Event()
        done2 = eventlet.event.Event()
        sig2 = eventlet.event.Event()
        thr2 = eventlet.spawn(backend.image(self._fake_instance(uuid),
                                            'name').cache,
                              _concurrency, 'fname', None,
                              signal=sig2, wait=wait2, done=done2)
        wait2.send()
        eventlet.sleep(0)
        try:
            # While thread 1 holds the lock, thread 2 must not finish.
            self.assertFalse(done2.ready())
        finally:
            wait1.send()
        done1.wait()
        eventlet.sleep(0)
        self.assertTrue(done2.ready())
        # Wait on greenthreads to assert they didn't raise exceptions
        # during execution
        thr1.wait()
        thr2.wait()

    def test_different_fname_concurrency(self):
        # Ensures that two different fname caches are concurrent.
        uuid = uuidutils.generate_uuid()
        backend = imagebackend.Backend(False)
        wait1 = eventlet.event.Event()
        done1 = eventlet.event.Event()
        sig1 = eventlet.event.Event()
        thr1 = eventlet.spawn(backend.image(self._fake_instance(uuid),
                                            'name').cache,
                              _concurrency, 'fname2', None,
                              signal=sig1, wait=wait1, done=done1)
        eventlet.sleep(0)
        # Thread 1 should run before thread 2.
        sig1.wait()
        wait2 = eventlet.event.Event()
        done2 = eventlet.event.Event()
        sig2 = eventlet.event.Event()
        thr2 = eventlet.spawn(backend.image(self._fake_instance(uuid),
                                            'name').cache,
                              _concurrency, 'fname1', None,
                              signal=sig2, wait=wait2, done=done2)
        eventlet.sleep(0)
        # Wait for thread 2 to start.
        sig2.wait()
        wait2.send()
        tries = 0
        while not done2.ready() and tries < 10:
            eventlet.sleep(0)
            tries += 1
        try:
            # Different filenames -> different locks -> thread 2 finishes
            # even though thread 1 is still blocked.
            self.assertTrue(done2.ready())
        finally:
            wait1.send()
            eventlet.sleep(0)
        # Wait on greenthreads to assert they didn't raise exceptions
        # during execution
        thr1.wait()
        thr2.wait()
class FakeVolumeDriver(object):
    """Volume driver double producing a canned network-disk config."""

    def __init__(self, *args, **kwargs):
        pass

    def attach_volume(self, *args):
        pass

    def detach_volume(self, *args):
        pass

    def get_xml(self, *args):
        return ""

    def get_config(self, *args):
        """Connect the volume to a fake device."""
        conf = vconfig.LibvirtConfigGuestDisk()
        conf.source_type = "network"
        # All remaining identifying fields carry the same placeholder.
        for attr in ("source_protocol", "source_name",
                     "target_dev", "target_bus"):
            setattr(conf, attr, "fake")
        return conf

    def connect_volume(self, *args):
        """Connect the volume to a fake device."""
        return self.get_config()
class FakeConfigGuestDisk(object):
    """Disk-config double exposing only the fields tests inspect."""

    def __init__(self, *args, **kwargs):
        # Accept and ignore any constructor arguments, like the real class.
        self.source_type = self.driver_cache = None
class FakeConfigGuest(object):
    """Guest-config double exposing only driver_cache."""

    def __init__(self, *args, **kwargs):
        # Arbitrary constructor arguments are tolerated and dropped.
        self.driver_cache = None
class FakeNodeDevice(object):
    """Node-device double returning a fixed XML description."""

    def __init__(self, fakexml):
        self.xml = fakexml

    def XMLDesc(self, flags):
        # flags is accepted for API compatibility but ignored.
        return self.xml
def _create_test_instance():
    """Return a dict of attributes used to build test Instance objects."""
    flavor = objects.Flavor(memory_mb=2048,
                            swap=0,
                            vcpu_weight=None,
                            root_gb=1,
                            id=2,
                            name=u'm1.small',
                            ephemeral_gb=0,
                            rxtx_factor=1.0,
                            flavorid=u'1',
                            vcpus=1,
                            extra_specs={})
    return dict(
        id=1,
        uuid='32dfcb37-5af1-552b-357c-be8c3aa38310',
        memory_kb='1024000',
        basepath='/some/path',
        bridge_name='br100',
        display_name="Acme webserver",
        vcpus=2,
        project_id='fake',
        bridge='br101',
        image_ref='155d900f-4e14-4e4c-a73d-069cbf4541e6',
        root_gb=10,
        ephemeral_gb=20,
        instance_type_id='5',  # m1.small
        extra_specs={},
        system_metadata={'image_disk_format': 'raw'},
        flavor=flavor,
        new_flavor=None,
        old_flavor=None,
        pci_devices=objects.PciDeviceList(),
        numa_topology=None,
        config_drive=None,
        vm_mode=None,
        kernel_id=None,
        ramdisk_id=None,
        os_type='linux',
        user_id='838a72b0-0d54-4827-8fd6-fb1227633ceb',
        ephemeral_key_uuid=None,
        vcpu_model=None,
        host='fake-host',
    )
class LibvirtConnTestCase(test.NoDBTestCase):
    """Unit tests for the libvirt compute driver."""

    # These tests exercise real lock paths.
    REQUIRES_LOCKING = True

    # Default ephemeral-disk cache name, e.g. 'ephemeral_20_<hash7>'.
    _EPHEMERAL_20_DEFAULT = ('ephemeral_20_%s' %
                             utils.get_hash_str(disk._DEFAULT_FILE_SYSTEM)[:7])
    def setUp(self):
        """Wire up config flags, fake libvirt and a fake image service."""
        super(LibvirtConnTestCase, self).setUp()
        self.flags(fake_call=True)
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.get_admin_context()
        temp_dir = self.useFixture(fixtures.TempDir()).path
        self.flags(instances_path=temp_dir)
        self.flags(snapshots_directory=temp_dir, group='libvirt')
        # Replace the driver's libvirt_utils with the in-tree fake.
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.driver.libvirt_utils',
            fake_libvirt_utils))
        self.flags(sysinfo_serial="hardware", group="libvirt")
        self.useFixture(fixtures.MonkeyPatch(
            'nova.virt.libvirt.imagebackend.libvirt_utils',
            fake_libvirt_utils))

        def fake_extend(image, size, use_cow=False):
            pass

        self.stubs.Set(libvirt_driver.disk, 'extend', fake_extend)
        self.stubs.Set(imagebackend.Image, 'resolve_driver_format',
                       imagebackend.Image._get_driver_format)
        self.useFixture(fakelibvirt.FakeLibvirtFixture())
        self.test_instance = _create_test_instance()
        self.test_image_meta = {
            "disk_format": "raw",
        }
        self.image_service = nova.tests.unit.image.fake.stub_out_image_service(
            self.stubs)
        # Domain XML template used by volume-attachment tests;
        # {device_path} is filled in per test.
        self.device_xml_tmpl = """
            <domain type='kvm'>
              <devices>
                <disk type='block' device='disk'>
                  <driver name='qemu' type='raw' cache='none'/>
                  <source dev='{device_path}'/>
                  <target bus='virtio' dev='vdb'/>
                  <serial>58a84f6d-3f0c-4e19-a0af-eb657b790657</serial>
                  <address type='pci' domain='0x0' bus='0x0' slot='0x04' \
                  function='0x0'/>
                </disk>
              </devices>
            </domain>
            """
def relpath(self, path):
return os.path.relpath(path, CONF.instances_path)
    def tearDown(self):
        # Reset the fake image service so state does not leak between tests.
        nova.tests.unit.image.fake.FakeImageService_reset()
        super(LibvirtConnTestCase, self).tearDown()
    def test_driver_capabilities(self):
        """Driver advertises imagecache/recreate but not same-host migrate."""
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), False)
        self.assertTrue(drvr.capabilities['has_imagecache'],
                        'Driver capabilities for \'has_imagecache\' '
                        'is invalid')
        self.assertTrue(drvr.capabilities['supports_recreate'],
                        'Driver capabilities for \'supports_recreate\' '
                        'is invalid')
        self.assertFalse(drvr.capabilities['supports_migrate_to_same_host'],
                         'Driver capabilities for '
                         '\'supports_migrate_to_same_host\' is invalid')
    def create_fake_libvirt_mock(self, **kwargs):
        """Defining mocks for LibvirtDriver(libvirt is not used)."""
        # A fake libvirt.virConnect
        class FakeLibvirtDriver(object):
            def defineXML(self, xml):
                return FakeVirtDomain()

        # Creating mocks
        volume_driver = ['iscsi=nova.tests.unit.virt.libvirt.test_driver'
                         '.FakeVolumeDriver']
        # NOTE(review): this local name shadows the imported `fake` module
        # for the remainder of the method.
        fake = FakeLibvirtDriver()
        # Customizing above fake if necessary
        for key, val in kwargs.items():
            fake.__setattr__(key, val)
        self.stubs.Set(libvirt_driver.LibvirtDriver, '_conn', fake)
        self.stubs.Set(libvirt_driver.LibvirtDriver, '_get_volume_drivers',
                       lambda x: volume_driver)
        self.stubs.Set(host.Host, 'get_connection', lambda x: fake)
    def fake_lookup(self, instance_name):
        # Domain lookup replacement: any name maps to a fresh fake domain.
        return FakeVirtDomain()
def fake_execute(self, *args, **kwargs):
open(args[-1], "a").close()
def _create_service(self, **kwargs):
service_ref = {'host': kwargs.get('host', 'dummy'),
'disabled': kwargs.get('disabled', False),
'binary': 'nova-compute',
'topic': 'compute',
'report_count': 0}
return objects.Service(**service_ref)
def _get_pause_flag(self, drvr, network_info, power_on=True,
vifs_already_plugged=False):
timeout = CONF.vif_plugging_timeout
events = []
if (drvr._conn_supports_start_paused and
utils.is_neutron() and
not vifs_already_plugged and
power_on and timeout):
events = drvr._get_neutron_events(network_info)
return bool(events)
    def test_public_api_signatures(self):
        """Libvirt driver keeps the base ComputeDriver API signatures."""
        baseinst = driver.ComputeDriver(None)
        inst = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertPublicAPISignatures(baseinst, inst)
    def test_legacy_block_device_info(self):
        """Driver does not require legacy block-device-info format."""
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertFalse(drvr.need_legacy_block_device_info)
    @mock.patch.object(host.Host, "has_min_version")
    def test_min_version_start_ok(self, mock_version):
        """init_host succeeds when libvirt meets the minimum version."""
        mock_version.return_value = True
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        drvr.init_host("dummyhost")
    @mock.patch.object(host.Host, "has_min_version")
    def test_min_version_start_abort(self, mock_version):
        """init_host aborts when libvirt is below the minimum version."""
        mock_version.return_value = False
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.NovaException,
                          drvr.init_host,
                          "dummyhost")
@mock.patch.object(fakelibvirt.Connection, 'getLibVersion',
return_value=utils.convert_version_to_int(
libvirt_driver.NEXT_MIN_LIBVIRT_VERSION) - 1)
@mock.patch.object(libvirt_driver.LOG, 'warning')
def test_next_min_version_deprecation_warning(self, mock_warning,
mock_get_libversion):
# Test that a warning is logged if the libvirt version is less than
# the next required minimum version.
drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
drvr.init_host("dummyhost")
# assert that the next min version is in a warning message
expected_arg = {'version': '0.10.2'}
version_arg_found = False
for call in mock_warning.call_args_list:
if call[0][1] == expected_arg:
version_arg_found = True
break
self.assertTrue(version_arg_found)
@mock.patch.object(fakelibvirt.Connection, 'getLibVersion',
return_value=utils.convert_version_to_int(
libvirt_driver.NEXT_MIN_LIBVIRT_VERSION))
@mock.patch.object(libvirt_driver.LOG, 'warning')
def test_next_min_version_ok(self, mock_warning, mock_get_libversion):
# Test that a warning is not logged if the libvirt version is greater
# than or equal to NEXT_MIN_LIBVIRT_VERSION.
drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
drvr.init_host("dummyhost")
# assert that the next min version is in a warning message
expected_arg = {'version': '0.10.2'}
version_arg_found = False
for call in mock_warning.call_args_list:
if call[0][1] == expected_arg:
version_arg_found = True
break
self.assertFalse(version_arg_found)
    @mock.patch.object(fakelibvirt.Connection, 'getLibVersion',
                       return_value=utils.convert_version_to_int(
                           libvirt_driver.MIN_LIBVIRT_KVM_S390_VERSION) - 1)
    @mock.patch.object(fakelibvirt.Connection, 'getVersion',
                       return_value=utils.convert_version_to_int(
                           libvirt_driver.MIN_QEMU_S390_VERSION))
    @mock.patch.object(arch, "from_host", return_value=arch.S390X)
    def test_min_version_s390_old_libvirt(self, mock_arch,
                                          mock_qemu_version, mock_lv_version):
        """init_host fails on s390x when libvirt is too old."""
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.NovaException,
                          drvr.init_host,
                          "dummyhost")
    @mock.patch.object(fakelibvirt.Connection, 'getLibVersion',
                       return_value=utils.convert_version_to_int(
                           libvirt_driver.MIN_LIBVIRT_KVM_S390_VERSION))
    @mock.patch.object(fakelibvirt.Connection, 'getVersion',
                       return_value=utils.convert_version_to_int(
                           libvirt_driver.MIN_QEMU_S390_VERSION) - 1)
    @mock.patch.object(arch, "from_host", return_value=arch.S390X)
    def test_min_version_s390_old_qemu(self, mock_arch,
                                       mock_qemu_version, mock_lv_version):
        """init_host fails on s390x when QEMU is too old."""
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.NovaException,
                          drvr.init_host,
                          "dummyhost")
    @mock.patch.object(fakelibvirt.Connection, 'getLibVersion',
                       return_value=utils.convert_version_to_int(
                           libvirt_driver.MIN_LIBVIRT_KVM_S390_VERSION))
    @mock.patch.object(fakelibvirt.Connection, 'getVersion',
                       return_value=utils.convert_version_to_int(
                           libvirt_driver.MIN_QEMU_S390_VERSION))
    @mock.patch.object(arch, "from_host", return_value=arch.S390X)
    def test_min_version_s390_ok(self, mock_arch,
                                 mock_qemu_version, mock_lv_version):
        """init_host succeeds on s390x at the exact minimum versions."""
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        drvr.init_host("dummyhost")
    @mock.patch('nova.utils.get_image_from_system_metadata')
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=True)
    @mock.patch('nova.virt.libvirt.host.Host.get_guest')
    def test_set_admin_password(self, mock_get_guest, ver, mock_image):
        """On a linux guest the password is set for the 'root' account."""
        self.flags(virt_type='kvm', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        mock_image.return_value = {"properties": {
            "hw_qemu_guest_agent": "yes"}}
        mock_guest = mock.Mock(spec=libvirt_guest.Guest)
        mock_get_guest.return_value = mock_guest
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        drvr.set_admin_password(instance, "123")
        mock_guest.set_user_password.assert_called_once_with("root", "123")
    @mock.patch('nova.utils.get_image_from_system_metadata')
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=True)
    @mock.patch('nova.virt.libvirt.host.Host.get_guest')
    def test_set_admin_password_windows(self, mock_get_guest, ver, mock_image):
        """On a windows guest the 'Administrator' account is used."""
        self.flags(virt_type='kvm', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        instance.os_type = "windows"
        mock_image.return_value = {"properties": {
            "hw_qemu_guest_agent": "yes"}}
        mock_guest = mock.Mock(spec=libvirt_guest.Guest)
        mock_get_guest.return_value = mock_guest
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        drvr.set_admin_password(instance, "123")
        mock_guest.set_user_password.assert_called_once_with(
            "Administrator", "123")
    @mock.patch('nova.utils.get_image_from_system_metadata')
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=True)
    @mock.patch('nova.virt.libvirt.host.Host.get_guest')
    def test_set_admin_password_image(self, mock_get_guest, ver, mock_image):
        """The image's os_admin_user property overrides the default user."""
        self.flags(virt_type='kvm', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        mock_image.return_value = {"properties": {
            "hw_qemu_guest_agent": "yes",
            "os_admin_user": "foo"
        }}
        mock_guest = mock.Mock(spec=libvirt_guest.Guest)
        mock_get_guest.return_value = mock_guest
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        drvr.set_admin_password(instance, "123")
        mock_guest.set_user_password.assert_called_once_with("foo", "123")
    @mock.patch('nova.utils.get_image_from_system_metadata')
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=False)
    def test_set_admin_password_bad_version(self, mock_svc, mock_image):
        """Setting a password is unsupported on too-old libvirt."""
        self.flags(virt_type='kvm', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        mock_image.return_value = {"properties": {
            "hw_qemu_guest_agent": "yes"}}
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.SetAdminPasswdNotSupported,
                          drvr.set_admin_password, instance, "123")
    @mock.patch('nova.utils.get_image_from_system_metadata')
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=True)
    def test_set_admin_password_bad_hyp(self, mock_svc, mock_image):
        """Setting a password is unsupported on unknown virt types."""
        self.flags(virt_type='foo', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        mock_image.return_value = {"properties": {
            "hw_qemu_guest_agent": "yes"}}
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.SetAdminPasswdNotSupported,
                          drvr.set_admin_password, instance, "123")
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=True)
    def test_set_admin_password_guest_agent_not_running(self, mock_svc):
        """Fails when the image does not enable the QEMU guest agent."""
        self.flags(virt_type='kvm', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.QemuGuestAgentNotEnabled,
                          drvr.set_admin_password, instance, "123")
    @mock.patch('nova.utils.get_image_from_system_metadata')
    @mock.patch.object(host.Host,
                       'has_min_version', return_value=True)
    @mock.patch('nova.virt.libvirt.host.Host.get_guest')
    def test_set_admin_password_error(self, mock_get_guest, ver, mock_image):
        """libvirt errors during password set surface as NovaException."""
        self.flags(virt_type='kvm', group='libvirt')
        instance = objects.Instance(**self.test_instance)
        mock_image.return_value = {"properties": {
            "hw_qemu_guest_agent": "yes"}}
        mock_guest = mock.Mock(spec=libvirt_guest.Guest)
        mock_guest.set_user_password.side_effect = (
            fakelibvirt.libvirtError("error"))
        mock_get_guest.return_value = mock_guest
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.assertRaises(exception.NovaException,
                          drvr.set_admin_password, instance, "123")
    @mock.patch.object(objects.Service, 'get_by_compute_host')
    def test_set_host_enabled_with_disable(self, mock_svc):
        # Tests disabling an enabled host.
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        svc = self._create_service(host='fake-mini')
        mock_svc.return_value = svc
        drvr._set_host_enabled(False)
        self.assertTrue(svc.disabled)
    @mock.patch.object(objects.Service, 'get_by_compute_host')
    def test_set_host_enabled_with_enable(self, mock_svc):
        # Tests enabling a disabled host.
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        svc = self._create_service(disabled=True, host='fake-mini')
        mock_svc.return_value = svc
        drvr._set_host_enabled(True)
        # NOTE(review): the service is expected to REMAIN disabled here --
        # presumably because it was not auto-disabled by the driver; confirm
        # against _set_host_enabled's disabled_reason handling.
        self.assertTrue(svc.disabled)
    @mock.patch.object(objects.Service, 'get_by_compute_host')
    def test_set_host_enabled_with_enable_state_enabled(self, mock_svc):
        # Tests enabling an enabled host.
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        svc = self._create_service(disabled=False, host='fake-mini')
        mock_svc.return_value = svc
        drvr._set_host_enabled(True)
        self.assertFalse(svc.disabled)
    @mock.patch.object(objects.Service, 'get_by_compute_host')
    def test_set_host_enabled_with_disable_state_disabled(self, mock_svc):
        # Tests disabling a disabled host.
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        svc = self._create_service(disabled=True, host='fake-mini')
        mock_svc.return_value = svc
        drvr._set_host_enabled(False)
        self.assertTrue(svc.disabled)
    def test_set_host_enabled_swallows_exceptions(self):
        # Tests that set_host_enabled will swallow exceptions coming from the
        # db_api code so they don't break anything calling it, e.g. the
        # _get_new_connection method.
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        with mock.patch.object(db, 'service_get_by_compute_host') as db_mock:
            # Make db.service_get_by_compute_host raise NovaException; this
            # is more robust than just raising ComputeHostNotFound.
            db_mock.side_effect = exception.NovaException
            drvr._set_host_enabled(False)
    @mock.patch.object(fakelibvirt.virConnect, "nodeDeviceLookupByName")
    def test_prepare_pci_device(self, mock_lookup):
        """PCI device preparation succeeds when node-device lookup works."""
        pci_devices = [dict(hypervisor_name='xxx')]
        self.flags(virt_type='xen', group='libvirt')
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        conn = drvr._host.get_connection()
        mock_lookup.side_effect = lambda x: fakelibvirt.NodeDevice(conn)
        drvr._prepare_pci_devices_for_use(pci_devices)
    @mock.patch.object(fakelibvirt.virConnect, "nodeDeviceLookupByName")
    @mock.patch.object(fakelibvirt.virNodeDevice, "dettach")
    def test_prepare_pci_device_exception(self, mock_detach, mock_lookup):
        """A libvirt dettach failure maps to PciDevicePrepareFailed."""
        pci_devices = [dict(hypervisor_name='xxx',
                            id='id1',
                            instance_uuid='uuid')]
        self.flags(virt_type='xen', group='libvirt')
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        conn = drvr._host.get_connection()
        mock_lookup.side_effect = lambda x: fakelibvirt.NodeDevice(conn)
        mock_detach.side_effect = fakelibvirt.libvirtError("xxxx")
        self.assertRaises(exception.PciDevicePrepareFailed,
                          drvr._prepare_pci_devices_for_use, pci_devices)
    def test_detach_pci_devices_exception(self):
        """Detaching PCI devices fails when hot-unplug is unsupported."""
        pci_devices = [dict(hypervisor_name='xxx',
                            id='id1',
                            instance_uuid='uuid')]
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.mox.StubOutWithMock(host.Host,
                                 'has_min_version')
        # Pretend the host is below the version needed for PCI detach.
        host.Host.has_min_version = lambda x, y: False
        self.assertRaises(exception.PciDeviceDetachFailed,
                          drvr._detach_pci_devices, None, pci_devices)
    def test_detach_pci_devices(self):
        """Successful PCI detach invokes detachDeviceFlags on the domain."""
        fake_domXML1 =\
            """<domain> <devices>
            <disk type='file' device='disk'>
            <driver name='qemu' type='qcow2' cache='none'/>
            <source file='xxx'/>
            <target dev='vda' bus='virtio'/>
            <alias name='virtio-disk0'/>
            <address type='pci' domain='0x0000' bus='0x00'
            slot='0x04' function='0x0'/>
            </disk>
            <hostdev mode="subsystem" type="pci" managed="yes">
            <source>
            <address function="0x1" slot="0x10" domain="0x0000"
             bus="0x04"/>
            </source>
            </hostdev></devices></domain>"""
        # NOTE(review): the address domain '0001' deliberately does not match
        # the '0x0000' hostdev in the XML above, so after the detach call the
        # device no longer appears attached and the wait loop exits -- confirm.
        pci_devices = [dict(hypervisor_name='xxx',
                            id='id1',
                            instance_uuid='uuid',
                            address="0001:04:10:1")]
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        self.mox.StubOutWithMock(host.Host,
                                 'has_min_version')
        host.Host.has_min_version = lambda x, y: True
        self.mox.StubOutWithMock(libvirt_driver.LibvirtDriver,
                                 '_get_guest_pci_device')

        class FakeDev(object):
            def to_xml(self):
                pass

        libvirt_driver.LibvirtDriver._get_guest_pci_device =\
            lambda x, y: FakeDev()

        class FakeDomain(object):
            def detachDeviceFlags(self, xml, flags):
                # Record that the detach was actually requested.
                pci_devices[0]['hypervisor_name'] = 'marked'
                pass

            def XMLDesc(self, flags):
                return fake_domXML1
        guest = libvirt_guest.Guest(FakeDomain())
        drvr._detach_pci_devices(guest, pci_devices)
        self.assertEqual(pci_devices[0]['hypervisor_name'], 'marked')
def test_detach_pci_devices_timeout(self):
    """Detach must fail when the hostdev never leaves the domain XML.

    XMLDesc keeps returning XML that still contains the device being
    detached (address 0000:04:10.1), so the driver should eventually
    give up with PciDeviceDetachFailed.
    """
    # Domain XML still listing the hostdev we are trying to detach.
    fake_domXML1 =\
        """<domain>
        <devices>
        <hostdev mode="subsystem" type="pci" managed="yes">
        <source>
        <address function="0x1" slot="0x10" domain="0x0000" bus="0x04"/>
        </source>
        </hostdev>
        </devices>
        </domain>"""
    pci_devices = [dict(hypervisor_name='xxx',
                        id='id1',
                        instance_uuid='uuid',
                        address="0000:04:10:1")]
    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    self.mox.StubOutWithMock(host.Host,
                             'has_min_version')
    host.Host.has_min_version = lambda x, y: True

    self.mox.StubOutWithMock(libvirt_driver.LibvirtDriver,
                             '_get_guest_pci_device')

    class FakeDev(object):
        def to_xml(self):
            pass

    libvirt_driver.LibvirtDriver._get_guest_pci_device =\
        lambda x, y: FakeDev()

    class FakeDomain(object):
        def detachDeviceFlags(self, xml, flags):
            pass

        def XMLDesc(self, flags):
            return fake_domXML1
    guest = libvirt_guest.Guest(FakeDomain())
    self.assertRaises(exception.PciDeviceDetachFailed,
                      drvr._detach_pci_devices, guest, pci_devices)
@mock.patch.object(connector, 'get_connector_properties')
def test_get_connector(self, fake_get_connector):
    """get_volume_connector returns the os-brick connector properties."""
    expected = {
        'ip': 'fakeip',
        'initiator': 'fake.initiator.iqn',
        'host': 'fakehost',
        'wwpns': ['100010604b019419'],
        'wwnns': ['200010604b019419'],
    }
    self.flags(my_ip=expected['ip'])
    self.flags(host=expected['host'])

    volume = {'id': 'fake'}
    # TODO(walter-boring) add the fake in os-brick
    fake_get_connector.return_value = expected
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    result = driver.get_volume_connector(volume)
    self.assertThat(expected, matchers.DictMatches(result))
@mock.patch.object(connector, 'get_connector_properties')
def test_get_connector_storage_ip(self, fake_get_connector):
    """my_block_storage_ip takes precedence over my_ip for the connector."""
    my_ip = '100.100.100.100'
    storage_ip = '101.101.101.101'
    self.flags(my_block_storage_ip=storage_ip, my_ip=my_ip)

    expected = {'ip': storage_ip}
    # TODO(walter-boring) add the fake in os-brick
    fake_get_connector.return_value = expected
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    result = driver.get_volume_connector({'id': 'fake'})
    self.assertEqual(storage_ip, result['ip'])
def test_lifecycle_event_registration(self):
    """init_host must register the error handler before reading caps.

    Both fakes append a marker to ``calls`` so the relative ordering
    can be asserted afterwards.
    """
    calls = []

    def fake_registerErrorHandler(*args, **kwargs):
        calls.append('fake_registerErrorHandler')

    def fake_get_host_capabilities(**args):
        # Reports an ARMv7 host; the arch itself is irrelevant to the
        # ordering check.
        cpu = vconfig.LibvirtConfigGuestCPU()
        cpu.arch = arch.ARMV7

        caps = vconfig.LibvirtConfigCaps()
        caps.host = vconfig.LibvirtConfigCapsHost()
        caps.host.cpu = cpu
        calls.append('fake_get_host_capabilities')
        return caps

    @mock.patch.object(fakelibvirt, 'registerErrorHandler',
                       side_effect=fake_registerErrorHandler)
    @mock.patch.object(host.Host, "get_capabilities",
                       side_effect=fake_get_host_capabilities)
    def test_init_host(get_host_capabilities, register_error_handler):
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        drvr.init_host("test_host")

    test_init_host()
    # NOTE(dkliban): Will fail if get_host_capabilities is called before
    # registerErrorHandler
    self.assertEqual(['fake_registerErrorHandler',
                      'fake_get_host_capabilities'], calls)
def test_sanitize_log_to_xml(self):
    """Passwords in block device info must be scrubbed before logging."""
    # setup fake data
    data = {'auth_password': 'scrubme'}
    bdm = [{'connection_info': {'data': data}}]
    bdi = {'block_device_mapping': bdm}

    # Tests that the parameters to the _get_guest_xml method
    # are sanitized for passwords when logged.
    def fake_debug(*args, **kwargs):
        # Only inspect the log call that actually carries the bdi data.
        if 'auth_password' in args[0]:
            self.assertNotIn('scrubme', args[0])

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    conf = mock.Mock()
    with contextlib.nested(
        mock.patch.object(libvirt_driver.LOG, 'debug',
                          side_effect=fake_debug),
        mock.patch.object(drvr, '_get_guest_config', return_value=conf)
    ) as (
        debug_mock, conf_mock
    ):
        drvr._get_guest_xml(self.context, self.test_instance,
                            network_info={}, disk_info={},
                            image_meta={}, block_device_info=bdi)
        # we don't care what the log message is, we just want to make sure
        # our stub method is called which asserts the password is scrubbed
        self.assertTrue(debug_mock.called)
@mock.patch.object(time, "time")
def test_get_guest_config(self, time_mock):
    """End-to-end check of the guest config built for a plain guest.

    The wall clock is pinned so the metadata creationTime can be
    asserted exactly; the test then verifies features, memory, vcpus,
    the device list and the Nova metadata element in order.
    """
    time_mock.return_value = 1234567.89

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

    test_instance = copy.deepcopy(self.test_instance)
    test_instance["display_name"] = "purple tomatoes"

    ctxt = context.RequestContext(project_id=123,
                                  project_name="aubergine",
                                  user_id=456,
                                  user_name="pie")

    flavor = objects.Flavor(name='m1.small',
                            memory_mb=6,
                            vcpus=28,
                            root_gb=496,
                            ephemeral_gb=8128,
                            swap=33550336,
                            extra_specs={})
    instance_ref = objects.Instance(**test_instance)
    instance_ref.flavor = flavor

    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)

    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    cfg = drvr._get_guest_config(instance_ref,
                                 _fake_network_info(self.stubs, 1),
                                 image_meta, disk_info,
                                 context=ctxt)

    self.assertEqual(cfg.uuid, instance_ref["uuid"])
    self.assertEqual(2, len(cfg.features))
    self.assertIsInstance(cfg.features[0],
                          vconfig.LibvirtConfigGuestFeatureACPI)
    self.assertIsInstance(cfg.features[1],
                          vconfig.LibvirtConfigGuestFeatureAPIC)
    self.assertEqual(cfg.memory, 6 * units.Ki)
    self.assertEqual(cfg.vcpus, 28)
    self.assertEqual(cfg.os_type, vm_mode.HVM)
    self.assertEqual(cfg.os_boot_dev, ["hd"])
    self.assertIsNone(cfg.os_root)
    # Expected device layout: 3 disks, NIC, 2 serial ports, input,
    # graphics, video, memory balloon.
    self.assertEqual(len(cfg.devices), 10)
    self.assertIsInstance(cfg.devices[0],
                          vconfig.LibvirtConfigGuestDisk)
    self.assertIsInstance(cfg.devices[1],
                          vconfig.LibvirtConfigGuestDisk)
    self.assertIsInstance(cfg.devices[2],
                          vconfig.LibvirtConfigGuestDisk)
    self.assertIsInstance(cfg.devices[3],
                          vconfig.LibvirtConfigGuestInterface)
    self.assertIsInstance(cfg.devices[4],
                          vconfig.LibvirtConfigGuestSerial)
    self.assertIsInstance(cfg.devices[5],
                          vconfig.LibvirtConfigGuestSerial)
    self.assertIsInstance(cfg.devices[6],
                          vconfig.LibvirtConfigGuestInput)
    self.assertIsInstance(cfg.devices[7],
                          vconfig.LibvirtConfigGuestGraphics)
    self.assertIsInstance(cfg.devices[8],
                          vconfig.LibvirtConfigGuestVideo)
    self.assertIsInstance(cfg.devices[9],
                          vconfig.LibvirtConfigMemoryBalloon)
    # Nova metadata must mirror the request context, instance, image
    # and flavor supplied above.
    self.assertEqual(len(cfg.metadata), 1)
    self.assertIsInstance(cfg.metadata[0],
                          vconfig.LibvirtConfigGuestMetaNovaInstance)
    self.assertEqual(version.version_string_with_package(),
                     cfg.metadata[0].package)
    self.assertEqual("purple tomatoes",
                     cfg.metadata[0].name)
    self.assertEqual(1234567.89,
                     cfg.metadata[0].creationTime)
    self.assertEqual("image",
                     cfg.metadata[0].roottype)
    self.assertEqual(str(instance_ref["image_ref"]),
                     cfg.metadata[0].rootid)

    self.assertIsInstance(cfg.metadata[0].owner,
                          vconfig.LibvirtConfigGuestMetaNovaOwner)
    self.assertEqual(456,
                     cfg.metadata[0].owner.userid)
    self.assertEqual("pie",
                     cfg.metadata[0].owner.username)
    self.assertEqual(123,
                     cfg.metadata[0].owner.projectid)
    self.assertEqual("aubergine",
                     cfg.metadata[0].owner.projectname)

    self.assertIsInstance(cfg.metadata[0].flavor,
                          vconfig.LibvirtConfigGuestMetaNovaFlavor)
    self.assertEqual("m1.small",
                     cfg.metadata[0].flavor.name)
    self.assertEqual(6,
                     cfg.metadata[0].flavor.memory)
    self.assertEqual(28,
                     cfg.metadata[0].flavor.vcpus)
    self.assertEqual(496,
                     cfg.metadata[0].flavor.disk)
    self.assertEqual(8128,
                     cfg.metadata[0].flavor.ephemeral)
    self.assertEqual(33550336,
                     cfg.metadata[0].flavor.swap)
def test_get_guest_config_lxc(self):
    """LXC guests run in EXE mode with filesys/interface/console devices."""
    self.flags(virt_type='lxc', group='libvirt')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 1),
                                   image_meta, {'mapping': {}})
    self.assertEqual(instance_ref["uuid"], cfg.uuid)
    self.assertEqual(2 * units.Mi, cfg.memory)
    self.assertEqual(1, cfg.vcpus)
    self.assertEqual(vm_mode.EXE, cfg.os_type)
    self.assertEqual("/sbin/init", cfg.os_init_path)
    self.assertEqual("console=tty0 console=ttyS0", cfg.os_cmdline)
    self.assertIsNone(cfg.os_root)
    # Exactly these device types, in this order.
    expected_devices = [vconfig.LibvirtConfigGuestFilesys,
                        vconfig.LibvirtConfigGuestInterface,
                        vconfig.LibvirtConfigGuestConsole]
    self.assertEqual(len(expected_devices), len(cfg.devices))
    for device, device_class in zip(cfg.devices, expected_devices):
        self.assertIsInstance(device, device_class)
def test_get_guest_config_lxc_with_id_maps(self):
    """LXC guests with uid/gid map flags also get idmap config entries."""
    self.flags(virt_type='lxc', group='libvirt')
    self.flags(uid_maps=['0:1000:100'], group='libvirt')
    self.flags(gid_maps=['0:1000:100'], group='libvirt')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 1),
                                   image_meta, {'mapping': {}})
    self.assertEqual(instance_ref["uuid"], cfg.uuid)
    self.assertEqual(2 * units.Mi, cfg.memory)
    self.assertEqual(1, cfg.vcpus)
    self.assertEqual(vm_mode.EXE, cfg.os_type)
    self.assertEqual("/sbin/init", cfg.os_init_path)
    self.assertEqual("console=tty0 console=ttyS0", cfg.os_cmdline)
    self.assertIsNone(cfg.os_root)
    # Same device layout as the plain LXC test...
    expected_devices = [vconfig.LibvirtConfigGuestFilesys,
                        vconfig.LibvirtConfigGuestInterface,
                        vconfig.LibvirtConfigGuestConsole]
    self.assertEqual(len(expected_devices), len(cfg.devices))
    for device, device_class in zip(cfg.devices, expected_devices):
        self.assertIsInstance(device, device_class)
    # ...plus one UID map and one GID map from the flags above.
    expected_idmaps = [vconfig.LibvirtConfigGuestUIDMap,
                       vconfig.LibvirtConfigGuestGIDMap]
    self.assertEqual(len(expected_idmaps), len(cfg.idmaps))
    for idmap, idmap_class in zip(cfg.idmaps, expected_idmaps):
        self.assertIsInstance(idmap, idmap_class)
def test_get_guest_config_numa_host_instance_fits(self):
    """Instance fitting in one host cell: no pinning or NUMA config."""
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=1, vcpus=2, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    with contextlib.nested(
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps)):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertIsNone(cfg.cpuset)
        self.assertEqual(0, len(cfg.cputune.vcpupin))
        self.assertIsNone(cfg.cpu.numa)
def test_get_guest_config_numa_host_instance_no_fit(self):
    """Instance too big for any host cell: falls back to the pin set.

    With the vcpu_pin_set restricted to CPU 3 the config should use
    that cpuset directly and not pick a cell at random.
    """
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=4096, vcpus=4, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    with contextlib.nested(
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps),
            mock.patch.object(
                hardware, 'get_vcpu_pin_set', return_value=set([3])),
            mock.patch.object(random, 'choice')
        ) as (get_host_cap_mock,
              get_vcpu_pin_set_mock, choice_mock):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        # No random cell selection should have happened.
        self.assertFalse(choice_mock.called)
        self.assertEqual(set([3]), cfg.cpuset)
        self.assertEqual(0, len(cfg.cputune.vcpupin))
        self.assertIsNone(cfg.cpu.numa)
def _test_get_guest_memory_backing_config(
        self, host_topology, inst_topology, numatune):
    """Run _get_guest_memory_backing_config against a canned host topology."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    topology_patch = mock.patch.object(driver, "_get_host_numa_topology",
                                       return_value=host_topology)
    with topology_patch:
        return driver._get_guest_memory_backing_config(
            inst_topology, numatune)
@mock.patch.object(host.Host,
                   'has_min_version', return_value=True)
def test_get_guest_memory_backing_config_large_success(self, mock_version):
    """A 2M pagesize request yields a hugepages entry on guest node 0."""
    # Host cell 3 offers 4K, 2M and 1G pages.
    host_topology = objects.NUMATopology(
        cells=[
            objects.NUMACell(
                id=3, cpuset=set([1]), memory=1024, mempages=[
                    objects.NUMAPagesTopology(size_kb=4, total=2000,
                                              used=0),
                    objects.NUMAPagesTopology(size_kb=2048, total=512,
                                              used=0),
                    objects.NUMAPagesTopology(size_kb=1048576, total=0,
                                              used=0),
                ])])
    inst_topology = objects.InstanceNUMATopology(cells=[
        objects.InstanceNUMACell(
            id=3, cpuset=set([0, 1]), memory=1024, pagesize=2048)])

    numa_tune = vconfig.LibvirtConfigGuestNUMATune()
    numa_tune.memnodes = [vconfig.LibvirtConfigGuestNUMATuneMemNode()]
    numa_tune.memnodes[0].cellid = 0
    numa_tune.memnodes[0].nodeset = [3]

    result = self._test_get_guest_memory_backing_config(
        host_topology, inst_topology, numa_tune)
    self.assertEqual(1, len(result.hugepages))
    self.assertEqual(2048, result.hugepages[0].size_kb)
    self.assertEqual([0], result.hugepages[0].nodeset)
@mock.patch.object(host.Host,
                   'has_min_version', return_value=True)
def test_get_guest_memory_backing_config_smallest(self, mock_version):
    """The smallest (4K) pagesize needs no explicit memory backing."""
    host_topology = objects.NUMATopology(
        cells=[
            objects.NUMACell(
                id=3, cpuset=set([1]), memory=1024, mempages=[
                    objects.NUMAPagesTopology(size_kb=4, total=2000,
                                              used=0),
                    objects.NUMAPagesTopology(size_kb=2048, total=512,
                                              used=0),
                    objects.NUMAPagesTopology(size_kb=1048576, total=0,
                                              used=0),
                ])])
    inst_topology = objects.InstanceNUMATopology(cells=[
        objects.InstanceNUMACell(
            id=3, cpuset=set([0, 1]), memory=1024, pagesize=4)])

    numa_tune = vconfig.LibvirtConfigGuestNUMATune()
    numa_tune.memnodes = [vconfig.LibvirtConfigGuestNUMATuneMemNode()]
    numa_tune.memnodes[0].cellid = 0
    numa_tune.memnodes[0].nodeset = [3]

    result = self._test_get_guest_memory_backing_config(
        host_topology, inst_topology, numa_tune)
    self.assertIsNone(result)
def test_get_guest_config_numa_host_instance_pci_no_numa_info(self):
    """A PCI device without NUMA affinity leaves the pin-set cpuset in use."""
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=1, vcpus=2, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()

    conn = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    # PCI device explicitly lacking NUMA node information.
    pci_device_info = dict(test_pci_device.fake_db_dev)
    pci_device_info.update(compute_node_id=1,
                           label='fake',
                           status=fields.PciDeviceStatus.AVAILABLE,
                           address='0000:00:00.1',
                           instance_uuid=None,
                           request_id=None,
                           extra_info={},
                           numa_node=None)
    pci_device = objects.PciDevice(**pci_device_info)

    with contextlib.nested(
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(
                host.Host, "get_capabilities", return_value=caps),
            mock.patch.object(
                hardware, 'get_vcpu_pin_set', return_value=set([3])),
            mock.patch.object(host.Host, 'get_online_cpus',
                              return_value=set(range(8))),
            mock.patch.object(pci_manager, "get_instance_pci_devs",
                              return_value=[pci_device])):
        cfg = conn._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertEqual(set([3]), cfg.cpuset)
        self.assertEqual(0, len(cfg.cputune.vcpupin))
        self.assertIsNone(cfg.cpu.numa)
def test_get_guest_config_numa_host_instance_2pci_no_fit(self):
    """Two PCI devices on different NUMA nodes with a too-big instance.

    The instance cannot fit a single cell and the devices disagree on
    affinity, so no random cell is chosen and the vcpu_pin_set cpuset
    is used as-is.
    """
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=4096, vcpus=4, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()

    conn = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    # First device on NUMA node 1, second on node 0.
    pci_device_info = dict(test_pci_device.fake_db_dev)
    pci_device_info.update(compute_node_id=1,
                           label='fake',
                           status=fields.PciDeviceStatus.AVAILABLE,
                           address='0000:00:00.1',
                           instance_uuid=None,
                           request_id=None,
                           extra_info={},
                           numa_node=1)
    pci_device = objects.PciDevice(**pci_device_info)
    pci_device_info.update(numa_node=0, address='0000:00:00.2')
    pci_device2 = objects.PciDevice(**pci_device_info)
    with contextlib.nested(
            mock.patch.object(
                host.Host, "get_capabilities", return_value=caps),
            mock.patch.object(
                hardware, 'get_vcpu_pin_set', return_value=set([3])),
            mock.patch.object(random, 'choice'),
            mock.patch.object(pci_manager, "get_instance_pci_devs",
                              return_value=[pci_device, pci_device2])
        ) as (get_host_cap_mock,
              get_vcpu_pin_set_mock, choice_mock, pci_mock):
        cfg = conn._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertFalse(choice_mock.called)
        self.assertEqual(set([3]), cfg.cpuset)
        self.assertEqual(0, len(cfg.cputune.vcpupin))
        self.assertIsNone(cfg.cpu.numa)
@mock.patch.object(fakelibvirt.Connection, 'getType')
@mock.patch.object(fakelibvirt.Connection, 'getVersion')
@mock.patch.object(fakelibvirt.Connection, 'getLibVersion')
@mock.patch.object(host.Host, 'get_capabilities')
@mock.patch.object(libvirt_driver.LibvirtDriver, '_set_host_enabled')
def _test_get_guest_config_numa_unsupported(self, fake_lib_version,
                                            fake_version, fake_type,
                                            fake_arch, exception_class,
                                            pagesize, mock_host,
                                            mock_caps, mock_lib_version,
                                            mock_version, mock_type):
    """Assert exception_class is raised when building a NUMA guest config.

    The first six parameters are supplied by callers; the mock_*
    parameters are injected by the decorator stack bottom-up:
    mock_host is _set_host_enabled, mock_caps is get_capabilities,
    then getLibVersion, getVersion and getType.
    """
    instance_topology = objects.InstanceNUMATopology(
        cells=[objects.InstanceNUMACell(
            id=0, cpuset=set([0]),
            memory=1024, pagesize=pagesize)])
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref.numa_topology = instance_topology
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=1, vcpus=2, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = fake_arch
    caps.host.topology = self._fake_caps_numa_topology()

    mock_type.return_value = fake_type
    mock_version.return_value = fake_version
    mock_lib_version.return_value = fake_lib_version
    mock_caps.return_value = caps

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    self.assertRaises(exception_class,
                      drvr._get_guest_config,
                      instance_ref, [],
                      image_meta, disk_info)
def test_get_guest_config_numa_old_version_libvirt(self):
    """NUMA topology is rejected when libvirt predates the minimum."""
    self.flags(virt_type='kvm', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.MIN_LIBVIRT_NUMA_VERSION) - 1
    qemu_version = utils.convert_version_to_int(
        libvirt_driver.MIN_QEMU_NUMA_HUGEPAGE_VERSION)
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, qemu_version, host.HV_DRIVER_QEMU,
        arch.X86_64, exception.NUMATopologyUnsupported, None)
def test_get_guest_config_numa_bad_version_libvirt(self):
    """NUMA topology is rejected on a known-broken libvirt version."""
    self.flags(virt_type='kvm', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.BAD_LIBVIRT_NUMA_VERSIONS[0])
    qemu_version = utils.convert_version_to_int(
        libvirt_driver.MIN_QEMU_NUMA_HUGEPAGE_VERSION)
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, qemu_version, host.HV_DRIVER_QEMU,
        arch.X86_64, exception.NUMATopologyUnsupported, None)
@mock.patch.object(libvirt_driver.LOG, 'warn')
def test_has_numa_support_bad_version_libvirt_log(self, mock_warn):
    # Tests that a warning is logged once and only once when there is a bad
    # BAD_LIBVIRT_NUMA_VERSIONS detected.
    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    self.assertFalse(hasattr(drvr, '_bad_libvirt_numa_version_warn'))
    with mock.patch.object(drvr._host, 'has_version', return_value=True):
        for i in xrange(2):
            self.assertFalse(drvr._has_numa_support())
        # The flag is set after the first call so the second call must
        # not log again.
        self.assertTrue(drvr._bad_libvirt_numa_version_warn)
        self.assertEqual(1, mock_warn.call_count)
        # assert the version is logged properly
        self.assertEqual('1.2.9.2', mock_warn.call_args[0][1])
def test_get_guest_config_numa_old_version_qemu(self):
    """NUMA topology is rejected when QEMU predates the minimum."""
    self.flags(virt_type='kvm', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.MIN_LIBVIRT_NUMA_VERSION)
    qemu_version = utils.convert_version_to_int(
        libvirt_driver.MIN_QEMU_NUMA_HUGEPAGE_VERSION) - 1
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, qemu_version, host.HV_DRIVER_QEMU,
        arch.X86_64, exception.NUMATopologyUnsupported, None)
def test_get_guest_config_numa_other_arch_qemu(self):
    """NUMA topology is rejected on an unsupported architecture (PPC64)."""
    self.flags(virt_type='kvm', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.MIN_LIBVIRT_NUMA_VERSION)
    qemu_version = utils.convert_version_to_int(
        libvirt_driver.MIN_QEMU_NUMA_HUGEPAGE_VERSION)
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, qemu_version, host.HV_DRIVER_QEMU,
        arch.PPC64, exception.NUMATopologyUnsupported, None)
def test_get_guest_config_numa_xen(self):
    """NUMA topology is rejected for the Xen hypervisor driver."""
    self.flags(virt_type='xen', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.MIN_LIBVIRT_NUMA_VERSION)
    hv_version = utils.convert_version_to_int((4, 5, 0))
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, hv_version, 'XEN',
        arch.X86_64, exception.NUMATopologyUnsupported, None)
def test_get_guest_config_numa_old_pages_libvirt(self):
    """Hugepages are rejected when libvirt predates hugepage support."""
    self.flags(virt_type='kvm', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.MIN_LIBVIRT_HUGEPAGE_VERSION) - 1
    qemu_version = utils.convert_version_to_int(
        libvirt_driver.MIN_QEMU_NUMA_HUGEPAGE_VERSION)
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, qemu_version, host.HV_DRIVER_QEMU,
        arch.X86_64, exception.MemoryPagesUnsupported, 2048)
def test_get_guest_config_numa_old_pages_qemu(self):
    """Hugepages are rejected when QEMU predates hugepage support."""
    self.flags(virt_type='kvm', group='libvirt')
    libvirt_version = utils.convert_version_to_int(
        libvirt_driver.MIN_LIBVIRT_HUGEPAGE_VERSION)
    qemu_version = utils.convert_version_to_int(
        libvirt_driver.MIN_QEMU_NUMA_HUGEPAGE_VERSION) - 1
    self._test_get_guest_config_numa_unsupported(
        libvirt_version, qemu_version, host.HV_DRIVER_QEMU,
        arch.X86_64, exception.NUMATopologyUnsupported, 2048)
def test_get_guest_config_numa_host_instance_fit_w_cpu_pinset(self):
    """Instance fits a cell but vcpu_pin_set limits the usable CPUs."""
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=1024, vcpus=2, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology(kb_mem=4194304)

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    with contextlib.nested(
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps),
            mock.patch.object(
                hardware, 'get_vcpu_pin_set', return_value=set([2, 3])),
            mock.patch.object(host.Host, 'get_online_cpus',
                              return_value=set(range(8)))
        ) as (has_min_version_mock, get_host_cap_mock,
              get_vcpu_pin_set_mock, get_online_cpus_mock):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        # NOTE(ndipanov): we make sure that pin_set was taken into account
        # when choosing viable cells
        self.assertEqual(set([2, 3]), cfg.cpuset)
        self.assertEqual(0, len(cfg.cputune.vcpupin))
        self.assertIsNone(cfg.cpu.numa)
def test_get_guest_config_non_numa_host_instance_topo(self):
    """Instance has a NUMA topology but the host reports none.

    The guest still gets cpu.numa cells mirroring the instance
    topology, but no numatune and no pinning.
    """
    instance_topology = objects.InstanceNUMATopology(
        cells=[objects.InstanceNUMACell(
            id=0, cpuset=set([0]), memory=1024),
            objects.InstanceNUMACell(
                id=1, cpuset=set([2]), memory=1024)])
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref.numa_topology = instance_topology
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=2048, vcpus=2, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    # Host advertises no NUMA topology at all.
    caps.host.topology = None

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    with contextlib.nested(
            mock.patch.object(
                objects.InstanceNUMATopology, "get_by_instance_uuid",
                return_value=instance_topology),
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps)):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertIsNone(cfg.cpuset)
        self.assertEqual(0, len(cfg.cputune.vcpupin))
        self.assertIsNone(cfg.numatune)
        self.assertIsNotNone(cfg.cpu.numa)
        for instance_cell, numa_cfg_cell in zip(
                instance_topology.cells, cfg.cpu.numa.cells):
            self.assertEqual(instance_cell.id, numa_cfg_cell.id)
            self.assertEqual(instance_cell.cpuset, numa_cfg_cell.cpus)
            self.assertEqual(instance_cell.memory * units.Ki,
                             numa_cfg_cell.memory)
def test_get_guest_config_numa_host_instance_topo(self):
    """Full NUMA fit: vcpupin, emulatorpin and numatune are generated.

    The vcpu_pin_set restricts usable host CPUs to {2, 3, 4, 5}, so
    each guest cell's vcpus must be pinned within that subset.
    """
    instance_topology = objects.InstanceNUMATopology(
        cells=[objects.InstanceNUMACell(
            id=1, cpuset=set([0, 1]), memory=1024, pagesize=None),
            objects.InstanceNUMACell(
                id=2, cpuset=set([2, 3]), memory=1024,
                pagesize=None)])
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref.numa_topology = instance_topology
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=2048, vcpus=4, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    with contextlib.nested(
            mock.patch.object(
                objects.InstanceNUMATopology, "get_by_instance_uuid",
                return_value=instance_topology),
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps),
            mock.patch.object(
                hardware, 'get_vcpu_pin_set',
                return_value=set([2, 3, 4, 5])),
            mock.patch.object(host.Host, 'get_online_cpus',
                              return_value=set(range(8))),
            ):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertIsNone(cfg.cpuset)
        # Test that the pinning is correct and limited to allowed only
        self.assertEqual(0, cfg.cputune.vcpupin[0].id)
        self.assertEqual(set([2, 3]), cfg.cputune.vcpupin[0].cpuset)
        self.assertEqual(1, cfg.cputune.vcpupin[1].id)
        self.assertEqual(set([2, 3]), cfg.cputune.vcpupin[1].cpuset)
        self.assertEqual(2, cfg.cputune.vcpupin[2].id)
        self.assertEqual(set([4, 5]), cfg.cputune.vcpupin[2].cpuset)
        self.assertEqual(3, cfg.cputune.vcpupin[3].id)
        self.assertEqual(set([4, 5]), cfg.cputune.vcpupin[3].cpuset)
        self.assertIsNotNone(cfg.cpu.numa)

        # The emulator may float over the whole allowed pin set.
        self.assertIsInstance(cfg.cputune.emulatorpin,
                              vconfig.LibvirtConfigGuestCPUTuneEmulatorPin)
        self.assertEqual(set([2, 3, 4, 5]), cfg.cputune.emulatorpin.cpuset)

        for instance_cell, numa_cfg_cell, index in zip(
                instance_topology.cells,
                cfg.cpu.numa.cells,
                range(len(instance_topology.cells))):
            self.assertEqual(index, numa_cfg_cell.id)
            self.assertEqual(instance_cell.cpuset, numa_cfg_cell.cpus)
            self.assertEqual(instance_cell.memory * units.Ki,
                             numa_cfg_cell.memory)

        allnodes = [cell.id for cell in instance_topology.cells]
        self.assertEqual(allnodes, cfg.numatune.memory.nodeset)
        self.assertEqual("strict", cfg.numatune.memory.mode)

        for instance_cell, memnode, index in zip(
                instance_topology.cells,
                cfg.numatune.memnodes,
                range(len(instance_topology.cells))):
            self.assertEqual(index, memnode.cellid)
            self.assertEqual([instance_cell.id], memnode.nodeset)
            self.assertEqual("strict", memnode.mode)
def test_get_guest_config_numa_host_instance_topo_reordered(self):
    """Instance cells listed out of host order still pin correctly.

    Guest cell ids are renumbered sequentially (0, 1) while the
    vcpupin cpusets follow the original host cells (3 then 0).
    """
    instance_topology = objects.InstanceNUMATopology(
        cells=[objects.InstanceNUMACell(
            id=3, cpuset=set([0, 1]), memory=1024),
            objects.InstanceNUMACell(
                id=0, cpuset=set([2, 3]), memory=1024)])
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref.numa_topology = instance_topology
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=2048, vcpus=4, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor

    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()

    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)

    with contextlib.nested(
            mock.patch.object(
                objects.InstanceNUMATopology, "get_by_instance_uuid",
                return_value=instance_topology),
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps),
            mock.patch.object(host.Host, 'get_online_cpus',
                              return_value=set(range(8))),
            ):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertIsNone(cfg.cpuset)
        # Test that the pinning is correct and limited to allowed only
        self.assertEqual(0, cfg.cputune.vcpupin[0].id)
        self.assertEqual(set([6, 7]), cfg.cputune.vcpupin[0].cpuset)
        self.assertEqual(1, cfg.cputune.vcpupin[1].id)
        self.assertEqual(set([6, 7]), cfg.cputune.vcpupin[1].cpuset)
        self.assertEqual(2, cfg.cputune.vcpupin[2].id)
        self.assertEqual(set([0, 1]), cfg.cputune.vcpupin[2].cpuset)
        self.assertEqual(3, cfg.cputune.vcpupin[3].id)
        self.assertEqual(set([0, 1]), cfg.cputune.vcpupin[3].cpuset)
        self.assertIsNotNone(cfg.cpu.numa)

        self.assertIsInstance(cfg.cputune.emulatorpin,
                              vconfig.LibvirtConfigGuestCPUTuneEmulatorPin)
        self.assertEqual(set([0, 1, 6, 7]), cfg.cputune.emulatorpin.cpuset)

        for index, (instance_cell, numa_cfg_cell) in enumerate(zip(
                instance_topology.cells,
                cfg.cpu.numa.cells)):
            self.assertEqual(index, numa_cfg_cell.id)
            self.assertEqual(instance_cell.cpuset, numa_cfg_cell.cpus)
            self.assertEqual(instance_cell.memory * units.Ki,
                             numa_cfg_cell.memory)
            self.assertIsNone(numa_cfg_cell.memAccess)

        allnodes = set([cell.id for cell in instance_topology.cells])
        self.assertEqual(allnodes, set(cfg.numatune.memory.nodeset))
        self.assertEqual("strict", cfg.numatune.memory.mode)

        for index, (instance_cell, memnode) in enumerate(zip(
                instance_topology.cells,
                cfg.numatune.memnodes)):
            self.assertEqual(index, memnode.cellid)
            self.assertEqual([instance_cell.id], memnode.nodeset)
            self.assertEqual("strict", memnode.mode)
def test_get_guest_config_numa_host_instance_topo_cpu_pinning(self):
    """Guest config must honour explicit per-vCPU pinning.

    Each guest vCPU is pinned to exactly the host CPU named in the
    cell's cpu_pinning map, and the emulator thread is pinned to the
    union of all pinned host CPUs.
    """
    instance_topology = objects.InstanceNUMATopology(
        cells=[objects.InstanceNUMACell(
                   id=1, cpuset=set([0, 1]), memory=1024,
                   cpu_pinning={0: 24, 1: 25}),
               objects.InstanceNUMACell(
                   id=0, cpuset=set([2, 3]), memory=1024,
                   cpu_pinning={2: 0, 3: 1})])
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref.numa_topology = instance_topology
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=2048, vcpus=2, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor
    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology(
        sockets_per_cell=4, cores_per_socket=3, threads_per_core=2)
    # NOTE: renamed from "conn" to "drvr" for consistency with the
    # sibling tests in this class.
    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    with contextlib.nested(
            mock.patch.object(
                objects.InstanceNUMATopology, "get_by_instance_uuid",
                return_value=instance_topology),
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps),
            mock.patch.object(host.Host, 'get_online_cpus',
                              return_value=set(range(8))),
            ):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertIsNone(cfg.cpuset)
        # Test that the pinning is correct and limited to allowed only
        self.assertEqual(0, cfg.cputune.vcpupin[0].id)
        self.assertEqual(set([24]), cfg.cputune.vcpupin[0].cpuset)
        self.assertEqual(1, cfg.cputune.vcpupin[1].id)
        self.assertEqual(set([25]), cfg.cputune.vcpupin[1].cpuset)
        self.assertEqual(2, cfg.cputune.vcpupin[2].id)
        self.assertEqual(set([0]), cfg.cputune.vcpupin[2].cpuset)
        self.assertEqual(3, cfg.cputune.vcpupin[3].id)
        self.assertEqual(set([1]), cfg.cputune.vcpupin[3].cpuset)
        self.assertIsNotNone(cfg.cpu.numa)
        # Emulator must be pinned to union of cfg.cputune.vcpupin[*].cpuset
        self.assertIsInstance(cfg.cputune.emulatorpin,
                              vconfig.LibvirtConfigGuestCPUTuneEmulatorPin)
        self.assertEqual(set([0, 1, 24, 25]),
                         cfg.cputune.emulatorpin.cpuset)
        for i, (instance_cell, numa_cfg_cell) in enumerate(zip(
                instance_topology.cells, cfg.cpu.numa.cells)):
            self.assertEqual(i, numa_cfg_cell.id)
            self.assertEqual(instance_cell.cpuset, numa_cfg_cell.cpus)
            self.assertEqual(instance_cell.memory * units.Ki,
                             numa_cfg_cell.memory)
            self.assertIsNone(numa_cfg_cell.memAccess)
        allnodes = set([cell.id for cell in instance_topology.cells])
        self.assertEqual(allnodes, set(cfg.numatune.memory.nodeset))
        self.assertEqual("strict", cfg.numatune.memory.mode)
        for i, (instance_cell, memnode) in enumerate(zip(
                instance_topology.cells, cfg.numatune.memnodes)):
            self.assertEqual(i, memnode.cellid)
            self.assertEqual([instance_cell.id], memnode.nodeset)
            self.assertEqual("strict", memnode.mode)
def test_get_guest_config_numa_host_mempages_shared(self):
    """With a 2048 KiB pagesize in every instance cell, each generated
    guest NUMA cell must use "shared" memAccess and a strict
    memory-mode numatune.

    Uses enumerate(zip(...)) for the index/pair iteration, consistent
    with the sibling NUMA tests in this class (the original used the
    zip(..., range(len(...))) anti-idiom).
    """
    instance_topology = objects.InstanceNUMATopology(
        cells=[
            objects.InstanceNUMACell(
                id=1, cpuset=set([0, 1]),
                memory=1024, pagesize=2048),
            objects.InstanceNUMACell(
                id=2, cpuset=set([2, 3]),
                memory=1024, pagesize=2048)])
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref.numa_topology = instance_topology
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    flavor = objects.Flavor(memory_mb=2048, vcpus=4, root_gb=496,
                            ephemeral_gb=8128, swap=33550336, name='fake',
                            extra_specs={})
    instance_ref.flavor = flavor
    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()
    drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    with contextlib.nested(
            mock.patch.object(
                objects.InstanceNUMATopology, "get_by_instance_uuid",
                return_value=instance_topology),
            mock.patch.object(host.Host, 'has_min_version',
                              return_value=True),
            mock.patch.object(host.Host, "get_capabilities",
                              return_value=caps),
            mock.patch.object(
                hardware, 'get_vcpu_pin_set',
                return_value=set([2, 3, 4, 5])),
            mock.patch.object(host.Host, 'get_online_cpus',
                              return_value=set(range(8))),
            ):
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        for index, (instance_cell, numa_cfg_cell) in enumerate(zip(
                instance_topology.cells, cfg.cpu.numa.cells)):
            self.assertEqual(index, numa_cfg_cell.id)
            self.assertEqual(instance_cell.cpuset, numa_cfg_cell.cpus)
            self.assertEqual(instance_cell.memory * units.Ki,
                             numa_cfg_cell.memory)
            self.assertEqual("shared", numa_cfg_cell.memAccess)
        allnodes = [cell.id for cell in instance_topology.cells]
        self.assertEqual(allnodes, cfg.numatune.memory.nodeset)
        self.assertEqual("strict", cfg.numatune.memory.mode)
        for index, (instance_cell, memnode) in enumerate(zip(
                instance_topology.cells, cfg.numatune.memnodes)):
            self.assertEqual(index, memnode.cellid)
            self.assertEqual([instance_cell.id], memnode.nodeset)
            self.assertEqual("strict", memnode.mode)
def test_get_cpu_numa_config_from_instance(self):
    """A two-cell instance topology maps onto a two-cell guest CPU
    NUMA config with matching ids/cpusets, KiB-scaled memory and
    "shared" memAccess."""
    topology = objects.InstanceNUMATopology(cells=[
        objects.InstanceNUMACell(id=0, cpuset=set([1, 2]), memory=128),
        objects.InstanceNUMACell(id=1, cpuset=set([3, 4]), memory=128),
    ])
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    conf = driver._get_cpu_numa_config_from_instance(topology, True)
    self.assertIsInstance(conf, vconfig.LibvirtConfigGuestCPUNUMA)
    expected_cells = [(0, set([1, 2])), (1, set([3, 4]))]
    for cell, (cell_id, cpus) in zip(conf.cells, expected_cells):
        self.assertEqual(cell_id, cell.id)
        self.assertEqual(cpus, cell.cpus)
        # 128 MiB expressed in KiB
        self.assertEqual(131072, cell.memory)
        self.assertEqual("shared", cell.memAccess)
def test_get_cpu_numa_config_from_instance_none(self):
    """No instance NUMA topology yields no guest CPU NUMA config."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    self.assertIsNone(
        driver._get_cpu_numa_config_from_instance(None, False))
@mock.patch.object(host.Host, 'has_version', return_value=True)
def test_has_cpu_policy_support(self, mock_has_version):
    """With has_version patched to return True, the driver is expected
    to raise CPUPinningNotSupported from _has_cpu_policy_support."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    self.assertRaises(exception.CPUPinningNotSupported,
                      driver._has_cpu_policy_support)
@mock.patch.object(libvirt_driver.LibvirtDriver, "_has_numa_support",
                   return_value=True)
@mock.patch.object(libvirt_driver.LibvirtDriver, "_has_hugepage_support",
                   return_value=True)
@mock.patch.object(host.Host, "get_capabilities")
def test_does_not_want_hugepages(self, mock_caps, mock_numa, mock_hp):
    """_wants_hugepages must return False when either topology is
    missing, or when the cells use pagesize=4 (contrast with the 2048
    used in test_does_want_hugepages)."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_topology = objects.InstanceNUMATopology(
        cells=[
            objects.InstanceNUMACell(
                id=1, cpuset=set([0, 1]),
                memory=1024, pagesize=4),
            objects.InstanceNUMACell(
                id=2, cpuset=set([2, 3]),
                memory=1024, pagesize=4)])
    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()
    mock_caps.return_value = caps
    host_topology = driver._get_host_numa_topology()
    for host_topo, inst_topo in ((None, None),
                                 (host_topology, None),
                                 (None, instance_topology),
                                 (host_topology, instance_topology)):
        self.assertFalse(driver._wants_hugepages(host_topo, inst_topo))
@mock.patch.object(libvirt_driver.LibvirtDriver, "_has_numa_support",
                   return_value=True)
@mock.patch.object(libvirt_driver.LibvirtDriver, "_has_hugepage_support",
                   return_value=True)
@mock.patch.object(host.Host, "get_capabilities")
def test_does_want_hugepages(self, mock_caps, mock_numa, mock_hp):
    """_wants_hugepages must return True when every instance cell
    requests pagesize=2048 and a host topology is present."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    cells = [objects.InstanceNUMACell(id=cell_id, cpuset=cpus,
                                      memory=1024, pagesize=2048)
             for cell_id, cpus in ((1, set([0, 1])), (2, set([2, 3])))]
    instance_topology = objects.InstanceNUMATopology(cells=cells)
    caps = vconfig.LibvirtConfigCaps()
    caps.host = vconfig.LibvirtConfigCapsHost()
    caps.host.cpu = vconfig.LibvirtConfigCPU()
    caps.host.cpu.arch = "x86_64"
    caps.host.topology = self._fake_caps_numa_topology()
    mock_caps.return_value = caps
    host_topology = driver._get_host_numa_topology()
    self.assertTrue(driver._wants_hugepages(host_topology,
                                            instance_topology))
def test_get_guest_config_clock(self):
    """Every arch gets a UTC clock with pit/rtc timers; x86 arches
    additionally get a disabled hpet timer."""
    self.flags(virt_type='kvm', group='libvirt')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    # arch -> whether an hpet timer entry is expected
    hpet_map = {
        arch.X86_64: True,
        arch.I686: True,
        arch.PPC: False,
        arch.PPC64: False,
        arch.ARMV7: False,
        arch.AARCH64: False,
    }
    for guestarch, expect_hpet in hpet_map.items():
        with mock.patch.object(libvirt_driver.libvirt_utils,
                               'get_arch',
                               return_value=guestarch):
            cfg = driver._get_guest_config(instance_ref, [],
                                           image_meta,
                                           disk_info)
            self.assertIsInstance(cfg.clock,
                                  vconfig.LibvirtConfigGuestClock)
            self.assertEqual("utc", cfg.clock.offset)
            timers = cfg.clock.timers
            for timer in timers[:2]:
                self.assertIsInstance(timer,
                                      vconfig.LibvirtConfigGuestTimer)
            self.assertEqual("pit", timers[0].name)
            self.assertEqual("delay", timers[0].tickpolicy)
            self.assertEqual("rtc", timers[1].name)
            self.assertEqual("catchup", timers[1].tickpolicy)
            if expect_hpet:
                self.assertEqual(3, len(timers))
                self.assertIsInstance(timers[2],
                                      vconfig.LibvirtConfigGuestTimer)
                self.assertEqual('hpet', timers[2].name)
                self.assertFalse(timers[2].present)
            else:
                self.assertEqual(2, len(timers))
@mock.patch.object(libvirt_utils, 'get_arch')
@mock.patch.object(host.Host, 'has_min_version')
def test_get_guest_config_windows(self, mock_version, mock_get_arch):
    """Windows guests get a localtime clock with pit/rtc plus a
    disabled hpet timer (no hypervclock when has_min_version is
    False)."""
    mock_version.return_value = False
    mock_get_arch.return_value = arch.I686
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref['os_type'] = 'windows'
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 1),
                                   image_meta, disk_info)
    self.assertIsInstance(cfg.clock, vconfig.LibvirtConfigGuestClock)
    self.assertEqual("localtime", cfg.clock.offset)
    self.assertEqual(3, len(cfg.clock.timers), cfg.clock.timers)
    self.assertEqual(["pit", "rtc", "hpet"],
                     [timer.name for timer in cfg.clock.timers])
    self.assertFalse(cfg.clock.timers[2].present)
@mock.patch.object(libvirt_utils, 'get_arch')
@mock.patch.object(host.Host, 'has_min_version')
def test_get_guest_config_windows_timer(self, mock_version, mock_get_arch):
    """With has_min_version True, Windows guests additionally get an
    enabled hypervclock timer and the ACPI/APIC/HyperV features."""
    mock_version.return_value = True
    mock_get_arch.return_value = arch.I686
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref['os_type'] = 'windows'
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 1),
                                   image_meta, disk_info)
    self.assertIsInstance(cfg.clock, vconfig.LibvirtConfigGuestClock)
    self.assertEqual("localtime", cfg.clock.offset)
    self.assertEqual(4, len(cfg.clock.timers), cfg.clock.timers)
    self.assertEqual(["pit", "rtc", "hpet", "hypervclock"],
                     [timer.name for timer in cfg.clock.timers])
    self.assertFalse(cfg.clock.timers[2].present)
    self.assertTrue(cfg.clock.timers[3].present)
    feature_classes = (vconfig.LibvirtConfigGuestFeatureACPI,
                       vconfig.LibvirtConfigGuestFeatureAPIC,
                       vconfig.LibvirtConfigGuestFeatureHyperV)
    self.assertEqual(3, len(cfg.features))
    for feature, expected_class in zip(cfg.features, feature_classes):
        self.assertIsInstance(feature, expected_class)
@mock.patch.object(host.Host, 'has_min_version')
def test_get_guest_config_windows_hyperv_feature1(self, mock_version):
    """When only the base (1,0,0)/(1,1,0) version pair matches, the
    HyperV feature is present with relaxed=True but without spinlocks
    or vapic."""
    def fake_version(lv_ver=None, hv_ver=None, hv_type=None):
        return lv_ver == (1, 0, 0) and hv_ver == (1, 1, 0)
    mock_version.side_effect = fake_version
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref['os_type'] = 'windows'
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 1),
                                   image_meta, disk_info)
    self.assertIsInstance(cfg.clock, vconfig.LibvirtConfigGuestClock)
    self.assertEqual("localtime", cfg.clock.offset)
    feature_classes = (vconfig.LibvirtConfigGuestFeatureACPI,
                       vconfig.LibvirtConfigGuestFeatureAPIC,
                       vconfig.LibvirtConfigGuestFeatureHyperV)
    self.assertEqual(3, len(cfg.features))
    for feature, expected_class in zip(cfg.features, feature_classes):
        self.assertIsInstance(feature, expected_class)
    hyperv = cfg.features[2]
    self.assertTrue(hyperv.relaxed)
    self.assertFalse(hyperv.spinlocks)
    self.assertFalse(hyperv.vapic)
@mock.patch.object(host.Host, 'has_min_version')
def test_get_guest_config_windows_hyperv_feature2(self, mock_version):
    """When all version checks pass, the HyperV feature enables
    relaxed, spinlocks (8191 retries) and vapic."""
    mock_version.return_value = True
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    instance_ref['os_type'] = 'windows'
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 1),
                                   image_meta, disk_info)
    self.assertIsInstance(cfg.clock, vconfig.LibvirtConfigGuestClock)
    self.assertEqual("localtime", cfg.clock.offset)
    feature_classes = (vconfig.LibvirtConfigGuestFeatureACPI,
                       vconfig.LibvirtConfigGuestFeatureAPIC,
                       vconfig.LibvirtConfigGuestFeatureHyperV)
    self.assertEqual(3, len(cfg.features))
    for feature, expected_class in zip(cfg.features, feature_classes):
        self.assertIsInstance(feature, expected_class)
    hyperv = cfg.features[2]
    self.assertTrue(hyperv.relaxed)
    self.assertTrue(hyperv.spinlocks)
    self.assertEqual(8191, hyperv.spinlock_retries)
    self.assertTrue(hyperv.vapic)
def test_get_guest_config_with_two_nics(self):
    """Two NICs produce two GuestInterface devices in the expected
    position of the 10-device layout."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref,
                                   _fake_network_info(self.stubs, 2),
                                   image_meta, disk_info)
    self.assertEqual(2, len(cfg.features))
    self.assertIsInstance(cfg.features[0],
                          vconfig.LibvirtConfigGuestFeatureACPI)
    self.assertIsInstance(cfg.features[1],
                          vconfig.LibvirtConfigGuestFeatureAPIC)
    self.assertEqual(2 * units.Mi, cfg.memory)
    self.assertEqual(1, cfg.vcpus)
    self.assertEqual(vm_mode.HVM, cfg.os_type)
    self.assertEqual(["hd"], cfg.os_boot_dev)
    self.assertIsNone(cfg.os_root)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestInterface,
                      vconfig.LibvirtConfigGuestInterface,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestInput,
                      vconfig.LibvirtConfigGuestGraphics,
                      vconfig.LibvirtConfigGuestVideo,
                      vconfig.LibvirtConfigMemoryBalloon]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
def test_get_guest_config_bug_1118829(self):
    """Regression test for bug 1118829: building the guest config with
    an empty block_device_info must still set the instance's
    root_device_name."""
    self.flags(virt_type='uml', group='libvirt')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    mapping = {u'vda': {'bus': 'virtio', 'type': 'disk', 'dev': u'vda'},
               'root': {'bus': 'virtio', 'type': 'disk', 'dev': 'vda'}}
    disk_info = {'disk_bus': 'virtio',
                 'cdrom_bus': 'ide',
                 'mapping': mapping}
    # NOTE(jdg): For this specific test leave this blank
    # This will exercise the failed code path still,
    # and won't require fakes and stubs of the iscsi discovery
    block_device_info = {}
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    driver._get_guest_config(instance_ref, [], image_meta, disk_info,
                             None, block_device_info)
    self.assertEqual('/dev/vda', instance_ref['root_device_name'])
def test_get_guest_config_with_root_device_name(self):
    """A root_device_name in block_device_info must become the guest's
    os_root; UML guests get 3 devices ending in a console."""
    self.flags(virt_type='uml', group='libvirt')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    block_device_info = {'root_device_name': '/dev/vdb'}
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta,
                                        block_device_info)
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info,
                                   None, block_device_info)
    self.assertEqual(0, len(cfg.features))
    self.assertEqual(2 * units.Mi, cfg.memory)
    self.assertEqual(1, cfg.vcpus)
    self.assertEqual("uml", cfg.os_type)
    self.assertEqual([], cfg.os_boot_dev)
    self.assertEqual('/dev/vdb', cfg.os_root)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestConsole]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
def test_get_guest_config_with_block_device(self):
    """Attached volumes show up as guest disks with the requested
    target device names, and each BDM gets saved."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    conn_info = {'driver_volume_type': 'fake'}
    bdms = driver_block_device.convert_volumes([
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 1,
             'source_type': 'volume', 'destination_type': 'volume',
             'device_name': '/dev/vdc'}),
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 2,
             'source_type': 'volume', 'destination_type': 'volume',
             'device_name': '/dev/vdd'}),
    ])
    info = {'block_device_mapping': bdms}
    for bdm in bdms:
        bdm['connection_info'] = conn_info
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta,
                                        info)
    with mock.patch.object(
            driver_block_device.DriverVolumeBlockDevice, 'save'
    ) as mock_save:
        cfg = driver._get_guest_config(instance_ref, [],
                                       image_meta, disk_info,
                                       None, info)
        for index, target in ((2, 'vdc'), (3, 'vdd')):
            device = cfg.devices[index]
            self.assertIsInstance(device, vconfig.LibvirtConfigGuestDisk)
            self.assertEqual(target, device.target_dev)
        mock_save.assert_called_with()
def test_get_guest_config_lxc_with_attached_volume(self):
    """For LXC guests, non-boot volumes appear as guest disks vdc/vdd
    right after the boot device, and each BDM gets saved."""
    self.flags(virt_type='lxc', group='libvirt')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    conn_info = {'driver_volume_type': 'fake'}
    bdms = driver_block_device.convert_volumes([
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 1,
             'source_type': 'volume', 'destination_type': 'volume',
             'boot_index': 0}),
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 2,
             'source_type': 'volume', 'destination_type': 'volume',
             }),
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 3,
             'source_type': 'volume', 'destination_type': 'volume',
             }),
    ])
    info = {'block_device_mapping': bdms}
    mount_devices = ('/dev/vda', '/dev/vdc', '/dev/vdd')
    for bdm, mount_device in zip(bdms, mount_devices):
        bdm['connection_info'] = conn_info
        bdm['mount_device'] = mount_device
    with mock.patch.object(
            driver_block_device.DriverVolumeBlockDevice, 'save'
    ) as mock_save:
        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta,
                                            info)
        cfg = driver._get_guest_config(instance_ref, [],
                                       image_meta, disk_info,
                                       None, info)
        for index, target in ((1, 'vdc'), (2, 'vdd')):
            device = cfg.devices[index]
            self.assertIsInstance(device, vconfig.LibvirtConfigGuestDisk)
            self.assertEqual(target, device.target_dev)
        mock_save.assert_called_with()
def test_get_guest_config_with_configdrive(self):
    """The config drive is attached as the last available device.

    It's necessary to check if the architecture is power, because
    power doesn't have support to ide, and so libvirt translates
    all ide calls to scsi: the last ide device on x86 is hdd, while
    the last scsi device on power is sdz.
    """
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    # make configdrive.required_by() return True
    instance_ref['config_drive'] = True
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info)
    host_arch = blockinfo.libvirt_utils.get_arch({})
    disk = "sdz" if host_arch in ("ppc", "ppc64") else "hdd"
    self.assertIsInstance(cfg.devices[2],
                          vconfig.LibvirtConfigGuestDisk)
    self.assertEqual(disk, cfg.devices[2].target_dev)
def test_get_guest_config_with_virtio_scsi_bus(self):
    """hw_scsi_model=virtio-scsi in the image adds a virtio-scsi
    controller after the two disks."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    image_meta = objects.ImageMeta.from_dict({
        "disk_format": "raw",
        "properties": {"hw_scsi_model": "virtio-scsi"}})
    instance_ref = objects.Instance(**self.test_instance)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta,
                                        [])
    cfg = driver._get_guest_config(instance_ref, [], image_meta, disk_info)
    for disk_dev in cfg.devices[:2]:
        self.assertIsInstance(disk_dev, vconfig.LibvirtConfigGuestDisk)
    controller = cfg.devices[2]
    self.assertIsInstance(controller,
                          vconfig.LibvirtConfigGuestController)
    self.assertEqual('virtio-scsi', controller.model)
def test_get_guest_config_with_virtio_scsi_bus_bdm(self):
    """SCSI-bus volumes become scsi-bus guest disks sdc/sdd followed
    by a virtio-scsi controller, and each BDM gets saved."""
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    image_meta = objects.ImageMeta.from_dict({
        "disk_format": "raw",
        "properties": {"hw_scsi_model": "virtio-scsi"}})
    instance_ref = objects.Instance(**self.test_instance)
    conn_info = {'driver_volume_type': 'fake'}
    bdms = driver_block_device.convert_volumes([
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 1,
             'source_type': 'volume', 'destination_type': 'volume',
             'device_name': '/dev/sdc', 'disk_bus': 'scsi'}),
        fake_block_device.FakeDbBlockDeviceDict(
            {'id': 2,
             'source_type': 'volume', 'destination_type': 'volume',
             'device_name': '/dev/sdd', 'disk_bus': 'scsi'}),
    ])
    bd_info = {'block_device_mapping': bdms}
    for bdm in bdms:
        bdm['connection_info'] = conn_info
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta,
                                        bd_info)
    with mock.patch.object(
            driver_block_device.DriverVolumeBlockDevice, 'save'
    ) as mock_save:
        cfg = driver._get_guest_config(instance_ref, [], image_meta,
                                       disk_info, [], bd_info)
        for index, target in ((2, 'sdc'), (3, 'sdd')):
            device = cfg.devices[index]
            self.assertIsInstance(device, vconfig.LibvirtConfigGuestDisk)
            self.assertEqual(target, device.target_dev)
            self.assertEqual('scsi', device.target_bus)
        controller = cfg.devices[4]
        self.assertIsInstance(controller,
                              vconfig.LibvirtConfigGuestController)
        self.assertEqual('virtio-scsi', controller.model)
        mock_save.assert_called_with()
def test_get_guest_config_with_vnc(self):
    """VNC enabled, SPICE and usb tablet disabled: 7 devices with a
    vnc graphics device and no input device."""
    self.flags(enabled=True, group='vnc')
    self.flags(virt_type='kvm',
               use_usb_tablet=False,
               group='libvirt')
    self.flags(enabled=False, group='spice')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestGraphics,
                      vconfig.LibvirtConfigGuestVideo,
                      vconfig.LibvirtConfigMemoryBalloon]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
    self.assertEqual("vnc", cfg.devices[4].type)
def test_get_guest_config_with_vnc_and_tablet(self):
    """VNC plus usb tablet: 8 devices, with a tablet input device
    followed by a vnc graphics device."""
    self.flags(enabled=True, group='vnc')
    self.flags(virt_type='kvm',
               use_usb_tablet=True,
               group='libvirt')
    self.flags(enabled=False, group='spice')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestInput,
                      vconfig.LibvirtConfigGuestGraphics,
                      vconfig.LibvirtConfigGuestVideo,
                      vconfig.LibvirtConfigMemoryBalloon]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
    self.assertEqual("tablet", cfg.devices[4].type)
    self.assertEqual("vnc", cfg.devices[5].type)
def test_get_guest_config_with_spice_and_tablet(self):
    """SPICE without agent plus usb tablet: 8 devices, with a tablet
    input device followed by a spice graphics device."""
    self.flags(enabled=False, group='vnc')
    self.flags(virt_type='kvm',
               use_usb_tablet=True,
               group='libvirt')
    self.flags(enabled=True,
               agent_enabled=False,
               group='spice')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestInput,
                      vconfig.LibvirtConfigGuestGraphics,
                      vconfig.LibvirtConfigGuestVideo,
                      vconfig.LibvirtConfigMemoryBalloon]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
    self.assertEqual("tablet", cfg.devices[4].type)
    self.assertEqual("spice", cfg.devices[5].type)
def test_get_guest_config_with_spice_and_agent(self):
    """SPICE with the agent enabled: a spice channel replaces the usb
    tablet, followed by spice graphics and a qxl video device."""
    self.flags(enabled=False, group='vnc')
    self.flags(virt_type='kvm',
               use_usb_tablet=True,
               group='libvirt')
    self.flags(enabled=True,
               agent_enabled=True,
               group='spice')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestChannel,
                      vconfig.LibvirtConfigGuestGraphics,
                      vconfig.LibvirtConfigGuestVideo,
                      vconfig.LibvirtConfigMemoryBalloon]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
    self.assertEqual("com.redhat.spice.0", cfg.devices[4].target_name)
    self.assertEqual("spice", cfg.devices[5].type)
    self.assertEqual("qxl", cfg.devices[6].type)
@mock.patch('nova.console.serial.acquire_port')
@mock.patch('nova.virt.hardware.get_number_of_serial_ports',
            return_value=1)
@mock.patch.object(libvirt_driver.libvirt_utils, 'get_arch',)
def test_create_serial_console_devices_based_on_arch(self, mock_get_arch,
                                                     mock_get_port_number,
                                                     mock_acquire_port):
    """x86_64 gets a TCP serial device; s390/s390x get a TCP console
    device instead."""
    self.flags(enabled=True, group='serial_console')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    expected = {arch.X86_64: vconfig.LibvirtConfigGuestSerial,
                arch.S390: vconfig.LibvirtConfigGuestConsole,
                arch.S390X: vconfig.LibvirtConfigGuestConsole}
    for guest_arch, expected_type in expected.items():
        mock_get_arch.return_value = guest_arch
        guest = vconfig.LibvirtConfigGuest()
        driver._create_serial_console_devices(guest, instance=None,
                                              flavor={}, image_meta={})
        self.assertEqual(1, len(guest.devices))
        device = guest.devices[0]
        self.assertIsInstance(device, expected_type)
        self.assertEqual("tcp", device.type)
@mock.patch('nova.console.serial.acquire_port')
def test_get_guest_config_serial_console(self, acquire_port):
    """With serial consoles enabled the first serial device is a TCP
    console listening on the acquired port."""
    self.flags(enabled=True, group='serial_console')
    driver = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
    instance_ref = objects.Instance(**self.test_instance)
    image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
    disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                        instance_ref,
                                        image_meta)
    acquire_port.return_value = 11111
    cfg = driver._get_guest_config(instance_ref, [],
                                   image_meta, disk_info)
    expected_types = [vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestDisk,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestSerial,
                      vconfig.LibvirtConfigGuestInput,
                      vconfig.LibvirtConfigGuestGraphics,
                      vconfig.LibvirtConfigGuestVideo,
                      vconfig.LibvirtConfigMemoryBalloon]
    self.assertEqual(len(expected_types), len(cfg.devices))
    for device, expected_type in zip(cfg.devices, expected_types):
        self.assertIsInstance(device, expected_type)
    self.assertEqual("tcp", cfg.devices[2].type)
    self.assertEqual(11111, cfg.devices[2].listen_port)
    def test_get_guest_config_serial_console_through_flavor(self):
        """Flavor extra_spec 'hw:serial_port_count' drives the number of
        tcp serial devices placed in the guest config."""
        self.flags(enabled=True, group='serial_console')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.extra_specs = {'hw:serial_port_count': 3}
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertEqual(10, len(cfg.devices))
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[9],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual("tcp", cfg.devices[2].type)
        self.assertEqual("tcp", cfg.devices[3].type)
        self.assertEqual("tcp", cfg.devices[4].type)
    def test_get_guest_config_serial_console_invalid_flavor(self):
        """A non-numeric 'hw:serial_port_count' extra_spec raises
        ImageSerialPortNumberInvalid when building the guest config."""
        self.flags(enabled=True, group='serial_console')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.extra_specs = {'hw:serial_port_count': "a"}
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        self.assertRaises(
            exception.ImageSerialPortNumberInvalid,
            drvr._get_guest_config, instance_ref, [],
            image_meta, disk_info)
    def test_get_guest_config_serial_console_image_and_flavor(self):
        """When both the image property and the flavor extra_spec request a
        serial port count, the image value (3) appears to win over the
        flavor value (4) — the device layout matches the 3-port case."""
        self.flags(enabled=True, group='serial_console')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        image_meta = objects.ImageMeta.from_dict({
            "disk_format": "raw",
            "properties": {"hw_serial_port_count": "3"}})
        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.extra_specs = {'hw:serial_port_count': 4}
        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)
        cfg = drvr._get_guest_config(instance_ref, [], image_meta,
                                     disk_info)
        self.assertEqual(10, len(cfg.devices), cfg.devices)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[9],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual("tcp", cfg.devices[2].type)
        self.assertEqual("tcp", cfg.devices[3].type)
        self.assertEqual("tcp", cfg.devices[4].type)
    @mock.patch('nova.console.serial.acquire_port')
    def test_get_guest_config_serial_console_through_port_rng_exhausted(
            self, acquire_port):
        """SocketPortRangeExhaustedException from acquire_port propagates
        out of _get_guest_config unchanged."""
        self.flags(enabled=True, group='serial_console')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        acquire_port.side_effect = exception.SocketPortRangeExhaustedException(
            '127.0.0.1')
        self.assertRaises(
            exception.SocketPortRangeExhaustedException,
            drvr._get_guest_config, instance_ref, [],
            image_meta, disk_info)
    @mock.patch.object(libvirt_guest.Guest, "get_xml_desc")
    def test_get_serial_ports_from_guest(self, mock_get_xml_desc):
        """With no mode filter, all four (host, port) pairs from the
        domain XML are returned."""
        i = self._test_get_serial_ports_from_guest(None,
                                                   mock_get_xml_desc)
        self.assertEqual([
            ('127.0.0.1', 100),
            ('127.0.0.1', 101),
            ('127.0.0.2', 100),
            ('127.0.0.2', 101)], list(i))
    @mock.patch.object(libvirt_guest.Guest, "get_xml_desc")
    def test_get_serial_ports_from_guest_bind_only(self, mock_get_xml_desc):
        """mode='bind' returns only the sources declared with mode="bind"."""
        i = self._test_get_serial_ports_from_guest('bind',
                                                   mock_get_xml_desc)
        self.assertEqual([
            ('127.0.0.1', 101),
            ('127.0.0.2', 100)], list(i))
    @mock.patch.object(libvirt_guest.Guest, "get_xml_desc")
    def test_get_serial_ports_from_guest_connect_only(self,
                                                      mock_get_xml_desc):
        """mode='connect' returns only the sources with mode="connect"."""
        i = self._test_get_serial_ports_from_guest('connect',
                                                   mock_get_xml_desc)
        self.assertEqual([
            ('127.0.0.1', 100),
            ('127.0.0.2', 101)], list(i))
    @mock.patch.object(libvirt_guest.Guest, "get_xml_desc")
    def test_get_serial_ports_from_guest_on_s390(self, mock_get_xml_desc):
        """s390 guests expose <console> instead of <serial> elements;
        port extraction works the same for that device name."""
        i = self._test_get_serial_ports_from_guest(None,
                                                   mock_get_xml_desc,
                                                   'console')
        self.assertEqual([
            ('127.0.0.1', 100),
            ('127.0.0.1', 101),
            ('127.0.0.2', 100),
            ('127.0.0.2', 101)], list(i))
    def _test_get_serial_ports_from_guest(self, mode, mock_get_xml_desc,
                                          dev_name='serial'):
        """Build a fake domain XML with four tcp <serial>/<console> sources
        (two bind, two connect) and return the driver's parsed port iterator
        for the given *mode* filter (None, 'bind', or 'connect')."""
        xml = """
        <domain type='kvm'>
          <devices>
            <%(dev_name)s type="tcp">
              <source host="127.0.0.1" service="100" mode="connect"/>
            </%(dev_name)s>
            <%(dev_name)s type="tcp">
              <source host="127.0.0.1" service="101" mode="bind"/>
            </%(dev_name)s>
            <%(dev_name)s type="tcp">
              <source host="127.0.0.2" service="100" mode="bind"/>
            </%(dev_name)s>
            <%(dev_name)s type="tcp">
              <source host="127.0.0.2" service="101" mode="connect"/>
            </%(dev_name)s>
          </devices>
        </domain>""" % {'dev_name': dev_name}

        mock_get_xml_desc.return_value = xml

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        guest = libvirt_guest.Guest(FakeVirtDomain())
        return drvr._get_serial_ports_from_guest(guest, mode=mode)
    def test_get_guest_config_with_type_xen(self):
        """Xen guests get a console device (not serial) plus VNC graphics
        and a 'xen' video device."""
        self.flags(enabled=True, group='vnc')
        self.flags(virt_type='xen',
                   use_usb_tablet=False,
                   group='libvirt')
        self.flags(enabled=False,
                   group='spice')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertEqual(len(cfg.devices), 6)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestConsole)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual(cfg.devices[3].type, "vnc")
        self.assertEqual(cfg.devices[4].type, "xen")
    @mock.patch.object(libvirt_driver.libvirt_utils, 'get_arch',
                       return_value=arch.S390X)
    def test_get_guest_config_with_type_kvm_on_s390(self, mock_get_arch):
        """KVM on s390x uses the s390-ccw-virtio machine type and sclp
        console devices: a file-backed log console and a pty terminal."""
        self.flags(enabled=False, group='vnc')
        self.flags(virt_type='kvm',
                   use_usb_tablet=False,
                   group='libvirt')

        self._stub_host_capabilities_cpu_arch(arch.S390X)

        instance_ref = objects.Instance(**self.test_instance)

        cfg = self._get_guest_config_via_fake_api(instance_ref)

        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        log_file_device = cfg.devices[2]
        self.assertIsInstance(log_file_device,
                              vconfig.LibvirtConfigGuestConsole)
        self.assertEqual("sclplm", log_file_device.target_type)
        self.assertEqual("file", log_file_device.type)
        terminal_device = cfg.devices[3]
        self.assertIsInstance(terminal_device,
                              vconfig.LibvirtConfigGuestConsole)
        self.assertEqual("sclp", terminal_device.target_type)
        self.assertEqual("pty", terminal_device.type)
        self.assertEqual("s390-ccw-virtio", cfg.os_mach_type)
    def _stub_host_capabilities_cpu_arch(self, cpu_arch):
        """Stub Host.get_capabilities so the reported host CPU
        architecture is *cpu_arch* (affects all subsequent driver calls
        in the test)."""
        def get_host_capabilities_stub(self):
            cpu = vconfig.LibvirtConfigGuestCPU()
            cpu.arch = cpu_arch

            caps = vconfig.LibvirtConfigCaps()
            caps.host = vconfig.LibvirtConfigCapsHost()
            caps.host.cpu = cpu
            return caps

        self.stubs.Set(host.Host, "get_capabilities",
                       get_host_capabilities_stub)
    def _get_guest_config_via_fake_api(self, instance):
        """Build and return a guest config for *instance* through a driver
        backed by the fake virt API and the standard test image metadata."""
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)
        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance,
                                            image_meta)
        return drvr._get_guest_config(instance, [],
                                      image_meta, disk_info)
    def test_get_guest_config_with_type_xen_pae_hvm(self):
        """Xen HVM guests get the hvmloader os_loader and the PAE, ACPI
        and APIC guest features."""
        self.flags(enabled=True, group='vnc')
        self.flags(virt_type='xen',
                   use_usb_tablet=False,
                   group='libvirt')
        self.flags(enabled=False,
                   group='spice')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        instance_ref['vm_mode'] = vm_mode.HVM
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)

        self.assertEqual(cfg.os_type, vm_mode.HVM)
        self.assertEqual(cfg.os_loader, CONF.libvirt.xen_hvmloader_path)
        self.assertEqual(3, len(cfg.features))
        self.assertIsInstance(cfg.features[0],
                              vconfig.LibvirtConfigGuestFeaturePAE)
        self.assertIsInstance(cfg.features[1],
                              vconfig.LibvirtConfigGuestFeatureACPI)
        self.assertIsInstance(cfg.features[2],
                              vconfig.LibvirtConfigGuestFeatureAPIC)
    def test_get_guest_config_with_type_xen_pae_pvm(self):
        """Xen paravirtual guests (default vm_mode) get only the PAE
        feature and os_type 'xen'."""
        self.flags(enabled=True, group='vnc')
        self.flags(virt_type='xen',
                   use_usb_tablet=False,
                   group='libvirt')
        self.flags(enabled=False,
                   group='spice')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)

        self.assertEqual(cfg.os_type, vm_mode.XEN)
        self.assertEqual(1, len(cfg.features))
        self.assertIsInstance(cfg.features[0],
                              vconfig.LibvirtConfigGuestFeaturePAE)
    def test_get_guest_config_with_vnc_and_spice(self):
        """With both VNC and SPICE (agent) enabled the guest gets a usb
        tablet, a spice agent channel, and two graphics devices."""
        self.flags(enabled=True, group='vnc')
        self.flags(virt_type='kvm',
                   use_usb_tablet=True,
                   group='libvirt')
        self.flags(enabled=True,
                   agent_enabled=True,
                   group='spice')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)
        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertEqual(len(cfg.devices), 10)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestChannel)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[9],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual(cfg.devices[4].type, "tablet")
        self.assertEqual(cfg.devices[5].target_name, "com.redhat.spice.0")
        self.assertEqual(cfg.devices[6].type, "vnc")
        self.assertEqual(cfg.devices[7].type, "spice")
    def test_get_guest_config_with_watchdog_action_image_meta(self):
        """Image property hw_watchdog_action='none' adds a watchdog device
        with that action to the guest config."""
        self.flags(virt_type='kvm', group='libvirt')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict({
            "disk_format": "raw",
            "properties": {"hw_watchdog_action": "none"}})

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [], image_meta, disk_info)
        self.assertEqual(len(cfg.devices), 9)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestWatchdog)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual("none", cfg.devices[7].action)
    def _test_get_guest_usb_tablet(self, vnc_enabled, spice_enabled, os_type,
                                   agent_enabled=False):
        """Configure vnc/spice flags as requested, then return whatever
        the driver decides for the usb tablet device (or None)."""
        self.flags(enabled=vnc_enabled, group='vnc')
        self.flags(enabled=spice_enabled,
                   agent_enabled=agent_enabled, group='spice')
        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
        return drvr._get_guest_usb_tablet(os_type)
    def test_get_guest_usb_tablet_wipe(self):
        """A tablet is created only for HVM guests with a graphical console
        (vnc or spice without agent); otherwise None is returned."""
        self.flags(use_usb_tablet=True, group='libvirt')

        tablet = self._test_get_guest_usb_tablet(True, True, vm_mode.HVM)
        self.assertIsNotNone(tablet)

        tablet = self._test_get_guest_usb_tablet(True, False, vm_mode.HVM)
        self.assertIsNotNone(tablet)

        tablet = self._test_get_guest_usb_tablet(False, True, vm_mode.HVM)
        self.assertIsNotNone(tablet)

        tablet = self._test_get_guest_usb_tablet(False, False, vm_mode.HVM)
        self.assertIsNone(tablet)

        tablet = self._test_get_guest_usb_tablet(True, True, "foo")
        self.assertIsNone(tablet)

        tablet = self._test_get_guest_usb_tablet(
            False, True, vm_mode.HVM, True)
        self.assertIsNone(tablet)
    def _test_get_guest_config_with_watchdog_action_flavor(self,
            hw_watchdog_action="hw:watchdog_action"):
        """Set the watchdog action through a flavor extra_spec (key name is
        parameterized to also cover the legacy unscoped spelling) and
        assert a watchdog device with action 'none' shows up."""
        self.flags(virt_type='kvm', group='libvirt')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.extra_specs = {hw_watchdog_action: 'none'}
        image_meta = objects.ImageMeta.from_dict(self.test_image_meta)

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)

        self.assertEqual(9, len(cfg.devices))
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestWatchdog)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual("none", cfg.devices[7].action)
    def test_get_guest_config_with_watchdog_action_through_flavor(self):
        """Scoped extra_spec 'hw:watchdog_action' configures the watchdog."""
        self._test_get_guest_config_with_watchdog_action_flavor()
# TODO(pkholkin): the test accepting old property name 'hw_watchdog_action'
# should be removed in the next release
    def test_get_guest_config_with_watchdog_action_through_flavor_no_scope(
            self):
        """Legacy unscoped extra_spec 'hw_watchdog_action' still works."""
        self._test_get_guest_config_with_watchdog_action_flavor(
            hw_watchdog_action="hw_watchdog_action")
    def test_get_guest_config_with_watchdog_overrides_flavor(self):
        """The image property hw_watchdog_action ('pause') overrides the
        flavor extra_spec value ('none')."""
        self.flags(virt_type='kvm', group='libvirt')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.extra_specs = {'hw_watchdog_action': 'none'}

        image_meta = objects.ImageMeta.from_dict({
            "disk_format": "raw",
            "properties": {"hw_watchdog_action": "pause"}})

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)

        self.assertEqual(9, len(cfg.devices))
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestWatchdog)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual("pause", cfg.devices[7].action)
    def test_get_guest_config_with_video_driver_image_meta(self):
        """Image property hw_video_model selects the guest video device
        type (here 'vmvga')."""
        self.flags(virt_type='kvm', group='libvirt')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict({
            "disk_format": "raw",
            "properties": {"hw_video_model": "vmvga"}})

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)
        cfg = drvr._get_guest_config(instance_ref, [], image_meta, disk_info)
        self.assertEqual(len(cfg.devices), 8)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual(cfg.devices[5].type, "vnc")
        self.assertEqual(cfg.devices[6].type, "vmvga")
    def test_get_guest_config_with_qga_through_image_meta(self):
        """Image property hw_qemu_guest_agent='yes' adds a unix channel
        device named org.qemu.guest_agent.0."""
        self.flags(virt_type='kvm', group='libvirt')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        image_meta = objects.ImageMeta.from_dict({
            "disk_format": "raw",
            "properties": {"hw_qemu_guest_agent": "yes"}})

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)
        cfg = drvr._get_guest_config(instance_ref, [], image_meta, disk_info)
        self.assertEqual(len(cfg.devices), 9)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestInput)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigGuestChannel)
        self.assertIsInstance(cfg.devices[8],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual(cfg.devices[4].type, "tablet")
        self.assertEqual(cfg.devices[5].type, "vnc")
        self.assertEqual(cfg.devices[7].type, "unix")
        self.assertEqual(cfg.devices[7].target_name, "org.qemu.guest_agent.0")
    def test_get_guest_config_with_video_driver_vram(self):
        """Image properties select a qxl video device whose vram comes from
        hw_video_ram (64 MiB expressed in KiB), bounded by the flavor's
        hw_video:ram_max_mb extra_spec."""
        self.flags(enabled=False, group='vnc')
        self.flags(virt_type='kvm', group='libvirt')
        self.flags(enabled=True,
                   agent_enabled=True,
                   group='spice')

        drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)

        instance_ref = objects.Instance(**self.test_instance)
        instance_ref.flavor.extra_specs = {'hw_video:ram_max_mb': "100"}
        image_meta = objects.ImageMeta.from_dict({
            "disk_format": "raw",
            "properties": {"hw_video_model": "qxl",
                           "hw_video_ram": "64"}})

        disk_info = blockinfo.get_disk_info(CONF.libvirt.virt_type,
                                            instance_ref,
                                            image_meta)

        cfg = drvr._get_guest_config(instance_ref, [],
                                     image_meta, disk_info)
        self.assertEqual(len(cfg.devices), 8)
        self.assertIsInstance(cfg.devices[0],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[1],
                              vconfig.LibvirtConfigGuestDisk)
        self.assertIsInstance(cfg.devices[2],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[3],
                              vconfig.LibvirtConfigGuestSerial)
        self.assertIsInstance(cfg.devices[4],
                              vconfig.LibvirtConfigGuestChannel)
        self.assertIsInstance(cfg.devices[5],
                              vconfig.LibvirtConfigGuestGraphics)
        self.assertIsInstance(cfg.devices[6],
                              vconfig.LibvirtConfigGuestVideo)
        self.assertIsInstance(cfg.devices[7],
                              vconfig.LibvirtConfigMemoryBalloon)

        self.assertEqual(cfg.devices[5].type, "spice")
        self.assertEqual(cfg.devices[6].type, "qxl")
        self.assertEqual(cfg.devices[6].vram, 64 * units.Mi / units.Ki)
@mock.patch('nova.virt.disk.api.teardown_container')
@mock.patch('nova.virt.libvirt.driver.LibvirtDriver.get_info')
@mock.patch('nova.virt.disk.api.setup_container')
@mock.patch('oslo_utils.fileutils.ensure_tree')
@mock.patch.object(fake_libvirt_utils, 'get_instance_path')
def test_unmount_fs_if_error_during_lxc_create_domain(self,
mock_get_inst_path, mock_ensure_tree, mock_setup_container,
mock_get_info, mock_teardown):
"""If we hit an error during a `_create_domain` call to `libvirt+lxc`
we need to ensure the guest FS is unmounted from the host so that any
future `lvremove` calls will work.
"""
self.flags(virt_type='lxc', group='libvirt')
drvr = libvirt_driver.LibvirtDriver(fake.FakeVirtAPI(), True)
mock_instance = mock.MagicMock()
mock_get_inst_path.return_value = '/tmp/'
mock_image_backend = mock.MagicMock()
drvr.image_backend = mock_image_backend
mock_image = mock.MagicMock()
mock_image.path = '/tmp/test.img'
drvr.image_backend.image.return_value = mock_image
mock_setup_container.return_value = '/dev/nbd0'
mock_get_info.side_effect = exception.InstanceNotFound(
instance_id='foo')
drvr._conn.defineXML = mock.Mock()
drvr._conn.defineXML.side_effect = ValueError('somethingbad')
with contextlib.nested(
mock.patch.object(drvr, '_is_booted_from_volume',
return_value=False),
mock.patch.object(drvr, 'plug_vifs'),
mock.patch.object(drvr, 'firewall_driver'),
mock.patch.object(drvr, 'cleanup')):
self.assertRaises(ValueError,
drvr._create_domain_and_network,
self.context,
'xml',
mock_instance, None, None)
mock_teardown | codeparrot/github-code-clean |
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Abstract base class of Port-specific entry points for the layout tests
test infrastructure (the Port and Driver classes)."""
import cgi
import difflib
import errno
import itertools
import json
import logging
import os
import operator
import optparse
import re
import sys
try:
from collections import OrderedDict
except ImportError:
# Needed for Python < 2.7
from webkitpy.thirdparty.ordered_dict import OrderedDict
from webkitpy.common import find_files
from webkitpy.common import read_checksum_from_png
from webkitpy.common.memoized import memoized
from webkitpy.common.system import path
from webkitpy.common.system.executive import ScriptError
from webkitpy.common.system.path import cygpath
from webkitpy.common.system.systemhost import SystemHost
from webkitpy.common.webkit_finder import WebKitFinder
from webkitpy.layout_tests.layout_package.bot_test_expectations import BotTestExpectationsFactory
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.layout_tests.port import config as port_config
from webkitpy.layout_tests.port import driver
from webkitpy.layout_tests.port import server_process
from webkitpy.layout_tests.port.factory import PortFactory
from webkitpy.layout_tests.servers import apache_http
from webkitpy.layout_tests.servers import pywebsocket
from webkitpy.layout_tests.servers import wptserve
_log = logging.getLogger(__name__)
# FIXME: This class should merge with WebKitPort now that Chromium behaves mostly like other webkit ports.
class Port(object):
"""Abstract class for Port-specific hooks for the layout_test package."""
# Subclasses override this. This should indicate the basic implementation
# part of the port name, e.g., 'mac', 'win', 'gtk'; there is probably (?)
# one unique value per class.
# FIXME: We should probably rename this to something like 'implementation_name'.
port_name = None
# Test names resemble unix relative paths, and use '/' as a directory separator.
TEST_PATH_SEPARATOR = '/'
ALL_BUILD_TYPES = ('debug', 'release')
CONTENT_SHELL_NAME = 'content_shell'
    # True if the port has aac and mp3 codecs built in.
PORT_HAS_AUDIO_CODECS_BUILT_IN = False
ALL_SYSTEMS = (
# FIXME: We treat Retina (High-DPI) devices as if they are running
# a different operating system version. This isn't accurate, but will work until
# we need to test and support baselines across multiple O/S versions.
('retina', 'x86'),
('mac10.9', 'x86'),
('mac10.10', 'x86'),
('mac10.11', 'x86'),
('win7', 'x86'),
('win10', 'x86'),
('precise', 'x86_64'),
('trusty', 'x86_64'),
# FIXME: Technically this should be 'arm', but adding a third architecture type breaks TestConfigurationConverter.
# If we need this to be 'arm' in the future, then we first have to fix TestConfigurationConverter.
('icecreamsandwich', 'x86'),
)
CONFIGURATION_SPECIFIER_MACROS = {
'mac': ['retina', 'mac10.9', 'mac10.10', 'mac10.11'],
'win': ['win7', 'win10'],
'linux': ['precise', 'trusty'],
'android': ['icecreamsandwich'],
}
DEFAULT_BUILD_DIRECTORIES = ('out',)
# overridden in subclasses.
FALLBACK_PATHS = {}
SUPPORTED_VERSIONS = []
# URL to the build requirements page.
BUILD_REQUIREMENTS_URL = ''
@classmethod
def latest_platform_fallback_path(cls):
return cls.FALLBACK_PATHS[cls.SUPPORTED_VERSIONS[-1]]
    @classmethod
    def _static_build_path(cls, filesystem, build_directory, chromium_base, target, comps):
        """Join *comps* onto the build output directory for *target*.

        If *build_directory* is given it is used directly; otherwise each
        candidate in DEFAULT_BUILD_DIRECTORIES is probed and the most
        recently modified existing path wins.
        """
        if build_directory:
            return filesystem.join(build_directory, target, *comps)

        hits = []
        for directory in cls.DEFAULT_BUILD_DIRECTORIES:
            base_dir = filesystem.join(chromium_base, directory, target)
            path = filesystem.join(base_dir, *comps)
            if filesystem.exists(path):
                hits.append((filesystem.mtime(path), path))

        if hits:
            hits.sort(reverse=True)
            return hits[0][1] # Return the newest file found.

        # We have to default to something, so pick the last one.
        # NOTE: 'base_dir' is the loop variable left over from the probe
        # above, so this deliberately falls back to the last candidate.
        return filesystem.join(base_dir, *comps)
    @classmethod
    def determine_full_port_name(cls, host, options, port_name):
        """Return a fully-specified port name that can be used to construct objects.

        The given *port_name* must start with this class's base port_name.
        """
        # Subclasses will usually override this.
        assert port_name.startswith(cls.port_name)
        return port_name
    def __init__(self, host, port_name, options=None, **kwargs):
        """Bind this Port to *host* (provides executive/filesystem) under
        the fully-specified *port_name*, with optional parsed *options*.

        Sets defaults for 'configuration' and 'target' options if the
        caller did not supply them.
        """
        # This value may be different from cls.port_name by having version modifiers
        # and other fields appended to it (for example, 'qt-arm' or 'mac-wk2').
        self._name = port_name

        # These are default values that should be overridden in subclasses.
        self._version = ''
        self._architecture = 'x86'

        # FIXME: Ideally we'd have a package-wide way to get a
        # well-formed options object that had all of the necessary
        # options defined on it.
        self._options = options or optparse.Values()

        self.host = host
        self._executive = host.executive
        self._filesystem = host.filesystem
        self._webkit_finder = WebKitFinder(host.filesystem)
        self._config = port_config.Config(self._executive, self._filesystem, self.port_name)

        self._helper = None
        self._http_server = None
        self._websocket_server = None
        self._is_wpt_enabled = hasattr(options, 'enable_wptserve') and options.enable_wptserve
        self._wpt_server = None
        self._image_differ = None
        self._server_process_constructor = server_process.ServerProcess  # overridable for testing
        self._http_lock = None  # FIXME: Why does this live on the port object?
        self._dump_reader = None

        # Python's Popen has a bug that causes any pipes opened to a
        # process that can't be executed to be leaked.  Since this
        # code is specifically designed to tolerate exec failures
        # to gracefully handle cases where wdiff is not installed,
        # the bug results in a massive file descriptor leak. As a
        # workaround, if an exec failure is ever experienced for
        # wdiff, assume it's not available.  This will leak one
        # file descriptor but that's better than leaking each time
        # wdiff would be run.
        #
        # http://mail.python.org/pipermail/python-list/
        # 2008-August/505753.html
        # http://bugs.python.org/issue3210
        self._wdiff_available = None

        # FIXME: prettypatch.py knows this path, why is it copied here?
        self._pretty_patch_path = self.path_from_webkit_base("Tools", "Scripts", "webkitruby", "PrettyPatch", "prettify.rb")
        self._pretty_patch_available = None

        if not hasattr(options, 'configuration') or not options.configuration:
            self.set_option_default('configuration', self.default_configuration())
        if not hasattr(options, 'target') or not options.target:
            self.set_option_default('target', self._options.configuration)
        self._test_configuration = None
        self._reftest_list = {}
        self._results_directory = None
        self._virtual_test_suites = None
def __str__(self):
return "Port{name=%s, version=%s, architecture=%s, test_configuration=%s}" % (self._name, self._version, self._architecture, self._test_configuration)
def buildbot_archives_baselines(self):
return True
def additional_driver_flag(self):
if self.driver_name() == self.CONTENT_SHELL_NAME:
return ['--run-layout-test']
return []
def supports_per_test_timeout(self):
return False
def default_pixel_tests(self):
return True
def default_smoke_test_only(self):
return False
def default_timeout_ms(self):
    """Default per-test timeout in milliseconds (tripled for Debug builds)."""
    timeout_ms = 6 * 1000
    if self.get_option('configuration') == 'Debug':
        # Debug is usually 2x-3x slower than Release.
        return 3 * timeout_ms
    return timeout_ms

def driver_stop_timeout(self):
    """ Returns the amount of time in seconds to wait before killing the process in driver.stop()."""
    # We want to wait for at least 3 seconds, but if we are really slow, we want to be slow on cleanup as
    # well (for things like ASAN, Valgrind, etc.)
    # NOTE(review): this scales 3s by time_out_ms / default_timeout_ms; if the
    # 'time_out_ms' option is unset the result is 0.0, not "at least 3 seconds"
    # as the comment above suggests — confirm callers always set the option.
    return 3.0 * float(self.get_option('time_out_ms', '0')) / self.default_timeout_ms()

def wdiff_available(self):
    # Lazily computed and cached so check_wdiff() runs at most once.
    if self._wdiff_available is None:
        self._wdiff_available = self.check_wdiff(logging=False)
    return self._wdiff_available

def pretty_patch_available(self):
    # Lazily computed and cached, mirroring wdiff_available().
    if self._pretty_patch_available is None:
        self._pretty_patch_available = self.check_pretty_patch(logging=False)
    return self._pretty_patch_available
def default_batch_size(self):
    """Return the default batch size to use for this port."""
    if self.get_option('enable_sanitizer'):
        # ASAN/MSAN/TSAN use more memory than regular content_shell. Their
        # memory usage may also grow over time, up to a certain point.
        # Relaunching the driver periodically helps keep it under control.
        return 40
    # The default is infinite batch size (None means "no limit").
    return None
def default_child_processes(self):
    """Return the number of child processes to use for this port."""
    # One worker per CPU by default.
    return self._executive.cpu_count()

def max_drivers_per_process(self):
    """The maximum number of drivers a child process can use for this port."""
    return 2
def default_max_locked_shards(self):
    """Return the number of "locked" shards to run in parallel (like the http tests).

    One locked shard per four child processes, with a floor of 1."""
    # Use floor division (//) so the result stays an int: plain '/' would
    # produce a float on Python 3 while behaving identically under Python 2.
    max_locked_shards = int(self.default_child_processes()) // 4
    if not max_locked_shards:
        return 1
    return max_locked_shards
def baseline_path(self):
    """Return the absolute path to the directory to store new baselines in for this port."""
    # FIXME: remove once all callers are calling either baseline_version_dir() or baseline_platform_dir()
    return self.baseline_version_dir()

def baseline_platform_dir(self):
    """Return the absolute path to the default (version-independent) platform-specific results."""
    return self._filesystem.join(self.layout_tests_dir(), 'platform', self.port_name)

def baseline_version_dir(self):
    """Return the absolute path to the platform-and-version-specific results."""
    # The first entry in the search path is the most specific directory.
    baseline_search_paths = self.baseline_search_path()
    return baseline_search_paths[0]

def virtual_baseline_search_path(self, test_name):
    """Baseline search path for a virtual test, or None if test_name is not virtual."""
    suite = self.lookup_virtual_suite(test_name)
    if not suite:
        return None
    # Append the suite name to each default search directory.
    return [self._filesystem.join(path, suite.name) for path in self.default_baseline_search_path()]

def baseline_search_path(self):
    """Full ordered baseline search path: extra dirs, compare-port dirs, then defaults."""
    return self.get_option('additional_platform_directory', []) + self._compare_baseline() + self.default_baseline_search_path()
def default_baseline_search_path(self):
    """Return a list of absolute paths to directories to search under for
    baselines. The directories are searched in order."""
    # Use a list comprehension instead of map(): callers index the result
    # (baseline_version_dir() takes [0]), and map() returns a lazy,
    # non-indexable iterator on Python 3. Identical behavior on Python 2.
    return [self._webkit_baseline_path(version) for version in self.FALLBACK_PATHS[self.version()]]
@memoized
def _compare_baseline(self):
    """Baseline search path of the --compare-port target, or [] if none is set.

    Memoized because PortFactory construction and port lookup are not free."""
    factory = PortFactory(self.host)
    target_port = self.get_option('compare_port')
    if target_port:
        return factory.get(target_port).default_baseline_search_path()
    return []
def _check_file_exists(self, path_to_file, file_description,
                       override_step=None, logging=True):
    """Verify the file is present where expected or log an error.

    Args:
        path_to_file: absolute path of the file to check.
        file_description: The (human friendly) name or description of the file
            you're looking for (e.g., "HTTP Server"). Used for error logging.
        override_step: An optional string to be logged if the check fails.
        logging: Whether or not log the error messages.
    Returns True if the file exists, False otherwise."""
    if not self._filesystem.exists(path_to_file):
        if logging:
            _log.error('Unable to find %s' % file_description)
            _log.error('    at %s' % path_to_file)
            if override_step:
                _log.error('    %s' % override_step)
                _log.error('')
        return False
    return True
def check_build(self, needs_http, printer):
    """Verify the build products needed to run tests exist.

    Checks the driver binary, optionally the build freshness, the layout
    test helper, image_diff (when pixel tests are on), the crash dump
    reader, and httpd (when needs_http). Returns an exit-status constant
    from test_run_results."""
    result = True
    dump_render_tree_binary_path = self._path_to_driver()
    result = self._check_file_exists(dump_render_tree_binary_path,
                                     'test driver') and result
    if not result and self.get_option('build'):
        result = self._check_driver_build_up_to_date(
            self.get_option('configuration'))
    else:
        # NOTE(review): this branch also runs when the driver check passed,
        # emitting a blank error line on every successful check — confirm
        # whether the condition above was meant to be inverted.
        _log.error('')
    helper_path = self._path_to_helper()
    if helper_path:
        result = self._check_file_exists(helper_path,
                                         'layout test helper') and result
    if self.get_option('pixel_tests'):
        result = self.check_image_diff(
            'To override, invoke with --no-pixel-tests') and result
    # It's okay if pretty patch and wdiff aren't available, but we will at least log messages.
    self._pretty_patch_available = self.check_pretty_patch()
    self._wdiff_available = self.check_wdiff()
    if self._dump_reader:
        result = self._dump_reader.check_is_functional() and result
    if needs_http:
        result = self.check_httpd() and result
    return test_run_results.OK_EXIT_STATUS if result else test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
def _check_driver(self):
    """Return True if the driver binary exists, logging an error otherwise."""
    driver_path = self._path_to_driver()
    if not self._filesystem.exists(driver_path):
        _log.error("%s was not found at %s" % (self.driver_name(), driver_path))
        return False
    return True

def _check_port_build(self):
    # Ports can override this method to do additional checks.
    return True
def check_sys_deps(self, needs_http):
    """If the port needs to do some runtime checks to ensure that the
    tests can be run successfully, it should override this routine.
    This step can be skipped with --nocheck-sys-deps.

    Runs the driver with --check-layout-test-sys-deps and reports its
    output on failure. Returns an exit-status constant from
    test_run_results."""
    cmd = [self._path_to_driver(), '--check-layout-test-sys-deps']
    local_error = ScriptError()

    def error_handler(script_error):
        # Capture the driver's exit code; run_command would otherwise
        # discard it along with the raised ScriptError.
        local_error.exit_code = script_error.exit_code
    output = self._executive.run_command(cmd, error_handler=error_handler)
    if local_error.exit_code:
        _log.error('System dependencies check failed.')
        _log.error('To override, invoke with --nocheck-sys-deps')
        _log.error('')
        _log.error(output)
        # Bug fix: compare string contents with '!=', not identity with
        # 'is not' — identity of string literals is a CPython interning
        # detail and not a reliable emptiness test.
        if self.BUILD_REQUIREMENTS_URL != '':
            _log.error('')
            _log.error('For complete build requirements, please see:')
            _log.error(self.BUILD_REQUIREMENTS_URL)
        return test_run_results.SYS_DEPS_EXIT_STATUS
    return test_run_results.OK_EXIT_STATUS
def check_image_diff(self, override_step=None, logging=True):
    """This routine is used to check whether image_diff binary exists.

    NOTE(review): override_step and logging are currently unused here;
    they are kept for signature compatibility with callers and the other
    check_* helpers."""
    image_diff_path = self._path_to_image_diff()
    if not self._filesystem.exists(image_diff_path):
        _log.error("image_diff was not found at %s" % image_diff_path)
        return False
    return True
def check_pretty_patch(self, logging=True):
    """Checks whether we can use the PrettyPatch ruby script.

    Requires a runnable 'ruby' binary and the prettify.rb script located
    during __init__. Returns True when both are available."""
    try:
        _ = self._executive.run_command(['ruby', '--version'])
    except OSError as e:
        # Modernized to 'except ... as ...' (valid since Python 2.6) from
        # the Python-2-only comma form; behavior is unchanged.
        if e.errno in [errno.ENOENT, errno.EACCES, errno.ECHILD]:
            if logging:
                _log.warning("Ruby is not installed; can't generate pretty patches.")
                _log.warning('')
            return False
    if not self._filesystem.exists(self._pretty_patch_path):
        if logging:
            _log.warning("Unable to find %s; can't generate pretty patches." % self._pretty_patch_path)
            _log.warning('')
        return False
    return True
def check_wdiff(self, logging=True):
    """Return True if the wdiff binary can be run, optionally logging why not."""
    if not self._path_to_wdiff():
        # Don't need to log here since this is the port choosing not to use wdiff.
        return False
    try:
        # Probe the binary; we only care whether it launches.
        _ = self._executive.run_command([self._path_to_wdiff(), '--help'])
    except OSError:
        if logging:
            message = self._wdiff_missing_message()
            if message:
                for line in message.splitlines():
                    _log.warning('    ' + line)
                    _log.warning('')
        return False
    return True
def _wdiff_missing_message(self):
return 'wdiff is not installed; please install it to generate word-by-word diffs.'
def check_httpd(self):
    """Return True if a working httpd (Apache) binary is available."""
    httpd_path = self.path_to_apache()
    if httpd_path:
        try:
            server_name = self._filesystem.basename(httpd_path)
            env = self.setup_environ_for_server(server_name)
            # 'httpd -v' exits 0 when the binary is runnable.
            if self._executive.run_command([httpd_path, "-v"], env=env, return_exit_code=True) != 0:
                _log.error("httpd seems broken. Cannot run http tests.")
                return False
            return True
        except OSError:
            # Fall through to the "not found" error below.
            pass
    _log.error("No httpd found. Cannot run http tests.")
    return False
def do_text_results_differ(self, expected_text, actual_text):
    """Return True when expected and actual text outputs are not equal."""
    return not (expected_text == actual_text)

def do_audio_results_differ(self, expected_audio, actual_audio):
    """Return True when expected and actual audio outputs are not equal."""
    return not (expected_audio == actual_audio)
def diff_image(self, expected_contents, actual_contents):
    """Compare two images and return a tuple of (image diff, error string).

    If an error occurs (like image_diff isn't found, or crashes), we log an
    error and return it as the second tuple element.
    """
    # If only one of them exists, return that one.
    if not actual_contents and not expected_contents:
        return (None, None)
    if not actual_contents:
        return (expected_contents, None)
    if not expected_contents:
        return (actual_contents, None)
    # Write both images to a temp dir so the external image_diff tool can
    # read them; the dir is removed in the finally block below.
    tempdir = self._filesystem.mkdtemp()
    expected_filename = self._filesystem.join(str(tempdir), "expected.png")
    self._filesystem.write_binary_file(expected_filename, expected_contents)
    actual_filename = self._filesystem.join(str(tempdir), "actual.png")
    self._filesystem.write_binary_file(actual_filename, actual_contents)
    diff_filename = self._filesystem.join(str(tempdir), "diff.png")
    # image_diff needs native win paths as arguments, so we need to convert them if running under cygwin.
    native_expected_filename = self._convert_path(expected_filename)
    native_actual_filename = self._convert_path(actual_filename)
    native_diff_filename = self._convert_path(diff_filename)
    executable = self._path_to_image_diff()
    # Note that although we are handed 'old', 'new', image_diff wants 'new', 'old'.
    # (Typo fix: renamed local 'comand' -> 'command'.)
    command = [executable, '--diff', native_actual_filename, native_expected_filename, native_diff_filename]
    result = None
    err_str = None
    try:
        exit_code = self._executive.run_command(command, return_exit_code=True)
        if exit_code == 0:
            # The images are the same.
            result = None
        elif exit_code == 1:
            # Exit code 1 means a diff was produced; read it back.
            result = self._filesystem.read_binary_file(native_diff_filename)
        else:
            err_str = "Image diff returned an exit code of %s. See http://crbug.com/278596" % exit_code
    except OSError as e:
        # 'as' form replaces the Python-2-only comma syntax; same behavior.
        err_str = 'error running image diff: %s' % str(e)
    finally:
        self._filesystem.rmtree(str(tempdir))
    return (result, err_str or None)
def diff_text(self, expected_text, actual_text, expected_filename, actual_filename):
    """Returns a string containing the diff of the two text strings
    in 'unified diff' format."""
    # The filenames show up in the diff output, make sure they're
    # raw bytes and not unicode, so that they don't trigger join()
    # trying to decode the input.
    # (Python 2: 'unicode' is the builtin text type here.)
    def to_raw_bytes(string_value):
        if isinstance(string_value, unicode):
            return string_value.encode('utf-8')
        return string_value
    expected_filename = to_raw_bytes(expected_filename)
    actual_filename = to_raw_bytes(actual_filename)
    diff = difflib.unified_diff(expected_text.splitlines(True),
                                actual_text.splitlines(True),
                                expected_filename,
                                actual_filename)

    # The diff generated by the difflib is incorrect if one of the files
    # does not have a newline at the end of the file and it is present in
    # the diff. Relevant Python issue: http://bugs.python.org/issue2142
    def diff_fixup(diff):
        for line in diff:
            yield line
            if not line.endswith('\n'):
                # Emit the canonical "no newline" marker after the short line.
                yield '\n\ No newline at end of file\n'
    return ''.join(diff_fixup(diff))
def driver_name(self):
    """Name of the test driver binary; honors the --driver-name option."""
    # Fall back to the default content shell name when the option is unset.
    return self.get_option('driver_name') or self.CONTENT_SHELL_NAME
def expected_baselines_by_extension(self, test_name):
    """Returns a dict mapping baseline suffix to relative path for each baseline in
    a test. For reftests, it returns ".==" or ".!=" instead of the suffix."""
    # FIXME: The name similarity between this and expected_baselines() below, is unfortunate.
    # We should probably rename them both.
    baseline_dict = {}
    reference_files = self.reference_files(test_name)
    if reference_files:
        # FIXME: How should this handle more than one type of reftest?
        # Only the first reference is recorded, keyed by '.==' or '.!='.
        baseline_dict['.' + reference_files[0][0]] = self.relative_test_filename(reference_files[0][1])
    for extension in self.baseline_extensions():
        path = self.expected_filename(test_name, extension, return_default=False)
        # Missing baselines are recorded as None (path is None then).
        baseline_dict[extension] = self.relative_test_filename(path) if path else path
    return baseline_dict
def baseline_extensions(self):
    """Returns a tuple of all of the non-reftest baseline extensions we use. The extensions include the leading '.'."""
    # Audio, text and pixel baselines, in that order.
    return ('.wav', '.txt', '.png')
def expected_baselines(self, test_name, suffix, all_baselines=False):
    """Given a test name, finds where the baseline results are located.

    Args:
        test_name: name of test file (usually a relative path under LayoutTests/)
        suffix: file suffix of the expected results, including dot; e.g.
            '.txt' or '.png'.  This should not be None, but may be an empty
            string.
        all_baselines: If True, return an ordered list of all baseline paths
            for the given platform. If False, return only the first one.
    Returns
        a list of ( platform_dir, results_filename ), where
            platform_dir - abs path to the top of the results tree (or test
                tree)
            results_filename - relative path from top of tree to the results
                file
            (port.join() of the two gives you the full path to the file,
                unless None was returned.)
        Return values will be in the format appropriate for the current
        platform (e.g., "\\" for path separators on Windows). If the results
        file is not found, then None will be returned for the directory,
        but the expected relative pathname will still be returned.

        This routine is generic but lives here since it is used in
        conjunction with the other baseline and filename routines that are
        platform specific.
    """
    baseline_filename = self._filesystem.splitext(test_name)[0] + '-expected' + suffix
    baseline_search_path = self.baseline_search_path()
    baselines = []
    for platform_dir in baseline_search_path:
        if self._filesystem.exists(self._filesystem.join(platform_dir, baseline_filename)):
            baselines.append((platform_dir, baseline_filename))
        # Stop at the first (most specific) hit unless all were requested.
        if not all_baselines and baselines:
            return baselines

    # If it wasn't found in a platform directory, return the expected
    # result in the test directory, even if no such file actually exists.
    platform_dir = self.layout_tests_dir()
    if self._filesystem.exists(self._filesystem.join(platform_dir, baseline_filename)):
        baselines.append((platform_dir, baseline_filename))
    if baselines:
        return baselines
    # Nothing on disk: signal "not found" with a None directory.
    return [(None, baseline_filename)]
def expected_filename(self, test_name, suffix, return_default=True):
    """Given a test name, returns an absolute path to its expected results.

    If no expected results are found in any of the searched directories,
    the directory in which the test itself is located will be returned.
    The return value is in the format appropriate for the platform
    (e.g., "\\" for path separators on windows).

    Args:
        test_name: name of test file (usually a relative path under LayoutTests/)
        suffix: file suffix of the expected results, including dot; e.g. '.txt'
            or '.png'.  This should not be None, but may be an empty string.
        return_default: if True, returns the path to the generic expectation if nothing
            else is found; if False, returns None.

    This routine is generic but is implemented here to live alongside
    the other baseline and filename manipulation routines.
    """
    # FIXME: The [0] here is very mysterious, as is the destructured return.
    # (expected_baselines() returns at least one (platform_dir, filename) pair.)
    platform_dir, baseline_filename = self.expected_baselines(test_name, suffix)[0]
    if platform_dir:
        return self._filesystem.join(platform_dir, baseline_filename)

    # For virtual tests, fall back to the underlying base test's baseline.
    actual_test_name = self.lookup_virtual_test_base(test_name)
    if actual_test_name:
        return self.expected_filename(actual_test_name, suffix)

    if return_default:
        return self._filesystem.join(self.layout_tests_dir(), baseline_filename)
    return None
def expected_checksum(self, test_name):
    """Returns the checksum of the image we expect the test to produce, or None if it is a text-only test."""
    png_path = self.expected_filename(test_name, '.png')
    if self._filesystem.exists(png_path):
        with self._filesystem.open_binary_file_for_reading(png_path) as filehandle:
            # The checksum is embedded in the PNG itself.
            return read_checksum_from_png.read_checksum(filehandle)
    return None

def expected_image(self, test_name):
    """Returns the image we expect the test to produce, or None if there is no baseline."""
    baseline_path = self.expected_filename(test_name, '.png')
    if not self._filesystem.exists(baseline_path):
        return None
    return self._filesystem.read_binary_file(baseline_path)

def expected_audio(self, test_name):
    """Returns the audio baseline bytes for the test, or None if there is none."""
    baseline_path = self.expected_filename(test_name, '.wav')
    if not self._filesystem.exists(baseline_path):
        return None
    return self._filesystem.read_binary_file(baseline_path)
def expected_text(self, test_name):
    """Returns the text output we expect the test to produce, or None
    if we don't expect there to be any text output.
    End-of-line characters are normalized to '\n'."""
    # FIXME: DRT output is actually utf-8, but since we don't decode the
    # output from DRT (instead treating it as a binary string), we read the
    # baselines as a binary string, too.
    baseline_path = self.expected_filename(test_name, '.txt')
    if not self._filesystem.exists(baseline_path):
        return None
    text = self._filesystem.read_binary_file(baseline_path)
    # Normalize Windows line endings.
    return text.replace("\r\n", "\n")
def _get_reftest_list(self, test_name):
    """Parsed reftest.list for the test's directory, cached per directory."""
    dirname = self._filesystem.join(self.layout_tests_dir(), self._filesystem.dirname(test_name))
    if dirname not in self._reftest_list:
        # Cache the parse; reference_files() calls this per test.
        self._reftest_list[dirname] = Port._parse_reftest_list(self._filesystem, dirname)
    return self._reftest_list[dirname]
@staticmethod
def _parse_reftest_list(filesystem, test_dirpath):
    """Parse test_dirpath/reftest.list into a dict mapping absolute test
    paths to lists of (expectation_type, absolute reference path) pairs.

    Returns None when the directory has no reftest.list file."""
    reftest_list_path = filesystem.join(test_dirpath, 'reftest.list')
    if not filesystem.isfile(reftest_list_path):
        return None
    reftest_list_file = filesystem.read_text_file(reftest_list_path)
    parsed_list = {}
    for line in reftest_list_file.split('\n'):
        # Strip trailing comments (raw string avoids the escape warning).
        line = re.sub(r'#.+$', '', line)
        split_line = line.split()
        if len(split_line) > 3:
            # FIXME: Probably one of mozilla's extensions in the reftest.list format. Do we need to support this?
            # Robustness fix: warn on ANY line with more than three tokens.
            # The original only matched exactly four and crashed with a
            # ValueError on the three-way unpack below for five or more.
            _log.warning("unsupported reftest.list line '%s' in %s" % (line, reftest_list_path))
            continue
        if len(split_line) < 3:
            continue
        expectation_type, test_file, ref_file = split_line
        parsed_list.setdefault(filesystem.join(test_dirpath, test_file), []).append(
            (expectation_type, filesystem.join(test_dirpath, ref_file)))
    return parsed_list
def reference_files(self, test_name):
    """Return a list of expectation (== or !=) and filename pairs"""
    reftest_list = self._get_reftest_list(test_name)
    if not reftest_list:
        # No reftest.list: fall back to filename-convention references
        # (foo-expected.html / foo-expected-mismatch.html).
        reftest_list = []
        for expectation, prefix in (('==', ''), ('!=', '-mismatch')):
            for extension in Port._supported_file_extensions:
                path = self.expected_filename(test_name, prefix + extension)
                if self._filesystem.exists(path):
                    reftest_list.append((expectation, path))
        return reftest_list
    return reftest_list.get(self._filesystem.join(self.layout_tests_dir(), test_name), [])  # pylint: disable=E1103
def tests(self, paths):
    """Return the list of tests found matching paths."""
    tests = self._real_tests(paths)
    suites = self.virtual_test_suites()
    if paths:
        # Only virtual tests under the requested paths.
        tests.extend(self._virtual_tests_matching_paths(paths, suites))
    else:
        tests.extend(self._all_virtual_tests(suites))
    return tests

def _real_tests(self, paths):
    """Find on-disk (non-virtual) tests under paths, as relative test names."""
    # When collecting test cases, skip these directories
    skipped_directories = set(['.svn', '_svn', 'platform', 'resources', 'support', 'script-tests', 'reference', 'reftest'])
    files = find_files.find(self._filesystem, self.layout_tests_dir(), paths,
                            skipped_directories, Port.is_test_file, self.test_key)
    return [self.relative_test_filename(f) for f in files]

# When collecting test cases, we include any file with these extensions.
_supported_file_extensions = set(['.html', '.xml', '.xhtml', '.xht', '.pl',
                                  '.htm', '.php', '.svg', '.mht', '.pdf'])
@staticmethod
# If any changes are made here be sure to update the isUsedInReftest method in old-run-webkit-tests as well.
def is_reference_html_file(filesystem, dirname, filename):
if filename.startswith('ref-') or filename.startswith('notref-'):
return True
filename_wihout_ext, unused = filesystem.splitext(filename)
for suffix in ['-expected', '-expected-mismatch', '-ref', '-notref']:
if filename_wihout_ext.endswith(suffix):
return True
return False
@staticmethod
def _has_supported_extension(filesystem, filename):
    """Return true if filename is one of the file extensions we want to run a test on."""
    extension = filesystem.splitext(filename)[1]
    return extension in Port._supported_file_extensions

@staticmethod
def is_test_file(filesystem, dirname, filename):
    """A test file is any supported-extension file that is not a reftest reference."""
    return Port._has_supported_extension(filesystem, filename) and not Port.is_reference_html_file(filesystem, dirname, filename)

# All classifications test_type() below can return.
ALL_TEST_TYPES = ['audio', 'harness', 'pixel', 'ref', 'text', 'unknown']
def test_type(self, test_name):
    """Classify a test by its baselines: one of ALL_TEST_TYPES."""
    fs = self._filesystem
    if fs.exists(self.expected_filename(test_name, '.png')):
        return 'pixel'
    if fs.exists(self.expected_filename(test_name, '.wav')):
        return 'audio'
    if self.reference_files(test_name):
        return 'ref'
    txt = self.expected_text(test_name)
    if txt:
        # A render-tree dump in the text baseline means a pixel test.
        if 'layer at (0,0) size 800x600' in txt:
            return 'pixel'
        # testharness.js output starts lines with PASS/FAIL/TIMEOUT.
        for line in txt.splitlines():
            if line.startswith('FAIL') or line.startswith('TIMEOUT') or line.startswith('PASS'):
                return 'harness'
        return 'text'
    return 'unknown'
def test_key(self, test_name):
    """Turns a test name into a list with two sublists, the natural key of the
    dirname, and the natural key of the basename.

    This can be used when sorting paths so that files in a directory are
    kept together rather than being mixed in with files in subdirectories."""
    dirname, basename = self.split_test(test_name)
    return (self._natural_sort_key(dirname + self.TEST_PATH_SEPARATOR), self._natural_sort_key(basename))
def _natural_sort_key(self, string_to_split):
""" Turns a string into a list of string and number chunks, i.e. "z23a" -> ["z", 23, "a"]
This can be used to implement "natural sort" order. See:
http://www.codinghorror.com/blog/2007/12/sorting-for-humans-natural-sort-order.html
http://nedbatchelder.com/blog/200712.html#e20071211T054956
"""
def tryint(val):
try:
return int(val)
except ValueError:
return val
return [tryint(chunk) for chunk in re.split('(\d+)', string_to_split)]
def test_dirs(self):
"""Returns the list of top-level test directories."""
layout_tests_dir = self.layout_tests_dir()
return filter(lambda x: self._filesystem.isdir(self._filesystem.join(layout_tests_dir, x)),
self._filesystem.listdir(layout_tests_dir))
@memoized
def test_isfile(self, test_name):
    """Return True if the test name refers to an existing test file."""
    # Used by test_expectations.py to apply rules to individual tests.
    # (Docstring fixed: this checks for a file, not a directory.)
    if self._filesystem.isfile(self.abspath_for_test(test_name)):
        return True
    # Virtual tests exist if their underlying base test exists.
    base = self.lookup_virtual_test_base(test_name)
    return base and self._filesystem.isfile(self.abspath_for_test(base))

@memoized
def test_isdir(self, test_name):
    """Return True if the test name refers to a directory of tests."""
    # Used by test_expectations.py to apply rules to whole directories.
    if self._filesystem.isdir(self.abspath_for_test(test_name)):
        return True
    base = self.lookup_virtual_test_base(test_name)
    return base and self._filesystem.isdir(self.abspath_for_test(base))

@memoized
def test_exists(self, test_name):
    """Return True if the test name refers to an existing test or baseline."""
    # Used by test_expectations.py to determine if an entry refers to a
    # valid test and by printing.py to determine if baselines exist.
    return self.test_isfile(test_name) or self.test_isdir(test_name)
def split_test(self, test_name):
    """Splits a test name into the 'directory' part and the 'basename' part.

    Note: the basename keeps its leading separator (historical behavior
    relied on by test_key())."""
    separator_index = test_name.rfind(self.TEST_PATH_SEPARATOR)
    if separator_index < 1:
        return ('', test_name)
    return (test_name[:separator_index], test_name[separator_index:])
def normalize_test_name(self, test_name):
    """Returns a normalized version of the test name or test directory.

    Directory names get a trailing '/' so they compare consistently."""
    if test_name.endswith('/'):
        return test_name
    if self.test_isdir(test_name):
        return test_name + '/'
    return test_name

def driver_cmd_line(self):
    """Returns the DRT command line that will be used (docstring fixed:
    this returns the list; it does not print it)."""
    driver = self.create_driver(0)
    return driver.cmd_line(self.get_option('pixel_tests'), [])
def update_baseline(self, baseline_path, data):
    """Updates the baseline for a test.

    Args:
        baseline_path: the actual path to use for baseline, not the path to
          the test. This function is used to update either generic or
          platform-specific baselines, but we can't infer which here.
        data: contents of the baseline.
    """
    self._filesystem.write_binary_file(baseline_path, data)
# FIXME: update callers to create a finder and call it instead of these next five routines (which should be protected).
# These are thin delegations to the WebKit finder / filesystem helpers.
def webkit_base(self):
    return self._webkit_finder.webkit_base()

def path_from_webkit_base(self, *comps):
    return self._webkit_finder.path_from_webkit_base(*comps)

def path_from_chromium_base(self, *comps):
    return self._webkit_finder.path_from_chromium_base(*comps)

def path_to_script(self, script_name):
    return self._webkit_finder.path_to_script(script_name)

def layout_tests_dir(self):
    return self._webkit_finder.layout_tests_dir()

def perf_tests_dir(self):
    return self._webkit_finder.perf_tests_dir()
def skipped_layout_tests(self, test_list):
    """Returns tests skipped outside of the TestExpectations files."""
    return set(self._skipped_tests_for_unsupported_features(test_list))
def _tests_from_skipped_file_contents(self, skipped_file_contents):
tests_to_skip = []
for line in skipped_file_contents.split('\n'):
line = line.strip()
line = line.rstrip('/') # Best to normalize directory names to not include the trailing slash.
if line.startswith('#') or not len(line):
continue
tests_to_skip.append(line)
return tests_to_skip
def _expectations_from_skipped_files(self, skipped_file_paths):
    """Collect tests to skip from the 'Skipped' file in each given search path."""
    tests_to_skip = []
    for search_path in skipped_file_paths:
        filename = self._filesystem.join(self._webkit_baseline_path(search_path), "Skipped")
        if not self._filesystem.exists(filename):
            _log.debug("Skipped does not exist: %s" % filename)
            continue
        _log.debug("Using Skipped file: %s" % filename)
        skipped_file_contents = self._filesystem.read_text_file(filename)
        tests_to_skip.extend(self._tests_from_skipped_file_contents(skipped_file_contents))
    return tests_to_skip

@memoized
def skipped_perf_tests(self):
    """Tests listed in the perf tests directory's Skipped file (cached)."""
    return self._expectations_from_skipped_files([self.perf_tests_dir()])
def skips_perf_test(self, test_name):
    """Return True if test_name is skipped, either directly or because a
    skipped entry names a directory containing it."""
    for test_or_category in self.skipped_perf_tests():
        if test_or_category == test_name:
            return True
        # An entry that is a directory skips everything under it.
        category = self._filesystem.join(self.perf_tests_dir(), test_or_category)
        if self._filesystem.isdir(category) and test_name.startswith(test_or_category):
            return True
    return False
def is_chromium(self):
    """All ports based on this class are Chromium ports; subclasses may override."""
    return True
def name(self):
    """Returns a name that uniquely identifies this particular type of port
    (e.g., "mac-snowleopard" or "linux-trusty") and can be passed
    to factory.get() to instantiate the port."""
    return self._name
def operating_system(self):
    """Return the OS family this port targets; subclasses override.

    The base-class default is 'mac'."""
    return 'mac'
def version(self):
    """Returns a string indicating the version of a given platform, e.g.
    'leopard' or 'win7'.

    This is used to help identify the exact port when parsing test
    expectations, determining search paths, and logging information."""
    return self._version

def architecture(self):
    # e.g. 'x86' / 'x86_64'; set during construction.
    return self._architecture

def get_option(self, name, default_value=None):
    """Read an option off the options object, with a default when unset."""
    return getattr(self._options, name, default_value)

def set_option_default(self, name, default_value):
    """Set an option only if it does not already have a value."""
    return self._options.ensure_value(name, default_value)
@memoized
def path_to_generic_test_expectations_file(self):
    """Absolute path to the port-independent TestExpectations file (cached)."""
    return self._filesystem.join(self.layout_tests_dir(), 'TestExpectations')
def relative_test_filename(self, filename):
    """Returns a relative unix-style path for a filename under the LayoutTests
    directory. Ports may legitimately return abspaths here if no relpath makes sense."""
    # Ports that run on windows need to override this method to deal with
    # filenames with backslashes in them.
    if filename.startswith(self.layout_tests_dir()):
        return self.host.filesystem.relpath(filename, self.layout_tests_dir())
    else:
        return self.host.filesystem.abspath(filename)

@memoized
def abspath_for_test(self, test_name):
    """Returns the full path to the file for a given test name. This is the
    inverse of relative_test_filename()."""
    return self._filesystem.join(self.layout_tests_dir(), test_name)
def results_directory(self):
    """Absolute path to the place to store the test results (uses --results-directory)."""
    # Computed lazily and cached on the instance.
    if not self._results_directory:
        option_val = self.get_option('results_directory') or self.default_results_directory()
        self._results_directory = self._filesystem.abspath(option_val)
    return self._results_directory

def bot_test_times_path(self):
    """Path to the bot's recorded per-test timing data."""
    return self._build_path('webkit_test_times', 'bot_times_ms.json')

def perf_results_directory(self):
    """Directory where performance test results are written (the build dir)."""
    return self._build_path()

def inspector_build_directory(self):
    """Directory containing the built DevTools inspector resources."""
    return self._build_path('resources', 'inspector')

def default_results_directory(self):
    """Absolute path to the default place to store the test results."""
    try:
        return self.path_from_chromium_base('out', self.get_option('configuration'), 'layout-test-results')
    except AssertionError:
        # Not a Chromium checkout layout; fall back to the build dir.
        return self._build_path('layout-test-results')
def setup_test_run(self):
    """Perform port-specific work at the beginning of a test run."""
    # Delete the disk cache if any to ensure a clean test run.
    dump_render_tree_binary_path = self._path_to_driver()
    cachedir = self._filesystem.dirname(dump_render_tree_binary_path)
    cachedir = self._filesystem.join(cachedir, "cache")
    if self._filesystem.exists(cachedir):
        self._filesystem.rmtree(cachedir)

    if self._dump_reader:
        # Ensure the crash-dump destination exists before tests start.
        self._filesystem.maybe_make_directory(self._dump_reader.crash_dumps_directory())
def num_workers(self, requested_num_workers):
    """Returns the number of available workers (possibly less than the number requested)."""
    # The base port imposes no cap; subclasses may clamp this.
    return requested_num_workers
def clean_up_test_run(self):
    """Perform port-specific work at the end of a test run."""
    if self._image_differ:
        # Shut down the long-lived image_diff process.
        self._image_differ.stop()
        self._image_differ = None
# FIXME: os.environ access should be moved to onto a common/system class to be more easily mockable.
def _value_or_default_from_environ(self, name, default=None):
if name in os.environ:
return os.environ[name]
return default
def _copy_value_from_environ_if_set(self, clean_env, name):
if name in os.environ:
clean_env[name] = os.environ[name]
def setup_environ_for_server(self, server_name=None):
    """Build the minimal environment dict used to launch test servers/drivers.

    We intentionally copy only a subset of os.environ when launching
    subprocesses to ensure consistent test results. server_name is accepted
    for subclass overrides; it is not used in this base implementation."""
    clean_env = {
        'LOCAL_RESOURCE_ROOT': self.layout_tests_dir(),  # FIXME: Is this used?
    }
    variables_to_copy = [
        'WEBKIT_TESTFONTS',  # FIXME: Is this still used?
        'WEBKITOUTPUTDIR',   # FIXME: Is this still used?
        'CHROME_DEVEL_SANDBOX',
        'CHROME_IPC_LOGGING',
        'ASAN_OPTIONS',
        'TSAN_OPTIONS',
        'MSAN_OPTIONS',
        'LSAN_OPTIONS',
        'UBSAN_OPTIONS',
        'VALGRIND_LIB',
        'VALGRIND_LIB_INNER',
    ]
    if self.host.platform.is_linux() or self.host.platform.is_freebsd():
        # X11/D-Bus plumbing needed to launch GUI processes on these OSes.
        variables_to_copy += [
            'XAUTHORITY',
            'HOME',
            'LANG',
            'LD_LIBRARY_PATH',
            'DBUS_SESSION_BUS_ADDRESS',
            'XDG_DATA_DIRS',
        ]
        clean_env['DISPLAY'] = self._value_or_default_from_environ('DISPLAY', ':1')
    if self.host.platform.is_mac():
        clean_env['DYLD_LIBRARY_PATH'] = self._build_path()
        variables_to_copy += [
            'HOME',
        ]
    if self.host.platform.is_win():
        variables_to_copy += [
            'PATH',
            'GYP_DEFINES',  # Required to locate win sdk.
        ]
    if self.host.platform.is_cygwin():
        variables_to_copy += [
            'HOMEDRIVE',
            'HOMEPATH',
            '_NT_SYMBOL_PATH',
        ]

    for variable in variables_to_copy:
        self._copy_value_from_environ_if_set(clean_env, variable)

    # --additional-env-var entries of the form NAME=VALUE override/extend.
    for string_variable in self.get_option('additional_env_var', []):
        [name, value] = string_variable.split('=', 1)
        clean_env[name] = value

    return clean_env
def show_results_html_file(self, results_filename):
    """This routine should display the HTML file pointed at by
    results_filename in a user's browser."""
    return self.host.user.open_url(path.abspath_to_uri(self.host.platform, results_filename))

def create_driver(self, worker_number, no_timeout=False):
    """Return a newly created Driver subclass for starting/stopping the test driver."""
    return self._driver_class()(self, worker_number, pixel_tests=self.get_option('pixel_tests'), no_timeout=no_timeout)
def start_helper(self):
    """If a port needs to reconfigure graphics settings or do other
    things to ensure a known test configuration, it should override this
    method."""
    helper_path = self._path_to_helper()
    if helper_path:
        _log.debug("Starting layout helper %s" % helper_path)
        # Note: Not thread safe: http://bugs.python.org/issue2320
        self._helper = self._executive.popen([helper_path],
                                             stdin=self._executive.PIPE, stdout=self._executive.PIPE, stderr=None)
        # The helper prints a 'ready' line on stdout once initialized;
        # block until we see it (or log if startup failed).
        is_ready = self._helper.stdout.readline()
        if not is_ready.startswith('ready'):
            _log.error("layout_test_helper failed to be ready")
def requires_http_server(self):
    """Does the port require an HTTP server for running tests? This could
    be the case when the tests aren't run on the host platform.

    The base port never requires one; subclasses may override.
    """
    return False
def start_http_server(self, additional_dirs, number_of_drivers):
    """Start a web server. Raise an error if it can't start or is already running.

    Ports can stub this out if they don't need a web server to be running.
    """
    assert not self._http_server, 'Already running an http server.'
    # Scale the number of Apache server processes with the number of
    # concurrent test drivers.
    server = apache_http.ApacheHTTP(self, self.results_directory(),
                                    additional_dirs=additional_dirs,
                                    number_of_servers=(number_of_drivers * 4))
    server.start()
    self._http_server = server
def start_websocket_server(self):
    """Start a websocket server. Raise an error if it can't start or is already running.

    Ports can stub this out if they don't need a websocket server to be running.
    """
    assert not self._websocket_server, 'Already running a websocket server.'
    server = pywebsocket.PyWebSocket(self, self.results_directory())
    server.start()
    self._websocket_server = server
def is_wpt_enabled(self):
    """Used as a feature flag for the WPT Serve feature."""
    return self._is_wpt_enabled
def is_wpt_test(self, test):
    """Whether `test` is part of web-platform-tests (and so needs wptserve servers)."""
    return test.find("web-platform-tests") != -1
def start_wptserve(self):
    """Start a WPT web server. Raise an error if it can't start or is already running.

    Ports can stub this out if they don't need a WPT web server to be running.
    """
    assert not self._wpt_server, 'Already running an http server.'
    assert self.is_wpt_enabled(), 'Cannot start server if WPT is not enabled.'
    # We currently don't support any output mechanism for the WPT server.
    server = wptserve.WPTServe(self, self.results_directory())
    server.start()
    self._wpt_server = server
def stop_wptserve(self):
    """Shut down the WPT server if it is running; do nothing otherwise."""
    server = self._wpt_server
    if server:
        server.stop()
        self._wpt_server = None
def http_server_supports_ipv6(self):
    """Whether the port's HTTP server can bind IPv6.

    Apache < 2.4 on win32 does not support IPv6, nor does cygwin apache.
    """
    platform = self.host.platform
    return not (platform.is_cygwin() or platform.is_win())
def stop_helper(self):
    """Shut down the test helper if it is running.

    Does nothing if it isn't running, or isn't available. If a port
    overrides start_helper() it must override this routine as well.
    """
    if self._helper:
        _log.debug("Stopping layout test helper")
        try:
            # Ask the helper to exit cleanly, then reap it.
            self._helper.stdin.write("x\n")
            self._helper.stdin.close()
            self._helper.wait()
        # Fix: `except IOError, e` is Python-2-only syntax; the bound name
        # was also unused. The rest of this file uses the portable
        # `except ... as e` form (see wdiff_text).
        except IOError:
            # The helper may already have exited; we are done with it either way.
            pass
        finally:
            self._helper = None
def stop_http_server(self):
    """Shut down the http server if it is running; do nothing otherwise."""
    server = self._http_server
    if server:
        server.stop()
        self._http_server = None
def stop_websocket_server(self):
    """Shut down the websocket server if it is running; do nothing otherwise."""
    server = self._websocket_server
    if server:
        server.stop()
        self._websocket_server = None
#
# TEST EXPECTATION-RELATED METHODS
#
def test_configuration(self):
    """Return the current TestConfiguration for the port (created lazily and cached)."""
    if not self._test_configuration:
        self._test_configuration = TestConfiguration(self._version, self._architecture, self._options.configuration.lower())
    return self._test_configuration
# FIXME: Belongs on a Platform object.
@memoized
def all_test_configurations(self):
    """Return a list of TestConfiguration instances, representing all available
    test configurations for this port. Memoized, as the answer never changes
    within a run."""
    return self._generate_all_test_configurations()
# FIXME: Belongs on a Platform object.
def configuration_specifier_macros(self):
    """Ports may provide a way to abbreviate configuration specifiers to conveniently
    refer to them as one term or alias specific values to more generic ones. For example:

    (vista, win7) -> win # Abbreviate all Windows versions into one namesake.
    (precise, trusty) -> linux # Change specific name of Linux distro to a more generic term.

    Returns a dictionary, each key representing a macro term ('win', for example),
    and value being a list of valid configuration specifiers (such as ['vista', 'win7'])."""
    return self.CONFIGURATION_SPECIFIER_MACROS
def _generate_all_test_configurations(self):
    """Return a sequence of the TestConfigurations the port supports.

    By default, we assume we want to test every build type in every
    configuration on every system.
    """
    return [TestConfiguration(version, architecture, build_type)
            for version, architecture in self.ALL_SYSTEMS
            for build_type in self.ALL_BUILD_TYPES]
def warn_if_bug_missing_in_test_expectations(self):
    """Whether to warn when a TestExpectations line has no bug reference; always on here."""
    return True
def _port_specific_expectations_files(self):
    """Return the port-wide auxiliary expectations files, in cascade order."""
    names = ['NeverFixTests', 'StaleTestExpectations', 'SlowTests']
    if self._is_wpt_enabled:
        names.append('WPTServeExpectations')
    layout_tests_dir = self.layout_tests_dir()
    return [self._filesystem.join(layout_tests_dir, name) for name in names]
def _flag_specific_expectations_files(self):
    """Return one FlagExpectations file per --additional-driver-flag (leading dashes stripped)."""
    flags = self.get_option('additional_driver_flag', [])
    return [self._filesystem.join(self.layout_tests_dir(), 'FlagExpectations', flag.lstrip('-'))
            for flag in flags]
def expectations_dict(self):
    """Returns an OrderedDict of name -> expectations strings.

    The names are expected to be (but not required to be) paths in the filesystem.
    If the name is a path, the file can be considered updatable for things like rebaselining,
    so don't use names that are paths if they're not paths.

    Generally speaking the ordering should be files in the filesystem in cascade order
    (TestExpectations followed by Skipped, if the port honors both formats),
    then any built-in expectations (e.g., from compile-time exclusions), then
    --additional-expectations options.
    """
    # FIXME: rename this to test_expectations() once all the callers are updated to know about the ordered dict.
    expectations = OrderedDict()
    for path in self.expectations_files():
        if self._filesystem.exists(path):
            expectations[path] = self._filesystem.read_text_file(path)
    for path in self.get_option('additional_expectations', []):
        # User-supplied paths may contain '~'; expand before checking existence.
        expanded_path = self._filesystem.expanduser(path)
        if self._filesystem.exists(expanded_path):
            _log.debug("reading additional_expectations from path '%s'" % path)
            # Keyed by the unexpanded path, matching what the user passed in.
            expectations[path] = self._filesystem.read_text_file(expanded_path)
        else:
            _log.warning("additional_expectations path '%s' does not exist" % path)
    return expectations
def bot_expectations(self):
    """Return flakiness expectations scraped from the bots' results.

    Returns {} unless --ignore-flaky-tests is in effect; the shape of the
    result depends on the chosen ignore mode.
    """
    if not self.get_option('ignore_flaky_tests'):
        return {}
    full_port_name = self.determine_full_port_name(self.host, self._options, self.port_name)
    builder_category = self.get_option('ignore_builder_category', 'layout')
    factory = BotTestExpectationsFactory()
    # FIXME: This only grabs the release builder's flakiness data. If we're
    # running debug, then we should grab the debug builder's data.
    expectations = factory.expectations_for_port(full_port_name, builder_category)
    if not expectations:
        return {}
    ignore_mode = self.get_option('ignore_flaky_tests')
    if ignore_mode == 'very-flaky' or ignore_mode == 'maybe-flaky':
        return expectations.flakes_by_path(ignore_mode == 'very-flaky')
    if ignore_mode == 'unexpected':
        return expectations.unexpected_results_by_path()
    _log.warning("Unexpected ignore mode: '%s'." % ignore_mode)
    return {}
def expectations_files(self):
    """Return the paths of all expectations files this port consults, generic file first."""
    files = [self.path_to_generic_test_expectations_file()]
    files.extend(self._port_specific_expectations_files())
    files.extend(self._flag_specific_expectations_files())
    return files
def repository_path(self):
    """Returns the repository path for the chromium code base.

    The 'build' directory is used as the canonical location whose revision
    identifies the checkout.
    """
    return self.path_from_chromium_base('build')
# Marker strings emitted by wdiff (see _wdiff_command) and rewritten into
# HTML spans by _format_wdiff_output_as_html.
_WDIFF_DEL = '##WDIFF_DEL##'
_WDIFF_ADD = '##WDIFF_ADD##'
_WDIFF_END = '##WDIFF_END##'
def _format_wdiff_output_as_html(self, wdiff):
    """Convert raw wdiff output (containing our marker strings) into an HTML fragment."""
    # Escape first so diff content can't inject markup; then expand markers.
    wdiff = cgi.escape(wdiff)
    wdiff = wdiff.replace(self._WDIFF_DEL, "<span class=del>")
    wdiff = wdiff.replace(self._WDIFF_ADD, "<span class=add>")
    wdiff = wdiff.replace(self._WDIFF_END, "</span>")
    html = "<head><style>.del { background: #faa; } "
    html += ".add { background: #afa; }</style></head>"
    html += "<pre>%s</pre>" % wdiff
    return html
def _wdiff_command(self, actual_filename, expected_filename):
    """Build the wdiff command line that emits our HTML marker strings."""
    return [
        self._path_to_wdiff(),
        "--start-delete=%s" % self._WDIFF_DEL,
        "--end-delete=%s" % self._WDIFF_END,
        "--start-insert=%s" % self._WDIFF_ADD,
        "--end-insert=%s" % self._WDIFF_END,
        actual_filename,
        expected_filename,
    ]
@staticmethod
def _handle_wdiff_error(script_error):
    """Swallow wdiff's exit code 1 (files differ); re-raise anything else."""
    # Exit 1 means the files differed, any other exit code is an error.
    if script_error.exit_code != 1:
        raise script_error
def _run_wdiff(self, actual_filename, expected_filename):
    """Run wdiff on the two files and return the HTML-formatted result.

    Runs wdiff and may throw exceptions. This is mostly a hook for unit testing.
    """
    # Diffs are treated as binary as they may include multiple files
    # with conflicting encodings. Thus we do not decode the output.
    command = self._wdiff_command(actual_filename, expected_filename)
    wdiff = self._executive.run_command(command, decode_output=False,
                                        error_handler=self._handle_wdiff_error)
    return self._format_wdiff_output_as_html(wdiff)
# Canned message returned when wdiff cannot be run.
_wdiff_error_html = "Failed to run wdiff, see error log."

def wdiff_text(self, actual_filename, expected_filename):
    """Returns a string of HTML indicating the word-level diff of the
    contents of the two filenames. Returns an empty string if word-level
    diffing isn't available."""
    if not self.wdiff_available():
        return ""
    try:
        # It's possible to raise a ScriptError if we pass wdiff invalid paths.
        return self._run_wdiff(actual_filename, expected_filename)
    except OSError as e:
        if e.errno in [errno.ENOENT, errno.EACCES, errno.ECHILD]:
            # Silently ignore cases where wdiff is missing.
            self._wdiff_available = False
            return ""
        raise
    except ScriptError as e:
        _log.error("Failed to run wdiff: %s" % e)
        # Stop retrying once wdiff has failed.
        self._wdiff_available = False
        return self._wdiff_error_html
# This is a class variable so we can test error output easily.
_pretty_patch_error_html = "Failed to run PrettyPatch, see error log."

def pretty_patch_text(self, diff_path):
    """Return an HTML rendering of the diff at diff_path via PrettyPatch (ruby).

    Falls back to a canned error message — and stops retrying — if ruby or
    PrettyPatch is unavailable or fails.
    """
    if self._pretty_patch_available is None:
        self._pretty_patch_available = self.check_pretty_patch(logging=False)
    if not self._pretty_patch_available:
        return self._pretty_patch_error_html
    command = ("ruby", "-I", self._filesystem.dirname(self._pretty_patch_path),
               self._pretty_patch_path, diff_path)
    try:
        # Diffs are treated as binary (we pass decode_output=False) as they
        # may contain multiple files of conflicting encodings.
        return self._executive.run_command(command, decode_output=False)
    # Fix: both handlers used the Python-2-only `except X, e` form; the rest
    # of this file already uses the portable `except ... as e` spelling.
    except OSError as e:
        # If the system is missing ruby, log the error and stop trying.
        self._pretty_patch_available = False
        _log.error("Failed to run PrettyPatch (%s): %s" % (command, e))
        return self._pretty_patch_error_html
    except ScriptError as e:
        # If ruby failed to run for some reason, log the command
        # output and stop trying.
        self._pretty_patch_available = False
        _log.error("Failed to run PrettyPatch (%s):\n%s" % (command, e.message_with_output()))
        return self._pretty_patch_error_html
def default_configuration(self):
    """Return the default build configuration, as determined by the Config object."""
    return self._config.default_configuration()
def clobber_old_port_specific_results(self):
    """Hook for ports to delete stale port-specific result files; no-op by default."""
    pass
# FIXME: This does not belong on the port object.
@memoized
def path_to_apache(self):
    """Returns the full path to the apache binary.

    This is needed only by ports that use the apache_http_server module.
    Subclasses must override; the base implementation always raises.
    """
    raise NotImplementedError('Port.path_to_apache')
def path_to_apache_config_file(self):
    """Returns the full path to the apache configuration file.

    If the WEBKIT_HTTP_SERVER_CONF_PATH environment variable is set, its
    contents will be used instead.

    This is needed only by ports that use the apache_http_server module.

    Raises IOError if the environment-supplied path does not exist.
    """
    config_file_from_env = os.environ.get('WEBKIT_HTTP_SERVER_CONF_PATH')
    if config_file_from_env:
        if not self._filesystem.exists(config_file_from_env):
            raise IOError('%s was not found on the system' % config_file_from_env)
        return config_file_from_env
    # Otherwise pick the platform-appropriate conf file checked into the tree.
    config_file_name = self._apache_config_file_name_for_platform()
    return self._filesystem.join(self.layout_tests_dir(), 'http', 'conf', config_file_name)
#
# PROTECTED ROUTINES
#
# The routines below should only be called by routines in this class
# or any of its subclasses.
#
def _apache_version(self):
    """Return the installed Apache "major.minor" version string, e.g. '2.4'."""
    version_output = self._executive.run_command([self.path_to_apache(), '-v'])
    # Extract "N.N" from a line like "Server version: Apache/2.4.18 (Unix)".
    return re.sub(r'(?:.|\n)*Server version: Apache/(\d+\.\d+)(?:.|\n)*', r'\1', version_output)
def _apache_config_file_name_for_platform(self):
    """Pick the httpd.conf variant matching this platform's Apache install."""
    platform = self.host.platform
    if platform.is_cygwin():
        return 'cygwin-httpd.conf'  # CYGWIN is the only platform to still use Apache 1.3.
    if platform.is_linux():
        distribution = platform.linux_distribution()
        # These distributions ship Apache configured differently enough to
        # need their own checked-in conf files.
        if distribution in ('arch', 'debian', 'redhat'):
            return "%s-httpd-%s.conf" % (distribution, self._apache_version())
    return 'apache2-httpd-' + self._apache_version() + '.conf'
def _path_to_driver(self, target=None):
    """Returns the full path to the test driver binary."""
    driver = self.driver_name()
    return self._build_path(target, driver)
def _path_to_webcore_library(self):
    """Returns the full path to a built copy of WebCore, or None if not applicable."""
    return None
def _path_to_helper(self):
    """Returns the full path to the layout_test_helper binary, which
    is used to help configure the system for the test run, or None
    if no helper is needed.

    This is likely only used by start/stop_helper().
    """
    return None
def _path_to_image_diff(self):
    """Returns the full path to the image_diff binary, or None if it is not available.

    This is likely used only by diff_image().
    """
    return self._build_path('image_diff')
@memoized
def _path_to_wdiff(self):
    """Returns the full path to the wdiff binary, or None if it is not available.

    Checks wdiff first, then dwdiff. This is likely used only by wdiff_text().
    """
    for path in ("/usr/bin/wdiff", "/usr/bin/dwdiff"):
        if self._filesystem.exists(path):
            return path
    return None
def _webkit_baseline_path(self, platform):
    """Return the full path to the top of the baseline tree for the given platform."""
    layout_tests_dir = self.layout_tests_dir()
    return self._filesystem.join(layout_tests_dir, 'platform', platform)
def _driver_class(self):
    """Returns the port's driver implementation class."""
    return driver.Driver
def output_contains_sanitizer_messages(self, output):
    """Return the name of the sanitizer mentioned in `output`, or None."""
    if not output:
        return None
    for sanitizer in ('AddressSanitizer', 'MemorySanitizer'):
        if sanitizer in output:
            return sanitizer
    return None
def _get_crash_log(self, name, pid, stdout, stderr, newer_than):
    """Build a (stderr, crash-log-text) pair for a crashed process.

    If stderr contains sanitizer output, it is first piped through
    asan_symbolize.py (serialized via flock) to symbolize stack traces.
    stdout/stderr must be bytestrings (Python 2 str), not unicode.
    `newer_than` is unused in this base implementation.
    """
    if self.output_contains_sanitizer_messages(stderr):
        # Running the symbolizer script can take a lot of memory, so we need to
        # serialize access to it across all the concurrently running drivers.
        llvm_symbolizer_path = self.path_from_chromium_base(
            'third_party', 'llvm-build', 'Release+Asserts', 'bin', 'llvm-symbolizer')
        if self._filesystem.exists(llvm_symbolizer_path):
            env = os.environ.copy()
            env['LLVM_SYMBOLIZER_PATH'] = llvm_symbolizer_path
        else:
            env = None
        sanitizer_filter_path = self.path_from_chromium_base('tools', 'valgrind', 'asan', 'asan_symbolize.py')
        sanitizer_strip_path_prefix = 'Release/../../'
        if self._filesystem.exists(sanitizer_filter_path):
            stderr = self._executive.run_command(
                ['flock', sys.executable, sanitizer_filter_path, sanitizer_strip_path_prefix], input=stderr, decode_output=False, env=env)
    name_str = name or '<unknown process name>'
    pid_str = str(pid or '<unknown>')
    # We require stdout and stderr to be bytestrings, not character strings.
    if stdout:
        assert isinstance(stdout, str)
        stdout_lines = stdout.decode('utf8', 'replace').splitlines()
    else:
        stdout_lines = [u'<empty>']
    if stderr:
        assert isinstance(stderr, str)
        stderr_lines = stderr.decode('utf8', 'replace').splitlines()
    else:
        stderr_lines = [u'<empty>']
    return (stderr, 'crash log for %s (pid %s):\n%s\n%s\n' % (name_str, pid_str,
                                                              '\n'.join(('STDOUT: ' + l) for l in stdout_lines),
                                                              '\n'.join(('STDERR: ' + l) for l in stderr_lines)))
def look_for_new_crash_logs(self, crashed_processes, start_time):
    """Hook for ports that can harvest system crash logs after a run; no-op by default."""
    pass
def look_for_new_samples(self, unresponsive_processes, start_time):
    """Hook for ports that can collect samples of hung processes; no-op by default."""
    pass
def sample_process(self, name, pid):
    """Hook for ports that can sample a running process by name/pid; no-op by default."""
    pass
def physical_test_suites(self):
    """Return the PhysicalTestSuites (directories run with extra driver flags)."""
    return [
        # For example, to turn on force-compositing-mode in the svg/ directory:
        # PhysicalTestSuite('svg', ['--force-compositing-mode']),
        PhysicalTestSuite('fast/text', ["--enable-direct-write", "--enable-font-antialiasing"]),
    ]
def virtual_test_suites(self):
    """Load (once) and return the suites defined in LayoutTests/VirtualTestSuites.

    Raises ValueError if the JSON file cannot be parsed.
    """
    if self._virtual_test_suites is None:
        path_to_virtual_test_suites = self._filesystem.join(self.layout_tests_dir(), 'VirtualTestSuites')
        assert self._filesystem.exists(path_to_virtual_test_suites), 'LayoutTests/VirtualTestSuites not found'
        try:
            # Each JSON entry maps directly onto VirtualTestSuite's kwargs.
            test_suite_json = json.loads(self._filesystem.read_text_file(path_to_virtual_test_suites))
            self._virtual_test_suites = [VirtualTestSuite(**d) for d in test_suite_json]
        except ValueError as e:
            raise ValueError("LayoutTests/VirtualTestSuites is not a valid JSON file: %s" % str(e))
    return self._virtual_test_suites
def _all_virtual_tests(self, suites):
    """Return the names of every test in every virtual suite (populating them lazily)."""
    tests = []
    for suite in suites:
        self._populate_virtual_suite(suite)
        tests += list(suite.tests.keys())
    return tests
def _virtual_tests_matching_paths(self, paths, suites):
    """Return virtual test names that fall under any of `paths`."""
    matches = []
    for suite in suites:
        # Only populate (and scan) suites that some requested path touches.
        if not any(p.startswith(suite.name) for p in paths):
            continue
        self._populate_virtual_suite(suite)
        matches += [t for t in suite.tests if any(t.startswith(p) for p in paths)]
    return matches
def _populate_virtual_suite(self, suite):
    """Fill suite.tests by mapping each real test under suite.base to its virtual name."""
    if suite.tests:
        return
    suite.tests = {}
    for real_test in self._real_tests([suite.base]):
        virtual_name = real_test.replace(suite.base, suite.name, 1)
        suite.tests[virtual_name] = real_test
def is_virtual_test(self, test_name):
    """True if test_name belongs to some virtual test suite."""
    return self.lookup_virtual_suite(test_name) is not None
def lookup_virtual_suite(self, test_name):
    """Return the virtual suite whose name prefixes test_name, or None."""
    matching = (suite for suite in self.virtual_test_suites()
                if test_name.startswith(suite.name))
    return next(matching, None)
def lookup_virtual_test_base(self, test_name):
    """Map a virtual test name to its underlying real test name, or None."""
    suite = self.lookup_virtual_suite(test_name)
    if suite is None:
        return None
    # Only the leading suite prefix is rewritten.
    return test_name.replace(suite.name, suite.base, 1)
def lookup_virtual_test_args(self, test_name):
    """Return the extra driver args of the virtual suite containing test_name, else []."""
    suite = next((s for s in self.virtual_test_suites()
                  if test_name.startswith(s.name)), None)
    return suite.args if suite else []
def lookup_virtual_reference_args(self, test_name):
    """Return the reference-test driver args of the matching virtual suite, else []."""
    suite = next((s for s in self.virtual_test_suites()
                  if test_name.startswith(s.name)), None)
    return suite.reference_args if suite else []
def lookup_physical_test_args(self, test_name):
    """Return the extra driver args of the physical suite containing test_name, else []."""
    suite = next((s for s in self.physical_test_suites()
                  if test_name.startswith(s.name)), None)
    return suite.args if suite else []
def lookup_physical_reference_args(self, test_name):
    """Return the reference-test driver args of the matching physical suite, else []."""
    suite = next((s for s in self.physical_test_suites()
                  if test_name.startswith(s.name)), None)
    return suite.reference_args if suite else []
def should_run_as_pixel_test(self, test_input):
    """Decide whether test_input should run with pixel comparison enabled."""
    if not self._options.pixel_tests:
        return False
    # If explicit pixel-test directories were given, they are authoritative.
    if self._options.pixel_test_directories:
        return any(test_input.test_name.startswith(directory) for directory in self._options.pixel_test_directories)
    # TODO(burnik): Make sure this is the right way to do it.
    # WPT tests are never run as pixel tests.
    if self.is_wpt_enabled() and self.is_wpt_test(test_input.test_name):
        return False
    return True
def _modules_to_search_for_symbols(self):
    """Return the binaries to scan with nm for feature symbols (possibly empty)."""
    path = self._path_to_webcore_library()
    return [path] if path else []
def _symbols_string(self):
    """Return the concatenated `nm` output for all feature-detection modules.

    Returns an empty string when there is nothing to scan or nm cannot run.
    """
    symbols = ''
    for path_to_module in self._modules_to_search_for_symbols():
        try:
            symbols += self._executive.run_command(['nm', path_to_module], error_handler=self._executive.ignore_error)
        # Fix: `except OSError, e` is Python-2-only syntax; the rest of the
        # file uses the portable `except ... as e` form.
        except OSError as e:
            _log.warn("Failed to run nm: %s. Can't determine supported features correctly." % e)
    return symbols
# Ports which use compile-time feature detection should define this method and return
# a dictionary mapping from symbol substrings to possibly disabled test directories.
# When the symbol substrings are not matched, the directories will be skipped.
# If ports don't ever enable certain features, then those directories can just be
# in the Skipped list instead of compile-time-checked here.
def _missing_symbol_to_skipped_tests(self):
    """Map nm symbol substrings to the test dirs to skip when the symbol is absent."""
    if self.PORT_HAS_AUDIO_CODECS_BUILT_IN:
        return {}
    return {
        "ff_mp3_decoder": ["webaudio/codec-tests/mp3"],
        "ff_aac_decoder": ["webaudio/codec-tests/aac"],
    }
def _has_test_in_directories(self, directory_lists, test_list):
    """True if any test in test_list falls under any directory in directory_lists."""
    if not test_list:
        return False
    all_directories = itertools.chain.from_iterable(directory_lists)
    return any(test.startswith(directory)
               for directory, test in itertools.product(all_directories, test_list))
def _skipped_tests_for_unsupported_features(self, test_list):
    """Return the test directories to skip because required symbols are missing."""
    # Only check the symbols if there are tests in the test_list that might get
    # skipped. This is a performance optimization to avoid calling nm.
    # Runtime feature detection is not supported; fall back to static detection:
    # disable any tests for symbols missing from the executable or libraries.
    if self._has_test_in_directories(self._missing_symbol_to_skipped_tests().values(), test_list):
        symbols_string = self._symbols_string()
        if symbols_string is not None:
            # Flatten the directory lists of all symbols absent from the binary.
            return reduce(operator.add, [directories for symbol_substring, directories in self._missing_symbol_to_skipped_tests().items() if symbol_substring not in symbols_string], [])
    return []
def _convert_path(self, path):
    """Handle filename conversion for subprocess command line args.

    See note above in diff_image() for why we need this.
    """
    return cygpath(path) if sys.platform == 'cygwin' else path
def _build_path(self, *comps):
    """Join `comps` onto the build output directory for the configured target."""
    target = self._options.target
    return self._build_path_with_target(target, *comps)
def _build_path_with_target(self, target, *comps):
    """Join `comps` onto the build output directory for `target`."""
    # Note that we don't do the option caching that the
    # base class does, because finding the right directory is relatively
    # fast.
    target = target or self.get_option('target')
    return self._static_build_path(self._filesystem, self.get_option('build_directory'),
                                   self.path_from_chromium_base(), target, comps)
def _check_driver_build_up_to_date(self, target):
    """Warn (but never fail) if the other build configuration looks newer.

    Always returns True; the comparison is advisory only.
    """
    # We should probably get rid of this check altogether as it has
    # outlived its usefulness in a GN-based world, but for the moment
    # we will just check things if they are using the standard
    # Debug or Release target directories.
    if target not in ('Debug', 'Release'):
        return True
    try:
        debug_path = self._path_to_driver('Debug')
        release_path = self._path_to_driver('Release')
        debug_mtime = self._filesystem.mtime(debug_path)
        release_mtime = self._filesystem.mtime(release_path)
        if (debug_mtime > release_mtime and target == 'Release' or
                release_mtime > debug_mtime and target == 'Debug'):
            most_recent_binary = 'Release' if target == 'Debug' else 'Debug'
            _log.warning('You are running the %s binary. However the %s binary appears to be more recent. '
                         'Please pass --%s.', target, most_recent_binary, most_recent_binary.lower())
            _log.warning('')
    # This will fail if we don't have both a debug and release binary.
    # That's fine because, in this case, we must already be running the
    # most up-to-date one.
    except OSError:
        pass
    return True
def _chromium_baseline_path(self, platform):
    """Return LayoutTests/platform/<platform>, defaulting platform to this port's name."""
    if platform is None:
        platform = self.name()
    return self.path_from_webkit_base('LayoutTests', 'platform', platform)
class VirtualTestSuite(object):
    """A suite that reruns the tests under `base` with extra driver args,
    exposed under the name virtual/<prefix>/<base>."""

    def __init__(self, prefix=None, base=None, args=None, references_use_default_args=False):
        assert base
        assert args
        assert prefix.find('/') == -1, "Virtual test suites prefixes cannot contain /'s: %s" % prefix
        self.name = 'virtual/' + prefix + '/' + base
        self.base = base
        self.args = args
        # Reference tests either run with no extra args or with the suite's args.
        if references_use_default_args:
            self.reference_args = []
        else:
            self.reference_args = args
        # Lazily populated by Port._populate_virtual_suite().
        self.tests = {}

    def __repr__(self):
        return "VirtualTestSuite('%s', '%s', %s, %s)" % (self.name, self.base, self.args, self.reference_args)
class PhysicalTestSuite(object):
    """A directory of tests run in place with extra driver args."""

    def __init__(self, base, args, reference_args=None):
        self.name = base
        self.base = base
        self.args = args
        # Reference tests use the same args unless explicitly overridden.
        if reference_args is None:
            self.reference_args = args
        else:
            self.reference_args = reference_args
        self.tests = set()

    def __repr__(self):
        return "PhysicalTestSuite('%s', '%s', %s, %s)" % (self.name, self.base, self.args, self.reference_args)
| codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import collections
import copy
import datetime
import decimal
import math
import uuid
import warnings
from base64 import b64decode, b64encode
from itertools import tee
from django.apps import apps
from django.db import connection
from django.db.models.lookups import default_lookups, RegisterLookupMixin
from django.db.models.query_utils import QueryWrapper
from django.conf import settings
from django import forms
from django.core import exceptions, validators, checks
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import parse_date, parse_datetime, parse_time, parse_duration
from django.utils.duration import duration_string
from django.utils.functional import cached_property, curry, total_ordering, Promise
from django.utils.text import capfirst
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import (smart_text, force_text, force_bytes,
python_2_unicode_compatible)
from django.utils.ipv6 import clean_ipv6_address
from django.utils import six
from django.utils.itercompat import is_iterable
# When the _meta object was formalized, this exception was moved to
# django.core.exceptions. It is retained here for backwards compatibility
# purposes.
from django.core.exceptions import FieldDoesNotExist # NOQA
# Avoid "TypeError: Item in ``from list'' not a string" -- unicode_literals
# makes these strings unicode. Wrapping each name in str() keeps every
# exported name a native str on both Python 2 and 3.
__all__ = [str(x) for x in (
    'AutoField', 'BLANK_CHOICE_DASH', 'BigIntegerField', 'BinaryField',
    'BooleanField', 'CharField', 'CommaSeparatedIntegerField', 'DateField',
    'DateTimeField', 'DecimalField', 'DurationField', 'EmailField', 'Empty',
    'Field', 'FieldDoesNotExist', 'FilePathField', 'FloatField',
    'GenericIPAddressField', 'IPAddressField', 'IntegerField', 'NOT_PROVIDED',
    'NullBooleanField', 'PositiveIntegerField', 'PositiveSmallIntegerField',
    'SlugField', 'SmallIntegerField', 'TextField', 'TimeField', 'URLField',
    'UUIDField',
)]
class Empty(object):
    # Bare placeholder class; _empty() instantiates it and then rebinds
    # __class__ to create an instance without running __init__.
    pass
class NOT_PROVIDED:
    # Sentinel used as Field's `default` kwarg to mean "no default was given".
    pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
def _load_field(app_label, model_name, field_name):
    """Resolve a field instance from (app_label, model_name, field_name) via the app registry."""
    return apps.get_model(app_label, model_name)._meta.get_field(field_name)
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
def _empty(of_cls):
    """Create an instance of `of_cls` without invoking its __init__."""
    instance = Empty()
    instance.__class__ = of_cls
    return instance
@total_ordering
@python_2_unicode_compatible
class Field(RegisterLookupMixin):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
empty_values = list(validators.EMPTY_VALUES)
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
'invalid_choice': _('Value %(value)r is not a valid choice.'),
'null': _('This field cannot be null.'),
'blank': _('This field cannot be blank.'),
'unique': _('%(model_name)s with this %(field_label)s '
'already exists.'),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
'unique_for_date': _("%(field_label)s must be unique for "
"%(date_field_label)s %(lookup_type)s."),
}
class_lookups = default_lookups.copy()
system_check_deprecated_details = None
system_check_removed_details = None
# Field flags
hidden = False
many_to_many = None
many_to_one = None
one_to_many = None
one_to_one = None
related_model = None
# Generic field type description, usually overridden by subclasses
def _description(self):
return _('Field of type: %(field_type)s') % {
'field_type': self.__class__.__name__
}
description = property(_description)
def __init__(self, verbose_name=None, name=None, primary_key=False,
             max_length=None, unique=False, blank=False, null=False,
             db_index=False, rel=None, default=NOT_PROVIDED, editable=True,
             serialize=True, unique_for_date=None, unique_for_month=None,
             unique_for_year=None, choices=None, help_text='', db_column=None,
             db_tablespace=None, auto_created=False, validators=[],
             error_messages=None):
    """Store field options and register this instance's creation order.

    NOTE(review): `validators=[]` is a mutable default argument; it appears
    to only be stored here (never mutated), but confirm before relying on it.
    """
    self.name = name
    self.verbose_name = verbose_name  # May be set by set_attributes_from_name
    self._verbose_name = verbose_name  # Store original for deconstruction
    self.primary_key = primary_key
    self.max_length, self._unique = max_length, unique
    self.blank, self.null = blank, null
    self.rel = rel
    self.is_relation = self.rel is not None
    self.default = default
    self.editable = editable
    self.serialize = serialize
    self.unique_for_date = unique_for_date
    self.unique_for_month = unique_for_month
    self.unique_for_year = unique_for_year
    self._choices = choices or []
    self.help_text = help_text
    self.db_column = db_column
    self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
    self.auto_created = auto_created
    # Set db_index to True if the field has a relationship and doesn't
    # explicitly set db_index.
    self.db_index = db_index
    # Adjust the appropriate creation counter, and save our local copy.
    # Auto-created fields count down from -1 so they sort before user fields.
    if auto_created:
        self.creation_counter = Field.auto_creation_counter
        Field.auto_creation_counter -= 1
    else:
        self.creation_counter = Field.creation_counter
        Field.creation_counter += 1
    self._validators = validators  # Store for deconstruction later
    # Merge default_error_messages across the MRO (base classes first),
    # then overlay any instance-specific messages.
    messages = {}
    for c in reversed(self.__class__.__mro__):
        messages.update(getattr(c, 'default_error_messages', {}))
    messages.update(error_messages or {})
    self._error_messages = error_messages  # Store for deconstruction later
    self.error_messages = messages
def __str__(self):
    """ Return "app_label.model_label.field_name". """
    opts = self.model._meta
    return '%s.%s.%s' % (opts.app_label, opts.object_name, self.name)
def __repr__(self):
    """Display the module, class and (if set) name of the field."""
    path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
    name = getattr(self, 'name', None)
    if name is None:
        return '<%s>' % path
    return '<%s: %s>' % (path, name)
def check(self, **kwargs):
    """Run all field-level system checks and return the accumulated messages."""
    errors = []
    for collect in (self._check_field_name, self._check_choices,
                    self._check_db_index, self._check_null_allowed_for_primary_keys):
        errors.extend(collect())
    errors.extend(self._check_backend_specific_checks(**kwargs))
    errors.extend(self._check_deprecation_details())
    return errors
def _check_field_name(self):
    """ Check if field name is valid, i.e. 1) does not end with an
    underscore, 2) does not contain "__" and 3) is not "pk". """
    if self.name.endswith('_'):
        message, check_id = 'Field names must not end with an underscore.', 'fields.E001'
    elif '__' in self.name:
        message, check_id = 'Field names must not contain "__".', 'fields.E002'
    elif self.name == 'pk':
        message, check_id = "'pk' is a reserved word that cannot be used as a field name.", 'fields.E003'
    else:
        return []
    return [
        checks.Error(
            message,
            hint=None,
            obj=self,
            id=check_id,
        )
    ]
def _check_choices(self):
    """Validate that `choices`, when given, is an iterable of (value, display) pairs."""
    if not self.choices:
        return []
    # A string is iterable but never a valid choices container.
    if isinstance(self.choices, six.string_types) or not is_iterable(self.choices):
        return [
            checks.Error(
                "'choices' must be an iterable (e.g., a list or tuple).",
                hint=None,
                obj=self,
                id='fields.E004',
            )
        ]
    if any(isinstance(choice, six.string_types) or
           not is_iterable(choice) or len(choice) != 2
           for choice in self.choices):
        return [
            checks.Error(
                ("'choices' must be an iterable containing "
                 "(actual value, human readable name) tuples."),
                hint=None,
                obj=self,
                id='fields.E005',
            )
        ]
    return []
def _check_db_index(self):
    """Ensure db_index is one of None, True or False."""
    if self.db_index in (None, True, False):
        return []
    return [
        checks.Error(
            "'db_index' must be None, True or False.",
            hint=None,
            obj=self,
            id='fields.E006',
        )
    ]
def _check_null_allowed_for_primary_keys(self):
    """Forbid null=True on primary keys where it can be reliably detected."""
    # Backends like Oracle consider NULL and '' to be equal (and thus set
    # up character-based fields a little differently), so the check is
    # skipped when that feature flag is set.
    nullable_pk = (self.primary_key and self.null and
                   not connection.features.interprets_empty_strings_as_nulls)
    if not nullable_pk:
        return []
    return [
        checks.Error(
            'Primary keys must not have null=True.',
            hint=('Set null=False on the field, or '
                  'remove primary_key=True argument.'),
            obj=self,
            id='fields.E007',
        )
    ]
def _check_backend_specific_checks(self, **kwargs):
    """Delegate to the active database backend's own field validation."""
    backend_validation = connection.validation
    return backend_validation.check_field(self, **kwargs)
def _check_deprecation_details(self):
    """Emit the configured removal Error or deprecation Warning, if any."""
    removed = self.system_check_removed_details
    if removed is not None:
        return [
            checks.Error(
                removed.get(
                    'msg',
                    '%s has been removed except for support in historical '
                    'migrations.' % self.__class__.__name__
                ),
                hint=removed.get('hint'),
                obj=self,
                id=removed.get('id', 'fields.EXXX'),
            )
        ]
    deprecated = self.system_check_deprecated_details
    if deprecated is not None:
        return [
            checks.Warning(
                deprecated.get(
                    'msg',
                    '%s has been deprecated.' % self.__class__.__name__
                ),
                hint=deprecated.get('hint'),
                obj=self,
                id=deprecated.get('id', 'fields.WXXX'),
            )
        ]
    return []
def get_col(self, alias, source=None):
    """Return a Col expression for this field under the given table alias."""
    if source is None:
        source = self
    # The cached Col only applies to the field's own table and itself as
    # source; anything else gets a fresh expression.
    if alias == self.model._meta.db_table and source == self:
        return self.cached_col
    from django.db.models.expressions import Col
    return Col(alias, self, source)
@cached_property
def cached_col(self):
    """Cached Col expression pointing at this field on its own table."""
    # Imported lazily to avoid a circular import with expressions.
    from django.db.models.expressions import Col
    return Col(self.model._meta.db_table, self)
def select_format(self, compiler, sql, params):
    """
    Hook for customising how this column appears in a SELECT clause.

    For example, GIS columns need to be selected as AsText(table.col) on
    MySQL as the table.col data can't be used by Django. The default
    implementation passes the SQL and params through untouched.
    """
    return sql, params
def deconstruct(self):
    """
    Returns enough information to recreate the field as a 4-tuple:
    * The name of the field on the model, if contribute_to_class has been run
    * The import path of the field, including the class: django.db.models.IntegerField
    This should be the most portable version, so less specific may be better.
    * A list of positional arguments
    * A dict of keyword arguments
    Note that the positional or keyword arguments must contain values of the
    following types (including inner values of collection types):
    * None, bool, str, unicode, int, long, float, complex, set, frozenset, list, tuple, dict
    * UUID
    * datetime.datetime (naive), datetime.date
    * top-level classes, top-level functions - will be referenced by their full import path
    * Storage instances - these have their own deconstruct() method
    This is because the values here must be serialized into a text format
    (possibly new Python code, possibly JSON) and these are the only types
    with encoding handlers defined.
    There's no need to return the exact way the field was instantiated this time,
    just ensure that the resulting field is the same - prefer keyword arguments
    over positional ones, and omit parameters with their default values.
    """
    # Short-form way of fetching all the default parameters
    keywords = {}
    possibles = {
        "verbose_name": None,
        "primary_key": False,
        "max_length": None,
        "unique": False,
        "blank": False,
        "null": False,
        "db_index": False,
        "default": NOT_PROVIDED,
        "editable": True,
        "serialize": True,
        "unique_for_date": None,
        "unique_for_month": None,
        "unique_for_year": None,
        "choices": [],
        "help_text": '',
        "db_column": None,
        "db_tablespace": settings.DEFAULT_INDEX_TABLESPACE,
        "auto_created": False,
        "validators": [],
        "error_messages": None,
    }
    # Some options are stored under a private attribute name.
    attr_overrides = {
        "unique": "_unique",
        "choices": "_choices",
        "error_messages": "_error_messages",
        "validators": "_validators",
        "verbose_name": "_verbose_name",
    }
    # These defaults are compared by equality; everything else by identity,
    # so e.g. default=None is distinguishable from NOT_PROVIDED.
    equals_comparison = {"choices", "validators", "db_tablespace"}
    for name, default in possibles.items():
        value = getattr(self, attr_overrides.get(name, name))
        # Unroll anything iterable for choices into a concrete list
        # NOTE(review): collections.Iterable was deprecated in Python 3.3+
        # in favour of collections.abc.Iterable (removed in 3.10); kept
        # as-is here, presumably for Python 2 compatibility (six is used).
        if name == "choices" and isinstance(value, collections.Iterable):
            value = list(value)
        # Do correct kind of comparison
        if name in equals_comparison:
            if value != default:
                keywords[name] = value
        else:
            if value is not default:
                keywords[name] = value
    # Work out path - we shorten it for known Django core fields
    path = "%s.%s" % (self.__class__.__module__, self.__class__.__name__)
    if path.startswith("django.db.models.fields.related"):
        path = path.replace("django.db.models.fields.related", "django.db.models")
    if path.startswith("django.db.models.fields.files"):
        path = path.replace("django.db.models.fields.files", "django.db.models")
    if path.startswith("django.db.models.fields.proxy"):
        path = path.replace("django.db.models.fields.proxy", "django.db.models")
    if path.startswith("django.db.models.fields"):
        path = path.replace("django.db.models.fields", "django.db.models")
    # Return basic info - other fields should override this.
    return (
        force_text(self.name, strings_only=True),
        path,
        [],
        keywords,
    )
def clone(self):
    """
    Uses deconstruct() to build a fresh copy of this Field.

    Will not preserve any class attachments/attribute names.
    """
    _, _, args, kwargs = self.deconstruct()
    return self.__class__(*args, **kwargs)
def __eq__(self, other):
    """Fields compare equal when their creation counters match."""
    # Needed for @total_ordering.
    if not isinstance(other, Field):
        return NotImplemented
    return self.creation_counter == other.creation_counter
def __lt__(self, other):
    """Order fields by creation counter (bisect needs comparable items)."""
    if not isinstance(other, Field):
        return NotImplemented
    return self.creation_counter < other.creation_counter
def __hash__(self):
    """Hash on the creation counter, keeping consistency with __eq__."""
    return hash(self.creation_counter)
def __deepcopy__(self, memodict):
    """Copy the field shallowly; only self.rel needs its own copy."""
    # Most attributes are never altered after initial creation, so a full
    # deep copy is unnecessary.
    duplicate = copy.copy(self)
    if self.rel:
        duplicate.rel = copy.copy(self.rel)
        # Keep a back-reference from the copied rel to the copied field.
        if hasattr(self.rel, 'field') and self.rel.field is self:
            duplicate.rel.field = duplicate
    memodict[id(self)] = duplicate
    return duplicate
def __copy__(self):
    """Copy without invoking __reduce__, via an Empty shell instance."""
    # We need to avoid hitting __reduce__, so define this slightly weird
    # copy construct: blank object, then graft on class and state.
    shell = Empty()
    shell.__class__ = self.__class__
    shell.__dict__ = self.__dict__.copy()
    return shell
def __reduce__(self):
    """
    Pickling should return the model._meta.fields instance of the field,
    not a new copy of that field. So, we use the app registry to load the
    model and then the field back.
    """
    if not hasattr(self, 'model'):
        # Fields are sometimes used without attaching them to models (for
        # example in aggregation). In this case give back a plain field
        # instance. The code below will create a new empty instance of
        # class self.__class__, then update its dict with self.__dict__
        # values - so, this is very close to normal pickle.
        return _empty, (self.__class__,), self.__dict__
    if self.model._deferred:
        # Deferred model will not be found from the app registry. This
        # could be fixed by reconstructing the deferred model on unpickle.
        raise RuntimeError("Fields of deferred models can't be reduced")
    # _load_field re-fetches the live field instance from the app registry
    # on unpickle, keyed by app label, model name and field name.
    return _load_field, (self.model._meta.app_label, self.model._meta.object_name,
                         self.name)
def to_python(self, value):
    """
    Converts the input value into the expected Python data type, raising
    django.core.exceptions.ValidationError if the data can't be converted.

    The base implementation is the identity; subclasses override this.
    """
    return value
@cached_property
def validators(self):
    """All validators for this field: defaults plus those passed at init."""
    # Some validators can't be created at field initialization time.
    # This method provides a way to delay their creation until required.
    return self.default_validators + self._validators
def run_validators(self, value):
    """Run every validator against value, aggregating ValidationErrors."""
    if value in self.empty_values:
        return
    collected = []
    for validator in self.validators:
        try:
            validator(value)
        except exceptions.ValidationError as e:
            # Prefer this field's own message for the error's code, if any.
            if hasattr(e, 'code') and e.code in self.error_messages:
                e.message = self.error_messages[e.code]
            collected.extend(e.error_list)
    if collected:
        raise exceptions.ValidationError(collected)
def validate(self, value, model_instance):
    """
    Validates value and throws ValidationError. Subclasses should override
    this to provide validation logic.
    """
    if not self.editable:
        # Skip validation for non-editable fields.
        return
    if self._choices and value not in self.empty_values:
        for option_key, option_value in self.choices:
            if isinstance(option_value, (list, tuple)):
                # This is an optgroup, so look inside the group for
                # options.
                for optgroup_key, optgroup_value in option_value:
                    if value == optgroup_key:
                        return
            elif value == option_key:
                return
        # value matched neither a flat choice nor any optgroup entry.
        raise exceptions.ValidationError(
            self.error_messages['invalid_choice'],
            code='invalid_choice',
            params={'value': value},
        )
    if value is None and not self.null:
        raise exceptions.ValidationError(self.error_messages['null'], code='null')
    if not self.blank and value in self.empty_values:
        raise exceptions.ValidationError(self.error_messages['blank'], code='blank')
def clean(self, value, model_instance):
    """
    Convert the value's type and run validation. Validation errors
    from to_python() and validate() are propagated. Returns the correct
    value if no error is raised.
    """
    converted = self.to_python(value)
    self.validate(converted, model_instance)
    self.run_validators(converted)
    return converted
def db_type(self, connection):
    """
    Returns the database column data type for this field, for the provided
    connection, or None when no mapping exists for the internal type.
    """
    # The default implementation of this method looks at the
    # backend-specific data_types dictionary, looking up the field by its
    # "internal type".
    #
    # A Field class can implement the get_internal_type() method to specify
    # which *preexisting* Django Field class it's most similar to -- i.e.,
    # a custom field might be represented by a TEXT column type, which is
    # the same as the TextField Django field type, which means the custom
    # field's get_internal_type() returns 'TextField'.
    #
    # But the limitation of the get_internal_type() / data_types approach
    # is that it cannot handle database column types that aren't already
    # mapped to one of the built-in Django field types. In this case, you
    # can implement db_type() instead of get_internal_type() to specify
    # exactly which wacky database column type you want to use.
    data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
    try:
        return connection.data_types[self.get_internal_type()] % data
    except KeyError:
        # Unknown internal type: the field must provide its own db_type().
        return None
def db_parameters(self, connection):
    """
    Extension of db_type(), providing a range of different return
    values (type, checks).

    This will look at db_type(), allowing custom model fields to override it.
    """
    data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
    type_string = self.db_type(connection)
    try:
        check_string = connection.data_type_check_constraints[self.get_internal_type()] % data
    except KeyError:
        # No check constraint defined for this internal type.
        check_string = None
    return {"type": type_string, "check": check_string}
def db_type_suffix(self, connection):
    """Return any backend-specific column type suffix for this field."""
    suffix_map = connection.data_types_suffix
    return suffix_map.get(self.get_internal_type())
def get_db_converters(self, connection):
    """Return converters applied to values loaded from the database."""
    converters = []
    if hasattr(self, 'from_db_value'):
        converters.append(self.from_db_value)
    return converters
@property
def unique(self):
    """True when the field is declared unique or is the primary key."""
    return self._unique or self.primary_key
def set_attributes_from_name(self, name):
    """Derive name, attname, column, concrete and verbose_name from name."""
    if not self.name:
        self.name = name
    self.attname, self.column = self.get_attname_column()
    # A field without a column (e.g. a virtual field) is not concrete.
    self.concrete = self.column is not None
    if self.verbose_name is None and self.name:
        self.verbose_name = self.name.replace('_', ' ')
def contribute_to_class(self, cls, name, virtual_only=False):
    """
    Register this field with the model class cls under the given name.

    When choices are defined, also installs a get_FOO_display() helper on
    the model.
    """
    self.set_attributes_from_name(name)
    self.model = cls
    if virtual_only:
        cls._meta.add_field(self, virtual=True)
    else:
        cls._meta.add_field(self)
    if self.choices:
        setattr(cls, 'get_%s_display' % self.name,
                curry(cls._get_FIELD_display, field=self))
def get_attname(self):
    """Return the attribute name used on the model instance."""
    return self.name
def get_attname_column(self):
    """Return (attname, column); column falls back to attname."""
    attname = self.get_attname()
    return attname, self.db_column or attname
def get_cache_name(self):
    """Name of the instance attribute used to cache this field's value."""
    return '_%s_cache' % self.name
def get_internal_type(self):
    """Return the class name, used to map to backend column types."""
    return self.__class__.__name__
def pre_save(self, model_instance, add):
    """Return this field's value from model_instance just before saving."""
    return getattr(model_instance, self.attname)
def get_prep_value(self, value):
    """Perform preliminary non-db specific value checks and conversions."""
    # Resolve lazy objects (e.g. lazy translations) to a concrete value.
    if isinstance(value, Promise):
        value = value._proxy____cast()
    return value
def get_db_prep_value(self, value, connection, prepared=False):
    """
    Returns field's value prepared for interacting with the database
    backend.

    Used by the default implementations of ``get_db_prep_save`` and
    ``get_db_prep_lookup``.
    """
    if not prepared:
        value = self.get_prep_value(value)
    return value
def get_db_prep_save(self, value, connection):
    """Returns field's value prepared for saving into a database."""
    return self.get_db_prep_value(value, connection=connection, prepared=False)
def get_prep_lookup(self, lookup_type, value):
    """Perform preliminary non-db specific lookup checks and conversions."""
    # Query-like objects know how to prepare themselves.
    if hasattr(value, '_prepare'):
        return value._prepare()
    pass_through = {
        'iexact', 'contains', 'icontains',
        'startswith', 'istartswith', 'endswith', 'iendswith',
        'month', 'day', 'week_day', 'hour', 'minute', 'second',
        'isnull', 'search', 'regex', 'iregex',
    }
    if lookup_type in pass_through:
        return value
    if lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
        return self.get_prep_value(value)
    if lookup_type in ('range', 'in'):
        return [self.get_prep_value(v) for v in value]
    if lookup_type == 'year':
        try:
            return int(value)
        except ValueError:
            raise ValueError("The __year lookup type requires an integer "
                             "argument")
    return self.get_prep_value(value)
def get_db_prep_lookup(self, lookup_type, value, connection,
                       prepared=False):
    """
    Returns field's value prepared for database lookup, always as a list
    of query parameters (or a QueryWrapper for SQL-producing values).
    """
    if not prepared:
        value = self.get_prep_lookup(lookup_type, value)
        prepared = True
    # Querysets and expressions are compiled to SQL rather than bound.
    if hasattr(value, 'get_compiler'):
        value = value.get_compiler(connection=connection)
    if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
        # If the value has a relabeled_clone method it means the
        # value will be handled later on.
        if hasattr(value, 'relabeled_clone'):
            return value
        if hasattr(value, 'as_sql'):
            sql, params = value.as_sql()
        else:
            sql, params = value._as_sql(connection=connection)
        return QueryWrapper(('(%s)' % sql), params)
    if lookup_type in ('month', 'day', 'week_day', 'hour', 'minute',
                       'second', 'search', 'regex', 'iregex', 'contains',
                       'icontains', 'iexact', 'startswith', 'endswith',
                       'istartswith', 'iendswith'):
        return [value]
    elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
        return [self.get_db_prep_value(value, connection=connection,
                                       prepared=prepared)]
    elif lookup_type in ('range', 'in'):
        return [self.get_db_prep_value(v, connection=connection,
                                       prepared=prepared) for v in value]
    elif lookup_type == 'isnull':
        # isnull takes no right-hand-side parameter.
        return []
    elif lookup_type == 'year':
        # Year lookups expand to a (start, end) bounds pair per field type.
        if isinstance(self, DateTimeField):
            return connection.ops.year_lookup_bounds_for_datetime_field(value)
        elif isinstance(self, DateField):
            return connection.ops.year_lookup_bounds_for_date_field(value)
        else:
            return [value]  # this isn't supposed to happen
    else:
        return [value]
def has_default(self):
    """Return True when an explicit default was supplied for this field."""
    return self.default is not NOT_PROVIDED
def get_default(self):
    """
    Returns the default value for this field.

    Callable defaults are invoked. Without an explicit default, returns
    None or "" depending on whether empty strings are allowed/stored.
    """
    if self.has_default():
        default = self.default
        return default() if callable(default) else default
    if (not self.empty_strings_allowed or (self.null and
            not connection.features.interprets_empty_strings_as_nulls)):
        return None
    return ""
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None):
    """Returns choices with a default blank choices included, for use
    as SelectField choices for this field."""
    blank_defined = False
    choices = list(self.choices) if self.choices else []
    # Named groups look like [('Group label', [(value, label), ...]), ...].
    named_groups = choices and isinstance(choices[0][1], (list, tuple))
    if not named_groups:
        for choice, __ in choices:
            if choice in ('', None):
                blank_defined = True
                break

    first_choice = (blank_choice if include_blank and
                    not blank_defined else [])
    if self.choices:
        return first_choice + choices
    # No explicit choices: build them from the related model's instances.
    rel_model = self.rel.to
    limit_choices_to = limit_choices_to or self.get_limit_choices_to()
    if hasattr(self.rel, 'get_related_field'):
        lst = [(getattr(x, self.rel.get_related_field().attname),
                smart_text(x))
               for x in rel_model._default_manager.complex_filter(
                   limit_choices_to)]
    else:
        lst = [(x._get_pk_val(), smart_text(x))
               for x in rel_model._default_manager.complex_filter(
                   limit_choices_to)]
    return first_choice + lst
def get_choices_default(self):
    """Return get_choices() with all of its default arguments."""
    return self.get_choices()
def get_flatchoices(self, include_blank=True,
                    blank_choice=BLANK_CHOICE_DASH):
    """Returns flattened choices, optionally prefixed by the blank choice."""
    prefix = blank_choice if include_blank else []
    return prefix + list(self.flatchoices)
def _get_val_from_obj(self, obj):
    """Return this field's value from obj, or the default when obj is None."""
    if obj is None:
        return self.get_default()
    return getattr(obj, self.attname)
def value_to_string(self, obj):
    """
    Returns a string value of this field from the passed obj.

    This is used by the serialization framework.
    """
    return smart_text(self._get_val_from_obj(obj))
def _get_choices(self):
    """Return choices, duplicating one-shot iterators so they can be reused."""
    # NOTE(review): collections.Iterator was deprecated in Python 3.3+ in
    # favour of collections.abc.Iterator (removed in 3.10); kept as-is,
    # presumably for Python 2 compatibility (six is used in this module).
    if isinstance(self._choices, collections.Iterator):
        # tee() gives back a fresh iterator to store for the next access.
        choices, self._choices = tee(self._choices)
        return choices
    else:
        return self._choices
choices = property(_get_choices)
def _get_flatchoices(self):
    """Flattened version of choices tuple, expanding any optgroups."""
    flattened = []
    for key, val in self.choices:
        if isinstance(val, (list, tuple)):
            flattened.extend(val)
        else:
            flattened.append((key, val))
    return flattened
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
    """Store a form-cleaned value onto the model instance."""
    setattr(instance, self.name, data)
def formfield(self, form_class=None, choices_form_class=None, **kwargs):
    """
    Returns a django.forms.Field instance for this database Field.

    Fields with choices become a (Typed)ChoiceField; otherwise form_class
    (defaulting to forms.CharField) is instantiated with the collected
    defaults, which any entry in kwargs may override.
    """
    defaults = {'required': not self.blank,
                'label': capfirst(self.verbose_name),
                'help_text': self.help_text}
    if self.has_default():
        if callable(self.default):
            # Pass the callable itself so the form re-evaluates it, and
            # render a hidden initial for change detection.
            defaults['initial'] = self.default
            defaults['show_hidden_initial'] = True
        else:
            defaults['initial'] = self.get_default()
    if self.choices:
        # Fields with choices get special treatment.
        include_blank = (self.blank or
                         not (self.has_default() or 'initial' in kwargs))
        defaults['choices'] = self.get_choices(include_blank=include_blank)
        defaults['coerce'] = self.to_python
        if self.null:
            defaults['empty_value'] = None
        if choices_form_class is not None:
            form_class = choices_form_class
        else:
            form_class = forms.TypedChoiceField
        # Many of the subclass-specific formfield arguments (min_value,
        # max_value) don't apply for choice fields, so be sure to only pass
        # the values that TypedChoiceField will understand.
        for k in list(kwargs):
            if k not in ('coerce', 'empty_value', 'choices', 'required',
                         'widget', 'label', 'initial', 'help_text',
                         'error_messages', 'show_hidden_initial'):
                del kwargs[k]
    defaults.update(kwargs)
    if form_class is None:
        form_class = forms.CharField
    return form_class(**defaults)
def value_from_object(self, obj):
    """Returns the value of this field in the given model instance."""
    return getattr(obj, self.attname)
class AutoField(Field):
    """An automatically-populated integer primary-key field."""

    description = _("Integer")
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be an integer."),
    }

    def __init__(self, *args, **kwargs):
        # The database supplies the value, so blank input must be allowed.
        kwargs['blank'] = True
        super(AutoField, self).__init__(*args, **kwargs)

    def check(self, **kwargs):
        errors = super(AutoField, self).check(**kwargs)
        errors.extend(self._check_primary_key())
        return errors

    def _check_primary_key(self):
        if self.primary_key:
            return []
        return [
            checks.Error(
                'AutoFields must set primary_key=True.',
                hint=None,
                obj=self,
                id='fields.E100',
            ),
        ]

    def deconstruct(self):
        name, path, args, kwargs = super(AutoField, self).deconstruct()
        # blank=True is forced in __init__, so never serialize it;
        # primary_key=True is mandatory for an AutoField.
        del kwargs['blank']
        kwargs['primary_key'] = True
        return name, path, args, kwargs

    def get_internal_type(self):
        return "AutoField"

    def to_python(self, value):
        if value is None:
            return value
        try:
            return int(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def validate(self, value, model_instance):
        # The database assigns the value; nothing to validate.
        pass

    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
            value = connection.ops.validate_autopk_value(value)
        return value

    def get_prep_value(self, value):
        value = super(AutoField, self).get_prep_value(value)
        return None if value is None else int(value)

    def contribute_to_class(self, cls, name, **kwargs):
        # NOTE(review): assert is stripped under python -O; kept as-is to
        # preserve the existing behaviour.
        assert not cls._meta.has_auto_field, \
            "A model can't have more than one AutoField."
        super(AutoField, self).contribute_to_class(cls, name, **kwargs)
        cls._meta.has_auto_field = True
        cls._meta.auto_field = self

    def formfield(self, **kwargs):
        # AutoFields are never exposed in model forms.
        return None
class BooleanField(Field):
    """A True/False field that rejects NULL values."""

    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be either True or False."),
    }
    description = _("Boolean (Either True or False)")

    def __init__(self, *args, **kwargs):
        # An unchecked checkbox is legitimate input, so blank is forced on.
        kwargs['blank'] = True
        super(BooleanField, self).__init__(*args, **kwargs)

    def check(self, **kwargs):
        errors = super(BooleanField, self).check(**kwargs)
        errors.extend(self._check_null(**kwargs))
        return errors

    def _check_null(self, **kwargs):
        if not getattr(self, 'null', False):
            return []
        return [
            checks.Error(
                'BooleanFields do not accept null values.',
                hint='Use a NullBooleanField instead.',
                obj=self,
                id='fields.E110',
            )
        ]

    def deconstruct(self):
        name, path, args, kwargs = super(BooleanField, self).deconstruct()
        # blank=True is forced in __init__, so it never needs serializing.
        del kwargs['blank']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "BooleanField"

    def to_python(self, value):
        if value in (True, False):
            # 1 and 0 compare equal to True/False; normalize to a real
            # bool for semantic reasons.
            return bool(value)
        if value in ('t', 'True', '1'):
            return True
        if value in ('f', 'False', '0'):
            return False
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def get_prep_lookup(self, lookup_type, value):
        # Special-case handling for filters coming from a Web request
        # (e.g. the admin interface). Only works for scalar values (not
        # lists); lists must be typed correctly by the caller.
        if value in ('1', '0'):
            value = bool(int(value))
        return super(BooleanField, self).get_prep_lookup(lookup_type, value)

    def get_prep_value(self, value):
        value = super(BooleanField, self).get_prep_value(value)
        return None if value is None else bool(value)

    def formfield(self, **kwargs):
        # Unlike most fields, BooleanField figures out include_blank from
        # self.null instead of self.blank.
        if self.choices:
            include_blank = not (self.has_default() or 'initial' in kwargs)
            defaults = {'choices': self.get_choices(include_blank=include_blank)}
        else:
            defaults = {'form_class': forms.BooleanField}
        defaults.update(kwargs)
        return super(BooleanField, self).formfield(**defaults)
class CharField(Field):
    """A string field; requires a positive max_length."""

    description = _("String (up to %(max_length)s)")

    def __init__(self, *args, **kwargs):
        super(CharField, self).__init__(*args, **kwargs)
        self.validators.append(validators.MaxLengthValidator(self.max_length))

    def check(self, **kwargs):
        errors = super(CharField, self).check(**kwargs)
        errors.extend(self._check_max_length_attribute(**kwargs))
        return errors

    def _check_max_length_attribute(self, **kwargs):
        # int(None) raises TypeError (max_length missing); a non-numeric
        # or non-positive value yields the E121 error instead.
        try:
            length = int(self.max_length)
        except TypeError:
            return [
                checks.Error(
                    "CharFields must define a 'max_length' attribute.",
                    hint=None,
                    obj=self,
                    id='fields.E120',
                )
            ]
        except ValueError:
            length = 0
        if length <= 0:
            return [
                checks.Error(
                    "'max_length' must be a positive integer.",
                    hint=None,
                    obj=self,
                    id='fields.E121',
                )
            ]
        return []

    def get_internal_type(self):
        return "CharField"

    def to_python(self, value):
        if isinstance(value, six.string_types) or value is None:
            return value
        return smart_text(value)

    def get_prep_value(self, value):
        value = super(CharField, self).get_prep_value(value)
        return self.to_python(value)

    def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into widget for example).
        defaults = {'max_length': self.max_length}
        defaults.update(kwargs)
        return super(CharField, self).formfield(**defaults)
# TODO: Maybe move this into contrib, because it's specialized.
class CommaSeparatedIntegerField(CharField):
    """A CharField validated as a comma-separated list of integers."""

    default_validators = [validators.validate_comma_separated_integer_list]
    description = _("Comma-separated integers")

    def formfield(self, **kwargs):
        defaults = {
            'error_messages': {
                'invalid': _('Enter only digits separated by commas.'),
            }
        }
        defaults.update(kwargs)
        return super(CommaSeparatedIntegerField, self).formfield(**defaults)
class DateTimeCheckMixin(object):
    """Shared checks for date/time fields (auto_now* / default clashes)."""

    def check(self, **kwargs):
        errors = super(DateTimeCheckMixin, self).check(**kwargs)
        errors.extend(self._check_mutually_exclusive_options())
        errors.extend(self._check_fix_default_value())
        return errors

    def _check_mutually_exclusive_options(self):
        # auto_now, auto_now_add, and default are mutually exclusive
        # options. The use of more than one of these options together
        # will trigger an Error.
        options = [self.auto_now_add, self.auto_now, self.has_default()]
        enabled = [opt not in (None, False) for opt in options].count(True)
        if enabled <= 1:
            return []
        return [
            checks.Error(
                "The options auto_now, auto_now_add, and default "
                "are mutually exclusive. Only one of these options "
                "may be present.",
                hint=None,
                obj=self,
                id='fields.E160',
            )
        ]

    def _check_fix_default_value(self):
        # Subclasses override this to warn about fixed date/time defaults.
        return []
class DateField(DateTimeCheckMixin, Field):
    """Stores a calendar date (datetime.date), with no time component."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid date format. It must be "
                     "in YYYY-MM-DD format."),
        'invalid_date': _("'%(value)s' value has the correct format (YYYY-MM-DD) "
                          "but it is an invalid date."),
    }
    description = _("Date (without time)")

    def __init__(self, verbose_name=None, name=None, auto_now=False,
                 auto_now_add=False, **kwargs):
        # auto_now: refresh to today on every save.
        # auto_now_add: set to today only when the row is first created.
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        if auto_now or auto_now_add:
            # Auto-populated fields can't be edited through forms.
            kwargs['editable'] = False
            kwargs['blank'] = True
        super(DateField, self).__init__(verbose_name, name, **kwargs)

    def _check_fix_default_value(self):
        """
        Adds a warning to the checks framework stating, that using an actual
        date or datetime value is probably wrong; it's only being evaluated on
        server start-up.
        For details see ticket #21905
        """
        if not self.has_default():
            return []
        now = timezone.now()
        if not timezone.is_naive(now):
            now = timezone.make_naive(now, timezone.utc)
        value = self.default
        if isinstance(value, datetime.datetime):
            if not timezone.is_naive(value):
                value = timezone.make_naive(value, timezone.utc)
            value = value.date()
        elif isinstance(value, datetime.date):
            # Nothing to do, as dates don't have tz information
            pass
        else:
            # No explicit date / datetime value -- no checks necessary
            return []
        # Only warn when the default falls within a day of "now": a strong
        # hint the author meant "today" rather than a fixed constant.
        offset = datetime.timedelta(days=1)
        lower = (now - offset).date()
        upper = (now + offset).date()
        if lower <= value <= upper:
            return [
                checks.Warning(
                    'Fixed default value provided.',
                    hint='It seems you set a fixed date / time / datetime '
                         'value as default for this field. This may not be '
                         'what you want. If you want to have the current date '
                         'as default, use `django.utils.timezone.now`',
                    obj=self,
                    id='fields.W161',
                )
            ]
        return []

    def deconstruct(self):
        name, path, args, kwargs = super(DateField, self).deconstruct()
        if self.auto_now:
            kwargs['auto_now'] = True
        if self.auto_now_add:
            kwargs['auto_now_add'] = True
        if self.auto_now or self.auto_now_add:
            # editable/blank are implied by auto_now* (see __init__), so
            # they never need serializing.
            del kwargs['editable']
            del kwargs['blank']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "DateField"

    def to_python(self, value):
        """Coerce value to a datetime.date, or raise ValidationError."""
        if value is None:
            return value
        if isinstance(value, datetime.datetime):
            if settings.USE_TZ and timezone.is_aware(value):
                # Convert aware datetimes to the default time zone
                # before casting them to dates (#17742).
                default_timezone = timezone.get_default_timezone()
                value = timezone.make_naive(value, default_timezone)
            return value.date()
        if isinstance(value, datetime.date):
            return value
        try:
            parsed = parse_date(value)
            if parsed is not None:
                return parsed
        except ValueError:
            # Well-formed (YYYY-MM-DD) but an impossible calendar date.
            raise exceptions.ValidationError(
                self.error_messages['invalid_date'],
                code='invalid_date',
                params={'value': value},
            )
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def pre_save(self, model_instance, add):
        if self.auto_now or (self.auto_now_add and add):
            # Auto-populate and also store the value on the instance.
            value = datetime.date.today()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super(DateField, self).pre_save(model_instance, add)

    def contribute_to_class(self, cls, name, **kwargs):
        super(DateField, self).contribute_to_class(cls, name, **kwargs)
        # Install get_next_by_FOO / get_previous_by_FOO helpers on the model.
        if not self.null:
            setattr(cls, 'get_next_by_%s' % self.name,
                    curry(cls._get_next_or_previous_by_FIELD, field=self,
                          is_next=True))
            setattr(cls, 'get_previous_by_%s' % self.name,
                    curry(cls._get_next_or_previous_by_FIELD, field=self,
                          is_next=False))

    def get_prep_lookup(self, lookup_type, value):
        # For dates lookups, convert the value to an int
        # so the database backend always sees a consistent type.
        if lookup_type in ('month', 'day', 'week_day', 'hour', 'minute', 'second'):
            return int(value)
        return super(DateField, self).get_prep_lookup(lookup_type, value)

    def get_prep_value(self, value):
        value = super(DateField, self).get_prep_value(value)
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts dates into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.value_to_db_date(value)

    def value_to_string(self, obj):
        val = self._get_val_from_obj(obj)
        return '' if val is None else val.isoformat()

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.DateField}
        defaults.update(kwargs)
        return super(DateField, self).formfield(**defaults)
class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%(value)s' value has an invalid format. It must be in "
"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."),
'invalid_date': _("'%(value)s' value has the correct format "
"(YYYY-MM-DD) but it is an invalid date."),
'invalid_datetime': _("'%(value)s' value has the correct format "
"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
"but it is an invalid date/time."),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def _check_fix_default_value(self):
    """
    Adds a warning to the checks framework stating, that using an actual
    date or datetime value is probably wrong; it's only being evaluated on
    server start-up.
    For details see ticket #21905
    """
    if not self.has_default():
        return []
    now = timezone.now()
    if not timezone.is_naive(now):
        now = timezone.make_naive(now, timezone.utc)
    value = self.default
    if isinstance(value, datetime.datetime):
        # Warn when the default lies within 10 seconds of "now" -- a
        # strong hint the author meant timezone.now rather than a constant.
        second_offset = datetime.timedelta(seconds=10)
        lower = now - second_offset
        upper = now + second_offset
        if timezone.is_aware(value):
            value = timezone.make_naive(value, timezone.utc)
    elif isinstance(value, datetime.date):
        # A bare date default: compare at day precision instead.
        second_offset = datetime.timedelta(seconds=10)
        lower = now - second_offset
        lower = datetime.datetime(lower.year, lower.month, lower.day)
        upper = now + second_offset
        upper = datetime.datetime(upper.year, upper.month, upper.day)
        value = datetime.datetime(value.year, value.month, value.day)
    else:
        # No explicit date / datetime value -- no checks necessary
        return []
    if lower <= value <= upper:
        return [
            checks.Warning(
                'Fixed default value provided.',
                hint='It seems you set a fixed date / time / datetime '
                     'value as default for this field. This may not be '
                     'what you want. If you want to have the current date '
                     'as default, use `django.utils.timezone.now`',
                obj=self,
                id='fields.W161',
            )
        ]
    return []
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn("DateTimeField %s.%s received a naive datetime "
"(%s) while time zone support is active." %
(self.model.__name__, self.name, value),
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_datetime'],
code='invalid_datetime',
params={'value': value},
)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
raise exceptions.ValidationError(
self.error_messages['invalid_date'],
code='invalid_date',
params={'value': value},
)
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super(DateTimeField, self).pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
# get_prep_lookup is inherited from DateField
def get_prep_value(self, value):
value = super(DateTimeField, self).get_prep_value(value)
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
try:
name = '%s.%s' % (self.model.__name__, self.name)
except AttributeError:
name = '(unbound)'
warnings.warn("DateTimeField %s received a naive datetime (%s)"
" while time zone support is active." %
(name, value),
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.value_to_db_datetime(value)
def value_to_string(self, obj):
val = self._get_val_from_obj(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
defaults = {'form_class': forms.DateTimeField}
defaults.update(kwargs)
return super(DateTimeField, self).formfield(**defaults)
class DecimalField(Field):
    """Fixed-precision decimal number, handled in Python as ``decimal.Decimal``."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be a decimal number."),
    }
    description = _("Decimal number")

    def __init__(self, verbose_name=None, name=None, max_digits=None,
                 decimal_places=None, **kwargs):
        # max_digits: total number of digits stored; decimal_places: digits
        # after the decimal point. Both are validated by check() below.
        self.max_digits, self.decimal_places = max_digits, decimal_places
        super(DecimalField, self).__init__(verbose_name, name, **kwargs)

    def check(self, **kwargs):
        errors = super(DecimalField, self).check(**kwargs)

        digits_errors = self._check_decimal_places()
        digits_errors.extend(self._check_max_digits())
        if not digits_errors:
            # Only compare the two options once each is individually valid.
            errors.extend(self._check_decimal_places_and_max_digits(**kwargs))
        else:
            errors.extend(digits_errors)
        return errors

    def _check_decimal_places(self):
        """E130 when decimal_places is missing, E131 when it is not a
        non-negative integer."""
        try:
            decimal_places = int(self.decimal_places)
            if decimal_places < 0:
                raise ValueError()
        except TypeError:
            # int(None) -> TypeError: the option was never supplied.
            return [
                checks.Error(
                    "DecimalFields must define a 'decimal_places' attribute.",
                    hint=None,
                    obj=self,
                    id='fields.E130',
                )
            ]
        except ValueError:
            return [
                checks.Error(
                    "'decimal_places' must be a non-negative integer.",
                    hint=None,
                    obj=self,
                    id='fields.E131',
                )
            ]
        else:
            return []

    def _check_max_digits(self):
        """E132 when max_digits is missing, E133 when it is not a positive
        integer."""
        try:
            max_digits = int(self.max_digits)
            if max_digits <= 0:
                raise ValueError()
        except TypeError:
            return [
                checks.Error(
                    "DecimalFields must define a 'max_digits' attribute.",
                    hint=None,
                    obj=self,
                    id='fields.E132',
                )
            ]
        except ValueError:
            return [
                checks.Error(
                    "'max_digits' must be a positive integer.",
                    hint=None,
                    obj=self,
                    id='fields.E133',
                )
            ]
        else:
            return []

    def _check_decimal_places_and_max_digits(self, **kwargs):
        """E134 when decimal_places exceeds max_digits."""
        if int(self.decimal_places) > int(self.max_digits):
            return [
                checks.Error(
                    "'max_digits' must be greater or equal to 'decimal_places'.",
                    hint=None,
                    obj=self,
                    id='fields.E134',
                )
            ]
        return []

    def deconstruct(self):
        # Serialize the two extra constructor options for migrations.
        name, path, args, kwargs = super(DecimalField, self).deconstruct()
        if self.max_digits is not None:
            kwargs['max_digits'] = self.max_digits
        if self.decimal_places is not None:
            kwargs['decimal_places'] = self.decimal_places
        return name, path, args, kwargs

    def get_internal_type(self):
        return "DecimalField"

    def to_python(self, value):
        """Coerce to ``decimal.Decimal`` (or None); ValidationError otherwise."""
        if value is None:
            return value
        try:
            return decimal.Decimal(value)
        except decimal.InvalidOperation:
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def _format(self, value):
        # Strings pass through untouched; numbers are formatted to the
        # field's digits/places.
        if isinstance(value, six.string_types):
            return value
        else:
            return self.format_number(value)

    def format_number(self, value):
        """
        Formats a number into a string with the requisite number of digits and
        decimal places.
        """
        # Method moved to django.db.backends.utils.
        #
        # It is preserved because it is used by the oracle backend
        # (django.db.backends.oracle.query), and also for
        # backwards-compatibility with any external code which may have used
        # this method.
        from django.db.backends import utils
        return utils.format_number(value, self.max_digits, self.decimal_places)

    def get_db_prep_save(self, value, connection):
        return connection.ops.value_to_db_decimal(self.to_python(value),
                                                  self.max_digits, self.decimal_places)

    def get_prep_value(self, value):
        value = super(DecimalField, self).get_prep_value(value)
        return self.to_python(value)

    def formfield(self, **kwargs):
        defaults = {
            'max_digits': self.max_digits,
            'decimal_places': self.decimal_places,
            'form_class': forms.DecimalField,
        }
        defaults.update(kwargs)
        return super(DecimalField, self).formfield(**defaults)
class DurationField(Field):
    """Stores timedelta objects.

    Uses INTERVAL on PostgreSQL, INTERVAL DAY TO SECOND on Oracle, and a
    bigint of microseconds on other databases.
    """
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid format. It must be in "
                     "[DD] [HH:[MM:]]ss[.uuuuuu] format.")
    }
    description = _("Duration")

    def get_internal_type(self):
        return "DurationField"

    def to_python(self, value):
        """Coerce ``value`` to a ``datetime.timedelta`` (or None).

        Strings are parsed with parse_duration(); anything unparsable raises
        ValidationError.
        """
        if value is None:
            return value
        if isinstance(value, datetime.timedelta):
            return value
        try:
            parsed = parse_duration(value)
        except ValueError:
            pass
        else:
            if parsed is not None:
                return parsed

        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def get_db_prep_value(self, value, connection, prepared=False):
        """Return the backend representation: the timedelta itself when the
        backend has a native duration type, otherwise whole microseconds."""
        if connection.features.has_native_duration_field:
            return value
        if value is None:
            return None
        # Use integer arithmetic rather than value.total_seconds() * 1000000:
        # total_seconds() returns a float, which cannot represent every
        # microsecond count exactly for large timedeltas, so round-tripping
        # through the bigint column could silently change the stored value.
        return (value.days * 86400 + value.seconds) * 1000000 + value.microseconds

    def get_db_converters(self, connection):
        # Backends without a native duration type store microseconds and need
        # a converter back to timedelta on read.
        converters = []
        if not connection.features.has_native_duration_field:
            converters.append(connection.ops.convert_durationfield_value)
        return converters + super(DurationField, self).get_db_converters(connection)

    def value_to_string(self, obj):
        """Serialize via duration_string(); empty string for None."""
        val = self._get_val_from_obj(obj)
        return '' if val is None else duration_string(val)
class EmailField(CharField):
    """CharField preconfigured for email addresses."""
    default_validators = [validators.validate_email]
    description = _("Email address")

    def __init__(self, *args, **kwargs):
        # max_length=254 to be compliant with RFCs 3696 and 5321
        kwargs.setdefault('max_length', 254)
        super(EmailField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        # We do not exclude max_length if it matches default as we want to
        # change the default in future.
        return super(EmailField, self).deconstruct()

    def formfield(self, **kwargs):
        # As with CharField, this will cause email validation to be performed
        # twice.
        defaults = dict({'form_class': forms.EmailField}, **kwargs)
        return super(EmailField, self).formfield(**defaults)
class FilePathField(Field):
    """Stores a filesystem path chosen from files/folders under ``path``."""
    description = _("File path")

    def __init__(self, verbose_name=None, name=None, path='', match=None,
                 recursive=False, allow_files=True, allow_folders=False, **kwargs):
        self.path, self.match, self.recursive = path, match, recursive
        self.allow_files, self.allow_folders = allow_files, allow_folders
        kwargs.setdefault('max_length', 100)
        super(FilePathField, self).__init__(verbose_name, name, **kwargs)

    def check(self, **kwargs):
        errors = super(FilePathField, self).check(**kwargs)
        errors.extend(self._check_allowing_files_or_folders(**kwargs))
        return errors

    def _check_allowing_files_or_folders(self, **kwargs):
        # At least one of the two switches must be on, otherwise the field
        # can never offer any choices.
        if self.allow_files or self.allow_folders:
            return []
        return [
            checks.Error(
                "FilePathFields must have either 'allow_files' or 'allow_folders' set to True.",
                hint=None,
                obj=self,
                id='fields.E140',
            )
        ]

    def deconstruct(self):
        """Only record constructor arguments that differ from the defaults."""
        name, path, args, kwargs = super(FilePathField, self).deconstruct()
        if self.path != '':
            kwargs['path'] = self.path
        if self.match is not None:
            kwargs['match'] = self.match
        if self.recursive is not False:
            kwargs['recursive'] = self.recursive
        if self.allow_files is not True:
            kwargs['allow_files'] = self.allow_files
        if self.allow_folders is not False:
            kwargs['allow_folders'] = self.allow_folders
        if kwargs.get("max_length", None) == 100:
            del kwargs["max_length"]
        return name, path, args, kwargs

    def get_prep_value(self, value):
        value = super(FilePathField, self).get_prep_value(value)
        return None if value is None else six.text_type(value)

    def formfield(self, **kwargs):
        defaults = {
            'path': self.path,
            'match': self.match,
            'recursive': self.recursive,
            'form_class': forms.FilePathField,
            'allow_files': self.allow_files,
            'allow_folders': self.allow_folders,
        }
        defaults.update(kwargs)
        return super(FilePathField, self).formfield(**defaults)

    def get_internal_type(self):
        return "FilePathField"
class FloatField(Field):
    """Stores a Python float."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be a float."),
    }
    description = _("Floating point number")

    def get_prep_value(self, value):
        value = super(FloatField, self).get_prep_value(value)
        return None if value is None else float(value)

    def get_internal_type(self):
        return "FloatField"

    def to_python(self, value):
        """Coerce to float (or None); ValidationError otherwise."""
        if value is None:
            return value
        try:
            return float(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def formfield(self, **kwargs):
        defaults = dict({'form_class': forms.FloatField}, **kwargs)
        return super(FloatField, self).formfield(**defaults)
class IntegerField(Field):
    """Integer column; Python values are coerced with int()."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be an integer."),
    }
    description = _("Integer")

    def check(self, **kwargs):
        errors = super(IntegerField, self).check(**kwargs)
        errors.extend(self._check_max_length_warning())
        return errors

    def _check_max_length_warning(self):
        # max_length has no meaning for integer columns; warn if supplied.
        if self.max_length is None:
            return []
        return [
            checks.Warning(
                "'max_length' is ignored when used with IntegerField",
                hint="Remove 'max_length' from field",
                obj=self,
                id='fields.W122',
            )
        ]

    @cached_property
    def validators(self):
        # These validators can't be added at field initialization time since
        # they're based on values retrieved from `connection`.
        min_value, max_value = connection.ops.integer_field_range(
            self.get_internal_type())
        extra = []
        if min_value is not None:
            extra.append(validators.MinValueValidator(min_value))
        if max_value is not None:
            extra.append(validators.MaxValueValidator(max_value))
        return super(IntegerField, self).validators + extra

    def get_prep_value(self, value):
        value = super(IntegerField, self).get_prep_value(value)
        return None if value is None else int(value)

    def get_prep_lookup(self, lookup_type, value):
        # Round floats up for gte/lt so the lookup keeps its meaning against
        # an integer column.
        if lookup_type in ('gte', 'lt') and isinstance(value, float):
            value = math.ceil(value)
        return super(IntegerField, self).get_prep_lookup(lookup_type, value)

    def get_internal_type(self):
        return "IntegerField"

    def to_python(self, value):
        """Coerce to int (or None); ValidationError otherwise."""
        if value is None:
            return value
        try:
            return int(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def formfield(self, **kwargs):
        defaults = dict({'form_class': forms.IntegerField}, **kwargs)
        return super(IntegerField, self).formfield(**defaults)
class BigIntegerField(IntegerField):
    """IntegerField stored as an 8-byte integer."""
    empty_strings_allowed = False
    description = _("Big (8 byte) integer")
    # Largest value representable in a signed 64-bit integer.
    MAX_BIGINT = 9223372036854775807

    def get_internal_type(self):
        return "BigIntegerField"

    def formfield(self, **kwargs):
        defaults = dict(
            {'min_value': -BigIntegerField.MAX_BIGINT - 1,
             'max_value': BigIntegerField.MAX_BIGINT},
            **kwargs)
        return super(BigIntegerField, self).formfield(**defaults)
class IPAddressField(Field):
    """Legacy IPv4-only address field, deprecated in favour of
    GenericIPAddressField."""
    empty_strings_allowed = False
    description = _("IPv4 address")
    system_check_deprecated_details = {
        'msg': (
            'IPAddressField has been deprecated. Support for it (except in '
            'historical migrations) will be removed in Django 1.9.'
        ),
        'hint': 'Use GenericIPAddressField instead.',
        'id': 'fields.W900',
    }

    def __init__(self, *args, **kwargs):
        # A dotted-quad IPv4 address never exceeds 15 characters.
        kwargs['max_length'] = 15
        super(IPAddressField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super(IPAddressField, self).deconstruct()
        # max_length is forced in __init__, so never serialize it.
        del kwargs['max_length']
        return name, path, args, kwargs

    def get_prep_value(self, value):
        value = super(IPAddressField, self).get_prep_value(value)
        return None if value is None else six.text_type(value)

    def get_internal_type(self):
        return "IPAddressField"

    def formfield(self, **kwargs):
        defaults = dict({'form_class': forms.IPAddressField}, **kwargs)
        return super(IPAddressField, self).formfield(**defaults)
class GenericIPAddressField(Field):
    """IP address field accepting IPv4 and/or IPv6 depending on ``protocol``."""
    empty_strings_allowed = False
    description = _("IP address")
    default_error_messages = {}

    def __init__(self, verbose_name=None, name=None, protocol='both',
                 unpack_ipv4=False, *args, **kwargs):
        # unpack_ipv4: unwrap IPv4-mapped IPv6 addresses (::ffff:a.b.c.d).
        self.unpack_ipv4 = unpack_ipv4
        self.protocol = protocol
        self.default_validators, invalid_error_message = \
            validators.ip_address_validators(protocol, unpack_ipv4)
        # NOTE(review): default_error_messages is the shared class-level dict
        # ({} above), so this assignment leaks the last-constructed field's
        # message to every instance. Presumably the base Field collects
        # messages from the class hierarchy (which is why it's written here,
        # before super().__init__) -- confirm before changing.
        self.default_error_messages['invalid'] = invalid_error_message
        # 39 characters fits the longest textual IPv6 address.
        kwargs['max_length'] = 39
        super(GenericIPAddressField, self).__init__(verbose_name, name, *args,
                                                    **kwargs)

    def check(self, **kwargs):
        errors = super(GenericIPAddressField, self).check(**kwargs)
        errors.extend(self._check_blank_and_null_values(**kwargs))
        return errors

    def _check_blank_and_null_values(self, **kwargs):
        # Blank values are stored as NULL, so blank=True requires null=True.
        if not getattr(self, 'null', False) and getattr(self, 'blank', False):
            return [
                checks.Error(
                    ('GenericIPAddressFields cannot have blank=True if null=False, '
                     'as blank values are stored as nulls.'),
                    hint=None,
                    obj=self,
                    id='fields.E150',
                )
            ]
        return []

    def deconstruct(self):
        name, path, args, kwargs = super(GenericIPAddressField, self).deconstruct()
        # Only serialize options that differ from the defaults.
        if self.unpack_ipv4 is not False:
            kwargs['unpack_ipv4'] = self.unpack_ipv4
        if self.protocol != "both":
            kwargs['protocol'] = self.protocol
        # max_length is fixed at 39 in __init__; drop the redundant kwarg.
        if kwargs.get("max_length", None) == 39:
            del kwargs['max_length']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "GenericIPAddressField"

    def to_python(self, value):
        # Only IPv6 (contains ':') needs normalization.
        if value and ':' in value:
            return clean_ipv6_address(value,
                self.unpack_ipv4, self.error_messages['invalid'])
        return value

    def get_db_prep_value(self, value, connection, prepared=False):
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.value_to_db_ipaddress(value)

    def get_prep_value(self, value):
        value = super(GenericIPAddressField, self).get_prep_value(value)
        if value is None:
            return None
        if value and ':' in value:
            try:
                return clean_ipv6_address(value, self.unpack_ipv4)
            except exceptions.ValidationError:
                # Leave an invalid IPv6 string untouched here; validation
                # reports it elsewhere.
                pass
        return six.text_type(value)

    def formfield(self, **kwargs):
        defaults = {
            'protocol': self.protocol,
            'form_class': forms.GenericIPAddressField,
        }
        defaults.update(kwargs)
        return super(GenericIPAddressField, self).formfield(**defaults)
class NullBooleanField(Field):
    """Boolean field that also admits NULL/None."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value must be either None, True or False."),
    }
    description = _("Boolean (Either True, False or None)")

    def __init__(self, *args, **kwargs):
        # null/blank are forced on; deconstruct() strips them back out.
        kwargs['null'] = True
        kwargs['blank'] = True
        super(NullBooleanField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super(NullBooleanField, self).deconstruct()
        del kwargs['null']
        del kwargs['blank']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "NullBooleanField"

    def to_python(self, value):
        """Map booleans and their common string spellings to True/False/None;
        raise ValidationError for anything else."""
        if value is None or value in ('None',):
            return None
        if value in (True, False):
            # Also catches 1/0 via equality with True/False.
            return bool(value)
        if value in ('t', 'True', '1'):
            return True
        if value in ('f', 'False', '0'):
            return False
        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def get_prep_lookup(self, lookup_type, value):
        # Special-case handling for filters coming from a Web request (e.g. the
        # admin interface). Only works for scalar values (not lists). If you're
        # passing in a list, you might as well make things the right type when
        # constructing the list.
        if value in ('1', '0'):
            value = bool(int(value))
        return super(NullBooleanField, self).get_prep_lookup(lookup_type, value)

    def get_prep_value(self, value):
        value = super(NullBooleanField, self).get_prep_value(value)
        return None if value is None else bool(value)

    def formfield(self, **kwargs):
        defaults = {
            'form_class': forms.NullBooleanField,
            'required': not self.blank,
            'label': capfirst(self.verbose_name),
            'help_text': self.help_text,
        }
        defaults.update(kwargs)
        return super(NullBooleanField, self).formfield(**defaults)
class PositiveIntegerField(IntegerField):
    """IntegerField whose form field refuses negative values."""
    description = _("Positive integer")

    def get_internal_type(self):
        return "PositiveIntegerField"

    def formfield(self, **kwargs):
        defaults = dict({'min_value': 0}, **kwargs)
        return super(PositiveIntegerField, self).formfield(**defaults)
class PositiveSmallIntegerField(IntegerField):
    """Small-integer variant whose form field refuses negative values."""
    description = _("Positive small integer")

    def get_internal_type(self):
        return "PositiveSmallIntegerField"

    def formfield(self, **kwargs):
        defaults = dict({'min_value': 0}, **kwargs)
        return super(PositiveSmallIntegerField, self).formfield(**defaults)
class SlugField(CharField):
    """Short label validated by validate_slug; indexed by default."""
    default_validators = [validators.validate_slug]
    description = _("Slug (up to %(max_length)s)")

    def __init__(self, *args, **kwargs):
        kwargs.setdefault('max_length', 50)
        # Set db_index=True unless it's been set manually.
        kwargs.setdefault('db_index', True)
        super(SlugField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super(SlugField, self).deconstruct()
        if kwargs.get("max_length", None) == 50:
            del kwargs['max_length']
        if self.db_index is False:
            kwargs['db_index'] = False
        else:
            # db_index=True is this field's own default; drop the redundant
            # kwarg the parent serialized.
            del kwargs['db_index']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "SlugField"

    def formfield(self, **kwargs):
        defaults = dict({'form_class': forms.SlugField}, **kwargs)
        return super(SlugField, self).formfield(**defaults)
class SmallIntegerField(IntegerField):
    """IntegerField backed by the database's small-integer column type."""
    description = _("Small integer")

    def get_internal_type(self):
        return "SmallIntegerField"
class TextField(Field):
    """Unbounded text column."""
    description = _("Text")

    def get_internal_type(self):
        return "TextField"

    def get_prep_value(self, value):
        value = super(TextField, self).get_prep_value(value)
        if value is None or isinstance(value, six.string_types):
            return value
        return smart_text(value)

    def formfield(self, **kwargs):
        # Passing max_length to forms.CharField means that the value's length
        # will be validated twice. This is considered acceptable since we want
        # the value in the form field (to pass into widget for example).
        defaults = dict(
            {'max_length': self.max_length, 'widget': forms.Textarea},
            **kwargs)
        return super(TextField, self).formfield(**defaults)
class TimeField(DateTimeCheckMixin, Field):
    """Stores a ``datetime.time``; supports auto_now / auto_now_add stamping."""
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _("'%(value)s' value has an invalid format. It must be in "
                     "HH:MM[:ss[.uuuuuu]] format."),
        'invalid_time': _("'%(value)s' value has the correct format "
                          "(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."),
    }
    description = _("Time")

    def __init__(self, verbose_name=None, name=None, auto_now=False,
                 auto_now_add=False, **kwargs):
        # auto_now: stamp on every save; auto_now_add: stamp on first save only.
        self.auto_now, self.auto_now_add = auto_now, auto_now_add
        if auto_now or auto_now_add:
            # Auto-stamped fields are not user-editable.
            kwargs['editable'] = False
            kwargs['blank'] = True
        super(TimeField, self).__init__(verbose_name, name, **kwargs)

    def _check_fix_default_value(self):
        """
        Adds a warning to the checks framework stating, that using an actual
        time or datetime value is probably wrong; it's only being evaluated on
        server start-up.

        For details see ticket #21905
        """
        if not self.has_default():
            return []

        now = timezone.now()
        if not timezone.is_naive(now):
            now = timezone.make_naive(now, timezone.utc)
        value = self.default
        if isinstance(value, datetime.datetime):
            # Naive-vs-naive comparison within a +/-10s window around "now".
            second_offset = datetime.timedelta(seconds=10)
            lower = now - second_offset
            upper = now + second_offset
            if timezone.is_aware(value):
                value = timezone.make_naive(value, timezone.utc)
        elif isinstance(value, datetime.time):
            second_offset = datetime.timedelta(seconds=10)
            lower = now - second_offset
            upper = now + second_offset
            # Anchor a bare time to today's date so it can be compared.
            value = datetime.datetime.combine(now.date(), value)
            if timezone.is_aware(value):
                # NOTE(review): this leaves ``value`` as a time while
                # lower/upper are datetimes, so the comparison below would
                # raise for an aware time default -- confirm upstream intent.
                value = timezone.make_naive(value, timezone.utc).time()
        else:
            # No explicit time / datetime value -- no checks necessary
            return []
        if lower <= value <= upper:
            return [
                checks.Warning(
                    'Fixed default value provided.',
                    hint='It seems you set a fixed date / time / datetime '
                         'value as default for this field. This may not be '
                         'what you want. If you want to have the current date '
                         'as default, use `django.utils.timezone.now`',
                    obj=self,
                    id='fields.W161',
                )
            ]
        return []

    def deconstruct(self):
        name, path, args, kwargs = super(TimeField, self).deconstruct()
        if self.auto_now is not False:
            kwargs["auto_now"] = self.auto_now
        if self.auto_now_add is not False:
            kwargs["auto_now_add"] = self.auto_now_add
        if self.auto_now or self.auto_now_add:
            # These were forced in __init__; don't serialize them.
            del kwargs['blank']
            del kwargs['editable']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "TimeField"

    def to_python(self, value):
        """Coerce to ``datetime.time`` (or None); parse strings with
        parse_time(); ValidationError otherwise."""
        if value is None:
            return None
        if isinstance(value, datetime.time):
            return value
        if isinstance(value, datetime.datetime):
            # Not usually a good idea to pass in a datetime here (it loses
            # information), but this can be a side-effect of interacting with a
            # database backend (e.g. Oracle), so we'll be accommodating.
            return value.time()

        try:
            parsed = parse_time(value)
            if parsed is not None:
                return parsed
        except ValueError:
            # Well-formed string but impossible time (e.g. 25:00).
            raise exceptions.ValidationError(
                self.error_messages['invalid_time'],
                code='invalid_time',
                params={'value': value},
            )

        raise exceptions.ValidationError(
            self.error_messages['invalid'],
            code='invalid',
            params={'value': value},
        )

    def pre_save(self, model_instance, add):
        # auto_now / auto_now_add: stamp the current time and write it back
        # onto the instance so the caller sees the saved value.
        if self.auto_now or (self.auto_now_add and add):
            value = datetime.datetime.now().time()
            setattr(model_instance, self.attname, value)
            return value
        else:
            return super(TimeField, self).pre_save(model_instance, add)

    def get_prep_value(self, value):
        value = super(TimeField, self).get_prep_value(value)
        return self.to_python(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Casts times into the format expected by the backend
        if not prepared:
            value = self.get_prep_value(value)
        return connection.ops.value_to_db_time(value)

    def value_to_string(self, obj):
        """Serialize as ISO 8601 text; empty string for None."""
        val = self._get_val_from_obj(obj)
        return '' if val is None else val.isoformat()

    def formfield(self, **kwargs):
        defaults = {'form_class': forms.TimeField}
        defaults.update(kwargs)
        return super(TimeField, self).formfield(**defaults)
class URLField(CharField):
    """CharField preconfigured for URLs (URLValidator, 200-char default)."""
    default_validators = [validators.URLValidator()]
    description = _("URL")

    def __init__(self, verbose_name=None, name=None, **kwargs):
        kwargs.setdefault('max_length', 200)
        super(URLField, self).__init__(verbose_name, name, **kwargs)

    def deconstruct(self):
        name, path, args, kwargs = super(URLField, self).deconstruct()
        # Drop max_length when it is just this field's default.
        if kwargs.get("max_length", None) == 200:
            del kwargs['max_length']
        return name, path, args, kwargs

    def formfield(self, **kwargs):
        # As with CharField, this will cause URL validation to be performed
        # twice.
        defaults = dict({'form_class': forms.URLField}, **kwargs)
        return super(URLField, self).formfield(**defaults)
class BinaryField(Field):
    """Raw binary data; serialized to text as base64."""
    description = _("Raw binary data")
    empty_values = [None, b'']

    def __init__(self, *args, **kwargs):
        # Binary blobs are never edited through forms.
        kwargs['editable'] = False
        super(BinaryField, self).__init__(*args, **kwargs)
        if self.max_length is not None:
            self.validators.append(validators.MaxLengthValidator(self.max_length))

    def deconstruct(self):
        name, path, args, kwargs = super(BinaryField, self).deconstruct()
        # editable is forced off in __init__, so never serialize it.
        del kwargs['editable']
        return name, path, args, kwargs

    def get_internal_type(self):
        return "BinaryField"

    def get_default(self):
        if self.has_default() and not callable(self.default):
            return self.default
        default = super(BinaryField, self).get_default()
        # Normalize the text empty-default to bytes.
        return b'' if default == '' else default

    def get_db_prep_value(self, value, connection, prepared=False):
        value = super(BinaryField, self).get_db_prep_value(value, connection, prepared)
        if value is None:
            return value
        return connection.Database.Binary(value)

    def value_to_string(self, obj):
        """Binary data is serialized as base64"""
        return b64encode(force_bytes(self._get_val_from_obj(obj))).decode('ascii')

    def to_python(self, value):
        # If it's a string, it should be base64-encoded data
        if isinstance(value, six.text_type):
            return six.memoryview(b64decode(force_bytes(value)))
        return value
class UUIDField(Field):
    """Stores ``uuid.UUID`` values; hex text on backends without a native
    uuid type."""
    default_error_messages = {
        'invalid': _("'%(value)s' is not a valid UUID."),
    }
    description = 'Universally unique identifier'
    empty_strings_allowed = False

    def __init__(self, **kwargs):
        # 32 hex characters (dashes are stripped before storage).
        kwargs['max_length'] = 32
        super(UUIDField, self).__init__(**kwargs)

    def get_internal_type(self):
        return "UUIDField"

    def get_db_prep_value(self, value, connection, prepared=False):
        if isinstance(value, uuid.UUID):
            return value if connection.features.has_native_uuid_field else value.hex
        if isinstance(value, six.string_types):
            return value.replace('-', '')
        return value

    def to_python(self, value):
        """Coerce strings to ``uuid.UUID``; pass through UUIDs and falsy
        values; ValidationError on malformed input."""
        if not value or isinstance(value, uuid.UUID):
            return value
        try:
            return uuid.UUID(value)
        except ValueError:
            raise exceptions.ValidationError(
                self.error_messages['invalid'],
                code='invalid',
                params={'value': value},
            )

    def formfield(self, **kwargs):
        defaults = dict({'form_class': forms.UUIDField}, **kwargs)
        return super(UUIDField, self).formfield(**defaults)
# ---- file boundary: dataset concatenation artifact (second file begins) ----
# -*- coding: utf-8 -*-
import json
import os
from datetime import datetime, timedelta
from urllib.parse import urlencode
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.test import RequestFactory
from django.urls import reverse
from django.utils.encoding import force_str
from django.utils.translation import trim_whitespace
from unittest import mock
import pytest
import responses
from pyquery import PyQuery as pq
from waffle.testutils import override_switch
from olympia import amo, core
from olympia.accounts.views import API_TOKEN_COOKIE
from olympia.activity.models import ActivityLog
from olympia.addons.models import Addon, AddonCategory, AddonUser
from olympia.amo.storage_utils import copy_stored_file
from olympia.amo.templatetags.jinja_helpers import (
format_date,
url as url_reverse,
urlparams,
)
from olympia.amo.tests import TestCase, addon_factory, user_factory, version_factory
from olympia.amo.tests.test_helpers import get_image_path
from olympia.api.models import SYMMETRIC_JWT_TYPE, APIKey, APIKeyConfirmation
from olympia.applications.models import AppVersion
from olympia.constants.promoted import RECOMMENDED
from olympia.devhub.decorators import dev_required
from olympia.devhub.models import BlogPost
from olympia.devhub.views import get_next_version_number
from olympia.files.models import FileUpload
from olympia.files.tests.test_models import UploadTest as BaseUploadTest
from olympia.ratings.models import Rating
from olympia.translations.models import Translation, delete_translation
from olympia.users.models import IPNetworkUserRestriction, UserProfile
from olympia.users.tests.test_views import UserViewBase
from olympia.versions.models import ApplicationsVersions, Version, VersionPreview
from olympia.zadmin.models import set_config
class HubTest(TestCase):
    """Base test case: logs a regular user into the devhub index and sets up
    fixture data, including an add-on the user's authorship was deleted from."""

    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        super(HubTest, self).setUp()
        self.url = reverse('devhub.index')
        assert self.client.login(email='regular@mozilla.com')
        assert self.client.get(self.url).status_code == 200
        self.user_profile = UserProfile.objects.get(id=999)

        # An add-on belonging to someone else where our user's authorship
        # record is soft-deleted: it must never appear on their dashboard.
        someone_elses_addon = addon_factory(users=[user_factory()])
        AddonUser.unfiltered.create(
            addon=someone_elses_addon,
            user=self.user_profile,
            role=amo.AUTHOR_ROLE_DELETED,
        )

    def clone_addon(self, num, addon_id=3615):
        """Create ``num`` add-ons copying type/status from ``addon_id``,
        owned by the test user; return them as a list."""
        source = Addon.objects.get(id=addon_id)
        return [
            addon_factory(
                type=source.type,
                status=source.status,
                name='cloned-addon-%s-%s' % (addon_id, i),
                users=[self.user_profile],
            )
            for i in range(num)
        ]
class TestDashboard(HubTest):
    def setUp(self):
        """Point at the devhub add-ons dashboard and give the logged-in user
        ownership of fixture add-on 3615."""
        super(TestDashboard, self).setUp()
        self.url = reverse('devhub.addons')
        self.themes_url = reverse('devhub.themes')
        assert self.client.get(self.url).status_code == 200
        self.addon = Addon.objects.get(pk=3615)
        self.addon.addonuser_set.create(user=self.user_profile)
    def test_addons_layout(self):
        """Smoke-test the dashboard chrome: page title, footer links,
        copyright block, and absence of the mobile-only footer link."""
        doc = pq(self.client.get(self.url).content)
        assert doc('title').text() == (
            'Manage My Submissions :: Developer Hub :: Add-ons for Firefox'
        )
        assert doc('.links-footer').length == 1
        assert doc('#copyright').length == 1
        assert doc('#footer-links .mobile-link').length == 0
def get_action_links(self, addon_id):
response = self.client.get(self.url)
doc = pq(response.content)
selector = '.item[data-addonid="%s"] .item-actions li > a' % addon_id
links = [a.text.strip() for a in doc(selector)]
return links
def test_no_addons(self):
"""Check that no add-ons are displayed for this user."""
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('.item item').length == 0
    def test_addon_pagination(self):
        """Check that the correct info. is displayed for each add-on:
        namely, that add-ons are paginated at 10 items per page, and that
        when there is more than one page, the 'Sort by' header and pagination
        footer appear.
        """
        # Create 10 add-ons; turn the existing one from setUp into a static
        # theme, which shouldn't show up as an add-on in this list.
        addons = self.clone_addon(10)
        self.addon.update(type=amo.ADDON_STATICTHEME)
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert len(doc('.item .item-info')) == 10
        assert len(doc('.item .info.extension')) == 10
        # A single page: no paginator is rendered.
        assert doc('nav.paginator').length == 0
        for addon in addons:
            assert addon.get_icon_url(64) in doc('.item .info h3 a').html()

        # Create 5 add-ons -have to change self.addon back to clone extensions.
        self.addon.update(type=amo.ADDON_EXTENSION)
        self.clone_addon(5)
        self.addon.update(type=amo.ADDON_STATICTHEME)
        response = self.client.get(self.url, {'page': 2})
        doc = pq(response.content)
        # 15 extensions total -> the second page holds the remaining 5.
        assert len(doc('.item .item-info')) == 5
        assert doc('nav.paginator').length == 1
def test_themes(self):
"""Check themes show on dashboard."""
# Create 2 themes.
staticthemes = []
for x in range(2):
addon = addon_factory(type=amo.ADDON_STATICTHEME, users=[self.user_profile])
VersionPreview.objects.create(version=addon.current_version)
staticthemes.append(addon)
response = self.client.get(self.themes_url)
doc = pq(response.content)
assert len(doc('.item .item-info')) == 2
assert len(doc('.item .info.statictheme')) == 2
for addon in staticthemes:
assert addon.current_previews[0].thumbnail_url in [
img.attrib['src'] for img in doc('.info.statictheme h3 img')
]
    def test_show_hide_statistics_and_new_version_for_disabled(self):
        """Statistics/New Version action links depend on disabled state."""
        # Not disabled: show statistics and new version links.
        self.addon.update(disabled_by_user=False)
        links = self.get_action_links(self.addon.pk)
        assert 'Statistics' in links, 'Unexpected: %r' % links
        assert 'New Version' in links, 'Unexpected: %r' % links
        # Disabled (user): hide new version link.
        self.addon.update(disabled_by_user=True)
        links = self.get_action_links(self.addon.pk)
        assert 'New Version' not in links, 'Unexpected: %r' % links
        # Disabled (admin): hide statistics and new version links.
        self.addon.update(disabled_by_user=False, status=amo.STATUS_DISABLED)
        links = self.get_action_links(self.addon.pk)
        assert 'Statistics' not in links, 'Unexpected: %r' % links
        assert 'New Version' not in links, 'Unexpected: %r' % links
    def test_public_addon(self):
        """An approved add-on shows full details on the dashboard."""
        assert self.addon.status == amo.STATUS_APPROVED
        doc = pq(self.client.get(self.url).content)
        item = doc('.item[data-addonid="%s"]' % self.addon.id)
        assert item.find('h3 a').attr('href') == self.addon.get_dev_url()
        assert item.find('p.downloads'), 'Expected weekly downloads'
        assert item.find('p.users'), 'Expected ADU'
        assert item.find('.item-details'), 'Expected item details'
        assert not item.find(
            'p.incomplete'
        ), 'Unexpected message about incomplete add-on'
        # NOTE(review): nothing is asserted after this deletion — it looks
        # like the tail of a check was lost. Confirm whether a follow-up
        # request/assertion on the page without app versions is missing.
        appver = self.addon.current_version.apps.all()[0]
        appver.delete()
def test_dev_news(self):
for i in range(7):
bp = BlogPost(
title='hi %s' % i, date_posted=datetime.now() - timedelta(days=i)
)
bp.save()
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('.blog-posts').length == 1
assert doc('.blog-posts li').length == 5
assert doc('.blog-posts li a').eq(0).text() == 'hi 0'
assert doc('.blog-posts li a').eq(4).text() == 'hi 4'
def test_sort_created_filter(self):
response = self.client.get(self.url + '?sort=created')
doc = pq(response.content)
assert doc('.item-details').length == 1
elm = doc('.item-details .date-created')
assert elm.length == 1
assert elm.remove('strong').text() == (format_date(self.addon.created))
    def test_sort_updated_filter(self):
        """The default sort shows the add-on's last-updated date."""
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.item-details').length == 1
        elm = doc('.item-details .date-updated')
        assert elm.length == 1
        # Drop the <strong> label so only the formatted date remains.
        assert elm.remove('strong').text() == (
            trim_whitespace(format_date(self.addon.last_updated))
        )
    def test_purely_unlisted_addon_are_not_shown_as_incomplete(self):
        """Purely unlisted add-ons must not be flagged as incomplete."""
        self.make_addon_unlisted(self.addon)
        assert self.addon.has_complete_metadata()
        response = self.client.get(self.url)
        doc = pq(response.content)
        # It should not be considered incomplete despite having STATUS_NULL,
        # since it's purely unlisted.
        assert not doc('.incomplete')
        # Rest of the details should be shown, but not the AMO-specific stuff.
        assert not doc('.item-info')
        assert doc('.item-details')
    def test_mixed_versions_addon_with_incomplete_metadata(self):
        """A listed version missing a license marks the add-on incomplete."""
        self.make_addon_unlisted(self.addon)
        version = version_factory(addon=self.addon, channel=amo.RELEASE_CHANNEL_LISTED)
        # Removing the license makes the listed metadata incomplete.
        version.update(license=None)
        self.addon.reload()
        assert not self.addon.has_complete_metadata()
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert doc('.incomplete').text() == (
            'This add-on is missing some required information before it can be'
            ' submitted for publication.'
        )
        # The "Resume" form must post to the request-review endpoint.
        assert doc('form.resume').attr('action') == (
            url_reverse('devhub.request-review', self.addon.slug)
        )
        assert doc('button.link').text() == 'Resume'
def test_no_versions_addon(self):
self.addon.current_version.delete()
response = self.client.get(self.url)
doc = pq(response.content)
assert doc('.incomplete').text() == ("This add-on doesn't have any versions.")
class TestUpdateCompatibility(TestCase):
    """Compatibility-update widgets on the devhub add-ons dashboard."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.url = reverse('devhub.addons')
        # These aren't realistic but work with existing tests and the 3615 addon
        self.create_appversion('android', '3.7a1pre')
        self.create_appversion('android', '4.0')

    def create_appversion(self, name, version):
        # Helper: returns the (AppVersion, created) pair for app *name*.
        return AppVersion.objects.get_or_create(
            application=amo.APPS[name].id, version=version
        )

    def test_no_compat(self):
        """Dictionaries show no compat UI and the ajax endpoints 404."""
        addon = Addon.objects.get(pk=3615)
        addon.update(type=amo.ADDON_DICT)
        self.client.logout()
        assert self.client.login(email='admin@mozilla.com')
        response = self.client.get(self.url)
        doc = pq(response.content)
        assert not doc('.item[data-addonid="3615"] li.compat')
        response = self.client.get(
            reverse(
                'devhub.ajax.compat.update', args=[addon.slug, addon.current_version.id]
            )
        )
        assert response.status_code == 404
        response = self.client.get(
            reverse('devhub.ajax.compat.status', args=[addon.slug])
        )
        assert response.status_code == 404

    def test_compat(self):
        """The compat-update tooltip appears only with strict compatibility."""
        addon = Addon.objects.get(pk=3615)
        response = self.client.get(self.url)
        doc = pq(response.content)
        cu = doc('.item[data-addonid="3615"] .tooltip.compat-update')
        assert not cu
        addon.current_version.files.update(strict_compatibility=True)
        response = self.client.get(self.url)
        doc = pq(response.content)
        cu = doc('.item[data-addonid="3615"] .tooltip.compat-update')
        assert cu
        # The tooltip carries the ajax update/status URLs as data attributes.
        update_url = reverse(
            'devhub.ajax.compat.update', args=[addon.slug, addon.current_version.id]
        )
        assert cu.attr('data-updateurl') == update_url
        status_url = reverse('devhub.ajax.compat.status', args=[addon.slug])
        selector = '.item[data-addonid="3615"] li.compat'
        assert doc(selector).attr('data-src') == status_url
        assert doc('.item[data-addonid="3615"] .compat-update-modal')

    def test_incompat_firefox(self):
        """An outdated Firefox max version surfaces the compat-error tooltip."""
        addon = Addon.objects.get(pk=3615)
        addon.current_version.files.update(strict_compatibility=True)
        versions = ApplicationsVersions.objects.all()[0]
        versions.max = AppVersion.objects.get(version='2.0')
        versions.save()
        doc = pq(self.client.get(self.url).content)
        assert doc('.item[data-addonid="3615"] .tooltip.compat-error')

    def test_incompat_android(self):
        """Same compat error when the outdated max version is for Android."""
        addon = Addon.objects.get(pk=3615)
        addon.current_version.files.update(strict_compatibility=True)
        appver = AppVersion.objects.get(version='2.0')
        appver.update(application=amo.ANDROID.id)
        av = ApplicationsVersions.objects.all()[0]
        av.application = amo.ANDROID.id
        av.max = appver
        av.save()
        doc = pq(self.client.get(self.url).content)
        assert doc('.item[data-addonid="3615"] .tooltip.compat-error')
class TestDevRequired(TestCase):
    """Access control for developer-only views.

    Verifies that anonymous users are redirected to login, owners can
    GET/POST, and mozilla-disabled add-ons reject non-admin POSTs.
    """

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        # Modernized: zero-argument super(), consistent with the rest of
        # the file (e.g. TestUpdateCompatibility.setUp).
        super().setUp()
        self.addon = Addon.objects.get(id=3615)
        self.edit_page_url = self.addon.get_dev_url('edit')
        self.get_url = self.addon.get_dev_url('versions')
        self.post_url = self.addon.get_dev_url('delete')
        assert self.client.login(email='del@icio.us')
        self.au = self.addon.addonuser_set.get(user__email='del@icio.us')
        assert self.au.role == amo.AUTHOR_ROLE_OWNER

    def test_anon(self):
        """Anonymous users are redirected to login for developer pages."""
        self.client.logout()
        self.assertLoginRedirects(self.client.get(self.get_url), self.get_url)
        self.assertLoginRedirects(
            self.client.get(self.edit_page_url), self.edit_page_url
        )

    def test_dev_get(self):
        """An owner can GET the versions and edit pages."""
        assert self.client.get(self.get_url).status_code == 200
        assert self.client.get(self.edit_page_url).status_code == 200

    def test_dev_post(self):
        """POSTing the delete URL redirects back to the versions page."""
        self.assert3xx(self.client.post(self.post_url), self.get_url)

    def test_disabled_post_dev(self):
        """A regular developer can't POST to a mozilla-disabled add-on."""
        self.addon.update(status=amo.STATUS_DISABLED)
        assert self.client.post(self.get_url).status_code == 403

    def test_disabled_post_admin(self):
        """Admins can still POST to a mozilla-disabled add-on."""
        self.addon.update(status=amo.STATUS_DISABLED)
        assert self.client.login(email='admin@mozilla.com')
        self.assert3xx(self.client.post(self.post_url), self.get_url)
class TestVersionStats(TestCase):
    """The per-version JSON stats endpoint (devhub.versions.stats)."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        # Modernized: zero-argument super(), consistent with the rest of
        # the file.
        super().setUp()
        assert self.client.login(email='admin@mozilla.com')

    def test_counts(self):
        """The endpoint reports review and file counts per version."""
        addon = Addon.objects.get(id=3615)
        version = addon.current_version
        user = UserProfile.objects.get(email='admin@mozilla.com')
        # Ten ratings against the current version should be counted.
        for _ in range(10):
            Rating.objects.create(addon=addon, user=user, version=addon.current_version)
        url = reverse('devhub.versions.stats', args=[addon.slug])
        data = json.loads(force_str(self.client.get(url).content))
        exp = {
            str(version.id): {
                'reviews': 10,
                'files': 1,
                'version': version.version,
                'id': version.id,
            }
        }
        self.assertDictEqual(data, exp)
class TestDelete(TestCase):
    """Deleting add-ons and themes through the devhub delete view."""

    fixtures = ['base/addon_3615']

    def setUp(self):
        # Modernized: zero-argument super(), consistent with the rest of
        # the file.
        super().setUp()
        # Lazy accessors: the add-on can be deleted during a test, so the
        # queryset and URL are recomputed on every call.
        self.get_addon = lambda: Addon.objects.filter(id=3615)
        assert self.client.login(email='del@icio.us')
        self.user = UserProfile.objects.get(email='del@icio.us')
        self.get_url = lambda: self.get_addon()[0].get_dev_url('delete')

    def test_post_not(self):
        """POST without the slug confirmation leaves the add-on untouched."""
        response = self.client.post(self.get_url(), follow=True)
        assert pq(response.content)('.notification-box').text() == (
            'URL name was incorrect. Add-on was not deleted.'
        )
        assert self.get_addon().exists()
        self.assert3xx(response, self.get_addon()[0].get_dev_url('versions'))

    def test_post(self):
        """Posting the correct slug deletes the add-on."""
        self.get_addon().get().update(slug='addon-slug')
        response = self.client.post(self.get_url(), {'slug': 'addon-slug'}, follow=True)
        assert pq(response.content)('.notification-box').text() == ('Add-on deleted.')
        assert not self.get_addon().exists()
        self.assert3xx(response, reverse('devhub.addons'))

    def test_post_wrong_slug(self):
        """Posting a wrong slug does not delete the add-on."""
        self.get_addon().get().update(slug='addon-slug')
        response = self.client.post(self.get_url(), {'slug': 'theme-slug'}, follow=True)
        assert pq(response.content)('.notification-box').text() == (
            'URL name was incorrect. Add-on was not deleted.'
        )
        assert self.get_addon().exists()
        self.assert3xx(response, self.get_addon()[0].get_dev_url('versions'))

    def test_post_statictheme(self):
        """Static themes delete with theme-specific message and redirect."""
        theme = addon_factory(
            name='xpi name',
            type=amo.ADDON_STATICTHEME,
            slug='stheme-slug',
            users=[self.user],
        )
        response = self.client.post(
            theme.get_dev_url('delete'), {'slug': 'stheme-slug'}, follow=True
        )
        assert pq(response.content)('.notification-box').text() == ('Theme deleted.')
        assert not Addon.objects.filter(id=theme.id).exists()
        self.assert3xx(response, reverse('devhub.themes'))

    def test_post_statictheme_wrong_slug(self):
        """A wrong slug confirmation leaves the theme untouched."""
        theme = addon_factory(
            name='xpi name',
            type=amo.ADDON_STATICTHEME,
            slug='stheme-slug',
            users=[self.user],
        )
        response = self.client.post(
            theme.get_dev_url('delete'), {'slug': 'foo-slug'}, follow=True
        )
        assert pq(response.content)('.notification-box').text() == (
            'URL name was incorrect. Theme was not deleted.'
        )
        assert Addon.objects.filter(id=theme.id).exists()
        self.assert3xx(response, theme.get_dev_url('versions'))
class TestHome(TestCase):
    """The devhub landing page (devhub.index), logged in and out."""

    fixtures = ['base/addon_3615', 'base/users']

    def setUp(self):
        # Modernized: zero-argument super(), consistent with the rest of
        # the file.
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.url = reverse('devhub.index')
        self.addon = Addon.objects.get(pk=3615)

    def get_pq(self):
        """GET self.url, assert success, and return the parsed document."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        return pq(response.content)

    def test_basic_logged_out(self):
        """Logged-out visitors get the marketing version of the page."""
        self.client.logout()
        response = self.client.get(self.url)
        assert response.status_code == 200
        self.assertTemplateUsed(response, 'devhub/index.html')
        assert b'Customize Firefox' in response.content

    def test_default_lang_selected(self):
        """The language switcher defaults to en-us."""
        self.client.logout()
        doc = self.get_pq()
        selected_value = doc('#language option:selected').attr('value')
        assert selected_value == 'en-us'

    def test_basic_logged_in(self):
        """Logged-in developers see their add-ons overview."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        self.assertTemplateUsed(response, 'devhub/index.html')
        assert b'My Add-ons' in response.content

    def test_my_addons_addon_versions_link(self):
        """The versions link on the card points at the dev versions page."""
        assert self.client.login(email='del@icio.us')
        doc = self.get_pq()
        addon_list = doc('.DevHub-MyAddons-list')
        href = addon_list.find('.DevHub-MyAddons-item-versions a').attr('href')
        assert href == self.addon.get_dev_url('versions')

    def _check_my_addons_statuses(self, statuses):
        """Shared body for the my-addons status tests.

        For each (addon_status, file_status, label) tuple, apply the
        statuses and assert the dashboard card shows the expected label;
        finally delete all add-ons and assert the list is empty.
        """
        latest_version = self.addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
        latest_file = latest_version.files.all()[0]
        for addon_status, file_status, status_str in statuses:
            latest_file.update(status=file_status)
            self.addon.update(status=addon_status)
            doc = self.get_pq()
            addon_item = doc('.DevHub-MyAddons-list .DevHub-MyAddons-item')
            assert addon_item.length == 1
            assert addon_item.find('.DevHub-MyAddons-item-edit').attr(
                'href'
            ) == self.addon.get_dev_url('edit')
            # Extensions show their icon; static themes show a preview image.
            if self.addon.type != amo.ADDON_STATICTHEME:
                assert self.addon.get_icon_url(64) in addon_item.html()
            else:
                assert self.addon.current_previews[0].thumbnail_url in (
                    addon_item.html()
                )
            assert (
                status_str == addon_item.find('.DevHub-MyAddons-VersionStatus').text()
            )
        Addon.objects.all().delete()
        assert self.get_pq()('.DevHub-MyAddons-list .DevHub-MyAddons-item').length == 0

    def test_my_addons(self):
        """Status labels for a regular add-on across status transitions."""
        self._check_my_addons_statuses(
            [
                (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW, 'Awaiting Review'),
                (amo.STATUS_APPROVED, amo.STATUS_AWAITING_REVIEW, 'Approved'),
                (amo.STATUS_DISABLED, amo.STATUS_APPROVED, 'Disabled by Mozilla'),
            ]
        )

    def test_my_addons_recommended(self):
        """Recommended add-ons get the 'Approved and Recommended' label."""
        self.make_addon_promoted(self.addon, RECOMMENDED, approve_version=True)
        self._check_my_addons_statuses(
            [
                (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW, 'Awaiting Review'),
                (
                    amo.STATUS_APPROVED,
                    amo.STATUS_AWAITING_REVIEW,
                    'Approved and Recommended',
                ),
                (amo.STATUS_DISABLED, amo.STATUS_APPROVED, 'Disabled by Mozilla'),
            ]
        )

    def test_my_addons_with_static_theme(self):
        """The status checks also hold when the add-on is a static theme."""
        self.addon.update(type=amo.ADDON_STATICTHEME)
        VersionPreview.objects.create(version=self.addon.current_version)
        self.test_my_addons()

    def test_my_addons_incomplete(self):
        """Incomplete add-ons still show a card with an edit link."""
        self.addon.update(status=amo.STATUS_NULL)
        # Make add-on incomplete
        AddonCategory.objects.filter(addon=self.addon).delete()
        doc = self.get_pq()
        addon_item = doc('.DevHub-MyAddons-list .DevHub-MyAddons-item')
        assert addon_item.length == 1
        assert addon_item.find('.DevHub-MyAddons-item-edit').attr(
            'href'
        ) == self.addon.get_dev_url('edit')

    def test_my_addons_no_disabled_or_deleted(self):
        """User-disabled add-ons appear with an 'Invisible' status label."""
        self.addon.update(status=amo.STATUS_APPROVED, disabled_by_user=True)
        doc = self.get_pq()
        addon_item = doc('.DevHub-MyAddons-list .DevHub-MyAddons-item')
        assert addon_item.length == 1
        assert addon_item.find('.DevHub-MyAddons-VersionStatus').text() == 'Invisible'
class TestActivityFeed(TestCase):
    """The devhub recent-activity feed (dashboard widget and feed pages)."""

    fixtures = ('base/users', 'base/addon_3615')

    def setUp(self):
        # Modernized: zero-argument super(), consistent with the rest of
        # the file.
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.versions.first()
        self.action_user = UserProfile.objects.get(email='reviewer@mozilla.com')
        # Start from a clean slate so tests only see logs they create.
        ActivityLog.objects.all().delete()

    def test_feed_for_all(self):
        """The all-add-ons feed page renders with its generic heading."""
        response = self.client.get(reverse('devhub.feed_all'))
        assert response.status_code == 200
        doc = pq(response.content)
        assert doc('header h2').text() == 'Recent Activity for My Add-ons'

    def test_feed_for_addon(self):
        """The single-add-on feed page names the add-on in its heading."""
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        assert response.status_code == 200
        doc = pq(response.content)
        assert doc('header h2').text() == ('Recent Activity for %s' % self.addon.name)

    def test_feed_disabled(self):
        """The feed stays accessible when the add-on is disabled."""
        self.addon.update(status=amo.STATUS_DISABLED)
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        assert response.status_code == 200

    def test_feed_disabled_anon(self):
        """Anonymous users are redirected away from the feed."""
        self.client.logout()
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        assert response.status_code == 302

    def add_log(self, action=amo.LOG.ADD_RATING):
        # Helper: record an activity log entry as the reviewer user.
        core.set_user(self.action_user)
        ActivityLog.create(action, self.addon, self.version)

    def add_hidden_log(self, action=amo.LOG.COMMENT_VERSION):
        # Helper: record an action that should not appear in the feed.
        self.add_log(action=action)

    def test_feed_hidden(self):
        """Hidden actions are excluded from the single-add-on feed."""
        self.add_hidden_log()
        self.add_hidden_log(amo.LOG.OBJECT_ADDED)
        res = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(res.content)
        assert len(doc('#recent-activity li.item')) == 0

    def test_addons_hidden(self):
        """Hidden actions are excluded from the dashboard widget too."""
        self.add_hidden_log()
        self.add_hidden_log(amo.LOG.OBJECT_ADDED)
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        assert len(doc('.recent-activity li.item')) == 0

    def test_unlisted_addons_dashboard(self):
        """Unlisted addons are displayed in the feed on the dashboard page."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        assert len(doc('.recent-activity li.item')) == 2

    def test_unlisted_addons_feed_sidebar(self):
        """Unlisted addons are displayed in the left side in the feed page."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.feed_all'))
        doc = pq(res.content)
        # First li is "All My Add-ons".
        assert len(doc('#refine-addon li')) == 2

    def test_unlisted_addons_feed(self):
        """Unlisted addons are displayed in the feed page."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.feed_all'))
        doc = pq(res.content)
        assert len(doc('#recent-activity .item')) == 2

    def test_unlisted_addons_feed_filter(self):
        """Feed page can be filtered on unlisted addon."""
        self.make_addon_unlisted(self.addon)
        self.add_log()
        res = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(res.content)
        assert len(doc('#recent-activity .item')) == 2

    def test_reviewer_name_is_used_for_reviewer_actions(self):
        """Reviewer actions display reviewer_name, not the display name."""
        self.action_user.update(display_name='HîdeMe', reviewer_name='ShöwMe')
        self.add_log(action=amo.LOG.APPROVE_VERSION)
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(response.content)
        assert len(doc('#recent-activity .item')) == 1
        content = force_str(response.content)
        assert self.action_user.reviewer_name in content
        assert self.action_user.name not in content

    def test_regular_name_is_used_for_non_reviewer_actions(self):
        """Non-reviewer actions display the regular name."""
        # Fields are inverted compared to the test above.
        self.action_user.update(reviewer_name='HîdeMe', display_name='ShöwMe')
        self.add_log(action=amo.LOG.ADD_RATING)  # not a reviewer action.
        response = self.client.get(reverse('devhub.feed', args=[self.addon.slug]))
        doc = pq(response.content)
        assert len(doc('#recent-activity .item')) == 1
        content = force_str(response.content)
        # Assertions are inverted compared to the test above.
        assert self.action_user.reviewer_name not in content
        assert self.action_user.name in content

    def test_addons_dashboard_name(self):
        """The dashboard timestamp shows the actor's name without a link."""
        self.add_log()
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        timestamp = doc('.recent-activity li.item span.activity-timestamp')
        assert len(timestamp) == 1
        assert self.action_user.name
        assert self.action_user.name in timestamp.html()
        assert '<a href=' not in timestamp.html()

    def test_addons_dashboard_reviewer_name(self):
        """Reviewer actions show reviewer_name in the dashboard timestamp."""
        self.action_user.update(reviewer_name='bob')
        self.add_log(action=amo.LOG.APPROVE_VERSION)
        res = self.client.get(reverse('devhub.addons'))
        doc = pq(res.content)
        timestamp = doc('.recent-activity li.item span.activity-timestamp')
        assert len(timestamp) == 1
        assert self.action_user.name
        assert self.action_user.name not in timestamp.html()
        assert self.action_user.reviewer_name in timestamp.html()
        assert '<a href=' not in timestamp.html()
class TestAPIAgreement(TestCase):
    """The developer agreement page gating API key access."""

    fixtures = ['base/addon_3615', 'base/addon_5579', 'base/users']

    def setUp(self):
        # Modernized: zero-argument super(), consistent with the rest of
        # the file.
        super().setUp()
        assert self.client.login(email='del@icio.us')
        self.user = UserProfile.objects.get(email='del@icio.us')
        self.user.update(last_login_ip='192.168.1.1')

    def test_agreement_read(self):
        """Users who already read the agreement go straight to api_key."""
        self.user.update(read_dev_agreement=self.days_ago(0))
        response = self.client.get(reverse('devhub.api_key_agreement'))
        self.assert3xx(response, reverse('devhub.api_key'))

    def test_agreement_unread_captcha_inactive(self):
        """Without the waffle switch, the form has no captcha field."""
        self.user.update(read_dev_agreement=None)
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context
        form = response.context['agreement_form']
        assert 'recaptcha' not in form.fields
        doc = pq(response.content)
        assert doc('.g-recaptcha') == []

    @override_switch('developer-agreement-captcha', active=True)
    def test_agreement_unread_captcha_active(self):
        """With the waffle switch on, the form includes a captcha."""
        self.user.update(read_dev_agreement=None)
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context
        form = response.context['agreement_form']
        assert 'recaptcha' in form.fields
        doc = pq(response.content)
        assert doc('.g-recaptcha')

    def test_agreement_submit_success(self):
        """Accepting the agreement records the read date and redirects."""
        self.user.update(read_dev_agreement=None)
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 302
        assert response['Location'] == reverse('devhub.api_key')
        self.user.reload()
        self.assertCloseToNow(self.user.read_dev_agreement)

    @override_switch('developer-agreement-captcha', active=True)
    def test_agreement_submit_captcha_active_error(self):
        """Submitting without solving the captcha re-renders with errors."""
        self.user.update(read_dev_agreement=None)
        response = self.client.post(reverse('devhub.api_key_agreement'))
        # Captcha is properly rendered
        doc = pq(response.content)
        assert doc('.g-recaptcha')
        assert 'recaptcha' in response.context['agreement_form'].errors

    @override_switch('developer-agreement-captcha', active=True)
    def test_agreement_submit_captcha_active_success(self):
        """A verified captcha response lets the submission through."""
        self.user.update(read_dev_agreement=None)
        # Stub Google's siteverify endpoint to accept the token.
        verify_data = urlencode(
            {
                'secret': '',
                'remoteip': '127.0.0.1',
                'response': 'test',
            }
        )
        responses.add(
            responses.GET,
            'https://www.google.com/recaptcha/api/siteverify?' + verify_data,
            json={'error-codes': [], 'success': True},
        )
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'g-recaptcha-response': 'test',
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 302
        assert response['Location'] == reverse('devhub.api_key')
        self.user.reload()
        self.assertCloseToNow(self.user.read_dev_agreement)

    def test_agreement_read_but_too_long_ago(self):
        """An agreement read before the last change must be re-accepted."""
        set_config('last_dev_agreement_change_date', '2018-01-01 12:00')
        before_agreement_last_changed = datetime(2018, 1, 1, 12, 0) - timedelta(days=1)
        self.user.update(read_dev_agreement=before_agreement_last_changed)
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context

    @mock.patch('olympia.addons.utils.RestrictionChecker.is_submission_allowed')
    def test_cant_submit_agreement_if_restricted(self, is_submission_allowed_mock):
        """Restricted users can't accept the agreement."""
        is_submission_allowed_mock.return_value = False
        self.user.update(read_dev_agreement=None)
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 200
        assert response.context['agreement_form'].is_valid() is False
        self.user.reload()
        assert self.user.read_dev_agreement is None
        assert is_submission_allowed_mock.call_count == 2
        # First call is from the form, and it's not checking the agreement,
        # it's just to see if the user is restricted.
        assert is_submission_allowed_mock.call_args_list[0] == (
            (),
            {'check_dev_agreement': False},
        )
        # Second call is from the view itself, no arguments
        assert is_submission_allowed_mock.call_args_list[1] == ((), {})

    def test_cant_submit_agreement_if_restricted_functional(self):
        """End-to-end restriction check with a real IP network rule."""
        # Like test_cant_submit_agreement_if_restricted() but with no mocks,
        # picking a single restriction and making sure it's working properly.
        IPNetworkUserRestriction.objects.create(network='127.0.0.1/32')
        self.user.update(read_dev_agreement=None)
        response = self.client.post(
            reverse('devhub.api_key_agreement'),
            data={
                'distribution_agreement': 'on',
                'review_policy': 'on',
            },
        )
        assert response.status_code == 200
        assert response.context['agreement_form'].is_valid() is False
        doc = pq(response.content)
        assert doc('.addon-submission-process').text() == (
            'Multiple add-ons violating our policies have been submitted '
            'from your location. The IP address has been blocked.\n'
            'More information on Developer Accounts'
        )

    @mock.patch('olympia.addons.utils.RestrictionChecker.is_submission_allowed')
    def test_agreement_page_shown_if_restricted(self, is_submission_allowed_mock):
        """Restricted users see the agreement page instead of api_key."""
        # Like test_agreement_read() above, but with a restricted user: they
        # are shown the agreement page again instead of redirecting to the
        # api keys page.
        is_submission_allowed_mock.return_value = False
        self.user.update(read_dev_agreement=self.days_ago(0))
        response = self.client.get(reverse('devhub.api_key_agreement'))
        assert response.status_code == 200
        assert 'agreement_form' in response.context
class TestAPIKeyPage(TestCase):
fixtures = ['base/addon_3615', 'base/users']
def setUp(self):
super(TestAPIKeyPage, self).setUp()
self.url = reverse('devhub.api_key')
assert self.client.login(email='del@icio.us')
self.user = UserProfile.objects.get(email='del@icio.us')
self.user.update(last_login_ip='192.168.1.1')
def test_key_redirect(self):
self.user.update(read_dev_agreement=None)
response = self.client.get(reverse('devhub.api_key'))
self.assert3xx(response, reverse('devhub.api_key_agreement'))
def test_redirect_if_restricted(self):
IPNetworkUserRestriction.objects.create(network='127.0.0.1/32')
response = self.client.get(reverse('devhub.api_key'))
self.assert3xx(response, reverse('devhub.api_key_agreement'))
    def test_view_without_credentials_not_confirmed_yet(self):
        """With no keys and no confirmation, only 'generate' is offered."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        submit = doc('#generate-key')
        assert submit.text() == 'Generate new credentials'
        inputs = doc('.api-input input')
        assert len(inputs) == 0, 'Inputs should be absent before keys exist'
        assert not doc('input[name=confirmation_token]')
    def test_view_with_credentials(self):
        """Existing credentials expose the key and revoke/regenerate UI."""
        APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        submit = doc('#generate-key')
        assert submit.text() == 'Revoke and regenerate credentials'
        assert doc('#revoke-key').text() == 'Revoke'
        key_input = doc('.key-input input').val()
        assert key_input == 'some-jwt-key'
    def test_view_without_credentials_confirmation_requested_no_token(self):
        """A pending confirmation without a token shows no action buttons."""
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=False
        )
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        # Since confirmation has already been requested, there shouldn't be
        # any buttons on the page if no token was passed in the URL - the user
        # needs to follow the link in the email to continue.
        assert not doc('input[name=confirmation_token]')
        assert not doc('input[name=action]')
    def test_view_without_credentials_confirmation_requested_with_token(self):
        """A valid ?token= in the URL enables the confirm-and-generate form."""
        APIKeyConfirmation.objects.create(
            user=self.user, token='secrettoken', confirmed_once=False
        )
        self.url += '?token=secrettoken'
        response = self.client.get(self.url)
        assert response.status_code == 200
        doc = pq(response.content)
        # The token is carried through a hidden input on the form.
        assert len(doc('input[name=confirmation_token]')) == 1
        token_input = doc('input[name=confirmation_token]')[0]
        assert token_input.value == 'secrettoken'
        submit = doc('#generate-key')
        assert submit.text() == 'Confirm and generate new credentials'
    def test_view_no_credentials_has_been_confirmed_once(self):
        """An already-confirmed user sees the plain generate page again."""
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=True
        )
        # Should look similar to when there are no credentials and no
        # confirmation has been requested yet, the post action is where it
        # will differ.
        self.test_view_without_credentials_not_confirmed_yet()
    def test_create_new_credentials_has_been_confirmed_once(self):
        """Generating keys after confirmation creates them and emails user."""
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=True
        )
        # Patch credential creation so the test only checks the flow.
        patch = mock.patch('olympia.devhub.views.APIKey.new_jwt_credentials')
        with patch as mock_creator:
            response = self.client.post(self.url, data={'action': 'generate'})
        mock_creator.assert_called_with(self.user)
        assert len(mail.outbox) == 1
        message = mail.outbox[0]
        assert message.to == [self.user.email]
        assert message.subject == 'New API key created'
        assert reverse('devhub.api_key') in message.body
        self.assert3xx(response, self.url)
    def test_create_new_credentials_confirming_with_token(self):
        """Posting a valid token confirms and generates credentials."""
        confirmation = APIKeyConfirmation.objects.create(
            user=self.user, token='secrettoken', confirmed_once=False
        )
        # Patch credential creation so the test only checks the flow.
        patch = mock.patch('olympia.devhub.views.APIKey.new_jwt_credentials')
        with patch as mock_creator:
            response = self.client.post(
                self.url,
                data={'action': 'generate', 'confirmation_token': 'secrettoken'},
            )
        mock_creator.assert_called_with(self.user)
        assert len(mail.outbox) == 1
        message = mail.outbox[0]
        assert message.to == [self.user.email]
        assert message.subject == 'New API key created'
        assert reverse('devhub.api_key') in message.body
        # The confirmation is marked as used.
        confirmation.reload()
        assert confirmation.confirmed_once
        self.assert3xx(response, self.url)
    def test_create_new_credentials_not_confirmed_yet(self):
        """First generate request only sends the confirmation email."""
        assert not APIKey.objects.filter(user=self.user).exists()
        assert not APIKeyConfirmation.objects.filter(user=self.user).exists()
        response = self.client.post(self.url, data={'action': 'generate'})
        self.assert3xx(response, self.url)
        # Since there were no credentials and no confirmation yet, this
        # should create a confirmation and send an email with the token,
        # but not create credentials yet.
        assert len(mail.outbox) == 1
        message = mail.outbox[0]
        assert message.to == [self.user.email]
        assert not APIKey.objects.filter(user=self.user).exists()
        assert APIKeyConfirmation.objects.filter(user=self.user).exists()
        confirmation = APIKeyConfirmation.objects.filter(user=self.user).get()
        assert confirmation.token
        assert not confirmation.confirmed_once
        token = confirmation.token
        expected_url = (
            f'http://testserver/en-US/developers/addon/api/key/?token={token}'
        )
        assert message.subject == 'Confirmation for developer API keys'
        assert expected_url in message.body
    def test_create_new_credentials_confirmation_exists_no_token_passed(self):
        """A generate request without the token is a silent no-op."""
        confirmation = APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=False
        )
        response = self.client.post(self.url, data={'action': 'generate'})
        assert len(mail.outbox) == 0
        assert not APIKey.objects.filter(user=self.user).exists()
        confirmation.reload()
        assert not confirmation.confirmed_once  # Unchanged
        self.assert3xx(response, self.url)
    def test_create_new_credentials_confirmation_exists_token_is_wrong(self):
        """A wrong confirmation token is rejected without side effects."""
        confirmation = APIKeyConfirmation.objects.create(
            user=self.user, token='sometoken', confirmed_once=False
        )
        response = self.client.post(
            self.url, data={'action': 'generate', 'confirmation_token': 'wrong'}
        )
        # Nothing should have happened, the user will just be redirect to the
        # page.
        assert len(mail.outbox) == 0
        assert not APIKey.objects.filter(user=self.user).exists()
        confirmation.reload()
        assert not confirmation.confirmed_once
        self.assert3xx(response, self.url)
    def test_delete_and_recreate_credentials_has_been_confirmed_once(self):
        """With an already-confirmed confirmation, regenerating revokes the
        existing key and immediately creates a fresh one."""
        APIKeyConfirmation.objects.create(
            user=self.user, token='doesnt matter', confirmed_once=True
        )
        old_key = APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.post(self.url, data={'action': 'generate'})
        self.assert3xx(response, self.url)
        # Old key is soft-revoked, a brand new active key replaces it.
        old_key = APIKey.objects.get(pk=old_key.pk)
        assert old_key.is_active is None
        new_key = APIKey.get_jwt_key(user=self.user)
        assert new_key.key != old_key.key
        assert new_key.secret != old_key.secret
    def test_delete_and_recreate_credentials_has_not_been_confirmed_yet(self):
        """Regenerating without a prior confirmation revokes the old key but
        only emails a confirmation token instead of minting new keys."""
        old_key = APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.post(self.url, data={'action': 'generate'})
        self.assert3xx(response, self.url)
        old_key = APIKey.objects.get(pk=old_key.pk)
        assert old_key.is_active is None
        # Since there was no confirmation, this should create one, send an
        # email with the token, but not create credentials yet. (Would happen
        # for a user that had api keys from before we introduced the
        # confirmation mechanism, but decided to regenerate.)
        assert len(mail.outbox) == 2  # 2 because of key revocation email.
        assert 'revoked' in mail.outbox[0].body
        message = mail.outbox[1]
        assert message.to == [self.user.email]
        assert not APIKey.objects.filter(user=self.user, is_active=True).exists()
        assert APIKeyConfirmation.objects.filter(user=self.user).exists()
        confirmation = APIKeyConfirmation.objects.filter(user=self.user).get()
        assert confirmation.token
        assert not confirmation.confirmed_once
        token = confirmation.token
        expected_url = (
            f'http://testserver/en-US/developers/addon/api/key/?token={token}'
        )
        assert message.subject == 'Confirmation for developer API keys'
        assert expected_url in message.body
    def test_delete_credentials(self):
        """Revoking soft-deletes the key and sends a revocation email."""
        old_key = APIKey.objects.create(
            user=self.user,
            type=SYMMETRIC_JWT_TYPE,
            key='some-jwt-key',
            secret='some-jwt-secret',
        )
        response = self.client.post(self.url, data={'action': 'revoke'})
        self.assert3xx(response, self.url)
        old_key = APIKey.objects.get(pk=old_key.pk)
        assert old_key.is_active is None
        assert len(mail.outbox) == 1
        assert 'revoked' in mail.outbox[0].body
class TestUpload(BaseUploadTest):
    """Tests for the devhub file-upload endpoint (devhub.upload)."""

    fixtures = ['base/users']

    def setUp(self):
        super(TestUpload, self).setUp()
        assert self.client.login(email='regular@mozilla.com')
        self.url = reverse('devhub.upload')
        self.image_path = get_image_path('animated.png')

    def post(self, **kwargs):
        # Has to be a binary, non xpi file. Use a context manager so the
        # file handle is closed instead of leaked.
        with open(self.image_path, 'rb') as data:
            return self.client.post(self.url, {'upload': data}, **kwargs)

    def test_login_required(self):
        self.client.logout()
        response = self.post()
        assert response.status_code == 302

    def test_create_fileupload(self):
        self.post()
        upload = FileUpload.objects.filter().order_by('-created').first()
        assert 'animated.png' in upload.name
        # Close the local handle deterministically instead of leaking it.
        with open(self.image_path, 'rb') as f:
            data = f.read()
        assert storage.open(upload.path).read() == data

    def test_fileupload_metadata(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        self.client.login(email=user.email)
        self.post(REMOTE_ADDR='4.8.15.16.23.42')
        upload = FileUpload.objects.get()
        assert upload.user == user
        assert upload.source == amo.UPLOAD_SOURCE_DEVHUB
        assert upload.ip_address == '4.8.15.16.23.42'

    def test_fileupload_validation(self):
        self.post()
        upload = FileUpload.objects.filter().order_by('-created').first()
        assert upload.validation
        validation = json.loads(upload.validation)
        assert not validation['success']
        # The current interface depends on this JSON structure:
        assert validation['errors'] == 1
        assert validation['warnings'] == 0
        assert len(validation['messages'])
        msg = validation['messages'][0]
        assert msg['type'] == 'error'
        assert msg['message'] == (
            'Unsupported file type, please upload a supported file '
            '(.crx, .xpi, .zip).'
        )
        assert not msg['description']

    def test_redirect(self):
        response = self.post()
        upload = FileUpload.objects.get()
        url = reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        self.assert3xx(response, url)

    def test_not_an_uuid(self):
        url = reverse('devhub.upload_detail', args=['garbage', 'json'])
        response = self.client.get(url)
        assert response.status_code == 404

    @mock.patch('olympia.devhub.tasks.validate')
    def test_upload_unlisted_addon(self, validate_mock):
        """Unlisted addons are validated as "self hosted" addons."""
        validate_mock.return_value = json.dumps(amo.VALIDATOR_SKELETON_RESULTS)
        self.url = reverse('devhub.upload_unlisted')
        self.post()
        # Make sure it was called with listed=False.
        assert not validate_mock.call_args[1]['listed']
class TestUploadDetail(BaseUploadTest):
    """Tests for the devhub.upload_detail / upload_detail_for_version views."""

    fixtures = ['base/appversion', 'base/users']

    @classmethod
    def setUpTestData(cls):
        # AppVersions the validator/compatibility machinery expects to exist.
        versions = {
            '51.0a1',
            amo.DEFAULT_WEBEXT_MIN_VERSION,
            amo.DEFAULT_WEBEXT_MIN_VERSION_ANDROID,
            amo.DEFAULT_WEBEXT_MAX_VERSION,
        }
        for version in versions:
            cls.create_appversion('firefox', version)
            cls.create_appversion('android', version)

    def setUp(self):
        super(TestUploadDetail, self).setUp()
        assert self.client.login(email='regular@mozilla.com')

    @classmethod
    def create_appversion(cls, application_name, version):
        return AppVersion.objects.create(
            application=amo.APPS[application_name].id, version=version
        )

    def post(self):
        # Has to be a binary, non xpi file. Use a context manager so the
        # file handle is closed instead of leaked.
        with open(get_image_path('animated.png'), 'rb') as data:
            return self.client.post(reverse('devhub.upload'), {'upload': data})

    def validation_ok(self):
        # Minimal validation payload representing a successful linter run.
        return {
            'errors': 0,
            'success': True,
            'warnings': 0,
            'notices': 0,
            'message_tree': {},
            'messages': [],
            'rejected': False,
            'metadata': {},
        }

    def upload_file(self, file, url='devhub.upload'):
        addon = os.path.join(
            settings.ROOT, 'src', 'olympia', 'devhub', 'tests', 'addons', file
        )
        with open(addon, 'rb') as f:
            response = self.client.post(reverse(url), {'upload': f})
        assert response.status_code == 302

    def test_detail_json(self):
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        assert response.status_code == 200
        data = json.loads(force_str(response.content))
        assert data['validation']['errors'] == 1
        assert data['url'] == (
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        assert data['full_report_url'] == (
            reverse('devhub.upload_detail', args=[upload.uuid.hex])
        )
        # We must have tiers
        assert len(data['validation']['messages'])
        msg = data['validation']['messages'][0]
        assert msg['tier'] == 1

    def test_upload_detail_for_version(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory()
        addon.addonuser_set.create(user=user)
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 200

    def test_upload_detail_for_version_not_an_uuid(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory()
        addon.addonuser_set.create(user=user)
        url = reverse('devhub.upload_detail_for_version', args=[addon.slug, 'garbage'])
        response = self.client.get(url)
        assert response.status_code == 404

    def test_upload_detail_for_version_unlisted(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory(version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED})
        addon.addonuser_set.create(user=user)
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 200

    def test_upload_detail_for_version_deleted(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory()
        addon.addonuser_set.create(user=user)
        addon.delete()
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 404

    def test_detail_view(self):
        self.post()
        upload = FileUpload.objects.filter().order_by('-created').first()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex])
        )
        assert response.status_code == 200
        doc = pq(response.content)
        expected = 'Validation Results for animated.png'
        assert doc('header h2').text() == expected
        suite = doc('#addon-validator-suite')
        expected = reverse('devhub.standalone_upload_detail', args=[upload.uuid.hex])
        assert suite.attr('data-validateurl') == expected

    def test_not_an_uuid_standalon_upload_detail(self):
        url = reverse('devhub.standalone_upload_detail', args=['garbage'])
        response = self.client.get(url)
        assert response.status_code == 404

    def test_no_servererror_on_missing_version(self):
        """https://github.com/mozilla/addons-server/issues/3779

        addons-linter and amo-validator both add proper errors if the version
        is missing but we shouldn't fail on that but properly show the
        validation results.
        """
        self.upload_file('valid_webextension_no_version.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        message = [
            (m['message'], m.get('type') == 'error')
            for m in data['validation']['messages']
        ]
        expected = [('"/version" is a required property', True)]
        assert message == expected

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_not_a_valid_xpi(self, run_addons_linter_mock):
        run_addons_linter_mock.return_value = json.dumps(self.validation_ok())
        self.upload_file('unopenable.xpi')
        # We never even reach the linter (we can't: because we're repacking
        # zip files, we should raise an error if the zip is invalid before
        # calling the linter, even though the linter has a perfectly good error
        # message for this kind of situation).
        assert not run_addons_linter_mock.called
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        message = [
            (m['message'], m.get('fatal', False))
            for m in data['validation']['messages']
        ]
        # We do raise a specific error message explaining that the archive is
        # not valid instead of a generic exception.
        assert message == [
            ('Invalid or corrupt add-on file.', True),
        ]

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_experiment_xpi_allowed(self, mock_validator):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        self.grant_permission(user, 'Experiments:submit')
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file(
            '../../../files/fixtures/files/experiment_inside_webextension.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_experiment_xpi_not_allowed(self, mock_validator):
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file(
            '../../../files/fixtures/files/experiment_inside_webextension.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == [
            {
                'tier': 1,
                'message': 'You cannot submit this type of add-on',
                'fatal': True,
                'type': 'error',
            }
        ]

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_system_addon_allowed(self, mock_validator):
        user = user_factory()
        self.grant_permission(user, 'SystemAddon:Submit')
        assert self.client.login(email=user.email)
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file('../../../files/fixtures/files/mozilla_guid.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_system_addon_not_allowed_not_allowed(self, mock_validator):
        user_factory(email='redpanda@mozilla.com')
        assert self.client.login(email='redpanda@mozilla.com')
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file('../../../files/fixtures/files/mozilla_guid.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == [
            {
                'tier': 1,
                'message': 'You cannot submit an add-on using an ID ending with '
                '"@mozilla.com" or "@mozilla.org" or '
                '"@pioneer.mozilla.org" or "@search.mozilla.org" or '
                '"@shield.mozilla.com" or "@shield.mozilla.org" or '
                '"@mozillaonline.com" or "@mozillafoundation.org" or '
                '"@rally.mozilla.org"',
                'fatal': True,
                'type': 'error',
            }
        ]

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    @mock.patch('olympia.files.utils.get_signer_organizational_unit_name')
    def test_mozilla_signed_allowed(self, mock_get_signature, mock_validator):
        user = user_factory()
        assert self.client.login(email=user.email)
        self.grant_permission(user, 'SystemAddon:Submit')
        mock_validator.return_value = json.dumps(self.validation_ok())
        mock_get_signature.return_value = 'Mozilla Extensions'
        self.upload_file(
            '../../../files/fixtures/files/webextension_signed_already.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []

    @mock.patch('olympia.files.utils.get_signer_organizational_unit_name')
    def test_mozilla_signed_not_allowed_not_allowed(self, mock_get_signature):
        user_factory(email='redpanda@mozilla.com')
        assert self.client.login(email='redpanda@mozilla.com')
        mock_get_signature.return_value = 'Mozilla Extensions'
        self.upload_file(
            '../../../files/fixtures/files/webextension_signed_already.xpi'
        )
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse('devhub.upload_detail', args=[upload.uuid.hex, 'json'])
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == [
            {
                'tier': 1,
                'message': 'You cannot submit a Mozilla Signed Extension',
                'fatal': True,
                'type': 'error',
            }
        ]

    @mock.patch('olympia.devhub.tasks.run_addons_linter')
    def test_system_addon_update_allowed(self, mock_validator):
        """Updates to system addons are allowed from anyone."""
        user = user_factory(email='pinkpanda@notzilla.com')
        addon = addon_factory(guid='systemaddon@mozilla.org')
        AddonUser.objects.create(addon=addon, user=user)
        assert self.client.login(email='pinkpanda@notzilla.com')
        mock_validator.return_value = json.dumps(self.validation_ok())
        self.upload_file('../../../files/fixtures/files/mozilla_guid.xpi')
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        data = json.loads(force_str(response.content))
        assert data['validation']['messages'] == []

    def test_no_redirect_for_metadata(self):
        user = UserProfile.objects.get(email='regular@mozilla.com')
        addon = addon_factory(status=amo.STATUS_NULL)
        AddonCategory.objects.filter(addon=addon).delete()
        addon.addonuser_set.create(user=user)
        self.post()
        upload = FileUpload.objects.get()
        response = self.client.get(
            reverse(
                'devhub.upload_detail_for_version', args=[addon.slug, upload.uuid.hex]
            )
        )
        assert response.status_code == 200
def assert_json_error(request, field, msg):
    """Assert *request* is a 400 JSON response whose *field* key carries
    exactly the single error message *msg* (``None`` maps to ``__all__``)."""
    assert request.status_code == 400
    assert request['Content-Type'] == 'application/json'
    if field is None:
        field = '__all__'
    payload = json.loads(request.content)
    assert field in payload, '%r not in %r' % (field, payload)
    assert payload[field] == [msg]
def assert_json_field(request, field, msg):
    """Assert *request* is a 200 JSON response where key *field* equals *msg*."""
    assert request.status_code == 200
    assert request['Content-Type'] == 'application/json'
    payload = json.loads(request.content)
    assert field in payload, '%r not in %r' % (field, payload)
    assert payload[field] == msg
class TestQueuePosition(TestCase):
    """Tests for the "Queue Position: x of y" indicator on the devhub
    versions page; static themes live in a separate queue and must not be
    counted alongside extensions."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestQueuePosition, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.version = self.addon.current_version
        self.addon.update(guid='guid@xpi')
        assert self.client.login(email='del@icio.us')
        self.edit_url = reverse(
            'devhub.versions.edit', args=[self.addon.slug, self.version.id]
        )
        # Add a second one also awaiting review in each queue
        addon_factory(
            status=amo.STATUS_NOMINATED, file_kw={'status': amo.STATUS_AWAITING_REVIEW}
        )
        version_factory(
            addon=addon_factory(), file_kw={'status': amo.STATUS_AWAITING_REVIEW}
        )
        # And some static themes that shouldn't be counted
        addon_factory(
            status=amo.STATUS_NOMINATED,
            type=amo.ADDON_STATICTHEME,
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
        version_factory(
            addon=addon_factory(type=amo.ADDON_STATICTHEME),
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
        addon_factory(
            status=amo.STATUS_NOMINATED,
            type=amo.ADDON_STATICTHEME,
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )
        version_factory(
            addon=addon_factory(type=amo.ADDON_STATICTHEME),
            file_kw={'status': amo.STATUS_AWAITING_REVIEW},
        )

    def test_not_in_queue(self):
        # An approved add-on should not show any queue position.
        response = self.client.get(self.addon.get_dev_url('versions'))
        assert self.addon.status == amo.STATUS_APPROVED
        assert pq(response.content)('.version-status-actions .dark').length == 0

    def test_in_queue(self):
        # Both nomination (full review) and update queues are exercised.
        statuses = [
            (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW),
            (amo.STATUS_APPROVED, amo.STATUS_AWAITING_REVIEW),
        ]
        for (addon_status, file_status) in statuses:
            latest_version = self.addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
            latest_version.files.all()[0].update(status=file_status)
            self.addon.update(status=addon_status)
            response = self.client.get(self.addon.get_dev_url('versions'))
            doc = pq(response.content)
            span = doc('.queue-position')
            assert span.length
            # 2 = this add-on plus the one extra extension per queue above.
            assert 'Queue Position: 1 of 2' in span.text()

    def test_static_themes_in_queue(self):
        statuses = [
            (amo.STATUS_NOMINATED, amo.STATUS_AWAITING_REVIEW),
            (amo.STATUS_APPROVED, amo.STATUS_AWAITING_REVIEW),
        ]
        self.addon.update(type=amo.ADDON_STATICTHEME)
        for (addon_status, file_status) in statuses:
            latest_version = self.addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
            latest_version.files.all()[0].update(status=file_status)
            self.addon.update(status=addon_status)
            response = self.client.get(self.addon.get_dev_url('versions'))
            doc = pq(response.content)
            span = doc('.queue-position')
            assert span.length
            # 3 = this theme plus the two extra themes per queue above.
            assert 'Queue Position: 1 of 3' in span.text()
class TestVersionXSS(TestCase):
    """Ensure a malicious version string is HTML-escaped on devhub pages."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestVersionXSS, self).setUp()
        self.version = Addon.objects.get(id=3615).current_version
        assert self.client.login(email='del@icio.us')

    def test_unique_version_num(self):
        # Can't use a "/" to close the tag, as we're doing a get_url_path on
        # it, which uses addons.versions, which consumes up to the first "/"
        # encountered.
        self.version.update(version='<script>alert("Happy XSS-Xmas");<script>')
        response = self.client.get(reverse('devhub.addons'))
        assert response.status_code == 200
        # The raw payload must not appear; only its HTML-escaped form should.
        # (The original second assertion re-checked the raw bytes, which
        # directly contradicted the line above and could never pass.)
        assert b'<script>alert' not in response.content
        assert b'&lt;script&gt;alert' in response.content
class TestDeleteAddon(TestCase):
    """Tests for the devhub add-on delete view, which requires typing the
    add-on slug as a confirmation."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestDeleteAddon, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.url = self.addon.get_dev_url('delete')
        self.client.login(email='admin@mozilla.com')

    def test_bad_password(self):
        # A wrong slug must not delete anything.
        response = self.client.post(self.url, {'slug': 'nope'})
        self.assert3xx(response, self.addon.get_dev_url('versions'))
        assert response.context['title'] == (
            'URL name was incorrect. Add-on was not deleted.'
        )
        assert Addon.objects.count() == 1

    def test_success(self):
        response = self.client.post(self.url, {'slug': 'a3615'})
        self.assert3xx(response, reverse('devhub.addons'))
        assert response.context['title'] == 'Add-on deleted.'
        assert Addon.objects.count() == 0
class TestRequestReview(TestCase):
    """Tests for the devhub.request-review endpoint (re-nomination)."""

    fixtures = ['base/users']

    def setUp(self):
        super(TestRequestReview, self).setUp()
        self.addon = addon_factory()
        self.version = self.addon.find_latest_version(
            channel=amo.RELEASE_CHANNEL_LISTED
        )
        self.redirect_url = self.addon.get_dev_url('versions')
        self.public_url = reverse('devhub.request-review', args=[self.addon.slug])
        assert self.client.login(email='admin@mozilla.com')

    def get_addon(self):
        # Re-fetch to observe DB changes made by the view.
        return Addon.objects.get(id=self.addon.id)

    def get_version(self):
        return Version.objects.get(pk=self.version.id)

    def check_400(self, url):
        response = self.client.post(url)
        assert response.status_code == 400

    def test_public(self):
        # Already-approved add-ons cannot request a review.
        self.addon.update(status=amo.STATUS_APPROVED)
        self.check_400(self.public_url)

    @mock.patch('olympia.addons.models.Addon.has_complete_metadata')
    def test_renominate_for_full_review(self, mock_has_complete_metadata):
        # When a version is rejected, the addon is disabled.
        # The author must upload a new version and re-nominate.
        # Renominating the same version resets the nomination date.
        mock_has_complete_metadata.return_value = True
        orig_date = datetime.now() - timedelta(days=30)
        # Pretend it was nominated in the past:
        self.version.update(nomination=orig_date)
        self.addon.update(status=amo.STATUS_NULL)
        response = self.client.post(self.public_url)
        self.assert3xx(response, self.redirect_url)
        assert self.get_addon().status == amo.STATUS_NOMINATED
        # Nomination date must have been reset (compare to minute precision).
        assert self.get_version().nomination.timetuple()[0:5] != (
            orig_date.timetuple()[0:5]
        )
class TestRedirects(TestCase):
    """Tests for legacy devhub URL patterns that 301 to their new homes."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestRedirects, self).setUp()
        self.base = reverse('devhub.index')
        assert self.client.login(email='admin@mozilla.com')
        self.user = UserProfile.objects.get(email='admin@mozilla.com')
        self.user.update(last_login_ip='192.168.1.1')

    def test_edit(self):
        # Both with and without a trailing slash.
        url = self.base + 'addon/edit/3615'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.edit', args=['a3615']), 301)
        url = self.base + 'addon/edit/3615/'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.edit', args=['a3615']), 301)

    def test_status(self):
        url = self.base + 'addon/status/3615'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.versions', args=['a3615']), 301)

    def test_versions(self):
        url = self.base + 'versions/3615'
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.addons.versions', args=['a3615']), 301)

    def test_lwt_submit_redirects_to_addon_submit(self):
        # Lightweight themes are gone; their submit URL falls through to the
        # generic distribution chooser.
        url = reverse('devhub.themes.submit')
        response = self.client.get(url, follow=True)
        self.assert3xx(response, reverse('devhub.submit.distribution'), 302)
class TestHasCompleteMetadataRedirects(TestCase):
    """Make sure Addons that are not complete in some way are correctly
    redirected to the right view (and don't end up in a redirect loop)."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestHasCompleteMetadataRedirects, self).setUp()
        # A mock view function to wrap with the dev_required decorator.
        self.f = mock.Mock()
        self.f.__name__ = 'function'
        self.request = RequestFactory().get('developers/addon/a3615/edit')
        self.request.user = UserProfile.objects.get(email='admin@mozilla.com')
        self.addon = Addon.objects.get(id=3615)
        self.addon.update(status=amo.STATUS_NULL)
        # Re-fetch so cached properties reflect the status update.
        self.addon = Addon.objects.get(id=3615)
        assert self.addon.has_complete_metadata(), self.addon.get_required_metadata()
        assert not self.addon.should_redirect_to_submit_flow()
        # We need to be logged in for any redirection into real views.
        assert self.client.login(email='admin@mozilla.com')

    def _test_redirect(self):
        # The decorated view must bounce to the submit-details step without
        # calling the wrapped function.
        func = dev_required(self.f)
        response = func(self.request, addon_id='a3615')
        assert not self.f.called
        assert response.status_code == 302
        assert response['Location'] == ('/en-US/developers/addon/a3615/submit/details')
        # Check the redirection doesn't redirect also.
        redirection = self.client.get(response['Location'])
        assert redirection.status_code == 200

    def test_default(self):
        func = dev_required(self.f)
        func(self.request, addon_id='a3615')
        # Don't redirect if there is no metadata to collect.
        assert self.f.called

    def test_no_summary(self):
        delete_translation(self.addon, 'summary')
        self._test_redirect()

    def test_no_license(self):
        self.addon.current_version.update(license=None)
        self._test_redirect()

    def test_no_license_no_summary(self):
        self.addon.current_version.update(license=None)
        delete_translation(self.addon, 'summary')
        self._test_redirect()
class TestDocs(TestCase):
    """Sanity checks for the devhub documentation URLs and their redirects."""

    def test_doc_urls(self):
        assert '/en-US/developers/docs/' == reverse('devhub.docs', args=[])
        assert '/en-US/developers/docs/te' == reverse('devhub.docs', args=['te'])
        # This assertion previously used a comma instead of `==`, turning it
        # into an always-true `assert <string>, <message>` no-op.
        assert '/en-US/developers/docs/te/st' == reverse(
            'devhub.docs', args=['te/st']
        )
        urls = [
            (reverse('devhub.docs', args=['getting-started']), 301),
            (reverse('devhub.docs', args=['how-to']), 301),
            (reverse('devhub.docs', args=['how-to/other-addons']), 301),
            (reverse('devhub.docs', args=['fake-page']), 404),
            (reverse('devhub.docs', args=['how-to/fake-page']), 404),
            (reverse('devhub.docs'), 301),
        ]
        index = reverse('devhub.index')
        for url in urls:
            response = self.client.get(url[0])
            assert response.status_code == url[1]
            # NOTE(review): every expected status above is 301 or 404, so this
            # branch is currently dead — confirm whether it should check 301.
            if url[1] == 302:  # Redirect to the index page
                self.assert3xx(response, index)
class TestRemoveLocale(TestCase):
    """Tests for the devhub remove-locale endpoint, which strips all
    translations in a given locale from an add-on."""

    fixtures = ['base/users', 'base/addon_3615']

    def setUp(self):
        super(TestRemoveLocale, self).setUp()
        self.addon = Addon.objects.get(id=3615)
        self.url = reverse('devhub.addons.remove-locale', args=['a3615'])
        assert self.client.login(email='del@icio.us')

    def test_bad_request(self):
        # POST without a locale parameter is rejected.
        response = self.client.post(self.url)
        assert response.status_code == 400

    def test_success(self):
        self.addon.name = {'en-US': 'woo', 'el': 'yeah'}
        self.addon.save()
        self.addon.remove_locale('el')
        qs = Translation.objects.filter(localized_string__isnull=False).values_list(
            'locale', flat=True
        )
        response = self.client.post(self.url, {'locale': 'el'})
        assert response.status_code == 200
        # Only the default-locale translation should remain.
        assert sorted(qs.filter(id=self.addon.name_id)) == ['en-US']

    def test_delete_default_locale(self):
        # The default locale can never be removed.
        response = self.client.post(self.url, {'locale': self.addon.default_locale})
        assert response.status_code == 400

    def test_remove_version_locale(self):
        version = self.addon.versions.all()[0]
        version.release_notes = {'fr': 'oui'}
        version.save()
        self.client.post(self.url, {'locale': 'fr'})
        res = self.client.get(
            reverse('devhub.versions.edit', args=[self.addon.slug, version.pk])
        )
        doc = pq(res.content)
        # There's 2 fields, one for en-us, one for init.
        assert len(doc('div.trans textarea')) == 2
class TestXssOnAddonName(amo.tests.TestXss):
    """Verify that a malicious add-on name is escaped on devhub pages."""

    def test_devhub_feed_page(self):
        self.assertNameAndNoXSS(reverse('devhub.feed', args=[self.addon.slug]))

    def test_devhub_addon_edit_page(self):
        self.assertNameAndNoXSS(reverse('devhub.addons.edit', args=[self.addon.slug]))

    def test_devhub_version_edit_page(self):
        self.assertNameAndNoXSS(
            reverse(
                'devhub.versions.edit',
                args=[self.addon.slug, self.addon.current_version.id],
            )
        )

    def test_devhub_version_list_page(self):
        self.assertNameAndNoXSS(
            reverse('devhub.addons.versions', args=[self.addon.slug])
        )
@pytest.mark.django_db
def test_get_next_version_number():
    """get_next_version_number bumps the highest major version, counting
    disabled and even deleted versions so numbers are never reused."""
    addon = addon_factory(version_kw={'version': '1.0'})
    # Easy case - 1.0 to 2.0
    assert get_next_version_number(addon) == '2.0'

    # version numbers without minor numbers should be okay too.
    version_factory(addon=addon, version='2')
    assert get_next_version_number(addon) == '3.0'

    # We just iterate the major version number
    addon.current_version.update(version='34.45.0a1pre')
    addon.current_version.save()
    assert get_next_version_number(addon) == '35.0'

    # "Take" 35.0
    version_factory(
        addon=addon, version='35.0', file_kw={'status': amo.STATUS_DISABLED}
    )
    assert get_next_version_number(addon) == '36.0'

    # And 36.0, even though it's deleted.
    version_factory(addon=addon, version='36.0').delete()
    assert addon.current_version.version == '34.45.0a1pre'
    assert get_next_version_number(addon) == '37.0'
class TestThemeBackgroundImage(TestCase):
    """Tests for the endpoint returning a theme version's background images
    (base64-encoded) for reuse when submitting a new version."""

    def setUp(self):
        user = user_factory(email='regular@mozilla.com')
        assert self.client.login(email='regular@mozilla.com')
        self.addon = addon_factory(users=[user])
        self.url = reverse(
            'devhub.submit.version.previous_background',
            args=[self.addon.slug, 'listed'],
        )

    def test_wrong_user(self):
        # Only the add-on's authors may fetch its background images.
        user_factory(email='irregular@mozilla.com')
        assert self.client.login(email='irregular@mozilla.com')
        response = self.client.post(self.url, follow=True)
        assert response.status_code == 403

    def test_no_header_image(self):
        # No theme file on disk -> empty JSON object.
        response = self.client.post(self.url, follow=True)
        assert response.status_code == 200
        data = json.loads(force_str(response.content))
        assert data == {}

    def test_header_image(self):
        # Drop a real static theme zip where the current file is expected.
        destination = self.addon.current_version.all_files[0].current_file_path
        zip_file = os.path.join(
            settings.ROOT, 'src/olympia/devhub/tests/addons/static_theme.zip'
        )
        copy_stored_file(zip_file, destination)
        response = self.client.post(self.url, follow=True)
        assert response.status_code == 200
        data = json.loads(force_str(response.content))
        assert data
        assert len(data.items()) == 1
        assert 'weta.png' in data
        assert len(data['weta.png']) == 168596  # base64-encoded size
class TestLogout(UserViewBase):
    """Tests for the devhub logout view: session teardown, redirects, and
    cookie invalidation."""

    def test_success(self):
        user = UserProfile.objects.get(email='jbalogh@mozilla.com')
        self.client.login(email=user.email)
        assert user.auth_id
        response = self.client.get(reverse('devhub.index'), follow=True)
        # Logged-in chrome shows the avatar linking to the profile.
        assert pq(response.content)('li a.avatar').attr('href') == (user.get_url_path())
        assert pq(response.content)('li a.avatar img').attr('src') == (user.picture_url)

        response = self.client.get('/en-US/developers/logout', follow=False)
        self.assert3xx(response, '/en-US/firefox/', status_code=302)

        response = self.client.get(reverse('devhub.index'), follow=True)
        assert not pq(response.content)('li a.avatar')
        # auth_id is cleared so existing sessions/tokens are invalidated.
        user.reload()
        assert not user.auth_id

    def test_redirect(self):
        self.client.login(email='jbalogh@mozilla.com')
        self.client.get(reverse('devhub.index'), follow=True)
        url = '/en-US/about'
        response = self.client.get(
            urlparams(reverse('devhub.logout'), to=url), follow=True
        )
        self.assert3xx(response, url, status_code=302)

        # Test an invalid domain
        url = urlparams(
            reverse('devhub.logout'), to='/en-US/about', domain='http://evil.com'
        )
        response = self.client.get(url, follow=False)
        self.assert3xx(response, '/en-US/about', status_code=302)

    def test_session_cookie_deleted_on_logout(self):
        self.client.login(email='jbalogh@mozilla.com')
        self.client.cookies[API_TOKEN_COOKIE] = 'some.token.value'
        response = self.client.get(reverse('devhub.logout'))
        cookie = response.cookies[settings.SESSION_COOKIE_NAME]
        # An expiry at the epoch is how cookies are deleted.
        cookie_date_string = 'Thu, 01 Jan 1970 00:00:00 GMT'
        assert cookie.value == ''
        # in django2.1+ changed to django.utils.http.http_date from cookie_date
        assert cookie['expires'].replace('-', ' ') == cookie_date_string
        jwt_cookie = response.cookies[API_TOKEN_COOKIE]
        assert jwt_cookie.value == ''
        assert jwt_cookie['expires'].replace('-', ' ') == cookie_date_string
class TestStatsLinksInManageMySubmissionsPage(TestCase):
    """The devhub submissions list should link to statistics for every
    add-on type and listing state, except add-ons disabled by Mozilla."""

    def setUp(self):
        super().setUp()
        self.user = user_factory()
        self.addon = addon_factory(users=[self.user])
        self.url = reverse('devhub.addons')
        self.client.login(email=self.user.email)

    def _stats_link_present(self):
        # Fetch the submissions page and report whether the stats overview
        # link for self.addon appears in it.
        response = self.client.get(self.url)
        return reverse('stats.overview', args=[self.addon.slug]) in str(
            response.content
        )

    def test_link_to_stats(self):
        assert self._stats_link_present()

    def test_link_to_stats_for_addon_disabled_by_user(self):
        self.addon.update(disabled_by_user=True)
        assert self._stats_link_present()

    def test_link_to_stats_for_unlisted_addon(self):
        self.make_addon_unlisted(self.addon)
        assert self._stats_link_present()

    def test_no_link_for_addon_disabled_by_mozilla(self):
        self.addon.update(status=amo.STATUS_DISABLED)
        self.make_addon_unlisted(self.addon)
        assert not self._stats_link_present()

    def test_link_to_stats_for_langpacks(self):
        self.addon.update(type=amo.ADDON_LPAPP)
        assert self._stats_link_present()

    def test_link_to_stats_for_dictionaries(self):
        self.addon.update(type=amo.ADDON_DICT)
        assert self._stats_link_present()
| codeparrot/github-code-clean |
#
# Copyright 2005,2006,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import struct
import numpy
import six
from gnuradio import gru
from . import crc
def conv_packed_binary_string_to_1_0_string(s):
    """
    Expand packed binary data into a string of ascii '0'/'1' characters.

    '\xAF' --> '10101111'

    Args:
        s: packed data; a python2-style str of raw chars, or a bytes object.
           (The original only handled str: iterating bytes on python3 yields
           ints, which made ord() raise TypeError.)

    Returns:
        str of '0'/'1' characters, MSB first, 8 per input byte.
    """
    bits = []
    for ch in s:
        # bytes iteration yields ints on python3; str iteration yields chars.
        x = ch if isinstance(ch, int) else ord(ch)
        for i in range(7, -1, -1):
            bits.append((x >> i) & 0x1)
    return ''.join([chr(b + ord('0')) for b in bits])
def conv_1_0_string_to_packed_binary_string(s):
    """
    Pack a string of ascii '0'/'1' characters into raw binary data.

    '10101111' -> ('\xAF', False)

    Inverse of conv_packed_binary_string_to_1_0_string, except that it also
    returns a flag saying whether leading zeros had to be added to reach a
    multiple of 8 bits.
    """
    if not is_1_0_string(s):
        raise ValueError("Input must be a string containing only 0's and 1's")

    # Left-pad with zeros up to the next full byte boundary.
    leftover = len(s) % 8
    padded = leftover != 0
    if padded:
        s = '0' * (8 - leftover) + s
    assert len(s) % 8 == 0

    # Each 8-char slice is a base-2 numeral; int() does the bit-accumulation.
    chars = [chr(int(s[i:i + 8], 2)) for i in range(0, len(s), 8)]
    return (''.join(chars), padded)
# Module-wide default sync words (as ascii '0'/'1' strings) used by
# make_packet() when the caller does not supply its own access code/preamble.
default_access_code = \
    conv_packed_binary_string_to_1_0_string('\xAC\xDD\xA4\xE2\xF2\x8C\x20\xFC')
default_preamble = \
    conv_packed_binary_string_to_1_0_string('\xA4\xF2')
def is_1_0_string(s):
    """Return True when s is a str consisting solely of '0' and '1' chars."""
    return isinstance(s, str) and all(ch in ('0', '1') for ch in s)
def string_to_hex_list(s):
    """Return the hex representation ('0x..') of each character of s."""
    result = []
    for ch in s:
        result.append(hex(ord(ch)))
    return result
def whiten(s, o):
    """
    XOR the data with the PN whitening sequence starting at offset o.

    Args:
        s: raw data (buffer-compatible: bytes, or str on python2)
        o: integer offset into the whitening table

    Returns:
        whitened data, same length as s
    """
    # numpy.fromstring (binary mode) and ndarray.tostring are deprecated and
    # removed in modern NumPy; frombuffer/tobytes are the supported
    # replacements and frombuffer avoids copying the input.
    sa = numpy.frombuffer(s, numpy.uint8)
    return (sa ^ random_mask_vec8[o:len(sa) + o]).tobytes()
def dewhiten(s, o):
    """Undo whitening at offset o; XOR with the PN sequence is self-inverse."""
    return whiten(s, o) # self inverse
def make_header(payload_len, whitener_offset=0):
    """
    Build the 4-byte packet header.

    The 16-bit header word carries the whitener offset in its upper nibble
    and the payload length in its lower 12 bits; the word is transmitted
    twice (network byte order).
    """
    word = ((whitener_offset & 0xf) << 12) | (payload_len & 0x0fff)
    return struct.pack(b'!HH', word, word)
def make_packet(payload, samples_per_symbol, bits_per_symbol,
                preamble=default_preamble, access_code=default_access_code,
                pad_for_usrp=True, whitener_offset=0, whitening=True,
                calc_crc=True):
    """
    Build a packet, given access code, payload, and whitener offset

    Args:
        payload: packet payload, len [0, 4096]
        samples_per_symbol: samples per symbol (needed for padding calculation) (int)
        bits_per_symbol: (needed for padding calculation) (int)
        preamble: string of ascii 0's and 1's
        access_code: string of ascii 0's and 1's
        pad_for_usrp: If true, packets are padded such that they end up a multiple of 128 samples(512 bytes)
        whitener_offset: offset into whitener string to use [0-16)
        whitening: Whether to turn on data whitening(scrambling) (boolean)
        calc_crc: Whether to calculate CRC32 or not (boolean)

    Packet will have access code at the beginning, followed by length, payload
    and finally CRC-32.
    """
    if not is_1_0_string(preamble):
        raise ValueError("preamble must be a string containing only 0's and 1's (%r)" % (preamble,))
    if not is_1_0_string(access_code):
        raise ValueError("access_code must be a string containing only 0's and 1's (%r)" % (access_code,))
    # BUG FIX: the original test `not whitener_offset >= 0 and whitener_offset < 16`
    # parses as `(not (whitener_offset >= 0)) and ...` because `not` binds
    # tighter than `and`, so offsets >= 16 were silently accepted.  Validate
    # the whole documented [0, 16) range with a chained comparison.
    if not (0 <= whitener_offset < 16):
        raise ValueError("whitener_offset must be between 0 and 15, inclusive (%i)" % (whitener_offset,))

    (packed_access_code, padded) = conv_1_0_string_to_packed_binary_string(access_code)
    (packed_preamble, ignore) = conv_1_0_string_to_packed_binary_string(preamble)

    if calc_crc:
        payload_with_crc = crc.gen_and_append_crc32(payload)
    else:
        payload_with_crc = payload

    L = len(payload_with_crc)
    MAXLEN = len(random_mask_tuple)
    if L > MAXLEN:
        raise ValueError("len(payload) must be in [0, %d]" % (MAXLEN,))

    # Frame layout: preamble | access code | header | (whitened) payload+crc | 0x55
    if whitening:
        pkt = b''.join((packed_preamble, packed_access_code, make_header(L, whitener_offset),
                        whiten(payload_with_crc, whitener_offset), b'\x55'))
    else:
        pkt = b''.join((packed_preamble, packed_access_code, make_header(L, whitener_offset),
                        payload_with_crc, b'\x55'))

    if pad_for_usrp:
        # Pad with 0x55 so the modulated packet is a multiple of 128 samples.
        pkt = pkt + (_npadding_bytes(len(pkt), int(samples_per_symbol), bits_per_symbol) * b'\x55')
    return pkt
def _npadding_bytes(pkt_byte_len, samples_per_symbol, bits_per_symbol):
    """
    Generate sufficient padding such that each packet ultimately ends
    up being a multiple of 512 bytes when sent across the USB.  We
    send 4-byte samples across the USB (16-bit I and 16-bit Q), thus
    we want to pad so that after modulation the resulting packet
    is a multiple of 128 samples.

    Args:
        pkt_byte_len: len in bytes of packet, not including padding.
        samples_per_symbol: samples per bit (1 bit / symbolwidth GMSK) (int)
        bits_per_symbol: bits per symbol (log2(modulation order)) (int)

    Returns:
        number of bytes of padding to append.
    """
    modulus = 128
    byte_modulus = gru.lcm(modulus // 8, samples_per_symbol) * bits_per_symbol // samples_per_symbol
    # (-x) % m is 0 when x is already a multiple of m, else m - (x % m).
    return -pkt_byte_len % byte_modulus
def unmake_packet(whitened_payload_with_crc, whitener_offset=0,
                  dewhitening=True, check_crc=True):
    """
    Return (ok, payload)

    Args:
        whitened_payload_with_crc: string
        whitener_offset: integer offset into whitener table
        dewhitening: True if we should run this through the dewhitener
        check_crc: True if we should check the CRC of the packet

    Returns:
        (ok, payload): ok is the CRC verdict (always True when check_crc
        is False); payload has the trailing CRC stripped when checked.
    """
    if dewhitening:
        payload_with_crc = dewhiten(whitened_payload_with_crc, whitener_offset)
    else:
        payload_with_crc = whitened_payload_with_crc

    if check_crc:
        ok, payload = crc.check_crc32(payload_with_crc)
    else:
        payload = payload_with_crc
        ok = True
    # The original carried an unreachable `if 0:` debug-print block here;
    # removed as dead code.
    return ok, payload
# FYI, this PN code is the output of a 15-bit LFSR
random_mask_tuple = (
255, 63, 0, 16, 0, 12, 0, 5, 192, 3, 16, 1, 204, 0, 85, 192,
63, 16, 16, 12, 12, 5, 197, 195, 19, 17, 205, 204, 85, 149, 255, 47,
0, 28, 0, 9, 192, 6, 208, 2, 220, 1, 153, 192, 106, 208, 47, 28,
28, 9, 201, 198, 214, 210, 222, 221, 152, 89, 170, 186, 255, 51, 0, 21,
192, 15, 16, 4, 12, 3, 69, 193, 243, 16, 69, 204, 51, 21, 213, 207,
31, 20, 8, 15, 70, 132, 50, 227, 85, 137, 255, 38, 192, 26, 208, 11,
28, 7, 73, 194, 182, 209, 182, 220, 118, 217, 230, 218, 202, 219, 23, 27,
78, 139, 116, 103, 103, 106, 170, 175, 63, 60, 16, 17, 204, 12, 85, 197,
255, 19, 0, 13, 192, 5, 144, 3, 44, 1, 221, 192, 89, 144, 58, 236,
19, 13, 205, 197, 149, 147, 47, 45, 220, 29, 153, 201, 170, 214, 255, 30,
192, 8, 80, 6, 188, 2, 241, 193, 132, 80, 99, 124, 41, 225, 222, 200,
88, 86, 186, 190, 243, 48, 69, 212, 51, 31, 85, 200, 63, 22, 144, 14,
236, 4, 77, 195, 117, 145, 231, 44, 74, 157, 247, 41, 134, 158, 226, 232,
73, 142, 182, 228, 118, 203, 102, 215, 106, 222, 175, 24, 124, 10, 161, 199,
56, 82, 146, 189, 173, 177, 189, 180, 113, 183, 100, 118, 171, 102, 255, 106,
192, 47, 16, 28, 12, 9, 197, 198, 211, 18, 221, 205, 153, 149, 170, 239,
63, 12, 16, 5, 204, 3, 21, 193, 207, 16, 84, 12, 63, 69, 208, 51,
28, 21, 201, 207, 22, 212, 14, 223, 68, 88, 51, 122, 149, 227, 47, 9,
220, 6, 217, 194, 218, 209, 155, 28, 107, 73, 239, 118, 204, 38, 213, 218,
223, 27, 24, 11, 74, 135, 119, 34, 166, 153, 186, 234, 243, 15, 5, 196,
3, 19, 65, 205, 240, 85, 132, 63, 35, 80, 25, 252, 10, 193, 199, 16,
82, 140, 61, 165, 209, 187, 28, 115, 73, 229, 246, 203, 6, 215, 66, 222,
177, 152, 116, 106, 167, 111, 58, 172, 19, 61, 205, 209, 149, 156, 111, 41,
236, 30, 205, 200, 85, 150, 191, 46, 240, 28, 68, 9, 243, 70, 197, 242,
211, 5, 157, 195, 41, 145, 222, 236, 88, 77, 250, 181, 131, 55, 33, 214,
152, 94, 234, 184, 79, 50, 180, 21, 183, 79, 54, 180, 22, 247, 78, 198,
180, 82, 247, 125, 134, 161, 162, 248, 121, 130, 162, 225, 185, 136, 114, 230,
165, 138, 251, 39, 3, 90, 129, 251, 32, 67, 88, 49, 250, 148, 67, 47,
113, 220, 36, 89, 219, 122, 219, 99, 27, 105, 203, 110, 215, 108, 94, 173,
248, 125, 130, 161, 161, 184, 120, 114, 162, 165, 185, 187, 50, 243, 85, 133,
255, 35, 0, 25, 192, 10, 208, 7, 28, 2, 137, 193, 166, 208, 122, 220,
35, 25, 217, 202, 218, 215, 27, 30, 139, 72, 103, 118, 170, 166, 255, 58,
192, 19, 16, 13, 204, 5, 149, 195, 47, 17, 220, 12, 89, 197, 250, 211,
3, 29, 193, 201, 144, 86, 236, 62, 205, 208, 85, 156, 63, 41, 208, 30,
220, 8, 89, 198, 186, 210, 243, 29, 133, 201, 163, 22, 249, 206, 194, 212,
81, 159, 124, 104, 33, 238, 152, 76, 106, 181, 239, 55, 12, 22, 133, 206,
227, 20, 73, 207, 118, 212, 38, 223, 90, 216, 59, 26, 147, 75, 45, 247,
93, 134, 185, 162, 242, 249, 133, 130, 227, 33, 137, 216, 102, 218, 170, 219,
63, 27, 80, 11, 124, 7, 97, 194, 168, 81, 190, 188, 112, 113, 228, 36,
75, 91, 119, 123, 102, 163, 106, 249, 239, 2, 204, 1, 149, 192, 111, 16,
44, 12, 29, 197, 201, 147, 22, 237, 206, 205, 148, 85, 175, 127, 60, 32,
17, 216, 12, 90, 133, 251, 35, 3, 89, 193, 250, 208, 67, 28, 49, 201,
212, 86, 223, 126, 216, 32, 90, 152, 59, 42, 147, 95, 45, 248, 29, 130,
137, 161, 166, 248, 122, 194, 163, 17, 185, 204, 114, 213, 229, 159, 11, 40,
7, 94, 130, 184, 97, 178, 168, 117, 190, 167, 48, 122, 148, 35, 47, 89,
220, 58, 217, 211, 26, 221, 203, 25, 151, 74, 238, 183, 12, 118, 133, 230,
227, 10, 201, 199, 22, 210, 142, 221, 164, 89, 187, 122, 243, 99, 5, 233,
195, 14, 209, 196, 92, 83, 121, 253, 226, 193, 137, 144, 102, 236, 42, 205,
223, 21, 152, 15, 42, 132, 31, 35, 72, 25, 246, 138, 198, 231, 18, 202,
141, 151, 37, 174, 155, 60, 107, 81, 239, 124, 76, 33, 245, 216, 71, 26,
178, 139, 53, 167, 87, 58, 190, 147, 48, 109, 212, 45, 159, 93, 168, 57,
190, 146, 240, 109, 132, 45, 163, 93, 185, 249, 178, 194, 245, 145, 135, 44,
98, 157, 233, 169, 142, 254, 228, 64, 75, 112, 55, 100, 22, 171, 78, 255,
116, 64, 39, 112, 26, 164, 11, 59, 71, 83, 114, 189, 229, 177, 139, 52,
103, 87, 106, 190, 175, 48, 124, 20, 33, 207, 88, 84, 58, 191, 83, 48,
61, 212, 17, 159, 76, 104, 53, 238, 151, 12, 110, 133, 236, 99, 13, 233,
197, 142, 211, 36, 93, 219, 121, 155, 98, 235, 105, 143, 110, 228, 44, 75,
93, 247, 121, 134, 162, 226, 249, 137, 130, 230, 225, 138, 200, 103, 22, 170,
142, 255, 36, 64, 27, 112, 11, 100, 7, 107, 66, 175, 113, 188, 36, 113,
219, 100, 91, 107, 123, 111, 99, 108, 41, 237, 222, 205, 152, 85, 170, 191,
63, 48, 16, 20, 12, 15, 69, 196, 51, 19, 85, 205, 255, 21, 128, 15,
32, 4, 24, 3, 74, 129, 247, 32, 70, 152, 50, 234, 149, 143, 47, 36,
28, 27, 73, 203, 118, 215, 102, 222, 170, 216, 127, 26, 160, 11, 56, 7,
82, 130, 189, 161, 177, 184, 116, 114, 167, 101, 186, 171, 51, 63, 85, 208,
63, 28, 16, 9, 204, 6, 213, 194, 223, 17, 152, 12, 106, 133, 239, 35,
12, 25, 197, 202, 211, 23, 29, 206, 137, 148, 102, 239, 106, 204, 47, 21,
220, 15, 25, 196, 10, 211, 71, 29, 242, 137, 133, 166, 227, 58, 201, 211,
22, 221, 206, 217, 148, 90, 239, 123, 12, 35, 69, 217, 243, 26, 197, 203,
19, 23, 77, 206, 181, 148, 119, 47, 102, 156, 42, 233, 223, 14, 216, 4,
90, 131, 123, 33, 227, 88, 73, 250, 182, 195, 54, 209, 214, 220, 94, 217,
248, 90, 194, 187, 17, 179, 76, 117, 245, 231, 7, 10, 130, 135, 33, 162,
152, 121, 170, 162, 255, 57, 128, 18, 224, 13, 136, 5, 166, 131, 58, 225,
211, 8, 93, 198, 185, 146, 242, 237, 133, 141, 163, 37, 185, 219, 50, 219,
85, 155, 127, 43, 96, 31, 104, 8, 46, 134, 156, 98, 233, 233, 142, 206,
228, 84, 75, 127, 119, 96, 38, 168, 26, 254, 139, 0, 103, 64, 42, 176,
31, 52, 8, 23, 70, 142, 178, 228, 117, 139, 103, 39, 106, 154, 175, 43,
60, 31, 81, 200, 60, 86, 145, 254, 236, 64, 77, 240, 53, 132, 23, 35,
78, 153, 244, 106, 199, 111, 18, 172, 13, 189, 197, 177, 147, 52, 109, 215,
109, 158, 173, 168, 125, 190, 161, 176, 120, 116, 34, 167, 89, 186, 186, 243,
51, 5, 213, 195, 31, 17, 200, 12, 86, 133, 254, 227, 0, 73, 192, 54,
208, 22, 220, 14, 217, 196, 90, 211, 123, 29, 227, 73, 137, 246, 230, 198,
202, 210, 215, 29, 158, 137, 168, 102, 254, 170, 192, 127, 16, 32, 12, 24,
5, 202, 131, 23, 33, 206, 152, 84, 106, 191, 111, 48, 44, 20, 29, 207,
73, 148, 54, 239, 86, 204, 62, 213, 208, 95, 28, 56, 9, 210, 134, 221,
162, 217, 185, 154, 242, 235, 5, 143, 67, 36, 49, 219, 84, 91, 127, 123,
96, 35, 104, 25, 238, 138, 204, 103, 21, 234, 143, 15, 36, 4, 27, 67,
75, 113, 247, 100, 70, 171, 114, 255, 101, 128, 43, 32, 31, 88, 8, 58,
134, 147, 34, 237, 217, 141, 154, 229, 171, 11, 63, 71, 80, 50, 188, 21,
177, 207, 52, 84, 23, 127, 78, 160, 52, 120, 23, 98, 142, 169, 164, 126,
251, 96, 67, 104, 49, 238, 148, 76, 111, 117, 236, 39, 13, 218, 133, 155,
35, 43, 89, 223, 122, 216, 35, 26, 153, 203, 42, 215, 95, 30, 184, 8,
114, 134, 165, 162, 251, 57, 131, 82, 225, 253, 136, 65, 166, 176, 122, 244,
35, 7, 89, 194, 186, 209, 179, 28, 117, 201, 231, 22, 202, 142, 215, 36,
94, 155, 120, 107, 98, 175, 105, 188, 46, 241, 220, 68, 89, 243, 122, 197,
227, 19, 9, 205, 198, 213, 146, 223, 45, 152, 29, 170, 137, 191, 38, 240,
26, 196, 11, 19, 71, 77, 242, 181, 133, 183, 35, 54, 153, 214, 234, 222,
207, 24, 84, 10, 191, 71, 48, 50, 148, 21, 175, 79, 60, 52, 17, 215,
76, 94, 181, 248, 119, 2, 166, 129, 186, 224, 115, 8, 37, 198, 155, 18,
235, 77, 143, 117, 164, 39, 59, 90, 147, 123, 45, 227, 93, 137, 249, 166,
194, 250, 209, 131, 28, 97, 201, 232, 86, 206, 190, 212, 112, 95, 100, 56,
43, 82, 159, 125, 168, 33, 190, 152, 112, 106, 164, 47, 59, 92, 19, 121,
205, 226, 213, 137, 159, 38, 232, 26, 206, 139, 20, 103, 79, 106, 180, 47,
55, 92, 22, 185, 206, 242, 212, 69, 159, 115, 40, 37, 222, 155, 24, 107,
74, 175, 119, 60, 38, 145, 218, 236, 91, 13, 251, 69, 131, 115, 33, 229,
216, 75, 26, 183, 75, 54, 183, 86, 246, 190, 198, 240, 82, 196, 61, 147,
81, 173, 252, 125, 129, 225, 160, 72, 120, 54, 162, 150, 249, 174, 194, 252,
81, 129, 252, 96, 65, 232, 48, 78, 148, 52, 111, 87, 108, 62, 173, 208,
125, 156, 33, 169, 216, 126, 218, 160, 91, 56, 59, 82, 147, 125, 173, 225,
189, 136, 113, 166, 164, 122, 251, 99, 3, 105, 193, 238, 208, 76, 92, 53,
249, 215, 2, 222, 129, 152, 96, 106, 168, 47, 62, 156, 16, 105, 204, 46,
213, 220, 95, 25, 248, 10, 194, 135, 17, 162, 140, 121, 165, 226, 251, 9,
131, 70, 225, 242, 200, 69, 150, 179, 46, 245, 220, 71, 25, 242, 138, 197,
167, 19, 58, 141, 211, 37, 157, 219, 41, 155, 94, 235, 120, 79, 98, 180,
41, 183, 94, 246, 184, 70, 242, 178, 197, 181, 147, 55, 45, 214, 157, 158,
233, 168, 78, 254, 180, 64, 119, 112, 38, 164, 26, 251, 75, 3, 119, 65,
230, 176, 74, 244, 55, 7, 86, 130, 190, 225, 176, 72, 116, 54, 167, 86,
250, 190, 195, 48, 81, 212, 60, 95, 81, 248, 60, 66, 145, 241, 172, 68,
125, 243, 97, 133, 232, 99, 14, 169, 196, 126, 211, 96, 93, 232, 57, 142,
146, 228, 109, 139, 109, 167, 109, 186, 173, 179, 61, 181, 209, 183, 28, 118,
137, 230, 230, 202, 202, 215, 23, 30, 142, 136, 100, 102, 171, 106, 255, 111,
0, 44, 0, 29, 192, 9, 144, 6, 236, 2, 205, 193, 149, 144, 111, 44,
44, 29, 221, 201, 153, 150, 234, 238, 207, 12, 84, 5, 255, 67, 0, 49,
192, 20, 80, 15, 124, 4, 33, 195, 88, 81, 250, 188, 67, 49, 241, 212,
68, 95, 115, 120, 37, 226, 155, 9, 171, 70, 255, 114, 192, 37, 144, 27,
44, 11, 93, 199, 121, 146, 162, 237, 185, 141, 178, 229, 181, 139, 55, 39,
86, 154, 190, 235, 48, 79, 84, 52, 63, 87, 80, 62, 188, 16, 113, 204,
36, 85, 219, 127, 27, 96, 11, 104, 7, 110, 130, 172, 97, 189, 232, 113,
142, 164, 100, 123, 107, 99, 111, 105, 236, 46, 205, 220, 85, 153, 255, 42,
192, 31, 16, 8, 12, 6, 133, 194, 227, 17, 137, 204, 102, 213, 234, 223,
15, 24, 4, 10, 131, 71, 33, 242, 152, 69, 170, 179, 63, 53, 208, 23,
28, 14, 137, 196, 102, 211, 106, 221, 239, 25, 140, 10, 229, 199, 11, 18,
135, 77, 162, 181, 185, 183, 50, 246, 149, 134, 239, 34, 204, 25, 149, 202,
239, 23, 12, 14, 133, 196, 99, 19, 105, 205, 238, 213, 140, 95, 37, 248,
27, 2, 139, 65, 167, 112, 122, 164, 35, 59, 89, 211, 122, 221, 227, 25,
137, 202, 230, 215, 10, 222, 135, 24, 98, 138, 169, 167, 62, 250, 144, 67,
44, 49, 221, 212, 89, 159, 122, 232, 35, 14, 153, 196, 106, 211, 111, 29,
236, 9, 141, 198, 229, 146, 203, 45, 151, 93, 174, 185, 188, 114, 241, 229,
132, 75, 35, 119, 89, 230, 186, 202, 243, 23, 5, 206, 131, 20, 97, 207,
104, 84, 46, 191, 92, 112, 57, 228, 18, 203, 77, 151, 117, 174, 167, 60,
122, 145, 227, 44, 73, 221, 246, 217, 134, 218, 226, 219, 9, 155, 70, 235,
114, 207, 101, 148, 43, 47, 95, 92, 56, 57, 210, 146, 221, 173, 153, 189,
170, 241, 191, 4, 112, 3, 100, 1, 235, 64, 79, 112, 52, 36, 23, 91,
78, 187, 116, 115, 103, 101, 234, 171, 15, 63, 68, 16, 51, 76, 21, 245,
207, 7, 20, 2, 143, 65, 164, 48, 123, 84, 35, 127, 89, 224, 58, 200,
19, 22, 141, 206, 229, 148, 75, 47, 119, 92, 38, 185, 218, 242, 219, 5,
155, 67, 43, 113, 223, 100, 88, 43, 122, 159, 99, 40, 41, 222, 158, 216,
104, 90, 174, 187, 60, 115, 81, 229, 252, 75, 1, 247, 64, 70, 176, 50,
244, 21, 135, 79, 34, 180, 25, 183, 74, 246, 183, 6, 246, 130, 198, 225,
146, 200, 109, 150, 173, 174, 253, 188, 65, 177, 240, 116, 68, 39, 115, 90,
165, 251, 59, 3, 83, 65, 253, 240, 65, 132, 48, 99, 84, 41, 255, 94,
192, 56, 80, 18, 188, 13, 177, 197, 180, 83, 55, 125, 214, 161, 158, 248,
104, 66, 174, 177, 188, 116, 113, 231, 100, 74, 171, 119, 63, 102, 144, 42,
236, 31, 13, 200, 5, 150, 131, 46, 225, 220, 72, 89, 246, 186, 198, 243,
18, 197, 205, 147, 21, 173, 207, 61, 148, 17, 175, 76, 124, 53, 225, 215,
8, 94, 134, 184, 98, 242, 169, 133, 190, 227, 48, 73, 212, 54, 223, 86,
216, 62, 218, 144, 91, 44, 59, 93, 211, 121, 157, 226, 233, 137, 142, 230,
228, 74, 203, 119, 23, 102, 142, 170, 228, 127, 11, 96, 7, 104, 2, 174,
129, 188, 96, 113, 232, 36, 78, 155, 116, 107, 103, 111, 106, 172, 47, 61,
220, 17, 153, 204, 106, 213, 239, 31, 12, 8, 5, 198, 131, 18, 225, 205,
136, 85, 166, 191, 58, 240, 19, 4, 13, 195, 69, 145, 243, 44, 69, 221,
243, 25, 133, 202, 227, 23, 9, 206, 134, 212, 98, 223, 105, 152, 46, 234,
156, 79, 41, 244, 30, 199, 72, 82, 182, 189, 182, 241, 182, 196, 118, 211,
102, 221, 234, 217, 143, 26, 228, 11, 11, 71, 71, 114, 178, 165, 181, 187,
55, 51, 86, 149, 254, 239, 0, 76, 0, 53, 192, 23, 16, 14, 140, 4,
101, 195, 107, 17, 239, 76, 76, 53, 245, 215, 7, 30, 130, 136, 97, 166,
168, 122, 254, 163, 0, 121, 192, 34, 208, 25, 156, 10, 233, 199, 14, 210,
132, 93, 163, 121, 185, 226, 242, 201, 133, 150, 227, 46, 201, 220, 86, 217,
254, 218, 192, 91, 16, 59, 76, 19, 117, 205, 231, 21, 138, 143, 39, 36,
26, 155, 75, 43, 119, 95, 102, 184, 42, 242, 159, 5, 168, 3, 62, 129,
208, 96, 92, 40, 57, 222, 146, 216, 109, 154, 173, 171, 61, 191, 81, 176,
60, 116, 17, 231, 76, 74, 181, 247, 55, 6, 150, 130, 238, 225, 140, 72,
101, 246, 171, 6, 255, 66, 192, 49, 144, 20, 108, 15, 109, 196, 45, 147,
93, 173, 249, 189, 130, 241, 161, 132, 120, 99, 98, 169, 233, 190, 206, 240,
84, 68, 63, 115, 80, 37, 252, 27, 1, 203, 64, 87, 112, 62, 164, 16,
123, 76, 35, 117, 217, 231, 26, 202, 139, 23, 39, 78, 154, 180, 107, 55,
111, 86, 172, 62, 253, 208, 65, 156, 48, 105, 212, 46, 223, 92, 88, 57,
250, 146, 195, 45, 145, 221, 172, 89, 189, 250, 241, 131, 4, 97, 195, 104,
81, 238, 188, 76, 113, 245, 228, 71, 11, 114, 135, 101, 162, 171, 57, 191,
82, 240, 61, 132, 17, 163, 76, 121, 245, 226, 199, 9, 146, 134, 237, 162,
205, 185, 149, 178, 239, 53, 140, 23, 37, 206, 155, 20, 107, 79, 111, 116,
44, 39, 93, 218, 185, 155, 50, 235, 85, 143, 127, 36, 32, 27, 88, 11,
122, 135, 99, 34, 169, 217, 190, 218, 240, 91, 4, 59, 67, 83, 113, 253,
228, 65, 139, 112, 103, 100, 42, 171, 95, 63, 120, 16, 34, 140, 25, 165,
202, 251, 23, 3, 78, 129, 244, 96, 71, 104, 50, 174, 149, 188, 111, 49,
236, 20, 77, 207, 117, 148, 39, 47, 90, 156, 59, 41, 211, 94, 221, 248,
89, 130, 186, 225, 179, 8, 117, 198, 167, 18, 250, 141, 131, 37, 161, 219,
56, 91, 82, 187, 125, 179, 97, 181, 232, 119, 14, 166, 132, 122, 227, 99,
9, 233, 198, 206, 210, 212, 93, 159, 121, 168, 34, 254, 153, 128, 106, 224,
47, 8, 28, 6, 137, 194, 230, 209, 138, 220, 103, 25, 234, 138, 207, 39,
20, 26, 143, 75, 36, 55, 91, 86, 187, 126, 243, 96, 69, 232, 51, 14,
149, 196, 111, 19, 108, 13, 237, 197, 141, 147, 37, 173, 219, 61, 155, 81,
171, 124, 127, 97, 224, 40, 72, 30, 182, 136, 118, 230, 166, 202, 250, 215,
3, 30, 129, 200, 96, 86, 168, 62, 254, 144, 64, 108, 48, 45, 212, 29,
159, 73, 168, 54, 254, 150, 192, 110, 208, 44, 92, 29, 249, 201, 130, 214,
225, 158, 200, 104, 86, 174, 190, 252, 112, 65, 228, 48, 75, 84, 55, 127,
86, 160, 62, 248, 16, 66, 140, 49, 165, 212, 123, 31, 99, 72, 41, 246,
158, 198, 232, 82, 206, 189, 148, 113, 175, 100, 124, 43, 97, 223, 104, 88,
46, 186, 156, 115, 41, 229, 222, 203, 24, 87, 74, 190, 183, 48, 118, 148,
38, 239, 90, 204, 59, 21, 211, 79, 29, 244, 9, 135, 70, 226, 178, 201,
181, 150, 247, 46, 198, 156, 82, 233, 253, 142, 193, 164, 80, 123, 124, 35,
97, 217, 232, 90, 206, 187, 20, 115, 79, 101, 244, 43, 7, 95, 66, 184,
49, 178, 148, 117, 175, 103, 60, 42, 145, 223, 44, 88, 29, 250, 137, 131,
38, 225, 218, 200, 91, 22, 187, 78, 243, 116, 69, 231, 115, 10, 165, 199,
59, 18, 147, 77, 173, 245, 189, 135, 49, 162, 148, 121, 175, 98, 252, 41,
129, 222, 224, 88, 72, 58, 182, 147, 54, 237, 214, 205, 158, 213, 168, 95,
62, 184, 16, 114, 140, 37, 165, 219, 59, 27, 83, 75, 125, 247, 97, 134,
168, 98, 254, 169, 128, 126, 224, 32, 72, 24, 54, 138, 150, 231, 46, 202,
156, 87, 41, 254, 158, 192, 104, 80, 46, 188, 28, 113, 201, 228, 86, 203,
126, 215, 96, 94, 168, 56, 126, 146, 160, 109, 184, 45, 178, 157, 181, 169,
183, 62, 246, 144, 70, 236, 50, 205, 213, 149, 159, 47, 40, 28, 30, 137,
200, 102, 214, 170, 222, 255, 24, 64, 10, 176, 7, 52, 2, 151, 65, 174,
176, 124, 116, 33, 231, 88, 74, 186, 183, 51, 54, 149, 214, 239, 30, 204,
8, 85, 198, 191, 18, 240, 13, 132, 5, 163, 67, 57, 241, 210, 196, 93,
147, 121, 173, 226, 253, 137, 129, 166, 224, 122, 200, 35, 22, 153, 206, 234,
212, 79, 31, 116, 8, 39, 70, 154, 178, 235, 53, 143, 87, 36, 62, 155,
80, 107, 124, 47, 97, 220, 40, 89, 222, 186, 216, 115, 26, 165, 203, 59,
23, 83, 78, 189, 244, 113, 135, 100, 98, 171, 105, 191, 110, 240, 44, 68,
29, 243, 73, 133, 246, 227, 6, 201, 194, 214, 209, 158, 220, 104, 89, 238,
186, 204, 115, 21, 229, 207, 11, 20, 7, 79, 66, 180, 49, 183, 84, 118,
191, 102, 240, 42, 196, 31, 19, 72, 13, 246, 133, 134, 227, 34, 201, 217,
150, 218, 238, 219, 12, 91, 69, 251, 115, 3, 101, 193, 235, 16, 79, 76,
52, 53, 215, 87, 30, 190, 136, 112, 102, 164, 42, 251, 95, 3, 120, 1,
226, 128, 73, 160, 54, 248, 22, 194, 142, 209, 164, 92, 123, 121, 227, 98,
201, 233, 150, 206, 238, 212, 76, 95, 117, 248, 39, 2, 154, 129, 171, 32,
127, 88, 32, 58, 152, 19, 42, 141, 223, 37, 152, 27, 42, 139, 95, 39,
120, 26, 162, 139, 57, 167, 82, 250, 189, 131, 49, 161, 212, 120, 95, 98,
184, 41, 178, 158, 245, 168, 71, 62, 178, 144, 117, 172, 39, 61, 218, 145,
155, 44, 107, 93, 239, 121, 140, 34, 229, 217, 139, 26, 231, 75, 10, 183,
71, 54, 178, 150, 245, 174, 199, 60, 82, 145, 253, 172, 65, 189, 240, 113,
132, 36, 99, 91, 105, 251, 110, 195, 108, 81, 237, 252, 77, 129, 245, 160,
71, 56, 50, 146, 149, 173, 175, 61, 188, 17, 177, 204, 116, 85, 231, 127,
10, 160, 7, 56, 2, 146, 129, 173, 160, 125, 184, 33, 178, 152, 117, 170,
167, 63, 58, 144, 19, 44, 13, 221, 197, 153, 147, 42, 237, 223, 13, 152,
5, 170, 131, 63, 33, 208, 24, 92, 10, 185, 199, 50, 210, 149, 157, 175,
41, 188, 30, 241, 200, 68, 86, 179, 126, 245, 224, 71, 8, 50, 134, 149,
162, 239, 57, 140, 18, 229, 205, 139, 21, 167, 79, 58, 180, 19, 55, 77,
214, 181, 158, 247, 40, 70, 158, 178, 232, 117, 142, 167, 36, 122, 155, 99,
43, 105, 223, 110, 216, 44, 90, 157, 251, 41, 131, 94, 225, 248, 72, 66,
182, 177, 182, 244, 118, 199, 102, 210, 170, 221, 191, 25, 176, 10, 244, 7,
7, 66, 130, 177, 161, 180, 120, 119, 98, 166, 169, 186, 254, 243, 0, 69,
192, 51, 16, 21, 204, 15, 21, 196, 15, 19, 68, 13, 243, 69, 133, 243,
35, 5, 217, 195, 26, 209, 203, 28, 87, 73, 254, 182, 192, 118, 208, 38,
220, 26, 217, 203, 26, 215, 75, 30, 183, 72, 118, 182, 166, 246, 250, 198,
195, 18, 209, 205, 156, 85, 169, 255, 62, 192, 16, 80, 12, 60, 5, 209,
195, 28, 81, 201, 252, 86, 193, 254, 208, 64, 92, 48, 57, 212, 18, 223,
77, 152, 53, 170, 151, 63, 46, 144, 28, 108, 9, 237, 198, 205, 146, 213,
173, 159, 61, 168, 17, 190, 140, 112, 101, 228, 43, 11, 95, 71, 120, 50,
162, 149, 185, 175, 50, 252, 21, 129, 207, 32, 84, 24, 63, 74, 144, 55,
44, 22, 157, 206, 233, 148, 78, 239, 116, 76, 39, 117, 218, 167, 27, 58,
139, 83, 39, 125, 218, 161, 155, 56, 107, 82, 175, 125, 188, 33, 177, 216,
116, 90, 167, 123, 58, 163, 83, 57, 253, 210, 193, 157, 144, 105, 172, 46,
253, 220, 65, 153, 240, 106, 196, 47, 19, 92, 13, 249, 197, 130, 211, 33,
157, 216, 105, 154, 174, 235, 60, 79, 81, 244, 60, 71, 81, 242, 188, 69,
177, 243, 52, 69, 215, 115, 30, 165, 200, 123, 22, 163, 78, 249, 244, 66,
199, 113, 146, 164, 109, 187, 109, 179, 109, 181, 237, 183, 13, 182, 133, 182,
227, 54, 201, 214, 214, 222, 222, 216, 88, 90, 186, 187, 51, 51, 255, 63 )
# uint8 array view of the PN sequence above; sliced and XORed against packet
# payloads by whiten()/dewhiten().
random_mask_vec8 = numpy.array(random_mask_tuple, numpy.uint8)
| codeparrot/github-code-clean |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Stephen Fromm <sfromm@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible module metadata: declares a stable public interface maintained by
# the core team (consumed by ansible-doc and the module-validation tooling).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = r'''
module: user
version_added: "0.2"
short_description: Manage user accounts
description:
- Manage user accounts and user attributes.
- For Windows targets, use the M(win_user) module instead.
options:
name:
description:
- Name of the user to create, remove or modify.
type: str
required: true
aliases: [ user ]
uid:
description:
- Optionally sets the I(UID) of the user.
type: int
comment:
description:
- Optionally sets the description (aka I(GECOS)) of user account.
type: str
hidden:
description:
- macOS only, optionally hide the user from the login window and system preferences.
- The default will be C(yes) if the I(system) option is used.
type: bool
version_added: "2.6"
non_unique:
description:
- Optionally when used with the -u option, this option allows to change the user ID to a non-unique value.
type: bool
default: no
version_added: "1.1"
seuser:
description:
- Optionally sets the seuser type (user_u) on selinux enabled systems.
type: str
version_added: "2.1"
group:
description:
- Optionally sets the user's primary group (takes a group name).
type: str
groups:
description:
- List of groups user will be added to. When set to an empty string C(''),
C(null), or C(~), the user is removed from all groups except the
primary group. (C(~) means C(null) in YAML)
- Before Ansible 2.3, the only input format allowed was a comma separated string.
type: list
append:
description:
- If C(yes), add the user to the groups specified in C(groups).
- If C(no), user will only be added to the groups specified in C(groups),
removing them from all other groups.
type: bool
default: no
shell:
description:
- Optionally set the user's shell.
- On macOS, before Ansible 2.5, the default shell for non-system users was C(/usr/bin/false).
Since Ansible 2.5, the default shell for non-system users on macOS is C(/bin/bash).
- On other operating systems, the default shell is determined by the underlying tool being
used. See Notes for details.
type: str
home:
description:
- Optionally set the user's home directory.
type: path
skeleton:
description:
- Optionally set a home skeleton directory.
- Requires C(create_home) option!
type: str
version_added: "2.0"
password:
description:
- Optionally set the user's password to this crypted value.
- On macOS systems, this value has to be cleartext. Beware of security issues.
- To create a disabled account on Linux systems, set this to C('!') or C('*').
- See U(https://docs.ansible.com/ansible/faq.html#how-do-i-generate-crypted-passwords-for-the-user-module)
for details on various ways to generate these password values.
type: str
state:
description:
- Whether the account should exist or not, taking action if the state is different from what is stated.
type: str
choices: [ absent, present ]
default: present
create_home:
description:
- Unless set to C(no), a home directory will be made for the user
when the account is created or if the home directory does not exist.
- Changed from C(createhome) to C(create_home) in Ansible 2.5.
type: bool
default: yes
aliases: [ createhome ]
move_home:
description:
- "If set to C(yes) when used with C(home: ), attempt to move the user's old home
directory to the specified directory if it isn't there already and the old home exists."
type: bool
default: no
system:
description:
- When creating an account C(state=present), setting this to C(yes) makes the user a system account.
- This setting cannot be changed on existing users.
type: bool
default: no
force:
description:
- This only affects C(state=absent), it forces removal of the user and associated directories on supported platforms.
- The behavior is the same as C(userdel --force), check the man page for C(userdel) on your system for details and support.
- When used with C(generate_ssh_key=yes) this forces an existing key to be overwritten.
type: bool
default: no
remove:
description:
- This only affects C(state=absent), it attempts to remove directories associated with the user.
- The behavior is the same as C(userdel --remove), check the man page for details and support.
type: bool
default: no
login_class:
description:
- Optionally sets the user's login class, a feature of most BSD OSs.
type: str
generate_ssh_key:
description:
- Whether to generate a SSH key for the user in question.
- This will B(not) overwrite an existing SSH key unless used with C(force=yes).
type: bool
default: no
version_added: "0.9"
ssh_key_bits:
description:
- Optionally specify number of bits in SSH key to create.
type: int
default: default set by ssh-keygen
version_added: "0.9"
ssh_key_type:
description:
- Optionally specify the type of SSH key to generate.
- Available SSH key types will depend on implementation
present on target host.
type: str
default: rsa
version_added: "0.9"
ssh_key_file:
description:
- Optionally specify the SSH key filename.
- If this is a relative filename then it will be relative to the user's home directory.
- This parameter defaults to I(.ssh/id_rsa).
type: path
version_added: "0.9"
ssh_key_comment:
description:
- Optionally define the comment for the SSH key.
type: str
default: ansible-generated on $HOSTNAME
version_added: "0.9"
ssh_key_passphrase:
description:
- Set a passphrase for the SSH key.
- If no passphrase is provided, the SSH key will default to having no passphrase.
type: str
version_added: "0.9"
update_password:
description:
- C(always) will update passwords if they differ.
- C(on_create) will only set the password for newly created users.
type: str
choices: [ always, on_create ]
default: always
version_added: "1.3"
expires:
description:
- An expiry time for the user in epoch, it will be ignored on platforms that do not support this.
- Currently supported on GNU/Linux, FreeBSD, and DragonFlyBSD.
- Since Ansible 2.6 you can remove the expiry time specify a negative value.
Currently supported on GNU/Linux and FreeBSD.
type: float
version_added: "1.9"
password_lock:
description:
- Lock the password (usermod -L, pw lock, usermod -C).
- BUT implementation differs on different platforms, this option does not always mean the user cannot login via other methods.
- This option does not disable the user, only lock the password. Do not change the password in the same task.
- Currently supported on Linux, FreeBSD, DragonFlyBSD, NetBSD, OpenBSD.
type: bool
version_added: "2.6"
local:
description:
- Forces the use of "local" command alternatives on platforms that implement it.
- This is useful in environments that use centralized authentication when you want to manipulate the local users
(in other words, it uses C(luseradd) instead of C(useradd)).
- This requires that these commands exist on the targeted host, otherwise it will be a fatal error.
type: bool
default: no
version_added: "2.4"
profile:
description:
- Sets the profile of the user.
- Does nothing when used with other platforms.
- Can set multiple profiles using comma separation.
- To delete all the profiles, use C(profile='').
- Currently supported on Illumos/Solaris.
type: str
version_added: "2.8"
authorization:
description:
- Sets the authorization of the user.
- Does nothing when used with other platforms.
- Can set multiple authorizations using comma separation.
- To delete all authorizations, use C(authorization='').
- Currently supported on Illumos/Solaris.
type: str
version_added: "2.8"
role:
description:
- Sets the role of the user.
- Does nothing when used with other platforms.
- Can set multiple roles using comma separation.
- To delete all roles, use C(role='').
- Currently supported on Illumos/Solaris.
type: str
version_added: "2.8"
notes:
- There are specific requirements per platform on user management utilities. However
they generally come pre-installed with the system and Ansible will require they
are present at runtime. If they are not, a descriptive error message will be shown.
- On SunOS platforms, the shadow file is backed up automatically since this module edits it directly.
On other platforms, the shadow file is backed up by the underlying tools used by this module.
- On macOS, this module uses C(dscl) to create, modify, and delete accounts. C(dseditgroup) is used to
modify group membership. Accounts are hidden from the login window by modifying
C(/Library/Preferences/com.apple.loginwindow.plist).
- On FreeBSD, this module uses C(pw useradd) and C(chpass) to create, C(pw usermod) and C(chpass) to modify,
C(pw userdel) to remove, C(pw lock) to lock, and C(pw unlock) to unlock accounts.
- On all other platforms, this module uses C(useradd) to create, C(usermod) to modify, and
C(userdel) to remove accounts.
seealso:
- module: authorized_key
- module: group
- module: win_user
author:
- Stephen Fromm (@sfromm)
'''
EXAMPLES = r'''
- name: Add the user 'johnd' with a specific uid and a primary group of 'admin'
user:
name: johnd
comment: John Doe
uid: 1040
group: admin
- name: Add the user 'james' with a bash shell, appending the group 'admins' and 'developers' to the user's groups
user:
name: james
shell: /bin/bash
groups: admins,developers
append: yes
- name: Remove the user 'johnd'
user:
name: johnd
state: absent
remove: yes
- name: Create a 2048-bit SSH key for user jsmith in ~jsmith/.ssh/id_rsa
user:
name: jsmith
generate_ssh_key: yes
ssh_key_bits: 2048
ssh_key_file: .ssh/id_rsa
- name: Add a consultant whose account you want to expire
user:
name: james18
shell: /bin/zsh
groups: developers
expires: 1422403387
- name: Starting at Ansible 2.6, modify user, remove expiry time
user:
name: james18
expires: -1
'''
RETURN = r'''
append:
description: Whether or not to append the user to groups
returned: When state is 'present' and the user exists
type: bool
sample: True
comment:
description: Comment section from passwd file, usually the user name
returned: When user exists
type: str
sample: Agent Smith
create_home:
description: Whether or not to create the home directory
returned: When user does not exist and not check mode
type: bool
sample: True
force:
description: Whether or not a user account was forcibly deleted
returned: When state is 'absent' and user exists
type: bool
sample: False
group:
description: Primary user group ID
returned: When user exists
type: int
sample: 1001
groups:
description: List of groups of which the user is a member
returned: When C(groups) is not empty and C(state) is 'present'
type: str
sample: 'chrony,apache'
home:
description: "Path to user's home directory"
returned: When C(state) is 'present'
type: str
sample: '/home/asmith'
move_home:
description: Whether or not to move an existing home directory
returned: When C(state) is 'present' and user exists
type: bool
sample: False
name:
description: User account name
returned: always
type: str
sample: asmith
password:
description: Masked value of the password
returned: When C(state) is 'present' and C(password) is not empty
type: str
sample: 'NOT_LOGGING_PASSWORD'
remove:
description: Whether or not to remove the user account
returned: When C(state) is 'absent' and user exists
type: bool
sample: True
shell:
description: User login shell
returned: When C(state) is 'present'
type: str
sample: '/bin/bash'
ssh_fingerprint:
description: Fingerprint of generated SSH key
returned: When C(generate_ssh_key) is C(True)
type: str
sample: '2048 SHA256:aYNHYcyVm87Igh0IMEDMbvW0QDlRQfE0aJugp684ko8 ansible-generated on host (RSA)'
ssh_key_file:
description: Path to generated SSH public key file
returned: When C(generate_ssh_key) is C(True)
type: str
sample: /home/asmith/.ssh/id_rsa
ssh_public_key:
description: Generated SSH public key file
returned: When C(generate_ssh_key) is C(True)
type: str
sample: >
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC95opt4SPEC06tOYsJQJIuN23BbLMGmYo8ysVZQc4h2DZE9ugbjWWGS1/pweUGjVstgzMkBEeBCByaEf/RJKNecKRPeGd2Bw9DCj/bn5Z6rGfNENKBmo
618mUJBvdlEgea96QGjOwSB7/gmonduC7gsWDMNcOdSE3wJMTim4lddiBx4RgC9yXsJ6Tkz9BHD73MXPpT5ETnse+A3fw3IGVSjaueVnlUyUmOBf7fzmZbhlFVXf2Zi2rFTXqvbdGHKkzpw1U8eB8xFPP7y
d5u1u0e6Acju/8aZ/l17IDFiLke5IzlqIMRTEbDwLNeO84YQKWTm9fODHzhYe0yvxqLiK07 ansible-generated on host'
stderr:
description: Standard error from running commands
returned: When stderr is returned by a command that is run
type: str
sample: Group wheels does not exist
stdout:
description: Standard output from running commands
returned: When standard output is returned by the command that is run
type: str
sample:
system:
description: Whether or not the account is a system account
returned: When C(system) is passed to the module and the account does not exist
type: bool
sample: True
uid:
description: User ID of the user account
returned: When C(UID) is passed to the module
type: int
sample: 1044
'''
import errno
import grp
import calendar
import os
import re
import pty
import pwd
import select
import shutil
import socket
import subprocess
import time
from ansible.module_utils import distro
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.basic import load_platform_subclass, AnsibleModule
try:
import spwd
HAVE_SPWD = True
except ImportError:
HAVE_SPWD = False
_HASH_RE = re.compile(r'[^a-zA-Z0-9./=]')
class User(object):
    """
    This is a generic User manipulation class that is subclassed
    based on platform.

    A subclass may wish to override the following action methods:-
      - create_user()
      - remove_user()
      - modify_user()
      - ssh_key_gen()
      - ssh_key_fingerprint()
      - user_exists()

    All subclasses MUST define platform and distribution (which may be None).
    """

    platform = 'Generic'
    distribution = None
    # Path of the shadow password file and the index of its account-expiry
    # field; platform subclasses override these (e.g. FreeBSD uses
    # /etc/master.passwd with the expiry in field 6).
    SHADOWFILE = '/etc/shadow'
    SHADOWFILE_EXPIRE_INDEX = 7
    LOGIN_DEFS = '/etc/login.defs'
    DATE_FORMAT = '%Y-%m-%d'

    def __new__(cls, *args, **kwargs):
        # Dispatch instantiation to the platform-specific subclass
        # (FreeBsdUser, OpenBSDUser, ...) selected by load_platform_subclass.
        return load_platform_subclass(User, args, kwargs)

    def __init__(self, module):
        """Copy the module parameters onto the instance and normalize them."""
        self.module = module
        self.state = module.params['state']
        self.name = module.params['name']
        self.uid = module.params['uid']
        self.hidden = module.params['hidden']
        self.non_unique = module.params['non_unique']
        self.seuser = module.params['seuser']
        self.group = module.params['group']
        self.comment = module.params['comment']
        self.shell = module.params['shell']
        self.password = module.params['password']
        self.force = module.params['force']
        self.remove = module.params['remove']
        self.create_home = module.params['create_home']
        self.move_home = module.params['move_home']
        self.skeleton = module.params['skeleton']
        self.system = module.params['system']
        self.login_class = module.params['login_class']
        self.append = module.params['append']
        self.sshkeygen = module.params['generate_ssh_key']
        self.ssh_bits = module.params['ssh_key_bits']
        self.ssh_type = module.params['ssh_key_type']
        self.ssh_comment = module.params['ssh_key_comment']
        self.ssh_passphrase = module.params['ssh_key_passphrase']
        self.update_password = module.params['update_password']
        self.home = module.params['home']
        self.expires = None
        self.password_lock = module.params['password_lock']
        self.groups = None
        self.local = module.params['local']
        self.profile = module.params['profile']
        self.authorization = module.params['authorization']
        self.role = module.params['role']

        if module.params['groups'] is not None:
            self.groups = ','.join(module.params['groups'])

        if module.params['expires'] is not None:
            try:
                # 'expires' is an epoch value; keep it as a struct_time for
                # later comparison/formatting.
                self.expires = time.gmtime(module.params['expires'])
            except Exception as e:
                # BUGFIX: report the invalid input value, not self.expires,
                # which is still None when the conversion above fails.
                module.fail_json(msg="Invalid value for 'expires' %s: %s" % (module.params['expires'], to_native(e)))

        if module.params['ssh_key_file'] is not None:
            self.ssh_file = module.params['ssh_key_file']
        else:
            self.ssh_file = os.path.join('.ssh', 'id_%s' % self.ssh_type)

    def check_password_encrypted(self):
        """Warn if the supplied password does not look like a crypt(3) hash."""
        # Darwin needs cleartext password, so skip validation
        if self.module.params['password'] and self.platform != 'Darwin':
            maybe_invalid = False

            # Allow setting the password to * or ! in order to disable the account
            if self.module.params['password'] in set(['*', '!']):
                maybe_invalid = False
            else:
                # : for delimiter, * for disable user, ! for lock user
                # these characters are invalid in the password
                if any(char in self.module.params['password'] for char in ':*!'):
                    maybe_invalid = True
                if '$' not in self.module.params['password']:
                    maybe_invalid = True
                else:
                    fields = self.module.params['password'].split("$")
                    if len(fields) >= 3:
                        # contains character outside the crypto constraint
                        if bool(_HASH_RE.search(fields[-1])):
                            maybe_invalid = True
                        # md5
                        if fields[1] == '1' and len(fields[-1]) != 22:
                            maybe_invalid = True
                        # sha256
                        if fields[1] == '5' and len(fields[-1]) != 43:
                            maybe_invalid = True
                        # sha512
                        if fields[1] == '6' and len(fields[-1]) != 86:
                            maybe_invalid = True
                    else:
                        maybe_invalid = True
            if maybe_invalid:
                self.module.warn("The input password appears not to have been hashed. "
                                 "The 'password' argument must be encrypted for this module to work properly.")

    def execute_command(self, cmd, use_unsafe_shell=False, data=None, obey_checkmode=True):
        """Run *cmd*, honoring check mode unless obey_checkmode is False."""
        if self.module.check_mode and obey_checkmode:
            self.module.debug('In check mode, would have run: "%s"' % cmd)
            return (0, '', '')
        else:
            # cast all args to strings ansible-modules-core/issues/4397
            cmd = [str(x) for x in cmd]
            return self.module.run_command(cmd, use_unsafe_shell=use_unsafe_shell, data=data)

    def backup_shadow(self):
        """Back up the shadow file (platforms that edit it directly call this)."""
        if not self.module.check_mode and self.SHADOWFILE:
            return self.module.backup_local(self.SHADOWFILE)

    def remove_user_userdel(self):
        """Remove the account with userdel (or luserdel when local=True)."""
        if self.local:
            command_name = 'luserdel'
        else:
            command_name = 'userdel'

        cmd = [self.module.get_bin_path(command_name, True)]
        if self.force:
            cmd.append('-f')
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)

        return self.execute_command(cmd)

    def create_user_useradd(self):
        """Create the account with useradd (or luseradd when local=True)."""
        if self.local:
            command_name = 'luseradd'
        else:
            command_name = 'useradd'

        cmd = [self.module.get_bin_path(command_name, True)]

        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)

            if self.non_unique:
                cmd.append('-o')

        if self.seuser is not None:
            cmd.append('-Z')
            cmd.append(self.seuser)
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        elif self.group_exists(self.name):
            # use the -N option (no user group) if a group already
            # exists with the same name as the user to prevent
            # errors from useradd trying to create a group when
            # USERGROUPS_ENAB is set in /etc/login.defs.
            if os.path.exists('/etc/redhat-release'):
                dist = distro.linux_distribution(full_distribution_name=False)
                major_release = int(dist[1].split('.')[0])
                if major_release <= 5:
                    cmd.append('-n')
                else:
                    cmd.append('-N')
            elif os.path.exists('/etc/SuSE-release'):
                # -N did not exist in useradd before SLE 11 and did not
                # automatically create a group
                dist = distro.linux_distribution(full_distribution_name=False)
                major_release = int(dist[1].split('.')[0])
                if major_release >= 12:
                    cmd.append('-N')
            else:
                cmd.append('-N')

        if self.groups is not None and len(self.groups):
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))

        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)

        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)

        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)

        if self.expires is not None:
            cmd.append('-e')
            # A pre-epoch value means "no expiry": pass an empty date.
            if self.expires < time.gmtime(0):
                cmd.append('')
            else:
                cmd.append(time.strftime(self.DATE_FORMAT, self.expires))

        if self.password is not None:
            cmd.append('-p')
            cmd.append(self.password)

        if self.create_home:
            # luseradd has no -m; home creation is handled separately for local.
            if not self.local:
                cmd.append('-m')

            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)
        else:
            cmd.append('-M')

        if self.system:
            cmd.append('-r')

        cmd.append(self.name)
        return self.execute_command(cmd)

    def _check_usermod_append(self):
        """Return True if this usermod/lusermod supports -a/--append."""
        # check if this version of usermod can append groups
        if self.local:
            command_name = 'lusermod'
        else:
            command_name = 'usermod'

        usermod_path = self.module.get_bin_path(command_name, True)

        # for some reason, usermod --help cannot be used by non root
        # on RH/Fedora, due to lack of execute bit for others
        if not os.access(usermod_path, os.X_OK):
            return False

        cmd = [usermod_path, '--help']
        (rc, data1, data2) = self.execute_command(cmd, obey_checkmode=False)
        helpout = data1 + data2

        # check if --append exists
        lines = to_native(helpout).split('\n')
        for line in lines:
            if line.strip().startswith('-a, --append'):
                return True

        return False

    def modify_user_usermod(self):
        """Modify the account with usermod (or lusermod when local=True).

        Returns (None, '', '') without running anything when no change
        is required.
        """
        if self.local:
            command_name = 'lusermod'
        else:
            command_name = 'usermod'

        cmd = [self.module.get_bin_path(command_name, True)]
        info = self.user_info()
        has_append = self._check_usermod_append()

        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)

            if self.non_unique:
                cmd.append('-o')

        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)

        if self.groups is not None:
            # get a list of all groups for the user, including the primary
            current_groups = self.user_group_membership(exclude_primary=False)
            groups_need_mod = False
            groups = []

            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set(remove_existing=False)
                group_diff = set(current_groups).symmetric_difference(groups)

                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                if has_append:
                                    cmd.append('-a')
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True

            if groups_need_mod:
                if self.append and not has_append:
                    # lusermod uses -A to append groups when -a is unavailable
                    cmd.append('-A')
                    cmd.append(','.join(group_diff))
                else:
                    cmd.append('-G')
                    cmd.append(','.join(groups))

        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)

        if self.home is not None and info[5] != self.home:
            cmd.append('-d')
            cmd.append(self.home)
            if self.move_home:
                cmd.append('-m')

        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)

        if self.expires is not None:

            current_expires = int(self.user_password()[1])

            if self.expires < time.gmtime(0):
                # Requested "no expiry": clear the field only if one is set.
                if current_expires >= 0:
                    cmd.append('-e')
                    cmd.append('')
            else:
                # Convert days since Epoch to seconds since Epoch as struct_time
                current_expire_date = time.gmtime(current_expires * 86400)

                # Current expires is negative or we compare year, month, and day only
                if current_expires < 0 or current_expire_date[:3] != self.expires[:3]:
                    cmd.append('-e')
                    cmd.append(time.strftime(self.DATE_FORMAT, self.expires))

        # Lock if no password or unlocked, unlock only if locked
        if self.password_lock and not info[1].startswith('!'):
            cmd.append('-L')
        elif self.password_lock is False and info[1].startswith('!'):
            # usermod will refuse to unlock a user with no password, module shows 'changed' regardless
            cmd.append('-U')

        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            cmd.append('-p')
            cmd.append(self.password)

        # skip if no changes to be made
        if len(cmd) == 1:
            return (None, '', '')

        cmd.append(self.name)
        return self.execute_command(cmd)

    def group_exists(self, group):
        """Return True if *group* exists, trying it as a gid then as a name."""
        try:
            # Try group as a gid first
            grp.getgrgid(int(group))
            return True
        except (ValueError, KeyError):
            try:
                grp.getgrnam(group)
                return True
            except KeyError:
                return False

    def group_info(self, group):
        """Return the grp entry for *group* as a list, or False if absent."""
        if not self.group_exists(group):
            return False
        try:
            # Try group as a gid first
            return list(grp.getgrgid(int(group)))
        except (ValueError, KeyError):
            return list(grp.getgrnam(group))

    def get_groups_set(self, remove_existing=True):
        """Return the requested groups as a set, failing on unknown groups.

        When remove_existing is True, the user's current primary group is
        dropped from the set (it need not be passed to usermod -G).
        """
        if self.groups is None:
            return None
        info = self.user_info()
        groups = set(x.strip() for x in self.groups.split(',') if x)
        for g in groups.copy():
            if not self.group_exists(g):
                self.module.fail_json(msg="Group %s does not exist" % (g))
            if info and remove_existing and self.group_info(g)[2] == info[3]:
                groups.remove(g)
        return groups

    def user_group_membership(self, exclude_primary=True):
        ''' Return a list of groups the user belongs to '''
        groups = []
        info = self.get_pwd_info()
        for group in grp.getgrall():
            if self.name in group.gr_mem:
                # Exclude the user's primary group by default
                if not exclude_primary:
                    groups.append(group[0])
                else:
                    if info[3] != group.gr_gid:
                        groups.append(group[0])
        return groups

    def user_exists(self):
        """Return True if the account exists in the passwd database."""
        try:
            if pwd.getpwnam(self.name):
                return True
        except KeyError:
            return False

    def get_pwd_info(self):
        """Return the raw pwd entry as a list, or False if the user is absent."""
        if not self.user_exists():
            return False
        return list(pwd.getpwnam(self.name))

    def user_info(self):
        """Return pwd info with the password field filled from shadow data."""
        if not self.user_exists():
            return False
        info = self.get_pwd_info()
        # 'x' or empty in the passwd field means the real hash lives in shadow
        if len(info[1]) == 1 or len(info[1]) == 0:
            info[1] = self.user_password()[0]
        return info

    def user_password(self):
        """Return (encrypted_password, expires) for the user.

        Prefers the spwd module when available; otherwise falls back to
        parsing SHADOWFILE directly (requires read access, i.e. root).
        """
        passwd = ''
        expires = ''
        if HAVE_SPWD:
            try:
                passwd = spwd.getspnam(self.name)[1]
                expires = spwd.getspnam(self.name)[7]
                return passwd, expires
            except KeyError:
                return passwd, expires
            except OSError as e:
                # Python 3.6 raises PermissionError instead of KeyError
                # Due to absence of PermissionError in python2.7 need to check
                # errno
                if e.errno in (errno.EACCES, errno.EPERM):
                    return passwd, expires
                raise

        if not self.user_exists():
            return passwd, expires
        elif self.SHADOWFILE:
            # Read shadow file for user's encrypted password string
            if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
                with open(self.SHADOWFILE, 'r') as f:
                    for line in f:
                        if line.startswith('%s:' % self.name):
                            passwd = line.split(':')[1]
                            # empty expiry field is normalized to -1
                            expires = line.split(':')[self.SHADOWFILE_EXPIRE_INDEX] or -1
        return passwd, expires

    def get_ssh_key_path(self):
        """Return the absolute path of the user's SSH private key file."""
        info = self.user_info()
        if os.path.isabs(self.ssh_file):
            ssh_key_file = self.ssh_file
        else:
            if not os.path.exists(info[5]) and not self.module.check_mode:
                raise Exception('User %s home directory does not exist' % self.name)
            ssh_key_file = os.path.join(info[5], self.ssh_file)
        return ssh_key_file

    def ssh_key_gen(self):
        """Generate an SSH keypair for the user with ssh-keygen.

        When a passphrase is requested, ssh-keygen is driven through a
        pty because it prompts interactively; otherwise -N '' is passed.
        Returns an (rc, out, err) tuple like execute_command().
        """
        info = self.user_info()
        overwrite = None
        try:
            ssh_key_file = self.get_ssh_key_path()
        except Exception as e:
            return (1, '', to_native(e))
        ssh_dir = os.path.dirname(ssh_key_file)
        if not os.path.exists(ssh_dir):
            if self.module.check_mode:
                return (0, '', '')
            try:
                os.mkdir(ssh_dir, int('0700', 8))
                os.chown(ssh_dir, info[2], info[3])
            except OSError as e:
                return (1, '', 'Failed to create %s: %s' % (ssh_dir, to_native(e)))
        if os.path.exists(ssh_key_file):
            if self.force:
                # ssh-keygen doesn't support overwriting the key interactively, so send 'y' to confirm
                overwrite = 'y'
            else:
                return (None, 'Key already exists, use "force: yes" to overwrite', '')
        cmd = [self.module.get_bin_path('ssh-keygen', True)]
        cmd.append('-t')
        cmd.append(self.ssh_type)
        if self.ssh_bits > 0:
            cmd.append('-b')
            cmd.append(self.ssh_bits)
        cmd.append('-C')
        cmd.append(self.ssh_comment)
        cmd.append('-f')
        cmd.append(ssh_key_file)

        if self.ssh_passphrase is not None:
            if self.module.check_mode:
                self.module.debug('In check mode, would have run: "%s"' % cmd)
                return (0, '', '')

            master_in_fd, slave_in_fd = pty.openpty()
            master_out_fd, slave_out_fd = pty.openpty()
            master_err_fd, slave_err_fd = pty.openpty()
            env = os.environ.copy()
            env['LC_ALL'] = 'C'
            try:
                p = subprocess.Popen([to_bytes(c) for c in cmd],
                                     stdin=slave_in_fd,
                                     stdout=slave_out_fd,
                                     stderr=slave_err_fd,
                                     preexec_fn=os.setsid,
                                     env=env)
                out_buffer = b''
                err_buffer = b''
                while p.poll() is None:
                    r, w, e = select.select([master_out_fd, master_err_fd], [], [], 1)
                    first_prompt = b'Enter passphrase (empty for no passphrase):'
                    second_prompt = b'Enter same passphrase again'
                    prompt = first_prompt
                    for fd in r:
                        if fd == master_out_fd:
                            chunk = os.read(master_out_fd, 10240)
                            out_buffer += chunk
                            if prompt in out_buffer:
                                os.write(master_in_fd, to_bytes(self.ssh_passphrase, errors='strict') + b'\r')
                                prompt = second_prompt
                        else:
                            chunk = os.read(master_err_fd, 10240)
                            err_buffer += chunk
                            if prompt in err_buffer:
                                os.write(master_in_fd, to_bytes(self.ssh_passphrase, errors='strict') + b'\r')
                                prompt = second_prompt
                        if b'Overwrite (y/n)?' in out_buffer or b'Overwrite (y/n)?' in err_buffer:
                            # The key was created between us checking for existence and now
                            return (None, 'Key already exists', '')

                rc = p.returncode
                out = to_native(out_buffer)
                err = to_native(err_buffer)
            except OSError as e:
                return (1, '', to_native(e))
        else:
            cmd.append('-N')
            cmd.append('')

            (rc, out, err) = self.execute_command(cmd, data=overwrite)

        if rc == 0 and not self.module.check_mode:
            # If the keys were successfully created, we should be able
            # to tweak ownership.
            os.chown(ssh_key_file, info[2], info[3])
            os.chown('%s.pub' % ssh_key_file, info[2], info[3])
        return (rc, out, err)

    def ssh_key_fingerprint(self):
        """Return ssh-keygen -l output for the user's key as (rc, out, err)."""
        ssh_key_file = self.get_ssh_key_path()
        if not os.path.exists(ssh_key_file):
            return (1, 'SSH Key file %s does not exist' % ssh_key_file, '')
        cmd = [self.module.get_bin_path('ssh-keygen', True)]
        cmd.append('-l')
        cmd.append('-f')
        cmd.append(ssh_key_file)

        return self.execute_command(cmd, obey_checkmode=False)

    def get_ssh_public_key(self):
        """Return the contents of the user's public key file, or None."""
        ssh_public_key_file = '%s.pub' % self.get_ssh_key_path()
        try:
            with open(ssh_public_key_file, 'r') as f:
                ssh_public_key = f.read().strip()
        except IOError:
            return None
        return ssh_public_key

    def create_user(self):
        # by default we use the create_user_useradd method
        return self.create_user_useradd()

    def remove_user(self):
        # by default we use the remove_user_userdel method
        return self.remove_user_userdel()

    def modify_user(self):
        # by default we use the modify_user_usermod method
        return self.modify_user_usermod()

    def create_homedir(self, path):
        """Create the home directory from the skeleton and apply the
        mode derived from the UMASK setting in /etc/login.defs."""
        if not os.path.exists(path):
            if self.skeleton is not None:
                skeleton = self.skeleton
            else:
                skeleton = '/etc/skel'

            if os.path.exists(skeleton):
                try:
                    shutil.copytree(skeleton, path, symlinks=True)
                except OSError as e:
                    self.module.exit_json(failed=True, msg="%s" % to_native(e))
            else:
                try:
                    os.makedirs(path)
                except OSError as e:
                    self.module.exit_json(failed=True, msg="%s" % to_native(e))
            # get umask from /etc/login.defs and set correct home mode
            if os.path.exists(self.LOGIN_DEFS):
                with open(self.LOGIN_DEFS, 'r') as f:
                    for line in f:
                        m = re.match(r'^UMASK\s+(\d+)$', line)
                        if m:
                            umask = int(m.group(1), 8)
                            mode = 0o777 & ~umask
                            try:
                                os.chmod(path, mode)
                            except OSError as e:
                                self.module.exit_json(failed=True, msg="%s" % to_native(e))

    def chown_homedir(self, uid, gid, path):
        """Recursively chown the home directory tree to uid:gid."""
        try:
            os.chown(path, uid, gid)
            for root, dirs, files in os.walk(path):
                for d in dirs:
                    os.chown(os.path.join(root, d), uid, gid)
                for f in files:
                    os.chown(os.path.join(root, f), uid, gid)
        except OSError as e:
            self.module.exit_json(failed=True, msg="%s" % to_native(e))
# ===========================================
class FreeBsdUser(User):
    """
    This is a FreeBSD User manipulation class - it uses the pw command
    to manipulate the user database, followed by the chpass command
    to change the password.

    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """

    platform = 'FreeBSD'
    distribution = None
    # FreeBSD keeps account data in master.passwd; its expiry field
    # (index 6) holds seconds since the Epoch, not days.
    SHADOWFILE = '/etc/master.passwd'
    SHADOWFILE_EXPIRE_INDEX = 6
    DATE_FORMAT = '%d-%b-%Y'

    def remove_user(self):
        # Remove the account with 'pw userdel'; -r also removes the home dir.
        cmd = [
            self.module.get_bin_path('pw', True),
            'userdel',
            '-n',
            self.name
        ]
        if self.remove:
            cmd.append('-r')

        return self.execute_command(cmd)

    def create_user(self):
        # Build and run 'pw useradd'; the password is set afterwards with
        # chpass because pw cannot take an encrypted hash on creation here.
        cmd = [
            self.module.get_bin_path('pw', True),
            'useradd',
            '-n',
            self.name,
        ]

        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)

            if self.non_unique:
                cmd.append('-o')

        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)

        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)

        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)

        if self.groups is not None:
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))

        if self.create_home:
            cmd.append('-m')

            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)

        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)

        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)

        if self.expires is not None:
            cmd.append('-e')
            # pw treats an expiry of 0 as "never expires"
            if self.expires < time.gmtime(0):
                cmd.append('0')
            else:
                cmd.append(str(calendar.timegm(self.expires)))

        # system cannot be handled currently - should we error if its requested?
        # create the user
        (rc, out, err) = self.execute_command(cmd)
        if rc is not None and rc != 0:
            self.module.fail_json(name=self.name, msg=err, rc=rc)

        # we have to set the password in a second command
        if self.password is not None:
            cmd = [
                self.module.get_bin_path('chpass', True),
                '-p',
                self.password,
                self.name
            ]
            return self.execute_command(cmd)

        return (rc, out, err)

    def modify_user(self):
        # Build 'pw usermod' with only the options whose values differ from
        # the current account state; run nothing when no flags were added.
        cmd = [
            self.module.get_bin_path('pw', True),
            'usermod',
            '-n',
            self.name
        ]
        cmd_len = len(cmd)
        info = self.user_info()

        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)

            if self.non_unique:
                cmd.append('-o')

        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)

        if self.home is not None:
            if (info[5] != self.home and self.move_home) or (not os.path.exists(self.home) and self.create_home):
                cmd.append('-m')
            if info[5] != self.home:
                cmd.append('-d')
                cmd.append(self.home)

            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)

        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)

        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)

        if self.login_class is not None:
            # find current login class
            user_login_class = None
            if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
                with open(self.SHADOWFILE, 'r') as f:
                    for line in f:
                        if line.startswith('%s:' % self.name):
                            user_login_class = line.split(':')[4]

            # act only if login_class change
            if self.login_class != user_login_class:
                cmd.append('-L')
                cmd.append(self.login_class)

        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups = self.get_groups_set()

            group_diff = set(current_groups).symmetric_difference(groups)
            groups_need_mod = False

            if group_diff:
                if self.append:
                    for g in groups:
                        if g in group_diff:
                            groups_need_mod = True
                            break
                else:
                    groups_need_mod = True

            if groups_need_mod:
                cmd.append('-G')
                new_groups = groups
                if self.append:
                    new_groups = groups | set(current_groups)
                cmd.append(','.join(new_groups))

        if self.expires is not None:

            current_expires = int(self.user_password()[1])

            # If expiration is negative or zero and the current expiration is greater than zero, disable expiration.
            # In OpenBSD, setting expiration to zero disables expiration. It does not expire the account.
            if self.expires <= time.gmtime(0):
                if current_expires > 0:
                    cmd.append('-e')
                    cmd.append('0')
            else:
                # master.passwd stores expiry as seconds since the Epoch;
                # convert it to a struct_time for comparison.
                current_expire_date = time.gmtime(current_expires)

                # Current expires is negative or we compare year, month, and day only
                if current_expires <= 0 or current_expire_date[:3] != self.expires[:3]:
                    cmd.append('-e')
                    cmd.append(str(calendar.timegm(self.expires)))

        # modify the user if cmd will do anything
        if cmd_len != len(cmd):
            (rc, out, err) = self.execute_command(cmd)
            if rc is not None and rc != 0:
                self.module.fail_json(name=self.name, msg=err, rc=rc)
        else:
            (rc, out, err) = (None, '', '')

        # we have to set the password in a second command
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            cmd = [
                self.module.get_bin_path('chpass', True),
                '-p',
                self.password,
                self.name
            ]
            return self.execute_command(cmd)

        # we have to lock/unlock the password in a distinct command
        if self.password_lock and not info[1].startswith('*LOCKED*'):
            cmd = [
                self.module.get_bin_path('pw', True),
                'lock',
                self.name
            ]
            if self.uid is not None and info[2] != int(self.uid):
                cmd.append('-u')
                cmd.append(self.uid)
            return self.execute_command(cmd)
        elif self.password_lock is False and info[1].startswith('*LOCKED*'):
            cmd = [
                self.module.get_bin_path('pw', True),
                'unlock',
                self.name
            ]
            if self.uid is not None and info[2] != int(self.uid):
                cmd.append('-u')
                cmd.append(self.uid)
            return self.execute_command(cmd)

        return (rc, out, err)
class DragonFlyBsdUser(FreeBsdUser):
    """
    DragonFlyBSD user management.

    DragonFlyBSD ships the same pw/chpass tooling as FreeBSD, so this
    subclass only overrides the platform identifier and reuses every
    FreeBsdUser behavior (pw to edit the user database, chpass to set
    the password).
    """

    platform = 'DragonFly'
class OpenBSDUser(User):
    """
    This is an OpenBSD User manipulation class.
    Main differences are that OpenBSD:-
     - has no concept of "system" account.
     - has no force delete user
    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """
    platform = 'OpenBSD'
    distribution = None
    SHADOWFILE = '/etc/master.passwd'

    def create_user(self):
        """Create the account with useradd(8), mapping module options to flags."""
        cmd = [self.module.get_bin_path('useradd', True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        # '*' is the conventional "no valid password" marker; do not pass it to -p
        if self.password is not None and self.password != '*':
            cmd.append('-p')
            cmd.append(self.password)
        if self.create_home:
            cmd.append('-m')
        if self.skeleton is not None:
            cmd.append('-k')
            cmd.append(self.skeleton)
        cmd.append(self.name)
        return self.execute_command(cmd)

    def remove_user_userdel(self):
        """Remove the account with userdel(8); -r also removes the home directory."""
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)

    def modify_user(self):
        """Update an existing account with usermod(8).

        Flags are appended only for attributes that actually differ from the
        current state (taken from user_info()), so an unchanged user results
        in no command being run at all.
        """
        cmd = [self.module.get_bin_path('usermod', True)]
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups_need_mod = False
            groups_option = '-S'  # -S replaces the secondary group list
            groups = []
            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set()
                group_diff = set(current_groups).symmetric_difference(groups)
                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                groups_option = '-G'  # -G appends instead of replacing
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True
            if groups_need_mod:
                cmd.append(groups_option)
                cmd.append(','.join(groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            # find current login class
            user_login_class = None
            userinfo_cmd = [self.module.get_bin_path('userinfo', True), self.name]
            (rc, out, err) = self.execute_command(userinfo_cmd, obey_checkmode=False)
            for line in out.splitlines():
                tokens = line.split()
                # check len() BEFORE indexing: a blank output line would make
                # tokens[0] raise IndexError
                if len(tokens) == 2 and tokens[0] == 'class':
                    user_login_class = tokens[1]
            # act only if login_class changed
            if self.login_class != user_login_class:
                cmd.append('-L')
                cmd.append(self.login_class)
        # a leading '*' in the password field marks a locked account
        if self.password_lock and not info[1].startswith('*'):
            cmd.append('-Z')
        elif self.password_lock is False and info[1].startswith('*'):
            cmd.append('-U')
        if self.update_password == 'always' and self.password is not None \
                and self.password != '*' and info[1] != self.password:
            cmd.append('-p')
            cmd.append(self.password)
        # skip if no changes to be made
        if len(cmd) == 1:
            return (None, '', '')
        cmd.append(self.name)
        return self.execute_command(cmd)
class NetBSDUser(User):
    """
    This is a NetBSD User manipulation class.
    Main differences are that NetBSD:-
     - has no concept of "system" account.
     - has no force delete user
    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """
    platform = 'NetBSD'
    distribution = None
    SHADOWFILE = '/etc/master.passwd'

    def create_user(self):
        """Create the account with useradd(8); NetBSD caps secondary groups at 16."""
        cmd = [self.module.get_bin_path('useradd', True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            if len(groups) > 16:
                self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        if self.password is not None:
            cmd.append('-p')
            cmd.append(self.password)
        if self.create_home:
            cmd.append('-m')
        if self.skeleton is not None:
            cmd.append('-k')
            cmd.append(self.skeleton)
        cmd.append(self.name)
        return self.execute_command(cmd)

    def remove_user_userdel(self):
        """Remove the account with userdel(8); -r also removes the home directory."""
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)

    def modify_user(self):
        """Update an existing account with usermod(8); only changed attributes are passed."""
        cmd = [self.module.get_bin_path('usermod', True)]
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups_need_mod = False
            groups = []
            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set()
                group_diff = set(current_groups).symmetric_difference(groups)
                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                # usermod -G replaces the list, so append mode
                                # must pass the union of current and requested
                                groups = set(current_groups).union(groups)
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True
            if groups_need_mod:
                if len(groups) > 16:
                    self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
                cmd.append('-G')
                cmd.append(','.join(groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            cmd.append('-p')
            cmd.append(self.password)
        # NetBSD marks a locked password with the '*LOCKED*' prefix; -C yes/no
        # toggles it.  Flag and value must be SEPARATE argv entries — the old
        # cmd.append('-C yes') passed a single unrecognized '-C yes' argument.
        if self.password_lock and not info[1].startswith('*LOCKED*'):
            cmd.append('-C')
            cmd.append('yes')
        elif self.password_lock is False and info[1].startswith('*LOCKED*'):
            cmd.append('-C')
            cmd.append('no')
        # skip if no changes to be made
        if len(cmd) == 1:
            return (None, '', '')
        cmd.append(self.name)
        return self.execute_command(cmd)
class SunOS(User):
    """
    This is a SunOS User manipulation class - The main difference between
    this class and the generic user class is that Solaris-type distros
    don't support the concept of a "system" account and we need to
    edit the /etc/shadow file manually to set a password. (Ugh)
    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
      - user_info()
    """
    platform = 'SunOS'
    distribution = None
    SHADOWFILE = '/etc/shadow'
    USER_ATTR = '/etc/user_attr'

    def get_password_defaults(self):
        """Read MINWEEKS/MAXWEEKS/WARNWEEKS password aging defaults.

        Returns a (minweeks, maxweeks, warnweeks) tuple of strings; each is
        '' when the key is absent from /etc/default/passwd.
        """
        try:
            minweeks = ''
            maxweeks = ''
            warnweeks = ''
            with open("/etc/default/passwd", 'r') as f:
                for line in f:
                    line = line.strip()
                    if (line.startswith('#') or line == ''):
                        continue
                    # strip a trailing '#comment' from the line, if any
                    m = re.match(r'^([^#]*)#(.*)$', line)
                    if m:  # The line contains a hash / comment
                        line = m.group(1)
                    key, value = line.split('=')
                    if key == "MINWEEKS":
                        minweeks = value.rstrip('\n')
                    elif key == "MAXWEEKS":
                        maxweeks = value.rstrip('\n')
                    elif key == "WARNWEEKS":
                        warnweeks = value.rstrip('\n')
        except Exception as err:
            self.module.fail_json(msg="failed to read /etc/default/passwd: %s" % to_native(err))
        return (minweeks, maxweeks, warnweeks)

    def remove_user(self):
        """Remove the account with userdel; -r also removes the home directory."""
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)

    def create_user(self):
        """Create the account with useradd, then set the password by editing
        /etc/shadow directly (Solaris useradd cannot set a password hash)."""
        cmd = [self.module.get_bin_path('useradd', True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.create_home:
            cmd.append('-m')
        if self.skeleton is not None:
            cmd.append('-k')
            cmd.append(self.skeleton)
        if self.profile is not None:
            cmd.append('-P')
            cmd.append(self.profile)
        if self.authorization is not None:
            cmd.append('-A')
            cmd.append(self.authorization)
        if self.role is not None:
            cmd.append('-R')
            cmd.append(self.role)
        cmd.append(self.name)
        (rc, out, err) = self.execute_command(cmd)
        if rc is not None and rc != 0:
            self.module.fail_json(name=self.name, msg=err, rc=rc)
        if not self.module.check_mode:
            # we have to set the password by editing the /etc/shadow file
            if self.password is not None:
                self.backup_shadow()
                minweeks, maxweeks, warnweeks = self.get_password_defaults()
                try:
                    lines = []
                    with open(self.SHADOWFILE, 'rb') as f:
                        for line in f:
                            line = to_native(line, errors='surrogate_or_strict')
                            fields = line.strip().split(':')
                            if not fields[0] == self.name:
                                lines.append(line)
                                continue
                            fields[1] = self.password
                            # lastchg field: days since the epoch
                            fields[2] = str(int(time.time() // 86400))
                            if minweeks:
                                try:
                                    fields[3] = str(int(minweeks) * 7)
                                except ValueError:
                                    # mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
                                    pass
                            if maxweeks:
                                try:
                                    fields[4] = str(int(maxweeks) * 7)
                                except ValueError:
                                    # mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
                                    pass
                            if warnweeks:
                                try:
                                    fields[5] = str(int(warnweeks) * 7)
                                except ValueError:
                                    # mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
                                    pass
                            line = ':'.join(fields)
                            lines.append('%s\n' % line)
                    with open(self.SHADOWFILE, 'w+') as f:
                        f.writelines(lines)
                except Exception as err:
                    self.module.fail_json(msg="failed to update users password: %s" % to_native(err))
        return (rc, out, err)

    def modify_user_usermod(self):
        """Update an existing account with usermod, then update the password
        (if requested) by editing /etc/shadow directly."""
        cmd = [self.module.get_bin_path('usermod', True)]
        cmd_len = len(cmd)
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups = self.get_groups_set()
            group_diff = set(current_groups).symmetric_difference(groups)
            groups_need_mod = False
            if group_diff:
                if self.append:
                    for g in groups:
                        if g in group_diff:
                            groups_need_mod = True
                            break
                else:
                    groups_need_mod = True
            if groups_need_mod:
                cmd.append('-G')
                new_groups = groups
                if self.append:
                    # -G replaces the list, so append mode passes the union
                    new_groups.update(current_groups)
                cmd.append(','.join(new_groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        # info[7..9] are profiles/auths/roles from _user_attr_info()
        if self.profile is not None and info[7] != self.profile:
            cmd.append('-P')
            cmd.append(self.profile)
        if self.authorization is not None and info[8] != self.authorization:
            cmd.append('-A')
            cmd.append(self.authorization)
        if self.role is not None and info[9] != self.role:
            cmd.append('-R')
            cmd.append(self.role)
        # modify the user if cmd will do anything
        if cmd_len != len(cmd):
            cmd.append(self.name)
            (rc, out, err) = self.execute_command(cmd)
            if rc is not None and rc != 0:
                self.module.fail_json(name=self.name, msg=err, rc=rc)
        else:
            (rc, out, err) = (None, '', '')
        # we have to set the password by editing the /etc/shadow file
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            self.backup_shadow()
            (rc, out, err) = (0, '', '')
            if not self.module.check_mode:
                minweeks, maxweeks, warnweeks = self.get_password_defaults()
                try:
                    lines = []
                    with open(self.SHADOWFILE, 'rb') as f:
                        for line in f:
                            line = to_native(line, errors='surrogate_or_strict')
                            fields = line.strip().split(':')
                            if not fields[0] == self.name:
                                lines.append(line)
                                continue
                            fields[1] = self.password
                            fields[2] = str(int(time.time() // 86400))
                            # tolerate non-integer values, exactly like
                            # create_user() does (Solaris ignores them too)
                            if minweeks:
                                try:
                                    fields[3] = str(int(minweeks) * 7)
                                except ValueError:
                                    pass
                            if maxweeks:
                                try:
                                    fields[4] = str(int(maxweeks) * 7)
                                except ValueError:
                                    pass
                            if warnweeks:
                                try:
                                    fields[5] = str(int(warnweeks) * 7)
                                except ValueError:
                                    pass
                            line = ':'.join(fields)
                            lines.append('%s\n' % line)
                    with open(self.SHADOWFILE, 'w+') as f:
                        f.writelines(lines)
                    rc = 0
                except Exception as err:
                    self.module.fail_json(msg="failed to update users password: %s" % to_native(err))
        return (rc, out, err)

    def user_info(self):
        """Extend the generic passwd-based info with Solaris RBAC attributes."""
        info = super(SunOS, self).user_info()
        if info:
            info += self._user_attr_info()
        return info

    def _user_attr_info(self):
        """Return [profiles, auths, roles] for this user from /etc/user_attr."""
        info = [''] * 3
        with open(self.USER_ATTR, 'r') as file_handler:
            for line in file_handler:
                lines = line.strip().split('::::')
                if lines[0] == self.name:
                    tmp = dict(x.split('=') for x in lines[1].split(';'))
                    info[0] = tmp.get('profiles', '')
                    info[1] = tmp.get('auths', '')
                    info[2] = tmp.get('roles', '')
        return info
class DarwinUser(User):
    """
    This is a Darwin macOS User manipulation class.
    Main differences are that Darwin:-
      - Handles accounts in a database managed by dscl(1)
      - Has no useradd/groupadd
      - Does not create home directories
      - User password must be cleartext
      - UID must be given
      - System users must be under 500
    This overrides the following methods from the generic class:-
      - user_exists()
      - create_user()
      - remove_user()
      - modify_user()
    """
    platform = 'Darwin'
    distribution = None
    SHADOWFILE = None
    dscl_directory = '.'
    # (module option, dscl record attribute) pairs synced to the user record
    fields = [
        ('comment', 'RealName'),
        ('home', 'NFSHomeDirectory'),
        ('shell', 'UserShell'),
        ('uid', 'UniqueID'),
        ('group', 'PrimaryGroupID'),
        ('hidden', 'IsHidden'),
    ]

    def __init__(self, module):
        super(DarwinUser, self).__init__(module)
        # make the user hidden if option is set or defer to system option;
        # note: when hidden is None and system is false, hidden stays None
        # and IsHidden is simply not pushed to dscl
        if self.hidden is None:
            if self.system:
                self.hidden = 1
        elif self.hidden:
            self.hidden = 1
        else:
            self.hidden = 0
        # ('hidden', 'IsHidden') is already in the class-level field list;
        # unconditionally appending it here (as this code previously did)
        # duplicated the entry AND grew the shared class attribute on every
        # instantiation, so the guard keeps this a no-op unless removed above.
        if self.hidden is not None and ('hidden', 'IsHidden') not in self.fields:
            self.fields.append(('hidden', 'IsHidden'))

    def _get_dscl(self):
        """Return the dscl invocation prefix: [dscl, <directory node>]."""
        return [self.module.get_bin_path('dscl', True), self.dscl_directory]

    def _list_user_groups(self):
        """Return the list of group names SELF.NAME is currently a member of."""
        cmd = self._get_dscl()
        cmd += ['-search', '/Groups', 'GroupMembership', self.name]
        (rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
        groups = []
        for line in out.splitlines():
            # skip continuation/terminator lines of dscl -search output
            if line.startswith(' ') or line.startswith(')'):
                continue
            groups.append(line.split()[0])
        return groups

    def _get_user_property(self, property):
        '''Return user PROPERTY as given by dscl(1) read or None if not found.'''
        cmd = self._get_dscl()
        cmd += ['-read', '/Users/%s' % self.name, property]
        (rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
        if rc != 0:
            return None
        # from dscl(1)
        # if property contains embedded spaces, the list will instead be
        # displayed one entry per line, starting on the line after the key.
        lines = out.splitlines()
        if len(lines) == 1:
            return lines[0].split(': ')[1]
        else:
            if len(lines) > 2:
                return '\n'.join([lines[1].strip()] + lines[2:])
            else:
                if len(lines) == 2:
                    return lines[1].strip()
                else:
                    return None

    def _get_next_uid(self, system=None):
        '''
        Return the next available uid. If system=True, then
        uid should be below 500, if possible.
        '''
        cmd = self._get_dscl()
        cmd += ['-list', '/Users', 'UniqueID']
        (rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
        if rc != 0:
            self.module.fail_json(
                msg="Unable to get the next available uid",
                rc=rc,
                out=out,
                err=err
            )
        max_uid = 0
        max_system_uid = 0
        for line in out.splitlines():
            current_uid = int(line.split(' ')[-1])
            if max_uid < current_uid:
                max_uid = current_uid
            if max_system_uid < current_uid and current_uid < 500:
                max_system_uid = current_uid
        if system and (0 < max_system_uid < 499):
            return max_system_uid + 1
        return max_uid + 1

    def _change_user_password(self):
        '''Change password for SELF.NAME against SELF.PASSWORD.
        Please note that password must be cleartext.
        '''
        # some documentation on how passwords are stored on OSX:
        # http://blog.lostpassword.com/2012/07/cracking-mac-os-x-lion-accounts-passwords/
        # http://null-byte.wonderhowto.com/how-to/hack-mac-os-x-lion-passwords-0130036/
        # http://pastebin.com/RYqxi7Ca
        # on OSX 10.8+ hash is SALTED-SHA512-PBKDF2
        # https://pythonhosted.org/passlib/lib/passlib.hash.pbkdf2_digest.html
        # https://gist.github.com/nueh/8252572
        cmd = self._get_dscl()
        if self.password:
            cmd += ['-passwd', '/Users/%s' % self.name, self.password]
        else:
            # no password given: disable password login with the '*' marker
            cmd += ['-create', '/Users/%s' % self.name, 'Password', '*']
        (rc, out, err) = self.execute_command(cmd)
        if rc != 0:
            self.module.fail_json(msg='Error when changing password', err=err, out=out, rc=rc)
        return (rc, out, err)

    def _make_group_numerical(self):
        '''Convert SELF.GROUP to its numerical gid (as a string, for dscl).'''
        if self.group is None:
            self.group = 'nogroup'
        try:
            self.group = grp.getgrnam(self.group).gr_gid
        except KeyError:
            self.module.fail_json(msg='Group "%s" not found. Try to create it first using "group" module.' % self.group)
        # We need to pass a string to dscl
        self.group = str(self.group)

    def __modify_group(self, group, action):
        '''Add or remove SELF.NAME to or from GROUP depending on ACTION.
        ACTION can be 'add' or 'remove' otherwise 'remove' is assumed. '''
        if action == 'add':
            option = '-a'
        else:
            option = '-d'
        cmd = ['dseditgroup', '-o', 'edit', option, self.name, '-t', 'user', group]
        (rc, out, err) = self.execute_command(cmd)
        if rc != 0:
            self.module.fail_json(msg='Cannot %s user "%s" to group "%s".'
                                      % (action, self.name, group), err=err, out=out, rc=rc)
        return (rc, out, err)

    def _modify_group(self):
        '''Synchronize SELF.NAME's group memberships with SELF.GROUPS.
        Returns (rc, out, err, changed).'''
        rc = 0
        out = ''
        err = ''
        changed = False
        current = set(self._list_user_groups())
        if self.groups is not None:
            target = set(self.groups.split(','))
        else:
            target = set([])
        if self.append is False:
            for remove in current - target:
                (_rc, _out, _err) = self.__modify_group(remove, 'delete')
                # accumulate the command's rc — the old 'rc += rc' stayed 0
                # forever and masked removal failures
                rc += _rc
                out += _out
                err += _err
                changed = True
        for add in target - current:
            (_rc, _out, _err) = self.__modify_group(add, 'add')
            rc += _rc
            out += _out
            err += _err
            changed = True
        return (rc, out, err, changed)

    def _update_system_user(self):
        '''Hide or show user on login window according SELF.SYSTEM.
        Returns 0 if a change has been made, None otherwise.'''
        plist_file = '/Library/Preferences/com.apple.loginwindow.plist'
        # http://support.apple.com/kb/HT5017?viewlocale=en_US
        cmd = ['defaults', 'read', plist_file, 'HiddenUsersList']
        (rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
        # returned value is
        # (
        #   "_userA",
        #   "_UserB",
        #   userc
        # )
        hidden_users = []
        for x in out.splitlines()[1:-1]:
            try:
                x = x.split('"')[1]
            except IndexError:
                x = x.strip()
            hidden_users.append(x)
        if self.system:
            if self.name not in hidden_users:
                cmd = ['defaults', 'write', plist_file, 'HiddenUsersList', '-array-add', self.name]
                (rc, out, err) = self.execute_command(cmd)
                if rc != 0:
                    self.module.fail_json(msg='Cannot add user "%s" to hidden user list.' % self.name, err=err, out=out, rc=rc)
                return 0
        else:
            if self.name in hidden_users:
                del (hidden_users[hidden_users.index(self.name)])
                cmd = ['defaults', 'write', plist_file, 'HiddenUsersList', '-array'] + hidden_users
                (rc, out, err) = self.execute_command(cmd)
                if rc != 0:
                    self.module.fail_json(msg='Cannot remove user "%s" from hidden user list.' % self.name, err=err, out=out, rc=rc)
                return 0

    def user_exists(self):
        '''Check if SELF.NAME is a known user on the system.'''
        cmd = self._get_dscl()
        cmd += ['-list', '/Users/%s' % self.name]
        (rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
        return rc == 0

    def remove_user(self):
        '''Delete SELF.NAME. If SELF.FORCE is true, remove its home directory.'''
        info = self.user_info()
        cmd = self._get_dscl()
        cmd += ['-delete', '/Users/%s' % self.name]
        (rc, out, err) = self.execute_command(cmd)
        if rc != 0:
            self.module.fail_json(msg='Cannot delete user "%s".' % self.name, err=err, out=out, rc=rc)
        if self.force:
            if os.path.exists(info[5]):
                shutil.rmtree(info[5])
                out += "Removed %s" % info[5]
        return (rc, out, err)

    def create_user(self, command_name='dscl'):
        """Create the user record with dscl(1) and populate its attributes.

        Returns (rc, out, err).  The unpacking of execute_command() results
        previously swapped stdout/stderr throughout this method; they are now
        kept in (rc, out, err) order consistently.
        """
        cmd = self._get_dscl()
        cmd += ['-create', '/Users/%s' % self.name]
        (rc, out, err) = self.execute_command(cmd)
        if rc != 0:
            self.module.fail_json(msg='Cannot create user "%s".' % self.name, err=err, out=out, rc=rc)
        self._make_group_numerical()
        if self.uid is None:
            self.uid = str(self._get_next_uid(self.system))
        # Homedir is not created by default
        if self.create_home:
            if self.home is None:
                self.home = '/Users/%s' % self.name
            if not self.module.check_mode:
                if not os.path.exists(self.home):
                    os.makedirs(self.home)
                self.chown_homedir(int(self.uid), int(self.group), self.home)
        # dscl sets shell to /usr/bin/false when UserShell is not specified
        # so set the shell to /bin/bash when the user is not a system user
        if not self.system and self.shell is None:
            self.shell = '/bin/bash'
        for field in self.fields:
            if field[0] in self.__dict__ and self.__dict__[field[0]]:
                cmd = self._get_dscl()
                cmd += ['-create', '/Users/%s' % self.name, field[1], self.__dict__[field[0]]]
                (rc, _out, _err) = self.execute_command(cmd)
                if rc != 0:
                    self.module.fail_json(msg='Cannot add property "%s" to user "%s".' % (field[0], self.name), err=err, out=out, rc=rc)
                out += _out
                err += _err
        (rc, _out, _err) = self._change_user_password()
        out += _out
        err += _err
        self._update_system_user()
        # here we don't care about change status since it is a creation,
        # thus changed is always true.
        if self.groups:
            (rc, _out, _err, changed) = self._modify_group()
            out += _out
            err += _err
        return (rc, out, err)

    def modify_user(self):
        """Bring the existing user record in line with the module options.

        Returns (changed, out, err) where changed is the rc of the last
        mutating command, or None when nothing was changed.
        """
        changed = None
        out = ''
        err = ''
        if self.group:
            self._make_group_numerical()
        for field in self.fields:
            if field[0] in self.__dict__ and self.__dict__[field[0]]:
                current = self._get_user_property(field[1])
                if current is None or current != self.__dict__[field[0]]:
                    cmd = self._get_dscl()
                    cmd += ['-create', '/Users/%s' % self.name, field[1], self.__dict__[field[0]]]
                    (rc, _out, _err) = self.execute_command(cmd)
                    if rc != 0:
                        self.module.fail_json(
                            msg='Cannot update property "%s" for user "%s".'
                                % (field[0], self.name), err=err, out=out, rc=rc)
                    changed = rc
                    out += _out
                    err += _err
        if self.update_password == 'always' and self.password is not None:
            (rc, _out, _err) = self._change_user_password()
            out += _out
            err += _err
            changed = rc
        if self.groups:
            (rc, _out, _err, _changed) = self._modify_group()
            out += _out
            err += _err
            if _changed is True:
                changed = rc
        rc = self._update_system_user()
        if rc == 0:
            changed = rc
        return (changed, out, err)
class AIX(User):
    """
    This is an AIX User manipulation class.

    AIX useradd/usermod cannot set the password hash directly, so the
    password is applied with ``chpasswd -e`` (pre-encrypted) instead.
    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """
    platform = 'AIX'
    distribution = None
    SHADOWFILE = '/etc/security/passwd'
    def remove_user(self):
        """Remove the account with userdel; -r also removes the home directory."""
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)
    def create_user_useradd(self, command_name='useradd'):
        """Create the account with useradd, then set the password via chpasswd."""
        cmd = [self.module.get_bin_path(command_name, True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None and len(self.groups):
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.create_home:
            cmd.append('-m')
        if self.skeleton is not None:
            cmd.append('-k')
            cmd.append(self.skeleton)
        cmd.append(self.name)
        (rc, out, err) = self.execute_command(cmd)
        # set password with chpasswd: '-e' means the supplied password is
        # already encrypted, '-c' clears the password change flags
        if self.password is not None:
            cmd = []
            cmd.append(self.module.get_bin_path('chpasswd', True))
            cmd.append('-e')
            cmd.append('-c')
            self.execute_command(cmd, data="%s:%s" % (self.name, self.password))
        return (rc, out, err)
    def modify_user_usermod(self):
        """Update an existing account with usermod; the password (if any) is
        updated separately with chpasswd, and both results are combined."""
        cmd = [self.module.get_bin_path('usermod', True)]
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups_need_mod = False
            groups = []
            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set()
                group_diff = set(current_groups).symmetric_difference(groups)
                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True
            if groups_need_mod:
                cmd.append('-G')
                cmd.append(','.join(groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        # skip if no changes to be made
        if len(cmd) == 1:
            (rc, out, err) = (None, '', '')
        else:
            cmd.append(self.name)
            (rc, out, err) = self.execute_command(cmd)
        # set password with chpasswd ('-e' = pre-encrypted, '-c' = clear flags)
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            cmd = []
            cmd.append(self.module.get_bin_path('chpasswd', True))
            cmd.append('-e')
            cmd.append('-c')
            (rc2, out2, err2) = self.execute_command(cmd, data="%s:%s" % (self.name, self.password))
        else:
            (rc2, out2, err2) = (None, '', '')
        # prefer the usermod rc; fall back to the chpasswd rc when usermod was skipped
        if rc is not None:
            return (rc, out + out2, err + err2)
        else:
            return (rc2, out + out2, err + err2)
class HPUX(User):
    """
    This is an HP-UX User manipulation class.

    HP-UX ships SAM wrappers of the usual tools (useradd.sam etc.), which
    are invoked by absolute path here.
    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """
    platform = 'HP-UX'
    distribution = None
    SHADOWFILE = '/etc/shadow'
    def create_user(self):
        """Create the account with useradd.sam, mapping module options to flags."""
        cmd = ['/usr/sam/lbin/useradd.sam']
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
        if self.non_unique:
            cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None and len(self.groups):
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.password is not None:
            cmd.append('-p')
            cmd.append(self.password)
        # -m creates the home directory, -M suppresses it
        if self.create_home:
            cmd.append('-m')
        else:
            cmd.append('-M')
        if self.system:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)
    def remove_user(self):
        """Remove the account with userdel.sam; -F forces, -r removes the home."""
        cmd = ['/usr/sam/lbin/userdel.sam']
        if self.force:
            cmd.append('-F')
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)
    def modify_user(self):
        """Update an existing account with usermod.sam; only changed attributes
        (compared against user_info()) are passed on the command line."""
        cmd = ['/usr/sam/lbin/usermod.sam']
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups_need_mod = False
            groups = []
            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set(remove_existing=False)
                group_diff = set(current_groups).symmetric_difference(groups)
                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True
            if groups_need_mod:
                cmd.append('-G')
                new_groups = groups
                if self.append:
                    # -G replaces the list, so append mode passes the union
                    new_groups = groups | set(current_groups)
                cmd.append(','.join(new_groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            cmd.append('-d')
            cmd.append(self.home)
            if self.move_home:
                cmd.append('-m')
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            # -F forces the change without prompting
            cmd.append('-F')
            cmd.append('-p')
            cmd.append(self.password)
        # skip if no changes to be made
        if len(cmd) == 1:
            return (None, '', '')
        cmd.append(self.name)
        return self.execute_command(cmd)
def main():
    """Ansible entry point: ensure a user account is present or absent.

    Dispatches to the platform-specific User subclass, runs the requested
    state change, and reports changed/uid/group/home/shell plus optional
    SSH key details back to Ansible.
    """
    ssh_defaults = dict(
        bits=0,
        type='rsa',
        passphrase=None,
        comment='ansible-generated on %s' % socket.gethostname()
    )
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(type='str', default='present', choices=['absent', 'present']),
            name=dict(type='str', required=True, aliases=['user']),
            uid=dict(type='int'),
            non_unique=dict(type='bool', default=False),
            group=dict(type='str'),
            groups=dict(type='list'),
            comment=dict(type='str'),
            home=dict(type='path'),
            shell=dict(type='str'),
            password=dict(type='str', no_log=True),
            login_class=dict(type='str'),
            # following options are specific to macOS
            hidden=dict(type='bool'),
            # following options are specific to selinux
            seuser=dict(type='str'),
            # following options are specific to userdel
            force=dict(type='bool', default=False),
            remove=dict(type='bool', default=False),
            # following options are specific to useradd
            create_home=dict(type='bool', default=True, aliases=['createhome']),
            skeleton=dict(type='str'),
            system=dict(type='bool', default=False),
            # following options are specific to usermod
            move_home=dict(type='bool', default=False),
            append=dict(type='bool', default=False),
            # following are specific to ssh key generation
            generate_ssh_key=dict(type='bool'),
            ssh_key_bits=dict(type='int', default=ssh_defaults['bits']),
            ssh_key_type=dict(type='str', default=ssh_defaults['type']),
            ssh_key_file=dict(type='path'),
            ssh_key_comment=dict(type='str', default=ssh_defaults['comment']),
            ssh_key_passphrase=dict(type='str', no_log=True),
            update_password=dict(type='str', default='always', choices=['always', 'on_create']),
            expires=dict(type='float'),
            password_lock=dict(type='bool'),
            local=dict(type='bool'),
            profile=dict(type='str'),
            authorization=dict(type='str'),
            role=dict(type='str'),
        ),
        supports_check_mode=True
    )
    user = User(module)
    user.check_password_encrypted()
    module.debug('User instantiated - platform %s' % user.platform)
    if user.distribution:
        module.debug('User instantiated - distribution %s' % user.distribution)
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = user.name
    result['state'] = user.state
    if user.state == 'absent':
        if user.user_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = user.remove_user()
            if rc != 0:
                module.fail_json(name=user.name, msg=err, rc=rc)
        result['force'] = user.force
        result['remove'] = user.remove
    elif user.state == 'present':
        if not user.user_exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = user.create_user()
            # NOTE(review): in check mode 'system' is reported as the user
            # name — looks odd but preserved for result compatibility.
            if module.check_mode:
                result['system'] = user.name
            else:
                result['system'] = user.system
                result['create_home'] = user.create_home
        else:
            # modify user (note: this function is check mode aware)
            (rc, out, err) = user.modify_user()
            result['append'] = user.append
            result['move_home'] = user.move_home
        if rc is not None and rc != 0:
            module.fail_json(name=user.name, msg=err, rc=rc)
        if user.password is not None:
            result['password'] = 'NOT_LOGGING_PASSWORD'
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True
    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err
    if user.user_exists() and user.state == 'present':
        info = user.user_info()
        if info is False:
            # bail out instead of indexing into a bool below, which would
            # raise TypeError and mask the real lookup failure
            result['msg'] = "failed to look up user name: %s" % user.name
            result['failed'] = True
            module.fail_json(**result)
        result['uid'] = info[2]
        result['group'] = info[3]
        result['comment'] = info[4]
        result['home'] = info[5]
        result['shell'] = info[6]
        if user.groups is not None:
            result['groups'] = user.groups
        # handle missing homedirs (reuse the info already fetched above;
        # nothing between here and the lookup changes the account)
        if user.home is None:
            user.home = info[5]
        if not os.path.exists(user.home) and user.create_home:
            if not module.check_mode:
                user.create_homedir(user.home)
                user.chown_homedir(info[2], info[3], user.home)
            result['changed'] = True
        # deal with ssh key
        if user.sshkeygen:
            # generate ssh key (note: this function is check mode aware)
            (rc, out, err) = user.ssh_key_gen()
            if rc is not None and rc != 0:
                module.fail_json(name=user.name, msg=err, rc=rc)
            if rc == 0:
                result['changed'] = True
            (rc, out, err) = user.ssh_key_fingerprint()
            if rc == 0:
                result['ssh_fingerprint'] = out.strip()
            else:
                result['ssh_fingerprint'] = err.strip()
            result['ssh_key_file'] = user.get_ssh_key_path()
            result['ssh_public_key'] = user.get_ssh_public_key()
    module.exit_json(**result)
# import module snippets
if __name__ == '__main__':
    main()
| codeparrot/github-code-clean |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import utils
import config, time, os, re, pprint
import hashlib
import weblib, traceback, forms, valuespec, inventory, visuals, metrics
import sites
import bi
import inspect
import livestatus
from log import logger
from gui_exceptions import MKGeneralException, MKUserError, MKInternalError
import cmk.paths
# Datastructures and functions needed before plugins can be loaded
# Language the plugins were last loaded with (False = not loaded yet).
# Checked by load_plugins() to avoid redundant reloads per request.
loaded_with_language = False
# Global DisplayOptions instance for the current page; None until it is
# initialized elsewhere (presumably once per request) — see usages below.
display_options = None
# Load all view plugins
def load_plugins(force):
    """Load all view plugins and declare their permissions.

    Runs at most once per request language unless ``force`` is set. On the
    fast path (already loaded for the current language) only the host tag
    painters and alarm sound states are refreshed, since those may change
    at runtime.
    """
    global loaded_with_language
    if loaded_with_language == current_language and not force:
        # always reload the hosttag painters, because new hosttags might have been
        # added during runtime
        load_host_tag_painters()
        clear_alarm_sound_states()
        return
    # Reset all plugin registries before (re)loading the plugin files,
    # which populate them as a side effect of load_web_plugins() below.
    global multisite_datasources ; multisite_datasources = {}
    global multisite_layouts ; multisite_layouts = {}
    global multisite_painters ; multisite_painters = {}
    global multisite_sorters ; multisite_sorters = {}
    global multisite_builtin_views ; multisite_builtin_views = {}
    global multisite_painter_options ; multisite_painter_options = {}
    global multisite_commands ; multisite_commands = []
    global multisite_command_groups ; multisite_command_groups = {}
    global view_hooks ; view_hooks = {}
    global inventory_displayhints ; inventory_displayhints = {}
    config.declare_permission_section("action", _("Commands on host and services"), do_sort = True)
    utils.load_web_plugins("views", globals())
    load_host_tag_painters()
    clear_alarm_sound_states()
    # This must be set after plugin loading to make broken plugins raise
    # exceptions all the time and not only the first time (when the plugins
    # are loaded).
    loaded_with_language = current_language
    # Declare permissions for builtin views
    config.declare_permission_section("view", _("Multisite Views"), do_sort = True)
    for name, view in multisite_builtin_views.items():
        config.declare_permission("view.%s" % name,
                format_view_title(view),
                "%s - %s" % (name, _u(view["description"])),
                config.builtin_role_ids)
    # Make sure that custom views also have permissions
    config.declare_dynamic_permissions(lambda: visuals.declare_custom_permissions('views'))
    declare_inventory_columns()
# Load all views - users or builtins
def load_views():
    """(Re)load all views into the module level caches.

    Populates ``multisite_views`` (all views, user defined plus builtin)
    and ``available_views`` (the subset permitted for the current user),
    then migrates views stored in pre 1.2.6 formats.
    """
    global multisite_views, available_views

    def _has_known_datasource(view_spec):
        return view_spec['datasource'] in multisite_datasources

    # Skip views which do not belong to known datasources
    multisite_views = visuals.load(
        'views', multisite_builtin_views,
        skip_func=lambda v: not _has_known_datasource(v))
    available_views = visuals.available('views', multisite_views)
    transform_old_views()
def permitted_views():
    """Return the views the current user is permitted to use.

    Loads the views lazily: when called before load_views() has populated
    the module level cache (e.g. while handling AJAX calls), the cache is
    populated first.
    """
    try:
        return available_views
    # Only the "cache not populated yet" case is expected here; a bare
    # "except:" would also have swallowed genuine programming errors.
    except NameError:
        # In some cases, for example when handling AJAX calls the views might
        # have not been loaded yet
        load_views()
    return available_views
def all_views():
    """Return all loaded views (of all users), not only the permitted ones."""
    return multisite_views
# Convert views that are saved in the pre 1.2.6-style
# FIXME: Can be removed one day. Mark as incompatible change or similar.
def transform_old_views():
    """Migrate all loaded views from pre 1.2.6 formats to the current format.

    Handles three legacy variants in-place on ``multisite_views``:
    - views with the intermediate 'context_type' attribute (migrated daily
      snapshots from 2014-08 .. 2014-10) via visuals.transform_old_visual()
    - views.mk legacy format without 'single_infos': derive it from the
      hidden filters
    - views without a 'context': build it from show/hide/hard filters and
      their variables, then drop the obsolete attributes
    """
    # We changed different filters since the visuals-rewrite. This must be treated here, since
    # we need to transform views which have been created with the old filter var names.
    # Changes which have been made so far:
    # (Hoisted out of the loops below — this mapping is constant, so there
    # is no point in rebuilding it for every filter of every view.)
    changed_filter_vars = {
        'serviceregex': { # Name of the filter
            # old var name: new var name
            'service': 'service_regex',
        },
        'hostregex': {
            'host': 'host_regex',
        },
        'hostgroupnameregex': {
            'hostgroup_name': 'hostgroup_regex',
        },
        'servicegroupnameregex': {
            'servicegroup_name': 'servicegroup_regex',
        },
        'opthostgroup': {
            'opthostgroup': 'opthost_group',
            'neg_opthostgroup': 'neg_opthost_group',
        },
        'optservicegroup': {
            'optservicegroup': 'optservice_group',
            'neg_optservicegroup': 'neg_optservice_group',
        },
        'hostgroup': {
            'hostgroup': 'host_group',
            'neg_hostgroup': 'neg_host_group',
        },
        'servicegroup': {
            'servicegroup': 'service_group',
            'neg_servicegroup': 'neg_service_group',
        },
        'host_contactgroup': {
            'host_contactgroup': 'host_contact_group',
            'neg_host_contactgroup': 'neg_host_contact_group',
        },
        'service_contactgroup': {
            'service_contactgroup': 'service_contact_group',
            'neg_service_contactgroup': 'neg_service_contact_group',
        },
    }
    for view in multisite_views.values():
        ds_name = view['datasource']
        datasource = multisite_datasources[ds_name]
        if "context" not in view: # legacy views did not have this explicitly
            view.setdefault("user_sortable", True)
        if 'context_type' in view:
            # This code transforms views from user_views.mk which have been migrated with
            # daily snapshots from 2014-08 till beginning 2014-10.
            visuals.transform_old_visual(view)
        elif 'single_infos' not in view:
            # This tries to map the datasource and additional settings of the
            # views to get the correct view context
            #
            # This code transforms views from views.mk (legacy format) to the current format
            try:
                hide_filters = view.get('hide_filters')
                if 'service' in hide_filters and 'host' in hide_filters:
                    view['single_infos'] = ['service', 'host']
                elif 'service' in hide_filters and 'host' not in hide_filters:
                    view['single_infos'] = ['service']
                elif 'host' in hide_filters:
                    view['single_infos'] = ['host']
                elif 'hostgroup' in hide_filters:
                    view['single_infos'] = ['hostgroup']
                elif 'servicegroup' in hide_filters:
                    view['single_infos'] = ['servicegroup']
                elif 'aggr_service' in hide_filters:
                    view['single_infos'] = ['service']
                elif 'aggr_name' in hide_filters:
                    view['single_infos'] = ['aggr']
                elif 'aggr_group' in hide_filters:
                    view['single_infos'] = ['aggr_group']
                elif 'log_contact_name' in hide_filters:
                    view['single_infos'] = ['contact']
                elif 'event_host' in hide_filters:
                    view['single_infos'] = ['host']
                elif hide_filters == ['event_id', 'history_line']:
                    view['single_infos'] = ['history']
                elif 'event_id' in hide_filters:
                    view['single_infos'] = ['event']
                elif 'aggr_hosts' in hide_filters:
                    view['single_infos'] = ['host']
                else:
                    # For all other context types assume the view is showing multiple objects
                    # and the datasource can simply be gathered from the datasource
                    view['single_infos'] = []
            except: # Exceptions can happen for views saved with certain GIT versions
                if config.debug:
                    raise
        # Convert from show_filters, hide_filters, hard_filters and hard_filtervars
        # to context construct
        if 'context' not in view:
            view['show_filters'] = view['hide_filters'] + view['hard_filters'] + view['show_filters']
            single_keys = visuals.get_single_info_keys(view)
            # First get vars for the classic filters
            context = {}
            filtervars = dict(view['hard_filtervars'])
            all_vars = {}
            for filter_name in view['show_filters']:
                if filter_name in single_keys:
                    continue # skip conflictings vars / filters
                context.setdefault(filter_name, {})
                try:
                    f = visuals.get_filter(filter_name)
                except:
                    # The exact match filters have been removed. They where used only as
                    # link filters anyway - at least by the builtin views.
                    continue
                for var in f.htmlvars:
                    # Check whether or not the filter is supported by the datasource,
                    # then either skip or use the filter vars
                    if var in filtervars and f.info in datasource['infos']:
                        value = filtervars[var]
                        all_vars[var] = value
                        context[filter_name][var] = value
                if filter_name in changed_filter_vars and f.info in datasource['infos']:
                    for old_var, new_var in changed_filter_vars[filter_name].items():
                        if old_var in filtervars:
                            value = filtervars[old_var]
                            all_vars[new_var] = value
                            context[filter_name][new_var] = value
            # Now, when there are single object infos specified, add these keys to the
            # context
            for single_key in single_keys:
                if single_key in all_vars:
                    context[single_key] = all_vars[single_key]
            view['context'] = context
        # Cleanup unused attributes
        for k in [ 'hide_filters', 'hard_filters', 'show_filters', 'hard_filtervars' ]:
            try:
                del view[k]
            except KeyError:
                pass
def save_views(us):
    """Persist all views via the visuals module (the ``us`` argument is unused here)."""
    visuals.save('views', multisite_views)
# For each view a function can be registered that has to return either True
# or False to show a view as context link
view_is_enabled = {}

def is_enabled_for(linking_view, view, context_vars):
    """Return whether ``view`` should be offered as a context link.

    Views without a registered hook in ``view_is_enabled`` are always
    visible; otherwise the registered callable decides.
    """
    try:
        hook = view_is_enabled[view["name"]]
    except KeyError:
        return True # Not registered are always visible!
    return hook(linking_view, view, context_vars)
#.
# .--PainterOptions------------------------------------------------------.
# | ____ _ _ ___ _ _ |
# | | _ \ __ _(_)_ __ | |_ ___ _ __ / _ \ _ __ | |_(_) ___ _ __ ___ |
# | | |_) / _` | | '_ \| __/ _ \ '__| | | | '_ \| __| |/ _ \| '_ \/ __| |
# | | __/ (_| | | | | | || __/ | | |_| | |_) | |_| | (_) | | | \__ \ |
# | |_| \__,_|_|_| |_|\__\___|_| \___/| .__/ \__|_|\___/|_| |_|___/ |
# | |_| |
# +----------------------------------------------------------------------+
# | Painter options are settings that can be changed per user per view. |
# | These options are controlled through the painter options form which  |
# | is accessible through the small monitor icon on the top left of the |
# | views. |
# '----------------------------------------------------------------------'
# TODO: Better name it PainterOptions or DisplayOptions? There are options which only affect
# painters, but some which affect generic behaviour of the views, so DisplayOptions might
# be better.
class PainterOptions(object):
    """Manages the painter options of a single view.

    Painter options are settings that can be changed per user per view
    through the painter options form of the view. Options of named views
    are persisted per user in the "viewoptions" file; anonymous views
    (``view_name=None``) never persist their options.
    """
    def __init__(self, view_name=None):
        self._view_name = view_name
        # The names of the painter options used by the current view
        self._used_option_names = None
        # The effective options for this view
        self._options = {}
    def load(self):
        self._load_from_config()
    # Load the options to be used for this view
    def _load_used_options(self, view):
        if self._used_option_names is not None:
            return # only load once per request
        options = set()
        for cell in get_group_cells(view) + get_cells(view):
            options.update(cell.painter_options())
        # Also layouts can register painter options
        layout_name = view.get("layout")
        if layout_name is not None:
            options.update(multisite_layouts[layout_name].get("options", []))
        # TODO: Improve sorting. Add a sort index?
        self._used_option_names = sorted(options)
    def _load_from_config(self):
        if self._is_anonymous_view():
            return # never has options
        if not self.painter_options_permitted():
            return
        # Options are stored per view. Get all options for all views
        vo = config.user.load_file("viewoptions", {})
        self._options = vo.get(self._view_name, {})
    def save_to_config(self):
        """Persist this view's options into the user's viewoptions file."""
        vo = config.user.load_file("viewoptions", {}, lock=True)
        vo[self._view_name] = self._options
        config.user.save_file("viewoptions", vo)
    def update_from_url(self, view):
        """Apply option changes submitted via the painter options form."""
        self._load_used_options(view)
        if not self.painter_option_form_enabled():
            return
        if html.has_var("_reset_painter_options"):
            self._clear_painter_options()
            return
        elif html.has_var("_update_painter_options"):
            self._set_from_submitted_form()
    def _set_from_submitted_form(self):
        # TODO: Remove all keys that are in multisite_painter_options
        # but not in self._used_option_names
        modified = False
        for option_name in self._used_option_names:
            # Get new value for the option from the value spec
            vs = self.get_valuespec_of(option_name)
            value = vs.from_html_vars("po_%s" % option_name)
            if not self._is_set(option_name) or self.get(option_name) != value:
                modified = True
            self.set(option_name, value)
        if modified:
            self.save_to_config()
    def _clear_painter_options(self):
        # TODO: This never removes options that are not existent anymore
        modified = False
        for name in multisite_painter_options:
            try:
                del self._options[name]
                modified = True
            except KeyError:
                pass
        if modified:
            self.save_to_config()
        # Also remove the options from current html vars. Otherwise the
        # painter option form will display the just removed options as
        # defaults of the painter option form.
        for varname in html.all_varnames_with_prefix("po_"):
            html.del_var(varname)
    def get_valuespec_of(self, name):
        """Return the valuespec of an option, calling it if declared lazily."""
        opt = multisite_painter_options[name]
        # The declaration may provide a plain function which constructs the
        # valuespec lazily (exact function type check kept deliberately:
        # valuespec *instances* must not be called here).
        if type(lambda: None) == type(opt["valuespec"]):
            return opt["valuespec"]()
        else:
            return opt["valuespec"]
    def _is_set(self, name):
        return name in self._options
    # Sets a painter option value (only for this request). Is not persisted!
    def set(self, name, value):
        self._options[name] = value
    # Returns either the set value, the provided default value or if none
    # provided, it returns the default value of the valuespec.
    def get(self, name, dflt=None):
        if dflt is None:
            try:
                dflt = self.get_valuespec_of(name).default_value()
            except KeyError:
                # Some view options (that are not declared as display options)
                # like "refresh" don't have a valuespec. So they need to default
                # to None.
                # TODO: Find all occurences and simply declare them as "invisible"
                # painter options.
                pass
        return self._options.get(name, dflt)
    # Not falling back to a default value, simply returning None in case
    # the option is not set.
    def get_without_default(self, name):
        return self._options.get(name)
    def get_all(self):
        return self._options
    def _is_anonymous_view(self):
        return self._view_name is None
    def painter_options_permitted(self):
        return config.user.may("general.painter_options")
    def painter_option_form_enabled(self):
        # Only when the view actually uses options and the user may change them
        return self._used_option_names and self.painter_options_permitted()
    def show_form(self, view):
        """Render the (initially hidden) painter options form of the view."""
        self._load_used_options(view)
        if not display_options.enabled(display_options.D) or not self.painter_option_form_enabled():
            return
        html.open_div(id_="painteroptions", class_=["view_form"], style="display: none;")
        html.begin_form("painteroptions")
        forms.header(_("Display Options"))
        for name in self._used_option_names:
            vs = self.get_valuespec_of(name)
            forms.section(vs.title())
            # TODO: Possible improvement for vars which default is specified
            # by the view: Don't just default to the valuespecs default. Better
            # use the view default value here to get the user the current view
            # settings reflected.
            vs.render_input("po_%s" % name, self.get(name))
        forms.end()
        html.button("_update_painter_options", _("Submit"), "submit")
        html.button("_reset_painter_options", _("Reset"), "submit")
        html.hidden_fields()
        html.end_form()
        html.close_div()
def prepare_painter_options(view_name=None):
    # Create the global PainterOptions instance for the current page and
    # load the user's persisted options (if any) for the given view.
    global painter_options
    painter_options = PainterOptions(view_name)
    painter_options.load()
#.
# .--Cells---------------------------------------------------------------.
# | ____ _ _ |
# | / ___|___| | |___ |
# | | | / _ \ | / __| |
# | | |__| __/ | \__ \ |
# | \____\___|_|_|___/ |
# | |
# +----------------------------------------------------------------------+
# | View cell handling classes. Each cell instantiates a multisite      |
# | painter to render a table cell. |
# '----------------------------------------------------------------------'
# A cell is an instance of a painter in a view (-> a cell or a grouping cell)
class Cell(object):
    """A cell is an instance of a painter in a view (a column cell or a
    grouping cell).

    It wraps a multisite painter together with its view specific settings
    (optional link target view, optional tooltip painter, optional painter
    parameters) and knows how to render its column header and its content
    for a single data row.
    """
    # Wanted to have the "parse painter spec logic" in one place (The Cell() class)
    # but this should be cleaned up more. TODO: Move this to another place
    @staticmethod
    def painter_exists(painter_spec):
        # Parameterized painters store a (name, params) tuple in the first
        # element instead of the plain painter name.
        if type(painter_spec[0]) == tuple:
            painter_name = painter_spec[0][0]
        else:
            painter_name = painter_spec[0]
        return painter_name in multisite_painters
    # Wanted to have the "parse painter spec logic" in one place (The Cell() class)
    # but this should be cleaned up more. TODO: Move this to another place
    @staticmethod
    def is_join_cell(painter_spec):
        # Specs of 4 or more elements carry a join service description
        return len(painter_spec) >= 4
    def __init__(self, view, painter_spec=None):
        # painter_spec is one of the tuple formats documented below.
        # A false value creates an empty cell (see EmptyCell).
        self._view = view
        self._painter_name = None
        self._painter_params = None
        self._link_view_name = None
        self._tooltip_painter_name = None
        if painter_spec:
            self._from_view(painter_spec)
    # In views the painters are saved as tuples of the following formats:
    #
    # Painter name, Link view name
    # ('service_discovery_service', None),
    #
    # Painter name, Link view name, Hover painter name
    # ('host_plugin_output', None, None),
    #
    # Join column: Painter name, Link view name, hover painter name, Join service description
    # ('service_description', None, None, u'CPU load')
    #
    # Join column: Painter name, Link view name, hover painter name, Join service description, custom title
    # ('service_description', None, None, u'CPU load', u'My custom title')
    #
    # Parameterized painters:
    # Same as above but instead of the "Painter name" a two element tuple with the painter name as
    # first element and a dictionary of parameters as second element is set.
    def _from_view(self, painter_spec):
        """Parse one of the painter spec tuple formats documented above."""
        if type(painter_spec[0]) == tuple:
            self._painter_name, self._painter_params = painter_spec[0]
        else:
            self._painter_name = painter_spec[0]
        if painter_spec[1] != None:
            self._link_view_name = painter_spec[1]
        # Clean this call to Cell.painter_exists() up!
        if len(painter_spec) >= 3 and Cell.painter_exists((painter_spec[2], None)):
            self._tooltip_painter_name = painter_spec[2]
    # Get a list of columns we need to fetch in order to render this cell
    def needed_columns(self):
        """Return the set of column names needed to render this cell,
        including columns of the link target filters and the tooltip painter."""
        columns = set(get_painter_columns(self.painter()))
        if self._link_view_name:
            # Make sure that the information about the available views is present. If
            # called via the reporting, then this might not be the case
            # TODO: Move this to some better place.
            views = permitted_views()
            if self._has_link():
                link_view = self._link_view()
                if link_view:
                    # TODO: Clean this up here
                    for filt in [ visuals.get_filter(fn) for fn in visuals.get_single_info_keys(link_view) ]:
                        columns.update(filt.link_columns)
        if self.has_tooltip():
            columns.update(get_painter_columns(self.tooltip_painter()))
        return columns
    def is_joined(self):
        # Overridden by JoinCell
        return False
    def join_service(self):
        # Overridden by JoinCell
        return None
    def _has_link(self):
        return self._link_view_name != None
    def _link_view(self):
        # Returns None when the linked view does not exist (anymore)
        try:
            return get_view_by_name(self._link_view_name)
        except KeyError:
            return None
    def painter(self):
        """Return the painter declaration dict of this cell."""
        return multisite_painters[self._painter_name]
    def painter_name(self):
        return self._painter_name
    def export_title(self):
        # Column title used for exports (e.g. CSV/JSON)
        return self._painter_name
    def painter_options(self):
        # Painter options this cell's painter declares to use
        return self.painter().get("options", [])
    # The parameters configured in the view for this painter. In case the
    # painter has params, it defaults to the valuespec default value and
    # in case the painter has no params, it returns None.
    def painter_parameters(self):
        vs_painter_params = get_painter_params_valuespec(self.painter())
        if not vs_painter_params:
            return
        if vs_painter_params and self._painter_params == None:
            return vs_painter_params.default_value()
        else:
            return self._painter_params
    def title(self, use_short=True):
        """Return the column title, preferring the short variant if wanted."""
        painter = self.painter()
        if use_short:
            return painter.get("short", painter["title"])
        else:
            return painter["title"]
    # Can either be:
    # True : Is printable in PDF
    # False : Is not printable at all
    # "<string>" : ID of a painter_printer (Reporting module)
    def printable(self):
        return self.painter().get("printable", True)
    def has_tooltip(self):
        return self._tooltip_painter_name != None
    def tooltip_painter_name(self):
        return self._tooltip_painter_name
    def tooltip_painter(self):
        return multisite_painters[self._tooltip_painter_name]
    def paint_as_header(self, is_last_column_header=False):
        """Render this cell's column header (<th>) of the view table."""
        # Optional: Sort link in title cell
        # Use explicit defined sorter or implicit the sorter with the painter name
        # Important for links:
        # - Add the display options (Keeping the same display options as current)
        # - Link to _self (Always link to the current frame)
        classes = []
        onclick = ''
        title = ''
        if display_options.enabled(display_options.L) \
           and self._view.get('user_sortable', False) \
           and get_sorter_name_of_painter(self.painter_name()) is not None:
            params = [
                ('sort', self._sort_url()),
            ]
            if display_options.title_options:
                params.append(('display_options', display_options.title_options))
            classes += [ "sort", get_primary_sorter_order(self._view, self.painter_name()) ]
            onclick = "location.href=\'%s\'" % html.makeuri(params, 'sort')
            title = _('Sort by %s') % self.title()
        if is_last_column_header:
            classes.append("last_col")
        html.open_th(class_=classes, onclick=onclick, title=title)
        html.write(self.title())
        html.close_th()
        #html.guitest_record_output("view", ("header", title))
    def _sort_url(self):
        """
        The following sorters need to be handled in this order:
        1. group by sorter (needed in grouped views)
        2. user defined sorters (url sorter)
        3. configured view sorters
        """
        sorter = []
        group_sort, user_sort, view_sort = get_separated_sorters(self._view)
        sorter = group_sort + user_sort + view_sort
        # Now apply the sorter of the current column:
        # - Negate/Disable when at first position
        # - Move to the first position when already in sorters
        # - Add in the front of the user sorters when not set
        sorter_name = get_sorter_name_of_painter(self.painter_name())
        if self.is_joined():
            # TODO: Clean this up and then remove Cell.join_service()
            this_asc_sorter = (sorter_name, False, self.join_service())
            this_desc_sorter = (sorter_name, True, self.join_service())
        else:
            this_asc_sorter = (sorter_name, False)
            this_desc_sorter = (sorter_name, True)
        if user_sort and this_asc_sorter == user_sort[0]:
            # Second click: Change from asc to desc order
            sorter[sorter.index(this_asc_sorter)] = this_desc_sorter
        elif user_sort and this_desc_sorter == user_sort[0]:
            # Third click: Remove this sorter
            sorter.remove(this_desc_sorter)
        else:
            # First click: add this sorter as primary user sorter
            # Maybe the sorter is already in the user sorters or view sorters, remove it
            for s in [ user_sort, view_sort ]:
                if this_asc_sorter in s:
                    s.remove(this_asc_sorter)
                if this_desc_sorter in s:
                    s.remove(this_desc_sorter)
            # Now add the sorter as primary user sorter
            sorter = group_sort + [this_asc_sorter] + user_sort + view_sort
        # Serialize to the "sort" URL variable format: comma separated
        # entries, "-" prefix for descending order, "~<join service>"
        # suffix for join sorters.
        p = []
        for s in sorter:
            if len(s) == 2:
                p.append((s[1] and '-' or '') + s[0])
            else:
                p.append((s[1] and '-' or '') + s[0] + '~' + s[2])
        return ','.join(p)
    def render(self, row):
        """Render the cell for the given row. Returns (css_class, content)."""
        row = join_row(row, self)
        try:
            tdclass, content = self.render_content(row)
        except:
            logger.exception("Failed to render painter '%s' (Row: %r)" %
                (self._painter_name, row))
            raise
        if tdclass == None:
            tdclass = ""
        if tdclass == "" and content == "":
            return "", ""
        # Add the optional link to another view
        if content and self._has_link():
            content = link_to_view(content, row, self._link_view_name)
        # Add the optional mouseover tooltip
        if content and self.has_tooltip():
            tooltip_cell = Cell(self._view, (self.tooltip_painter_name(), None))
            tooltip_tdclass, tooltip_content = tooltip_cell.render_content(row)
            tooltip_text = html.strip_tags(tooltip_content)
            content = '<span title="%s">%s</span>' % (tooltip_text, content)
        return tdclass, content
    # Same as self.render() for HTML output: Gets a painter and a data
    # row and creates the text for being painted.
    def render_for_pdf(self, row, time_range):
        # TODO: Move this somewhere else!
        def find_htdocs_image_path(filename):
            # Prefer the site local image over the shipped one; returns
            # None implicitly when the file exists in neither directory.
            dirs = [
                cmk.paths.local_web_dir + "/htdocs/",
                cmk.paths.web_dir + "/htdocs/",
            ]
            for d in dirs:
                if os.path.exists(d + filename):
                    return d + filename
        try:
            row = join_row(row, self)
            css_classes, txt = self.render_content(row)
            if txt is None:
                return css_classes, ""
            txt = txt.strip()
            # Handle <img...>. Our PDF writer cannot draw arbitrary
            # images, but all that we need for showing simple icons.
            # Current limitation: *one* image
            if txt.lower().startswith("<img"):
                img_filename = re.sub('.*src=["\']([^\'"]*)["\'].*', "\\1", str(txt))
                img_path = find_htdocs_image_path(img_filename)
                if img_path:
                    txt = ("icon", img_path)
                else:
                    txt = img_filename
            if isinstance(txt, HTML):
                txt = "%s" % txt
            elif not isinstance(txt, tuple):
                txt = html.escaper.unescape_attributes(txt)
                txt = html.strip_tags(txt)
            return css_classes, txt
        except Exception:
            raise MKGeneralException('Failed to paint "%s": %s' %
                (self.painter_name(), traceback.format_exc()))
    def render_content(self, row):
        """Invoke the painter function for the row. Returns (css_class, content)."""
        if not row:
            return "", "" # nothing to paint
        painter = self.painter()
        paint_func = painter["paint"]
        # Painters can request to get the cell object handed over.
        # Detect that and give the painter this argument.
        arg_names = inspect.getargspec(paint_func)[0]
        painter_args = []
        for arg_name in arg_names:
            if arg_name == "row":
                painter_args.append(row)
            elif arg_name == "cell":
                painter_args.append(self)
        # Add optional painter arguments from painter specification
        if "args" in painter:
            painter_args += painter["args"]
        return painter["paint"](*painter_args)
    def paint(self, row, tdattrs="", is_last_cell=False):
        """Render the cell as a <td> element. Returns True if it had content."""
        tdclass, content = self.render(row)
        has_content = content != ""
        if is_last_cell:
            if tdclass == None:
                tdclass = "last_col"
            else:
                tdclass += " last_col"
        if tdclass:
            html.write("<td %s class=\"%s\">" % (tdattrs, tdclass))
            html.write(content)
            html.close_td()
        else:
            html.write("<td %s>" % (tdattrs))
            html.write(content)
            html.close_td()
        #html.guitest_record_output("view", ("cell", content))
        return has_content
class JoinCell(Cell):
    """A column cell fed by a joined service row, e.g. showing data of a
    specific service inside a host based view."""
    def __init__(self, view, painter_spec):
        self._join_service_descr = None
        self._custom_title = None
        super(JoinCell, self).__init__(view, painter_spec)
    def _from_view(self, painter_spec):
        # Additionally extract the join service description (4th element)
        # and the optional custom column title (5th element)
        super(JoinCell, self)._from_view(painter_spec)
        num_elements = len(painter_spec)
        if num_elements >= 4:
            self._join_service_descr = painter_spec[3]
        if num_elements == 5:
            self._custom_title = painter_spec[4]
    def is_joined(self):
        return True
    def join_service(self):
        return self._join_service_descr
    def livestatus_filter(self, join_column_name):
        # Livestatus filter header selecting the rows of the joined service
        column = livestatus.lqencode(join_column_name)
        descr = livestatus.lqencode(self._join_service_descr)
        return "Filter: %s = %s" % (column, descr)
    def title(self, use_short=True):
        if self._custom_title:
            return self._custom_title
        return self._join_service_descr
    def export_title(self):
        return "%s.%s" % (self._painter_name, self.join_service())
class EmptyCell(Cell):
    """A placeholder cell that renders nothing (no painter spec)."""
    def __init__(self, view):
        super(EmptyCell, self).__init__(view)
    def render(self, row):
        return "", ""
    def paint(self, row, tdattrs="", is_last_cell=False):
        # Accept the same arguments as Cell.paint() so an EmptyCell can be
        # used interchangeably with regular cells by callers passing
        # tdattrs/is_last_cell. Nothing is painted.
        return False
#.
# .--Table of views------------------------------------------------------.
# | _____ _ _ __ _ |
# | |_ _|_ _| |__ | | ___ ___ / _| __ _(_) _____ _____ |
# | | |/ _` | '_ \| |/ _ \ / _ \| |_ \ \ / / |/ _ \ \ /\ / / __| |
# | | | (_| | |_) | | __/ | (_) | _| \ V /| | __/\ V V /\__ \ |
# | |_|\__,_|_.__/|_|\___| \___/|_| \_/ |_|\___| \_/\_/ |___/ |
# | |
# +----------------------------------------------------------------------+
# | Show list of all views with buttons for editing |
# '----------------------------------------------------------------------'
def page_edit_views():
    """Render the list of all views with buttons for editing them."""
    load_views()
    def _datasource_title(view_spec):
        return multisite_datasources[view_spec["datasource"]]['title']
    cols = [(_('Datasource'), _datasource_title)]
    visuals.page_list('views', _("Edit Views"), multisite_views, cols)
#.
# .--Create View---------------------------------------------------------.
# | ____ _ __ ___ |
# | / ___|_ __ ___ __ _| |_ ___ \ \ / (_) _____ __ |
# | | | | '__/ _ \/ _` | __/ _ \ \ \ / /| |/ _ \ \ /\ / / |
# | | |___| | | __/ (_| | || __/ \ V / | | __/\ V V / |
# | \____|_| \___|\__,_|\__\___| \_/ |_|\___| \_/\_/ |
# | |
# +----------------------------------------------------------------------+
# | Select the view type of the new view |
# '----------------------------------------------------------------------'
# First step: Select the data source
# Create datasource selection valuespec, also for other modules
# FIXME: Sort the datasources by (assumed) common usage
def DatasourceSelection():
    """Build the dropdown valuespec for choosing a registered datasource.

    Also used by other modules during visual creation.
    """
    # FIXME: Sort the datasources by (assumed) common usage
    datasources = [
        (ds_name, ds['title'])
        for ds_name, ds in multisite_datasources.items()
    ]
    return DropdownChoice(
        title = _('Datasource'),
        help = _('The datasources define which type of objects should be displayed with this view.'),
        choices = datasources,
        sorted = True,
        columns = 1,
        default_value = 'services',
    )
def page_create_view(next_url = None):
    """First step of view creation: let the user select the datasource.

    On successful submit the user is redirected either to ``next_url``
    (with the chosen datasource appended) or to create_view_infos.py.
    """
    vs_ds = DatasourceSelection()
    ds = 'services' # Default selection
    html.header(_('Create View'), stylesheets=["pages"])
    html.begin_context_buttons()
    back_url = html.var("back", "")
    html.context_button(_("Back"), back_url or "edit_views.py", "back")
    html.end_context_buttons()
    if html.var('save') and html.check_transaction():
        try:
            ds = vs_ds.from_html_vars('ds')
            vs_ds.validate_value(ds, 'ds')
            if not next_url:
                next_url = html.makeuri([('datasource', ds)], filename = "create_view_infos.py")
            else:
                next_url = next_url + '&datasource=%s' % ds
            html.http_redirect(next_url)
            return
        except MKUserError as e:
            # "except X, e" (used previously) is Python-2-only syntax;
            # "as" works on Python 2.6+ and 3.x alike.
            html.div(e, class_=["error"])
            html.add_user_error(e.varname, e)
    # Render the datasource selection form (initial page or after an error)
    html.begin_form('create_view')
    html.hidden_field('mode', 'create')
    forms.header(_('Select Datasource'))
    forms.section(vs_ds.title())
    vs_ds.render_input('ds', ds)
    html.help(vs_ds.help())
    forms.end()
    html.button('save', _('Continue'), 'submit')
    html.hidden_fields()
    html.end_form()
    html.footer()
def page_create_view_infos():
    # Second step of view creation: ask for the single object selections
    # ("infos") of the datasource chosen in the first step, then continue
    # to the view editor.
    ds_name = html.var('datasource')
    if ds_name not in multisite_datasources:
        raise MKGeneralException(_('The given datasource is not supported'))
    visuals.page_create_visual('views', multisite_datasources[ds_name]['infos'],
        next_url = 'edit_view.py?mode=create&datasource=%s&single_infos=%%s' % ds_name)
#.
# .--Edit View-----------------------------------------------------------.
# | _____ _ _ _ __ ___ |
# | | ____|__| (_) |_ \ \ / (_) _____ __ |
# | | _| / _` | | __| \ \ / /| |/ _ \ \ /\ / / |
# | | |__| (_| | | |_ \ V / | | __/\ V V / |
# | |_____\__,_|_|\__| \_/ |_|\___| \_/\_/ |
# | |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------'
# Return the "infos" of a view's datasource (used to render filters)
def get_view_infos(view):
    """Return the infos of the given view's datasource.

    Falls back to the "datasource" HTML variable when the view dict does
    not carry a datasource itself (e.g. during view creation).
    """
    fallback_name = html.var('datasource')
    ds_name = view.get('datasource', fallback_name)
    return multisite_datasources[ds_name]['infos']
def page_edit_view():
    """Show the view editor dialog (create or edit a view)."""
    load_views()
    visuals.page_edit_visual(
        'views',
        multisite_views,
        custom_field_handler = render_view_config,
        load_handler = transform_view_to_valuespec_value,
        create_handler = create_view_from_valuespec,
        info_handler = get_view_infos,
    )
def view_choices(only_with_hidden = False):
    """Return (name, title) dropdown choices of the permitted views.

    With only_with_hidden=True, only views with single object selections
    (single_infos) are offered. Always starts with an empty choice.
    """
    entries = [
        ("%s" % name, format_view_title(view))
        for name, view in available_views.items()
        if not only_with_hidden or view['single_infos']
    ]
    return [("", "")] + entries
def format_view_title(view):
    """Return the user visible title of a view, marking mobile views."""
    title = _u(view["title"])
    if view.get('mobile', False):
        return _('Mobile: ') + title
    return title
def view_editor_options():
    """Return the (key, title) checkbox choices of the view editor "Options" list."""
    options = []
    options.append(('mobile', _('Show this view in the Mobile GUI')))
    options.append(('mustsearch', _('Show data only on search')))
    options.append(('force_checkboxes', _('Always show the checkboxes')))
    options.append(('user_sortable', _('Make view sortable by user')))
    options.append(('play_sounds', _('Play alarm sounds')))
    return options
def view_editor_specs(ds_name, general_properties=True):
    """Build the list of (ident, Dictionary) valuespecs making up the view editor.

    ds_name            -- name of the datasource the view is based on
    general_properties -- whether to include the "View Properties" spec
                          (can be disabled when only columns/sorting/grouping
                          are edited)
    """
    load_views() # make sure that available_views is present
    specs = []

    if general_properties:
        specs.append(
            ('view', Dictionary(
                title = _('View Properties'),
                render = 'form',
                optional_keys = None,
                elements = [
                    ('datasource', FixedValue(ds_name,
                        title = _('Datasource'),
                        totext = multisite_datasources[ds_name]['title'],
                        help = _('The datasource of a view cannot be changed.'),
                    )),
                    ('options', ListChoice(
                        title = _('Options'),
                        choices = view_editor_options(),
                        default_value = ['user_sortable'],
                    )),
                    ('browser_reload', Integer(
                        title = _('Automatic page reload'),
                        unit = _('seconds'),
                        minvalue = 0,
                        help = _('Leave this empty or at 0 for no automatic reload.'),
                    )),
                    ('layout', DropdownChoice(
                        title = _('Basic Layout'),
                        choices = [ (k, v["title"]) for k,v in multisite_layouts.items() if not v.get("hide")],
                        default_value = 'table',
                        sorted = True,
                    )),
                    ('num_columns', Integer(
                        title = _('Number of Columns'),
                        default_value = 1,
                        minvalue = 1,
                        maxvalue = 50,
                    )),
                    ('column_headers', DropdownChoice(
                        title = _('Column Headers'),
                        choices = [
                            ("off",      _("off")),
                            ("pergroup", _("once per group")),
                            ("repeat",   _("repeat every 20'th row")),
                        ],
                        default_value = 'pergroup',
                    )),
                ],
            ))
        )

    def column_spec(ident, title, ds_name):
        # Helper building the (ident, Dictionary) spec for the column list
        # ("columns") or the grouping column list ("grouping").
        painters = painters_of_datasource(ds_name)

        allow_empty = True
        empty_text = None
        if ident == 'columns':
            # A view without any column makes no sense
            allow_empty = False
            empty_text = _("Please add at least one column to your view.")

        vs_column = Tuple(
            title = _('Column'),
            elements = [
                CascadingDropdown(
                    title = _('Column'),
                    choices = painter_choices_with_params(painters),
                    no_preselect = True,
                ),
                DropdownChoice(
                    title = _('Link'),
                    choices = view_choices,
                    sorted = True,
                ),
                DropdownChoice(
                    title = _('Tooltip'),
                    choices = [(None, "")] + painter_choices(painters),
                ),
            ],
        )

        # Regular columns are 3-tuples; join columns (only offered for the
        # "columns" spec and only if the datasource supports joins) are
        # 5-tuples distinguished by their length in the Alternative match.
        join_painters = join_painters_of_datasource(ds_name)
        if ident == 'columns' and join_painters:
            vs_column = Alternative(
                elements = [
                    vs_column,

                    Tuple(
                        title = _('Joined column'),
                        help = _("A joined column can display information about specific services for "
                                 "host objects in a view showing host objects. You need to specify the "
                                 "service description of the service you like to show the data for."),
                        elements = [
                            CascadingDropdown(
                                title = _('Column'),
                                choices = painter_choices_with_params(join_painters),
                                no_preselect = True,
                            ),
                            TextUnicode(
                                title = _('of Service'),
                                allow_empty = False,
                            ),
                            DropdownChoice(
                                title = _('Link'),
                                choices = view_choices,
                                sorted = True,
                            ),
                            DropdownChoice(
                                title = _('Tooltip'),
                                choices = [(None, "")] + painter_choices(join_painters),
                            ),
                            TextUnicode(
                                title = _('Title'),
                            ),
                        ],
                    ),
                ],
                style = 'dropdown',
                match = lambda x: 1 * (x is not None and len(x) == 5),
            )

        return (ident, Dictionary(
            title = title,
            render = 'form',
            optional_keys = None,
            elements = [
                (ident, ListOf(vs_column,
                    title = title,
                    add_label = _('Add column'),
                    allow_empty = allow_empty,
                    empty_text = empty_text,
                )),
            ],
        ))

    specs.append(column_spec('columns', _('Columns'), ds_name))

    specs.append(
        ('sorting', Dictionary(
            title = _('Sorting'),
            render = 'form',
            optional_keys = None,
            elements = [
                ('sorters', ListOf(
                    Tuple(
                        elements = [
                            DropdownChoice(
                                title = _('Column'),
                                choices = [ (name, get_painter_title_for_choices(p)) for name, p
                                            in sorters_of_datasource(ds_name).items() ],
                                sorted = True,
                                no_preselect = True,
                            ),
                            DropdownChoice(
                                title = _('Order'),
                                choices = [(False, _("Ascending")),
                                           (True, _("Descending"))],
                            ),
                        ],
                        orientation = 'horizontal',
                    ),
                    title = _('Sorting'),
                    add_label = _('Add sorter'),
                )),
            ],
        )),
    )

    specs.append(column_spec('grouping', _('Grouping'), ds_name))

    return specs
def render_view_config(view, general_properties=True):
    """Render the valuespec input forms for editing the given view."""
    ds_name = view.get("datasource", html.var("datasource"))
    if not ds_name:
        raise MKInternalError(_("No datasource defined."))
    if ds_name not in multisite_datasources:
        raise MKInternalError(_('The given datasource is not supported.'))
    view['datasource'] = ds_name

    for ident, valuespec in view_editor_specs(ds_name, general_properties):
        valuespec.render_input(ident, view.get(ident))
# Is used to change the view structure to be compatible with
# the valuespec. This needs to perform the inverted steps of the
# transform_valuespec_value_to_view() function. FIXME: One day we should
# rewrite this so that no transform is needed anymore.
def transform_view_to_valuespec_value(view):
    """Convert a legacy flat view dict in place into the nested structure
    the view editor valuespecs expect.

    Inverse of transform_valuespec_value_to_view().
    """
    # Several global attributes are grouped into the "view" sub-dict.
    # Only copy our known keys; reporting elements etc. may carry own keys.
    general = {}
    for key in [ "datasource", "browser_reload", "layout", "num_columns", "column_headers" ]:
        if key in view:
            general[key] = view[key]
    general['options'] = [ key for key, _title in view_editor_options()
                           if view.get(key) ]
    view["view"] = general

    view['visibility'] = dict([ (key, view[key])
                                for key in [ 'hidden', 'hidebutton', 'public' ]
                                if view.get(key) ])

    view['grouping'] = { "grouping" : view.get('group_painters', []) }
    view['sorting']  = { "sorters" : view.get('sorters', {}) }

    # Painter specs come in several historic tuple lengths; normalize them
    # to the editor's column ordering (join index moves to second place).
    columns = []
    for entry in view.get('painters', []):
        if len(entry) == 5:
            pname, viewname, tooltip, join_index, col_title = entry
            columns.append((pname, join_index, viewname, tooltip or None, col_title))
        elif len(entry) == 4:
            pname, viewname, tooltip, join_index = entry
            columns.append((pname, join_index, viewname, tooltip or None, ''))
        elif len(entry) == 3:
            pname, viewname, tooltip = entry
            columns.append((pname, viewname, tooltip or None))
        else:
            pname, viewname = entry
            columns.append((pname, viewname, None))
    view['columns'] = { "columns" : columns }
def transform_valuespec_value_to_view(view):
    """Convert the nested view editor structure back (in place) into the
    legacy flat view dict format.

    Inverse of transform_view_to_valuespec_value().
    """
    # Iterate over a snapshot since keys are added and deleted below
    for ident, attrs in list(view.items()):
        # Transform some valuespec specific options to legacy view
        # format. We do not want to change the view data structure
        # at the moment.
        if ident == 'view':
            if "options" in attrs:
                # First set all options to false, then enable the selected
                # single options
                for option in dict(view_editor_options()).keys():
                    view[option] = False
                for option in attrs['options']:
                    view[option] = True
                # And cleanup
                del attrs['options']

            view.update(attrs)
            del view["view"]

        elif ident == 'sorting':
            view.update(attrs)
            del view["sorting"]

        elif ident == 'grouping':
            view['group_painters'] = attrs['grouping']
            del view["grouping"]

        elif ident == 'columns':
            painters = []
            for column in attrs['columns']:
                if len(column) == 5:
                    pname, join_index, viewname, tooltip, col_title = column
                else:
                    pname, viewname, tooltip = column
                    join_index = col_title = None

                viewname = viewname or None

                # Emit the shortest legacy tuple that still carries all
                # join related information
                if join_index and col_title:
                    painters.append((pname, viewname, tooltip, join_index, col_title))
                elif join_index:
                    painters.append((pname, viewname, tooltip, join_index))
                else:
                    painters.append((pname, viewname, tooltip))
            view['painters'] = painters
            del view["columns"]
# Extract properties of view from HTML variables and construct
# view object, to be used for saving or displaying
#
# old_view is the old view dict which might be loaded from storage.
# view is the new dict object to be updated.
def create_view_from_valuespec(old_view, view):
    """Read the view editor forms from the HTML variables and update *view*.

    old_view -- the stored view dict (used to determine the datasource)
    view     -- the new dict object being constructed; returned updated.
    """
    ds_name = old_view.get('datasource', html.var('datasource'))
    view['datasource'] = ds_name

    attrs_by_ident = {}
    for ident, valuespec in view_editor_specs(ds_name):
        value = valuespec.from_html_vars(ident)
        valuespec.validate_value(value, ident)
        attrs_by_ident[ident] = value

    transform_valuespec_value_to_view(attrs_by_ident)
    view.update(attrs_by_ident)
    return view
#.
# .--Display View--------------------------------------------------------.
# | ____ _ _ __ ___ |
# | | _ \(_)___ _ __ | | __ _ _ _ \ \ / (_) _____ __ |
# | | | | | / __| '_ \| |/ _` | | | | \ \ / /| |/ _ \ \ /\ / / |
# | | |_| | \__ \ |_) | | (_| | |_| | \ V / | | __/\ V V / |
# | |____/|_|___/ .__/|_|\__,_|\__, | \_/ |_|\___| \_/\_/ |
# | |_| |___/ |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------'
def show_filter(f):
    """Render a single filter. Invisible filters are wrapped in a hidden
    div so their HTML variables are still part of the form submission."""
    if f.visible():
        visuals.show_filter(f)
    else:
        html.open_div(style="display:none;")
        f.display()
        html.close_div()
def show_filter_form(is_open, filters):
    """Render the (initially open or collapsed) filter form of a view."""
    # NOTE: The table must be named differently than the form
    html.open_div(id_="filters", class_=["view_form"],
                  style=None if is_open else "display: none;")

    html.begin_form("filter")
    html.open_table(class_=["filterform"], cellpadding="0", cellspacing="0", border="0")
    html.open_tr()
    html.open_td()

    # Sort the filters according to their title
    sorted_filters = sorted([ (f.sort_index, f.title, f)
                              for f in filters if f.available() ])

    # Render filters with double height first (gives a better floating
    # layout), then the single height ones
    for _sort_index, _title, f in sorted_filters:
        if f.double_height():
            show_filter(f)

    for _sort_index, _title, f in sorted_filters:
        if not f.double_height():
            show_filter(f)

    html.close_td()
    html.close_tr()

    html.open_tr()
    html.open_td()
    html.button("search", _("Search"), "submit")
    html.close_td()
    html.close_tr()

    html.close_table()
    html.hidden_fields()
    html.end_form()
    html.close_div()
def page_view():
    """Page entry point: render the view addressed by the "view_name" HTML variable.

    Raises MKGeneralException when the variable is missing or no such view
    exists.
    """
    bi.reset_cache_status() # needed for status icon

    load_views()
    view_name = html.var("view_name")
    # Fixed: compare with "is None" instead of "== None"
    if view_name is None:
        raise MKGeneralException(_("Missing the variable view_name in the URL."))
    view = available_views.get(view_name)
    if not view:
        raise MKGeneralException(_("No view defined with the name '%s'.") % html.attrencode(view_name))

    # Gather the page context which is needed for the "add to visual" popup menu
    # to add e.g. views to dashboards or reports
    datasource = multisite_datasources[view['datasource']]
    context = visuals.get_context_from_uri_vars(datasource['infos'])
    context.update(visuals.get_singlecontext_html_vars(view))
    html.set_page_context(context)

    prepare_painter_options(view_name)
    painter_options.update_from_url(view)

    show_view(view, True, True, True)
def get_painter_columns(painter):
    """Return the livestatus columns needed by a painter.

    The "columns" entry of a painter declaration may either be a static
    list or a callable computing the list lazily; the latter is invoked
    here. Using callable() instead of comparing against the function type
    also covers bound methods and other callables.
    """
    columns = painter["columns"]
    if callable(columns):
        return columns()
    return columns
# Display view with real data. This is *the* function everything
# is about.
def show_view(view, show_heading = False, show_buttons = True,
              show_footer = True, render_function = None, only_count=False,
              all_filters_active=False, limit=None):
    """Process and render a single view with real data.

    view               -- the view specification dict
    show_heading, show_buttons, show_footer -- control the page chrome
    render_function    -- alternative renderer (e.g. used by the mobile
                          GUI); defaults to render_view()
    only_count         -- do not render; only return the number of rows
    all_filters_active -- treat all filters as active (used by reporting)
    limit              -- row limit; None means "determine from config"
    """
    weblib.prepare_display_options(globals())

    # Load from hard painter options > view > hard coded default
    num_columns = painter_options.get("num_columns", view.get("num_columns", 1))
    browser_reload = painter_options.get("refresh", view.get("browser_reload", None))

    force_checkboxes = view.get("force_checkboxes", False)
    show_checkboxes = force_checkboxes or html.var('show_checkboxes', '0') == '1'

    # Get the datasource (i.e. the logical table)
    try:
        datasource = multisite_datasources[view["datasource"]]
    except KeyError:
        if view["datasource"].startswith("mkeventd_"):
            raise MKUserError(None,
                _("The Event Console view '%s' can not be rendered. The Event Console is possibly "
                  "disabled.") % view["name"])
        else:
            raise MKUserError(None,
                _("The view '%s' using the datasource '%s' can not be rendered "
                  "because the datasource does not exist.") % (view["name"], view["datasource"]))

    tablename = datasource["table"]

    # Filters to use in the view
    # In case of single object views, the needed filters are fixed, but not always present
    # in context. In this case, take them from the context type definition.
    use_filters = visuals.filters_of_visual(view, datasource['infos'],
                                            all_filters_active, datasource.get('link_filters', {}))

    # Not all filters are really shown later in show_filter_form(), because filters which
    # have a hardcoded value are not changeable by the user
    show_filters = visuals.visible_filters_of_visual(view, use_filters)

    # FIXME TODO HACK to make grouping single contextes possible on host/service infos
    # Is hopefully cleaned up soon.
    if view['datasource'] in ['hosts', 'services']:
        if html.has_var('hostgroup') and not html.has_var("opthost_group"):
            html.set_var("opthost_group", html.var("hostgroup"))
        if html.has_var('servicegroup') and not html.has_var("optservice_group"):
            html.set_var("optservice_group", html.var("servicegroup"))

    # TODO: Another hack :( Just like the above one: When opening the view "ec_events_of_host",
    # which is of single context "host" using a host name of a unrelated event, the list of
    # events is always empty since the single context filter "host" is sending a "host_name = ..."
    # filter to livestatus which is not matching a "unrelated event". Instead the filter event_host
    # needs to be used.
    # But this may only be done for the unrelated events view. The "ec_events_of_monhost" view still
    # needs the filter. :-/
    # Another idea: We could change these views to non single context views, but then we would not
    # be able to show the buttons to other host related views, which is also bad. So better stick
    # with the current mode.
    if view["datasource"] in [ "mkeventd_events", "mkeventd_history" ] \
       and "host" in view["single_infos"] and view["name"] != "ec_events_of_monhost":
        # Remove the original host name filter
        use_filters = [ f for f in use_filters if f.name != "host" ]

        # Set the value for the event host filter
        if not html.has_var("event_host"):
            html.set_var("event_host", html.var("host"))

    # Now populate the HTML vars with context vars from the view definition. Hard
    # coded default values are treated differently:
    #
    # a) single context vars of the view are enforced
    # b) multi context vars can be overwritten by existing HTML vars
    visuals.add_context_to_uri_vars(view, datasource["infos"], only_count)

    # Check that all needed information for configured single contexts are available
    visuals.verify_single_contexts('views', view, datasource.get('link_filters', {}))

    # Prepare Filter headers for Livestatus
    # TODO: When this is used by the reporting then *all* filters are
    # active. That way the inventory data will always be loaded. When
    # we convert this to the visuals principle the we need to optimize
    # this.
    filterheaders = ""
    all_active_filters = [ f for f in use_filters if f.available() ]
    for filt in all_active_filters:
        header = filt.filter(tablename)
        filterheaders += header

    # Apply the site hint / filter
    if html.var("site"):
        only_sites = [html.var("site")]
    else:
        only_sites = None

    # Prepare limit:
    # We had a problem with stats queries on the logtable where
    # the limit was not applied on the resulting rows but on the
    # lines of the log processed. This resulted in wrong stats.
    # For these datasources we ignore the query limits.
    if limit == None: # Otherwise: specified as argument
        if not datasource.get('ignore_limit', False):
            limit = get_limit()

    # Fork to availability view. We just need the filter headers, since we do not query the normal
    # hosts and service table, but "statehist". This is *not* true for BI availability, though (see later)
    if html.var("mode") == "availability" and (
          "aggr" not in datasource["infos"] or html.var("timeline_aggr")):
        context = visuals.get_context_from_uri_vars(datasource['infos'])
        context.update(visuals.get_singlecontext_html_vars(view))
        return render_availability_page(view, datasource, context, filterheaders, only_sites, limit)

    query = filterheaders + view.get("add_headers", "")

    # Sorting - use view sorters and URL supplied sorters
    if not only_count:
        user_sorters = parse_url_sorters(html.var("sort"))
        if user_sorters:
            sorter_list = user_sorters
        else:
            sorter_list = view["sorters"]

        # Sorter specs whose painter no longer exists are silently dropped
        sorters = [ (multisite_sorters[s[0]],) + s[1:] for s in sorter_list
                    if s[0] in multisite_sorters ]
    else:
        sorters = []

    # Prepare cells of the view
    # Group cells: Are displayed as titles of grouped rows
    # Regular cells: Are displaying information about the rows of the type the view is about
    # Join cells: Are displaying information of a joined source (e.g.service data on host views)
    group_cells = get_group_cells(view)
    cells = get_cells(view)
    # NOTE(review): regular_cells is not referenced again within this
    # function - presumably kept for symmetry/debugging
    regular_cells = get_regular_cells(cells)
    join_cells = get_join_cells(cells)

    # Now compute the list of all columns we need to query via Livestatus.
    # Those are: (1) columns used by the sorters in use, (2) columns use by
    # column- and group-painters in use and - note - (3) columns used to
    # satisfy external references (filters) of views we link to. The last bit
    # is the trickiest. Also compute this list of view options use by the
    # painters
    columns = get_needed_regular_columns(group_cells + cells, sorters, datasource)
    join_columns = get_needed_join_columns(join_cells, sorters, datasource)

    # Fetch data. Some views show data only after pressing [Search]
    if (only_count or (not view.get("mustsearch")) or html.var("filled_in") in ["filter", 'actions', 'confirm', 'painteroptions']):
        # names for additional columns (through Stats: headers)
        add_columns = datasource.get("add_columns", [])

        # tablename may be a function instead of a livestatus tablename
        # In that case that function is used to compute the result.
        # It may also be a tuple. In this case the first element is a function and the second element
        # is a list of argument to hand over to the function together with all other arguments that
        # are passed to query_data().
        if type(tablename) == type(lambda x:None):
            rows = tablename(columns, query, only_sites, limit, all_active_filters)
        elif type(tablename) == tuple:
            func, args = tablename
            rows = func(datasource, columns, add_columns, query, only_sites, limit, *args)
        else:
            rows = query_data(datasource, columns, add_columns, query, only_sites, limit)

        # Now add join information, if there are join columns
        if join_cells:
            do_table_join(datasource, rows, filterheaders, join_cells, join_columns, only_sites)

        # If any painter, sorter or filter needs the information about the host's
        # inventory, then we load it and attach it as column "host_inventory"
        if is_inventory_data_needed(group_cells, cells, sorters, all_active_filters):
            for row in rows:
                if "host_name" in row:
                    row["host_inventory"] = inventory.load_tree(row["host_name"])

        sort_data(rows, sorters)
    else:
        rows = []

    # Apply non-Livestatus filters
    # NOTE: "filter" shadows the builtin here
    for filter in all_active_filters:
        rows = filter.filter_table(rows)

    # BI availability: rendered from the already fetched rows (the non-BI
    # availability case forked off above)
    if html.var("mode") == "availability":
        render_bi_availability(view_title(view), rows)
        return

    # TODO: Use livestatus Stats: instead of fetching rows!
    if only_count:
        for fname, filter_vars in view["context"].items():
            for varname, value in filter_vars.items():
                html.del_var(varname)
        return len(rows)

    # The layout of the view: it can be overridden by several specifying
    # an output format (like json or python). Note: the layout is not
    # always needed. In case of an embedded view in the reporting this
    # field is simply missing, because the rendering is done by the
    # report itself.
    # TODO: CSV export should be handled by the layouts. It cannot
    # be done generic in most cases
    if html.output_format == "html":
        if "layout" in view:
            layout = multisite_layouts[view["layout"]]
        else:
            layout = None
    else:
        if "layout" in view and "csv_export" in multisite_layouts[view["layout"]]:
            multisite_layouts[view["layout"]]["csv_export"](rows, view, group_cells, cells)
            return
        else:
            # Generic layout of export
            layout = multisite_layouts.get(html.output_format)
            if not layout:
                layout = multisite_layouts["json"]

    # Set browser reload
    if browser_reload and display_options.enabled(display_options.R) and not only_count:
        html.set_browser_reload(browser_reload)

    # Until now no single byte of HTML code has been output.
    # Now let's render the view. The render_function will be
    # replaced by the mobile interface for an own version.
    if not render_function:
        render_function = render_view

    render_function(view, rows, datasource, group_cells, cells,
                    show_heading, show_buttons,
                    show_checkboxes, layout, num_columns, show_filters, show_footer,
                    browser_reload)
def get_group_cells(view):
    """Create the cells for the group painters of a view, silently skipping
    painters that do not exist (anymore)."""
    cells = []
    for entry in view["group_painters"]:
        if Cell.painter_exists(entry):
            cells.append(Cell(view, entry))
    return cells
def get_cells(view):
    """Create Cell/JoinCell objects for all painters of a view, silently
    skipping painters that do not exist (anymore)."""
    def _make_cell(entry):
        # Join painter specs get a JoinCell, everything else a plain Cell
        if Cell.is_join_cell(entry):
            return JoinCell(view, entry)
        return Cell(view, entry)

    return [ _make_cell(entry) for entry in view["painters"]
             if Cell.painter_exists(entry) ]
def get_join_cells(cell_list):
    """Return only the join cells of the given cell list.

    The exact type comparison is intentional: only JoinCell instances
    match, complementing get_regular_cells().
    """
    return [ cell for cell in cell_list if type(cell) == JoinCell ]
def get_regular_cells(cell_list):
    """Return only the regular (non-join) cells of the given cell list.

    The exact type comparison is intentional: it excludes JoinCell
    instances, complementing get_join_cells().
    """
    return [ cell for cell in cell_list if type(cell) == Cell ]
def get_needed_regular_columns(cells, sorters, datasource):
    """Compute the livestatus columns to query for the master table."""
    # BI availability needs aggr_tree
    # TODO: wtf? a full reset of the list? Move this far away to a special place!
    if html.var("mode") == "availability" and "aggr" in datasource["infos"]:
        return [ "aggr_tree", "aggr_name", "aggr_group" ]

    columns = columns_of_cells(cells)

    # Columns needed by the sorters: two-element specs refer to regular
    # painters (longer ones to join painters).
    # TODO: Move sorter parsing and logic to something like Cells()
    for entry in sorters:
        if len(entry) == 2:
            columns.update(entry[0]["columns"])

    # Key columns are needed for executing commands, idkey columns for
    # identifying the row
    columns.update(datasource["keys"])
    columns.update(datasource["idkeys"])

    # The site column is implicit and must not be queried explicitly
    columns.discard("site")

    return list(columns)
def get_needed_join_columns(join_cells, sorters, datasource):
    """Compute the livestatus columns to query for the join table."""
    join_columns = columns_of_cells(join_cells)

    # Columns needed by the sorters: specs with more than two elements
    # refer to join painters.
    # TODO: Move sorter parsing and logic to something like Cells()
    for entry in sorters:
        if len(entry) != 2:
            join_columns.update(entry[0]["columns"])

    return list(join_columns)
def is_inventory_data_needed(group_cells, cells, sorters, all_active_filters):
    """Check whether any painter, sorter or filter of the view needs the
    HW/SW inventory data of the hosts."""
    for cell in cells:
        # Inventory painters used as tooltips are recognized by their prefix
        if cell.has_tooltip() and cell.tooltip_painter_name().startswith("inv_"):
            return True

    if any(s[0].get("load_inv") for s in sorters):
        return True

    if any(cell.painter().get("load_inv") for cell in group_cells + cells):
        return True

    return any(filt.need_inventory() for filt in all_active_filters)
def columns_of_cells(cells):
    """Return the union of all livestatus columns needed by the given cells."""
    needed = set()
    for cell in cells:
        needed.update(cell.needed_columns())
    return needed
# Output HTML code of a view. If you add or remove parameters here,
# then please also do this in htdocs/mobile.py!
def render_view(view, rows, datasource, group_painters, painters,
                show_heading, show_buttons,
                show_checkboxes, layout, num_columns, show_filters, show_footer,
                browser_reload):
    """Output the HTML code of a view: heading, context buttons, filter
    form, command form, the rendered data table and the footer.

    The row data has already been fetched by show_view(); this function
    only performs action execution and rendering.
    """
    if html.transaction_valid() and html.do_actions():
        html.set_browser_reload(0)

    # Show heading (change between "preview" mode and full page mode)
    if show_heading:
        # Show/Hide the header with page title, MK logo, etc.
        if display_options.enabled(display_options.H):
            # FIXME: view/layout/module related stylesheets/javascripts e.g. in case of BI?
            html.body_start(view_title(view), stylesheets=["pages","views","status","bi"])
        if display_options.enabled(display_options.T):
            html.top_heading(view_title(view))

    has_done_actions = False
    row_count = len(rows)

    # This is a general flag which makes the command form render when the current
    # view might be able to handle commands. When no commands are possible due missing
    # permissions or datasources without commands, the form is not rendered
    command_form = should_show_command_form(datasource)
    if command_form:
        weblib.init_selection()

    # Is the layout able to display checkboxes?
    can_display_checkboxes = layout.get('checkboxes', False)

    if show_buttons:
        show_combined_graphs_button = \
            ("host" in datasource["infos"] or "service" in datasource["infos"]) and \
            (type(datasource["table"]) == str) and \
            ("host" in datasource["table"] or "service" in datasource["table"])
        show_context_links(view, datasource, show_filters,
                       # Take into account: permissions, display_options
                       row_count > 0 and command_form,
                       # Take into account: layout capabilities
                       can_display_checkboxes and not view.get("force_checkboxes"), show_checkboxes,
                       # Show link to availability
                       datasource["table"] in [ "hosts", "services" ] or "aggr" in datasource["infos"],
                       # Show link to combined graphs
                       show_combined_graphs_button,)
    # User errors in filters
    html.show_user_errors()

    # Filter form
    filter_isopen = view.get("mustsearch") and not html.var("filled_in")
    if display_options.enabled(display_options.F) and len(show_filters) > 0:
        show_filter_form(filter_isopen, show_filters)

    # Actions
    if command_form:
        # If we are currently within an action (confirming or executing), then
        # we display only the selected rows (if checkbox mode is active)
        if show_checkboxes and html.do_actions():
            rows = filter_selected_rows(view, rows, weblib.get_rowselection('view-' + view['name']))

        # There are one shot actions which only want to affect one row, filter the rows
        # by this id during actions
        if html.has_var("_row_id") and html.do_actions():
            rows = filter_by_row_id(view, rows)

        if html.do_actions() and html.transaction_valid(): # submit button pressed, no reload
            try:
                # Create URI with all actions variables removed
                backurl = html.makeuri([], delvars=['filled_in', 'actions'])
                has_done_actions = do_actions(view, datasource["infos"][0], rows, backurl)
            except MKUserError, e:
                html.show_error(e)
                html.add_user_error(e.varname, e)
                if display_options.enabled(display_options.C):
                    show_command_form(True, datasource)

        elif display_options.enabled(display_options.C): # (*not* display open, if checkboxes are currently shown)
            show_command_form(False, datasource)

    # Also execute commands in cases without command form (needed for Python-
    # web service e.g. for NagStaMon)
    elif row_count > 0 and config.user.may("general.act") \
         and html.do_actions() and html.transaction_valid():

        # There are one shot actions which only want to affect one row, filter the rows
        # by this id during actions
        if html.has_var("_row_id") and html.do_actions():
            rows = filter_by_row_id(view, rows)

        try:
            do_actions(view, datasource["infos"][0], rows, '')
        except:
            pass # currently no feed back on webservice

    painter_options.show_form(view)

    # The refreshing content container
    if display_options.enabled(display_options.R):
        html.open_div(id_="data_container")

    if not has_done_actions:
        # Limit exceeded? Show warning
        if display_options.enabled(display_options.W):
            check_limit(rows, get_limit())
        layout["render"](rows, view, group_painters, painters, num_columns,
                         show_checkboxes and not html.do_actions())
        headinfo = "%d %s" % (row_count, _("row") if row_count == 1 else _("rows"))
        if show_checkboxes:
            selected = filter_selected_rows(view, rows, weblib.get_rowselection('view-' + view['name']))
            headinfo = "%d/%s" % (len(selected), headinfo)

        if html.output_format == "html":
            html.javascript("update_headinfo('%s');" % headinfo)

            # The number of rows might have changed to enable/disable actions and checkboxes
            if show_buttons:
                update_context_links(
                    # don't take display_options into account here ('c' is set during reload)
                    row_count > 0 and should_show_command_form(datasource, ignore_display_option=True),
                    # and not html.do_actions(),
                    can_display_checkboxes
                )

        # Play alarm sounds, if critical events have been displayed
        if display_options.enabled(display_options.S) and view.get("play_sounds"):
            play_alarm_sounds()
    else:
        # Always hide action related context links in this situation
        update_context_links(False, False)

    # In multi site setups error messages of single sites do not block the
    # output and raise now exception. We simply print error messages here.
    # In case of the web service we show errors only on single site installations.
    if config.show_livestatus_errors \
       and display_options.enabled(display_options.W) \
       and html.output_format == "html":
        for sitename, info in sites.live().dead_sites().items():
            html.show_error("<b>%s - %s</b><br>%s" %
                (info["site"]["alias"], _('Livestatus error'), info["exception"]))

    # FIXME: It would be cleaner to also include the status icons here
    if display_options.enabled(display_options.R):
        html.close_div()

    if show_footer:
        pid = os.getpid()
        if sites.live().successfully_persisted():
            html.add_status_icon("persist", _("Reused persistent livestatus connection from earlier request (PID %d)") % pid)
        if bi.reused_compilation():
            html.add_status_icon("aggrcomp", _("Reused cached compiled BI aggregations (PID %d)") % pid)

        html.bottom_focuscode()
        if display_options.enabled(display_options.Z):
            html.bottom_footer()

        if display_options.enabled(display_options.H):
            html.body_end()
def check_limit(rows, limit):
    """Check whether the row limit was exceeded and warn the user.

    If *rows* holds more than *limit* entries, a warning is rendered
    (including links for repeating the query with a raised or removed
    limit, if the user has the respective permission), the surplus rows
    are removed from *rows* in place and False is returned. Otherwise
    True is returned and *rows* is left untouched.
    """
    count = len(rows)
    # Fixed: compare with "is not None" instead of "!= None"
    if limit is not None and count >= limit + 1:
        text = _("Your query produced more than %d results. ") % limit

        if html.var("limit", "soft") == "soft" and config.user.may("general.ignore_soft_limit"):
            text += html.render_a(_('Repeat query and allow more results.'),
                                  target="_self",
                                  href=html.makeuri([("limit", "hard")]))
        elif html.var("limit") == "hard" and config.user.may("general.ignore_hard_limit"):
            text += html.render_a(_('Repeat query without limit.'),
                                  target="_self",
                                  href=html.makeuri([("limit", "none")]))

        text += " " + _("<b>Note:</b> the shown results are incomplete and do not reflect the sort order.")
        html.show_warning(text)
        # Truncate in place so the caller's list is shortened as well
        del rows[limit:]
        return False
    return True
def do_table_join(master_ds, master_rows, master_filters, join_cells, join_columns, only_sites):
    """Query the join datasource and attach the matching slave rows to the
    master rows under the artificial column "JOIN".

    The JOIN value of each master row is a dict mapping the join key
    (e.g. the service description) to the matching slave row.
    """
    join_table, join_master_column = master_ds["join"]
    slave_ds = multisite_datasources[join_table]
    join_slave_column = slave_ds["joinkey"]

    # Restrict the slave query to the objects referenced by the join cells
    filter_lines = [ cell.livestatus_filter(join_slave_column) for cell in join_cells ]
    filter_lines.append("Or: %d" % len(filter_lines))
    query = "%s%s\n" % (master_filters, "\n".join(filter_lines))

    rows = query_data(slave_ds, [join_master_column, join_slave_column] + join_columns,
                      [], query, only_sites, None)

    # Group the slave rows by their (site, master key). A new group is
    # started whenever the key changes.
    # NOTE(review): assumes rows with the same master key arrive
    # consecutively from the query - verify against query_data() ordering.
    per_master_entry = {}
    current_key = None
    current_entry = None
    for row in rows:
        master_key = (row["site"], row[join_master_column])
        if master_key != current_key:
            current_key = master_key
            current_entry = {}
            per_master_entry[current_key] = current_entry
        current_entry[row[join_slave_column]] = row

    # Attach the join information to the master rows
    for row in master_rows:
        row["JOIN"] = per_master_entry.get((row["site"], row[join_master_column]), {})
# Set of state names (e.g. "down", "critical") collected while rendering
# the current view; consumed by play_alarm_sounds().
g_alarm_sound_states = set([])

def clear_alarm_sound_states():
    # Reset the collected states before processing a new view
    g_alarm_sound_states.clear()
def save_state_for_playing_alarm_sounds(row):
    """Record the host/service state names of a row for later alarm sound
    playback (see play_alarm_sounds())."""
    if not config.enable_sounds or not config.sounds:
        return

    # TODO: Move this to a generic place. What about -1?
    host_state_map = { 0: "up", 1: "down", 2: "unreachable"}
    service_state_map = { 0: "up", 1: "warning", 2: "critical", 3: "unknown"}

    host_state = row.get("host_hard_state", row.get("host_state"))
    service_state = row.get("service_last_hard_state", row.get("service_state"))

    for state_map, state in ((host_state_map, host_state),
                             (service_state_map, service_state)):
        if state is None:
            continue
        # Unknown numeric states are silently ignored
        state_name = state_map.get(int(state))
        if state_name is not None:
            g_alarm_sound_states.add(state_name)
def play_alarm_sounds():
    """Play the first configured sound whose state occurred in the view."""
    if not config.enable_sounds or not config.sounds:
        return

    url = config.sound_url
    if not url.endswith("/"):
        url += "/"

    for state_name, wav in config.sounds:
        # An empty state name acts as a wildcard matching every state
        if not state_name or state_name in g_alarm_sound_states:
            html.play_sound(url + wav)
            break # only one sound at one time
# How many data rows may the user query?
def get_limit():
    """Return the number of data rows the current user may query.

    Honors the "limit" HTML variable and the user's permissions;
    None means no limit at all.
    """
    requested = html.var("limit", "soft")
    if requested == "hard" and config.user.may("general.ignore_soft_limit"):
        return config.hard_query_limit
    if requested == "none" and config.user.may("general.ignore_hard_limit"):
        return None
    return config.soft_query_limit
def view_title(view):
    """Return the (possibly context dependent) title of a view."""
    return visuals.visual_title('view', view)
def view_optiondial(view, option, choices, help):
    """Render a "dial" button which cycles through the given choices of a
    painter option (e.g. number of columns or refresh interval)."""
    # Darn: The option "refresh" has the name "browser_reload" in the
    # view definition
    if option == "refresh":
        name = "browser_reload"
    else:
        name = option

    # Take either the first option of the choices, the view value or the
    # configured painter option.
    value = painter_options.get(option, dflt=view.get(name, choices[0][0]))

    title = dict(choices).get(value, value)
    html.begin_context_buttons() # just to be sure
    # Remove unicode strings
    choices = [ [c[0], str(c[1])] for c in choices ]
    # The choices list is embedded into the onclick JavaScript via %r
    html.open_div(id_="optiondial_%s" % option,
                  class_=["optiondial", option, "val_%s" % value],
                  title=help,
                  onclick="view_dial_option(this, \'%s\', \'%s\', %r)"
                          % (view["name"], option, choices))
    html.div(title)
    html.close_div()
    html.final_javascript("init_optiondial('optiondial_%s');" % option)
def view_optiondial_off(option):
    """Render a disabled (greyed out) placeholder for an option dial."""
    html.div('', class_=["optiondial", "off", option])
# FIXME: Consolidate with html.toggle_button() rendering functions
def toggler(id, icon, help, onclick, value, hidden = False):
    """Render a two-state toggle button with the given icon and JS handler."""
    html.begin_context_buttons() # just to be sure
    display_style = ' style="display:none"' if hidden else ''
    state_class = "down" if value else "up"
    html.write('<div id="%s_on" title="%s" class="togglebutton %s %s" %s>'
               '<a href="javascript:void(0)" onclick="%s"><img src="images/icon_%s.png"></a></div>' % (
               id, help, icon, state_class, display_style, onclick, icon))
# Will be called when the user presses the upper button, in order
# to persist the new setting - and to make it active before the
# browser reload of the DIV containing the actual status data is done.
def ajax_set_viewoption():
    """AJAX handler: persist a single painter option value of a view.

    Reads view_name, option and value from the HTML variables. The value
    arrives as a string and is converted back to bool or int if possible.
    """
    view_name = html.var("view_name")
    option = html.var("option")
    value = html.var("value")
    value = { 'true' : True, 'false' : False }.get(value, value)

    # Numeric values are transmitted as strings: convert them back.
    # Using value[:1] instead of value[0] avoids an IndexError when the
    # transmitted value is an empty string.
    if type(value) == str and value[:1].isdigit():
        try:
            value = int(value)
        except ValueError:
            pass # e.g. "1x" - keep the string value

    po = PainterOptions(view_name)
    po.load()
    po.set(option, value)
    po.save_to_config()
def show_context_links(thisview, datasource, show_filters,
                       enable_commands, enable_checkboxes, show_checkboxes,
                       show_availability, show_combined_graphs):
    """Render the context button row of a view page.

    Depending on the active display options this emits the filter /
    painter-option / command toggle buttons, the row-selection toggle,
    the column and refresh option dials, links to WATO, PDF reporting
    and related visuals, and the "Edit View" button.
    """
    # html.begin_context_buttons() called automatically by html.context_button()
    # That way if no button is painted we avoid the empty container
    if display_options.enabled(display_options.B):
        execute_hooks('buttons-begin')

    filter_isopen = html.var("filled_in") != "filter" and thisview.get("mustsearch")
    if display_options.enabled(display_options.F):
        if html.var("filled_in") == "filter":
            icon = "filters_set"
            help = _("The current data is being filtered")
        else:
            icon = "filters"
            help = _("Set a filter for refining the shown data")
        html.toggle_button("filters", filter_isopen, icon, help, disabled=not show_filters)

    if display_options.enabled(display_options.D):
        html.toggle_button("painteroptions", False, "painteroptions", _("Modify display options"),
                           disabled=not painter_options.painter_option_form_enabled())

    if display_options.enabled(display_options.C):
        html.toggle_button("commands", False, "commands", _("Execute commands on hosts, services and other objects"),
                           hidden = not enable_commands)
        # Disabled variant shown when commands are not available
        html.toggle_button("commands", False, "commands", "", hidden=enable_commands, disabled=True)

        selection_enabled = (enable_commands and enable_checkboxes) or thisview.get("force_checkboxes")
        if not thisview.get("force_checkboxes"):
            toggler("checkbox", "checkbox", _("Enable/Disable checkboxes for selecting rows for commands"),
                    "location.href='%s';" % html.makeuri([('show_checkboxes', show_checkboxes and '0' or '1')]),
                    show_checkboxes, hidden = True) # not selection_enabled)
        html.toggle_button("checkbox", False, "checkbox", "", hidden=not thisview.get("force_checkboxes"), disabled=True)
        html.javascript('g_selection_enabled = %s;' % ('true' if selection_enabled else 'false'))

    if display_options.enabled(display_options.O):
        if config.user.may("general.view_option_columns"):
            choices = [ [x, "%s" % x] for x in config.view_option_columns ]
            view_optiondial(thisview, "num_columns", choices, _("Change the number of display columns"))
        else:
            view_optiondial_off("num_columns")

        if display_options.enabled(display_options.R) and config.user.may("general.view_option_refresh"):
            choices = [ [x, {0:_("off")}.get(x, str(x) + "s") ] for x in config.view_option_refreshes ]
            view_optiondial(thisview, "refresh", choices, _("Change the refresh rate"))
        else:
            view_optiondial_off("refresh")

    if display_options.enabled(display_options.B):
        # WATO: If we have a host context, then show button to WATO, if permissions allow this
        if html.has_var("host") \
           and config.wato_enabled \
           and config.user.may("wato.use") \
           and (config.user.may("wato.hosts") or config.user.may("wato.seeall")):
            host = html.var("host")
            if host:
                url = wato.link_to_host_by_name(host)
            else:
                url = wato.link_to_folder_by_path(html.var("wato_folder", ""))
            html.context_button(_("WATO"), url, "wato", id="wato",
                                bestof = config.context_buttons_to_show)

        # Button for creating an instant report (if reporting is available)
        if config.reporting_available() and config.user.may("general.reporting"):
            html.context_button(_("Export as PDF"), html.makeuri([], filename="report_instant.py"),
                                "report", class_="context_pdf_export")

        # Buttons to other views, dashboards, etc.
        links = visuals.collect_context_links(thisview)
        for linktitle, uri, icon, buttonid in links:
            html.context_button(linktitle, url=uri, icon=icon, id=buttonid, bestof=config.context_buttons_to_show)

    # Customize/Edit view button
    if display_options.enabled(display_options.E) and config.user.may("general.edit_views"):
        url_vars = [
            ("back", html.requested_url()),
            ("load_name", thisview["name"]),
        ]
        # Editing a foreign view also needs the owning user
        if thisview["owner"] != config.user.id:
            url_vars.append(("load_user", thisview["owner"]))
        url = html.makeuri_contextless(url_vars, filename="edit_view.py")
        html.context_button(_("Edit View"), url, "edit", id="edit", bestof=config.context_buttons_to_show)

    if display_options.enabled(display_options.E):
        if show_availability:
            html.context_button(_("Availability"), html.makeuri([("mode", "availability")]), "availability")
        if show_combined_graphs and config.combined_graphs_available():
            html.context_button(_("Combined graphs"),
                                html.makeuri([
                                    ("single_infos", ",".join(thisview["single_infos"])),
                                    ("datasource", thisview["datasource"]),
                                    ("view_title", view_title(thisview)),
                                ],
                                filename="combined_graphs.py"), "pnp")

    if display_options.enabled(display_options.B):
        execute_hooks('buttons-end')

    html.end_context_buttons()
def update_context_links(enable_command_toggle, enable_checkbox_toggle):
    """Sync the 'commands' and 'checkbox' toggle button states via JavaScript."""
    command_state = 1 if enable_command_toggle else 0
    checkbox_state = 1 if (enable_command_toggle and enable_checkbox_toggle) else 0
    html.javascript("update_togglebutton('commands', %d);" % command_state)
    html.javascript("update_togglebutton('checkbox', %d);" % checkbox_state)
def ajax_count_button():
    """AJAX endpoint: record one click on a context button.

    All stored counts decay by 5% on every hit so that the "best of"
    button selection favours recently used buttons.
    """
    button_id = html.var("id")
    counts = config.user.load_file("buttoncounts", {})
    for key in counts:
        counts[key] *= 0.95
    counts.setdefault(button_id, 0)
    counts[button_id] += 1
    config.user.save_file("buttoncounts", counts)
# Retrieve data via livestatus, convert into list of dicts,
# prepare row-function needed for painters
# datasource: the datasource object as defined in plugins/views/datasources.py
# columns: the list of livestatus columns to query
# add_columns: list of columns the datasource is known to add itself
#  (couldn't we get rid of this parameter by looking that up ourselves?)
# add_headers: additional livestatus headers to add
# only_sites: list of sites the query is limited to
# limit: maximum number of data rows to query
def query_data(datasource, columns, add_columns, add_headers,
               only_sites = None, limit = None, tablename=None):
    """Query livestatus for the datasource and return the rows as dicts."""
    if only_sites is None:
        only_sites = []

    if tablename is None:  # fixed: was "== None"
        tablename = datasource["table"]

    # Work on a copy: the original appended state columns to the caller's
    # list in place, leaking extra columns back to the caller.
    columns = list(columns)

    add_headers += datasource.get("add_headers", "")
    merge_column = datasource.get("merge_by")
    if merge_column:
        columns = [merge_column] + columns

    # Most layouts need current state of object in order to
    # choose background color - even if no painter for state
    # is selected. Make sure those columns are fetched. This
    # must not be done for the table 'log' as it cannot correctly
    # distinguish between service_state and host_state
    if "log" not in datasource["infos"]:
        state_columns = []
        if "service" in datasource["infos"]:
            state_columns += [ "service_has_been_checked", "service_state" ]
        if "host" in datasource["infos"]:
            state_columns += [ "host_has_been_checked", "host_state" ]
        for c in state_columns:
            if c not in columns:
                columns.append(c)

    auth_domain = datasource.get("auth_domain", "read")

    # Remove columns which are implicitely added by the datasource
    columns = [ c for c in columns if c not in add_columns ]
    query = "GET %s\n" % tablename
    rows = do_query_data(query, columns, add_columns, merge_column,
                         add_headers, only_sites, limit, auth_domain)

    # Datasource may have optional post processing function to filter out rows
    post_process_func = datasource.get("post_process")
    if post_process_func:
        return post_process_func(rows)
    else:
        return rows
def do_query_data(query, columns, add_columns, merge_column,
                  add_headers, only_sites, limit, auth_domain):
    """Execute a prepared livestatus GET query and return rows as dicts.

    Sets per-query connection state (site prefix, limit, only-sites,
    auth domain), runs the query, resets the state again, optionally
    merges rows from several sites and converts each row list into a
    dictionary keyed by column name (with a leading "site" column).
    """
    query += "Columns: %s\n" % " ".join(columns)
    query += add_headers
    sites.live().set_prepend_site(True)

    if limit != None:
        sites.live().set_limit(limit + 1) # + 1: We need to know, if limit is exceeded
    else:
        sites.live().set_limit(None)

    # Optionally echo the raw query to the page for debugging
    if config.debug_livestatus_queries \
       and html.output_format == "html" and display_options.enabled(display_options.W):
        html.open_div(class_=["livestatus", "message"])
        html.tt(query.replace('\n', '<br>\n'))
        html.close_div()

    if only_sites:
        sites.live().set_only_sites(only_sites)
    sites.live().set_auth_domain(auth_domain)
    data = sites.live().query(query)
    # Restore the connection defaults so later queries are unaffected.
    sites.live().set_auth_domain("read")
    sites.live().set_only_sites(None)
    sites.live().set_prepend_site(False)
    sites.live().set_limit() # removes limit

    if merge_column:
        data = merge_data(data, columns)

    # convert lists-rows into dictionaries.
    # performance, but makes live much easier later.
    columns = ["site"] + columns + add_columns
    rows = [ dict(zip(columns, row)) for row in data ]
    return rows
# Merge all data rows with different sites but the same value
# in merge_column. We require that all column names are prefixed
# with the tablename. The column with the merge key is required
# to be the *second* column (right after the site column)
def merge_data(data, columns):
    """Merge rows from different sites that share the same merge key.

    data:    list of rows, each shaped [site, mergekey, ...].
    columns: livestatus column names (excluding the leading site column),
             each prefixed with its table name.
    Returns the merged rows sorted by merge key.
    """
    merged = {}
    mergefuncs = [lambda a, b: ""] # site column is not merged

    def worst_service_state(a, b):
        # CRIT (2) dominates; otherwise the numerically larger state wins
        if a == 2 or b == 2:
            return 2
        return max(a, b)

    def worst_host_state(a, b):
        # DOWN (1) dominates; otherwise the numerically larger state wins
        if a == 1 or b == 1:
            return 1
        return max(a, b)

    for c in columns:
        tablename, col = c.split("_", 1)
        if col.startswith("num_") or col.startswith("members"):
            mergefunc = lambda a, b: a + b
        elif col.startswith("worst_service"):
            # BUG FIX: the original did "return worst_service_state" here
            # (and "return worst_host_state" below), returning the helper
            # function from merge_data instead of registering it as the
            # merge function for this column.
            mergefunc = worst_service_state
        elif col.startswith("worst_host"):
            mergefunc = worst_host_state
        else:
            mergefunc = lambda a, b: a
        mergefuncs.append(mergefunc)

    for row in data:
        mergekey = row[1]
        if mergekey in merged:
            oldrow = merged[mergekey]
            merged[mergekey] = [ f(a, b) for f, a, b in zip(mergefuncs, oldrow, row) ]
        else:
            merged[mergekey] = row

    # return all rows sorted according to merge key
    # (sorted() instead of keys().sort() also works on Python 3)
    return [ merged[k] for k in sorted(merged.keys()) ]
# Sort data according to list of sorters. The tablename
# is needed in order to handle different column names
# for same objects (e.g. host_name in table services and
# simply name in table hosts)
def sort_data(data, sorters):
    """Sort the row list *data* in place according to *sorters*.

    Each sorter is a tuple (sorter_spec, negate[, joinkey]); sorter_spec
    carries the "cmp" function and optional "args". Uses a Python 2
    style cmp-based sort.
    """
    if len(sorters) == 0:
        return

    # Handle case where join columns are not present for all rows
    def save_compare(compfunc, row1, row2, args):
        # Missing join rows sort before present ones.
        if row1 == None and row2 == None:
            return 0
        elif row1 == None:
            return -1
        elif row2 == None:
            return 1
        else:
            if args:
                return compfunc(row1, row2, *args)
            else:
                return compfunc(row1, row2)

    sort_cmps = []
    for s in sorters:
        cmpfunc = s[0]["cmp"]
        negate = -1 if s[1] else 1
        if len(s) > 2:
            joinkey = s[2] # e.g. service description
        else:
            joinkey = None
        sort_cmps.append((cmpfunc, negate, joinkey, s[0].get('args')))

    def multisort(e1, e2):
        # Apply each sorter in order until one yields a decision.
        for func, neg, joinkey, args in sort_cmps:
            if joinkey: # Sorter for join column, use JOIN info
                c = neg * save_compare(func, e1["JOIN"].get(joinkey), e2["JOIN"].get(joinkey), args)
            else:
                if args:
                    c = neg * func(e1, e2, *args)
                else:
                    c = neg * func(e1, e2)
            if c != 0: return c
        return 0 # equal

    # NOTE: cmp-function sort - Python 2 only.
    data.sort(multisort)
def sorters_of_datasource(ds_name):
    """Return all sorters usable with datasource *ds_name*."""
    return allowed_for_datasource(multisite_sorters, ds_name)
def painters_of_datasource(ds_name):
    """Return all painters usable with datasource *ds_name*."""
    return allowed_for_datasource(multisite_painters, ds_name)
def join_painters_of_datasource(ds_name):
    """Return painters usable as join painters for datasource *ds_name*.

    These are the painters allowed for the join target datasource, minus
    those that are already available on the datasource itself.
    """
    ds = multisite_datasources[ds_name]
    if "join" not in ds:
        return {} # no joining with this datasource

    # Get the painters allowed for the join "source" and "target"
    own_painters = painters_of_datasource(ds_name)
    candidates = allowed_for_datasource(multisite_painters, ds['join'][0])

    # Filter out painters associated with the "join source" datasource
    return dict((name, painter)
                for name, painter in candidates.items()
                if name not in own_painters)
# Filters a list of sorters or painters and decides which of
# those are available for a certain data source
def allowed_for_datasource(collection, datasourcename):
    """Return the subset of *collection* whose required infos the datasource provides."""
    datasource = multisite_datasources[datasourcename]
    infos_available = set(datasource["infos"])
    add_columns = datasource.get("add_columns", [])

    allowed = {}
    for name, item in collection.items():
        if infos_needed_by_painter(item, add_columns).issubset(infos_available):
            allowed[name] = item
    return allowed
def infos_needed_by_painter(painter, add_columns=None):
    """Return the set of info names (column name prefixes) a painter requires.

    Columns added by the datasource itself and the "site" column do not
    require any info.
    """
    if add_columns is None:
        add_columns = []
    return set(column.split("_", 1)[0]
               for column in get_painter_columns(painter)
               if column != "site" and column not in add_columns)
# Returns either the valuespec of the painter parameters or None
def get_painter_params_valuespec(painter):
    """Return the painter's parameter valuespec (calling it if lazy), or None."""
    if "params" not in painter:
        return None
    params = painter["params"]
    # A plain function is called lazily to build the valuespec;
    # anything else is returned as-is.
    if isinstance(params, type(lambda: None)):
        return params()
    return params
def painter_choices(painters, add_params=False):
    """Return (name, title[, valuespec]) choice tuples, sorted by title."""
    choices = []
    for name, painter in painters.items():
        title = get_painter_title_for_choices(painter)
        # Add the optional valuespec for painter parameters
        if add_params and "params" in painter:
            choices.append((name, title, get_painter_params_valuespec(painter)))
        else:
            choices.append((name, title))
    return sorted(choices, key=lambda choice: choice[1])
def get_painter_title_for_choices(painter):
    """Return '<info titles>: <painter title>' for use in choice dropdowns."""
    info_names = sorted(infos_needed_by_painter(painter))
    info_title = "/".join(visuals.infos[info_name]["title_plural"]
                          for info_name in info_names)

    # TODO: Cleanup the special case for sites. How? Add an info for it?
    if painter["columns"] == ["site"]:
        info_title = _("Site")

    return "%s: %s" % (info_title, painter["title"])
def painter_choices_with_params(painters):
    """Like painter_choices(), but include the parameter valuespecs."""
    return painter_choices(painters, add_params=True)
#.
# .--Commands------------------------------------------------------------.
# | ____ _ |
# | / ___|___ _ __ ___ _ __ ___ __ _ _ __ __| |___ |
# | | | / _ \| '_ ` _ \| '_ ` _ \ / _` | '_ \ / _` / __| |
# | | |__| (_) | | | | | | | | | | | (_| | | | | (_| \__ \ |
# | \____\___/|_| |_| |_|_| |_| |_|\__,_|_| |_|\__,_|___/ |
# | |
# +----------------------------------------------------------------------+
# | Functions dealing with external commands send to the monitoring |
# | core. The commands themselves are defined as a plugin. Shipped |
# | command definitions are in plugins/views/commands.py. |
# | We apologize for the fact that we one time speak of "commands" and |
# | the other time of "action". Both is the same here... |
# '----------------------------------------------------------------------'
# Checks whether or not this view handles commands for the current user
# When it does not handle commands the command tab, command form, row
# selection and processing commands is disabled.
def should_show_command_form(datasource, ignore_display_option=False):
    """Decide whether the command form and row selection are shown."""
    if not ignore_display_option and display_options.disabled(display_options.C):
        return False
    if not config.user.may("general.act"):
        return False

    # What commands are available depends on the Livestatus table we
    # deal with. If a data source provides information about more
    # than one table, (like services datasource also provide host
    # information) then the first info is the primary table. So 'what'
    # will be one of "host", "service", "command" or "downtime".
    what = datasource["infos"][0]
    return any(what in command["tables"] and config.user.may(command["permission"])
               for command in multisite_commands)
def show_command_form(is_open, datasource):
    """Render the (initially possibly hidden) command form for a view.

    Shows one form section per command applicable to the datasource's
    primary info, grouped by command group and ordered by the group's
    sort index.
    """
    # What commands are available depends on the Livestatus table we
    # deal with. If a data source provides information about more
    # than one table, (like services datasource also provide host
    # information) then the first info is the primary table. So 'what'
    # will be one of "host", "service", "command" or "downtime".
    what = datasource["infos"][0]

    html.open_div(id_="commands",
                  class_=["view_form"],
                  style="display:none;" if not is_open else None)
    html.begin_form("actions")
    html.hidden_field("_do_actions", "yes")
    html.hidden_field("actions", "yes")
    html.hidden_fields() # set all current variables, exception action vars

    # Show command forms, grouped by (optional) command group
    by_group = {}
    for command in multisite_commands:
        if what in command["tables"] and config.user.may(command["permission"]):
            # Some special commands can be shown on special views using this option.
            # It is currently only used in custom views, not shipped with check_mk.
            if command.get('only_view') and html.var('view_name') != command['only_view']:
                continue
            group = command.get("group", "various")
            by_group.setdefault(group, []).append(command)

    for group_ident, group_commands in sorted(by_group.items(),
                                              key=lambda x: multisite_command_groups[x[0]]["sort_index"]):
        forms.header(multisite_command_groups[group_ident]["title"], narrow=True)
        for command in group_commands:
            forms.section(command["title"])
            command["render"]()

    forms.end()
    html.end_form()
    html.close_div()
# Examine the current HTML variables in order determine, which
# command the user has selected. The fetch ids from a data row
# (host name, service description, downtime/commands id) and
# construct one or several core command lines and a descriptive
# title.
def core_command(what, row, row_nr, total_rows):
    """Build the core command(s) for one data row.

    Returns (commands, title, executor) for the command selected via the
    current HTML variables. Raises MKUserError if no command matched.
    """
    host = row.get("host_name")
    descr = row.get("service_description")

    if what == "host":
        spec = host
        cmdtag = "HOST"
    elif what == "service":
        spec = "%s;%s" % (host, descr)
        cmdtag = "SVC"
    else:
        # Downtime/comment rows: address by their id; the tag depends on
        # whether the entry belongs to a service or a host.
        spec = row.get(what + "_id")
        if descr:
            cmdtag = "SVC"
        else:
            cmdtag = "HOST"

    commands = None
    title = None
    # Call all command actions. The first one that detects
    # itself to be executed (by examining the HTML variables)
    # will return a command to execute and a title for the
    # confirmation dialog.
    for cmd in multisite_commands:
        if config.user.may(cmd["permission"]):
            # Does the command need information about the total number of rows
            # and the number of the current row? Then specify that
            if cmd.get("row_stats"):
                result = cmd["action"](cmdtag, spec, row, row_nr, total_rows)
            else:
                result = cmd["action"](cmdtag, spec, row)

            if result:
                executor = cmd.get("executor", command_executor_livestatus)
                commands, title = result
                break

    # Use the title attribute to determine if a command exists, since the list
    # of commands might be empty (e.g. in case of "remove all downtimes" where)
    # no downtime exists in a selection of rows.
    if not title:
        raise MKUserError(None, _("Sorry. This command is not implemented."))

    # Some commands return lists of commands, others
    # just return one basic command. Convert those
    if type(commands) != list:
        commands = [commands]

    return commands, title, executor
def command_executor_livestatus(command, site):
    # Default command executor: send the external command to the core of
    # the given site, prefixed with the current unix timestamp as the
    # command protocol requires.
    sites.live().command("[%d] %s" % (int(time.time()), command), site)
# make gettext localize some magic texts
# (these calls have no runtime effect beyond marking the strings for
# extraction by the translation tooling)
_("services")
_("hosts")
_("commands")
_("downtimes")
_("aggregations")
# Returns:
# True  -> Actions have been done
# False -> No actions done because now rows selected
# [...] new rows -> Rows actions (shall/have) be performed on
def do_actions(view, what, action_rows, backurl):
    """Execute the selected command on all *action_rows* after confirmation.

    Deduplicates identical (site, command) pairs so one command is never
    sent twice, then shows a result message with back-links.
    """
    if not config.user.may("general.act"):
        html.show_error(_("You are not allowed to perform actions. "
                          "If you think this is an error, please ask "
                          "your administrator grant you the permission to do so."))
        return False # no actions done

    if not action_rows:
        message = _("No rows selected to perform actions for.")
        if html.output_format == "html": # sorry for this hack
            message += '<br><a href="%s">%s</a>' % (backurl, _('Back to view'))
        html.show_error(message)
        return False # no actions done

    command = None
    # Only the title/executor of the first row are needed for the dialog.
    title, executor = core_command(what, action_rows[0], 0, len(action_rows))[1:3] # just get the title and executor
    if not html.confirm(_("Do you really want to %(title)s the following %(count)d %(what)s?") %
                        { "title" : title, "count" : len(action_rows), "what" : visuals.infos[what]["title_plural"], }, method = 'GET'):
        return False

    count = 0
    already_executed = set([])
    for nr, row in enumerate(action_rows):
        core_commands, title, executor = core_command(what, row, nr, len(action_rows))

        for command_entry in core_commands:
            site = row.get("site") # site is missing for BI rows (aggregations can spawn several sites)
            if (site, command_entry) not in already_executed:
                # Some command functions return the information about the site per-command (e.g. for BI)
                if type(command_entry) == tuple:
                    site, command = command_entry
                else:
                    command = command_entry

                # Livestatus expects byte strings (Python 2)
                if type(command) == unicode:
                    command = command.encode("utf-8")

                executor(command, site)
                already_executed.add((site, command_entry))
                count += 1

    message = None
    if command:
        message = _("Successfully sent %d commands.") % count
        if config.debug:
            message += _("The last one was: <pre>%s</pre>") % command
    elif count == 0:
        message = _("No matching data row. No command sent.")

    if message:
        if html.output_format == "html": # sorry for this hack
            message += '<br><a href="%s">%s</a>' % (backurl, _('Back to view'))
            if html.var("show_checkboxes") == "1":
                html.del_var("selection")
                weblib.selection_id()
                backurl += "&selection=" + html.var("selection")
                message += '<br><a href="%s">%s</a>' % (backurl, _('Back to view with checkboxes reset'))
        if html.var("_show_result") == "0":
            html.immediate_browser_redirect(0.5, backurl)
        html.message(message)

    return True
def filter_by_row_id(view, rows):
    """Return a list with the single row matching the _row_id variable, or []."""
    wanted_row_id = html.var("_row_id")
    matches = [row for row in rows if row_id(view, row) == wanted_row_id]
    return matches[:1]
def filter_selected_rows(view, rows, selected_ids):
    """Return the subset of *rows* whose row id is contained in *selected_ids*."""
    return [row for row in rows if row_id(view, row) in selected_ids]
def get_context_link(user, viewname):
    """Return the URL to *viewname* if it is available, otherwise None."""
    if viewname not in available_views:
        return None
    return "view.py?view_name=%s" % viewname
def ajax_export():
    """AJAX endpoint: dump all available views, anonymized and public, as repr text."""
    load_views()
    for view in available_views.values():
        view["owner"] = ''
        view["public"] = True
    html.write(pprint.pformat(available_views))
def get_view_by_name(view_name):
    """Load all views and return the definition of *view_name* (KeyError if unknown)."""
    load_views()
    return available_views[view_name]
#.
# .--Plugin Helpers------------------------------------------------------.
# | ____ _ _ _ _ _ |
# | | _ \| |_ _ __ _(_)_ __ | | | | ___| |_ __ ___ _ __ ___ |
# | | |_) | | | | |/ _` | | '_ \ | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
# | | __/| | |_| | (_| | | | | | | _ | __/ | |_) | __/ | \__ \ |
# | |_| |_|\__,_|\__, |_|_| |_| |_| |_|\___|_| .__/ \___|_| |___/ |
# | |___/ |_| |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------'
def register_command_group(ident, title, sort_index):
    """Register a command group shown as its own section in the command form."""
    multisite_command_groups[ident] = {
        "title"      : title,
        "sort_index" : sort_index,
    }
def register_hook(hook, func):
    """Register *func* to run when *hook* fires; duplicates are ignored."""
    funcs = view_hooks.setdefault(hook, [])
    if func not in funcs:
        funcs.append(func)
def execute_hooks(hook):
    """Run all functions registered for *hook*.

    Failures are deliberately swallowed in production (best effort);
    with config.debug enabled they are re-raised as MKGeneralException
    including the traceback.
    """
    for hook_func in view_hooks.get(hook, []):
        try:
            hook_func()
        except:
            if config.debug:
                raise MKGeneralException(_('Problem while executing hook function %s in hook %s: %s')
                                         % (hook_func.__name__, hook, traceback.format_exc()))
            else:
                pass
def join_row(row, cell):
    """Return the joined sub-row for a JoinCell, or the row itself otherwise."""
    if type(cell) != JoinCell:
        return row
    return row.get("JOIN", {}).get(cell.join_service())
def url_to_view(row, view_name):
    """Build a URL to *view_name* with its context filled from *row*.

    Returns None when linking is disabled via display options or the
    target view is not permitted for the current user.
    """
    if display_options.disabled(display_options.I):
        return None

    view = permitted_views().get(view_name)
    if view:
        # Get the context type of the view to link to, then get the parameters of this
        # context type and try to construct the context from the data of the row
        url_vars = []
        datasource = multisite_datasources[view['datasource']]
        for info_key in datasource['infos']:
            if info_key in view['single_infos']:
                # Determine which filters (their names) need to be set
                # for specifying in order to select correct context for the
                # target view.
                for filter_name in visuals.info_params(info_key):
                    filter_object = visuals.get_filter(filter_name)
                    # Get the list of URI vars to be set for that filter
                    new_vars = filter_object.variable_settings(row)
                    url_vars += new_vars

        # See get_link_filter_names() comment for details
        for src_key, dst_key in visuals.get_link_filter_names(view, datasource['infos'],
                                                              datasource.get('link_filters', {})):
            try:
                url_vars += visuals.get_filter(src_key).variable_settings(row)
            except KeyError:
                pass

            try:
                url_vars += visuals.get_filter(dst_key).variable_settings(row)
            except KeyError:
                pass

        # Some special handling for the site filter which is meant as optional hint
        # Always add the site filter var when some useful information is available
        add_site_hint = True
        for filter_key in datasource.get('multiple_site_filters', []):
            if filter_key in dict(url_vars):
                add_site_hint = False

        # Hack for servicedesc view which is meant to show all services with the given
        # description: Don't add the site filter for this view.
        if view_name == "servicedesc":
            add_site_hint = False

        if add_site_hint and row.get('site'):
            url_vars.append(('site', row['site']))

        # Propagate the current display options to the target view
        do = html.var("display_options")
        if do:
            url_vars.append(("display_options", do))

        filename = "mobile_view.py" if html.mobile else "view.py"
        return filename + "?" + html.urlencode_vars([("view_name", view_name)] + url_vars)
def link_to_view(content, row, view_name):
    """Wrap *content* in a link to *view_name* contextualized by *row*."""
    if display_options.disabled(display_options.I):
        return content

    url = url_to_view(row, view_name)
    if not url:
        return content
    return "<a href=\"%s\">%s</a>" % (url, content)
def docu_link(topic, text):
    """Return an HTML link to the configured documentation URL for *topic*."""
    url = config.doculink_urlformat % topic
    return '<a href="%s" target="_blank">%s</a>' % (url, text)
# Calculates a uniq id for each data row which identifies the current
# row accross different page loadings.
def row_id(view, row):
    """Return a stable SHA256 hex id for *row* based on the datasource's idkeys."""
    idkeys = multisite_datasources[view['datasource']]['idkeys']
    key = u''.join(u'~%s' % row[col] for col in idkeys)
    return hashlib.sha256(key.encode('utf-8')).hexdigest()
def paint_stalified(row, text):
    """Return (css_class, text); the class "stale" greys out stale objects."""
    css = "stale" if is_stale(row) else ""
    return css, text
def substract_sorters(base, remove):
    """Remove each sorter in *remove* from *base*, regardless of direction.

    A sorter is a (name, reverse[, joinkey]) tuple; an entry in *remove*
    also cancels the opposite-direction entry in *base*. Mutates *base*
    in place. (Indexing is used instead of unpacking since join sorters
    are 3-tuples.)
    """
    for s in remove:
        if s in base:
            base.remove(s)
            continue
        flipped = (s[0], not s[1])
        if flipped in base:
            base.remove(flipped)
def parse_url_sorters(sort):
    """Parse the "sort" URL parameter into sorter tuples.

    Each comma separated entry is a sorter name, optionally prefixed with
    '-' for descending order and optionally suffixed with '~<join_index>'
    for join column sorters. Returns a list of (name, reverse) or
    (name, reverse, join_index) tuples.
    """
    sorters = []
    if not sort:
        return sorters

    for part in sort.split(','):
        if '~' in part:
            name, join_index = part.split('~', 1)
            sorters.append((name.replace('-', ''), name.startswith('-'), join_index))
        else:
            sorters.append((part.replace('-', ''), part.startswith('-')))
    return sorters
def get_sorter_name_of_painter(painter_name):
    """Return the sorter name belonging to a painter, or None if unsortable."""
    painter = multisite_painters[painter_name]
    if 'sorter' in painter:
        return painter['sorter']
    if painter_name in multisite_sorters:
        return painter_name
    return None
def get_primary_sorter_order(view, painter_name):
    """Return 'asc'/'desc' if the painter is the user's primary sorter, else ''."""
    sorter_name = get_sorter_name_of_painter(painter_name)
    group_sort, user_sort, view_sort = get_separated_sorters(view)

    if not user_sort:
        return ''
    if user_sort[0] == (sorter_name, False):
        return 'asc'
    if user_sort[0] == (sorter_name, True):
        return 'desc'
    return ''
def get_separated_sorters(view):
    """Split the effective sorters into (group_sort, user_sort, view_sort)."""
    group_sort = [ (get_sorter_name_of_painter(p[0]), False)
                   for p in view['group_painters']
                   if p[0] in multisite_painters
                      and get_sorter_name_of_painter(p[0]) is not None ]
    # NOTE(review): s[0] is a sorter *name* while group_sort contains
    # (name, False) tuples, so this membership test can never match and
    # view_sort always equals view['sorters'] - looks like a latent
    # upstream quirk; confirm before changing.
    view_sort  = [ s for s in view['sorters'] if not s[0] in group_sort ]

    # Get current url individual sorters. Parse the "sort" url parameter,
    # then remove the group sorters. The left sorters must be the user
    # individual sorters for this view.
    # Then remove the user sorters from the view sorters
    user_sort = parse_url_sorters(html.var('sort'))

    substract_sorters(user_sort, group_sort)
    substract_sorters(view_sort, user_sort)

    return group_sort, user_sort, view_sort
# The Group-value of a row is used for deciding whether
# two rows are in the same group or not
def group_value(row, group_cells):
    """Compute a hashable grouping key for *row* from the group cells."""
    group = []
    for cell in group_cells:
        painter = cell.painter()
        groupvalfunc = painter.get("groupby")
        if groupvalfunc:
            # A painter may define a dedicated groupby function,
            # optionally parameterized with "args".
            if "args" in painter:
                group.append(groupvalfunc(row, *painter["args"]))
            else:
                group.append(groupvalfunc(row))
        else:
            # Fall back to the raw values of the painter's columns
            group.extend(row[c] for c in get_painter_columns(painter) if c in row)
    return create_dict_key(group)
def create_dict_key(value):
    """Recursively convert *value* into a hashable key (lists/dicts become tuples)."""
    kind = type(value)
    if kind in (list, tuple):
        return tuple(map(create_dict_key, value))
    if kind == dict:
        return tuple((k, create_dict_key(v)) for k, v in sorted(value.items()))
    return value
def get_host_tags(row):
    """Extract the host's TAGS string from a livestatus row.

    Prefers the pre-parsed "host_custom_variables" dict and falls back
    to the parallel name/value lists. Returns "" when no TAGS custom
    variable is present.
    """
    custom_vars = row.get("host_custom_variables")
    if type(custom_vars) == dict:
        return custom_vars.get("TAGS", "")

    names = row.get("host_custom_variable_names")
    if type(names) != list:
        return ""

    for name, value in zip(names, row["host_custom_variable_values"]):
        if name == "TAGS":
            return value
    return ""
# Get the definition of a tag group
# (module-level cache, filled lazily on first lookup)
g_taggroups_by_id = {}

def get_tag_group(tgid):
    """Return (title, choices) of tag group *tgid*, or a N/A placeholder."""
    if not g_taggroups_by_id:
        # Build the cache once from the configured host tag groups
        for entry in config.host_tag_groups():
            g_taggroups_by_id[entry[0]] = (entry[1], entry[2])

    return g_taggroups_by_id.get(tgid, (_("N/A"), []))
def get_custom_var(row, key):
    """Return the value of custom variable *key* from a row, or "" if unset.

    Returns the first match so duplicate names behave deterministically.
    """
    for name, value in zip(row["custom_variable_names"],
                           row["custom_variable_values"]):
        if name == key:
            return value
    return ""
def is_stale(row):
    """Return True if the row's (service or host) staleness exceeds the configured threshold."""
    staleness = row.get('service_staleness', row.get('host_staleness', 0))
    return staleness >= config.staleness_threshold
def cmp_insensitive_string(v1, v2):
    """Compare two strings case-insensitively, tie-broken case-sensitively."""
    result = cmp(v1.lower(), v2.lower())
    if result != 0:
        return result
    # force a strict order in case of equal spelling but different case!
    return cmp(v1, v2)
# Sorting
def cmp_ip_address(column, r1, r2):
    """Compare two rows by an IP address column, numerically when possible.

    Dotted quads compare as integer tuples; anything unparsable (host
    names, IPv6, empty values) falls back to comparing the raw string.
    """
    def split_ip(ip):
        try:
            return tuple(int(part) for part in ip.split('.'))
        except (ValueError, AttributeError):
            # Narrowed from a bare except, which also hid real errors
            # (e.g. KeyboardInterrupt). Non-numeric parts raise
            # ValueError, non-string values raise AttributeError.
            return ip
    v1, v2 = split_ip(r1.get(column, '')), split_ip(r2.get(column, ''))
    return cmp(v1, v2)
def cmp_simple_string(column, r1, r2):
    """Compare two rows by a plain string column, case-insensitively."""
    value1 = r1.get(column, '')
    value2 = r2.get(column, '')
    return cmp_insensitive_string(value1, value2)
def cmp_num_split(column, r1, r2):
    """Compare two rows by a column using natural (numeric-aware) ordering."""
    return utils.cmp_num_split(r1[column].lower(), r2[column].lower())
def cmp_string_list(column, r1, r2):
    """Compare two rows by a list-valued column, joined into one string."""
    joined1 = ''.join(r1.get(column, []))
    joined2 = ''.join(r2.get(column, []))
    return cmp_insensitive_string(joined1, joined2)
def cmp_simple_number(column, r1, r2):
    """Compare two rows by a numeric column (missing values compare as None)."""
    return cmp(r1.get(column), r2.get(column))
def cmp_custom_variable(r1, r2, key, cmp_func):
    # Compare two rows by the value of custom variable *key*.
    # NOTE(review): cmp_func is accepted but never used - the comparison
    # is always the plain builtin cmp. Possibly intentional, possibly a
    # latent bug; confirm against the callers before changing.
    return cmp(get_custom_var(r1, key), get_custom_var(r2, key))
def cmp_service_name_equiv(r):
    """Map well-known Check_MK service names to sort ranks (lower sorts first)."""
    ranks = {
        "Check_MK"                 : -6,
        "Check_MK Agent"           : -5,
        "Check_MK Discovery"       : -4,
        "Check_MK inventory"       : -3, # FIXME: Remove old name one day
        "Check_MK HW/SW Inventory" : -2,
    }
    return ranks.get(r, 0)
def declare_simple_sorter(name, title, column, func):
    """Register a sorter comparing a single livestatus column with *func*."""
    multisite_sorters[name] = {
        "title"   : title,
        "columns" : [column],
        "cmp"     : lambda r1, r2: func(column, r1, r2),
    }
def declare_1to1_sorter(painter_name, func, col_num = 0, reverse = False):
    """Register a sorter that reuses a painter's title and columns.

    func:     cmp-style comparison taking (column, row1, row2).
    col_num:  index of the painter column to compare by.
    reverse:  swap the rows to invert the sort direction.
    Returns the painter name, which doubles as the sorter name.
    """
    multisite_sorters[painter_name] = {
        "title"   : multisite_painters[painter_name]['title'],
        "columns" : multisite_painters[painter_name]['columns'],
    }
    # NOTE: the lambdas look up multisite_painters at call time, so a
    # later redefinition of the painter also changes the sorter.
    if not reverse:
        multisite_sorters[painter_name]["cmp"] = \
            lambda r1, r2: func(multisite_painters[painter_name]['columns'][col_num], r1, r2)
    else:
        multisite_sorters[painter_name]["cmp"] = \
            lambda r1, r2: func(multisite_painters[painter_name]['columns'][col_num], r2, r1)
    return painter_name
# Ajax call for fetching parts of the tree
def ajax_inv_render_tree():
    """AJAX endpoint: render one subtree of the HW/SW inventory tree.

    Request variables: host, path, optionally treeid (selects a delta
    tree) and show_internal_tree_paths.
    """
    hostname = html.var("host")
    invpath = html.var("path")
    tree_id = html.var("treeid", "")
    if html.var("show_internal_tree_paths"):
        show_internal_tree_paths = True
    else:
        show_internal_tree_paths = False

    if tree_id:
        struct_tree = inventory.load_delta_tree(hostname, int(tree_id[1:]))
        tree_renderer = DeltaNodeRenderer(hostname, tree_id, invpath)
    else:
        struct_tree = inventory.load_tree(hostname)
        tree_renderer = AttributeRenderer(hostname, "", invpath,
                            show_internal_tree_paths=show_internal_tree_paths)

    if struct_tree is None:
        html.show_error(_("No such inventory tree."))
        # BUG FIX: the original fell through here and dereferenced the
        # None tree below, raising AttributeError after the error message.
        return

    struct_tree = struct_tree.get_filtered_tree(inventory.get_permitted_inventory_paths())

    parsed_path, attributes_key = inventory.parse_tree_path(invpath)
    if parsed_path:
        children = struct_tree.get_sub_children(parsed_path)
    else:
        children = [struct_tree.get_root_container()]

    if children is None:
        html.show_error(_("Invalid path in inventory tree: '%s' >> %s") % (invpath, repr(parsed_path)))
    else:
        for child in inventory.sort_children(children):
            child.show(tree_renderer, path=invpath)
def output_csv_headers(view):
    """Set the Content-Disposition header for a timestamped CSV download of *view*."""
    timestamp = time.strftime('%Y-%m-%d_%H-%M-%S', time.localtime(time.time()))
    filename = '%s-%s.csv' % (view['name'], timestamp)
    # HTTP headers must be byte strings (Python 2)
    if type(filename) == unicode:
        filename = filename.encode("utf-8")
    html.req.headers_out['Content-Disposition'] = 'Attachment; filename="%s"' % filename
def paint_host_list(site, hosts):
    """Render a comma separated list of links to the hoststatus view.

    Returns the usual painter tuple ("", rendered_html).
    """
    links = []
    for host in hosts:
        args = [
            ("view_name", "hoststatus"),
            ("site", site),
            ("host", host),
        ]
        # Propagate the current display options to the linked view
        if html.var("display_options"):
            args.append(("display_options", html.var("display_options")))
        url = html.makeuri_contextless(args, filename="view.py")
        links.append(html.render_a(host, href=url))
    return "", HTML(", ").join(links)
# There is common code with modules/events.py:format_plugin_output(). Please check
# whether or not that function needs to be changed too
# TODO(lm): Find a common place to unify this functionality.
def format_plugin_output(output, row = None):
    """Render a check plugin's output for display in views.

    Highlights the (!)/(!!)/(?)/(.)-state markers, optionally HTML-escapes the
    output (global config, overridable per host/service via the custom
    attribute ESCAPE_PLUGIN_OUTPUT), links "[running on ...]" host lists and
    turns embedded URLs into clickable icons.
    """
    ok_marker      = '<b class="stmark state0">OK</b>'
    warn_marker    = '<b class="stmark state1">WARN</b>'
    crit_marker    = '<b class="stmark state2">CRIT</b>'
    unknown_marker = '<b class="stmark state3">UNKN</b>'

    shall_escape = config.escape_plugin_output
    # In case we have a host or service row use the optional custom attribute
    # ESCAPE_PLUGIN_OUTPUT (set by host / service ruleset) to override the
    # global setting.
    if row:
        custom_vars = row.get("service_custom_variables", row.get("host_custom_variables", {}))
        if "ESCAPE_PLUGIN_OUTPUT" in custom_vars:
            shall_escape = custom_vars["ESCAPE_PLUGIN_OUTPUT"] == "1"

    if shall_escape:
        output = html.attrencode(output)

    output = output.replace("(!)", warn_marker) \
              .replace("(!!)", crit_marker) \
              .replace("(?)", unknown_marker) \
              .replace("(.)", ok_marker)

    if row and "[running on" in output:
        a = output.index("[running on")
        e = output.index("]", a)
        hosts = output[a+12:e].replace(" ","").split(",")
        css, h = paint_host_list(row["site"], hosts)
        output = output[:a] + "running on " + h + output[e+1:]

    if shall_escape:
        # The output was attrencoded above, so check_http's "clickable URL"
        # markup now appears as entities (&lt;A HREF=&quot; ... &quot;
        # target=&quot;_blank&quot;&gt;).  BUGFIX: the previous pattern
        # contained raw, unescaped double quotes (a syntax error) and the
        # trailing endswith() check used the literal " </A>" (5 chars) while
        # stripping 11 characters; both restored to the entity-escaped form
        # (" &lt;/A&gt;" is exactly 11 characters).
        output = re.sub("(?:&lt;A HREF=&quot;)?(http[s]?://[^\"'>\t\s\n,]+)(?:&quot; target=&quot;_blank&quot;&gt;)?",
                        lambda p: '<a href="%s"><img class=pluginurl align=absmiddle title="%s" src="images/pluginurl.png"></a>' %
                        (p.group(1).replace('&quot;', ''), p.group(1).replace('&quot;', '')), output)
        if output.endswith(" &lt;/A&gt;"):
            output = output[:-11]

    return output
#.
# .--Icon Selector-------------------------------------------------------.
# | ___ ____ _ _ |
# | |_ _|___ ___ _ __ / ___| ___| | ___ ___| |_ ___ _ __ |
# | | |/ __/ _ \| '_ \ \___ \ / _ \ |/ _ \/ __| __/ _ \| '__| |
# | | | (_| (_) | | | | ___) | __/ | __/ (__| || (_) | | |
# | |___\___\___/|_| |_| |____/ \___|_|\___|\___|\__\___/|_| |
# | |
# +----------------------------------------------------------------------+
# | AJAX API call for rendering the icon selector |
# '----------------------------------------------------------------------'
def ajax_popup_icon_selector():
    """AJAX endpoint: render the icon selector popup for a valuespec input."""
    varprefix = html.var('varprefix')
    current_value = html.var('value')
    empty_allowed = html.var('allow_empty') == '1'
    IconSelector(allow_empty=empty_allowed).render_popup_input(varprefix, current_value)
#.
# .--Action Menu---------------------------------------------------------.
# | _ _ _ __ __ |
# | / \ ___| |_(_) ___ _ __ | \/ | ___ _ __ _ _ |
# | / _ \ / __| __| |/ _ \| '_ \ | |\/| |/ _ \ '_ \| | | | |
# | / ___ \ (__| |_| | (_) | | | | | | | | __/ | | | |_| | |
# | /_/ \_\___|\__|_|\___/|_| |_| |_| |_|\___|_| |_|\__,_| |
# | |
# +----------------------------------------------------------------------+
# | Realizes the popup action menu for hosts/services in views |
# '----------------------------------------------------------------------'
def query_action_data(what, host, site, svcdesc):
    """Fetch the livestatus row needed to render the action menu icons.

    what    -- "host" or "service" (used as livestatus table prefix)
    host    -- host name to filter on
    site    -- optional site id; restricts the query to that site
    svcdesc -- service description (only used when what == "service")

    Returns a dict mapping column name -> value, with "site" always present
    (prepended by livestatus).
    """
    # Now fetch the needed data from livestatus
    columns = list(iconpainter_columns(what, toplevel=False))
    try:
        # "site" is delivered via set_prepend_site() below, so drop a
        # duplicate column if present.
        columns.remove('site')
    except ValueError:  # BUGFIX: list.remove raises ValueError, not KeyError
        pass

    if site:
        sites.live().set_only_sites([site])
    sites.live().set_prepend_site(True)
    query = 'GET %ss\n' \
            'Columns: %s\n' \
            'Filter: host_name = %s\n' \
            % (what, ' '.join(columns), host)
    if what == 'service':
        query += 'Filter: service_description = %s\n' % svcdesc
    row = sites.live().query_row(query)

    sites.live().set_prepend_site(False)
    sites.live().set_only_sites(None)

    return dict(zip(['site'] + columns, row))
def ajax_popup_action_menu():
    """AJAX endpoint: render the popup action menu for a host or service.

    Reads site/host/service from the request, fetches the matching livestatus
    row and emits a <ul> of action icons.  Icons are tuples: entries of
    length != 4 carry raw HTML in position 1; 4-tuples unpack (after the
    first element) into (icon_name, title, url_spec).
    """
    site = html.var('site')
    host = html.var('host')
    svcdesc = html.var('service')
    what = 'service' if svcdesc else 'host'

    weblib.prepare_display_options(globals())

    row = query_action_data(what, host, site, svcdesc)
    icons = get_icons(what, row, toplevel=False)

    html.open_ul()
    for icon in icons:
        if len(icon) != 4:
            # Raw HTML snippet icon: write verbatim
            html.open_li()
            html.write(icon[1])
            html.close_li()
        else:
            html.open_li()
            icon_name, title, url_spec = icon[1:]

            if url_spec:
                url, target_frame = sanitize_action_url(url_spec)
                url = replace_action_url_macros(url, what, row)

                # "onclick:" prefix means a javascript handler instead of a link
                onclick = None
                if url.startswith('onclick:'):
                    onclick = url[8:]
                    url = 'javascript:void(0);'

                target = None
                if target_frame and target_frame != "_self":
                    target = target_frame

                html.open_a(href=url, target=target, onclick=onclick)

            html.icon('', icon_name)
            if title:
                html.write(title)
            else:
                html.write_text(_("No title"))
            if url_spec:
                html.close_a()
            html.close_li()
    html.close_ul()
def sanitize_action_url(url_spec):
    """Normalize an action icon URL spec to a (url, target_frame) pair.

    A tuple is assumed to already be (url, target_frame) and is passed
    through unchanged; a plain string gets None as its target frame.
    """
    return url_spec if type(url_spec) == tuple else (url_spec, None)
#.
# .--Reschedule----------------------------------------------------------.
# | ____ _ _ _ |
# | | _ \ ___ ___ ___| |__ ___ __| |_ _| | ___ |
# | | |_) / _ \/ __|/ __| '_ \ / _ \/ _` | | | | |/ _ \ |
# | | _ < __/\__ \ (__| | | | __/ (_| | |_| | | __/ |
# | |_| \_\___||___/\___|_| |_|\___|\__,_|\__,_|_|\___| |
# | |
# +----------------------------------------------------------------------+
# | Ajax webservice for reschedulung host- and service checks |
# '----------------------------------------------------------------------'
def ajax_reschedule():
    """AJAX endpoint wrapper around do_reschedule().

    Errors are reported as a javascript-parsable ['ERROR', ...] line instead
    of an HTTP error so the frontend can display them inline.
    """
    try:
        do_reschedule()
    except Exception as e:  # "as" syntax: valid on Python 2.6+ and Python 3
        html.write("['ERROR', '%s']\n" % e)
def do_reschedule():
    """Force an immediate re-check of a host or service and wait for the result.

    Reads site/host/service/wait_svc from the request, issues a
    SCHEDULE_FORCED_{HOST,SVC}_CHECK livestatus command and then blocks via a
    livestatus WaitObject until the check has executed or
    config.reschedule_timeout expires.  Writes a javascript-parsable
    ['OK', ...] or ['TIMEOUT', ...] line to the response.

    Raises MKGeneralException on missing permission, missing host name or any
    livestatus failure.
    """
    if not config.user.may("action.reschedule"):
        raise MKGeneralException("You are not allowed to reschedule checks.")

    site = html.var("site")
    host = html.var("host", "")
    if not host:
        raise MKGeneralException("Action reschedule: missing host name")

    service  = html.get_unicode_input("service",  "")
    wait_svc = html.get_unicode_input("wait_svc", "")

    if service:
        cmd = "SVC"
        what = "service"
        spec = "%s;%s" % (host, service.encode("utf-8"))
        # wait_svc allows waiting for a different service than the one being
        # rescheduled (e.g. a passive service fed by the rescheduled one).
        if wait_svc:
            wait_spec = u'%s;%s' % (host, wait_svc)
            add_filter = "Filter: service_description = %s\n" % livestatus.lqencode(wait_svc)
        else:
            wait_spec = spec
            add_filter = "Filter: service_description = %s\n" % livestatus.lqencode(service)
    else:
        cmd = "HOST"
        what = "host"
        spec = host
        wait_spec = spec
        add_filter = ""

    try:
        now = int(time.time())
        sites.live().command("[%d] SCHEDULE_FORCED_%s_CHECK;%s;%d" % (now, cmd, livestatus.lqencode(spec), now), site)
        sites.live().set_only_sites([site])
        query = u"GET %ss\n" \
                "WaitObject: %s\n" \
                "WaitCondition: last_check >= %d\n" \
                "WaitTimeout: %d\n" \
                "WaitTrigger: check\n" \
                "Columns: last_check state plugin_output\n" \
                "Filter: host_name = %s\n%s" \
                % (what, livestatus.lqencode(wait_spec), now, config.reschedule_timeout * 1000, livestatus.lqencode(host), add_filter)
        row = sites.live().query_row(query)
        sites.live().set_only_sites()
        last_check = row[0]
        if last_check < now:
            html.write("['TIMEOUT', 'Check not executed within %d seconds']\n" % (config.reschedule_timeout))
        else:
            if service == "Check_MK":
                # Passive services triggered by Check_MK often are updated
                # a few ms later. We introduce a small wait time in order
                # to increase the chance for the passive services already
                # updated also when we return.
                time.sleep(0.7)
            html.write("['OK', %d, %d, %r]\n" % (row[0], row[1], row[2].encode("utf-8")))
    except Exception as e:  # "as" syntax: valid on Python 2.6+ and Python 3
        # Make sure the site restriction is lifted even on failure
        sites.live().set_only_sites()
        raise MKGeneralException(_("Cannot reschedule check: %s") % e)
# ---- file boundary: the Check_MK view code above and the Frets on Fire code below are separate source files ----
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyostila #
# 2008 Alarian #
# 2008 myfingershurt #
# 2008 Capo #
# 2008 Glorandwarf #
# 2008 QQStarS #
# 2008 Blazingamer #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from Song import Note, Tempo
from Mesh import Mesh
from Neck import Neck
import random
from copy import deepcopy
from Shader import shaders
from OpenGL.GL import *
import math
#myfingershurt: needed for multi-OS file fetching
import os
import Log
import Song #need the base song defines as well
from Instrument import *
class Guitar(Instrument):
    def __init__(self, engine, playerObj, editorMode = False, player = 0, bass = False):
        """Set up a guitar (or bass) fretboard instrument.

        Reads configuration and theme settings, then loads the note, fret,
        tail, kill-switch and freestyle artwork — 2D sprite sheets or 3D
        meshes depending on the theme.

        engine     -- game engine; provides config, data and resource loading
        playerObj  -- player object forwarded to the Instrument base class
        editorMode -- True when used inside the song editor
        player     -- player index
        bass       -- True to treat this instrument as a bass guitar
        """
        Instrument.__init__(self, engine, playerObj, player)

        self.isDrum = False
        self.isBassGuitar = bass
        self.isVocal = False
        self.debugMode = False
        self.gameMode2p = self.engine.world.multiMode  # multiplayer mode; 6 == battle mode
        self.matchingNotes = []
        self.starSpinFrameIndex = 0
        self.starSpinFrames = 16  # frame count of the spinning star-note animation
        self.logClassInits = self.engine.config.get("game", "log_class_inits")
        if self.logClassInits == 1:
            Log.debug("Guitar class init...")
        self.lastPlayedNotes = []  # MFH - for reverting when game discovers it implied incorrectly
        self.missedNotes = []
        self.missedNoteNums = []
        self.editorMode = editorMode

        # Animation settings
        self.Animspeed = 30  # lower value = faster animations
        self.indexCount = 0  # for animated star notes
        self.HCountAni = False  # Alarian - for animated hit glow

        # myfingershurt: HOPO system selection
        self.hopoStyle = self.engine.config.get("game", "hopo_system")
        self.gh2sloppy = self.engine.config.get("game", "gh2_sloppy")
        if self.gh2sloppy == 1:
            self.hopoStyle = 4
        self.sfxVolume = self.engine.config.get("audio", "SFX_volume")

        # blazingamer - kill-switch effect configuration
        self.killfx = self.engine.config.get("performance", "killfx")
        self.killCount = 0
        self.bigMax = 1

        # Theme determination logic now lives only in data.py; just read the results.
        themename = self.engine.data.themeLabel
        self.theme = self.engine.data.theme
        self.oFlash = None

        # myfingershurt:
        self.bassGrooveNeckMode = self.engine.config.get("game", "bass_groove_neck")
        self.starspin = self.engine.config.get("performance", "starspin")

        if self.twoDnote == True:
            # 2D sprite notes.  Spinning star notes are only attempted for
            # non-Rock Band themes (theme < 2) and require a SpinNotes image
            # in that theme's folder; otherwise fall back to the plain sheet.
            if self.starspin == True and self.theme < 2:
                if self.gameMode2p == 6:
                    if engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotesbattle.png")):
                        self.starSpinFrames = 8
                    else:
                        self.starspin = False
                        if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
                            engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
                else:
                    if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotes.png")):
                        self.starspin = False
                        engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
            else:
                if self.gameMode2p == 6:
                    if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
                        engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
                else:
                    engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
        else:
            # 3D mesh notes.  MFH - can't use IOError for fallback logic for a
            # Mesh() call, so check file existence explicitly before loading.
            if self.engine.fileExists(os.path.join("themes", themename, "note.dae")):
                engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "note.dae")))
            else:
                engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("note.dae")))
            # Per-fret note textures (notetex_a.png .. notetex_e.png); all five must exist.
            for i in range(5):
                if engine.loadImgDrawing(self, "notetex"+chr(97+i), os.path.join("themes", themename, "notetex_"+chr(97+i)+".png")):
                    self.notetex = True
                else:
                    self.notetex = False
                    break
            if self.engine.fileExists(os.path.join("themes", themename, "star.dae")):
                engine.resource.load(self, "starMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "star.dae")))
            else:
                self.starMesh = None
            # Per-fret star-note textures (startex_a..e); all five must exist.
            for i in range(5):
                if engine.loadImgDrawing(self, "startex"+chr(97+i), os.path.join("themes", themename, "startex_"+chr(97+i)+".png")):
                    self.startex = True
                else:
                    self.startex = False
                    break
            # Per-fret star-power-active textures (staratex_a..e); all five must exist.
            for i in range(5):
                if engine.loadImgDrawing(self, "staratex"+chr(97+i), os.path.join("themes", themename, "staratex_"+chr(97+i)+".png")):
                    self.staratex = True
                else:
                    self.staratex = False
                    break

        if self.gameMode2p == 6:
            if not engine.loadImgDrawing(self, "battleFrets", os.path.join("themes", themename,"battle_frets.png")):
                self.battleFrets = None

        if self.twoDkeys == True:
            engine.loadImgDrawing(self, "fretButtons", os.path.join("themes",themename,"fretbuttons.png"))
        else:
            defaultKey = False
            # MFH - can't use IOError for fallback logic for a Mesh() call...
            if self.engine.fileExists(os.path.join("themes", themename, "key.dae")):
                engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "key.dae")))
            else:
                engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("key.dae")))
                defaultKey = True
            if defaultKey:
                self.keytex = False
            else:
                for i in range(5):
                    if engine.loadImgDrawing(self, "keytex"+chr(97+i), os.path.join("themes", themename, "keytex_"+chr(97+i)+".png")):
                        self.keytex = True
                    else:
                        self.keytex = False
                        break

        # inkk: loading theme-dependent tail images
        # myfingershurt: must ensure the new tails don't affect the Rock Band mod.
        # All 7 tail/taile/btail/btaile image sets must load; otherwise fall
        # back to the simple two-texture tails.
        self.simpleTails = False
        for i in range(0,7):
            if not engine.loadImgDrawing(self, "tail"+str(i), os.path.join("themes",themename,"tails","tail"+str(i)+".png"), textureSize = (128, 128)):
                self.simpleTails = True
                break
            if not engine.loadImgDrawing(self, "taile"+str(i), os.path.join("themes",themename,"tails","taile"+str(i)+".png"), textureSize = (128, 128)):
                self.simpleTails = True
                break
            if not engine.loadImgDrawing(self, "btail"+str(i), os.path.join("themes",themename,"tails","btail"+str(i)+".png"), textureSize = (128, 128)):
                self.simpleTails = True
                break
            if not engine.loadImgDrawing(self, "btaile"+str(i), os.path.join("themes",themename,"tails","btaile"+str(i)+".png"), textureSize = (128, 128)):
                self.simpleTails = True
                break

        if self.simpleTails:
            Log.debug("Simple tails used; complex tail loading error...")
            if not engine.loadImgDrawing(self, "tail1", os.path.join("themes",themename,"tail1.png"), textureSize = (128, 128)):
                engine.loadImgDrawing(self, "tail1", "tail1.png", textureSize = (128, 128))
            if not engine.loadImgDrawing(self, "tail2", os.path.join("themes",themename,"tail2.png"), textureSize = (128, 128)):
                engine.loadImgDrawing(self, "tail2", "tail2.png", textureSize = (128, 128))
            if not engine.loadImgDrawing(self, "bigTail1", os.path.join("themes",themename,"bigtail1.png"), textureSize = (128, 128)):
                engine.loadImgDrawing(self, "bigTail1", "bigtail1.png", textureSize = (128, 128))
            if not engine.loadImgDrawing(self, "bigTail2", os.path.join("themes",themename,"bigtail2.png"), textureSize = (128, 128)):
                engine.loadImgDrawing(self, "bigTail2", "bigtail2.png", textureSize = (128, 128))

        # Kill-switch tail textures (theme file with bundled fallback)
        if not engine.loadImgDrawing(self, "kill1", os.path.join("themes", themename, "kill1.png"), textureSize = (128, 128)):
            engine.loadImgDrawing(self, "kill1", "kill1.png", textureSize = (128, 128))
        if not engine.loadImgDrawing(self, "kill2", os.path.join("themes", themename, "kill2.png"), textureSize = (128, 128)):
            engine.loadImgDrawing(self, "kill2", "kill2.png", textureSize = (128, 128))

        # MFH - freestyle tails (for drum fills & BREs)
        if not engine.loadImgDrawing(self, "freestyle1", os.path.join("themes", themename, "freestyletail1.png"), textureSize = (128, 128)):
            engine.loadImgDrawing(self, "freestyle1", "freestyletail1.png", textureSize = (128, 128))
        if not engine.loadImgDrawing(self, "freestyle2", os.path.join("themes", themename, "freestyletail2.png"), textureSize = (128, 128)):
            engine.loadImgDrawing(self, "freestyle2", "freestyletail2.png", textureSize = (128, 128))

        self.twoChordMax = False
        self.rockLevel = 0.0
        self.neck = Neck(self.engine, self, playerObj)
def selectPreviousString(self):
self.selectedString = (self.selectedString - 1) % self.strings
def selectString(self, string):
self.selectedString = string % self.strings
def selectNextString(self):
self.selectedString = (self.selectedString + 1) % self.strings
def noteBeingHeld(self):
noteHeld = False
for i in range(0,5):
if self.hit[i] == True:
noteHeld = True
return noteHeld
def isKillswitchPossible(self):
possible = False
for i in range(0,5):
if self.hit[i] == True:
possible = True
return possible
    def renderTail(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False, freestyleTail = 0, pos = 0):
        """Draw a note's sustain tail at the current GL origin.

        volshebnyi - freestyleTail selects the flavor:
          0 = normal sustain tail
          1 = active freestyle lane tail (drum fill / BRE)
          2 = highlighted freestyle lane tail
        `length` is the tail length in board units, `kill` enables the
        kill-switch wobble effect, `big` marks a tail currently being held.
        """
        if not self.simpleTails:  # complex per-fret tail textures carry their own color
            tailcol = (1,1,1, color[3])
        else:
            if big == False and tailOnly == True:
                tailcol = (.6, .6, .6, color[3])
            else:
                tailcol = (color)
        # volshebnyi - tail color when star power is active
        if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):  # 8bit
            c = self.fretColors[5]
            tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], color[3])
        if flat:
            tailscale = (1, .1, 1)
        else:
            tailscale = None
        if sustain:
            if not length == None:
                size = (.08, length)
                # clamp the rendered tail to the visible board length
                if size[1] > self.boardLength:
                    s = self.boardLength
                else:
                    s = length
                if freestyleTail == 0:  # normal tail rendering
                    # myfingershurt: so any theme containing appropriate files can use new tails
                    if not self.simpleTails:
                        if big == True and tailOnly == True:
                            if kill and self.killfx == 0:
                                zsize = .25
                                tex1 = self.kill1
                                tex2 = self.kill2
                                # volshebnyi - killswitch tail width and color change
                                kEffect = ( math.sin( pos / 50 ) + 1 ) /2
                                size = (0.02+kEffect*0.15, s - zsize)
                                c = [self.killColor[0],self.killColor[1],self.killColor[2]]
                                if c != [0,0,0]:
                                    for i in range(0,3):
                                        c[i]=c[i]*kEffect+color[i]*(1-kEffect)
                                    tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
                            else:
                                zsize = .25
                                size = (.17, s - zsize)
                                if self.starPowerActive and not color == (0,0,0,1):
                                    tex1 = self.btail6
                                    tex2 = self.btaile6
                                else:
                                    if fret == 0:
                                        tex1 = self.btail1
                                        tex2 = self.btaile1
                                    elif fret == 1:
                                        tex1 = self.btail2
                                        tex2 = self.btaile2
                                    elif fret == 2:
                                        tex1 = self.btail3
                                        tex2 = self.btaile3
                                    elif fret == 3:
                                        tex1 = self.btail4
                                        tex2 = self.btaile4
                                    elif fret == 4:
                                        tex1 = self.btail5
                                        tex2 = self.btaile5
                        else:
                            zsize = .15
                            size = (.1, s - zsize)
                            if tailOnly:  # note let go
                                tex1 = self.tail0
                                tex2 = self.taile0
                            else:
                                if self.starPowerActive and not color == (0,0,0,1):
                                    tex1 = self.tail6
                                    tex2 = self.taile6
                                else:
                                    if fret == 0:
                                        tex1 = self.tail1
                                        tex2 = self.taile1
                                    elif fret == 1:
                                        tex1 = self.tail2
                                        tex2 = self.taile2
                                    elif fret == 2:
                                        tex1 = self.tail3
                                        tex2 = self.taile3
                                    elif fret == 3:
                                        tex1 = self.tail4
                                        tex2 = self.taile4
                                    elif fret == 4:
                                        tex1 = self.tail5
                                        tex2 = self.taile5
                    else:
                        # simple (two-texture) tails
                        if big == True and tailOnly == True:
                            if kill:
                                zsize = .25
                                tex1 = self.kill1
                                tex2 = self.kill2
                                # volshebnyi - killswitch tail width and color change
                                kEffect = ( math.sin( pos / 50 ) + 1 ) /2
                                size = (0.02+kEffect*0.15, s - zsize)
                                c = [self.killColor[0],self.killColor[1],self.killColor[2]]
                                if c != [0,0,0]:
                                    for i in range(0,3):
                                        c[i]=c[i]*kEffect+color[i]*(1-kEffect)
                                    tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
                            else:
                                zsize = .25
                                size = (.11, s - zsize)
                                tex1 = self.bigTail1
                                tex2 = self.bigTail2
                        else:
                            zsize = .15
                            size = (.08, s - zsize)
                            tex1 = self.tail1
                            tex2 = self.tail2
                else:  # freestyleTail > 0
                    # render an inactive freestyle tail (self.freestyle1 & self.freestyle2)
                    zsize = .25
                    if self.freestyleActive:
                        size = (.30, s - zsize)  # was .15
                    else:
                        size = (.15, s - zsize)
                    tex1 = self.freestyle1
                    tex2 = self.freestyle2
                    if freestyleTail == 1:
                        # glColor4f(*color)
                        c1, c2, c3, c4 = color
                        tailGlow = 1 - (pos - self.freestyleLastFretHitTime[fret] ) / self.freestylePeriod
                        if tailGlow < 0:
                            tailGlow = 0
                        # MFH - this fades inactive tails' color darker
                        color = (c1 + c1*2.0*tailGlow, c2 + c2*2.0*tailGlow, c3 + c3*2.0*tailGlow, c4*0.6 + c4*0.4*tailGlow)
                    tailcol = (color)
                # Rock Band theme tail shader for held "big" tails
                if self.theme == 2 and freestyleTail == 0 and big and tailOnly and shaders.enable("tail"):
                    color = (color[0]*1.5,color[1]*1.5,color[2]*1.5,1.0)
                    shaders.setVar("color",color)
                    if kill and self.killfx == 0:
                        h = shaders.getVar("height")
                        shaders.modVar("height",0.5,0.06/h-0.1)
                        shaders.setVar("offset",(5.0-size[1],0.0))
                    size=(size[0]*15,size[1])
                # tail body, then the rounded tail end cap
                self.engine.draw3Dtex(tex1, vertex = (-size[0], 0, size[0], size[1]), texcoord = (0.0, 0.0, 1.0, 1.0),
                                      scale = tailscale, color = tailcol)
                self.engine.draw3Dtex(tex2, vertex = (-size[0], size[1], size[0], size[1] + (zsize)),
                                      scale = tailscale, texcoord = (0.0, 0.05, 1.0, 0.95), color = tailcol)
                shaders.disable()
                # MFH - this block of code renders the tail "beginning" - before the note,
                # for freestyle "lanes" only
                # volshebnyi
                if freestyleTail > 0 and pos < self.freestyleStart + self.freestyleLength:
                    self.engine.draw3Dtex(tex2, vertex = (-size[0], 0-(zsize), size[0], 0 + (.05)),
                                          scale = tailscale, texcoord = (0.0, 0.95, 1.0, 0.05), color = tailcol)
        if tailOnly:
            return
    def renderNote(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False):
        """Draw a single note head at the current GL origin.

        2D themes blit a region of the noteButtons sprite sheet selected by
        fret, tappable (HOPO) state, star-note state and star-power state;
        3D themes render the note/star mesh with per-part coloring and
        optional per-fret textures.
        """
        if flat:
            glScalef(1, .1, 1)
        if tailOnly:
            return
        if self.twoDnote == True:
            # myfingershurt: this should be retrieved once at init, not repeatedly
            # in-game whenever notes are rendered.
            if self.notedisappear == True:  # notes keep on going when missed
                notecol = (1,1,1)  # capo
            else:
                if flat:  # notes disappear when missed
                    notecol = (.1,.1,.1)
                else:
                    notecol = (1,1,1)
                tailOnly == True  # NOTE(review): comparison has no effect; an assignment was presumably intended — confirm
            if self.theme < 2:
                if self.starspin:
                    # spinning star notes: texY advances with starSpinFrameIndex
                    size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
                    texSize = (fret/5.0,fret/5.0+0.2)
                    if spNote == True:
                        if isTappable:
                            texY = (0.150+self.starSpinFrameIndex*0.05, 0.175+self.starSpinFrameIndex*0.05)
                        else:
                            texY = (0.125+self.starSpinFrameIndex*0.05, 0.150+self.starSpinFrameIndex*0.05)
                    else:
                        if isTappable:
                            texY = (0.025,0.05)
                        else:
                            texY = (0,0.025)
                    if self.starPowerActive:
                        texY = (0.10,0.125)  # QQstarS
                        if isTappable:
                            texSize = (0.2,0.4)
                        else:
                            texSize = (0,0.2)
                else:
                    size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
                    texSize = (fret/5.0,fret/5.0+0.2)
                    if spNote == True:
                        if isTappable:
                            texY = (0.6, 0.8)
                        else:
                            texY = (0.4,0.6)
                    else:
                        if isTappable:
                            texY = (0.2,0.4)
                        else:
                            texY = (0,0.2)
                    if self.starPowerActive:
                        texY = (0.8,1)
                        if isTappable:
                            texSize = (0.2,0.4)
                        else:
                            texSize = (0,0.2)
            elif self.theme == 2:
                size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
                texSize = (fret/5.0,fret/5.0+0.2)
                if spNote == True:
                    if isTappable:
                        texY = (3*0.166667, 4*0.166667)
                    else:
                        texY = (2*0.166667, 3*0.166667)
                else:
                    if isTappable:
                        texY = (1*0.166667, 2*0.166667)
                    else:
                        texY = (0, 1*0.166667)
                # myfingershurt: adding spNote==False conditional so that star notes
                # can appear in overdrive
                if self.starPowerActive and spNote == False:
                    if isTappable:
                        texY = (5*0.166667, 1)
                    else:
                        texY = (4*0.166667, 5*0.166667)
            self.engine.draw3Dtex(self.noteButtons, vertex = (-size[0],size[1],size[0],-size[1]), texcoord = (texSize[0],texY[0],texSize[1],texY[1]),
                                  scale = (1,1,0), rot = (30,1,0,0), multiples = True, color = color, vertscale = .27)
        else:
            # 3D mesh notes
            shaders.setVar("Material",color,"notes")
            # mesh     = outer ring (black)
            # mesh_001 = main note (key color)
            # mesh_002 = top (spot or hopo if no mesh_003)
            # mesh_003 = hopo bump (hopo color)
            if spNote == True and self.starMesh is not None:
                meshObj = self.starMesh
            else:
                meshObj = self.noteMesh
            glPushMatrix()
            glEnable(GL_DEPTH_TEST)
            glDepthMask(1)
            glShadeModel(GL_SMOOTH)
            if self.noterotate:
                glRotatef(90, 0, 1, 0)
                glRotatef(-90, 1, 0, 0)
            if spNote == True and self.threeDspin == True:
                glRotate(90 + self.time/3, 0, 1, 0)
            # death_au: fixed 3D note colours
            # volshebnyi - note color when star power is active
            glColor4f(*color)
            if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):
                c = self.fretColors[5]
                glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
            # per-fret rotation/offset from the theme definition
            if fret == 0:  # green note
                glRotate(self.engine.theme.noterot[0], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[0], 0)
            elif fret == 1:  # red note
                glRotate(self.engine.theme.noterot[1], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[1], 0)
            elif fret == 2:  # yellow note
                glRotate(self.engine.theme.noterot[2], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[2], 0)
            elif fret == 3:  # blue note
                glRotate(self.engine.theme.noterot[3], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[3], 0)
            elif fret == 4:  # orange note
                glRotate(self.engine.theme.noterot[4], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[4], 0)
            if self.staratex == True and self.starPowerActive and spNote == False:
                # star-power-active per-fret texture overlay
                glColor3f(1,1,1)
                glEnable(GL_TEXTURE_2D)
                getattr(self,"staratex"+chr(97+fret)).texture.bind()
                glMatrixMode(GL_TEXTURE)
                glScalef(1, -1, 1)
                glMatrixMode(GL_MODELVIEW)
                glScalef(self.boardScaleX, self.boardScaleY, 1)
                if isTappable:
                    mesh = "Mesh_001"
                else:
                    mesh = "Mesh"
                meshObj.render(mesh)
                if shaders.enable("notes"):
                    shaders.setVar("isTextured",True)
                    meshObj.render(mesh)
                    shaders.disable()
                glMatrixMode(GL_TEXTURE)
                glLoadIdentity()
                glMatrixMode(GL_MODELVIEW)
                glDisable(GL_TEXTURE_2D)
            elif self.notetex == True and spNote == False:
                # regular per-fret note texture
                glColor3f(1,1,1)
                glEnable(GL_TEXTURE_2D)
                getattr(self,"notetex"+chr(97+fret)).texture.bind()
                glMatrixMode(GL_TEXTURE)
                glScalef(1, -1, 1)
                glMatrixMode(GL_MODELVIEW)
                glScalef(self.boardScaleX, self.boardScaleY, 1)
                if isTappable:
                    mesh = "Mesh_001"
                else:
                    mesh = "Mesh"
                meshObj.render(mesh)
                if shaders.enable("notes"):
                    shaders.setVar("isTextured",True)
                    meshObj.render(mesh)
                    shaders.disable()
                glMatrixMode(GL_TEXTURE)
                glLoadIdentity()
                glMatrixMode(GL_MODELVIEW)
                glDisable(GL_TEXTURE_2D)
            elif self.startex == True and spNote == True:
                # per-fret star-note texture
                glColor3f(1,1,1)
                glEnable(GL_TEXTURE_2D)
                getattr(self,"startex"+chr(97+fret)).texture.bind()
                glMatrixMode(GL_TEXTURE)
                glScalef(1, -1, 1)
                glMatrixMode(GL_MODELVIEW)
                glScalef(self.boardScaleX, self.boardScaleY, 1)
                if isTappable:
                    mesh = "Mesh_001"
                else:
                    mesh = "Mesh"
                meshObj.render(mesh)
                if shaders.enable("notes"):
                    shaders.setVar("isTextured",True)
                    meshObj.render(mesh)
                    shaders.disable()
                glMatrixMode(GL_TEXTURE)
                glLoadIdentity()
                glMatrixMode(GL_MODELVIEW)
                glDisable(GL_TEXTURE_2D)
            else:
                # untextured mesh: color each sub-mesh separately
                if shaders.enable("notes"):
                    shaders.setVar("isTextured",False)
                meshObj.render("Mesh_001")
                shaders.disable()
                glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
                if isTappable:
                    if self.hopoColor[0] == -2:
                        glColor4f(*color)
                    else:
                        glColor3f(self.hopoColor[0], self.hopoColor[1], self.hopoColor[2])
                    if(meshObj.find("Mesh_003")) == True:
                        meshObj.render("Mesh_003")
                        glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
                meshObj.render("Mesh_002")
                glColor3f(self.meshColor[0], self.meshColor[1], self.meshColor[2])
                meshObj.render("Mesh")
            glDepthMask(0)
            glPopMatrix()
    def renderFreestyleLanes(self, visibility, song, pos):
        """Draw the five freestyle 'lanes' while a drum fill / BRE marker is near.

        Scans the player's MIDI event track around the current song position for
        Song.freestyleMarkingNote markers and renders one tail per fret for the
        marked span.  Side effects: updates self.freestyleStart,
        self.freestyleLength and self.freestyleActive.
        """
        if not song:
            return
        if not song.readyToGo:
            return
        # boardWindowMin = pos - self.currentPeriod * 2
        boardWindowMax = pos + self.currentPeriod * self.beatsPerBoard
        track = song.midiEventTrack[self.player]
        # MFH - render 5 freestyle tails when Song.freestyleMarkingNote comes up
        if self.freestyleEnabled:
            freestyleActive = False
            # for time, event in track.getEvents(boardWindowMin, boardWindowMax):
            for time, event in track.getEvents(pos - self.freestyleOffset , boardWindowMax + self.freestyleOffset):
                if isinstance(event, Song.MarkerNote):
                    if event.number == Song.freestyleMarkingNote:
                        length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
                        w = self.boardWidth / self.strings
                        self.freestyleLength = event.length  # volshebnyi
                        self.freestyleStart = time  # volshebnyi
                        z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
                        z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
                        # fade the lane in near the horizon and out behind the nut
                        if z > self.boardLength * .8:
                            f = (self.boardLength - z) / (self.boardLength * .2)
                        elif z < 0:
                            f = min(1, max(0, 1 + z2))
                        else:
                            f = 1.0
                        # MFH - must extend the tail past the first fretboard section
                        # dynamically so we don't have to render the entire length at once
                        # volshebnyi - allow tail to move under frets
                        if time - self.freestyleOffset < pos:
                            freestyleActive = True
                            if z < -1.5:
                                length += z +1.5
                                z = -1.5
                        # MFH - render 5 freestyle tails
                        for theFret in range(0,5):
                            x = (self.strings / 2 - theFret) * w
                            c = self.fretColors[theFret]
                            color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
                            glPushMatrix()
                            glTranslatef(x, (1.0 - visibility) ** (theFret + 1), z)
                            freestyleTailMode = 1
                            self.renderTail(length, sustain = True, kill = False, color = color, flat = False, tailOnly = True, isTappable = False, big = True, fret = theFret, spNote = False, freestyleTail = freestyleTailMode, pos = pos)
                            glPopMatrix()
            self.freestyleActive = freestyleActive
def renderNotes(self, visibility, song, pos, killswitch):
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
starEventsInView = False
renderedNotes = reversed(self.getRequiredNotesForRender(song,pos))
for time, event in renderedNotes:
#for time, event in reversed(track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)): #MFH - reverse order of note rendering
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
if self.lastBpmChange > 0 and self.disableVBPM == True:
continue
if (pos - time > self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
self.baseBeat += (time - self.lastBpmChange) / self.currentPeriod
self.targetBpm = event.bpm
self.lastBpmChange = time
self.neck.lastBpmChange = time
self.neck.baseBeat = self.baseBeat
# self.setBPM(self.targetBpm) # glorandwarf: was setDynamicBPM(self.targetBpm)
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue #can't break. Tempo.
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#volshebnyi - hide notes in BRE zone if BRE enabled
if self.freestyleEnabled and self.freestyleStart > 0:
if time >= self.freestyleStart-self.freestyleOffset and time < self.freestyleStart + self.freestyleLength+self.freestyleOffset:
z = -2.0
if self.twoDnote == True and not self.useFretColors:
color = (1,1,1, 1 * visibility * f)
else:
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if self.starPowerActive:
if self.spRefillMode == 0: #mode 0 = no starpower / overdrive refill notes
self.spEnabled = False
elif self.spRefillMode == 1 and self.theme != 2: #mode 1 = overdrive refill notes in RB themes only
self.spEnabled = False
elif self.spRefillMode == 2 and song.midiStyle != 1: #mode 2 = refill based on MIDI type
self.spEnabled = False
if event.star:
#self.isStarPhrase = True
starEventsInView = True
if event.finalStar:
self.finalStarSeen = True
starEventsInView = True
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
Log.debug("star power added")
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
if event.tappable < 2:
isTappable = False
else:
isTappable = True
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
#MFH - filter out this tail whitening when starpower notes have been disbled from a screwup
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if shaders.turnon:
shaders.setVar("note_position",(x, (1.0 - visibility) ** (event.number + 1), z),"notes")
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote)
else:
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote)
glPopMatrix()
if (not starEventsInView and self.finalStarSeen):
self.spEnabled = True
self.finalStarSeen = False
self.isStarPhrase = False
def renderTails(self, visibility, song, pos, killswitch):
    """Draw sustain tails for every note in the current render window,
    plus the killswitch whammy waveform while the kill effect is engaged.

    Mirrors the note loop of renderNotes() but emits only tail geometry
    via self.renderTail().

    visibility: 0..1 fade factor folded into tail alpha
    song:       current song object; no-op when absent or not ready
    pos:        current song position in milliseconds
    killswitch: True while the kill/whammy effect is active
    """
    if not song:
        return
    if not song.readyToGo:
        return
    # Update dynamic period
    self.currentPeriod = self.neckSpeed
    #self.targetPeriod = self.neckSpeed
    self.killPoints = False
    w = self.boardWidth / self.strings
    track = song.track[self.player]
    num = 0
    enable = True
    renderedNotes = self.getRequiredNotesForRender(song,pos)
    for time, event in renderedNotes:
        #for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard):
        if isinstance(event, Tempo):
            self.tempoBpm = event.bpm
            continue
        if not isinstance(event, Note):
            continue
        if (event.noteBpm == 0.0):
            event.noteBpm = self.tempoBpm
        # During a failed co-op rescue window, suppress rendering until the
        # rescue either succeeds or its time window elapses.
        if self.coOpFailed:
            if self.coOpRestart:
                if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
                    continue
                elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
                    self.coOpFailed = False
                    self.coOpRestart = False
                    Log.debug("Turning off coOpFailed. Rescue successful.")
            else:
                continue
        c = self.fretColors[event.number]
        x = (self.strings / 2 - event.number) * w
        # z / z2: note head and note end positions in board units
        # (0 = strike line, positive = toward the far edge of the neck)
        z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
        z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
        # f: alpha fade near the far edge of the board and past the strike line
        if z > self.boardLength * .8:
            f = (self.boardLength - z) / (self.boardLength * .2)
        elif z < 0:
            f = min(1, max(0, 1 + z2))
        else:
            f = 1.0
        color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
        if event.length > 120:
            length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
        else:
            length = 0
        flat = False
        tailOnly = False
        spNote = False
        #myfingershurt: user setting for starpower refill / replenish notes
        if event.star and self.spEnabled:
            spNote = True
        if event.finalStar and self.spEnabled:
            spNote = True
        if event.played or event.hopod:
            # First frame after a hit (flameCount < 1): award star power,
            # or a battle object when in battle mode (gameMode2p == 6).
            if event.flameCount < 1 and not self.starPowerGained:
                if self.gameMode2p == 6:
                    if self.battleSuddenDeath:
                        self.battleObjects = [1] + self.battleObjects[:2]
                    else:
                        self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
                    self.battleGetTime = pos
                    self.battleObjectGained = True
                    Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
                else:
                    if self.starPower < 100:
                        self.starPower += 25
                    if self.starPower > 100:
                        self.starPower = 100
                    self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
                    self.starPowerGained = True
                    self.neck.ocount = 0
        if event.tappable < 2:
            isTappable = False
        else:
            isTappable = True
        # Clip the played notes to the origin
        #myfingershurt: this should be loaded once at init, not every render...
        if self.notedisappear == True:#Notes keep on going when missed
            ###Capo###
            if event.played or event.hopod:
                tailOnly = True
                length += z
                z = 0
                if length <= 0:
                    continue
            if z < 0 and not (event.played or event.hopod):
                # Missed note: grey it out and flatten the tail
                color = (.6, .6, .6, .5 * visibility * f)
                flat = True
            ###endCapo###
        else:#Notes disappear when missed
            if z < 0:
                if event.played or event.hopod:
                    tailOnly = True
                    length += z
                    z = 0
                    if length <= 0:
                        continue
                else:
                    color = (.6, .6, .6, .5 * visibility * f)
                    flat = True
        # big / bigMax: how many frets are currently being hit simultaneously
        big = False
        self.bigMax = 0
        for i in range(0,5):
            if self.hit[i]:
                big = True
                self.bigMax += 1
        # Whiten held star-power tails while the killswitch is engaged
        if self.spEnabled and killswitch:
            if event.star or event.finalStar:
                if big == True and tailOnly == True:
                    self.killPoints = True
                    color = (1,1,1,1)
        if z + length < -1.0:
            continue
        if event.length <= 120:
            length = None
        sustain = False
        if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
            sustain = True
        glPushMatrix()
        glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
        if self.battleStatus[8]:
            # Battle "random notes" attack: draw only ~1 in 3 tails
            renderNote = random.randint(0,2)
        else:
            renderNote = 0
        if renderNote == 0:
            if big == True and num < self.bigMax:
                num += 1
                self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote, pos = pos)
            else:
                self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote, pos = pos)
        glPopMatrix()
    if killswitch and self.killfx == 1:
        # Killswitch FX: additive-blended sine wave drawn along each held tail
        glBlendFunc(GL_SRC_ALPHA, GL_ONE)
        for time, event in self.playedNotes:
            step = self.currentPeriod / 16
            t = time + event.length
            x = (self.strings / 2 - event.number) * w
            c = self.fretColors[event.number]
            s = t
            proj = 1.0 / self.currentPeriod / self.beatsPerUnit
            zStep = step * proj
            def waveForm(t):
                # Wave amplitude at tail time t; the +.0001 keeps u non-zero
                u = ((t - time) * -.1 + pos - time) / 64.0 + .0001
                return (math.sin(event.number + self.time * -.01 + t * .03) + math.cos(event.number + self.time * .01 + t * .02)) * .1 + .1 + math.sin(u) / (5 * u)
            glBegin(GL_TRIANGLE_STRIP)
            f1 = 0
            while t > time:
                # Walk from the tail end back toward the note head
                if ((t-pos)*proj) < self.boardLength:
                    z = (t - pos) * proj
                else:
                    z = self.boardLength
                if z < 0:
                    break
                f2 = min((s - t) / (6 * step), 1.0)
                a1 = waveForm(t) * f1
                a2 = waveForm(t - step) * f2
                if self.starPowerActive and self.theme != 2:#8bit
                    glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
                else:
                    glColor4f(c[0], c[1], c[2], .5)
                glVertex3f(x - a1, 0, z)
                glVertex3f(x - a2, 0, z - zStep)
                glColor4f(1, 1, 1, .75)
                glVertex3f(x, 0, z)
                glVertex3f(x, 0, z - zStep)
                if self.starPowerActive and self.theme != 2:#8bit
                    glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
                else:
                    glColor4f(c[0], c[1], c[2], .5)
                glVertex3f(x + a1, 0, z)
                glVertex3f(x + a2, 0, z - zStep)
                # Repeated vertices stitch consecutive strip segments together
                glVertex3f(x + a2, 0, z - zStep)
                glVertex3f(x - a2, 0, z - zStep)
                t -= step
                f1 = f2
            glEnd()
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
def renderFrets(self, visibility, song, controls):
    """Draw the fret buttons — 2D sprites (twoDkeys) or lit 3D key meshes —
    plus the per-fret activity glow and hit-glow quad.

    visibility: 0..1 fade factor; controls supplies live key state.
    """
    w = self.boardWidth / self.strings
    size = (.22, .22)
    v = 1.0 - visibility
    glEnable(GL_DEPTH_TEST)
    #Hitglow color option - myfingershurt sez this should be a Guitar class global, not retrieved ever fret render in-game...
    for n in range(self.strings):
        f = self.fretWeight[n]
        c = self.fretColors[n]
        if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
            f += 0.25
        glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
        if self.fretPress:
            y = v + f / 6
        else:
            y = v / 6
        x = (self.strings / 2 - n) * w
        if self.twoDkeys == True:
            # --- 2D sprite frets ---
            if self.battleStatus[4]:
                # Battle whammy attack: frets jiggle and go translucent
                fretWhamOffset = self.battleWhammyNow * .15
                fretColor = (1,1,1,.5)
            else:
                fretWhamOffset = 0
                fretColor = (1,1,1,1)
            size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2.4)
            if self.battleStatus[3] and self.battleFrets != None and self.battleBreakString == n:
                # Battle string-break attack: show the breaking-fret animation strip
                texSize = (n/5.0+.042,n/5.0+0.158)
                size = (.30, .40)
                fretPos = 8 - round((self.battleBreakNow/self.battleBreakLimit) * 8)
                texY = (fretPos/8.0,(fretPos + 1.0)/8)
                self.engine.draw3Dtex(self.battleFrets, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
                                      coord = (x,v + .08 + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
            else:
                texSize = (n/5.0,n/5.0+0.2)
                # Fret texture has three vertical states: idle / pressed / hit
                texY = (0.0,1.0/3.0)
                if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]):
                    texY = (1.0/3.0,2.0/3.0)
                if self.hit[n] or (self.battleStatus[3] and self.battleBreakString == n):
                    texY = (2.0/3.0,1.0)
                self.engine.draw3Dtex(self.fretButtons, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
                                      coord = (x,v + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
        else:
            # --- 3D key meshes ---
            if self.keyMesh:
                glPushMatrix()
                glDepthMask(1)
                glEnable(GL_LIGHTING)
                glEnable(GL_LIGHT0)
                glShadeModel(GL_SMOOTH)
                glRotatef(90, 0, 1, 0)
                glLightfv(GL_LIGHT0, GL_POSITION, (5.0, 10.0, -10.0, 0.0))
                glLightfv(GL_LIGHT0, GL_AMBIENT, (.2, .2, .2, 0.0))
                glLightfv(GL_LIGHT0, GL_DIFFUSE, (1.0, 1.0, 1.0, 0.0))
                glRotatef(-90, 1, 0, 0)
                glRotatef(-90, 0, 0, 1)
                # Theme-configurable per-fret rotation and depth offset
                if n == 0: #green fret button
                    glRotate(self.engine.theme.keyrot[0], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[0])
                elif n == 1: #red fret button
                    glRotate(self.engine.theme.keyrot[1], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[1])
                elif n == 2: #yellow fret button
                    glRotate(self.engine.theme.keyrot[2], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[2])
                elif n == 3: #blue fret button
                    glRotate(self.engine.theme.keyrot[3], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[3])
                elif n == 4: #orange fret button
                    glRotate(self.engine.theme.keyrot[4], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[4])
                #Mesh - Main fret
                #Key_001 - Top of fret (key_color)
                #Key_002 - Bottom of fret (key2_color)
                #Glow_001 - Only rendered when a note is hit along with the glow.svg
                #if self.complexkey == True:
                #  glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], visibility)
                #  if self.battleStatus[4]:
                #    glTranslatef(x, y + self.battleWhammyNow * .15, 0)
                #  else:
                #    glTranslatef(x, y, 0)
                if self.keytex == True:
                    # Textured key meshes: bind keytexa..keytexe for fret n
                    glColor4f(1,1,1,visibility)
                    if self.battleStatus[4]:
                        glTranslatef(x, y + self.battleWhammyNow * .15, 0)
                    else:
                        glTranslatef(x, y, 0)
                    glEnable(GL_TEXTURE_2D)
                    getattr(self,"keytex"+chr(97+n)).texture.bind()
                    glMatrixMode(GL_TEXTURE)
                    glScalef(1, -1, 1)
                    glMatrixMode(GL_MODELVIEW)
                    glScalef(self.boardScaleX, self.boardScaleY, 1)
                    # Mesh variant: pressed / hit / idle
                    if f and not self.hit[n]:
                        self.keyMesh.render("Mesh_001")
                    elif self.hit[n]:
                        self.keyMesh.render("Mesh_002")
                    else:
                        self.keyMesh.render("Mesh")
                    glMatrixMode(GL_TEXTURE)
                    glLoadIdentity()
                    glMatrixMode(GL_MODELVIEW)
                    glDisable(GL_TEXTURE_2D)
                else:
                    # Untextured, vertex-colored key mesh
                    glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
                    if self.battleStatus[4]:
                        glTranslatef(x, y + self.battleWhammyNow * .15 + v * 6, 0)
                    else:
                        glTranslatef(x, y + v * 6, 0)
                    key = self.keyMesh
                    # NOTE(review): mesh.find(...) compared with == True — only
                    # matches if find() literally returns True; verify Mesh API.
                    if(key.find("Glow_001")) == True:
                        key.render("Mesh")
                        if(key.find("Key_001")) == True:
                            glColor3f(self.keyColor[0], self.keyColor[1], self.keyColor[2])
                            key.render("Key_001")
                        if(key.find("Key_002")) == True:
                            glColor3f(self.key2Color[0], self.key2Color[1], self.key2Color[2])
                            key.render("Key_002")
                    else:
                        key.render()
                glDisable(GL_LIGHTING)
                glDisable(GL_LIGHT0)
                glDepthMask(0)
                glPopMatrix()
        ######################
        # Per-fret activity glow (sustain held on this string)
        f = self.fretActivity[n]
        if f and self.disableFretSFX != True:
            # glowColor[0] == -1 disables the pulsing pass entirely
            if self.glowColor[0] == -1:
                s = 1.0
            else:
                s = 0.0
            while s < 1:
                ms = s * (math.sin(self.time) * .25 + 1)
                if self.glowColor[0] == -2:
                    glColor3f(c[0] * (1 - ms), c[1] * (1 - ms), c[2] * (1 - ms))
                else:
                    glColor3f(self.glowColor[0] * (1 - ms), self.glowColor[1] * (1 - ms), self.glowColor[2] * (1 - ms))
                glPushMatrix()
                if self.battleStatus[4]:
                    glTranslatef(x, y + self.battleWhammyNow * .15, 0)
                else:
                    glTranslatef(x, y, 0)
                glScalef(.1 + .02 * ms * f, .1 + .02 * ms * f, .1 + .02 * ms * f)
                glRotatef( 90, 0, 1, 0)
                glRotatef(-90, 1, 0, 0)
                glRotatef(-90, 0, 0, 1)
                # NOTE(review): `key` is only bound in the untextured 3D branch
                # above; this guard keeps the 2D/keytex paths from reaching it.
                if self.twoDkeys == False and self.keytex == False:
                    if(self.keyMesh.find("Glow_001")) == True:
                        key.render("Glow_001")
                    else:
                        key.render()
                glPopMatrix()
                s += 0.2
            #Hitglow color
            if self.hitglow_color == 0:
                glowcol = (c[0], c[1], c[2])#Same as fret
            elif self.hitglow_color == 1:
                glowcol = (1, 1, 1)#Actual color in .svg-file
            f += 2
            if self.battleStatus[4]:
                self.engine.draw3Dtex(self.glowDrawing, coord = (x, y + self.battleWhammyNow * .15, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
                                      texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
                                      multiples = True, alpha = True, color = glowcol)
            else:
                self.engine.draw3Dtex(self.glowDrawing, coord = (x, y, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
                                      texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
                                      multiples = True, alpha = True, color = glowcol)
        #self.hit[n] = False #MFH -- why? This prevents frets from being rendered under / before the notes...
    glDisable(GL_DEPTH_TEST)
def renderFreestyleFlames(self, visibility, controls):
    """Draw hit flames above the frets during freestyle (BRE) sections.

    Flames are driven purely by held keys: each pressed fret advances its
    freestyleHitFlameCounts entry up to flameLimit, then resets to 0.
    """
    if self.flameColors[0][0][0] == -1:
        return
    w = self.boardWidth / self.strings
    #track = song.track[self.player]
    size = (.22, .22)
    v = 1.0 - visibility
    if self.disableFlameSFX != True:
        flameLimit = 10.0
        flameLimitHalf = round(flameLimit/2.0)
        for fretNum in range(self.strings):
            # Either the primary or the solo-shifted key counts as pressed
            if controls.getState(self.keys[fretNum]) or controls.getState(self.keys[fretNum+5]):
                if self.freestyleHitFlameCounts[fretNum] < flameLimit:
                    ms = math.sin(self.time) * .25 + 1
                    x = (self.strings / 2 - fretNum) * w
                    ff = 1 + 0.25
                    y = v + ff / 6
                    if self.theme == 2:
                        y -= 0.5
                    #flameSize = self.flameSizes[self.scoreMultiplier - 1][fretNum]
                    flameSize = self.flameSizes[self.cappedScoreMult - 1][fretNum]
                    if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
                        flameColor = self.gh3flameColor
                    else: #MFH - fixing crash!
                        #try:
                        #  flameColor = self.flameColors[self.scoreMultiplier - 1][fretNum]
                        #except IndexError:
                        flameColor = self.fretColors[fretNum]
                    if flameColor[0] == -2:
                        flameColor = self.fretColors[fretNum]
                    ff += 1.5 #ff first time is 2.75 after this
                    if self.freestyleHitFlameCounts[fretNum] < flameLimitHalf:
                        # Young flame: four layered hitflames2 quads growing with the count
                        flamecol = tuple([flameColor[ifc] for ifc in range(3)])
                        rbStarColor = (.1, .1, .2, .3)
                        xOffset = (.0, - .005, .005, .0)
                        yOffset = (.20, .255, .255, .255)
                        scaleMod = .6 * ms * ff
                        scaleFix = (6.0, 5.5, 5.0, 4.7)
                        for step in range(4):
                            if self.starPowerActive and self.theme < 2:
                                flamecol = self.spColor
                            else: #Default starcolor (Rockband)
                                flamecol = (rbStarColor[step],)*3
                            hfCount = self.freestyleHitFlameCounts[fretNum]
                            if step == 0:
                                hfCount += 1
                            self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
                                                  scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
                                                  vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
                                                  texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
                    else:
                        # Mature flame: four layered hitflames1 quads fading as the count rises
                        flameColorMod = 0.1 * (flameLimit - self.freestyleHitFlameCounts[fretNum])
                        flamecol = tuple([flameColor[ifc]*flameColorMod for ifc in range(3)])
                        xOffset = (.0, - .005, .005, .005)
                        yOffset = (.35, .405, .355, .355)
                        scaleMod = .6 * ms * ff
                        scaleFix = (3.0, 2.5, 2.0, 1.7)
                        for step in range(4):
                            hfCount = self.freestyleHitFlameCounts[fretNum]
                            if step == 0:
                                hfCount += 1
                            else:
                                if self.starPowerActive and self.theme < 2:
                                    flamecol = self.spColor
                                else: #Default starcolor (Rockband)
                                    flamecol = (.4+.1*step,)*3
                            self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
                                                  scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
                                                  vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
                                                  texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
                    self.freestyleHitFlameCounts[fretNum] += 1
                else: #MFH - flame count is done - reset it!
                    self.freestyleHitFlameCounts[fretNum] = 0 #MFH
def renderFlames(self, visibility, song, pos, controls):
    """Draw hold glows and hit flames over the frets during normal play.

    Two passes:
      1. per-fret hold glow while a sustain is active on that string;
      2. per-note hit flames for recently played/hopo'd notes, advancing
         each note's flameCount until flameLimit is reached.
    """
    if not song or self.flameColors[0][0][0] == -1:
        return
    w = self.boardWidth / self.strings
    track = song.track[self.player]
    size = (.22, .22)
    v = 1.0 - visibility
    # --- Pass 1: hold glow per active fret ---
    if self.disableFlameSFX != True and (self.HCountAni == True and self.HCount2 > 12):
        for n in range(self.strings):
            f = self.fretWeight[n]
            c = self.fretColors[n]
            if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
                f += 0.25
            y = v + f / 6
            x = (self.strings / 2 - n) * w
            f = self.fretActivity[n]
            if f:
                ms = math.sin(self.time) * .25 + 1
                ff = f
                ff += 1.2
                #myfingershurt: need to cap flameSizes use of scoreMultiplier to 4x, the 5x and 6x bass groove mults cause crash:
                self.cappedScoreMult = min(self.scoreMultiplier,4)
                flameSize = self.flameSizes[self.cappedScoreMult - 1][n]
                if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
                    flameColor = self.gh3flameColor
                else:
                    flameColor = self.flameColors[self.cappedScoreMult - 1][n]
                flameColorMod = (1.19, 1.97, 10.59)
                flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
                if self.starPowerActive:
                    if self.theme == 0 or self.theme == 1: #GH3 starcolor
                        flamecol = self.spColor
                    else: #Default starcolor (Rockband)
                        flamecol = (.9,.9,.9)
                if self.Hitanim != True:
                    self.engine.draw3Dtex(self.hitglowDrawing, coord = (x, y + .125, 0), rot = (90, 1, 0, 0),
                                          scale = (0.5 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
                                          vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
                                          texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
                #Alarian: Animated hitflames
                else:
                    # Advance the 16-frame glow animation strip
                    self.HCount = self.HCount + 1
                    if self.HCount > self.Animspeed-1:
                        self.HCount = 0
                    HIndex = (self.HCount * 16 - (self.HCount * 16) % self.Animspeed) / self.Animspeed
                    if HIndex > 15:
                        HIndex = 0
                    texX = (HIndex*(1/16.0), HIndex*(1/16.0)+(1/16.0))
                    self.engine.draw3Dtex(self.hitglowAnim, coord = (x, y + .225, 0), rot = (90, 1, 0, 0), scale = (2.4, 1, 3.3),
                                          vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
                                          texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
                ff += .3
                flameColorMod = (1.19, 1.78, 12.22)
                flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
                if self.starPowerActive:
                    if self.theme == 0 or self.theme == 1: #GH3 starcolor
                        flamecol = self.spColor
                    else: #Default starcolor (Rockband)
                        flamecol = (.8,.8,.8)
                if self.Hitanim != True:
                    self.engine.draw3Dtex(self.hitglow2Drawing, coord = (x, y + .25, .05), rot = (90, 1, 0, 0),
                                          scale = (.40 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
                                          vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
                                          texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
    # --- Pass 2: hit flames per recently played note ---
    if self.disableFlameSFX != True:
        flameLimit = 10.0
        flameLimitHalf = round(flameLimit/2.0)
        renderedNotes = self.getRequiredNotesForRender(song,pos)
        for time, event in renderedNotes:
            if isinstance(event, Tempo):
                continue
            if not isinstance(event, Note):
                continue
            if (event.played or event.hopod) and event.flameCount < flameLimit:
                ms = math.sin(self.time) * .25 + 1
                x = (self.strings / 2 - event.number) * w
                xlightning = (self.strings / 2 - event.number)*2.2*w
                ff = 1 + 0.25
                y = v + ff / 6
                if self.theme == 2:
                    y -= 0.5
                flameSize = self.flameSizes[self.cappedScoreMult - 1][event.number]
                if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
                    flameColor = self.gh3flameColor
                else:
                    flameColor = self.flameColors[self.cappedScoreMult - 1][event.number]
                if flameColor[0] == -2:
                    flameColor = self.fretColors[event.number]
                ff += 1.5 #ff first time is 2.75 after this
                if self.Hitanim2 == True:
                    # Animated (13-frame) hit flame path
                    self.HCount2 = self.HCount2 + 1
                    self.HCountAni = False
                    if self.HCount2 > 12:
                        if not event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
                            self.HCount2 = 0
                        else:
                            self.HCountAni = True
                    if event.flameCount < flameLimitHalf:
                        HIndex = (self.HCount2 * 13 - (self.HCount2 * 13) % 13) / 13
                        if HIndex > 12 and self.HCountAni != True:
                            HIndex = 0
                        texX = (HIndex*(1/13.0), HIndex*(1/13.0)+(1/13.0))
                        self.engine.draw3Dtex(self.hitflamesAnim, coord = (x, y + .665, 0), rot = (90, 1, 0, 0), scale = (1.6, 1.6, 4.9),
                                              vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
                                              texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
                    else:
                        flameColorMod = 0.1 * (flameLimit - event.flameCount)
                        flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
                        scaleChange = (3.0,2.5,2.0,1.7)
                        yOffset = (.35, .405, .355, .355)
                        vtx = flameSize * ff
                        scaleMod = .6 * ms * ff
                        for step in range(4):
                            #draw lightning in GH themes on SP gain
                            if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
                                self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
                                                      scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
                                                      texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
                                continue
                            if step == 0:
                                yzscaleMod = event.flameCount/ scaleChange[step]
                            else:
                                yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
                            if self.starPowerActive:
                                if self.theme == 0 or self.theme == 1:
                                    spcolmod = .7+step*.1
                                    flamecol = tuple([isp*spcolmod for isp in self.spColor])
                                else:
                                    flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
                            if self.hitFlamesPresent == True:
                                self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
                                                      scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
                                                      vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
                                                      multiples = True, alpha = True, color = flamecol)
                elif self.hitFlamesPresent == True and self.Hitanim2 == False:
                    # Static hit flame path
                    self.HCount2 = 13
                    self.HCountAni = True
                    if event.flameCount < flameLimitHalf:
                        flamecol = flameColor
                        if self.starPowerActive:
                            if self.theme == 0 or self.theme == 1: #GH3 starcolor
                                spcolmod = .3
                                flamecol = tuple([isp*spcolmod for isp in self.spColor])
                            else: #Default starcolor (Rockband)
                                flamecol = (.1,.1,.1)
                        self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x, y + .20, 0), rot = (90, 1, 0, 0),
                                              scale = (.25 + .6 * ms * ff, event.flameCount/6.0 + .6 * ms * ff, event.flameCount / 6.0 + .6 * ms * ff),
                                              vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
                                              multiples = True, alpha = True, color = flamecol)
                        for i in range(3):
                            if self.starPowerActive:
                                if self.theme == 0 or self.theme == 1: #GH3 starcolor
                                    spcolmod = 0.4+i*0.1
                                    flamecol = tuple([isp*spcolmod for isp in self.spColor])
                                else: #Default starcolor (Rockband)
                                    flamecol = (0.1+i*0.1,)*3
                            self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x-.005, y + .255, 0), rot = (90, 1, 0, 0),
                                                  scale = (.30 + i*0.05 + .6 * ms * ff, event.flameCount/(5.5 - i*0.4) + .6 * ms * ff, event.flameCount / (5.5 - i*0.4) + .6 * ms * ff),
                                                  vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
                                                  multiples = True, alpha = True, color = flamecol)
                    else:
                        flameColorMod = 0.1 * (flameLimit - event.flameCount)
                        flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
                        scaleChange = (3.0,2.5,2.0,1.7)
                        yOffset = (.35, .405, .355, .355)
                        vtx = flameSize * ff
                        scaleMod = .6 * ms * ff
                        for step in range(4):
                            #draw lightning in GH themes on SP gain
                            if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
                                self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
                                                      scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
                                                      texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
                                continue
                            if step == 0:
                                yzscaleMod = event.flameCount/ scaleChange[step]
                            else:
                                yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
                            if self.starPowerActive:
                                if self.theme == 0 or self.theme == 1:
                                    spcolmod = .7+step*.1
                                    flamecol = tuple([isp*spcolmod for isp in self.spColor])
                                else:
                                    flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
                            self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
                                                  scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
                                                  vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
                                                  multiples = True, alpha = True, color = flamecol)
                event.flameCount += 1
def render(self, visibility, song, pos, controls, killswitch):
    """Top-level neck render for one frame.

    Performs one-time star (overdrive) phrase marking on the first call,
    then draws tails, notes, lanes, frets and flames in the order required
    by the current mode (freestyle vs normal; frets under/over notes).
    Lefty mode XOR the battle lefty-flip attack mirrors the neck.
    """
    if shaders.turnon:
        shaders.globals["dfActive"] = self.drumFillsActive
        shaders.globals["breActive"] = self.freestyleActive
        shaders.globals["rockLevel"] = self.rockLevel
        if shaders.globals["killswitch"] != killswitch:
            shaders.globals["killswitchPos"] = pos
        shaders.globals["killswitch"] = killswitch
        shaders.modVar("height",0.2,0.2,1.0,"tail")
    # One-time star-phrase setup: mark ~10-note phrases as star notes and
    # the note after each phrase as the finalStar.
    # NOTE: Python 2 integer division is relied on throughout this section.
    if not self.starNotesSet == True:
        self.totalNotes = 0
        for time, event in song.track[self.player].getAllEvents():
            if not isinstance(event, Note):
                continue
            self.totalNotes += 1
        stars = []
        maxStars = []
        maxPhrase = self.totalNotes/120
        for q in range(0,maxPhrase):
            for n in range(0,10):
                stars.append(self.totalNotes/maxPhrase*(q)+n+maxPhrase/4)
            maxStars.append(self.totalNotes/maxPhrase*(q)+10+maxPhrase/4)
        i = 0
        for time, event in song.track[self.player].getAllEvents():
            if not isinstance(event, Note):
                continue
            for a in stars:
                if i == a:
                    self.starNotes.append(time)
                    event.star = True
            for a in maxStars:
                if i == a:
                    self.maxStars.append(time)
                    event.finalStar = True
            i += 1
        # Second sweep: propagate the star flags to every note sharing the
        # same time (chords must light up together).
        for time, event in song.track[self.player].getAllEvents():
            if not isinstance(event, Note):
                continue
            for q in self.starNotes:
                if time == q:
                    event.star = True
            for q in self.maxStars:
                #if time == q and not event.finalStar:
                #  event.star = True
                if time == q: #MFH - no need to mark only the final SP phrase note as the finalStar as in drums, they will be hit simultaneously here.
                    event.finalStar = True
        self.starNotesSet = True
    # Skip all drawing while co-op has failed with no restart pending
    if not (self.coOpFailed and not self.coOpRestart):
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glEnable(GL_COLOR_MATERIAL)
        # Mirror the neck when exactly one of lefty mode / battle flip is on
        if self.leftyMode:
            if not self.battleStatus[6]:
                glScalef(-1, 1, 1)
        elif self.battleStatus[6]:
            glScalef(-1, 1, 1)
        if self.freestyleActive:
            self.renderTails(visibility, song, pos, killswitch)
            self.renderNotes(visibility, song, pos, killswitch)
            self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
            self.renderFrets(visibility, song, controls)
            if self.hitFlamesPresent: #MFH - only if present!
                self.renderFreestyleFlames(visibility, controls) #MFH - freestyle hit flames
        else:
            self.renderTails(visibility, song, pos, killswitch)
            if self.fretsUnderNotes: #MFH
                if self.twoDnote == True:
                    self.renderFrets(visibility, song, controls)
                    self.renderNotes(visibility, song, pos, killswitch)
                else:
                    self.renderNotes(visibility, song, pos, killswitch)
                    self.renderFrets(visibility, song, controls)
            else:
                self.renderNotes(visibility, song, pos, killswitch)
                self.renderFrets(visibility, song, controls)
            self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
            if self.hitFlamesPresent: #MFH - only if present!
                self.renderFlames(visibility, song, pos, controls) #MFH - only when freestyle inactive!
        # Undo the lefty mirror so later passes are unaffected
        if self.leftyMode:
            if not self.battleStatus[6]:
                glScalef(-1, 1, 1)
        elif self.battleStatus[6]:
            glScalef(-1, 1, 1)
#MFH - corrected and optimized:
#def getRequiredNotesMFH(self, song, pos):
def getRequiredNotesMFH(self, song, pos, hopoTroubleCheck = False):
    """Return the (time, Note) pairs the player must currently hit.

    pos is the song position in ms. Normally the window is
    [pos - lateMargin, pos + earlyMargin] and already played/skipped
    notes are filtered out. With hopoTroubleCheck, instead returns the
    upcoming notes in (pos, pos + 2*earlyMargin], excluding the note at
    pos itself (the one that triggered the check).

    During the battle difficulty-up attack (battleStatus[2]) the song's
    active difficulty is switched depending on whether pos lies inside
    the attack window. Result is sorted by time.
    """
    if self.battleStatus[2] and self.difficulty != 0:
        if pos < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or pos > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
            song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
        else:
            song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
    track = song.track[self.player]
    if hopoTroubleCheck:
        notes = [(time, event) for time, event in track.getEvents(pos, pos + (self.earlyMargin*2)) if isinstance(event, Note)]
        notes = [(time, event) for time, event in notes if not time==pos] #MFH - filter out the problem note that caused this check!
    else:
        notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + self.earlyMargin) if isinstance(event, Note)]
        notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
        notes = [(time, event) for time, event in notes if (time >= (pos - self.lateMargin)) and (time <= (pos + self.earlyMargin))]
        # (removed a dead `sorted(notes, ...)` whose result was discarded —
        # sorted() returns a new list and does not sort in place)
    if self.battleStatus[7]:
        notes = self.getDoubleNotes(notes)
    return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
def _pickDoubleNoteNumber(self, curNumbers):
    """Choose the extra fret number used to thicken one battle chord.

    curNumbers: non-empty list of fret numbers (0-4) in the chord.
    Prefers one fret above the highest note, else one below the lowest,
    falling back to the middle fret (2).
    """
    maxNote = max(curNumbers)
    minNote = min(curNumbers)
    if maxNote < 4:
        return maxNote + 1
    if minNote > 0:
        return minNote - 1
    return 2

def getDoubleNotes(self, notes):
    """Battle 'double notes' attack: add one extra note to every chord of
    fewer than three notes that falls inside the attack's time window.

    notes is a list of (time, event) pairs; returns a new list sorted by
    time. No-op (beyond sorting) unless battleStatus[7] is active and
    notes is non-empty. Injected notes are deepcopies with a fret chosen
    by _pickDoubleNoteNumber; chords of 3+ notes are left alone.
    """
    if self.battleStatus[7] and notes != []:
        notes = sorted(notes, key=lambda x: x[0])
        curTime = 0
        tempnotes = []    # one deepcopied note per distinct chord time
        tempnumbers = []  # chosen extra fret per chord, or -1 for "add nothing"
        curNumbers = []   # fret numbers accumulated for the chord at curTime
        noteCount = 0
        for time, note in notes:
            noteCount += 1
            if not isinstance(note, Note):
                # Non-note event: only flush if it is the very last entry.
                if noteCount == len(notes) and len(curNumbers) < 3 and len(curNumbers) > 0:
                    tempnumbers.append(self._pickDoubleNoteNumber(curNumbers))
                    curNumbers = []
                elif noteCount == len(notes) and len(curNumbers) > 2:
                    tempnumbers.append(-1)  # already a 3+ chord: add nothing
                    curNumbers = []
                continue
            if time != curTime:
                # A new chord starts: flush the previous chord's numbers.
                if curTime != 0 and len(curNumbers) < 3:
                    tempnumbers.append(self._pickDoubleNoteNumber(curNumbers))
                    curNumbers = []
                elif (curTime != 0 or noteCount == len(notes)) and len(curNumbers) > 2:
                    tempnumbers.append(-1)
                    curNumbers = []
                tempnotes.append((time, deepcopy(note)))
                curTime = time
            curNumbers.append(note.number)
            # Last event overall: flush the final chord too.
            if noteCount == len(notes) and len(curNumbers) < 3:
                tempnumbers.append(self._pickDoubleNoteNumber(curNumbers))
                curNumbers = []
            elif noteCount == len(notes) and len(curNumbers) > 2:
                tempnumbers.append(-1)
                curNumbers = []
        noteCount = 0
        for time, note in tempnotes:
            if tempnumbers[noteCount] != -1:
                note.number = tempnumbers[noteCount]
                noteCount += 1
                # Only inject inside the double-notes attack time window.
                if time > self.battleStartTimes[7] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[7] - self.currentPeriod * self.beatsPerBoard + self.battleDoubleLength:
                    notes.append((time, note))
            else:
                noteCount += 1
    return sorted(notes, key=lambda x: x[0])
def getRequiredNotesForRender(self, song, pos):
    # Return the note events to draw for the current view window
    # (pos - 2 periods .. pos + beatsPerBoard periods), applying any
    # active battle-mode modifiers.
    if self.battleStatus[2] and self.difficulty != 0:
        # Battle "difficulty change" attack is active: pull the window from
        # two adjacent difficulty tracks and splice them around the attack
        # window anchored at battleStartTimes[2].
        Log.debug(self.battleDiffUpValue)
        song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
        track0 = song.track[self.player]
        notes0 = [(time, event) for time, event in track0.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
        # NOTE(review): song.difficulty is temporarily reassigned and NOT
        # restored here — presumably callers rely on the last assignment;
        # verify against the caller.
        song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
        track1 = song.track[self.player]
        notes1 = [(time, event) for time, event in track1.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
        notes = []
        # notes0 (difficulty index battleDiffUpValue) is used OUTSIDE the
        # attack window; notes1 (index battleDiffUpValue - 1) INSIDE it.
        for time,note in notes0:
            if time < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or time > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
                notes.append((time,note))
        for time,note in notes1:
            if time > self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
                notes.append((time,note))
        # Drop the intermediate references before sorting the merged list.
        notes0 = None
        notes1 = None
        track0 = None
        track1 = None
        notes = sorted(notes, key=lambda x: x[0])
        #Log.debug(notes)
    else:
        # Normal play: single track, plain window query.
        track = song.track[self.player]
        notes = [(time, event) for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
    if self.battleStatus[7]:
        # Battle "double notes" attack: thicken the note stream.
        notes = self.getDoubleNotes(notes)
    return notes
#MFH - corrected and optimized:
def getRequiredNotesForJurgenOnTime(self, song, pos):
    """Return the upcoming, still-unhit Note events for the AI (Jurgen).

    Scans the window (pos - lateMargin, pos + 30) and keeps only real
    Note events that have not already been hopo'd, played or skipped,
    sorted by time.
    """
    track = song.track[self.player]
    # Single pass instead of the original two back-to-back comprehensions:
    # filter type and play-state together.
    notes = [(time, event)
             for time, event in track.getEvents(pos - self.lateMargin, pos + 30)
             if isinstance(event, Note)
             and not (event.hopod or event.played or event.skipped)]
    if self.battleStatus[7]:
        # Battle "double notes" attack also applies to the AI's view.
        notes = self.getDoubleNotes(notes)
    return sorted(notes, key=lambda x: x[0])  #MFH - sorted by TIME, not note number
def controlsMatchNotes(self, controls, notes):
    """Return True if the currently held frets satisfy every chord in *notes*.

    Side effects on success: marks notes played (or skipped, under the
    two-chord-max rule) and may bump self.twoChord / self.twoChordApply.
    """
    # no notes?
    if not notes:
        return False
    # Group the notes into chords keyed by their start time.
    chords = {}
    for time, note in notes:
        if not time in chords:
            chords[time] = []
        chords[time].append((time, note))
    # Make sure the notes are in the right time order.
    # Fix: the original used list.sort(cmp_function), which was removed in
    # Python 3; list() + key= matches controlsMatchNotes3 and works on both.
    chordlist = list(chords.values())
    chordlist.sort(key=lambda a: a[0][0])
    twochord = 0
    for chord in chordlist:
        # matching keys?
        requiredKeys = [note.number for time, note in chord]
        requiredKeys = self.uniqify(requiredKeys)
        if len(requiredKeys) > 2 and self.twoChordMax == True:
            # Two-chord-max: a 3+ note chord can be satisfied by holding
            # exactly two frets (the outer two of the chord).
            twochord = 0
            for k in self.keys:
                if controls.getState(k):
                    twochord += 1
            if twochord == 2:
                skipped = len(requiredKeys) - 2
                requiredKeys = [min(requiredKeys), max(requiredKeys)]
            else:
                twochord = 0
        for n in range(self.strings):
            if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
                return False
            if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
                # The lower frets can be held down
                if n > max(requiredKeys):
                    return False
        if twochord != 0:
            if twochord != 2:
                for time, note in chord:
                    note.played = True
            else:
                # Two-fret chord match: only the outer notes count as played.
                self.twoChordApply = True
                for time, note in chord:
                    note.skipped = True
                chord[0][1].skipped = False
                chord[-1][1].skipped = False
                chord[0][1].played = True
                chord[-1][1].played = True
    if twochord == 2:
        self.twoChord += skipped
    return True
def controlsMatchNotes2(self, controls, notes, hopo = False):
    """Variant of controlsMatchNotes used by the second-generation HOPO path.

    Differences from controlsMatchNotes: a note already flagged hopod
    whose fret is held is an immediate success, and during a HOPO the
    lower-fret restriction is relaxed.
    """
    # no notes?
    if not notes:
        return False
    # check each valid chord
    chords = {}
    for time, note in notes:
        if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
            #if hopo == True and controls.getState(self.keys[note.number]):
            self.playedNotes = []
            return True
        if not time in chords:
            chords[time] = []
        chords[time].append((time, note))
    # Make sure the notes are in the right time order.
    # Fix: list.sort(cmp_function) was removed in Python 3; use key= like
    # controlsMatchNotes3 does.
    chordlist = list(chords.values())
    chordlist.sort(key=lambda a: a[0][0])
    twochord = 0
    for chord in chordlist:
        # matching keys?
        requiredKeys = [note.number for time, note in chord]
        requiredKeys = self.uniqify(requiredKeys)
        if len(requiredKeys) > 2 and self.twoChordMax == True:
            # Two-chord-max: large chords may be played with the outer pair.
            twochord = 0
            for n, k in enumerate(self.keys):
                if controls.getState(k):
                    twochord += 1
            if twochord == 2:
                skipped = len(requiredKeys) - 2
                requiredKeys = [min(requiredKeys), max(requiredKeys)]
            else:
                twochord = 0
        for n in range(self.strings):
            if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
                return False
            if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
                # The lower frets can be held down
                if hopo == False and n >= min(requiredKeys):
                    return False
        if twochord != 0:
            if twochord != 2:
                for time, note in chord:
                    note.played = True
            else:
                self.twoChordApply = True
                for time, note in chord:
                    note.skipped = True
                chord[0][1].skipped = False
                chord[-1][1].skipped = False
                chord[0][1].played = True
                chord[-1][1].played = True
    if twochord == 2:
        self.twoChord += skipped
    return True
def controlsMatchNotes3(self, controls, notes, hopo = False):
    # MFH-era matcher: delegates per-chord fret checking to
    # controlsMatchNote3 and records missed chords in self.missedNotes /
    # self.missedNoteNums instead of bailing out on the first mismatch.
    # Always returns True unless *notes* is empty (callers inspect the
    # note flags, not the return value — TODO confirm).
    # no notes?
    if not notes:
        return False
    # check each valid chord
    chords = {}
    for time, note in notes:
        # A note already flagged as hopo'd whose fret is held counts as an
        # immediate hit.
        if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
            #if hopo == True and controls.getState(self.keys[note.number]):
            self.playedNotes = []
            return True
        if not time in chords:
            chords[time] = []
        chords[time].append((time, note))
    #Make sure the notes are in the right time order
    chordlist = chords.values()
    #chordlist.sort(lambda a, b: cmp(a[0][0], b[0][0]))
    chordlist.sort(key=lambda a: a[0][0])
    self.missedNotes = []
    self.missedNoteNums = []
    twochord = 0
    for chord in chordlist:
        # matching keys?
        requiredKeys = [note.number for time, note in chord]
        requiredKeys = self.uniqify(requiredKeys)
        if len(requiredKeys) > 2 and self.twoChordMax == True:
            # Two-chord-max: a 3+ note chord may be satisfied by exactly
            # two held frets (reduced to the chord's outer pair).
            twochord = 0
            for n, k in enumerate(self.keys):
                if controls.getState(k):
                    twochord += 1
            if twochord == 2:
                skipped = len(requiredKeys) - 2
                requiredKeys = [min(requiredKeys), max(requiredKeys)]
            else:
                twochord = 0
        if (self.controlsMatchNote3(controls, chord, requiredKeys, hopo)):
            if twochord != 2:
                for time, note in chord:
                    note.played = True
            else:
                # Two-fret match: only the outer notes count as played.
                self.twoChordApply = True
                for time, note in chord:
                    note.skipped = True
                chord[0][1].skipped = False
                chord[-1][1].skipped = False
                chord[0][1].played = True
                chord[-1][1].played = True
            break
        if hopo == True:
            break
        # Chord not matched (strum path): remember it as missed and keep
        # scanning later chords.
        self.missedNotes.append(chord)
    else:
        # Loop ran to completion without a match: for-else clears the
        # missed list (nothing was actually hit, so nothing is penalized).
        self.missedNotes = []
        self.missedNoteNums = []
    for chord in self.missedNotes:
        for time, note in chord:
            if self.debugMode:
                self.missedNoteNums.append(note.number)
            note.skipped = True
            note.played = False
    if twochord == 2:
        self.twoChord += skipped
    return True
#MFH - special function for HOPO intentions checking
def controlsMatchNextChord(self, controls, notes):
    # Check ONLY the earliest chord in *notes* against the held frets
    # (used to detect whether the player is aiming at the next chord while
    # HOPOing). Returns on the first loop iteration by construction; falls
    # off the end (returning None) if the chord list is empty.
    # no notes?
    if not notes:
        return False
    # check each valid chord
    chords = {}
    for time, note in notes:
        if not time in chords:
            chords[time] = []
        chords[time].append((time, note))
    #Make sure the notes are in the right time order
    chordlist = chords.values()
    chordlist.sort(key=lambda a: a[0][0])
    twochord = 0
    for chord in chordlist:
        # matching keys?
        # NOTE(review): requiredKeys is stored on self here (unlike the
        # sibling matchers) — presumably read elsewhere; verify callers.
        self.requiredKeys = [note.number for time, note in chord]
        self.requiredKeys = self.uniqify(self.requiredKeys)
        if len(self.requiredKeys) > 2 and self.twoChordMax == True:
            # Two-chord-max: reduce a 3+ note chord to its outer pair when
            # exactly two frets are held.
            twochord = 0
            self.twoChordApply = True
            for n, k in enumerate(self.keys):
                if controls.getState(k):
                    twochord += 1
            if twochord == 2:
                skipped = len(self.requiredKeys) - 2
                self.requiredKeys = [min(self.requiredKeys), max(self.requiredKeys)]
            else:
                twochord = 0
        # Only the first (earliest) chord is ever tested.
        if (self.controlsMatchNote3(controls, chord, self.requiredKeys, False)):
            return True
        else:
            return False
def uniqify(self, seq, idfun=None):
    """Return a copy of *seq* with duplicates removed, preserving order.

    idfun, when given, maps each element to the key used for duplicate
    detection; the first element seen for each key is kept.
    """
    if idfun is None:
        idfun = lambda x: x
    seen = {}
    unique = []
    for element in seq:
        key = idfun(element)
        if key not in seen:
            seen[key] = 1
            unique.append(element)
    return unique
def controlsMatchNote3(self, controls, chordTuple, requiredKeys, hopo):
    """Return True when the held frets satisfy this chord / single note."""
    # A fret counts as held if either of its two mapped keys is down.
    held = lambda fret: controls.getState(self.keys[fret]) or controls.getState(self.keys[fret + 5])
    if len(chordTuple) > 1:
        #Chords must match exactly
        for fret in range(self.strings):
            if (fret in requiredKeys) != bool(held(fret)):
                return False
    else:
        #Single Note must match that note
        requiredKey = requiredKeys[0]
        if not held(requiredKey):
            return False
        #myfingershurt: this is where to filter out higher frets held when HOPOing:
        if hopo == False or self.hopoStyle in (2, 3):
            #Check for higher numbered frets if not a HOPO or if GH2 strict mode
            for n, k in enumerate(self.keys):
                isHigher = (requiredKey < n < 5) or (n > 4 and n > requiredKey + 5)
                if isHigher and controls.getState(k):
                    #higher numbered frets cannot be held
                    return False
    return True
def areNotesTappable(self, notes):
    """Return True if any note in *notes* has tappable > 1.

    Fix: the original returned None (bare ``return``) for an empty list
    but False otherwise; now consistently returns a bool.
    """
    if not notes:
        return False
    return any(note.tappable > 1 for time, note in notes)
def startPick(self, song, pos, controls, hopo = False):
    """Handle a strum at position *pos*; mark matched notes as played.

    Returns True when the held frets matched the required notes.
    """
    if hopo == True:
        # Bug fix: the original called the bare name ``startPick2`` —
        # it is a method, so that raised NameError on every HOPO pick.
        res = self.startPick2(song, pos, controls, hopo)
        return res
    if not song:
        return False
    if not song.readyToGo:
        return False
    self.playedNotes = []
    self.matchingNotes = self.getRequiredNotes(song, pos)
    if self.controlsMatchNotes(controls, self.matchingNotes):
        self.pickStartPos = pos
        for time, note in self.matchingNotes:
            if note.skipped == True:
                continue
            # Track the latest matched note time as the pick start.
            self.pickStartPos = max(self.pickStartPos, time)
            note.played = True
            self.playedNotes.append([time, note])
            if self.guitarSolo:
                self.currentGuitarSoloHitNotes += 1
        return True
    return False
def startPick2(self, song, pos, controls, hopo = False):
    # Second-generation pick handler (HOPO-aware). On a match, flags each
    # note as hopod (when hopo) or played, updates the HOPO state machine
    # (hopoActive / wasLastNoteHopod / hopoLast) and returns True.
    if not song:
        return False
    if not song.readyToGo:
        return False
    self.playedNotes = []
    self.matchingNotes = self.getRequiredNotes2(song, pos, hopo)
    if self.controlsMatchNotes2(controls, self.matchingNotes, hopo):
        self.pickStartPos = pos
        for time, note in self.matchingNotes:
            if note.skipped == True:
                continue
            self.pickStartPos = max(self.pickStartPos, time)
            if hopo:
                note.hopod = True
            else:
                note.played = True
            # tappable 1/2: HOPO window opens at +time; 3: stored negated
            # (presumably to distinguish the case later — TODO confirm).
            if note.tappable == 1 or note.tappable == 2:
                self.hopoActive = time
                self.wasLastNoteHopod = True
            elif note.tappable == 3:
                self.hopoActive = -time
                self.wasLastNoteHopod = True
            else:
                self.hopoActive = 0
                self.wasLastNoteHopod = False
            self.playedNotes.append([time, note])
            if self.guitarSolo:
                self.currentGuitarSoloHitNotes += 1
        # Uses the last loop variable; safe because controlsMatchNotes2
        # returned True, so matchingNotes was non-empty.
        self.hopoLast = note.number
        return True
    return False
def startPick3(self, song, pos, controls, hopo = False):
    # Third-generation (MFH) pick handler. controlsMatchNotes3 flags the
    # matched notes; this method then collects every note flagged played,
    # updates shader feedback, the HOPO state machine and chord tracking.
    # Returns True if at least one note was hit.
    if not song:
        return False
    if not song.readyToGo:
        return False
    self.lastPlayedNotes = self.playedNotes
    self.playedNotes = []
    self.matchingNotes = self.getRequiredNotesMFH(song, pos)
    # Return value intentionally ignored; note.played flags carry the result.
    self.controlsMatchNotes3(controls, self.matchingNotes, hopo)
    #myfingershurt
    for time, note in self.matchingNotes:
        if note.played != True:
            continue
        # Light up the fret in the shader pipeline, if shaders are on.
        if shaders.turnon:
            shaders.var["fret"][self.player][note.number]=shaders.time()
            shaders.var["fretpos"][self.player][note.number]=pos
        self.pickStartPos = pos
        self.pickStartPos = max(self.pickStartPos, time)
        if hopo:
            note.hopod = True
        else:
            note.played = True
        #self.wasLastNoteHopod = False
        if note.tappable == 1 or note.tappable == 2:
            self.hopoActive = time
            self.wasLastNoteHopod = True
        elif note.tappable == 3:
            self.hopoActive = -time
            self.wasLastNoteHopod = True
            if hopo: #MFH - you just tapped a 3 - make a note of it. (har har)
                self.hopoProblemNoteNum = note.number
                self.sameNoteHopoString = True
        else:
            self.hopoActive = 0
            self.wasLastNoteHopod = False
        self.hopoLast = note.number
        self.playedNotes.append([time, note])
        if self.guitarSolo:
            self.currentGuitarSoloHitNotes += 1
    #myfingershurt: be sure to catch when a chord is played
    if len(self.playedNotes) > 1:
        lastPlayedNote = None
        for time, note in self.playedNotes:
            if isinstance(lastPlayedNote, Note):
                if note.tappable == 1 and lastPlayedNote.tappable == 1:
                    self.LastStrumWasChord = True
                    #self.sameNoteHopoString = False
                else:
                    self.LastStrumWasChord = False
            lastPlayedNote = note
    elif len(self.playedNotes) > 0: #ensure at least that a note was played here
        self.LastStrumWasChord = False
    if len(self.playedNotes) != 0:
        return True
    return False
def soloFreestylePick(self, song, pos, controls):
    """Count the solo keys (upper key row) currently pressed.

    Records each key's state in self.freestyleHit and triggers shader
    feedback for pressed frets. Returns the number of pressed keys.
    """
    hits = 0
    for fret in range(5):
        pressed = controls.getState(self.keys[fret + 5])
        self.freestyleHit[fret] = pressed
        if pressed:
            if shaders.turnon:
                shaders.var["fret"][self.player][fret] = shaders.time()
                shaders.var["fretpos"][self.player][fret] = pos
            hits += 1
    return hits
#MFH - TODO - handle freestyle picks here
def freestylePick(self, song, pos, controls):
    """Count fretted keys held while a strum action is active.

    Returns 0 immediately when neither strum action is down; otherwise
    records each fret's state in self.freestyleHit, triggers shader
    feedback, and returns the number of pressed frets.
    """
    if not controls.getState(self.actions[0]) and not controls.getState(self.actions[1]):
        return 0
    hits = 0
    for fret in range(5):
        pressed = controls.getState(self.keys[fret])
        self.freestyleHit[fret] = pressed
        if pressed:
            if shaders.turnon:
                shaders.var["fret"][self.player][fret] = shaders.time()
                shaders.var["fretpos"][self.player][fret] = pos
            hits += 1
    return hits
def endPick(self, pos):
    """Release the current pick at position *pos*.

    Clears self.playedNotes. Returns False when any held note's sustain
    extends past pos + noteReleaseMargin (released too early), else True.
    """
    released = self.playedNotes
    self.playedNotes = []
    for startTime, note in released:
        if startTime + note.length > pos + self.noteReleaseMargin:
            return False
    return True
def getPickLength(self, pos):
    """Return how long the current pick has been held, capped by the
    shortest currently played note. 0.0 when nothing is held."""
    if not self.playedNotes:
        return 0.0
    # The pick length is limited by the played notes.
    shortest = min(note.length for _, note in self.playedNotes)
    return min(pos - self.pickStartPos, shortest)
def coOpRescue(self, pos):
    # Rescue this player in co-op play: start the restart timer, record
    # when the rescue happened, and drain star power as the cost.
    self.coOpRestart = True #initializes Restart Timer
    self.coOpRescueTime = pos
    self.starPower = 0
    Log.debug("Rescued at " + str(pos))
def run(self, ticks, pos, controls):
    """Per-frame update: clock, star-power drain, fret weights/activity,
    variable-BPM smoothing and sustain checking.

    Returns False as soon as a played note's sustain has been exceeded
    (pos past its end), True otherwise.
    """
    if not self.paused:
        self.time += ticks
    #MFH - Determine which frame to display for starpower notes
    if self.starspin:
        self.indexCount = self.indexCount + 1
        if self.indexCount > self.Animspeed-1:
            self.indexCount = 0
        self.starSpinFrameIndex = (self.indexCount * self.starSpinFrames - (self.indexCount * self.starSpinFrames) % self.Animspeed) / self.Animspeed
        if self.starSpinFrameIndex > self.starSpinFrames - 1:
            self.starSpinFrameIndex = 0
    #myfingershurt: must not decrease SP if paused.
    if self.starPowerActive == True and self.paused == False:
        self.starPower -= ticks/self.starPowerDecreaseDivisor
        if self.starPower <= 0:
            self.starPower = 0
            self.starPowerActive = False
            #MFH - play star power deactivation sound, if it exists (if not play nothing)
            if self.engine.data.starDeActivateSoundFound:
                #self.engine.data.starDeActivateSound.setVolume(self.sfxVolume)
                self.engine.data.starDeActivateSound.play()
    # update frets
    if self.editorMode:
        if (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
            # Bug fix: activeFrets was appended to without being
            # initialized in this branch, raising NameError on the first
            # strum in editor mode.
            activeFrets = []
            for i in range(self.strings):
                if controls.getState(self.keys[i]) or controls.getState(self.keys[i+5]):
                    activeFrets.append(i)
            # Fall back to the selected string when no fret is held.
            activeFrets = activeFrets or [self.selectedString]
        else:
            activeFrets = []
    else:
        activeFrets = [note.number for time, note in self.playedNotes]
    for n in range(self.strings):
        if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]) or (self.editorMode and self.selectedString == n):
            self.fretWeight[n] = 0.5
        else:
            self.fretWeight[n] = max(self.fretWeight[n] - ticks / 64.0, 0.0)
        if n in activeFrets:
            self.fretActivity[n] = min(self.fretActivity[n] + ticks / 32.0, 1.0)
        else:
            self.fretActivity[n] = max(self.fretActivity[n] - ticks / 64.0, 0.0)
        #MFH - THIS is where note sustains should be determined... NOT in renderNotes / renderFrets / renderFlames -.-
        if self.fretActivity[n]:
            self.hit[n] = True
        else:
            self.hit[n] = False
    if self.vbpmLogicType == 0: #MFH - VBPM (old)
        # Smoothly approach the target BPM by 3% per frame, rounded to
        # 4 decimals to guarantee convergence.
        if self.currentBpm != self.targetBpm:
            diff = self.targetBpm - self.currentBpm
            if (round((diff * .03), 4) != 0):
                self.currentBpm = round(self.currentBpm + (diff * .03), 4)
            else:
                self.currentBpm = self.targetBpm
            self.setBPM(self.currentBpm) # glorandwarf: was setDynamicBPM(self.currentBpm)
    for time, note in self.playedNotes:
        # The current position has passed the end of a held note's sustain.
        if pos > time + note.length:
            return False
    return True
| codeparrot/github-code-clean |
#!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible module metadata: marks this module as a community-supported preview.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_container
short_description: manage docker containers
description:
- Manage the life cycle of docker containers.
- Supports check mode. Run with C(--check) and C(--diff) to view config difference and list of actions to be taken.
version_added: "2.1"
notes:
- For most config changes, the container needs to be recreated, i.e. the existing container has to be destroyed and
a new one created. This can cause unexpected data loss and downtime. You can use the I(comparisons) option to
prevent this.
- If the module needs to recreate the container, it will only use the options provided to the module to create the
new container (except I(image)). Therefore, always specify *all* options relevant to the container.
- When I(restart) is set to C(true), the module will only restart the container if no config changes are detected.
Please note that several options have default values; if the container to be restarted uses different values for
these options, it will be recreated instead. The options with default values which can cause this are I(auto_remove),
I(detach), I(init), I(interactive), I(memory), I(paused), I(privileged), I(read_only) and I(tty). This behavior
can be changed by setting I(container_default_behavior) to C(no_defaults), which will be the default value from
Ansible 2.14 on.
options:
auto_remove:
description:
- Enable auto-removal of the container on daemon side when the container's process exits.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
version_added: "2.4"
blkio_weight:
description:
- Block IO (relative weight), between 10 and 1000.
type: int
capabilities:
description:
- List of capabilities to add to the container.
type: list
elements: str
cap_drop:
description:
- List of capabilities to drop from the container.
type: list
elements: str
version_added: "2.7"
cleanup:
description:
- Use with I(detach=false) to remove the container after successful execution.
type: bool
default: no
version_added: "2.2"
command:
description:
- Command to execute when the container starts. A command may be either a string or a list.
- Prior to version 2.4, strings were split on commas.
type: raw
comparisons:
description:
- Allows to specify how properties of existing containers are compared with
module options to decide whether the container should be recreated / updated
or not.
- Only options which correspond to the state of a container as handled by the
Docker daemon can be specified, as well as C(networks).
- Must be a dictionary specifying for an option one of the keys C(strict), C(ignore)
and C(allow_more_present).
- If C(strict) is specified, values are tested for equality, and changes always
result in updating or restarting. If C(ignore) is specified, changes are ignored.
- C(allow_more_present) is allowed only for lists, sets and dicts. If it is
specified for lists or sets, the container will only be updated or restarted if
the module option contains a value which is not present in the container's
options. If the option is specified for a dict, the container will only be updated
or restarted if the module option contains a key which isn't present in the
container's option, or if the value of a key present differs.
- The wildcard option C(*) can be used to set one of the default values C(strict)
or C(ignore) to *all* comparisons which are not explicitly set to other values.
- See the examples for details.
type: dict
version_added: "2.8"
container_default_behavior:
description:
- Various module options used to have default values. This causes problems with
containers which use different values for these options.
- The default value is C(compatibility), which will ensure that the default values
are used when the values are not explicitly specified by the user.
- From Ansible 2.14 on, the default value will switch to C(no_defaults). To avoid
deprecation warnings, please set I(container_default_behavior) to an explicit
value.
- This affects the I(auto_remove), I(detach), I(init), I(interactive), I(memory),
I(paused), I(privileged), I(read_only) and I(tty) options.
type: str
choices:
- compatibility
- no_defaults
version_added: "2.10"
cpu_period:
description:
- Limit CPU CFS (Completely Fair Scheduler) period.
- See I(cpus) for an easier to use alternative.
type: int
cpu_quota:
description:
- Limit CPU CFS (Completely Fair Scheduler) quota.
- See I(cpus) for an easier to use alternative.
type: int
cpus:
description:
- Specify how much of the available CPU resources a container can use.
- A value of C(1.5) means that at most one and a half CPU (core) will be used.
type: float
version_added: '2.10'
cpuset_cpus:
description:
- CPUs in which to allow execution C(1,3) or C(1-3).
type: str
cpuset_mems:
description:
- Memory nodes (MEMs) in which to allow execution C(0-3) or C(0,1).
type: str
cpu_shares:
description:
- CPU shares (relative weight).
type: int
detach:
description:
- Enable detached mode to leave the container running in background.
- If disabled, the task will reflect the status of the container run (failed if the command failed).
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(yes).
type: bool
devices:
description:
- List of host device bindings to add to the container.
- "Each binding is a mapping expressed in the format C(<path_on_host>:<path_in_container>:<cgroup_permissions>)."
type: list
elements: str
device_read_bps:
description:
- "List of device path and read rate (bytes per second) from device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
- "Device read limit in format C(<number>[<unit>])."
- "Number is a positive integer. Unit can be one of C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- "Omitting the unit defaults to bytes."
type: str
required: yes
version_added: "2.8"
device_write_bps:
description:
- "List of device and write rate (bytes per second) to device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
          - "Device write limit in format C(<number>[<unit>])."
- "Number is a positive integer. Unit can be one of C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- "Omitting the unit defaults to bytes."
type: str
required: yes
version_added: "2.8"
device_read_iops:
description:
- "List of device and read rate (IO per second) from device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
- "Device read limit."
- "Must be a positive integer."
type: int
required: yes
version_added: "2.8"
device_write_iops:
description:
- "List of device and write rate (IO per second) to device."
type: list
elements: dict
suboptions:
path:
description:
- Device path in the container.
type: str
required: yes
rate:
description:
          - "Device write limit."
- "Must be a positive integer."
type: int
required: yes
version_added: "2.8"
dns_opts:
description:
- List of DNS options.
type: list
elements: str
dns_servers:
description:
- List of custom DNS servers.
type: list
elements: str
dns_search_domains:
description:
- List of custom DNS search domains.
type: list
elements: str
domainname:
description:
- Container domainname.
type: str
version_added: "2.5"
env:
description:
- Dictionary of key,value pairs.
- Values which might be parsed as numbers, booleans or other types by the YAML parser must be quoted (e.g. C("true")) in order to avoid data loss.
type: dict
env_file:
description:
- Path to a file, present on the target, containing environment variables I(FOO=BAR).
- If variable also present in I(env), then the I(env) value will override.
type: path
version_added: "2.2"
entrypoint:
description:
- Command that overwrites the default C(ENTRYPOINT) of the image.
type: list
elements: str
etc_hosts:
description:
- Dict of host-to-IP mappings, where each host name is a key in the dictionary.
Each host name will be added to the container's C(/etc/hosts) file.
type: dict
exposed_ports:
description:
- List of additional container ports which informs Docker that the container
listens on the specified network ports at runtime.
- If the port is already exposed using C(EXPOSE) in a Dockerfile, it does not
need to be exposed again.
type: list
elements: str
aliases:
- exposed
- expose
force_kill:
description:
- Use the kill command when stopping a running container.
type: bool
default: no
aliases:
- forcekill
groups:
description:
- List of additional group names and/or IDs that the container process will run as.
type: list
elements: str
healthcheck:
description:
- Configure a check that is run to determine whether or not containers for this service are "healthy".
- "See the docs for the L(HEALTHCHECK Dockerfile instruction,https://docs.docker.com/engine/reference/builder/#healthcheck)
for details on how healthchecks work."
- "I(interval), I(timeout) and I(start_period) are specified as durations. They accept duration as a string in a format
        that looks like: C(5h34m56s), C(1m30s) etc. The supported units are C(us), C(ms), C(s), C(m) and C(h)."
type: dict
suboptions:
test:
description:
- Command to run to check health.
- Must be either a string or a list. If it is a list, the first item must be one of C(NONE), C(CMD) or C(CMD-SHELL).
type: raw
interval:
description:
- Time between running the check.
- The default used by the Docker daemon is C(30s).
type: str
timeout:
description:
- Maximum time to allow one check to run.
- The default used by the Docker daemon is C(30s).
type: str
retries:
description:
- Consecutive number of failures needed to report unhealthy.
- The default used by the Docker daemon is C(3).
type: int
start_period:
description:
- Start period for the container to initialize before starting health-retries countdown.
- The default used by the Docker daemon is C(0s).
type: str
version_added: "2.8"
hostname:
description:
- The container's hostname.
type: str
ignore_image:
description:
- When I(state) is C(present) or C(started), the module compares the configuration of an existing
container to requested configuration. The evaluation includes the image version. If the image
version in the registry does not match the container, the container will be recreated. You can
stop this behavior by setting I(ignore_image) to C(True).
- "*Warning:* This option is ignored if C(image: ignore) or C(*: ignore) is specified in the
I(comparisons) option."
type: bool
default: no
version_added: "2.2"
image:
description:
- Repository path and tag used to create the container. If an image is not found or pull is true, the image
will be pulled from the registry. If no tag is included, C(latest) will be used.
- Can also be an image ID. If this is the case, the image is assumed to be available locally.
The I(pull) option is ignored for this case.
type: str
init:
description:
- Run an init inside the container that forwards signals and reaps processes.
- This option requires Docker API >= 1.25.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
version_added: "2.6"
interactive:
description:
- Keep stdin open after a container is launched, even if not attached.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
ipc_mode:
description:
- Set the IPC mode for the container.
- Can be one of C(container:<name|id>) to reuse another container's IPC namespace or C(host) to use
the host's IPC namespace within the container.
type: str
keep_volumes:
description:
- Retain volumes associated with a removed container.
type: bool
default: yes
kill_signal:
description:
- Override default signal used to kill a running container.
type: str
kernel_memory:
description:
- "Kernel memory limit in format C(<number>[<unit>]). Number is a positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte). Minimum is C(4M)."
- Omitting the unit defaults to bytes.
type: str
labels:
description:
- Dictionary of key value pairs.
type: dict
links:
description:
- List of name aliases for linked containers in the format C(container_name:alias).
- Setting this will force container to be restarted.
type: list
elements: str
log_driver:
description:
- Specify the logging driver. Docker uses C(json-file) by default.
- See L(here,https://docs.docker.com/config/containers/logging/configure/) for possible choices.
type: str
log_options:
description:
- Dictionary of options specific to the chosen I(log_driver).
- See U(https://docs.docker.com/engine/admin/logging/overview/) for details.
type: dict
aliases:
- log_opt
mac_address:
description:
- Container MAC address (e.g. 92:d0:c6:0a:29:33).
type: str
memory:
description:
- "Memory limit in format C(<number>[<unit>]). Number is a positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes.
      - If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C("0").
type: str
memory_reservation:
description:
- "Memory soft limit in format C(<number>[<unit>]). Number is a positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes.
type: str
memory_swap:
description:
- "Total memory limit (memory + swap) in format C(<number>[<unit>]).
Number is a positive integer. Unit can be C(B) (byte), C(K) (kibibyte, 1024B),
C(M) (mebibyte), C(G) (gibibyte), C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes.
type: str
memory_swappiness:
description:
- Tune a container's memory swappiness behavior. Accepts an integer between 0 and 100.
- If not set, the value will remain the same if the container exists and will be inherited
  from the host machine if it is (re-)created.
type: int
mounts:
version_added: "2.9"
type: list
elements: dict
description:
- Specification for mounts to be added to the container. More powerful alternative to I(volumes).
suboptions:
target:
description:
- Path inside the container.
type: str
required: true
source:
description:
- Mount source (e.g. a volume name or a host path).
type: str
type:
description:
- The mount type.
- Note that C(npipe) is only supported by Docker for Windows.
type: str
choices:
- bind
- npipe
- tmpfs
- volume
default: volume
read_only:
description:
- Whether the mount should be read-only.
type: bool
consistency:
description:
- The consistency requirement for the mount.
type: str
choices:
- cached
- consistent
- default
- delegated
propagation:
description:
- Propagation mode. Only valid for the C(bind) type.
type: str
choices:
- private
- rprivate
- shared
- rshared
- slave
- rslave
no_copy:
description:
- False if the volume should be populated with the data from the target. Only valid for the C(volume) type.
- The default value is C(false).
type: bool
labels:
description:
- User-defined name and labels for the volume. Only valid for the C(volume) type.
type: dict
volume_driver:
description:
- Specify the volume driver. Only valid for the C(volume) type.
- See L(here,https://docs.docker.com/storage/volumes/#use-a-volume-driver) for details.
type: str
volume_options:
description:
- Dictionary of options specific to the chosen volume_driver. See
L(here,https://docs.docker.com/storage/volumes/#use-a-volume-driver) for details.
type: dict
tmpfs_size:
description:
- "The size for the tmpfs mount in bytes in format <number>[<unit>]."
- "Number is a positive integer. Unit can be one of C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- "Omitting the unit defaults to bytes."
type: str
tmpfs_mode:
description:
- The permission mode for the tmpfs mount.
type: str
name:
description:
- Assign a name to a new container or match an existing container.
- When identifying an existing container name may be a name or a long or short container ID.
type: str
required: yes
network_mode:
description:
- Connect the container to a network. Choices are C(bridge), C(host), C(none), C(container:<name|id>), C(<network_name>) or C(default).
- "*Note* that from Ansible 2.14 on, if I(networks_cli_compatible) is C(true) and I(networks) contains at least one network,
the default value for I(network_mode) will be the name of the first network in the I(networks) list. You can prevent this
by explicitly specifying a value for I(network_mode), like the default value C(default) which will be used by Docker if
I(network_mode) is not specified."
type: str
userns_mode:
description:
- Set the user namespace mode for the container. Currently, the only valid values are C(host) and the empty string.
type: str
version_added: "2.5"
networks:
description:
- List of networks the container belongs to.
- For examples of the data structure and usage see EXAMPLES below.
- To remove a container from one or more networks, use the I(purge_networks) option.
- Note that as opposed to C(docker run ...), M(docker_container) does not remove the default
network if I(networks) is specified. You need to explicitly use I(purge_networks) to enforce
the removal of the default network (and all other networks not explicitly mentioned in I(networks)).
Alternatively, use the I(networks_cli_compatible) option, which will be enabled by default from Ansible 2.12 on.
type: list
elements: dict
suboptions:
name:
description:
- The network's name.
type: str
required: yes
ipv4_address:
description:
- The container's IPv4 address in this network.
type: str
ipv6_address:
description:
- The container's IPv6 address in this network.
type: str
links:
description:
- A list of containers to link to.
type: list
elements: str
aliases:
description:
- List of aliases for this container in this network. These names
can be used in the network to reach this container.
type: list
elements: str
version_added: "2.2"
networks_cli_compatible:
description:
- "When networks are provided to the module via the I(networks) option, the module
behaves differently than C(docker run --network): C(docker run --network other)
will create a container with network C(other) attached, but the default network
not attached. This module with I(networks: {name: other}) will create a container
with both C(default) and C(other) attached. If I(purge_networks) is set to C(yes),
the C(default) network will be removed afterwards."
- "If I(networks_cli_compatible) is set to C(yes), this module will behave as
C(docker run --network) and will *not* add the default network if I(networks) is
specified. If I(networks) is not specified, the default network will be attached."
- "*Note* that docker CLI also sets I(network_mode) to the name of the first network
added if C(--network) is specified. For more compatibility with docker CLI, you
explicitly have to set I(network_mode) to the name of the first network you're
adding. This behavior will change for Ansible 2.14: then I(network_mode) will
automatically be set to the first network name in I(networks) if I(network_mode)
is not specified, I(networks) has at least one entry and I(networks_cli_compatible)
is C(true)."
- Current value is C(no). A new default of C(yes) will be set in Ansible 2.12.
type: bool
version_added: "2.8"
oom_killer:
description:
- Whether or not to disable OOM Killer for the container.
type: bool
oom_score_adj:
description:
- An integer value containing the score given to the container in order to tune
OOM killer preferences.
type: int
version_added: "2.2"
output_logs:
description:
- If set to true, output of the container command will be printed.
- Only effective when I(log_driver) is set to C(json-file) or C(journald).
type: bool
default: no
version_added: "2.7"
paused:
description:
- Use with the started state to pause running processes inside the container.
- If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
pid_mode:
description:
- Set the PID namespace mode for the container.
- Note that Docker SDK for Python < 2.0 only supports C(host). Newer versions of the
Docker SDK for Python (docker) allow all values supported by the Docker daemon.
type: str
pids_limit:
description:
- Set PIDs limit for the container. It accepts an integer value.
- Set C(-1) for unlimited PIDs.
type: int
version_added: "2.8"
privileged:
description:
- Give extended privileges to the container.
- If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
published_ports:
description:
- List of ports to publish from the container to the host.
- "Use docker CLI syntax: C(8000), C(9000:8000), or C(0.0.0.0:9000:8000), where 8000 is a
container port, 9000 is a host port, and 0.0.0.0 is a host interface."
- Port ranges can be used for source and destination ports. If two ranges with
different lengths are specified, the shorter range will be used.
- "Bind addresses must be either IPv4 or IPv6 addresses. Hostnames are *not* allowed. This
is different from the C(docker) command line utility. Use the L(dig lookup,../lookup/dig.html)
to resolve hostnames."
- A value of C(all) will publish all exposed container ports to random host ports, ignoring
any other mappings.
- If I(networks) parameter is provided, will inspect each network to see if there exists
a bridge network with optional parameter C(com.docker.network.bridge.host_binding_ipv4).
If such a network is found, then published ports where no host IP address is specified
will be bound to the host IP pointed to by C(com.docker.network.bridge.host_binding_ipv4).
Note that the first bridge network with a C(com.docker.network.bridge.host_binding_ipv4)
value encountered in the list of I(networks) is the one that will be used.
type: list
elements: str
aliases:
- ports
pull:
description:
- If true, always pull the latest version of an image. Otherwise, will only pull an image
when missing.
- "*Note:* images are only pulled when specified by name. If the image is specified
as an image ID (hash), it cannot be pulled."
type: bool
default: no
purge_networks:
description:
- Remove the container from ALL networks not included in I(networks) parameter.
- Any default networks such as C(bridge), if not found in I(networks), will be removed as well.
type: bool
default: no
version_added: "2.2"
read_only:
description:
- Mount the container's root file system as read-only.
- If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
recreate:
description:
- Use with present and started states to force the re-creation of an existing container.
type: bool
default: no
restart:
description:
- Use with started state to force a matching container to be stopped and restarted.
type: bool
default: no
restart_policy:
description:
- Container restart policy.
- Place quotes around C(no) option.
type: str
choices:
- 'no'
- 'on-failure'
- 'always'
- 'unless-stopped'
restart_retries:
description:
- Use with restart policy to control maximum number of restart attempts.
type: int
runtime:
description:
- Runtime to use for the container.
type: str
version_added: "2.8"
shm_size:
description:
- "Size of C(/dev/shm) in format C(<number>[<unit>]). Number is positive integer.
Unit can be C(B) (byte), C(K) (kibibyte, 1024B), C(M) (mebibyte), C(G) (gibibyte),
C(T) (tebibyte), or C(P) (pebibyte)."
- Omitting the unit defaults to bytes. If you omit the size entirely, Docker daemon uses C(64M).
type: str
security_opts:
description:
- List of security options in the form of C("label:user:User").
type: list
elements: str
state:
description:
- 'C(absent) - A container matching the specified name will be stopped and removed. Use I(force_kill) to kill the container
rather than stopping it. Use I(keep_volumes) to retain volumes associated with the removed container.'
- 'C(present) - Asserts the existence of a container matching the name and any provided configuration parameters. If no
container matches the name, a container will be created. If a container matches the name but the provided configuration
does not match, the container will be updated, if it can be. If it cannot be updated, it will be removed and re-created
with the requested config.'
- 'C(started) - Asserts that the container is first C(present), and then if the container is not running moves it to a running
state. Use I(restart) to force a matching container to be stopped and restarted.'
- 'C(stopped) - Asserts that the container is first C(present), and then if the container is running moves it to a stopped
state.'
- To control what will be taken into account when comparing configuration, see the I(comparisons) option. To avoid that the
image version will be taken into account, you can also use the I(ignore_image) option.
- Use the I(recreate) option to always force re-creation of a matching container, even if it is running.
- If the container should be killed instead of stopped in case it needs to be stopped for recreation, or because I(state) is
C(stopped), please use the I(force_kill) option. Use I(keep_volumes) to retain volumes associated with a removed container.
- Use I(keep_volumes) to retain volumes associated with a removed container.
type: str
default: started
choices:
- absent
- present
- stopped
- started
stop_signal:
description:
- Override default signal used to stop the container.
type: str
stop_timeout:
description:
- Number of seconds to wait for the container to stop before sending C(SIGKILL).
When the container is created by this module, its C(StopTimeout) configuration
will be set to this value.
- When the container is stopped, will be used as a timeout for stopping the
container. In case the container has a custom C(StopTimeout) configuration,
the behavior depends on the version of the docker daemon. New versions of
the docker daemon will always use the container's configured C(StopTimeout)
value if it has been configured.
type: int
trust_image_content:
description:
- If C(yes), skip image verification.
- The option has never been used by the module. It will be removed in Ansible 2.14.
type: bool
default: no
tmpfs:
description:
- Mount a tmpfs directory.
type: list
elements: str
version_added: 2.4
tty:
description:
- Allocate a pseudo-TTY.
- If I(container_default_behavior) is set to C(compatibility) (the default value), this
option has a default of C(no).
type: bool
ulimits:
description:
- "List of ulimit options. A ulimit is specified as C(nofile:262144:262144)."
type: list
elements: str
sysctls:
description:
- Dictionary of key,value pairs.
type: dict
version_added: 2.4
user:
description:
- Sets the username or UID used and optionally the groupname or GID for the specified command.
- "Can be of the forms C(user), C(user:group), C(uid), C(uid:gid), C(user:gid) or C(uid:group)."
type: str
uts:
description:
- Set the UTS namespace mode for the container.
type: str
volumes:
description:
- List of volumes to mount within the container.
- "Use docker CLI-style syntax: C(/host:/container[:mode])"
- "Mount modes can be a comma-separated list of various modes such as C(ro), C(rw), C(consistent),
C(delegated), C(cached), C(rprivate), C(private), C(rshared), C(shared), C(rslave), C(slave), and
C(nocopy). Note that the docker daemon might not support all modes and combinations of such modes."
- SELinux hosts can additionally use C(z) or C(Z) to use a shared or private label for the volume.
- "Note that Ansible 2.7 and earlier only supported one mode, which had to be one of C(ro), C(rw),
C(z), and C(Z)."
type: list
elements: str
volume_driver:
description:
- The container volume driver.
type: str
volumes_from:
description:
- List of container names or IDs to get volumes from.
type: list
elements: str
working_dir:
description:
- Path to the working directory.
type: str
version_added: "2.4"
extends_documentation_fragment:
- docker
- docker.docker_py_1_documentation
author:
- "Cove Schneider (@cove)"
- "Joshua Conner (@joshuaconner)"
- "Pavel Antonov (@softzilla)"
- "Thomas Steinbach (@ThomasSteinbach)"
- "Philippe Jandot (@zfil)"
- "Daan Oosterveld (@dusdanig)"
- "Chris Houseknecht (@chouseknecht)"
- "Kassian Sun (@kassiansun)"
- "Felix Fontein (@felixfontein)"
requirements:
- "L(Docker SDK for Python,https://docker-py.readthedocs.io/en/stable/) >= 1.8.0 (use L(docker-py,https://pypi.org/project/docker-py/) for Python 2.6)"
- "Docker API >= 1.20"
'''
EXAMPLES = '''
- name: Create a data container
docker_container:
name: mydata
image: busybox
volumes:
- /data
- name: Re-create a redis container
docker_container:
name: myredis
image: redis
command: redis-server --appendonly yes
state: present
recreate: yes
exposed_ports:
- 6379
volumes_from:
- mydata
- name: Restart a container
docker_container:
name: myapplication
image: someuser/appimage
state: started
restart: yes
links:
- "myredis:aliasedredis"
devices:
- "/dev/sda:/dev/xvda:rwm"
ports:
- "8080:9000"
- "127.0.0.1:8081:9001/udp"
env:
SECRET_KEY: "ssssh"
# Values which might be parsed as numbers, booleans or other types by the YAML parser need to be quoted
BOOLEAN_KEY: "yes"
- name: Container present
docker_container:
name: mycontainer
state: present
image: ubuntu:14.04
command: sleep infinity
- name: Stop a container
docker_container:
name: mycontainer
state: stopped
- name: Start 4 load-balanced containers
docker_container:
name: "container{{ item }}"
recreate: yes
image: someuser/anotherappimage
command: sleep 1d
with_sequence: count=4
- name: remove container
docker_container:
name: ohno
state: absent
- name: Syslogging output
docker_container:
name: myservice
image: busybox
log_driver: syslog
log_options:
syslog-address: tcp://my-syslog-server:514
syslog-facility: daemon
# NOTE: in Docker 1.13+ the "syslog-tag" option was renamed to "tag" for
# older docker installs, use "syslog-tag" instead
tag: myservice
- name: Create db container and connect to network
docker_container:
name: db_test
image: "postgres:latest"
networks:
- name: "{{ docker_network_name }}"
- name: Start container, connect to network and link
docker_container:
name: sleeper
image: ubuntu:14.04
networks:
- name: TestingNet
ipv4_address: "172.1.1.100"
aliases:
- sleepyzz
links:
- db_test:db
- name: TestingNet2
- name: Start a container with a command
docker_container:
name: sleepy
image: ubuntu:14.04
command: ["sleep", "infinity"]
- name: Add container to networks
docker_container:
name: sleepy
networks:
- name: TestingNet
ipv4_address: 172.1.1.18
links:
- sleeper
- name: TestingNet2
ipv4_address: 172.1.10.20
- name: Update network with aliases
docker_container:
name: sleepy
networks:
- name: TestingNet
aliases:
- sleepyz
- zzzz
- name: Remove container from one network
docker_container:
name: sleepy
networks:
- name: TestingNet2
purge_networks: yes
- name: Remove container from all networks
docker_container:
name: sleepy
purge_networks: yes
- name: Start a container and use an env file
docker_container:
name: agent
image: jenkinsci/ssh-slave
env_file: /var/tmp/jenkins/agent.env
- name: Create a container with limited capabilities
docker_container:
name: sleepy
image: ubuntu:16.04
command: sleep infinity
capabilities:
- sys_time
cap_drop:
- all
- name: Finer container restart/update control
docker_container:
name: test
image: ubuntu:18.04
env:
arg1: "true"
arg2: "whatever"
volumes:
- /tmp:/tmp
comparisons:
image: ignore # don't restart containers with older versions of the image
env: strict # we want precisely this environment
volumes: allow_more_present # if there are more volumes, that's ok, as long as `/tmp:/tmp` is there
- name: Finer container restart/update control II
docker_container:
name: test
image: ubuntu:18.04
env:
arg1: "true"
arg2: "whatever"
comparisons:
'*': ignore # by default, ignore *all* options (including image)
env: strict # except for environment variables; there, we want to be strict
- name: Start container with healthstatus
docker_container:
name: nginx-proxy
image: nginx:1.13
state: started
healthcheck:
# Check if nginx server is healthy by curl'ing the server.
# If this fails or timeouts, the healthcheck fails.
test: ["CMD", "curl", "--fail", "http://nginx.host.com"]
interval: 1m30s
timeout: 10s
retries: 3
start_period: 30s
- name: Remove healthcheck from container
docker_container:
name: nginx-proxy
image: nginx:1.13
state: started
healthcheck:
# The "NONE" check needs to be specified
test: ["NONE"]
- name: start container with block device read limit
docker_container:
name: test
image: ubuntu:18.04
state: started
device_read_bps:
# Limit read rate for /dev/sda to 20 mebibytes per second
- path: /dev/sda
rate: 20M
device_read_iops:
# Limit read rate for /dev/sdb to 300 IO per second
- path: /dev/sdb
rate: 300
'''
RETURN = '''
container:
description:
- Facts representing the current state of the container. Matches the docker inspection output.
- Note that facts are part of the registered vars since Ansible 2.8. For compatibility reasons, the facts
are also accessible directly as C(docker_container). Note that the returned fact will be removed in Ansible 2.12.
- Before 2.3 this was C(ansible_docker_container) but was renamed in 2.3 to C(docker_container) due to
conflicts with the connection plugin.
- Empty if I(state) is C(absent)
- If I(detached) is C(false), will include C(Output) attribute containing any output from container run.
returned: always
type: dict
sample: '{
"AppArmorProfile": "",
"Args": [],
"Config": {
"AttachStderr": false,
"AttachStdin": false,
"AttachStdout": false,
"Cmd": [
"/usr/bin/supervisord"
],
"Domainname": "",
"Entrypoint": null,
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"ExposedPorts": {
"443/tcp": {},
"80/tcp": {}
},
"Hostname": "8e47bf643eb9",
"Image": "lnmp_nginx:v1",
"Labels": {},
"OnBuild": null,
"OpenStdin": false,
"StdinOnce": false,
"Tty": false,
"User": "",
"Volumes": {
"/tmp/lnmp/nginx-sites/logs/": {}
},
...
}'
'''
import os
import re
import shlex
import traceback
from distutils.version import LooseVersion
from ansible.module_utils.common.text.formatters import human_to_bytes
from ansible.module_utils.docker.common import (
AnsibleDockerClient,
DifferenceTracker,
DockerBaseClass,
compare_generic,
is_image_name_id,
sanitize_result,
clean_dict_booleans_for_docker_api,
omit_none_from_dict,
parse_healthcheck,
DOCKER_COMMON_ARGS,
RequestException,
)
from ansible.module_utils.six import string_types
try:
from docker import utils
from ansible.module_utils.docker.common import docker_version
if LooseVersion(docker_version) >= LooseVersion('1.10.0'):
from docker.types import Ulimit, LogConfig
from docker import types as docker_types
else:
from docker.utils.types import Ulimit, LogConfig
from docker.errors import DockerException, APIError, NotFound
except Exception:
# missing Docker SDK for Python handled in ansible.module_utils.docker.common
pass
# Module option names whose values are human-readable sizes (e.g. "64M").
# TaskParameters.__init__ converts these to integer byte counts via
# human_to_bytes() before they are handed to the Docker API.
REQUIRES_CONVERSION_TO_BYTES = [
    'kernel_memory',
    'memory',
    'memory_reservation',
    'memory_swap',
    'shm_size'
]
def is_volume_permissions(mode):
    """Return True if every comma-separated component of ``mode`` is a
    recognized Docker volume mount mode (access, consistency, SELinux
    label, propagation, or nocopy flag)."""
    known_modes = (
        'rw', 'ro', 'z', 'Z', 'consistent', 'delegated', 'cached',
        'rprivate', 'private', 'rshared', 'shared', 'rslave', 'slave',
        'nocopy',
    )
    return all(component in known_modes for component in mode.split(','))
def parse_port_range(range_or_port, client):
    '''
    Parse a string containing either a single port or a dash-separated
    range of ports.

    Returns a list with every integer port covered by the input. Malformed
    input (non-numeric parts, or an end port below the start port) is
    reported through ``client.fail()``.
    '''
    if '-' not in range_or_port:
        # Single-port case: just convert it.
        try:
            return [int(range_or_port)]
        except Exception:
            client.fail('Invalid port: "{0}"'.format(range_or_port))
            return
    try:
        low, high = [int(piece) for piece in range_or_port.split('-')]
    except Exception:
        client.fail('Invalid port range: "{0}"'.format(range_or_port))
    if high < low:
        client.fail('Invalid port range: "{0}"'.format(range_or_port))
    return list(range(low, high + 1))
def split_colon_ipv6(text, client):
    '''
    Split string by ':', while keeping IPv6 addresses in square brackets
    in one component.

    Mirrors ``str.split(':')`` semantics, i.e. a trailing ':' produces an
    empty trailing component. Unbalanced '[' is reported via
    ``client.fail()``.
    '''
    if '[' not in text:
        # No bracketed IPv6 group: plain split is correct.
        return text.split(':')
    start = 0
    result = []
    while start < len(text):
        i = text.find('[', start)
        if i < 0:
            # No further bracketed group; split the remainder normally.
            result.extend(text[start:].split(':'))
            break
        j = text.find(']', i)
        if j < 0:
            client.fail('Cannot find closing "]" in input "{0}" for opening "[" at index {1}!'.format(text, i + 1))
        # Split everything before the bracketed group, then glue the
        # bracketed group onto the last component produced.
        result.extend(text[start:i].split(':'))
        k = text.find(':', j)
        if k < 0:
            result[-1] += text[i:]
            start = len(text)
        else:
            result[-1] += text[i:k]
            if k == len(text) - 1:
                # BUGFIX: a ':' as the very last character means an empty
                # trailing component, matching str.split(':') ('a:' ->
                # ['a', '']). The previous check compared k against
                # len(text), which str.find can never return, so the
                # trailing empty component was silently dropped.
                result.append('')
                break
            start = k + 1
    return result
class TaskParameters(DockerBaseClass):
'''
Access and parse module parameters
'''
    def __init__(self, client):
        """
        Read all module parameters from ``client.module.params`` into
        attributes and normalize them into the representations expected by
        the Docker SDK (byte counts, port maps, resolved container names,
        expanded host paths, ...).

        :param client: AnsibleDockerClient used for parameter access,
            logging and failure reporting.
        """
        super(TaskParameters, self).__init__()
        self.client = client

        # Declare every supported option up front so attribute access is
        # always safe, even for options the user did not supply.
        self.auto_remove = None
        self.blkio_weight = None
        self.capabilities = None
        self.cap_drop = None
        self.cleanup = None
        self.command = None
        self.cpu_period = None
        self.cpu_quota = None
        self.cpus = None
        self.cpuset_cpus = None
        self.cpuset_mems = None
        self.cpu_shares = None
        self.detach = None
        self.debug = None
        self.devices = None
        self.device_read_bps = None
        self.device_write_bps = None
        self.device_read_iops = None
        self.device_write_iops = None
        self.dns_servers = None
        self.dns_opts = None
        self.dns_search_domains = None
        self.domainname = None
        self.env = None
        self.env_file = None
        self.entrypoint = None
        self.etc_hosts = None
        self.exposed_ports = None
        self.force_kill = None
        self.groups = None
        self.healthcheck = None
        self.hostname = None
        self.ignore_image = None
        self.image = None
        self.init = None
        self.interactive = None
        self.ipc_mode = None
        self.keep_volumes = None
        self.kernel_memory = None
        self.kill_signal = None
        self.labels = None
        self.links = None
        self.log_driver = None
        self.output_logs = None
        self.log_options = None
        self.mac_address = None
        self.memory = None
        self.memory_reservation = None
        self.memory_swap = None
        self.memory_swappiness = None
        self.mounts = None
        self.name = None
        self.network_mode = None
        self.userns_mode = None
        self.networks = None
        self.networks_cli_compatible = None
        self.oom_killer = None
        self.oom_score_adj = None
        self.paused = None
        self.pid_mode = None
        self.pids_limit = None
        self.privileged = None
        self.purge_networks = None
        self.pull = None
        self.read_only = None
        self.recreate = None
        self.restart = None
        self.restart_retries = None
        self.restart_policy = None
        self.runtime = None
        self.shm_size = None
        self.security_opts = None
        self.state = None
        self.stop_signal = None
        self.stop_timeout = None
        self.tmpfs = None
        self.trust_image_content = None
        self.tty = None
        self.user = None
        self.uts = None
        self.volumes = None
        self.volume_binds = dict()
        self.volumes_from = None
        self.volume_driver = None
        self.working_dir = None

        # Copy all module parameters onto this object; user-supplied values
        # overwrite the None defaults declared above.
        for key, value in client.module.params.items():
            setattr(self, key, value)
        self.comparisons = client.comparisons

        # If state is 'absent', parameters do not have to be parsed or interpreted.
        # Only the container's name is needed.
        if self.state == 'absent':
            return

        if self.cpus is not None:
            # The Docker API expects NanoCPUs, i.e. units of 1e-9 CPUs.
            self.cpus = int(round(self.cpus * 1E9))

        if self.groups:
            # In case integers are passed as groups, we need to convert them to
            # strings as docker internally treats them as strings.
            self.groups = [str(g) for g in self.groups]

        # Convert human-readable size options ("64M") to integer byte counts.
        for param_name in REQUIRES_CONVERSION_TO_BYTES:
            if client.module.params.get(param_name):
                try:
                    setattr(self, param_name, human_to_bytes(client.module.params.get(param_name)))
                except ValueError as exc:
                    self.fail("Failed to convert %s to bytes: %s" % (param_name, exc))

        self.publish_all_ports = False
        self.published_ports = self._parse_publish_ports()
        if self.published_ports in ('all', 'ALL'):
            # 'all' means: publish every exposed port to a random host port.
            self.publish_all_ports = True
            self.published_ports = None

        self.ports = self._parse_exposed_ports(self.published_ports)
        self.log("expose ports:")
        self.log(self.ports, pretty_print=True)

        self.links = self._parse_links(self.links)

        if self.volumes:
            # Expand '.'/'~'-relative host paths into absolute paths.
            self.volumes = self._expand_host_paths()

        self.tmpfs = self._parse_tmpfs()
        self.env = self._get_environment()
        self.ulimits = self._parse_ulimits()
        self.sysctls = self._parse_sysctls()
        self.log_config = self._parse_log_config()
        try:
            self.healthcheck, self.disable_healthcheck = parse_healthcheck(self.healthcheck)
        except ValueError as e:
            self.fail(str(e))

        self.exp_links = None
        self.volume_binds = self._get_volume_binds(self.volumes)
        # pid/ipc/network modes may reference another container by name;
        # _replace_container_names resolves such references for the API.
        self.pid_mode = self._replace_container_names(self.pid_mode)
        self.ipc_mode = self._replace_container_names(self.ipc_mode)
        self.network_mode = self._replace_container_names(self.network_mode)

        self.log("volumes:")
        self.log(self.volumes, pretty_print=True)
        self.log("volume binds:")
        self.log(self.volume_binds, pretty_print=True)

        if self.networks:
            # Resolve every requested network to its ID; a missing network is
            # a hard error.
            for network in self.networks:
                network['id'] = self._get_network_id(network['name'])
                if not network['id']:
                    self.fail("Parameter error: network named %s could not be found. Does it exist?" % network['name'])
                if network.get('links'):
                    network['links'] = self._parse_links(network['links'])

        if self.mac_address:
            # Ensure the MAC address uses colons instead of hyphens for later comparison
            self.mac_address = self.mac_address.replace('-', ':')

        if self.entrypoint:
            # convert from list to str.
            self.entrypoint = ' '.join([str(x) for x in self.entrypoint])

        if self.command:
            # convert from list to str
            if isinstance(self.command, list):
                self.command = ' '.join([str(x) for x in self.command])

        self.mounts_opt, self.expected_mounts = self._process_mounts()

        self._check_mount_target_collisions()

        # Normalize device throttle options (rates given as strings/sizes).
        for param_name in ["device_read_bps", "device_write_bps"]:
            if client.module.params.get(param_name):
                self._process_rate_bps(option=param_name)

        for param_name in ["device_read_iops", "device_write_iops"]:
            if client.module.params.get(param_name):
                self._process_rate_iops(option=param_name)
    def fail(self, msg):
        """Report a fatal error message ``msg`` through the Docker client."""
        self.client.fail(msg)
@property
def update_parameters(self):
'''
Returns parameters used to update a container
'''
update_parameters = dict(
blkio_weight='blkio_weight',
cpu_period='cpu_period',
cpu_quota='cpu_quota',
cpu_shares='cpu_shares',
cpuset_cpus='cpuset_cpus',
cpuset_mems='cpuset_mems',
mem_limit='memory',
mem_reservation='memory_reservation',
memswap_limit='memory_swap',
kernel_memory='kernel_memory',
)
result = dict()
for key, value in update_parameters.items():
if getattr(self, value, None) is not None:
if self.client.option_minimal_versions[value]['supported']:
result[key] = getattr(self, value)
return result
@property
def create_parameters(self):
'''
Returns parameters used to create a container
'''
create_params = dict(
command='command',
domainname='domainname',
hostname='hostname',
user='user',
detach='detach',
stdin_open='interactive',
tty='tty',
ports='ports',
environment='env',
name='name',
entrypoint='entrypoint',
mac_address='mac_address',
labels='labels',
stop_signal='stop_signal',
working_dir='working_dir',
stop_timeout='stop_timeout',
healthcheck='healthcheck',
)
if self.client.docker_py_version < LooseVersion('3.0'):
# cpu_shares and volume_driver moved to create_host_config in > 3
create_params['cpu_shares'] = 'cpu_shares'
create_params['volume_driver'] = 'volume_driver'
result = dict(
host_config=self._host_config(),
volumes=self._get_mounts(),
)
for key, value in create_params.items():
if getattr(self, value, None) is not None:
if self.client.option_minimal_versions[value]['supported']:
result[key] = getattr(self, value)
if self.networks_cli_compatible and self.networks:
network = self.networks[0]
params = dict()
for para in ('ipv4_address', 'ipv6_address', 'links', 'aliases'):
if network.get(para):
params[para] = network[para]
network_config = dict()
network_config[network['name']] = self.client.create_endpoint_config(**params)
result['networking_config'] = self.client.create_networking_config(network_config)
return result
def _expand_host_paths(self):
new_vols = []
for vol in self.volumes:
if ':' in vol:
if len(vol.split(':')) == 3:
host, container, mode = vol.split(':')
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
if re.match(r'[.~]', host):
host = os.path.abspath(os.path.expanduser(host))
new_vols.append("%s:%s:%s" % (host, container, mode))
continue
elif len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]) and re.match(r'[.~]', parts[0]):
host = os.path.abspath(os.path.expanduser(parts[0]))
new_vols.append("%s:%s:rw" % (host, parts[1]))
continue
new_vols.append(vol)
return new_vols
def _get_mounts(self):
'''
Return a list of container mounts.
:return:
'''
result = []
if self.volumes:
for vol in self.volumes:
if ':' in vol:
if len(vol.split(':')) == 3:
dummy, container, dummy = vol.split(':')
result.append(container)
continue
if len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]):
result.append(parts[1])
continue
result.append(vol)
self.log("mounts:")
self.log(result, pretty_print=True)
return result
    def _host_config(self):
        '''
        Returns parameters used to create a HostConfig object

        Maps Docker SDK create_host_config() keyword names (keys) to this
        object's attribute names (values); only options that are set and
        supported by the detected SDK/API versions are passed through.
        '''
        host_config_params = dict(
            port_bindings='published_ports',
            publish_all_ports='publish_all_ports',
            links='links',
            privileged='privileged',
            dns='dns_servers',
            dns_opt='dns_opts',
            dns_search='dns_search_domains',
            binds='volume_binds',
            volumes_from='volumes_from',
            network_mode='network_mode',
            userns_mode='userns_mode',
            cap_add='capabilities',
            cap_drop='cap_drop',
            extra_hosts='etc_hosts',
            read_only='read_only',
            ipc_mode='ipc_mode',
            security_opt='security_opts',
            ulimits='ulimits',
            sysctls='sysctls',
            log_config='log_config',
            mem_limit='memory',
            memswap_limit='memory_swap',
            mem_swappiness='memory_swappiness',
            oom_score_adj='oom_score_adj',
            oom_kill_disable='oom_killer',
            shm_size='shm_size',
            group_add='groups',
            devices='devices',
            pid_mode='pid_mode',
            tmpfs='tmpfs',
            init='init',
            uts_mode='uts',
            runtime='runtime',
            auto_remove='auto_remove',
            device_read_bps='device_read_bps',
            device_write_bps='device_write_bps',
            device_read_iops='device_read_iops',
            device_write_iops='device_write_iops',
            pids_limit='pids_limit',
            mounts='mounts',
            nano_cpus='cpus',
        )

        if self.client.docker_py_version >= LooseVersion('1.9') and self.client.docker_api_version >= LooseVersion('1.22'):
            # blkio_weight can always be updated, but can only be set on creation
            # when Docker SDK for Python and Docker API are new enough
            host_config_params['blkio_weight'] = 'blkio_weight'

        if self.client.docker_py_version >= LooseVersion('3.0'):
            # cpu_shares and volume_driver moved to create_host_config in > 3
            host_config_params['cpu_shares'] = 'cpu_shares'
            host_config_params['volume_driver'] = 'volume_driver'

        params = dict()
        for key, value in host_config_params.items():
            if getattr(self, value, None) is not None:
                if self.client.option_minimal_versions[value]['supported']:
                    params[key] = getattr(self, value)

        if self.restart_policy:
            params['restart_policy'] = dict(Name=self.restart_policy,
                                            MaximumRetryCount=self.restart_retries)

        if 'mounts' in params:
            # Use the pre-processed mount specification built by
            # _process_mounts() (stored in self.mounts_opt) instead of the
            # raw module option.
            params['mounts'] = self.mounts_opt

        return self.client.create_host_config(**params)
@property
def default_host_ip(self):
ip = '0.0.0.0'
if not self.networks:
return ip
for net in self.networks:
if net.get('name'):
try:
network = self.client.inspect_network(net['name'])
if network.get('Driver') == 'bridge' and \
network.get('Options', {}).get('com.docker.network.bridge.host_binding_ipv4'):
ip = network['Options']['com.docker.network.bridge.host_binding_ipv4']
break
except NotFound as nfe:
self.client.fail(
"Cannot inspect the network '{0}' to determine the default IP: {1}".format(net['name'], nfe),
exception=traceback.format_exc()
)
return ip
    def _parse_publish_ports(self):
        '''
        Parse ports from docker CLI syntax.

        Accepted forms per entry: "container", "host:container",
        "ip:host:container" and "ip::container"; the container part may carry
        "/protocol" and both host and container parts may be ranges.
        Returns None when nothing was requested, the string 'all' when all
        ports should be published, else a dict mapping container ports to
        bind tuples.
        '''
        if self.published_ports is None:
            return None
        if 'all' in self.published_ports:
            return 'all'
        default_ip = self.default_host_ip
        binds = {}
        for port in self.published_ports:
            # split_colon_ipv6 keeps bracketed IPv6 addresses intact
            parts = split_colon_ipv6(str(port), self.client)
            container_port = parts[-1]
            protocol = ''
            if '/' in container_port:
                container_port, protocol = parts[-1].split('/')
            container_ports = parse_port_range(container_port, self.client)
            p_len = len(parts)
            if p_len == 1:
                # "container" only: bind every port of the range to the default IP
                port_binds = len(container_ports) * [(default_ip,)]
            elif p_len == 2:
                # "host:container": explicit host port (range) on the default IP
                port_binds = [(default_ip, port) for port in parse_port_range(parts[0], self.client)]
            elif p_len == 3:
                # We only allow IPv4 and IPv6 addresses for the bind address
                ipaddr = parts[0]
                if not re.match(r'^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$', parts[0]) and not re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
                    self.fail(('Bind addresses for published ports must be IPv4 or IPv6 addresses, not hostnames. '
                               'Use the dig lookup to resolve hostnames. (Found hostname: {0})').format(ipaddr))
                if re.match(r'^\[[0-9a-fA-F:]+\]$', ipaddr):
                    # strip the brackets from an IPv6 literal
                    ipaddr = ipaddr[1:-1]
                if parts[1]:
                    port_binds = [(ipaddr, port) for port in parse_port_range(parts[1], self.client)]
                else:
                    # "ip::container": bind to the IP, let docker pick host ports
                    port_binds = len(container_ports) * [(ipaddr,)]
            # NOTE(review): more than 3 parts would leave port_binds unset and
            # raise NameError below — presumably split_colon_ipv6 prevents that;
            # confirm.
            for bind, container_port in zip(port_binds, container_ports):
                idx = '{0}/{1}'.format(container_port, protocol) if protocol else container_port
                if idx in binds:
                    # same container port published multiple times: collect a list
                    old_bind = binds[idx]
                    if isinstance(old_bind, list):
                        old_bind.append(bind)
                    else:
                        binds[idx] = [old_bind, bind]
                else:
                    binds[idx] = bind
        return binds
def _get_volume_binds(self, volumes):
'''
Extract host bindings, if any, from list of volume mapping strings.
:return: dictionary of bind mappings
'''
result = dict()
if volumes:
for vol in volumes:
host = None
if ':' in vol:
parts = vol.split(':')
if len(parts) == 3:
host, container, mode = parts
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
elif len(parts) == 2:
if not is_volume_permissions(parts[1]):
host, container, mode = (vol.split(':') + ['rw'])
if host is not None:
result[host] = dict(
bind=container,
mode=mode
)
return result
def _parse_exposed_ports(self, published_ports):
'''
Parse exposed ports from docker CLI-style ports syntax.
'''
exposed = []
if self.exposed_ports:
for port in self.exposed_ports:
port = str(port).strip()
protocol = 'tcp'
match = re.search(r'(/.+$)', port)
if match:
protocol = match.group(1).replace('/', '')
port = re.sub(r'/.+$', '', port)
exposed.append((port, protocol))
if published_ports:
# Any published port should also be exposed
for publish_port in published_ports:
match = False
if isinstance(publish_port, string_types) and '/' in publish_port:
port, protocol = publish_port.split('/')
port = int(port)
else:
protocol = 'tcp'
port = int(publish_port)
for exposed_port in exposed:
if exposed_port[1] != protocol:
continue
if isinstance(exposed_port[0], string_types) and '-' in exposed_port[0]:
start_port, end_port = exposed_port[0].split('-')
if int(start_port) <= port <= int(end_port):
match = True
elif exposed_port[0] == port:
match = True
if not match:
exposed.append((port, protocol))
return exposed
@staticmethod
def _parse_links(links):
'''
Turn links into a dictionary
'''
if links is None:
return None
result = []
for link in links:
parsed_link = link.split(':', 1)
if len(parsed_link) == 2:
result.append((parsed_link[0], parsed_link[1]))
else:
result.append((parsed_link[0], parsed_link[0]))
return result
def _parse_ulimits(self):
'''
Turn ulimits into an array of Ulimit objects
'''
if self.ulimits is None:
return None
results = []
for limit in self.ulimits:
limits = dict()
pieces = limit.split(':')
if len(pieces) >= 2:
limits['name'] = pieces[0]
limits['soft'] = int(pieces[1])
limits['hard'] = int(pieces[1])
if len(pieces) == 3:
limits['hard'] = int(pieces[2])
try:
results.append(Ulimit(**limits))
except ValueError as exc:
self.fail("Error parsing ulimits value %s - %s" % (limit, exc))
return results
    def _parse_sysctls(self):
        '''
        Turn sysctls into an hash of Sysctl objects
        '''
        # NOTE(review): despite the docstring, this is a plain passthrough —
        # the sysctls parameter dict is handed on unchanged.
        return self.sysctls
def _parse_log_config(self):
'''
Create a LogConfig object
'''
if self.log_driver is None:
return None
options = dict(
Type=self.log_driver,
Config=dict()
)
if self.log_options is not None:
options['Config'] = dict()
for k, v in self.log_options.items():
if not isinstance(v, string_types):
self.client.module.warn(
"Non-string value found for log_options option '%s'. The value is automatically converted to '%s'. "
"If this is not correct, or you want to avoid such warnings, please quote the value." % (k, str(v))
)
v = str(v)
self.log_options[k] = v
options['Config'][k] = v
try:
return LogConfig(**options)
except ValueError as exc:
self.fail('Error parsing logging options - %s' % (exc))
def _parse_tmpfs(self):
'''
Turn tmpfs into a hash of Tmpfs objects
'''
result = dict()
if self.tmpfs is None:
return result
for tmpfs_spec in self.tmpfs:
split_spec = tmpfs_spec.split(":", 1)
if len(split_spec) > 1:
result[split_spec[0]] = split_spec[1]
else:
result[split_spec[0]] = ""
return result
def _get_environment(self):
"""
If environment file is combined with explicit environment variables, the explicit environment variables
take precedence.
"""
final_env = {}
if self.env_file:
parsed_env_file = utils.parse_env_file(self.env_file)
for name, value in parsed_env_file.items():
final_env[name] = str(value)
if self.env:
for name, value in self.env.items():
if not isinstance(value, string_types):
self.fail("Non-string value found for env option. Ambiguous env options must be "
"wrapped in quotes to avoid them being interpreted. Key: %s" % (name, ))
final_env[name] = str(value)
return final_env
def _get_network_id(self, network_name):
network_id = None
try:
for network in self.client.networks(names=[network_name]):
if network['Name'] == network_name:
network_id = network['Id']
break
except Exception as exc:
self.fail("Error getting network id for %s - %s" % (network_name, str(exc)))
return network_id
    def _process_mounts(self):
        '''
        Validate the mounts parameter and build two parallel lists:
        docker_types.Mount objects to pass to the Docker SDK, and plain dicts
        (None values stripped) used later for comparison against the
        container's inspect output. Returns (None, None) when no mounts were
        requested.
        '''
        if self.mounts is None:
            return None, None
        mounts_list = []
        mounts_expected = []
        for mount in self.mounts:
            target = mount['target']
            datatype = mount['type']
            mount_dict = dict(mount)
            # Sanity checks (so we don't wait for docker-py to barf on input)
            if mount_dict.get('source') is None and datatype != 'tmpfs':
                self.client.fail('source must be specified for mount "{0}" of type "{1}"'.format(target, datatype))
            # Each of these options is only valid for one specific mount type.
            mount_option_types = dict(
                volume_driver='volume',
                volume_options='volume',
                propagation='bind',
                no_copy='volume',
                labels='volume',
                tmpfs_size='tmpfs',
                tmpfs_mode='tmpfs',
            )
            for option, req_datatype in mount_option_types.items():
                if mount_dict.get(option) is not None and datatype != req_datatype:
                    self.client.fail('{0} cannot be specified for mount "{1}" of type "{2}" (needs type "{3}")'.format(option, target, datatype, req_datatype))
            # Handle volume_driver and volume_options
            volume_driver = mount_dict.pop('volume_driver')
            volume_options = mount_dict.pop('volume_options')
            if volume_driver:
                if volume_options:
                    volume_options = clean_dict_booleans_for_docker_api(volume_options)
                mount_dict['driver_config'] = docker_types.DriverConfig(name=volume_driver, options=volume_options)
            if mount_dict['labels']:
                mount_dict['labels'] = clean_dict_booleans_for_docker_api(mount_dict['labels'])
            # Normalize tmpfs_size ("64M" -> bytes) and tmpfs_mode (octal string -> int)
            if mount_dict.get('tmpfs_size') is not None:
                try:
                    mount_dict['tmpfs_size'] = human_to_bytes(mount_dict['tmpfs_size'])
                except ValueError as exc:
                    self.fail('Failed to convert tmpfs_size of mount "{0}" to bytes: {1}'.format(target, exc))
            if mount_dict.get('tmpfs_mode') is not None:
                try:
                    mount_dict['tmpfs_mode'] = int(mount_dict['tmpfs_mode'], 8)
                except Exception as dummy:
                    self.client.fail('tmp_fs mode of mount "{0}" is not an octal string!'.format(target))
            # Fill expected mount dict
            mount_expected = dict(mount)
            # carry the normalized tmpfs values into the comparison dict as well
            mount_expected['tmpfs_size'] = mount_dict['tmpfs_size']
            mount_expected['tmpfs_mode'] = mount_dict['tmpfs_mode']
            # Add result to lists
            mounts_list.append(docker_types.Mount(**mount_dict))
            mounts_expected.append(omit_none_from_dict(mount_expected))
        return mounts_list, mounts_expected
def _process_rate_bps(self, option):
"""
Format device_read_bps and device_write_bps option
"""
devices_list = []
for v in getattr(self, option):
device_dict = dict((x.title(), y) for x, y in v.items())
device_dict['Rate'] = human_to_bytes(device_dict['Rate'])
devices_list.append(device_dict)
setattr(self, option, devices_list)
def _process_rate_iops(self, option):
"""
Format device_read_iops and device_write_iops option
"""
devices_list = []
for v in getattr(self, option):
device_dict = dict((x.title(), y) for x, y in v.items())
devices_list.append(device_dict)
setattr(self, option, devices_list)
def _replace_container_names(self, mode):
"""
Parse IPC and PID modes. If they contain a container name, replace
with the container's ID.
"""
if mode is None or not mode.startswith('container:'):
return mode
container_name = mode[len('container:'):]
# Try to inspect container to see whether this is an ID or a
# name (and in the latter case, retrieve it's ID)
container = self.client.get_container(container_name)
if container is None:
# If we can't find the container, issue a warning and continue with
# what the user specified.
self.client.module.warn('Cannot find a container with name or ID "{0}"'.format(container_name))
return mode
return 'container:{0}'.format(container['Id'])
def _check_mount_target_collisions(self):
last = dict()
def f(t, name):
if t in last:
if name == last[t]:
self.client.fail('The mount point "{0}" appears twice in the {1} option'.format(t, name))
else:
self.client.fail('The mount point "{0}" appears both in the {1} and {2} option'.format(t, name, last[t]))
last[t] = name
if self.expected_mounts:
for t in [m['target'] for m in self.expected_mounts]:
f(t, 'mounts')
if self.volumes:
for v in self.volumes:
vs = v.split(':')
f(vs[0 if len(vs) == 1 else 1], 'volumes')
class Container(DockerBaseClass):
    def __init__(self, container, parameters):
        '''
        Wrap a container inspect result (or None if absent) together with the
        task parameters, and prepare the slots for derived "expected_*" values.
        '''
        super(Container, self).__init__()
        self.raw = container
        self.Id = None
        self.container = container
        if container:
            self.Id = container['Id']
            self.Image = container['Image']
        self.log(self.container, pretty_print=True)
        self.parameters = parameters
        self.parameters.expected_links = None
        self.parameters.expected_ports = None
        self.parameters.expected_exposed = None
        self.parameters.expected_volumes = None
        self.parameters.expected_ulimits = None
        self.parameters.expected_sysctls = None
        self.parameters.expected_etc_hosts = None
        self.parameters.expected_env = None
        # Maps derived "expected_*" keys to the module option whose comparison
        # rule should be applied to them.
        self.parameters_map = {
            'expected_links': 'links',
            'expected_ports': 'expected_ports',
            'expected_exposed': 'exposed_ports',
            'expected_volumes': 'volumes',
            'expected_ulimits': 'ulimits',
            'expected_sysctls': 'sysctls',
            'expected_etc_hosts': 'etc_hosts',
            'expected_env': 'env',
            'expected_entrypoint': 'entrypoint',
            'expected_binds': 'volumes',
            'expected_cmd': 'command',
            'expected_devices': 'devices',
            'expected_healthcheck': 'healthcheck',
            'expected_mounts': 'mounts',
        }
    def fail(self, msg):
        # Delegate failure reporting (and module exit) to the shared Docker client.
        self.parameters.client.fail(msg)
@property
def exists(self):
return True if self.container else False
@property
def running(self):
if self.container and self.container.get('State'):
if self.container['State'].get('Running') and not self.container['State'].get('Ghost', False):
return True
return False
@property
def paused(self):
if self.container and self.container.get('State'):
return self.container['State'].get('Paused', False)
return False
def _compare(self, a, b, compare):
'''
Compare values a and b as described in compare.
'''
return compare_generic(a, b, compare['comparison'], compare['type'])
def _decode_mounts(self, mounts):
if not mounts:
return mounts
result = []
empty_dict = dict()
for mount in mounts:
res = dict()
res['type'] = mount.get('Type')
res['source'] = mount.get('Source')
res['target'] = mount.get('Target')
res['read_only'] = mount.get('ReadOnly', False) # golang's omitempty for bool returns None for False
res['consistency'] = mount.get('Consistency')
res['propagation'] = mount.get('BindOptions', empty_dict).get('Propagation')
res['no_copy'] = mount.get('VolumeOptions', empty_dict).get('NoCopy', False)
res['labels'] = mount.get('VolumeOptions', empty_dict).get('Labels', empty_dict)
res['volume_driver'] = mount.get('VolumeOptions', empty_dict).get('DriverConfig', empty_dict).get('Name')
res['volume_options'] = mount.get('VolumeOptions', empty_dict).get('DriverConfig', empty_dict).get('Options', empty_dict)
res['tmpfs_size'] = mount.get('TmpfsOptions', empty_dict).get('SizeBytes')
res['tmpfs_mode'] = mount.get('TmpfsOptions', empty_dict).get('Mode')
result.append(res)
return result
    def has_different_configuration(self, image):
        '''
        Diff parameters vs existing container config. Returns tuple: (True | False, List of differences)
        '''
        self.log('Starting has_different_configuration')
        # Pre-compute all derived "expected_*" values, i.e. the module
        # parameters translated into the representation `docker inspect` uses.
        self.parameters.expected_entrypoint = self._get_expected_entrypoint()
        self.parameters.expected_links = self._get_expected_links()
        self.parameters.expected_ports = self._get_expected_ports()
        self.parameters.expected_exposed = self._get_expected_exposed(image)
        self.parameters.expected_volumes = self._get_expected_volumes(image)
        self.parameters.expected_binds = self._get_expected_binds(image)
        self.parameters.expected_ulimits = self._get_expected_ulimits(self.parameters.ulimits)
        self.parameters.expected_sysctls = self._get_expected_sysctls(self.parameters.sysctls)
        self.parameters.expected_etc_hosts = self._convert_simple_dict_to_list('etc_hosts')
        self.parameters.expected_env = self._get_expected_env(image)
        self.parameters.expected_cmd = self._get_expected_cmd()
        self.parameters.expected_devices = self._get_expected_devices()
        self.parameters.expected_healthcheck = self._get_expected_healthcheck()
        if not self.container.get('HostConfig'):
            self.fail("has_config_diff: Error parsing container properties. HostConfig missing.")
        if not self.container.get('Config'):
            self.fail("has_config_diff: Error parsing container properties. Config missing.")
        if not self.container.get('NetworkSettings'):
            self.fail("has_config_diff: Error parsing container properties. NetworkSettings missing.")
        host_config = self.container['HostConfig']
        log_config = host_config.get('LogConfig', dict())
        restart_policy = host_config.get('RestartPolicy', dict())
        config = self.container['Config']
        network = self.container['NetworkSettings']
        # The previous version of the docker module ignored the detach state by
        # assuming if the container was running, it must have been detached.
        detach = not (config.get('AttachStderr') and config.get('AttachStdout'))
        # "ExposedPorts": null returns None type & causes AttributeError - PR #5517
        if config.get('ExposedPorts') is not None:
            expected_exposed = [self._normalize_port(p) for p in config.get('ExposedPorts', dict()).keys()]
        else:
            expected_exposed = []
        # Map parameters to container inspect results
        config_mapping = dict(
            expected_cmd=config.get('Cmd'),
            domainname=config.get('Domainname'),
            hostname=config.get('Hostname'),
            user=config.get('User'),
            detach=detach,
            init=host_config.get('Init'),
            interactive=config.get('OpenStdin'),
            capabilities=host_config.get('CapAdd'),
            cap_drop=host_config.get('CapDrop'),
            expected_devices=host_config.get('Devices'),
            dns_servers=host_config.get('Dns'),
            dns_opts=host_config.get('DnsOptions'),
            dns_search_domains=host_config.get('DnsSearch'),
            expected_env=(config.get('Env') or []),
            expected_entrypoint=config.get('Entrypoint'),
            expected_etc_hosts=host_config['ExtraHosts'],
            expected_exposed=expected_exposed,
            groups=host_config.get('GroupAdd'),
            ipc_mode=host_config.get("IpcMode"),
            labels=config.get('Labels'),
            expected_links=host_config.get('Links'),
            mac_address=network.get('MacAddress'),
            memory_swappiness=host_config.get('MemorySwappiness'),
            network_mode=host_config.get('NetworkMode'),
            userns_mode=host_config.get('UsernsMode'),
            oom_killer=host_config.get('OomKillDisable'),
            oom_score_adj=host_config.get('OomScoreAdj'),
            pid_mode=host_config.get('PidMode'),
            privileged=host_config.get('Privileged'),
            expected_ports=host_config.get('PortBindings'),
            read_only=host_config.get('ReadonlyRootfs'),
            restart_policy=restart_policy.get('Name'),
            runtime=host_config.get('Runtime'),
            shm_size=host_config.get('ShmSize'),
            security_opts=host_config.get("SecurityOpt"),
            stop_signal=config.get("StopSignal"),
            tmpfs=host_config.get('Tmpfs'),
            tty=config.get('Tty'),
            expected_ulimits=host_config.get('Ulimits'),
            expected_sysctls=host_config.get('Sysctls'),
            uts=host_config.get('UTSMode'),
            expected_volumes=config.get('Volumes'),
            expected_binds=host_config.get('Binds'),
            volume_driver=host_config.get('VolumeDriver'),
            volumes_from=host_config.get('VolumesFrom'),
            working_dir=config.get('WorkingDir'),
            publish_all_ports=host_config.get('PublishAllPorts'),
            expected_healthcheck=config.get('Healthcheck'),
            disable_healthcheck=(not config.get('Healthcheck') or config.get('Healthcheck').get('Test') == ['NONE']),
            device_read_bps=host_config.get('BlkioDeviceReadBps'),
            device_write_bps=host_config.get('BlkioDeviceWriteBps'),
            device_read_iops=host_config.get('BlkioDeviceReadIOps'),
            device_write_iops=host_config.get('BlkioDeviceWriteIOps'),
            pids_limit=host_config.get('PidsLimit'),
            # According to https://github.com/moby/moby/, support for HostConfig.Mounts
            # has been included at least since v17.03.0-ce, which has API version 1.26.
            # The previous tag, v1.9.1, has API version 1.21 and does not have
            # HostConfig.Mounts. I have no idea what about API 1.25...
            expected_mounts=self._decode_mounts(host_config.get('Mounts')),
            cpus=host_config.get('NanoCpus'),
        )
        # Options which don't make sense without their accompanying option
        if self.parameters.restart_policy:
            config_mapping['restart_retries'] = restart_policy.get('MaximumRetryCount')
        if self.parameters.log_driver:
            config_mapping['log_driver'] = log_config.get('Type')
            config_mapping['log_options'] = log_config.get('Config')
        if self.parameters.client.option_minimal_versions['auto_remove']['supported']:
            # auto_remove is only supported in Docker SDK for Python >= 2.0.0; unfortunately
            # it has a default value, that's why we have to jump through the hoops here
            config_mapping['auto_remove'] = host_config.get('AutoRemove')
        if self.parameters.client.option_minimal_versions['stop_timeout']['supported']:
            # stop_timeout is only supported in Docker SDK for Python >= 2.1. Note that
            # stop_timeout has a hybrid role, in that it used to be something only used
            # for stopping containers, and is now also used as a container property.
            # That's why it needs special handling here.
            config_mapping['stop_timeout'] = config.get('StopTimeout')
        if self.parameters.client.docker_api_version < LooseVersion('1.22'):
            # For docker API < 1.22, update_container() is not supported. Thus
            # we need to handle all limits which are usually handled by
            # update_container() as configuration changes which require a container
            # restart.
            config_mapping.update(dict(
                blkio_weight=host_config.get('BlkioWeight'),
                cpu_period=host_config.get('CpuPeriod'),
                cpu_quota=host_config.get('CpuQuota'),
                cpu_shares=host_config.get('CpuShares'),
                cpuset_cpus=host_config.get('CpusetCpus'),
                cpuset_mems=host_config.get('CpusetMems'),
                kernel_memory=host_config.get("KernelMemory"),
                memory=host_config.get('Memory'),
                memory_reservation=host_config.get('MemoryReservation'),
                memory_swap=host_config.get('MemorySwap'),
            ))
        differences = DifferenceTracker()
        # Compare each mapped value with the corresponding parameter using the
        # per-option comparison rule; skip options the client marked unsupported.
        for key, value in config_mapping.items():
            minimal_version = self.parameters.client.option_minimal_versions.get(key, {})
            if not minimal_version.get('supported', True):
                continue
            compare = self.parameters.client.comparisons[self.parameters_map.get(key, key)]
            self.log('check differences %s %s vs %s (%s)' % (key, getattr(self.parameters, key), str(value), compare))
            if getattr(self.parameters, key, None) is not None:
                match = self._compare(getattr(self.parameters, key), value, compare)
                if not match:
                    # no match. record the differences
                    p = getattr(self.parameters, key)
                    c = value
                    if compare['type'] == 'set':
                        # Since the order does not matter, sort so that the diff output is better.
                        if p is not None:
                            p = sorted(p)
                        if c is not None:
                            c = sorted(c)
                    elif compare['type'] == 'set(dict)':
                        # Since the order does not matter, sort so that the diff output is better.
                        if key == 'expected_mounts':
                            # For selected values, use one entry as key
                            def sort_key_fn(x):
                                return x['target']
                        else:
                            # We sort the list of dictionaries by using the sorted items of a dict as its key.
                            def sort_key_fn(x):
                                return sorted((a, str(b)) for a, b in x.items())
                        if p is not None:
                            p = sorted(p, key=sort_key_fn)
                        if c is not None:
                            c = sorted(c, key=sort_key_fn)
                    differences.add(key, parameter=p, active=c)
        has_differences = not differences.empty
        return has_differences, differences
def has_different_resource_limits(self):
'''
Diff parameters and container resource limits
'''
if not self.container.get('HostConfig'):
self.fail("limits_differ_from_container: Error parsing container properties. HostConfig missing.")
if self.parameters.client.docker_api_version < LooseVersion('1.22'):
# update_container() call not supported
return False, []
host_config = self.container['HostConfig']
config_mapping = dict(
blkio_weight=host_config.get('BlkioWeight'),
cpu_period=host_config.get('CpuPeriod'),
cpu_quota=host_config.get('CpuQuota'),
cpu_shares=host_config.get('CpuShares'),
cpuset_cpus=host_config.get('CpusetCpus'),
cpuset_mems=host_config.get('CpusetMems'),
kernel_memory=host_config.get("KernelMemory"),
memory=host_config.get('Memory'),
memory_reservation=host_config.get('MemoryReservation'),
memory_swap=host_config.get('MemorySwap'),
)
differences = DifferenceTracker()
for key, value in config_mapping.items():
if getattr(self.parameters, key, None):
compare = self.parameters.client.comparisons[self.parameters_map.get(key, key)]
match = self._compare(getattr(self.parameters, key), value, compare)
if not match:
# no match. record the differences
differences.add(key, parameter=getattr(self.parameters, key), active=value)
different = not differences.empty
return different, differences
    def has_network_differences(self):
        '''
        Check if the container is connected to requested networks with expected options: links, aliases, ipv4, ipv6
        '''
        different = False
        differences = []
        if not self.parameters.networks:
            return different, differences
        if not self.container.get('NetworkSettings'):
            self.fail("has_missing_networks: Error parsing container properties. NetworkSettings missing.")
        connected_networks = self.container['NetworkSettings']['Networks']
        for network in self.parameters.networks:
            network_info = connected_networks.get(network['name'])
            if network_info is None:
                # requested network is not connected at all
                different = True
                differences.append(dict(
                    parameter=network,
                    container=None
                ))
            else:
                # connected: compare per-network options that were requested
                diff = False
                network_info_ipam = network_info.get('IPAMConfig') or {}
                if network.get('ipv4_address') and network['ipv4_address'] != network_info_ipam.get('IPv4Address'):
                    diff = True
                if network.get('ipv6_address') and network['ipv6_address'] != network_info_ipam.get('IPv6Address'):
                    diff = True
                if network.get('aliases'):
                    # extra aliases on the container are tolerated (allow_more_present)
                    if not compare_generic(network['aliases'], network_info.get('Aliases'), 'allow_more_present', 'set'):
                        diff = True
                if network.get('links'):
                    expected_links = []
                    for link, alias in network['links']:
                        expected_links.append("%s:%s" % (link, alias))
                    if not compare_generic(expected_links, network_info.get('Links'), 'allow_more_present', 'set'):
                        diff = True
                if diff:
                    different = True
                    differences.append(dict(
                        parameter=network,
                        container=dict(
                            name=network['name'],
                            ipv4_address=network_info_ipam.get('IPv4Address'),
                            ipv6_address=network_info_ipam.get('IPv6Address'),
                            aliases=network_info.get('Aliases'),
                            links=network_info.get('Links')
                        )
                    ))
        return different, differences
def has_extra_networks(self):
'''
Check if the container is connected to non-requested networks
'''
extra_networks = []
extra = False
if not self.container.get('NetworkSettings'):
self.fail("has_extra_networks: Error parsing container properties. NetworkSettings missing.")
connected_networks = self.container['NetworkSettings'].get('Networks')
if connected_networks:
for network, network_config in connected_networks.items():
keep = False
if self.parameters.networks:
for expected_network in self.parameters.networks:
if expected_network['name'] == network:
keep = True
if not keep:
extra = True
extra_networks.append(dict(name=network, id=network_config['NetworkID']))
return extra, extra_networks
def _get_expected_devices(self):
if not self.parameters.devices:
return None
expected_devices = []
for device in self.parameters.devices:
parts = device.split(':')
if len(parts) == 1:
expected_devices.append(
dict(
CgroupPermissions='rwm',
PathInContainer=parts[0],
PathOnHost=parts[0]
))
elif len(parts) == 2:
parts = device.split(':')
expected_devices.append(
dict(
CgroupPermissions='rwm',
PathInContainer=parts[1],
PathOnHost=parts[0]
)
)
else:
expected_devices.append(
dict(
CgroupPermissions=parts[2],
PathInContainer=parts[1],
PathOnHost=parts[0]
))
return expected_devices
def _get_expected_entrypoint(self):
if not self.parameters.entrypoint:
return None
return shlex.split(self.parameters.entrypoint)
    def _get_expected_ports(self):
        '''
        Translate the parsed published_ports mapping into the PortBindings
        structure `docker inspect` reports ({'port/proto': [{'HostIp', 'HostPort'}]}).
        '''
        if not self.parameters.published_ports:
            return None
        expected_bound_ports = {}
        for container_port, config in self.parameters.published_ports.items():
            if isinstance(container_port, int):
                container_port = "%s/tcp" % container_port
            if len(config) == 1:
                # single value: either a host port (int) or a bare host IP
                if isinstance(config[0], int):
                    expected_bound_ports[container_port] = [{'HostIp': "0.0.0.0", 'HostPort': config[0]}]
                else:
                    expected_bound_ports[container_port] = [{'HostIp': config[0], 'HostPort': ""}]
            elif isinstance(config[0], tuple):
                # list of (host_ip, host_port) tuples: same port published multiple times
                expected_bound_ports[container_port] = []
                for host_ip, host_port in config:
                    expected_bound_ports[container_port].append({'HostIp': host_ip, 'HostPort': str(host_port)})
            else:
                # (host_ip, host_port) pair
                expected_bound_ports[container_port] = [{'HostIp': config[0], 'HostPort': str(config[1])}]
        return expected_bound_ports
def _get_expected_links(self):
if self.parameters.links is None:
return None
self.log('parameter links:')
self.log(self.parameters.links, pretty_print=True)
exp_links = []
for link, alias in self.parameters.links:
exp_links.append("/%s:%s/%s" % (link, ('/' + self.parameters.name), alias))
return exp_links
def _get_expected_binds(self, image):
self.log('_get_expected_binds')
image_vols = []
if image:
image_vols = self._get_image_binds(image[self.parameters.client.image_inspect_source].get('Volumes'))
param_vols = []
if self.parameters.volumes:
for vol in self.parameters.volumes:
host = None
if ':' in vol:
if len(vol.split(':')) == 3:
host, container, mode = vol.split(':')
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
if len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]):
host, container, mode = vol.split(':') + ['rw']
if host:
param_vols.append("%s:%s:%s" % (host, container, mode))
result = list(set(image_vols + param_vols))
self.log("expected_binds:")
self.log(result, pretty_print=True)
return result
def _get_image_binds(self, volumes):
'''
Convert array of binds to array of strings with format host_path:container_path:mode
:param volumes: array of bind dicts
:return: array of strings
'''
results = []
if isinstance(volumes, dict):
results += self._get_bind_from_dict(volumes)
elif isinstance(volumes, list):
for vol in volumes:
results += self._get_bind_from_dict(vol)
return results
@staticmethod
def _get_bind_from_dict(volume_dict):
results = []
if volume_dict:
for host_path, config in volume_dict.items():
if isinstance(config, dict) and config.get('bind'):
container_path = config.get('bind')
mode = config.get('mode', 'rw')
results.append("%s:%s:%s" % (host_path, container_path, mode))
return results
def _get_expected_volumes(self, image):
self.log('_get_expected_volumes')
expected_vols = dict()
if image and image[self.parameters.client.image_inspect_source].get('Volumes'):
expected_vols.update(image[self.parameters.client.image_inspect_source].get('Volumes'))
if self.parameters.volumes:
for vol in self.parameters.volumes:
container = None
if ':' in vol:
if len(vol.split(':')) == 3:
dummy, container, mode = vol.split(':')
if not is_volume_permissions(mode):
self.fail('Found invalid volumes mode: {0}'.format(mode))
if len(vol.split(':')) == 2:
parts = vol.split(':')
if not is_volume_permissions(parts[1]):
dummy, container, mode = vol.split(':') + ['rw']
new_vol = dict()
if container:
new_vol[container] = dict()
else:
new_vol[vol] = dict()
expected_vols.update(new_vol)
if not expected_vols:
expected_vols = None
self.log("expected_volumes:")
self.log(expected_vols, pretty_print=True)
return expected_vols
def _get_expected_env(self, image):
self.log('_get_expected_env')
expected_env = dict()
if image and image[self.parameters.client.image_inspect_source].get('Env'):
for env_var in image[self.parameters.client.image_inspect_source]['Env']:
parts = env_var.split('=', 1)
expected_env[parts[0]] = parts[1]
if self.parameters.env:
expected_env.update(self.parameters.env)
param_env = []
for key, value in expected_env.items():
param_env.append("%s=%s" % (key, value))
return param_env
def _get_expected_exposed(self, image):
self.log('_get_expected_exposed')
image_ports = []
if image:
image_exposed_ports = image[self.parameters.client.image_inspect_source].get('ExposedPorts') or {}
image_ports = [self._normalize_port(p) for p in image_exposed_ports.keys()]
param_ports = []
if self.parameters.ports:
param_ports = [str(p[0]) + '/' + p[1] for p in self.parameters.ports]
result = list(set(image_ports + param_ports))
self.log(result, pretty_print=True)
return result
def _get_expected_ulimits(self, config_ulimits):
self.log('_get_expected_ulimits')
if config_ulimits is None:
return None
results = []
for limit in config_ulimits:
results.append(dict(
Name=limit.name,
Soft=limit.soft,
Hard=limit.hard
))
return results
def _get_expected_sysctls(self, config_sysctls):
self.log('_get_expected_sysctls')
if config_sysctls is None:
return None
result = dict()
for key, value in config_sysctls.items():
result[key] = str(value)
return result
def _get_expected_cmd(self):
self.log('_get_expected_cmd')
if not self.parameters.command:
return None
return shlex.split(self.parameters.command)
def _convert_simple_dict_to_list(self, param_name, join_with=':'):
if getattr(self.parameters, param_name, None) is None:
return None
results = []
for key, value in getattr(self.parameters, param_name).items():
results.append("%s%s%s" % (key, join_with, value))
return results
def _normalize_port(self, port):
if '/' not in port:
return port + '/tcp'
return port
def _get_expected_healthcheck(self):
self.log('_get_expected_healthcheck')
expected_healthcheck = dict()
if self.parameters.healthcheck:
expected_healthcheck.update([(k.title().replace("_", ""), v)
for k, v in self.parameters.healthcheck.items()])
return expected_healthcheck
class ContainerManager(DockerBaseClass):
    '''
    Perform container management tasks.

    The constructor drives the whole run: it validates option combinations,
    dispatches on the requested ``state`` and leaves the outcome in
    ``self.results`` (and ``self.facts``) for the module to return.
    '''

    def __init__(self, client):
        super(ContainerManager, self).__init__()

        # Warn about option combinations that are accepted but have no effect.
        if client.module.params.get('log_options') and not client.module.params.get('log_driver'):
            client.module.warn('log_options is ignored when log_driver is not specified')
        if client.module.params.get('healthcheck') and not client.module.params.get('healthcheck').get('test'):
            client.module.warn('healthcheck is ignored when test is not specified')
        if client.module.params.get('restart_retries') is not None and not client.module.params.get('restart_policy'):
            client.module.warn('restart_retries is ignored when restart_policy is not specified')

        self.client = client
        self.parameters = TaskParameters(client)
        self.check_mode = self.client.check_mode
        self.results = {'changed': False, 'actions': []}
        self.diff = {}
        self.diff_tracker = DifferenceTracker()
        self.facts = {}

        # Dispatch on the requested state; 'started'/'stopped'/'present' all
        # go through present(), which handles create/recreate/start/stop.
        state = self.parameters.state
        if state in ('stopped', 'started', 'present'):
            self.present(state)
        elif state == 'absent':
            self.absent()

        # 'actions' is debugging detail; only keep it in check mode or debug runs.
        if not self.check_mode and not self.parameters.debug:
            self.results.pop('actions')

        if self.client.module._diff or self.parameters.debug:
            self.diff['before'], self.diff['after'] = self.diff_tracker.get_before_after()
            self.results['diff'] = self.diff

        if self.facts:
            self.results['ansible_facts'] = {'docker_container': self.facts}
            self.results['container'] = self.facts

    def present(self, state):
        """Ensure the container exists (and matches configuration/state)."""
        container = self._get_container(self.parameters.name)
        was_running = container.running
        was_paused = container.paused
        container_created = False

        # If the image parameter was passed then we need to deal with the image
        # version comparison. Otherwise we handle this depending on whether
        # the container already runs or not; in the former case, in case the
        # container needs to be restarted, we use the existing container's
        # image ID.
        image = self._get_image()
        self.log(image, pretty_print=True)
        if not container.exists:
            # New container
            self.log('No container found')
            if not self.parameters.image:
                self.fail('Cannot create container when image is not specified!')
            self.diff_tracker.add('exists', parameter=True, active=False)
            new_container = self.container_create(self.parameters.image, self.parameters.create_parameters)
            if new_container:
                container = new_container
                container_created = True
        else:
            # Existing container
            different, differences = container.has_different_configuration(image)
            image_different = False
            if self.parameters.comparisons['image']['comparison'] == 'strict':
                image_different = self._image_is_different(image, container)
            if image_different or different or self.parameters.recreate:
                self.diff_tracker.merge(differences)
                self.diff['differences'] = differences.get_legacy_docker_container_diffs()
                if image_different:
                    self.diff['image_different'] = True
                self.log("differences")
                self.log(differences.get_legacy_docker_container_diffs(), pretty_print=True)
                # Fall back to the existing container's image if none was given.
                image_to_use = self.parameters.image
                if not image_to_use and container and container.Image:
                    image_to_use = container.Image
                if not image_to_use:
                    self.fail('Cannot recreate container when image is not specified or cannot be extracted from current container!')
                if container.running:
                    self.container_stop(container.Id)
                self.container_remove(container.Id)
                new_container = self.container_create(image_to_use, self.parameters.create_parameters)
                if new_container:
                    container = new_container
                    container_created = True

        if container and container.exists:
            container = self.update_limits(container)
            container = self.update_networks(container, container_created)

            if state == 'started' and not container.running:
                self.diff_tracker.add('running', parameter=True, active=was_running)
                container = self.container_start(container.Id)
            elif state == 'started' and self.parameters.restart:
                self.diff_tracker.add('running', parameter=True, active=was_running)
                self.diff_tracker.add('restarted', parameter=True, active=False)
                container = self.container_restart(container.Id)
            elif state == 'stopped' and container.running:
                self.diff_tracker.add('running', parameter=False, active=was_running)
                self.container_stop(container.Id)
                container = self._get_container(container.Id)

            # Reconcile the paused flag for running containers.
            if state == 'started' and container.paused is not None and container.paused != self.parameters.paused:
                self.diff_tracker.add('paused', parameter=self.parameters.paused, active=was_paused)
                if not self.check_mode:
                    try:
                        if self.parameters.paused:
                            self.client.pause(container=container.Id)
                        else:
                            self.client.unpause(container=container.Id)
                    except Exception as exc:
                        self.fail("Error %s container %s: %s" % (
                            "pausing" if self.parameters.paused else "unpausing", container.Id, str(exc)
                        ))
                    container = self._get_container(container.Id)
                self.results['changed'] = True
                self.results['actions'].append(dict(set_paused=self.parameters.paused))

        self.facts = container.raw

    def absent(self):
        """Ensure the container does not exist (stop first if running)."""
        container = self._get_container(self.parameters.name)
        if container.exists:
            if container.running:
                self.diff_tracker.add('running', parameter=False, active=True)
                self.container_stop(container.Id)
            self.diff_tracker.add('exists', parameter=False, active=True)
            self.container_remove(container.Id)

    def fail(self, msg, **kwargs):
        """Abort the module run with an error message."""
        self.client.fail(msg, **kwargs)

    def _output_logs(self, msg):
        """Forward container output to the Ansible log."""
        self.client.module.log(msg=msg)

    def _get_container(self, container):
        '''
        Expects container ID or Name. Returns a container object
        '''
        return Container(self.client.get_container(container), self.parameters)

    def _get_image(self):
        """Resolve the requested image, pulling it when needed.

        Returns the image's inspect data, or None when no image parameter was
        given. In check mode the pull is only simulated and 'changed' is
        reported accordingly.
        """
        if not self.parameters.image:
            self.log('No image specified')
            return None
        if is_image_name_id(self.parameters.image):
            image = self.client.find_image_by_id(self.parameters.image)
        else:
            repository, tag = utils.parse_repository_tag(self.parameters.image)
            if not tag:
                tag = "latest"
            image = self.client.find_image(repository, tag)
            if not image or self.parameters.pull:
                if not self.check_mode:
                    self.log("Pull the image.")
                    image, alreadyToLatest = self.client.pull_image(repository, tag)
                    if alreadyToLatest:
                        self.results['changed'] = False
                    else:
                        self.results['changed'] = True
                        self.results['actions'].append(dict(pulled_image="%s:%s" % (repository, tag)))
                elif not image:
                    # If the image isn't there, claim we'll pull.
                    # (Implicitly: if the image is there, claim it already was latest.)
                    self.results['changed'] = True
                    self.results['actions'].append(dict(pulled_image="%s:%s" % (repository, tag)))
        self.log("image")
        self.log(image, pretty_print=True)
        return image

    def _image_is_different(self, image, container):
        """Return True if the container runs a different image ID than requested."""
        if image and image.get('Id'):
            if container and container.Image:
                if image.get('Id') != container.Image:
                    self.diff_tracker.add('image', parameter=image.get('Id'), active=container.Image)
                    return True
        return False

    def update_limits(self, container):
        """Apply differing resource limits in place; return a fresh container view."""
        limits_differ, different_limits = container.has_different_resource_limits()
        if limits_differ:
            self.log("limit differences:")
            self.log(different_limits.get_legacy_docker_container_diffs(), pretty_print=True)
            self.diff_tracker.merge(different_limits)
        if limits_differ and not self.check_mode:
            self.container_update(container.Id, self.parameters.update_parameters)
            return self._get_container(container.Id)
        return container

    def update_networks(self, container, container_created):
        """Connect/disconnect networks so the container matches the parameters."""
        updated_container = container
        if self.parameters.comparisons['networks']['comparison'] != 'ignore' or container_created:
            has_network_differences, network_differences = container.has_network_differences()
            if has_network_differences:
                if self.diff.get('differences'):
                    self.diff['differences'].append(dict(network_differences=network_differences))
                else:
                    self.diff['differences'] = [dict(network_differences=network_differences)]
                for netdiff in network_differences:
                    self.diff_tracker.add(
                        'network.{0}'.format(netdiff['parameter']['name']),
                        parameter=netdiff['parameter'],
                        active=netdiff['container']
                    )
                self.results['changed'] = True
                updated_container = self._add_networks(container, network_differences)

        # Extra networks are only purged in strict mode (or legacy purge_networks).
        if (self.parameters.comparisons['networks']['comparison'] == 'strict' and self.parameters.networks is not None) or self.parameters.purge_networks:
            has_extra_networks, extra_networks = container.has_extra_networks()
            if has_extra_networks:
                if self.diff.get('differences'):
                    self.diff['differences'].append(dict(purge_networks=extra_networks))
                else:
                    self.diff['differences'] = [dict(purge_networks=extra_networks)]
                for extra_network in extra_networks:
                    self.diff_tracker.add(
                        'network.{0}'.format(extra_network['name']),
                        active=extra_network
                    )
                self.results['changed'] = True
                updated_container = self._purge_networks(container, extra_networks)
        return updated_container

    def _add_networks(self, container, differences):
        """Reconnect the container to each differing network with the requested parameters."""
        for diff in differences:
            # remove the container from the network, if connected
            if diff.get('container'):
                self.results['actions'].append(dict(removed_from_network=diff['parameter']['name']))
                if not self.check_mode:
                    try:
                        self.client.disconnect_container_from_network(container.Id, diff['parameter']['id'])
                    except Exception as exc:
                        self.fail("Error disconnecting container from network %s - %s" % (diff['parameter']['name'],
                                                                                          str(exc)))
            # connect to the network
            params = dict()
            for para in ('ipv4_address', 'ipv6_address', 'links', 'aliases'):
                if diff['parameter'].get(para):
                    params[para] = diff['parameter'][para]
            self.results['actions'].append(dict(added_to_network=diff['parameter']['name'], network_parameters=params))
            if not self.check_mode:
                try:
                    self.log("Connecting container to network %s" % diff['parameter']['id'])
                    self.log(params, pretty_print=True)
                    self.client.connect_container_to_network(container.Id, diff['parameter']['id'], **params)
                except Exception as exc:
                    self.fail("Error connecting container to network %s - %s" % (diff['parameter']['name'], str(exc)))
        return self._get_container(container.Id)

    def _purge_networks(self, container, networks):
        """Disconnect the container from all listed networks."""
        for network in networks:
            self.results['actions'].append(dict(removed_from_network=network['name']))
            if not self.check_mode:
                try:
                    self.client.disconnect_container_from_network(container.Id, network['name'])
                except Exception as exc:
                    self.fail("Error disconnecting container from network %s - %s" % (network['name'],
                                                                                      str(exc)))
        return self._get_container(container.Id)

    def container_create(self, image, create_parameters):
        """Create a new container (no-op in check mode); return its Container view or None."""
        self.log("create container")
        self.log("image: %s parameters:" % image)
        self.log(create_parameters, pretty_print=True)
        self.results['actions'].append(dict(created="Created container", create_parameters=create_parameters))
        self.results['changed'] = True
        new_container = None
        if not self.check_mode:
            try:
                new_container = self.client.create_container(image, **create_parameters)
                self.client.report_warnings(new_container)
            except Exception as exc:
                self.fail("Error creating container: %s" % str(exc))
            return self._get_container(new_container['Id'])
        return new_container

    def container_start(self, container_id):
        """Start the container; when detach is False, wait for it and collect output."""
        self.log("start container %s" % (container_id))
        self.results['actions'].append(dict(started=container_id))
        self.results['changed'] = True
        if not self.check_mode:
            try:
                self.client.start(container=container_id)
            except Exception as exc:
                self.fail("Error starting container %s: %s" % (container_id, str(exc)))

            if self.parameters.detach is False:
                # docker-py >= 3.0 returns a dict from wait(), older versions an int.
                if self.client.docker_py_version >= LooseVersion('3.0'):
                    status = self.client.wait(container_id)['StatusCode']
                else:
                    status = self.client.wait(container_id)
                if self.parameters.auto_remove:
                    output = "Cannot retrieve result as auto_remove is enabled"
                    if self.parameters.output_logs:
                        self.client.module.warn('Cannot output_logs if auto_remove is enabled!')
                else:
                    config = self.client.inspect_container(container_id)
                    logging_driver = config['HostConfig']['LogConfig']['Type']
                    # Logs can only be fetched from drivers that store them locally.
                    if logging_driver in ('json-file', 'journald'):
                        output = self.client.logs(container_id, stdout=True, stderr=True, stream=False, timestamps=False)
                        if self.parameters.output_logs:
                            self._output_logs(msg=output)
                    else:
                        output = "Result logged using `%s` driver" % logging_driver
                if status != 0:
                    self.fail(output, status=status)
                if self.parameters.cleanup:
                    self.container_remove(container_id, force=True)
                insp = self._get_container(container_id)
                if insp.raw:
                    insp.raw['Output'] = output
                else:
                    insp.raw = dict(Output=output)
                return insp
        return self._get_container(container_id)

    def container_remove(self, container_id, link=False, force=False):
        """Remove the container, unpausing it first when the daemon requires it."""
        volume_state = (not self.parameters.keep_volumes)
        # BUGFIX: log message previously read 'force%s' (missing colon).
        self.log("remove container container:%s v:%s link:%s force:%s" % (container_id, volume_state, link, force))
        self.results['actions'].append(dict(removed=container_id, volume_state=volume_state, link=link, force=force))
        self.results['changed'] = True
        response = None
        if not self.check_mode:
            count = 0
            while True:
                try:
                    response = self.client.remove_container(container_id, v=volume_state, link=link, force=force)
                except NotFound as dummy:
                    pass
                except APIError as exc:
                    if 'Unpause the container before stopping or killing' in exc.explanation:
                        # New docker daemon versions do not allow containers to be removed
                        # if they are paused. Make sure we don't end up in an infinite loop.
                        if count == 3:
                            self.fail("Error removing container %s (tried to unpause three times): %s" % (container_id, str(exc)))
                        count += 1
                        # Unpause
                        try:
                            self.client.unpause(container=container_id)
                        except Exception as exc2:
                            self.fail("Error unpausing container %s for removal: %s" % (container_id, str(exc2)))
                        # Now try again
                        continue
                    if 'removal of container ' in exc.explanation and ' is already in progress' in exc.explanation:
                        pass
                    else:
                        self.fail("Error removing container %s: %s" % (container_id, str(exc)))
                except Exception as exc:
                    self.fail("Error removing container %s: %s" % (container_id, str(exc)))
                # We only loop when explicitly requested by 'continue'
                break
        return response

    def container_update(self, container_id, update_parameters):
        """Apply updated resource limits to a running container."""
        if update_parameters:
            self.log("update container %s" % (container_id))
            self.log(update_parameters, pretty_print=True)
            self.results['actions'].append(dict(updated=container_id, update_parameters=update_parameters))
            self.results['changed'] = True
            # BUGFIX: getattr without a default raises AttributeError when the
            # client lacks update_container, defeating the callable() guard.
            if not self.check_mode and callable(getattr(self.client, 'update_container', None)):
                try:
                    result = self.client.update_container(container_id, **update_parameters)
                    self.client.report_warnings(result)
                except Exception as exc:
                    self.fail("Error updating container %s: %s" % (container_id, str(exc)))
        return self._get_container(container_id)

    def container_kill(self, container_id):
        """Kill the container, honoring an optional kill_signal parameter."""
        self.results['actions'].append(dict(killed=container_id, signal=self.parameters.kill_signal))
        self.results['changed'] = True
        response = None
        if not self.check_mode:
            try:
                if self.parameters.kill_signal:
                    response = self.client.kill(container_id, signal=self.parameters.kill_signal)
                else:
                    response = self.client.kill(container_id)
            except Exception as exc:
                self.fail("Error killing container %s: %s" % (container_id, exc))
        return response

    def container_restart(self, container_id):
        """Restart the container, honoring an optional stop_timeout."""
        self.results['actions'].append(dict(restarted=container_id, timeout=self.parameters.stop_timeout))
        self.results['changed'] = True
        if not self.check_mode:
            try:
                if self.parameters.stop_timeout:
                    dummy = self.client.restart(container_id, timeout=self.parameters.stop_timeout)
                else:
                    dummy = self.client.restart(container_id)
            except Exception as exc:
                self.fail("Error restarting container %s: %s" % (container_id, str(exc)))
        return self._get_container(container_id)

    def container_stop(self, container_id):
        """Stop (or kill, when force_kill is set) the container, unpausing if needed."""
        if self.parameters.force_kill:
            self.container_kill(container_id)
            return
        self.results['actions'].append(dict(stopped=container_id, timeout=self.parameters.stop_timeout))
        self.results['changed'] = True
        response = None
        if not self.check_mode:
            count = 0
            while True:
                try:
                    if self.parameters.stop_timeout:
                        response = self.client.stop(container_id, timeout=self.parameters.stop_timeout)
                    else:
                        response = self.client.stop(container_id)
                except APIError as exc:
                    if 'Unpause the container before stopping or killing' in exc.explanation:
                        # New docker daemon versions do not allow containers to be removed
                        # if they are paused. Make sure we don't end up in an infinite loop.
                        if count == 3:
                            self.fail("Error removing container %s (tried to unpause three times): %s" % (container_id, str(exc)))
                        count += 1
                        # Unpause
                        try:
                            self.client.unpause(container=container_id)
                        except Exception as exc2:
                            self.fail("Error unpausing container %s for removal: %s" % (container_id, str(exc2)))
                        # Now try again
                        continue
                    self.fail("Error stopping container %s: %s" % (container_id, str(exc)))
                except Exception as exc:
                    self.fail("Error stopping container %s: %s" % (container_id, str(exc)))
                # We only loop when explicitly requested by 'continue'
                break
        return response
def detect_ipvX_address_usage(client):
    '''
    Helper function to detect whether any specified network uses ipv4_address or ipv6_address
    '''
    networks = client.module.params.get("networks") or []
    return any(
        net.get('ipv4_address') is not None or net.get('ipv6_address') is not None
        for net in networks
    )
class AnsibleDockerClientContainer(AnsibleDockerClient):
    """Docker client specialized for the docker_container module.

    Adds comparison-rule parsing, per-option minimal-version checks and the
    legacy ``container_default_behavior`` compatibility defaults on top of
    AnsibleDockerClient.
    """

    # A list of module options which are not docker container properties
    __NON_CONTAINER_PROPERTY_OPTIONS = tuple([
        'env_file', 'force_kill', 'keep_volumes', 'ignore_image', 'name', 'pull', 'purge_networks',
        'recreate', 'restart', 'state', 'trust_image_content', 'networks', 'cleanup', 'kill_signal',
        'output_logs', 'paused'
    ] + list(DOCKER_COMMON_ARGS.keys()))

    def _parse_comparisons(self):
        """Build ``self.comparisons`` from the argument spec and the user's
        ``comparisons`` option, validating keys and comparison modes."""
        comparisons = {}
        comp_aliases = {}
        # Put in defaults
        explicit_types = dict(
            command='list',
            devices='set(dict)',
            dns_search_domains='list',
            dns_servers='list',
            env='set',
            entrypoint='list',
            etc_hosts='set',
            mounts='set(dict)',
            networks='set(dict)',
            ulimits='set(dict)',
            device_read_bps='set(dict)',
            device_write_bps='set(dict)',
            device_read_iops='set(dict)',
            device_write_iops='set(dict)',
        )
        all_options = set()  # this is for improving user feedback when a wrong option was specified for comparison
        default_values = dict(
            stop_timeout='ignore',
        )
        for option, data in self.module.argument_spec.items():
            all_options.add(option)
            for alias in data.get('aliases', []):
                all_options.add(alias)
            # Ignore options which aren't used as container properties
            if option in self.__NON_CONTAINER_PROPERTY_OPTIONS and option != 'networks':
                continue
            # Determine option type
            if option in explicit_types:
                datatype = explicit_types[option]
            elif data['type'] == 'list':
                datatype = 'set'
            elif data['type'] == 'dict':
                datatype = 'dict'
            else:
                datatype = 'value'
            # Determine comparison type
            if option in default_values:
                comparison = default_values[option]
            elif datatype in ('list', 'value'):
                comparison = 'strict'
            else:
                comparison = 'allow_more_present'
            comparisons[option] = dict(type=datatype, comparison=comparison, name=option)
            # Keep track of aliases
            comp_aliases[option] = option
            for alias in data.get('aliases', []):
                comp_aliases[alias] = option
        # Process legacy ignore options
        if self.module.params['ignore_image']:
            comparisons['image']['comparison'] = 'ignore'
        if self.module.params['purge_networks']:
            comparisons['networks']['comparison'] = 'strict'
        # Process options
        if self.module.params.get('comparisons'):
            # If '*' appears in comparisons, process it first
            if '*' in self.module.params['comparisons']:
                value = self.module.params['comparisons']['*']
                if value not in ('strict', 'ignore'):
                    self.fail("The wildcard can only be used with comparison modes 'strict' and 'ignore'!")
                for option, v in comparisons.items():
                    if option == 'networks':
                        # `networks` is special: only update if
                        # some value is actually specified
                        if self.module.params['networks'] is None:
                            continue
                    v['comparison'] = value
            # Now process all other comparisons.
            comp_aliases_used = {}
            for key, value in self.module.params['comparisons'].items():
                if key == '*':
                    continue
                # Find main key
                key_main = comp_aliases.get(key)
                if key_main is None:
                    # BUGFIX: previously this tested `key_main in all_options`,
                    # but key_main is always None on this path, so the more
                    # specific error below was unreachable. Test the
                    # user-supplied key instead.
                    if key in all_options:
                        self.fail("The module option '%s' cannot be specified in the comparisons dict, "
                                  "since it does not correspond to container's state!" % key)
                    self.fail("Unknown module option '%s' in comparisons dict!" % key)
                if key_main in comp_aliases_used:
                    self.fail("Both '%s' and '%s' (aliases of %s) are specified in comparisons dict!" % (key, comp_aliases_used[key_main], key_main))
                comp_aliases_used[key_main] = key
                # Check value and update accordingly
                if value in ('strict', 'ignore'):
                    comparisons[key_main]['comparison'] = value
                elif value == 'allow_more_present':
                    if comparisons[key_main]['type'] == 'value':
                        self.fail("Option '%s' is a value and not a set/list/dict, so its comparison cannot be %s" % (key, value))
                    comparisons[key_main]['comparison'] = value
                else:
                    self.fail("Unknown comparison mode '%s'!" % value)
        # Add implicit options
        comparisons['publish_all_ports'] = dict(type='value', comparison='strict', name='published_ports')
        comparisons['expected_ports'] = dict(type='dict', comparison=comparisons['published_ports']['comparison'], name='expected_ports')
        comparisons['disable_healthcheck'] = dict(type='value',
                                                  comparison='ignore' if comparisons['healthcheck']['comparison'] == 'ignore' else 'strict',
                                                  name='disable_healthcheck')
        # Check legacy values
        if self.module.params['ignore_image'] and comparisons['image']['comparison'] != 'ignore':
            self.module.warn('The ignore_image option has been overridden by the comparisons option!')
        if self.module.params['purge_networks'] and comparisons['networks']['comparison'] != 'strict':
            self.module.warn('The purge_networks option has been overridden by the comparisons option!')
        self.comparisons = comparisons

    def _get_additional_minimal_versions(self):
        """Fill in the version support flag for stop_timeout, which depends on
        both the daemon API version and the Python SDK version."""
        stop_timeout_supported = self.docker_api_version >= LooseVersion('1.25')
        stop_timeout_needed_for_update = self.module.params.get("stop_timeout") is not None and self.module.params.get('state') != 'absent'
        if stop_timeout_supported:
            stop_timeout_supported = self.docker_py_version >= LooseVersion('2.1')
            if stop_timeout_needed_for_update and not stop_timeout_supported:
                # We warn (instead of fail) since in older versions, stop_timeout was not used
                # to update the container's configuration, but only when stopping a container.
                self.module.warn("Docker SDK for Python's version is %s. Minimum version required is 2.1 to update "
                                 "the container's stop_timeout configuration. "
                                 "If you use the 'docker-py' module, you have to switch to the 'docker' Python package." % (docker_version,))
        else:
            if stop_timeout_needed_for_update and not stop_timeout_supported:
                # We warn (instead of fail) since in older versions, stop_timeout was not used
                # to update the container's configuration, but only when stopping a container.
                self.module.warn("Docker API version is %s. Minimum version required is 1.25 to set or "
                                 "update the container's stop_timeout configuration." % (self.docker_api_version_str,))
        self.option_minimal_versions['stop_timeout']['supported'] = stop_timeout_supported

    def __init__(self, **kwargs):
        option_minimal_versions = dict(
            # internal options
            log_config=dict(),
            publish_all_ports=dict(),
            ports=dict(),
            volume_binds=dict(),
            name=dict(),
            # normal options
            device_read_bps=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
            device_read_iops=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
            device_write_bps=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
            device_write_iops=dict(docker_py_version='1.9.0', docker_api_version='1.22'),
            dns_opts=dict(docker_api_version='1.21', docker_py_version='1.10.0'),
            ipc_mode=dict(docker_api_version='1.25'),
            mac_address=dict(docker_api_version='1.25'),
            oom_score_adj=dict(docker_api_version='1.22'),
            shm_size=dict(docker_api_version='1.22'),
            stop_signal=dict(docker_api_version='1.21'),
            tmpfs=dict(docker_api_version='1.22'),
            volume_driver=dict(docker_api_version='1.21'),
            memory_reservation=dict(docker_api_version='1.21'),
            kernel_memory=dict(docker_api_version='1.21'),
            auto_remove=dict(docker_py_version='2.1.0', docker_api_version='1.25'),
            healthcheck=dict(docker_py_version='2.0.0', docker_api_version='1.24'),
            init=dict(docker_py_version='2.2.0', docker_api_version='1.25'),
            runtime=dict(docker_py_version='2.4.0', docker_api_version='1.25'),
            sysctls=dict(docker_py_version='1.10.0', docker_api_version='1.24'),
            userns_mode=dict(docker_py_version='1.10.0', docker_api_version='1.23'),
            uts=dict(docker_py_version='3.5.0', docker_api_version='1.25'),
            pids_limit=dict(docker_py_version='1.10.0', docker_api_version='1.23'),
            mounts=dict(docker_py_version='2.6.0', docker_api_version='1.25'),
            cpus=dict(docker_py_version='2.3.0', docker_api_version='1.25'),
            # specials
            ipvX_address_supported=dict(docker_py_version='1.9.0', docker_api_version='1.22',
                                        detect_usage=detect_ipvX_address_usage,
                                        usage_msg='ipv4_address or ipv6_address in networks'),
            stop_timeout=dict(),  # see _get_additional_minimal_versions()
        )

        super(AnsibleDockerClientContainer, self).__init__(
            option_minimal_versions=option_minimal_versions,
            option_minimal_versions_ignore_params=self.__NON_CONTAINER_PROPERTY_OPTIONS,
            **kwargs
        )

        # Old daemons (< API 1.21) report the image config under 'ContainerConfig'.
        self.image_inspect_source = 'Config'
        if self.docker_api_version < LooseVersion('1.21'):
            self.image_inspect_source = 'ContainerConfig'

        self._get_additional_minimal_versions()
        self._parse_comparisons()

        if self.module.params['container_default_behavior'] is None:
            self.module.params['container_default_behavior'] = 'compatibility'
            self.module.deprecate(
                'The container_default_behavior option will change its default value from "compatibility" to '
                '"no_defaults" in Ansible 2.14. To remove this warning, please specify an explicit value for it now',
                version='2.14'
            )
        if self.module.params['container_default_behavior'] == 'compatibility':
            # Re-apply the historic implicit defaults for backwards compatibility.
            old_default_values = dict(
                auto_remove=False,
                detach=True,
                init=False,
                interactive=False,
                memory="0",
                paused=False,
                privileged=False,
                read_only=False,
                tty=False,
            )
            for param, value in old_default_values.items():
                if self.module.params[param] is None:
                    self.module.params[param] = value
def main():
    """Module entry point: declare the argument spec, build the client,
    run the container manager and exit with its results."""
    argument_spec = dict(
        auto_remove=dict(type='bool'),
        blkio_weight=dict(type='int'),
        capabilities=dict(type='list', elements='str'),
        cap_drop=dict(type='list', elements='str'),
        cleanup=dict(type='bool', default=False),
        command=dict(type='raw'),
        comparisons=dict(type='dict'),
        container_default_behavior=dict(type='str', choices=['compatibility', 'no_defaults']),
        cpu_period=dict(type='int'),
        cpu_quota=dict(type='int'),
        cpus=dict(type='float'),
        cpuset_cpus=dict(type='str'),
        cpuset_mems=dict(type='str'),
        cpu_shares=dict(type='int'),
        detach=dict(type='bool'),
        devices=dict(type='list', elements='str'),
        device_read_bps=dict(type='list', elements='dict', options=dict(
            path=dict(required=True, type='str'),
            rate=dict(required=True, type='str'),
        )),
        device_write_bps=dict(type='list', elements='dict', options=dict(
            path=dict(required=True, type='str'),
            rate=dict(required=True, type='str'),
        )),
        device_read_iops=dict(type='list', elements='dict', options=dict(
            path=dict(required=True, type='str'),
            rate=dict(required=True, type='int'),
        )),
        device_write_iops=dict(type='list', elements='dict', options=dict(
            path=dict(required=True, type='str'),
            rate=dict(required=True, type='int'),
        )),
        dns_servers=dict(type='list', elements='str'),
        dns_opts=dict(type='list', elements='str'),
        dns_search_domains=dict(type='list', elements='str'),
        domainname=dict(type='str'),
        entrypoint=dict(type='list', elements='str'),
        env=dict(type='dict'),
        env_file=dict(type='path'),
        etc_hosts=dict(type='dict'),
        exposed_ports=dict(type='list', elements='str', aliases=['exposed', 'expose']),
        force_kill=dict(type='bool', default=False, aliases=['forcekill']),
        groups=dict(type='list', elements='str'),
        healthcheck=dict(type='dict', options=dict(
            test=dict(type='raw'),
            interval=dict(type='str'),
            timeout=dict(type='str'),
            start_period=dict(type='str'),
            retries=dict(type='int'),
        )),
        hostname=dict(type='str'),
        ignore_image=dict(type='bool', default=False),
        image=dict(type='str'),
        init=dict(type='bool'),
        interactive=dict(type='bool'),
        ipc_mode=dict(type='str'),
        keep_volumes=dict(type='bool', default=True),
        kernel_memory=dict(type='str'),
        kill_signal=dict(type='str'),
        labels=dict(type='dict'),
        links=dict(type='list', elements='str'),
        log_driver=dict(type='str'),
        log_options=dict(type='dict', aliases=['log_opt']),
        mac_address=dict(type='str'),
        memory=dict(type='str'),
        memory_reservation=dict(type='str'),
        memory_swap=dict(type='str'),
        memory_swappiness=dict(type='int'),
        mounts=dict(type='list', elements='dict', options=dict(
            target=dict(type='str', required=True),
            source=dict(type='str'),
            type=dict(type='str', choices=['bind', 'volume', 'tmpfs', 'npipe'], default='volume'),
            read_only=dict(type='bool'),
            consistency=dict(type='str', choices=['default', 'consistent', 'cached', 'delegated']),
            propagation=dict(type='str', choices=['private', 'rprivate', 'shared', 'rshared', 'slave', 'rslave']),
            no_copy=dict(type='bool'),
            labels=dict(type='dict'),
            volume_driver=dict(type='str'),
            volume_options=dict(type='dict'),
            tmpfs_size=dict(type='str'),
            tmpfs_mode=dict(type='str'),
        )),
        name=dict(type='str', required=True),
        network_mode=dict(type='str'),
        networks=dict(type='list', elements='dict', options=dict(
            name=dict(type='str', required=True),
            ipv4_address=dict(type='str'),
            ipv6_address=dict(type='str'),
            aliases=dict(type='list', elements='str'),
            links=dict(type='list', elements='str'),
        )),
        networks_cli_compatible=dict(type='bool'),
        oom_killer=dict(type='bool'),
        oom_score_adj=dict(type='int'),
        output_logs=dict(type='bool', default=False),
        paused=dict(type='bool'),
        pid_mode=dict(type='str'),
        pids_limit=dict(type='int'),
        privileged=dict(type='bool'),
        published_ports=dict(type='list', elements='str', aliases=['ports']),
        pull=dict(type='bool', default=False),
        purge_networks=dict(type='bool', default=False),
        read_only=dict(type='bool'),
        recreate=dict(type='bool', default=False),
        restart=dict(type='bool', default=False),
        restart_policy=dict(type='str', choices=['no', 'on-failure', 'always', 'unless-stopped']),
        restart_retries=dict(type='int'),
        runtime=dict(type='str'),
        security_opts=dict(type='list', elements='str'),
        shm_size=dict(type='str'),
        state=dict(type='str', default='started', choices=['absent', 'present', 'started', 'stopped']),
        stop_signal=dict(type='str'),
        stop_timeout=dict(type='int'),
        sysctls=dict(type='dict'),
        tmpfs=dict(type='list', elements='str'),
        trust_image_content=dict(type='bool', default=False, removed_in_version='2.14'),
        tty=dict(type='bool'),
        ulimits=dict(type='list', elements='str'),
        user=dict(type='str'),
        userns_mode=dict(type='str'),
        uts=dict(type='str'),
        volume_driver=dict(type='str'),
        volumes=dict(type='list', elements='str'),
        volumes_from=dict(type='list', elements='str'),
        working_dir=dict(type='str'),
    )

    # An image is only mandatory when the container may need to be created.
    required_if = [
        ('state', 'present', ['image'])
    ]

    client = AnsibleDockerClientContainer(
        argument_spec=argument_spec,
        required_if=required_if,
        supports_check_mode=True,
        min_docker_api_version='1.20',
    )

    # Deprecation warnings for upcoming behavior changes around networks.
    if client.module.params['networks_cli_compatible'] is None and client.module.params['networks']:
        client.module.deprecate(
            'Please note that docker_container handles networks slightly different than docker CLI. '
            'If you specify networks, the default network will still be attached as the first network. '
            '(You can specify purge_networks to remove all networks not explicitly listed.) '
            'This behavior will change in Ansible 2.12. You can change the behavior now by setting '
            'the new `networks_cli_compatible` option to `yes`, and remove this warning by setting '
            'it to `no`',
            version='2.12'
        )
    if client.module.params['networks_cli_compatible'] is True and client.module.params['networks'] and client.module.params['network_mode'] is None:
        client.module.deprecate(
            'Please note that the default value for `network_mode` will change from not specified '
            '(which is equal to `default`) to the name of the first network in `networks` if '
            '`networks` has at least one entry and `networks_cli_compatible` is `true`. You can '
            'change the behavior now by explicitly setting `network_mode` to the name of the first '
            'network in `networks`, and remove this warning by setting `network_mode` to `default`. '
            'Please make sure that the value you set to `network_mode` equals the inspection result '
            'for existing containers, otherwise the module will recreate them. You can find out the '
            'correct value by running "docker inspect --format \'{{.HostConfig.NetworkMode}}\' <container_name>"',
            version='2.14'
        )

    try:
        # ContainerManager's constructor performs the whole run; its results
        # dict is sanitized and returned to Ansible.
        cm = ContainerManager(client)
        client.module.exit_json(**sanitize_result(cm.results))
    except DockerException as e:
        client.fail('An unexpected docker error occurred: {0}'.format(e), exception=traceback.format_exc())
    except RequestException as e:
        client.fail('An unexpected requests error occurred when docker-py tried to talk to the docker daemon: {0}'.format(e), exception=traceback.format_exc())
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| codeparrot/github-code-clean |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Alessandro Camilli (a.camilli@yahoo.it)
# Copyright (C) 2014
# Associazione OpenERP Italia (<http://www.openerp-italia.org>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, orm
from openerp.tools.translate import _
import decimal_precision as dp
import datetime, time
class res_country(orm.Model):
    # Extends res.country with the country code assigned by the Italian
    # Revenue Agency (Agenzia delle Entrate), used in the "spesometro" export.
    _inherit = "res.country"
    _columns = {
        # 3-character Agenzia delle Entrate country code
        'codice_stato_agenzia_entrate': fields.char('Codice stato Agenzia Entrate', size=3)
    }
class account_tax_code(orm.Model):
    # Tax codes flagged here are excluded from the spesometro declaration
    # (see spesometro_comunicazione.compute_invoice_amounts).
    _inherit = "account.tax.code"
    _columns = {
        'spesometro_escludi': fields.boolean('Escludi dalla dichiarazione'),
    }
    _defaults = {
        'spesometro_escludi' : False,
    }
class account_journal(orm.Model):
    # Per-journal spesometro configuration: whether moves of this journal are
    # included, which operation type they represent, and their direction.
    _inherit = "account.journal"
    _columns = {
        # include moves of this journal in the declaration
        'spesometro': fields.boolean('Da includere'),
        # operation type (FA/SA/BL1/BL2/BL3/DR) — may be overridden per partner
        'spesometro_operazione': fields.selection((('FA','Operazioni documentate da fattura'),
                                                   ('SA','Operazioni senza fattura'),
                                                   ('BL1','Operazioni con paesi con fiscalità privilegiata'),
                                                   ('BL2','Operazioni con soggetti non residenti'),
                                                   ('BL3','Acquisti di servizi da soggetti non residenti'),
                                                   ('DR','Documento Riepilogativo')),
                                                  'Operazione' ),
        # direction of the operation: active (sales) / passive (purchases)
        'spesometro_segno': fields.selection((('attiva','Attiva'),
                                              ('passiva','Passiva')),
                                             'Segno operaz.' ),
        # operation whose VAT is not shown on the document
        'spesometro_IVA_non_esposta': fields.boolean('IVA non esposta')
    }
class res_partner(orm.Model):
    # Per-partner spesometro configuration; when set, these values override
    # the journal-level defaults (see genera_comunicazione).
    _inherit = "res.partner"
    _columns = {
        # exclude every move of this partner from the declaration
        'spesometro_escludi': fields.boolean('Escludi'),
        # operation type override (same codes as account.journal)
        'spesometro_operazione': fields.selection((('FA','Operazioni documentate da fattura'),
                                                   ('SA','Operazioni senza fattura'),
                                                   ('BL1','Operazioni con paesi con fiscalità privilegiata'),
                                                   ('BL2','Operazioni con soggetti non residenti'),
                                                   ('BL3','Acquisti di servizi da soggetti non residenti'),
                                                   ('DR','Documento Riepilogativo')),
                                                  'Operazione' ),
        'spesometro_IVA_non_esposta': fields.boolean('IVA non esposta'),
        # leasing/rental category code required by the FA quadro
        'spesometro_leasing': fields.selection((('A','Autovettura'),
                                                ('B','Caravan'),
                                                ('C','Altri veicoli'),
                                                ('D','Unità da diporto'),
                                                ('E','Aeromobili')),
                                               'Tipo Leasing' ),
        'spesometro_tipo_servizio': fields.selection((('cessione','Cessione Beni'),
                                                      ('servizi','Prestazione di servizi')),
                                                     'Tipo servizio', help="Specificare per 'Operazioni con paesi con fiscalità privilegiata' "),
        # foreign address to report for non-resident partners
        'spesometro_indirizzo_estero': fields.many2one('res.partner.address', 'Indirizzo non residente'),
    }
    _defaults = {
        'spesometro_escludi' : False,
    }
class spesometro_configurazione(orm.Model):
    # Yearly configuration record: amount thresholds per quadro plus the
    # res.country record identifying San Marino. One record per year.

    def _check_one_year(self, cr, uid, ids, context=None):
        # Constraint: at most one configuration record per 'anno'.
        for element in self.browse(cr, uid, ids, context=context):
            element_ids = self.search(cr, uid, [('anno','=', element.anno)], context=context)
            if len(element_ids) > 1:
                return False
        return True
    _name = "spesometro.configurazione"
    _description = "Spesometro - Configurazione"
    _columns = {
        'anno': fields.integer('Anno', size=4, required=True ),
        # country record used to detect San Marino partners
        'stato_san_marino': fields.many2one('res.country', 'Stato San Marino', required=True),
        # thresholds: "_line" variants apply to the single operation,
        # the others to the aggregated line total
        'quadro_fa_limite_importo': fields.float('Quadro FA - Limite importo'),
        'quadro_fa_limite_importo_line': fields.float('Quadro FA - Limite importo singola operaz.'),
        'quadro_sa_limite_importo': fields.float('Quadro SA - Limite importo'),
        'quadro_sa_limite_importo_line': fields.float('Quadro SA - Limite importo singola operaz.'),
        'quadro_bl_limite_importo': fields.float('Quadro BL - Limite importo'),
        'quadro_bl_limite_importo_line': fields.float('Quadro BL - Limite importo singola operaz.'),
        'quadro_se_limite_importo_line': fields.float('Quadro SE - Limite importo singola operaz.'),
    }
    _constraints = [
        (_check_one_year, 'Error! Config for this year already exists.', ['anno']),
    ]
class spesometro_comunicazione(orm.Model):
    _name = "spesometro.comunicazione"
    _description = "Spesometro - Comunicazione "

    def _tot_operation_number(self, cr, uid, ids, field_names, args, context=None):
        """Functional-field computer: count the lines of every quadro.

        Returns {communication_id: {'totale_XX': count, ...}} for the
        multi='operation_number' group of fields.  BL lines are split by
        operation sub-type; FE/FR lines are split between plain invoices
        and summary documents (documento riepilogativo).
        """
        res = {}
        for com in self.browse(cr, uid, ids):
            # Aggregated quadri: one line per partner
            tot_FA = len(com.line_FA_ids)
            tot_SA = len(com.line_SA_ids)
            tot_BL1 = 0
            tot_BL2 = 0
            tot_BL3 = 0
            for line in com.line_BL_ids:
                if line.operazione_fiscalita_privilegiata:
                    tot_BL1 += 1
                elif line.operazione_con_soggetti_non_residenti:
                    tot_BL2 += 1
                elif line.Acquisto_servizi_da_soggetti_non_residenti:
                    tot_BL3 += 1
            # Analytic quadri: one line per document
            tot_FE = 0    # issued invoices
            tot_FE_R = 0  # issued summary documents
            for line in com.line_FE_ids:
                if line.documento_riepilogativo:
                    tot_FE_R += 1
                else:
                    tot_FE += 1
            tot_FR = 0    # received invoices
            tot_FR_R = 0  # received summary documents
            for line in com.line_FR_ids:
                if line.documento_riepilogativo:
                    tot_FR_R += 1
                else:
                    tot_FR += 1
            tot_NE = len(com.line_NE_ids)
            tot_NR = len(com.line_NR_ids)
            tot_DF = len(com.line_DF_ids)
            tot_FN = len(com.line_FN_ids)
            tot_SE = len(com.line_SE_ids)
            tot_TU = len(com.line_TU_ids)
            res[com.id] = {
                'totale_FA' : tot_FA,
                'totale_SA' : tot_SA,
                'totale_BL1' : tot_BL1,
                'totale_BL2' : tot_BL2,
                'totale_BL3' : tot_BL3,
                'totale_FE' : tot_FE,
                'totale_FE_R' : tot_FE_R,
                'totale_FR' : tot_FR,
                # Bug fix: the key was 'totale_FR_r' (lowercase), which does
                # not match the 'totale_FR_R' functional field, so that
                # counter never received a value.
                'totale_FR_R' : tot_FR_R,
                'totale_NE' : tot_NE,
                'totale_NR' : tot_NR,
                'totale_DF' : tot_DF,
                'totale_FN' : tot_FN,
                'totale_SE' : tot_SE,
                'totale_TU' : tot_TU,
            }
        return res
_columns = {
'company_id': fields.many2one('res.company', 'Azienda', required=True ),
'periodo': fields.selection((('anno','Annuale'), ('trimestre','Trimestrale'), ('mese','Mensile')),
'Periodo', required=True),
'anno' : fields.integer('Anno', size=4, required=True),
'trimestre' : fields.integer('Trimestre', size=1 ),
'mese' : fields.selection((('1','Gennaio'), ('2','Febbraio'), ('3','Marzo'), ('4','Aprile'),
('5','Maggio'), ('6','Giugno'), ('7','Luglio'), ('8','Agosto'),
('9','Settembre'), ('10','Ottobre'), ('11','Novembre'), ('12','Dicembre'),
),'Mese'),
'tipo': fields.selection((('ordinaria','Ordinaria'), ('sostitutiva','Sostitutiva'), ('annullamento','Annullamento')),
'Tipo comunicazione', required=True),
'comunicazione_da_sostituire_annullare': fields.integer('Protocollo comunicaz. da sostituire/annullare'),
'documento_da_sostituire_annullare': fields.integer('Protocollo documento da sostituire/annullare'),
'formato_dati': fields.selection((('aggregati','Dati Aggregati'), ('analitici','Dati Analitici')),
'Formato dati', readonly=True ),
'codice_fornitura': fields.char('Codice fornitura', readonly=True, size=5, help='Impostare a "NSP00" '),
'tipo_fornitore': fields.selection((('01','Invio propria comunicazione'), ('10','Intermediario')),
'Tipo fornitore' ),
'codice_fiscale_fornitore': fields.char('Codice fiscale Fornitore', size=16,
help="Deve essere uguale al Codice fiscale dell'intermediario (campo 52 del record B) se presente, altrimenti al Codice fiscale del soggetto tenuto alla comunicazione (campo 41 del record B) se presente, altrimenti al Codice fiscale del soggetto obbligato (campo 2 del record B)"),
#
# Valori per comunicazione su più invii (non gestito)
'progressivo_telematico': fields.integer('Progressivo telematico', readonly=True),
'numero_totale_invii': fields.integer('Numero totale invii telematici', readonly=True),
#
# Soggetto a cui si riferisce la comunicazione
#
'soggetto_codice_fiscale': fields.char('Codice fiscale soggetto obbligato', size=16,
help="Soggetto cui si riferisce la comunicazione"),
'soggetto_partitaIVA': fields.char('Partita IVA', size=11),
'soggetto_codice_attivita': fields.char('Codice attività', size=6, help="Codice ATECO 2007"),
'soggetto_telefono': fields.char('Telefono', size=12),
'soggetto_fax': fields.char('Fax', size=12),
'soggetto_email': fields.char('E-mail', size=50),
'soggetto_forma_giuridica': fields.selection((('persona_giuridica','Persona Giuridica'), ('persona_fisica','Persona Fisica')),
'Forma Giuridica'),
'soggetto_pf_cognome': fields.char('Cognome', size=24, help=""),
'soggetto_pf_nome': fields.char('Nome', size=20, help=""),
'soggetto_pf_sesso': fields.selection((('M','M'), ('F','F')),'Sesso'),
'soggetto_pf_data_nascita': fields.date('Data di nascita'),
'soggetto_pf_comune_nascita': fields.char('Comune o stato estero di nascita', size=40),
'soggetto_pf_provincia_nascita': fields.char('Provincia', size=2),
'soggetto_pg_denominazione': fields.char('Denominazione', size=60),
# Soggetto tenuto alla comunicazione
'soggetto_cm_forma_giuridica': fields.selection((('persona_giuridica','Persona Giuridica'), ('persona_fisica','Persona Fisica')),
'Forma Giuridica'),
'soggetto_cm_codice_fiscale': fields.char('Codice Fiscale', size=16, help="Soggetto che effettua la comunicazione se diverso dal soggetto tenuto alla comunicazione"),
'soggetto_cm_pf_cognome': fields.char('Cognome', size=24, help=""),
'soggetto_cm_pf_nome': fields.char('Nome', size=20, help=""),
'soggetto_cm_pf_sesso': fields.selection((('M','M'), ('F','F')),'Sesso'),
'soggetto_cm_pf_data_nascita': fields.date('Data di nascita'),
'soggetto_cm_pf_comune_nascita': fields.char('Comune o stato estero di nascita', size=40),
'soggetto_cm_pf_provincia_nascita': fields.char('Provincia', size=2),
'soggetto_cm_pf_codice_carica': fields.integer('Codice Fiscale', size=2, help=""),
'soggetto_cm_pf_data_inizio_procedura': fields.date('Data inizio procedura'),
'soggetto_cm_pf_data_fine_procedura': fields.date('Data fine procedura'),
'soggetto_cm_pg_denominazione': fields.char('Denominazione', size=60),
# Soggetto incaricato alla trasmissione
'soggetto_trasmissione_codice_fiscale': fields.char('Codice Fiscale', size=16, help="Intermediario che effettua la trasmissione telematica"),
'soggetto_trasmissione_numero_CAF': fields.integer('Nr iscrizione albo del C.A.F.', size=5, help="Intermediario che effettua la trasmissione telematica"),
'soggetto_trasmissione_impegno': fields.selection((('1','Soggetto obbligato'), ('2','Intermediario')),'Impegno trasmissione'),
'soggetto_trasmissione_data_impegno': fields.date('Data data impegno'),
'line_FA_ids': fields.one2many('spesometro.comunicazione.line.fa', 'comunicazione_id', 'Quadri FA' ),
'line_SA_ids': fields.one2many('spesometro.comunicazione.line.sa', 'comunicazione_id', 'Quadri SA' ),
'line_BL_ids': fields.one2many('spesometro.comunicazione.line.bl', 'comunicazione_id', 'Quadri BL' ),
'line_FE_ids': fields.one2many('spesometro.comunicazione.line.fe', 'comunicazione_id', 'Quadri FE' ),
'line_FR_ids': fields.one2many('spesometro.comunicazione.line.fr', 'comunicazione_id', 'Quadri FR' ),
'line_NE_ids': fields.one2many('spesometro.comunicazione.line.ne', 'comunicazione_id', 'Quadri NE' ),
'line_NR_ids': fields.one2many('spesometro.comunicazione.line.nr', 'comunicazione_id', 'Quadri NR' ),
'line_DF_ids': fields.one2many('spesometro.comunicazione.line.df', 'comunicazione_id', 'Quadri DF' ),
'line_FN_ids': fields.one2many('spesometro.comunicazione.line.fn', 'comunicazione_id', 'Quadri FN' ),
'line_SE_ids': fields.one2many('spesometro.comunicazione.line.se', 'comunicazione_id', 'Quadri SE' ),
'line_TU_ids': fields.one2many('spesometro.comunicazione.line.tu', 'comunicazione_id', 'Quadri TU' ),
'totale_FA': fields.function(_tot_operation_number, string='Tot operazioni FA', type='integer', multi='operation_number'),
'totale_SA': fields.function(_tot_operation_number, string='Tot operazioni SA', type='integer', multi='operation_number'),
'totale_BL1': fields.function(_tot_operation_number, string='Tot operazioni BL - Paesi con fiscalita privilegiata', type='integer', multi='operation_number'),
'totale_BL2': fields.function(_tot_operation_number, string='Tot operazioni BL - Soggetti non residenti', type='integer', multi='operation_number'),
'totale_BL3': fields.function(_tot_operation_number, string='Tot operazioni BL - Acquisti servizi non soggetti non residenti', type='integer', multi='operation_number'),
'totale_FE': fields.function(_tot_operation_number, string='Tot operazioni FE', type='integer', multi='operation_number'),
'totale_FE_R': fields.function(_tot_operation_number, string='Tot operazioni FE doc riepil.', type='integer', multi='operation_number'),
'totale_FR': fields.function(_tot_operation_number, string='Tot operazioni FR', type='integer', multi='operation_number'),
'totale_FR_R': fields.function(_tot_operation_number, string='Tot operazioni FR doc riepil.', type='integer', multi='operation_number'),
'totale_NE': fields.function(_tot_operation_number, string='Tot operazioni NE', type='integer', multi='operation_number'),
'totale_NR': fields.function(_tot_operation_number, string='Tot operazioni NR', type='integer', multi='operation_number'),
'totale_DF': fields.function(_tot_operation_number, string='Tot operazioni DF', type='integer', multi='operation_number'),
'totale_FN': fields.function(_tot_operation_number, string='Tot operazioni FN', type='integer', multi='operation_number'),
'totale_SE': fields.function(_tot_operation_number, string='Tot operazioni SE', type='integer', multi='operation_number'),
'totale_TU': fields.function(_tot_operation_number, string='Tot operazioni TU', type='integer', multi='operation_number'),
}
_default ={
'codice_fornitura': 'NSP00',
'tipo_fornitore': '01',
'formato_dati': 'aggregati',
}
def onchange_trasmissione_impegno(self, cr, uid, ids, type, context=None):
res = {}
fiscalcode = False
if type == '1': # soggetto obbligato
fiscalcode = context.get('soggetto_codice_fiscale', False)
res = {
'value' : {'soggetto_trasmissione_codice_fiscale' : fiscalcode}
}
return res
def partner_is_from_san_marino(self, cr, uid, move, invoice, arg):
# configurazione
anno_competenza = datetime.datetime.strptime(move.period_id.date_start, "%Y-%m-%d").year
configurazione_ids = self.pool.get('spesometro.configurazione').search(cr, uid, \
[('anno', '=', anno_competenza)])
if not configurazione_ids:
raise orm.except_orm(_('Configurazione mancante!'),_("Configurare l'anno relativo alla comunicazione") )
configurazione = self.pool.get('spesometro.configurazione').browse(cr, uid, configurazione_ids[0])
stato_estero = False
address = self._get_partner_address_obj(cr, uid, move, invoice, arg)
if address and address.country_id and configurazione.stato_san_marino.id == address.country_id.id:
return True
else:
return False
def _get_partner_address_obj(self, cr, uid, move, invoice, arg):
address = False
if move.partner_id.spesometro_indirizzo_estero:
address = move.partner_id.spesometro_indirizzo_estero
elif move.partner_id.address[0]:
address = move.partner_id.address[0]
return address
def compute_invoice_amounts(self, cr, uid, move, invoice, arg):
'''
Calcolo totali documento. Dall'imponibile vanno esclusi gli importi assoggettati ad un'imposta che ha l'esclusione sulla "Comunicazione art.21"
'''
res ={
'amount_untaxed' : 0,
'amount_tax' : 0,
'amount_total' : 0,
}
for line in invoice.tax_line:
if not line.tax_code_id.spesometro_escludi:
res['amount_untaxed'] += line.base
res['amount_tax'] += line.amount
res['amount_total'] += round(line.base + line.amount, 2)
return res
def truncate_values(self, cr, uid, ids, context=None):
for com in self.browse(cr, uid, ids):
for line in com.line_FA_ids:
vals = {
'attive_imponibile_non_esente': int(line.attive_imponibile_non_esente),
'attive_imposta': int(line.attive_imposta),
'attive_operazioni_iva_non_esposta': int(line.attive_operazioni_iva_non_esposta),
'attive_note_variazione': int(line.attive_note_variazione),
'attive_note_variazione_imposta': int(line.attive_note_variazione_imposta),
'passive_imponibile_non_esente': int(line.passive_imponibile_non_esente),
'passive_imposta': int(line.passive_imposta),
'passive_operazioni_iva_non_esposta': int(line.passive_operazioni_iva_non_esposta),
'passive_note_variazione': int(line.passive_note_variazione),
'passive_note_variazione_imposta': int(line.passive_note_variazione_imposta),
}
self.pool.get('spesometro.comunicazione.line.fa').write(cr, uid, [line.id], vals)
for line in com.line_SA_ids:
vals = {
'importo_complessivo': int(line.importo_complessivo),
}
self.pool.get('spesometro.comunicazione.line.sa').write(cr, uid, [line.id], vals)
for line in com.line_BL_ids:
vals = {
'attive_importo_complessivo': int(line.attive_importo_complessivo),
'attive_imposta': int(line.attive_imposta),
'attive_non_sogg_cessione_beni': int(line.attive_non_sogg_cessione_beni),
'attive_non_sogg_servizi': int(line.attive_non_sogg_servizi),
'attive_note_variazione': int(line.attive_note_variazione),
'attive_note_variazione_imposta': int(line.attive_note_variazione_imposta),
'passive_importo_complessivo': int(line.passive_importo_complessivo),
'passive_imposta': int(line.passive_imposta),
'passive_non_sogg_importo_complessivo': int(line.passive_non_sogg_importo_complessivo),
'passive_note_variazione': int(line.passive_note_variazione),
'passive_note_variazione_imposta': int(line.passive_note_variazione_imposta),
}
self.pool.get('spesometro.comunicazione.line.bl').write(cr, uid, [line.id], vals)
return True
def validate_lines(self, cr, uid, ids, context=None):
for com in self.browse(cr, uid, ids):
# configurazione
configurazione_ids = self.pool.get('spesometro.configurazione').search(cr, uid, \
[('anno', '=', com.anno)])
if not configurazione_ids:
raise orm.except_orm(_('Configurazione mancante!'),_("Configurare l'anno relativo alla comunicazione") )
configurazione = self.pool.get('spesometro.configurazione').browse(cr, uid, configurazione_ids[0])
for line in com.line_FA_ids:
if configurazione.quadro_fa_limite_importo :
if line.attive_imponibile_non_esente and \
line.attive_imponibile_non_esente < configurazione.quadro_fa_limite_importo:
self.pool.get('spesometro.comunicazione.line.fa').unlink(cr, uid, [line.id])
for line in com.line_SA_ids:
if configurazione.quadro_sa_limite_importo :
if line.importo_complessivo and \
line.importo_complessivo < configurazione.quadro_sa_limite_importo:
self.pool.get('spesometro.comunicazione.line.sa').unlink(cr, uid, [line.id])
for line in com.line_BL_ids:
if configurazione.quadro_bl_limite_importo :
importo_test = 0
if line.attive_importo_complessivo :
importo_test = line.attive_importo_complessivo
elif line.attive_non_sogg_cessione_beni :
importo_test = line.attive_non_sogg_cessione_beni
elif line.attive_non_sogg_servizi :
importo_test = line.attive_non_sogg_servizi
if importo_test and \
importo_test < configurazione.quadro_bl_limite_importo:
self.pool.get('spesometro.comunicazione.line.bl').unlink(cr, uid, [line.id])
# Controllo formale comunicazione
# ... periodo in presenza di linee nel quadro SE
if com.line_SE_ids and not com.trimestre and not com.mese:
raise orm.except_orm(_('Perido Errato!'),_("In presenza di operazione nel qudro SE (Acquisti da San Marino) \
sono ammessi solo periodi mensili/trimestrali") )
return True
def validate_operation(self, cr, uid, move, invoice, arg):
# configurazione
anno_competenza = datetime.datetime.strptime(move.period_id.date_start, "%Y-%m-%d").year
configurazione_ids = self.pool.get('spesometro.configurazione').search(cr, uid, \
[('anno', '=', anno_competenza)])
if not configurazione_ids:
raise orm.except_orm(_('Configurazione mancante!'),_("Configurare l'anno relativo alla comunicazione") )
configurazione = self.pool.get('spesometro.configurazione').browse(cr, uid, configurazione_ids[0])
doc_vals = self.pool.get('spesometro.comunicazione').compute_invoice_amounts(cr, uid, move, invoice, arg)
# Nessu quadro definito
if not arg['quadro']:
return False
# Quadro richiesto
if arg['quadro'] not in arg['quadri_richiesti']:
return False
# Valori minimi
if arg['quadro'] == 'FA':
if configurazione.quadro_fa_limite_importo_line :
if not doc_vals.get('amount_untaxed', 0) or doc_vals.get('amount_untaxed', 0) < configurazione.quadro_fa_limite_importo_line:
return False
if arg['quadro'] == 'SA':
if configurazione.quadro_sa_limite_importo_line :
if not doc_vals.get('amount_total', 0) or doc_vals.get('amount_total', 0) < configurazione.quadro_sa_limite_importo_line:
return False
if arg['quadro'] == 'BL':
if configurazione.quadro_bl_limite_importo_line :
if not doc_vals.get('amount_total', 0) or doc_vals.get('amount_total', 0) < configurazione.quadro_bl_limite_importo_line:
return False
if arg['quadro'] == 'SE':
if configurazione.quadro_se_limite_importo_line :
if not doc_vals.get('amount_untaxed', 0) or doc_vals.get('amount_untaxed', 0) < configurazione.quadro_se_limite_importo_line:
return False
# Operazioni con San Marino Escluse se richiesta forma aggregata
if arg['formato_dati'] == 'aggregati' and self.partner_is_from_san_marino(cr, uid, move, invoice, arg):
return False
return True
def get_define_quadro(self, cr, uid, move, invoice, arg):
quadro = False
operazione = arg.get('operazione')
# Forma aggregata
if arg['formato_dati'] == 'aggregati':
if operazione == 'FA' or operazione == 'DR':
quadro = 'FA'
elif operazione == 'SA': # Operazioni senza fattura
quadro = 'SA'
elif (operazione == 'BL1') or (operazione == 'BL2') or (operazione == 'BL2'):
quadro = 'BL'
# Forma analitica
if arg['formato_dati'] == 'analitici':
# Priorità x San Marino -> quadro SE
if self.partner_is_from_san_marino(cr, uid, move, invoice, arg):
operazione = 'BL3'
# Impostazioni anagrafiche partner
if operazione == 'FA' or operazione == 'DR':
if arg.get('segno') == 'attiva':
quadro = 'FE'
elif arg.get('segno') == 'passiva':
quadro = 'FR'
elif operazione == 'SA': # Operazioni senza fattura
quadro = 'DF'
elif operazione == 'BL2': #Operazioni con soggetti non residenti
quadro = 'FN'
elif operazione == 'BL1' or operazione == 'BL3': #Operazioni con paesi con fiscalità privilegiata - Acquisti di servizi da soggetti non residenti
quadro = 'SE'
# Note di variazione
if operazione == 'FE' and 'refund' in move.journal_id.type:
operazione = 'NE'
elif operazione == 'FR' and 'refund' in move.journal_id.type:
operazione = 'NR'
return quadro
def genera_comunicazione(self, cr, uid, params, context=None):
def _get_periods(cr, uid, params, context=None):
'''
Definizione periodi di competenza
'''
sql_select = "SELECT p.id FROM account_period p "
sql_where = " WHERE p.special = False "
search_params = {}
# Periodo annuale
if params.get('periodo') == 'anno':
period_date_start = datetime.date(params.get('anno') , 1, 1)
period_date_stop = datetime.date(params.get('anno') , 12, 31)
sql_where += " AND p.date_start >= date(%(period_date_start)s) AND p.date_stop <=date(%(period_date_stop)s) "
search_params.update({
'period_date_start' : period_date_start,
'period_date_stop' : period_date_stop
})
# Periodo mensile
if params.get('periodo') == 'mese':
period_date_start = datetime.date(params.get('anno') , int(params.get('mese')), 1)
sql_where += " AND p.date_start = date(%(period_date_start)s) "
search_params.update({
'period_date_start' : period_date_start,
})
# Periodo trimestrale
if params.get('periodo') == 'trimestre':
if params.get('trimestre') == 1:
period_date_start = datetime.date(params.get('anno') , 1, 1)
period_date_start = datetime.date(params.get('anno') , 3, 31)
elif params.get('trimestre') == 2:
period_date_start = datetime.date(params.get('anno') , 3, 1)
period_date_start = datetime.date(params.get('anno') , 6, 30)
elif params.get('trimestre') == 2:
period_date_start = datetime.date(params.get('anno') , 7, 1)
period_date_start = datetime.date(params.get('anno') , 9, 30)
elif params.get('trimestre') == 2:
period_date_start = datetime.date(params.get('anno') , 10, 1)
period_date_start = datetime.date(params.get('anno') , 12, 31)
else:
raise orm.except_orm(_('Dato errato!'),_("Errore nel valore del trimestre") )
sql_where += " AND p.date_start >= date(%(period_date_start)s) AND p.date_stop <=date(%(period_date_stop)s) "
search_params.update({
'period_date_start' : period_date_start,
'period_date_stop' : period_date_stop
})
sql = sql_select + sql_where
cr.execute(sql, search_params)
periods = [i[0] for i in cr.fetchall()]
return periods
def _genera_testata(cr, uid, params, context=None):
'''
Generazione testata dichiarazione
'''
company = self.pool.get('res.company').browse(cr, uid, params['company_id'])
# progressivo telematico :" il progressivo deve essere univoco e crescente (con incrementi di una unità per ogni file prodotto)"
if params['tipo'] == 'ordinaria':
com_search = [('tipo', '=', 'ordinaria')]
com_last_ids = self.search(cr, uid, com_search, order='progressivo_telematico desc', limit=1)
com_next_prg = 1
if com_last_ids:
com_next_prg = self.browse(cr, uid, com_last_ids[0]).progressivo_telematico + 1
progressivo_telematico = com_next_prg
# vat
if company.partner_id.vat:
partita_iva = company.partner_id.vat[2:]
else:
partita_iva = '{:11s}'.format("".zfill(11))
# codice fiscale soggetto incaricato alla trasmissione
codice_fiscale_incaricato_trasmissione=''
if params.get('tipo_fornitore') == '10' and params.get('partner_intermediario', False):
partner_intermediario = self.pool.get('res.partner').browse(cr, uid, params.get('partner_intermediario'))
codice_fiscale_incaricato_trasmissione = partner_intermediario.fiscalcode or False
# Soggetto con impegno alla trasmissione
if params.get('tipo_fornitore') == '10':
soggetto_trasmissione_impegno = '2'
else:
soggetto_trasmissione_impegno = '1'
# Persona fisica o giuridica
# Considerazione: se se lunghezza codice fiscale < 16 allora c'è la P.Iva e quindi trattasi di soggetto giuridico
tipo_persona = 'persona_fisica'
if len(company.partner_id.fiscalcode) < 16:
tipo_persona = 'persona_giuridica'
values = {
'company_id' : company.id,
'codice_fiscale_fornitore' : company.partner_id.fiscalcode,
'tipo' : params.get('tipo', False),
'periodo' : params.get('periodo', False),
'anno' : params.get('anno', False),
'mese' : params.get('mese', False),
'trimestre' : params.get('trimestre', False),
'progressivo_telematico' : progressivo_telematico or False,
'tipo_fornitore' : params.get('tipo_fornitore', False),
'formato_dati' : params.get('formato_dati', False),
'soggetto_codice_fiscale' : company.partner_id and company.partner_id.fiscalcode or '',
'soggetto_partitaIVA' : partita_iva,
'soggetto_telefono' : company.partner_id and company.partner_id.address[0].phone or '',
'soggetto_fax' : company.partner_id and company.partner_id.address[0].fax or '',
'soggetto_email' : company.partner_id and company.partner_id.address[0].email or '',
'soggetto_forma_giuridica' : tipo_persona,
'soggetto_pg_denominazione' : company.partner_id and company.partner_id.name or company.name or '',
'soggetto_cm_forma_giuridica' : tipo_persona,
'soggetto_cm_pg_denominazione' : company.partner_id and company.partner_id.name or company.name or '',
'soggetto_trasmissione_codice_fiscale' : codice_fiscale_incaricato_trasmissione,
'soggetto_trasmissione_impegno' : soggetto_trasmissione_impegno,
}
comunicazione_id = self.create(cr, uid, values)
return comunicazione_id
# Esistenza record di configurazione per l'anno della comunicazione
configurazione_ids = self.pool.get('spesometro.configurazione').search(cr, uid, [('anno', '=', params.get('anno'))])
if not configurazione_ids:
raise orm.except_orm(_('Configurazione mancante!'),_("Configurare l'anno relativo alla comunicazione") )
configurazione = self.pool.get('spesometro.configurazione').browse(cr, uid, configurazione_ids[0])
# Testata comunicazione
comunicazione_id = _genera_testata(cr, uid, params, context=None)
period_obj = self.pool.get('account.period')
journal_obj = self.pool.get('account.journal')
partner_obj = self.pool.get('res.partner')
account_move_obj = self.pool.get('account.move')
invoice_obj = self.pool.get('account.invoice')
# periods
period_ids = _get_periods(cr, uid, params, context=None)
# journal
journal_search = [('spesometro','=', True)]
journal_ids = journal_obj.search(cr, uid, journal_search, context=context)
# Partners to exclude
partner_search = [('spesometro_escludi','=', True)]
partner_to_exclude_ids = partner_obj.search(cr, uid, partner_search, context=context)
move_search = [('company_id', '=', params['company_id']),('period_id','in', period_ids), ('journal_id','in', journal_ids), ('partner_id','not in', partner_to_exclude_ids)]
move_ids = account_move_obj.search(cr, uid, move_search, context=context)
for move in self.pool.get('account.move').browse(cr, uid, move_ids):
# Test move validate
if not move.partner_id:
continue
# Invoice
invoice_search = [('move_id','=', move.id)]
invoice_ids = invoice_obj.search(cr, uid, invoice_search, context=context)
if not invoice_ids:
continue
invoice = invoice_obj.browse(cr,uid, invoice_ids[0])
# Config spesometro
operazione = False
operazione_iva_non_esposta = False
operazione = move.journal_id.spesometro_operazione
operazione_iva_non_esposta = move.journal_id.spesometro_IVA_non_esposta
segno = move.journal_id.spesometro_segno
if move.partner_id.spesometro_operazione:
operazione = move.partner_id.spesometro_operazione
operazione_iva_non_esposta = move.partner_id.spesometro_IVA_non_esposta
arg = {
'comunicazione_id' : comunicazione_id,
'segno' : segno,
'operazione_iva_non_esposta' : operazione_iva_non_esposta,
'operazione' : operazione,
'formato_dati' : params['formato_dati'],
'quadri_richiesti' : params['quadri_richiesti'],
}
# Quadro di competenza
quadro = self.get_define_quadro(cr, uid, move, invoice, arg)
arg.update({'quadro': quadro})
# Test operazione da includere nella comunicazione
if not self.validate_operation(cr, uid, move, invoice, arg):
continue
if quadro == 'FA':
line_id = self.pool.get('spesometro.comunicazione.line.fa').add_line(cr, uid, move, invoice, arg)
if quadro == 'SA':
line_id = self.pool.get('spesometro.comunicazione.line.sa').add_line(cr, uid, move, invoice, arg)
if quadro == 'BL':
line_id = self.pool.get('spesometro.comunicazione.line.bl').add_line(cr, uid, move, invoice, arg)
if quadro == 'SE':
line_id = self.pool.get('spesometro.comunicazione.line.se').add_line(cr, uid, move, invoice, arg)
# Arrotonda importi su valori raggruppati -> troncare i decimali
if params['formato_dati'] == 'aggregati':
self.truncate_values(cr, uid, [comunicazione_id])
# Rimuove le linee che non rientrano nei limiti ed effettua un controllo formale sull'intera comunicazione
self.validate_lines(cr, uid, [comunicazione_id])
# Update for compute totals
self.write(cr, uid, [comunicazione_id],{})
return True
class spesometro_comunicazione_line_FA(orm.Model):
    '''
    Quadro FA - invoice-documented operations reported in aggregated form:
    one line per partner, with separate totals for active (sales) and
    passive (purchase) operations.
    '''
    _name = "spesometro.comunicazione.line.fa"
    _description = "Spesometro - Comunicazione linee quadro FA"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'partita_iva': fields.char('Partita IVA', size=11),
        'codice_fiscale': fields.char('Codice Fiscale', size=16),
        'documento_riepilogativo': fields.boolean('Documento Riepilogativo'),
        # leasing/rental category (copied from the partner)
        'noleggio': fields.selection((('A','Autovettura'), ('B','Caravan'), ('C','Altri Veicoli'), ('D','Unità da diporto'), ('E','Aeromobii')),'Leasing'),
        'numero_operazioni_attive_aggregate': fields.integer('Nr op. attive', size=16),
        'numero_operazioni_passive_aggregate': fields.integer('Nr op. passive', size=16),
        # active (sales) totals
        'attive_imponibile_non_esente': fields.float('Tot impon., non impon ed esenti', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'attive_imposta': fields.float(' Tot imposta', digits_compute=dp.get_precision('Account'), help="Totale imposta"),
        'attive_operazioni_iva_non_esposta': fields.float('Totale operaz. IVA non esposta', digits_compute=dp.get_precision('Account'), help="Totale operazioni con IVA non esposta"),
        'attive_note_variazione': fields.float('Totale note variazione', digits_compute=dp.get_precision('Account'), help="Totale note di variazione a debito per la controparte"),
        'attive_note_variazione_imposta': fields.float('Totale imposta note variazione', digits_compute=dp.get_precision('Account'), help="Totale imposta sulle note di variazione a debito"),
        # passive (purchase) totals
        'passive_imponibile_non_esente': fields.float('Tot impon., non impon ed esenti', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'passive_imposta': fields.float('Totale imposta', digits_compute=dp.get_precision('Account'), help="Totale imposta"),
        'passive_operazioni_iva_non_esposta': fields.float('Totale operaz. IVA non esposta', digits_compute=dp.get_precision('Account'), help="Totale operazioni con IVA non esposta"),
        'passive_note_variazione': fields.float('Totale note variazione', digits_compute=dp.get_precision('Account'), help="Totale note di variazione a credito per la controparte"),
        'passive_note_variazione_imposta': fields.float('Totale imposta note variazione', digits_compute=dp.get_precision('Account'), help="Totale imposta sulle note di variazione a credito"),
    }
def add_line(self, cr, uid, move, invoice, arg):
comunicazione_lines_obj = self.pool.get('spesometro.comunicazione.line.fa')
comunicazione_id = arg.get('comunicazione_id', False)
com_line_search = [('comunicazione_id','=',comunicazione_id), ('partner_id', '=', move.partner_id.id)]
com_line_ids = self.search(cr, uid, com_line_search)
val = {}
# Valori documento
doc_vals = self.pool.get('spesometro.comunicazione').compute_invoice_amounts(cr, uid, move, invoice, arg)
# New partner
if not com_line_ids:
partita_iva =''
if move.partner_id.vat:
partita_iva = move.partner_id.vat[2:]
documento_riepilogativo = False
if arg['operazione'] == 'DR':
documento_riepilogativo = True
val = {
'comunicazione_id' : comunicazione_id,
'partner_id' : move.partner_id.id,
'partita_iva' : partita_iva,
'codice_fiscale' : move.partner_id.fiscalcode or '',
'noleggio' : move.partner_id.spesometro_leasing or '',
'documento_riepilogativo' : documento_riepilogativo,
}
# attive
if arg.get('segno', False) == 'attiva':
val['numero_operazioni_attive_aggregate'] = 1
if 'refund' in move.journal_id.type:
val['attive_note_variazione'] = doc_vals.get('amount_untaxed', 0)
val['attive_note_variazione_imposta'] = doc_vals.get('amount_tax', 0)
else:
if arg.get('operazione_iva_non_esposta', False):
val['attive_operazioni_iva_non_esposta' ] = doc_vals.get('amount_total', 0)
else:
val['attive_imponibile_non_esente' ] = doc_vals.get('amount_untaxed', 0)
val['attive_imposta'] =doc_vals.get('amount_tax', 0)
# passive
else:
val['numero_operazioni_passive_aggregate'] = 1
if 'refund' in move.journal_id.type:
val['passive_note_variazione'] = doc_vals.get('amount_untaxed', 0)
val['passive_note_variazione_imposta'] = doc_vals.get('amount_tax', 0)
else:
if arg.get('operazione_iva_non_esposta', False):
val['passive_operazioni_iva_non_esposta' ] = doc_vals.get('amount_total', 0)
else:
val['passive_imponibile_non_esente' ] = doc_vals.get('amount_untaxed', 0)
val['passive_imposta' ] = doc_vals.get('amount_tax', 0)
# Partner already exists
if com_line_ids:
for com_line in self.browse(cr, uid, com_line_ids):
# attive
if arg.get('segno', False) == 'attiva':
val['numero_operazioni_attive_aggregate'] = com_line.numero_operazioni_attive_aggregate + 1
if 'refund' in move.journal_id.type:
val['attive_note_variazione'] = com_line.attive_note_variazione + doc_vals.get('amount_untaxed', 0)
val['attive_note_variazione_imposta'] = com_line.attive_note_variazione_imposta + doc_vals.get('amount_tax', 0)
else:
if arg.get('operazione_iva_non_esposta', False):
val['attive_operazioni_iva_non_esposta' ] = com_line.attive_operazioni_iva_non_esposta + doc_vals.get('amount_total', 0)
else:
val['attive_imponibile_non_esente' ] = com_line.attive_imponibile_non_esente + doc_vals.get('amount_untaxed', 0)
val['attive_imposta' ] = com_line.attive_imposta + doc_vals.get('amount_tax', 0)
# passive
else:
val['numero_operazioni_passive_aggregate'] = com_line.numero_operazioni_passive_aggregate + 1
if 'refund' in move.journal_id.type:
val['passive_note_variazione'] = com_line.passive_note_variazione + doc_vals.get('amount_untaxed', 0)
val['passive_note_variazione_imposta'] = com_line.passive_note_variazione_imposta + doc_vals.get('amount_tax', 0)
else:
if arg.get('operazione_iva_non_esposta', False):
val['passive_operazioni_iva_non_esposta' ] = com_line.passive_operazioni_iva_non_esposta + doc_vals.get('amount_total', 0)
else:
val['passive_imponibile_non_esente' ] = com_line.passive_imponibile_non_esente + doc_vals.get('amount_untaxed', 0)
val['passive_imposta' ] = com_line.passive_imposta + doc_vals.get('amount_tax', 0)
if com_line_ids:
line_id = com_line.id
self.write(cr, uid, [com_line.id], val)
else:
line_id = self.create(cr, uid, val)
return line_id
class spesometro_comunicazione_line_SA(orm.Model):
    """QUADRO SA - Operazioni senza fattura esposte in forma aggregata.

    One aggregated row per partner: count of operations plus their total
    amount.
    """
    _name = "spesometro.comunicazione.line.sa"
    _description = "Spesometro - Comunicazione linee quadro SA"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'codice_fiscale': fields.char('Codice Fiscale', size=16),
        'numero_operazioni': fields.integer('Numero operazioni'),
        'importo_complessivo': fields.float('Importo complessivo', digits_compute=dp.get_precision('Account')),
        # Label typo fixed: 'Aeromobii' -> 'Aeromobili'
        'noleggio': fields.selection((('A', 'Autovettura'), ('B', 'Caravan'), ('C', 'Altri Veicoli'), ('D', 'Unità da diporto'), ('E', 'Aeromobili')), 'Leasing'),
    }

    def add_line(self, cr, uid, move, invoice, arg):
        """Aggregate one document into the quadro SA line of its partner.

        Creates the partner's aggregated line when missing, otherwise
        increments the operation counter and adds the document total.

        :return: id of the created or updated line
        """
        # (The original fetched 'spesometro.comunicazione.line.fa' - note:
        # the FA model, a copy/paste leftover - into an unused variable;
        # that lookup was removed.)
        comunicazione_id = arg.get('comunicazione_id', False)
        com_line_search = [('comunicazione_id', '=', comunicazione_id),
                          ('partner_id', '=', move.partner_id.id)]
        com_line_ids = self.search(cr, uid, com_line_search)
        val = {}
        # Amounts of this document
        doc_vals = self.pool.get('spesometro.comunicazione').compute_invoice_amounts(cr, uid, move, invoice, arg)
        # New partner: initialise the aggregate with this first document
        if not com_line_ids:
            val = {
                'comunicazione_id': comunicazione_id,
                'partner_id': move.partner_id.id,
                'codice_fiscale': move.partner_id.fiscalcode or False,
                'noleggio': move.partner_id.spesometro_leasing or False,
                'numero_operazioni': 1,
                'importo_complessivo': doc_vals.get('amount_total', 0),
            }
        # Partner already present: accumulate on the existing line
        if com_line_ids:
            for com_line in self.browse(cr, uid, com_line_ids):
                val['numero_operazioni'] = com_line.numero_operazioni + 1
                val['importo_complessivo'] = com_line.importo_complessivo + doc_vals.get('amount_total', 0)
        if com_line_ids:
            line_id = com_line.id
            self.write(cr, uid, [com_line.id], val)
        else:
            line_id = self.create(cr, uid, val)
        return line_id
class spesometro_comunicazione_line_BL(orm.Model):
    """QUADRO BL

    - Operazioni con paesi con fiscalità privilegiata (è obbligatorio compilare le sezioni BL001, BL002
      e almeno un campo delle sezioni BL003, BL004, BL005, BL006, BL007, BL008)
    - Operazioni con soggetti non residenti (è obbligatorio compilare le sezioni BL001, BL002 e almeno
      un campo delle sezioni BL003 e BL006)
    - Acquisti di servizi da soggetti non residenti (è obbligatorio compilare le sezioni BL001, BL002 e
      almeno un campo della sezione BL006)
    """
    _name = "spesometro.comunicazione.line.bl"
    _description = "Spesometro - Comunicazione linee quadro BL"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'codice_fiscale': fields.char('Codice Fiscale', size=16),
        'numero_operazioni': fields.integer('Numero operazioni'),
        # NOTE(review): declared integer although every other quadro uses
        # fields.float for amounts and digits_compute has no effect on an
        # integer; left unchanged because switching the type would alter the
        # DB column - confirm and migrate deliberately.
        'importo_complessivo': fields.integer('Importo complessivo', digits_compute=dp.get_precision('Account')),
        # Label typo fixed: 'Aeromobii' -> 'Aeromobili'
        'noleggio': fields.selection((('A', 'Autovettura'), ('B', 'Caravan'), ('C', 'Altri Veicoli'), ('D', 'Unità da diporto'), ('E', 'Aeromobili')), 'Leasing'),
        'pf_cognome': fields.char('Cognome', size=24, help=""),
        'pf_nome': fields.char('Nome', size=20, help=""),
        'pf_data_nascita': fields.date('Data di nascita'),
        'pf_comune_stato_nascita': fields.char('Comune o stato estero di nascita', size=40),
        'pf_provincia_nascita': fields.char('Provincia', size=2),
        'pf_codice_stato_estero': fields.char('Codice Stato Estero', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'pg_denominazione': fields.char('Denominazione/Ragione sociale', size=60),
        'pg_citta_estera_sede_legale': fields.char('Città estera delle Sede legale', size=40),
        'pg_codice_stato_estero': fields.char('Codice Stato Estero', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'pg_indirizzo_sede_legale': fields.char('Indirizzo sede legale', size=60),
        'codice_identificativo_IVA': fields.char('Codice identificativo IVA', size=16),
        # Label typo fixed: 'pesei' -> 'paesi'
        'operazione_fiscalita_privilegiata': fields.boolean('Operazione con paesi con fiscalità privilegiata'),
        'operazione_con_soggetti_non_residenti': fields.boolean('Operazione con soggetto non residente'),
        'Acquisto_servizi_da_soggetti_non_residenti': fields.boolean('Acquisto di servizi da soggetti non residenti'),
        'attive_importo_complessivo': fields.float('Tot operaz. attive impon., non impon ed esenti', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'attive_imposta': fields.float('Tot operaz. attive imposta', digits_compute=dp.get_precision('Account'), help="Totale imposta"),
        'attive_non_sogg_cessione_beni': fields.float('Operaz.attive non soggette ad IVA - Cessione beni', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'attive_non_sogg_servizi': fields.float('Operaz.attive non soggette ad IVA - Servizi', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'attive_note_variazione': fields.float('Totale note variazione', digits_compute=dp.get_precision('Account'), help="Totale note di variazione a debito per la controparte"),
        'attive_note_variazione_imposta': fields.float('Totale imposta note variazione', digits_compute=dp.get_precision('Account'), help="Totale imposta sulle note di variazione a debito"),
        'passive_importo_complessivo': fields.float('Tot operaz. passive impon., non impon ed esenti', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'passive_imposta': fields.float('Tot operaz. passive imposta', digits_compute=dp.get_precision('Account'), help="Totale imposta"),
        'passive_non_sogg_importo_complessivo': fields.float('Operaz.passive non soggette ad IVA', digits_compute=dp.get_precision('Account'), help="Totale operazioni imponibili, non imponibili ed esenti"),
        'passive_note_variazione': fields.float('Totale note variazione', digits_compute=dp.get_precision('Account'), help="Totale note di variazione a debito per la controparte"),
        'passive_note_variazione_imposta': fields.float('Totale imposta note variazione', digits_compute=dp.get_precision('Account'), help="Totale imposta sulle note di variazione a debito"),
    }

    def add_line(self, cr, uid, move, invoice, arg):
        """Aggregate one document into the quadro BL line of its partner.

        Creates the partner's line when missing, otherwise accumulates the
        document amounts onto the stored totals.

        :return: id of the created or updated line

        Bug fixes with respect to the original implementation:
        - ``browse`` was misspelled ``borwse`` (AttributeError when the
          province lookup path was reached);
        - the province code was stored in an unused ``prov_nascita_code``
          variable, leaving ``prov_code`` always False;
        - in the existing-partner branch the operation-type flags were read
          from ``val`` while ``val`` was still empty, raising KeyError; the
          flags are now derived once from the partner and used everywhere.
        """
        comunicazione_id = arg.get('comunicazione_id', False)
        com_line_search = [('comunicazione_id', '=', comunicazione_id),
                          ('partner_id', '=', move.partner_id.id)]
        com_line_ids = self.search(cr, uid, com_line_search)
        val = {}
        # Amounts of this document
        doc_vals = self.pool.get('spesometro.comunicazione').compute_invoice_amounts(cr, uid, move, invoice, arg)
        # Operation-type flags derived from the partner configuration;
        # needed both when creating a new line and when updating one.
        operazione = move.partner_id.spesometro_operazione
        fiscalita_privilegiata = operazione == 'BL1'
        soggetti_non_residenti = operazione == 'BL2'
        acquisto_servizi_non_residenti = operazione == 'BL3'
        # New partner: build a complete set of values for a fresh line
        if not com_line_ids:
            # Province of birth (best effort; stays False when unresolved)
            prov_code = False
            if move.partner_id.birth_city.name:
                city_data = move.partner_id.address[0]._set_vals_city_data(cr, uid, {'city': move.partner_id.birth_city.name})
                prov_id = city_data.get('province_id', False)
                if prov_id:
                    prov = self.pool.get('res.province').browse(cr, uid, prov_id)
                    prov_code = prov.code
            val = {
                'comunicazione_id': comunicazione_id,
                'partner_id': move.partner_id.id,
                'codice_fiscale': move.partner_id.fiscalcode or False,
                'noleggio': move.partner_id.spesometro_leasing or False,
                'pf_cognome': move.partner_id.fiscalcode_surname or False,
                'pf_nome': move.partner_id.fiscalcode_firstname or False,
                'pf_data_nascita': move.partner_id.birth_date or False,
                'pf_comune_stato_nascita': move.partner_id.birth_city.name or False,
                'pf_provincia_nascita': prov_code or False,
                'pf_codice_stato_estero': move.partner_id.address[0].country_id.codice_stato_agenzia_entrate or '',
                'pg_denominazione': move.partner_id.name or False,
                'pg_citta_estera_sede_legale': move.partner_id.address[0].city or False,
                'pg_codice_stato_estero': move.partner_id.address[0].country_id.codice_stato_agenzia_entrate or '',
                'pg_indirizzo_sede_legale': move.partner_id.address[0].street or False,
                'operazione_fiscalita_privilegiata': fiscalita_privilegiata,
                'operazione_con_soggetti_non_residenti': soggetti_non_residenti,
                'Acquisto_servizi_da_soggetti_non_residenti': acquisto_servizi_non_residenti,
            }
            # Sales side
            if arg.get('segno', False) == 'attiva':
                if fiscalita_privilegiata or soggetti_non_residenti:
                    val['attive_importo_complessivo'] = doc_vals.get('amount_total', 0)
                    val['attive_imposta'] = doc_vals.get('amount_tax', 0)
                if fiscalita_privilegiata:
                    # NOTE(review): spesometro_operazione is compared against
                    # 'cessioni' although this branch requires it to be 'BL1',
                    # so the cessione-beni total is unreachable; kept as in
                    # the original - probably a different field was intended.
                    if move.partner_id.spesometro_operazione == 'cessioni':
                        val['attive_non_sogg_cessione_beni'] = doc_vals.get('amount_total', 0)
                    else:
                        val['attive_non_sogg_servizi'] = doc_vals.get('amount_total', 0)
                if 'refund' in move.journal_id.type:
                    val['attive_note_variazione'] = doc_vals.get('amount_untaxed', 0)
                    val['attive_note_variazione_imposta'] = doc_vals.get('amount_tax', 0)
            # Purchases side
            else:
                if fiscalita_privilegiata or soggetti_non_residenti or acquisto_servizi_non_residenti:
                    val['passive_importo_complessivo'] = doc_vals.get('amount_total', 0)
                    val['passive_imposta'] = doc_vals.get('amount_tax', 0)
                if fiscalita_privilegiata:
                    val['passive_non_sogg_importo_complessivo'] = doc_vals.get('amount_total', 0)
                if 'refund' in move.journal_id.type:
                    val['passive_note_variazione'] = doc_vals.get('amount_untaxed', 0)
                    val['passive_note_variazione_imposta'] = doc_vals.get('amount_tax', 0)
        # Partner already present: add this document to the stored totals
        if com_line_ids:
            for com_line in self.browse(cr, uid, com_line_ids):
                # Sales side
                if arg.get('segno', False) == 'attiva':
                    if fiscalita_privilegiata or soggetti_non_residenti:
                        val['attive_importo_complessivo'] = com_line.attive_importo_complessivo + doc_vals.get('amount_total', 0)
                        val['attive_imposta'] = com_line.attive_imposta + doc_vals.get('amount_tax', 0)
                    if fiscalita_privilegiata:
                        if move.partner_id.spesometro_operazione == 'cessioni':
                            val['attive_non_sogg_cessione_beni'] = com_line.attive_non_sogg_cessione_beni + doc_vals.get('amount_total', 0)
                        else:
                            val['attive_non_sogg_servizi'] = com_line.attive_non_sogg_servizi + doc_vals.get('amount_total', 0)
                    if 'refund' in move.journal_id.type:
                        val['attive_note_variazione'] = com_line.attive_note_variazione + doc_vals.get('amount_untaxed', 0)
                        val['attive_note_variazione_imposta'] = com_line.attive_note_variazione_imposta + doc_vals.get('amount_tax', 0)
                # Purchases side
                else:
                    if fiscalita_privilegiata or soggetti_non_residenti or acquisto_servizi_non_residenti:
                        val['passive_importo_complessivo'] = com_line.passive_importo_complessivo + doc_vals.get('amount_total', 0)
                        val['passive_imposta'] = com_line.passive_imposta + doc_vals.get('amount_tax', 0)
                    if fiscalita_privilegiata:
                        val['passive_non_sogg_importo_complessivo'] = com_line.passive_non_sogg_importo_complessivo + doc_vals.get('amount_total', 0)
                    if 'refund' in move.journal_id.type:
                        val['passive_note_variazione'] = com_line.passive_note_variazione + doc_vals.get('amount_untaxed', 0)
                        val['passive_note_variazione_imposta'] = com_line.passive_note_variazione_imposta + doc_vals.get('amount_tax', 0)
        if com_line_ids:
            line_id = com_line.id
            self.write(cr, uid, [com_line.id], val)
        else:
            line_id = self.create(cr, uid, val)
        return line_id
class spesometro_comunicazione_line_FE(orm.Model):
    """Quadro FE detail rows: one line per document, amount and tax split."""
    _name = "spesometro.comunicazione.line.fe"
    _description = "Spesometro - Comunicazione linee quadro FE"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'partita_iva': fields.char('Partita IVA', size=11),
        'codice_fiscale': fields.char('Codice Fiscale', size=16),
        'documento_riepilogativo': fields.boolean('Documento Riepilogativo'),
        # Label typo fixed: 'Aeromobii' -> 'Aeromobili'
        'noleggio': fields.selection((('A', 'Autovettura'), ('B', 'Caravan'), ('C', 'Altri Veicoli'), ('D', 'Unità da diporto'), ('E', 'Aeromobili')), 'Leasing'),
        'autofattura': fields.boolean('Autofattura'),
        'data_documento': fields.date('Data documento'),
        'data_registrazione': fields.date('Data registrazione'),
        'numero_fattura': fields.char('Numero Fattura - Doc riepilog.', size=16),
        'importo': fields.float('Importo', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }
class spesometro_comunicazione_line_FR(orm.Model):
    """Quadro FR detail rows: one line per document, amount and tax split."""
    _name = "spesometro.comunicazione.line.fr"
    _description = "Spesometro - Comunicazione linee quadro FR"
    _columns = {
        # Parent report; rows are removed together with it (ondelete='cascade')
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        # Italian VAT number without the country prefix (11 digits)
        'partita_iva': fields.char('Partita IVA', size=11),
        'documento_riepilogativo': fields.boolean('Documento Riepilogativo'),
        'data_documento': fields.date('Data documento'),
        'data_registrazione': fields.date('Data registrazione'),
        'iva_non_esposta': fields.boolean('IVA non esposta'),
        'reverse_charge': fields.boolean('Reverse charge'),
        'autofattura': fields.boolean('Autofattura'),
        # Untaxed amount and tax amount of the document
        'importo': fields.float('Importo', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }
class spesometro_comunicazione_line_NE(orm.Model):
    """Quadro NE detail rows: one line per 'nota' document."""
    _name = "spesometro.comunicazione.line.ne"
    _description = "Spesometro - Comunicazione linee quadro NE"
    _columns = {
        # Parent report; rows are removed together with it (ondelete='cascade')
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        # Italian VAT number without the country prefix (11 digits)
        'partita_iva': fields.char('Partita IVA', size=11),
        'codice_fiscale': fields.char('Codice Fiscale', size=16),
        'data_emissione': fields.date('Data emissione'),
        'data_registrazione': fields.date('Data registrazione'),
        'numero_nota': fields.char('Numero Nota', size=16),
        # Untaxed amount and tax amount of the document
        'importo': fields.float('Importo', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }
class spesometro_comunicazione_line_NR(orm.Model):
    """Quadro NR detail rows: one line per document."""
    _name = "spesometro.comunicazione.line.nr"
    _description = "Spesometro - Comunicazione linee quadro NR"
    _columns = {
        # Parent report; rows are removed together with it (ondelete='cascade')
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        # Italian VAT number without the country prefix (11 digits)
        'partita_iva': fields.char('Partita IVA', size=11),
        'data_documento': fields.date('Data documento'),
        'data_registrazione': fields.date('Data registrazione'),
        # Untaxed amount and tax amount of the document
        'importo': fields.float('Importo', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }
class spesometro_comunicazione_line_DF(orm.Model):
    """Quadro DF detail rows: one line per operation (date plus amount)."""
    _name = "spesometro.comunicazione.line.df"
    _description = "Spesometro - Comunicazione linee quadro DF"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'codice_fiscale': fields.char('Codice Fiscale', size=16),
        'data_operazione': fields.date('Data operazione'),
        'importo': fields.float('Importo', digits_compute=dp.get_precision('Account')),
        # Label typo fixed: 'Aeromobii' -> 'Aeromobili'
        'noleggio': fields.selection((('A', 'Autovettura'), ('B', 'Caravan'), ('C', 'Altri Veicoli'), ('D', 'Unità da diporto'), ('E', 'Aeromobili')), 'Leasing'),
    }
class spesometro_comunicazione_line_FN(orm.Model):
    """Quadro FN detail rows: one line per document, with the counterpart's
    personal (pf_*) or company (pg_*) identification data."""
    _name = "spesometro.comunicazione.line.fn"
    _description = "Spesometro - Comunicazione linee quadro FN"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'pf_cognome': fields.char('Cognome', size=24, help=""),
        'pf_nome': fields.char('Nome', size=20, help=""),
        'pf_data_nascita': fields.date('Data di nascita'),
        'pf_comune_stato_nascita': fields.char('Comune o stato estero di nascita', size=40),
        'pf_provincia_nascita': fields.char('Provincia', size=2),
        'pf_codice_stato_estero_domicilio': fields.char('Codice Stato Estero del Domicilio', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'pg_denominazione': fields.char('Denominazione/Ragione sociale', size=60),
        'pg_citta_estera_sede_legale': fields.char('Città estera delle Sede legale', size=40),
        'pg_codice_stato_estero_domicilio': fields.char('Codice Stato Estero del Domicilio', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'pg_indirizzo_sede_legale': fields.char('Indirizzo legale', size=40),
        'data_emissione': fields.date('Data emissione'),
        'data_registrazione': fields.date('Data registrazione'),
        'numero_fattura': fields.char('Numero Fattura/Doc riepilog.', size=16),
        # Label typo fixed: 'Aeromobii' -> 'Aeromobili'
        'noleggio': fields.selection((('A', 'Autovettura'), ('B', 'Caravan'), ('C', 'Altri Veicoli'), ('D', 'Unità da diporto'), ('E', 'Aeromobili')), 'Leasing'),
        'importo': fields.float('Importo', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }
class spesometro_comunicazione_line_SE(orm.Model):
    """QUADRO SE - Acquisti di servizi da non residenti e Acquisti da operatori di San Marino."""
    _name = "spesometro.comunicazione.line.se"
    _description = "Spesometro - Comunicazione linee quadro SE"
    _columns = {
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'pf_cognome': fields.char('Cognome', size=24, help=""),
        'pf_nome': fields.char('Nome', size=20, help=""),
        'pf_data_nascita': fields.date('Data di nascita'),
        'pf_comune_stato_nascita': fields.char('Comune o stato estero di nascita', size=40),
        'pf_provincia_nascita': fields.char('Provincia', size=2),
        'pf_codice_stato_estero_domicilio': fields.char('Codice Stato Estero del Domicilio', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'pg_denominazione': fields.char('Denominazione/Ragione sociale', size=60),
        'pg_citta_estera_sede_legale': fields.char('Città estera delle Sede legale', size=40),
        'pg_codice_stato_estero_domicilio': fields.char('Codice Stato Estero del Domicilio', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'pg_indirizzo_sede_legale': fields.char('Indirizzo legale', size=40),
        'codice_identificativo_IVA': fields.char('Codice Identificativo IVA (037=San Marino)', size=3),
        'data_emissione': fields.date('Data emissione'),
        'data_registrazione': fields.date('Data registrazione'),
        'numero_fattura': fields.char('Numero Fattura/Doc riepilog.', size=16),
        'importo': fields.float('Importo/imponibile', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }

    def add_line(self, cr, uid, move, invoice, arg):
        """Create one quadro SE line for the given document.

        Unlike the aggregated quadri, SE stores one line per document, so a
        new record is always created.

        :return: id of the created line

        Bug fixes with respect to the original implementation:
        - ``browse`` was misspelled ``borwse`` (AttributeError when the
          province lookup path was reached);
        - the province code was stored in an unused ``prov_nascita_code``
          variable, leaving ``prov_code`` always False;
        - a dead partner-line search, an unused model lookup and an unused
          ``partita_iva`` computation were removed.
        """
        comunicazione_id = arg.get('comunicazione_id', False)
        # Amounts of this document
        doc_vals = self.pool.get('spesometro.comunicazione').compute_invoice_amounts(cr, uid, move, invoice, arg)
        # Province of birth (best effort; stays False when unresolved)
        prov_code = False
        if move.partner_id.birth_city.name:
            city_data = move.partner_id.address[0]._set_vals_city_data(cr, uid, {'city': move.partner_id.birth_city.name})
            prov_id = city_data.get('province_id', False)
            if prov_id:
                prov = self.pool.get('res.province').browse(cr, uid, prov_id)
                prov_code = prov.code
        # Address used for the foreign-domicile data
        address = self.pool.get('spesometro.comunicazione')._get_partner_address_obj(cr, uid, move, invoice, arg)
        # Codice identificativo IVA - only for San Marino operations (state 037)
        codice_identificativo_iva = ''
        if self.pool.get('spesometro.comunicazione').partner_is_from_san_marino(cr, uid, move, invoice, arg):
            codice_identificativo_iva = '037'
        # NOTE(review): 'codice_fiscale' and 'noleggio' are not declared in
        # this model's _columns; kept as in the original - verify whether the
        # columns are missing or the keys are leftovers.
        val = {
            'comunicazione_id': comunicazione_id,
            'partner_id': move.partner_id.id,
            'codice_fiscale': move.partner_id.fiscalcode or False,
            'noleggio': move.partner_id.spesometro_leasing or False,
            'pf_cognome': move.partner_id.fiscalcode_surname or False,
            'pf_nome': move.partner_id.fiscalcode_firstname or False,
            'pf_data_nascita': move.partner_id.birth_date or False,
            'pf_comune_stato_nascita': move.partner_id.birth_city.name or False,
            'pf_provincia_nascita': prov_code or False,
            'pf_codice_stato_estero_domicilio': address.country_id.codice_stato_agenzia_entrate or codice_identificativo_iva or '',
            'pg_denominazione': move.partner_id.name or False,
            'pg_citta_estera_sede_legale': address.city or False,
            'pg_codice_stato_estero_domicilio': address.country_id.codice_stato_agenzia_entrate or codice_identificativo_iva or '',
            'pg_indirizzo_sede_legale': address.street or False,
            'codice_identificativo_IVA': codice_identificativo_iva,
            'data_emissione': move.date,
            'data_registrazione': invoice.date_invoice or move.date,
            'numero_fattura': move.name,
            'importo': doc_vals.get('amount_untaxed', 0),
            'imposta': doc_vals.get('amount_tax', 0),
        }
        line_id = self.create(cr, uid, val)
        return line_id
class spesometro_comunicazione_line_TU(orm.Model):
    """Quadro TU detail rows: one line per document, with the counterpart's
    personal data and foreign residence."""
    _name = "spesometro.comunicazione.line.tu"
    _description = "Spesometro - Comunicazione linee quadro TU"
    _columns = {
        # Parent report; rows are removed together with it (ondelete='cascade')
        'comunicazione_id': fields.many2one('spesometro.comunicazione', 'Comunicazione', ondelete='cascade'),
        'partner_id': fields.many2one('res.partner', 'Partner'),
        'cognome': fields.char('Cognome', size=24, help=""),
        'nome': fields.char('Nome', size=20, help=""),
        'data_nascita': fields.date('Data di nascita'),
        'comune_stato_nascita': fields.char('Comune o stato estero di nascita', size=40),
        'provincia_nascita': fields.char('Provincia', size=2),
        'citta_estera_residenza': fields.char('Città Estera di residenza', size=40),
        'codice_stato_estero': fields.char('Codice Stato Estero', size=3, help="Deve essere uno di quelli presenti nella tabella 'elenco dei paesi e\
territori esteri' pubblicata nelle istruzioni del modello Unico"),
        'indirizzo_estero_residenza': fields.char('Indirizzo Estero di residenza', size=40),
        'data_emissione': fields.date('Data emissione'),
        'data_registrazione': fields.date('Data registrazione'),
        'numero_fattura': fields.char('Numero Fattura/Doc riepilog.', size=16),
        # Untaxed amount and tax amount of the document
        'importo': fields.float('Importo/imponibile', digits_compute=dp.get_precision('Account')),
        'imposta': fields.float('Imposta', digits_compute=dp.get_precision('Account')),
    }
| codeparrot/github-code-clean |
#!/usr/bin/env python
#
# Generated Thu Jul 22 14:11:34 2010 by generateDS.py.
#
import sys
import getopt
from xml.dom import minidom
from xml.dom import Node
#
# If you have installed IPython you can uncomment and use the following.
# IPython is available from http://ipython.scipy.org/.
#
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
#
# Support/utility functions.
#
def showIndent(outfile, level):
    """Write four spaces of indentation per nesting *level* to *outfile*."""
    outfile.write('    ' * level)
def quote_xml(inStr):
    """Escape '&', '<' and '"' in *inStr* for embedding in XML output."""
    result = inStr
    # '&' is escaped first so the ampersands of freshly inserted entities
    # are not escaped again.
    for raw, entity in (('&', '&amp;'), ('<', '&lt;'), ('"', '&quot;')):
        result = result.replace(raw, entity)
    return result
def quote_python(inStr):
    """Render *inStr* as a Python string literal.

    Single quotes are preferred when the text contains none, switching to a
    triple-single-quoted form for multi-line text; otherwise double quotes
    are used (escaped inside the text, tripled for multi-line).
    """
    text = inStr
    if "'" not in text:
        if '\n' not in text:
            return "'%s'" % text
        return "'''%s'''" % text
    if '"' in text:
        text = text.replace('"', '\\"')
    if '\n' not in text:
        return '"%s"' % text
    return '"""%s"""' % text
class MixedContainer:
    """One piece of mixed XML content: plain text, a scalar, or a child object.

    ``category`` selects how ``value`` is exported; ``content_type`` refines
    the formatting for simple (scalar) values.
    """
    # Constants for category:
    CategoryNone = 0
    CategoryText = 1
    CategorySimple = 2
    CategoryComplex = 3
    # Constants for content_type:
    TypeNone = 0
    TypeText = 1
    TypeString = 2
    TypeInteger = 3
    TypeFloat = 4
    TypeDecimal = 5
    TypeDouble = 6
    TypeBoolean = 7

    def __init__(self, category, content_type, name, value):
        self.category = category
        self.content_type = content_type
        self.name = name
        self.value = value

    def getCategory(self):
        return self.category

    def getContenttype(self, content_type):
        # NOTE: the *content_type* argument is ignored (generated-code
        # quirk); the stored content type is always returned.
        return self.content_type

    def getValue(self):
        return self.value

    def getName(self):
        return self.name

    def export(self, outfile, level, name):
        """Write this container as XML to *outfile*."""
        if self.category == MixedContainer.CategoryText:
            outfile.write(self.value)
            return
        if self.category == MixedContainer.CategorySimple:
            self.exportSimple(outfile, level, name)
            return
        # Remaining categories delegate to the wrapped object.
        self.value.export(outfile, level, name)

    def exportSimple(self, outfile, level, name):
        """Write a scalar as ``<name>value</name>`` with a type-specific format."""
        conversions = {
            MixedContainer.TypeString: '%s',
            MixedContainer.TypeInteger: '%d',
            MixedContainer.TypeBoolean: '%d',
            MixedContainer.TypeFloat: '%f',
            MixedContainer.TypeDecimal: '%f',
            MixedContainer.TypeDouble: '%g',
        }
        conv = conversions.get(self.content_type)
        # TypeNone / TypeText produce no output, matching the original code.
        if conv is not None:
            outfile.write('<%s>' % self.name)
            outfile.write(conv % self.value)
            outfile.write('</%s>' % self.name)

    def exportLiteral(self, outfile, level, name):
        """Write a Python-literal representation of this container."""
        if self.category in (MixedContainer.CategoryText,
                             MixedContainer.CategorySimple):
            # Text and simple values share the same one-line literal form.
            showIndent(outfile, level)
            outfile.write('MixedContainer(%d, %d, "%s", "%s"),\n' % \
                (self.category, self.content_type, self.name, self.value))
        else:
            showIndent(outfile, level)
            outfile.write('MixedContainer(%d, %d, "%s",\n' % \
                (self.category, self.content_type, self.name,))
            self.value.exportLiteral(outfile, level + 1)
            showIndent(outfile, level)
            outfile.write(')\n')
#
# Data representation classes.
#
class GenerateModel:
    """Root element holding ``Module`` and ``PythonExport`` children."""
    # Registration point for a user subclass used by factory().
    subclass = None

    def __init__(self, Module=None, PythonExport=None):
        # Fresh lists unless provided (avoids shared mutable defaults).
        self.Module = [] if Module is None else Module
        self.PythonExport = [] if PythonExport is None else PythonExport

    def factory(*args_, **kwargs_):
        """Instantiate the registered ``subclass`` when set, else this class."""
        cls = GenerateModel.subclass or GenerateModel
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)

    # --- accessors for the two child collections -------------------------
    def getModule(self):
        return self.Module
    def setModule(self, Module):
        self.Module = Module
    def addModule(self, value):
        self.Module.append(value)
    def insertModule(self, index, value):
        # NOTE: replaces the element at *index* (generated-code naming
        # quirk); it does not shift elements like list.insert().
        self.Module[index] = value
    def getPythonexport(self):
        return self.PythonExport
    def setPythonexport(self, PythonExport):
        self.PythonExport = PythonExport
    def addPythonexport(self, value):
        self.PythonExport.append(value)
    def insertPythonexport(self, index, value):
        # Same replace-at-index semantics as insertModule().
        self.PythonExport[index] = value

    # --- XML export ------------------------------------------------------
    def export(self, outfile, level, name_='GenerateModel'):
        """Write this element and its children as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s>\n' % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)

    def exportAttributes(self, outfile, level, name_='GenerateModel'):
        # This element carries no XML attributes.
        pass

    def exportChildren(self, outfile, level, name_='GenerateModel'):
        for child in self.getModule():
            child.export(outfile, level)
        for child in self.getPythonexport():
            child.export(outfile, level)

    # --- Python-literal export -------------------------------------------
    def exportLiteral(self, outfile, level, name_='GenerateModel'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        pass

    def exportLiteralChildren(self, outfile, level, name_):
        # Both collections use the same "<Label>=[ <Label>(...), ... ]" shape.
        for label, children in (('Module', self.Module),
                                ('PythonExport', self.PythonExport)):
            showIndent(outfile, level)
            outfile.write('%s=[\n' % label)
            for child in children:
                showIndent(outfile, level + 1)
                outfile.write('%s(\n' % label)
                child.exportLiteral(outfile, level + 1)
                showIndent(outfile, level + 1)
                outfile.write('),\n')
            showIndent(outfile, level)
            outfile.write('],\n')

    # --- DOM import -------------------------------------------------------
    def build(self, node_):
        """Populate this object from a minidom *node_* and its children."""
        self.buildAttributes(node_.attributes)
        for child_ in node_.childNodes:
            # Strip any namespace prefix from the element name.
            nodeName_ = child_.nodeName.split(':')[-1]
            self.buildChildren(child_, nodeName_)

    def buildAttributes(self, attrs):
        pass

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Module':
            obj_ = Module.factory()
            obj_.build(child_)
            self.Module.append(obj_)
        elif nodeName_ == 'PythonExport':
            obj_ = PythonExport.factory()
            obj_.build(child_)
            self.PythonExport.append(obj_)
# end class GenerateModel
class PythonExport:
    """Generated binding for a <PythonExport> element.

    Describes one exported Python class: its C++ twin type, include
    paths, namespaces, capability flags (RichCompare, NumberProtocol,
    Constructor, Delete, ...), optional Documentation/Sequence children
    and lists of Methode and Attribute children.
    """
    subclass = None
    def __init__(self, FatherNamespace='', RichCompare=0, Name='', Reference=0, FatherInclude='', Father='', Namespace='', Twin='', Constructor=0, TwinPointer='', Include='', NumberProtocol=0, Delete=0, Documentation=None, Methode=None, Attribute=None, Sequence=None, CustomAttributes='', ClassDeclarations='', Initialization=0):
        self.FatherNamespace = FatherNamespace
        self.RichCompare = RichCompare
        self.Name = Name
        self.Reference = Reference
        self.FatherInclude = FatherInclude
        self.Father = Father
        self.Namespace = Namespace
        self.Twin = Twin
        self.Constructor = Constructor
        self.TwinPointer = TwinPointer
        self.Include = Include
        self.NumberProtocol = NumberProtocol
        self.Delete = Delete
        self.Documentation = Documentation
        self.Initialization = Initialization
        # Mutable children default to fresh per-instance lists.
        if Methode is None:
            self.Methode = []
        else:
            self.Methode = Methode
        if Attribute is None:
            self.Attribute = []
        else:
            self.Attribute = Attribute
        self.Sequence = Sequence
        self.CustomAttributes = CustomAttributes
        self.ClassDeclarations = ClassDeclarations
    def factory(*args_, **kwargs_):
        # Instantiate a registered subclass override, if any.
        if PythonExport.subclass:
            return PythonExport.subclass(*args_, **kwargs_)
        else:
            return PythonExport(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated-style accessors ---
    def getInitialization(self): return self.Initialization
    def setInitialization(self, Initialization): self.Initialization = Initialization
    def getDocumentation(self): return self.Documentation
    def setDocumentation(self, Documentation): self.Documentation = Documentation
    def getMethode(self): return self.Methode
    def setMethode(self, Methode): self.Methode = Methode
    def addMethode(self, value): self.Methode.append(value)
    def insertMethode(self, index, value): self.Methode[index] = value
    def getAttribute(self): return self.Attribute
    def setAttribute(self, Attribute): self.Attribute = Attribute
    def addAttribute(self, value): self.Attribute.append(value)
    def insertAttribute(self, index, value): self.Attribute[index] = value
    def getSequence(self): return self.Sequence
    def setSequence(self, Sequence): self.Sequence = Sequence
    def getCustomattributes(self): return self.CustomAttributes
    def setCustomattributes(self, CustomAttributes): self.CustomAttributes = CustomAttributes
    def getClassdeclarations(self): return self.ClassDeclarations
    def setClassdeclarations(self, ClassDeclarations): self.ClassDeclarations = ClassDeclarations
    def getFathernamespace(self): return self.FatherNamespace
    def setFathernamespace(self, FatherNamespace): self.FatherNamespace = FatherNamespace
    def getRichcompare(self): return self.RichCompare
    def setRichcompare(self, RichCompare): self.RichCompare = RichCompare
    def getName(self): return self.Name
    def setName(self, Name): self.Name = Name
    def getReference(self): return self.Reference
    def setReference(self, Reference): self.Reference = Reference
    def getFatherinclude(self): return self.FatherInclude
    def setFatherinclude(self, FatherInclude): self.FatherInclude = FatherInclude
    def getFather(self): return self.Father
    def setFather(self, Father): self.Father = Father
    def getNamespace(self): return self.Namespace
    def setNamespace(self, Namespace): self.Namespace = Namespace
    def getTwin(self): return self.Twin
    def setTwin(self, Twin): self.Twin = Twin
    def getConstructor(self): return self.Constructor
    def setConstructor(self, Constructor): self.Constructor = Constructor
    def getTwinpointer(self): return self.TwinPointer
    def setTwinpointer(self, TwinPointer): self.TwinPointer = TwinPointer
    def getInclude(self): return self.Include
    def setInclude(self, Include): self.Include = Include
    def getNumberprotocol(self): return self.NumberProtocol
    def setNumberprotocol(self, NumberProtocol): self.NumberProtocol = NumberProtocol
    def getDelete(self): return self.Delete
    def setDelete(self, Delete): self.Delete = Delete
    # --- XML export ---
    def export(self, outfile, level, name_='PythonExport'):
        # Write the element with attributes, children, and closing tag.
        showIndent(outfile, level)
        outfile.write('<%s' % (name_, ))
        self.exportAttributes(outfile, level, name_='PythonExport')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='PythonExport'):
        # Flag attributes are guarded by 'is not None' so explicit 0 still prints.
        outfile.write(' FatherNamespace="%s"' % (self.getFathernamespace(), ))
        if self.getRichcompare() is not None:
            outfile.write(' RichCompare="%s"' % (self.getRichcompare(), ))
        outfile.write(' Name="%s"' % (self.getName(), ))
        if self.getReference() is not None:
            outfile.write(' Reference="%s"' % (self.getReference(), ))
        outfile.write(' FatherInclude="%s"' % (self.getFatherinclude(), ))
        outfile.write(' Father="%s"' % (self.getFather(), ))
        outfile.write(' Namespace="%s"' % (self.getNamespace(), ))
        outfile.write(' Twin="%s"' % (self.getTwin(), ))
        if self.getConstructor() is not None:
            outfile.write(' Constructor="%s"' % (self.getConstructor(), ))
        if self.getInitialization() is not None:
            outfile.write(' Initialization="%s"' % (self.getInitialization(), ))
        outfile.write(' TwinPointer="%s"' % (self.getTwinpointer(), ))
        outfile.write(' Include="%s"' % (self.getInclude(), ))
        if self.getNumberprotocol() is not None:
            outfile.write(' NumberProtocol="%s"' % (self.getNumberprotocol(), ))
        if self.getDelete() is not None:
            outfile.write(' Delete="%s"' % (self.getDelete(), ))
    def exportChildren(self, outfile, level, name_='PythonExport'):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for Methode_ in self.getMethode():
            Methode_.export(outfile, level)
        for Attribute_ in self.getAttribute():
            Attribute_.export(outfile, level)
        if self.Sequence:
            self.Sequence.export(outfile, level)
        showIndent(outfile, level)
        outfile.write('<CustomAttributes>%s</CustomAttributes>\n' % quote_xml(self.getCustomattributes()))
        showIndent(outfile, level)
        outfile.write('<ClassDeclarations>%s</ClassDeclarations>\n' % quote_xml(self.getClassdeclarations()))
    # --- Python-literal export ---
    def exportLiteral(self, outfile, level, name_='PythonExport'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('FatherNamespace = "%s",\n' % (self.getFathernamespace(),))
        showIndent(outfile, level)
        outfile.write('RichCompare = "%s",\n' % (self.getRichcompare(),))
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
        showIndent(outfile, level)
        outfile.write('Reference = "%s",\n' % (self.getReference(),))
        showIndent(outfile, level)
        outfile.write('FatherInclude = "%s",\n' % (self.getFatherinclude(),))
        showIndent(outfile, level)
        outfile.write('Father = "%s",\n' % (self.getFather(),))
        showIndent(outfile, level)
        outfile.write('Namespace = "%s",\n' % (self.getNamespace(),))
        showIndent(outfile, level)
        outfile.write('Twin = "%s",\n' % (self.getTwin(),))
        showIndent(outfile, level)
        outfile.write('Constructor = "%s",\n' % (self.getConstructor(),))
        showIndent(outfile, level)
        outfile.write('Initialization = "%s",\n' % (self.getInitialization(),))
        # BUG FIX: showIndent was missing here, so the TwinPointer line was
        # emitted without indentation unlike every other attribute line.
        showIndent(outfile, level)
        outfile.write('TwinPointer = "%s",\n' % (self.getTwinpointer(),))
        showIndent(outfile, level)
        outfile.write('Include = "%s",\n' % (self.getInclude(),))
        showIndent(outfile, level)
        outfile.write('NumberProtocol = "%s",\n' % (self.getNumberprotocol(),))
        showIndent(outfile, level)
        outfile.write('Delete = "%s",\n' % (self.getDelete(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write('Documentation=Documentation(\n')
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Methode=[\n')
        level += 1
        for Methode in self.Methode:
            showIndent(outfile, level)
            outfile.write('Methode(\n')
            Methode.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('Attribute=[\n')
        level += 1
        for Attribute in self.Attribute:
            showIndent(outfile, level)
            outfile.write('Attribute(\n')
            Attribute.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.Sequence:
            showIndent(outfile, level)
            outfile.write('Sequence=Sequence(\n')
            self.Sequence.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('CustomAttributes=%s,\n' % quote_python(self.getCustomattributes()))
        showIndent(outfile, level)
        outfile.write('ClassDeclarations=%s,\n' % quote_python(self.getClassdeclarations()))
    # --- DOM parsing ---
    def build(self, node_):
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(':')[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        # String attributes are copied verbatim; boolean attributes accept
        # 'true'/'1'/'false'/'0' and raise ValueError for anything else.
        if attrs.get('FatherNamespace'):
            self.FatherNamespace = attrs.get('FatherNamespace').value
        if attrs.get('RichCompare'):
            if attrs.get('RichCompare').value in ('true', '1'):
                self.RichCompare = 1
            elif attrs.get('RichCompare').value in ('false', '0'):
                self.RichCompare = 0
            else:
                raise ValueError('Bad boolean attribute (RichCompare)')
        if attrs.get('Name'):
            self.Name = attrs.get('Name').value
        if attrs.get('Reference'):
            if attrs.get('Reference').value in ('true', '1'):
                self.Reference = 1
            elif attrs.get('Reference').value in ('false', '0'):
                self.Reference = 0
            else:
                raise ValueError('Bad boolean attribute (Reference)')
        if attrs.get('FatherInclude'):
            self.FatherInclude = attrs.get('FatherInclude').value
        if attrs.get('Father'):
            self.Father = attrs.get('Father').value
        if attrs.get('Namespace'):
            self.Namespace = attrs.get('Namespace').value
        if attrs.get('Twin'):
            self.Twin = attrs.get('Twin').value
        if attrs.get('Constructor'):
            if attrs.get('Constructor').value in ('true', '1'):
                self.Constructor = 1
            elif attrs.get('Constructor').value in ('false', '0'):
                self.Constructor = 0
            else:
                raise ValueError('Bad boolean attribute (Constructor)')
        if attrs.get('Initialization'):
            if attrs.get('Initialization').value in ('true', '1'):
                self.Initialization = 1
            elif attrs.get('Initialization').value in ('false', '0'):
                self.Initialization = 0
            else:
                raise ValueError('Bad boolean attribute (Initialization)')
        if attrs.get('TwinPointer'):
            self.TwinPointer = attrs.get('TwinPointer').value
        if attrs.get('Include'):
            self.Include = attrs.get('Include').value
        if attrs.get('NumberProtocol'):
            if attrs.get('NumberProtocol').value in ('true', '1'):
                self.NumberProtocol = 1
            elif attrs.get('NumberProtocol').value in ('false', '0'):
                self.NumberProtocol = 0
            else:
                raise ValueError('Bad boolean attribute (NumberProtocol)')
        if attrs.get('Delete'):
            if attrs.get('Delete').value in ('true', '1'):
                self.Delete = 1
            elif attrs.get('Delete').value in ('false', '0'):
                self.Delete = 0
            else:
                raise ValueError('Bad boolean attribute (Delete)')
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Documentation':
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.setDocumentation(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Methode':
            obj_ = Methode.factory()
            obj_.build(child_)
            self.Methode.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Attribute':
            obj_ = Attribute.factory()
            obj_.build(child_)
            self.Attribute.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Sequence':
            obj_ = Sequence.factory()
            obj_.build(child_)
            self.setSequence(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'CustomAttributes':
            CustomAttributes_ = ''
            for text__content_ in child_.childNodes:
                CustomAttributes_ += text__content_.nodeValue
            self.CustomAttributes = CustomAttributes_
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'ClassDeclarations':
            ClassDeclarations_ = ''
            for text__content_ in child_.childNodes:
                ClassDeclarations_ += text__content_.nodeValue
            self.ClassDeclarations = ClassDeclarations_
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Initialization':
            # NOTE(review): an <Initialization> child is parsed with
            # Documentation.factory() and then OVERWRITES self.Documentation;
            # buildAttributes treats Initialization as a boolean attribute
            # instead. This looks like a code-generator bug — behavior is
            # preserved here; TODO confirm against the schema and callers.
            obj_ = Documentation.factory()
            obj_.build(child_)
            self.setDocumentation(obj_)
# end class PythonExport
class Methode:
    """Generated binding for a <Methode> element: an exported method with
    Const/Keyword flags, a Name, optional Documentation and a list of
    Parameter children."""
    subclass = None

    def __init__(self, Const=0, Name='', Keyword=0, Documentation=None, Parameter=None):
        self.Const = Const
        self.Name = Name
        self.Keyword = Keyword
        self.Documentation = Documentation
        # Fresh list per instance when no Parameter sequence was supplied.
        self.Parameter = [] if Parameter is None else Parameter

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override when one is installed.
        target = Methode.subclass or Methode
        return target(*args_, **kwargs_)

    # --- generated-style accessors ---
    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getParameter(self):
        return self.Parameter

    def setParameter(self, Parameter):
        self.Parameter = Parameter

    def addParameter(self, value):
        self.Parameter.append(value)

    def insertParameter(self, index, value):
        # Assignment at *index*, mirroring the other generated insert* methods.
        self.Parameter[index] = value

    def getConst(self):
        return self.Const

    def setConst(self, Const):
        self.Const = Const

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    def getKeyword(self):
        return self.Keyword

    def setKeyword(self, Keyword):
        self.Keyword = Keyword

    # --- XML export ---
    def export(self, outfile, level, name_='Methode'):
        showIndent(outfile, level)
        outfile.write('<%s' % name_)
        self.exportAttributes(outfile, level, name_='Methode')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)

    def exportAttributes(self, outfile, level, name_='Methode'):
        # Guarded by 'is not None' so an explicit 0 still prints.
        if self.Const is not None:
            outfile.write(' Const="%s"' % self.Const)
        outfile.write(' Name="%s"' % self.Name)
        if self.Keyword is not None:
            outfile.write(' Keyword="%s"' % self.Keyword)

    def exportChildren(self, outfile, level, name_='Methode'):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for param in self.Parameter:
            param.export(outfile, level)

    # --- Python-literal export ---
    def exportLiteral(self, outfile, level, name_='Methode'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        for label, value in (('Const', self.Const),
                             ('Name', self.Name),
                             ('Keyword', self.Keyword)):
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (label, value))

    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write('Documentation=Documentation(\n')
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Parameter=[\n')
        level += 1
        for param in self.Parameter:
            showIndent(outfile, level)
            outfile.write('Parameter(\n')
            param.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')

    # --- DOM parsing ---
    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])

    def _readFlag(self, attrs, name, current):
        # Parse a boolean XML attribute; keeps *current* when absent.
        node = attrs.get(name)
        if not node:
            return current
        if node.value in ('true', '1'):
            return 1
        if node.value in ('false', '0'):
            return 0
        raise ValueError('Bad boolean attribute (%s)' % name)

    def buildAttributes(self, attrs):
        self.Const = self._readFlag(attrs, 'Const', self.Const)
        name_node = attrs.get('Name')
        if name_node:
            self.Name = name_node.value
        self.Keyword = self._readFlag(attrs, 'Keyword', self.Keyword)

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Documentation':
            obj = Documentation.factory()
            obj.build(child_)
            self.setDocumentation(obj)
        elif nodeName_ == 'Parameter':
            obj = Parameter.factory()
            obj.build(child_)
            self.Parameter.append(obj)
# end class Methode
class Attribute:
    """Generated binding for an <Attribute> element: a named, optionally
    read-only exported attribute with optional Documentation and a single
    optional Parameter child."""
    subclass = None

    def __init__(self, ReadOnly=0, Name='', Documentation=None, Parameter=None):
        self.ReadOnly = ReadOnly
        self.Name = Name
        self.Documentation = Documentation
        # Single optional child here (unlike Methode, where it is a list).
        self.Parameter = Parameter

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override when one is installed.
        target = Attribute.subclass or Attribute
        return target(*args_, **kwargs_)

    # --- generated-style accessors ---
    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getParameter(self):
        return self.Parameter

    def setParameter(self, Parameter):
        self.Parameter = Parameter

    def getReadonly(self):
        return self.ReadOnly

    def setReadonly(self, ReadOnly):
        self.ReadOnly = ReadOnly

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    # --- XML export ---
    def export(self, outfile, level, name_='Attribute'):
        showIndent(outfile, level)
        outfile.write('<%s' % name_)
        self.exportAttributes(outfile, level, name_='Attribute')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)

    def exportAttributes(self, outfile, level, name_='Attribute'):
        outfile.write(' ReadOnly="%s"' % self.ReadOnly)
        outfile.write(' Name="%s"' % self.Name)

    def exportChildren(self, outfile, level, name_='Attribute'):
        for child in (self.Documentation, self.Parameter):
            if child:
                child.export(outfile, level)

    # --- Python-literal export ---
    def exportLiteral(self, outfile, level, name_='Attribute'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        for label, value in (('ReadOnly', self.ReadOnly), ('Name', self.Name)):
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (label, value))

    def exportLiteralChildren(self, outfile, level, name_):
        for label, child in (('Documentation', self.Documentation),
                             ('Parameter', self.Parameter)):
            if not child:
                continue
            showIndent(outfile, level)
            outfile.write('%s=%s(\n' % (label, label))
            child.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')

    # --- DOM parsing ---
    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])

    def buildAttributes(self, attrs):
        flag = attrs.get('ReadOnly')
        if flag:
            if flag.value in ('true', '1'):
                self.ReadOnly = 1
            elif flag.value in ('false', '0'):
                self.ReadOnly = 0
            else:
                raise ValueError('Bad boolean attribute (ReadOnly)')
        name_node = attrs.get('Name')
        if name_node:
            self.Name = name_node.value

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Documentation':
            obj = Documentation.factory()
            obj.build(child_)
            self.setDocumentation(obj)
        elif nodeName_ == 'Parameter':
            obj = Parameter.factory()
            obj.build(child_)
            self.setParameter(obj)
# end class Attribute
class Sequence:
    """Generated binding for a <Sequence> element: ten boolean slots named
    after CPython sequence-protocol hooks, plus the element's text content
    in valueOf_."""
    subclass = None
    # Slot names in the schema's canonical order; the same tuple drives
    # __init__, attribute export, literal export and parsing so the
    # emitted attribute order stays identical everywhere.
    _FLAGS = ('sq_slice', 'sq_item', 'sq_concat', 'sq_inplace_repeat',
              'sq_ass_slice', 'sq_contains', 'sq_ass_item', 'sq_repeat',
              'sq_length', 'sq_inplace_concat')

    def __init__(self, sq_slice=0, sq_item=0, sq_concat=0, sq_inplace_repeat=0, sq_ass_slice=0, sq_contains=0, sq_ass_item=0, sq_repeat=0, sq_length=0, sq_inplace_concat=0, valueOf_=''):
        for slot, flag in zip(self._FLAGS,
                              (sq_slice, sq_item, sq_concat,
                               sq_inplace_repeat, sq_ass_slice, sq_contains,
                               sq_ass_item, sq_repeat, sq_length,
                               sq_inplace_concat)):
            setattr(self, slot, flag)
        self.valueOf_ = valueOf_

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override when one is installed.
        target = Sequence.subclass or Sequence
        return target(*args_, **kwargs_)

    # --- generated-style accessors ---
    def getSq_slice(self): return self.sq_slice
    def setSq_slice(self, sq_slice): self.sq_slice = sq_slice
    def getSq_item(self): return self.sq_item
    def setSq_item(self, sq_item): self.sq_item = sq_item
    def getSq_concat(self): return self.sq_concat
    def setSq_concat(self, sq_concat): self.sq_concat = sq_concat
    def getSq_inplace_repeat(self): return self.sq_inplace_repeat
    def setSq_inplace_repeat(self, sq_inplace_repeat): self.sq_inplace_repeat = sq_inplace_repeat
    def getSq_ass_slice(self): return self.sq_ass_slice
    def setSq_ass_slice(self, sq_ass_slice): self.sq_ass_slice = sq_ass_slice
    def getSq_contains(self): return self.sq_contains
    def setSq_contains(self, sq_contains): self.sq_contains = sq_contains
    def getSq_ass_item(self): return self.sq_ass_item
    def setSq_ass_item(self, sq_ass_item): self.sq_ass_item = sq_ass_item
    def getSq_repeat(self): return self.sq_repeat
    def setSq_repeat(self, sq_repeat): self.sq_repeat = sq_repeat
    def getSq_length(self): return self.sq_length
    def setSq_length(self, sq_length): self.sq_length = sq_length
    def getSq_inplace_concat(self): return self.sq_inplace_concat
    def setSq_inplace_concat(self, sq_inplace_concat): self.sq_inplace_concat = sq_inplace_concat
    def getValueOf_(self): return self.valueOf_
    def setValueOf_(self, valueOf_): self.valueOf_ = valueOf_

    # --- XML export ---
    def export(self, outfile, level, name_='Sequence'):
        showIndent(outfile, level)
        outfile.write('<%s' % name_)
        self.exportAttributes(outfile, level, name_='Sequence')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)

    def exportAttributes(self, outfile, level, name_='Sequence'):
        # One attribute per flag slot, in canonical order.
        for slot in self._FLAGS:
            outfile.write(' %s="%s"' % (slot, getattr(self, slot)))

    def exportChildren(self, outfile, level, name_='Sequence'):
        outfile.write(self.valueOf_)

    # --- Python-literal export ---
    def exportLiteral(self, outfile, level, name_='Sequence'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        for slot in self._FLAGS:
            showIndent(outfile, level)
            outfile.write('%s = "%s",\n' % (slot, getattr(self, slot)))

    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))

    # --- DOM parsing ---
    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])

    def buildAttributes(self, attrs):
        # Each flag accepts 'true'/'1'/'false'/'0'; anything else raises.
        for slot in self._FLAGS:
            node = attrs.get(slot)
            if not node:
                continue
            if node.value in ('true', '1'):
                setattr(self, slot, 1)
            elif node.value in ('false', '0'):
                setattr(self, slot, 0)
            else:
                raise ValueError('Bad boolean attribute (%s)' % slot)

    def buildChildren(self, child_, nodeName_):
        # The element's text content becomes valueOf_.
        self.valueOf_ = ''.join(
            child.nodeValue for child in child_.childNodes
            if child.nodeType == Node.TEXT_NODE)
# end class Sequence
class Module:
    """Generated binding for a <Module> element: a named module with
    optional Documentation, Dependencies and Content children."""
    subclass = None

    def __init__(self, Name='', Documentation=None, Dependencies=None, Content=None):
        self.Name = Name
        self.Documentation = Documentation
        self.Dependencies = Dependencies
        self.Content = Content

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override when one is installed.
        target = Module.subclass or Module
        return target(*args_, **kwargs_)

    # --- generated-style accessors ---
    def getDocumentation(self):
        return self.Documentation

    def setDocumentation(self, Documentation):
        self.Documentation = Documentation

    def getDependencies(self):
        return self.Dependencies

    def setDependencies(self, Dependencies):
        self.Dependencies = Dependencies

    def getContent(self):
        return self.Content

    def setContent(self, Content):
        self.Content = Content

    def getName(self):
        return self.Name

    def setName(self, Name):
        self.Name = Name

    # --- XML export ---
    def export(self, outfile, level, name_='Module'):
        showIndent(outfile, level)
        outfile.write('<%s' % name_)
        self.exportAttributes(outfile, level, name_='Module')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)

    def exportAttributes(self, outfile, level, name_='Module'):
        outfile.write(' Name="%s"' % self.Name)

    def exportChildren(self, outfile, level, name_='Module'):
        for child in (self.Documentation, self.Dependencies, self.Content):
            if child:
                child.export(outfile, level)

    # --- Python-literal export ---
    def exportLiteral(self, outfile, level, name_='Module'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.Name,))

    def exportLiteralChildren(self, outfile, level, name_):
        for label, child in (('Documentation', self.Documentation),
                             ('Dependencies', self.Dependencies),
                             ('Content', self.Content)):
            if not child:
                continue
            showIndent(outfile, level)
            outfile.write('%s=%s(\n' % (label, label))
            child.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')

    # --- DOM parsing ---
    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])

    def buildAttributes(self, attrs):
        name_node = attrs.get('Name')
        if name_node:
            self.Name = name_node.value

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Documentation':
            obj = Documentation.factory()
            obj.build(child_)
            self.setDocumentation(obj)
        elif nodeName_ == 'Dependencies':
            obj = Dependencies.factory()
            obj.build(child_)
            self.setDependencies(obj)
        elif nodeName_ == 'Content':
            obj = Content.factory()
            obj.build(child_)
            self.setContent(obj)
# end class Module
class Dependencies:
    """Generated binding for a <Dependencies> element: a bare list of
    Module children."""
    subclass = None

    def __init__(self, Module=None):
        # Fresh list per instance; never share a mutable default.
        self.Module = [] if Module is None else Module

    @staticmethod
    def factory(*args_, **kwargs_):
        # Honour a registered subclass override when one is installed.
        target = Dependencies.subclass or Dependencies
        return target(*args_, **kwargs_)

    # --- generated-style accessors ---
    def getModule(self):
        return self.Module

    def setModule(self, Module):
        self.Module = Module

    def addModule(self, value):
        self.Module.append(value)

    def insertModule(self, index, value):
        # Replaces the item at *index* (assignment, not list.insert).
        self.Module[index] = value

    # --- XML export ---
    def export(self, outfile, level, name_='Dependencies'):
        showIndent(outfile, level)
        outfile.write('<%s>\n' % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)

    def exportAttributes(self, outfile, level, name_='Dependencies'):
        pass

    def exportChildren(self, outfile, level, name_='Dependencies'):
        for module in self.Module:
            module.export(outfile, level)

    # --- Python-literal export ---
    def exportLiteral(self, outfile, level, name_='Dependencies'):
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)

    def exportLiteralAttributes(self, outfile, level, name_):
        pass

    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Module=[\n')
        level += 1
        for module in self.Module:
            showIndent(outfile, level)
            outfile.write('Module(\n')
            module.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')

    # --- DOM parsing ---
    def build(self, node_):
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])

    def buildAttributes(self, attrs):
        pass

    def buildChildren(self, child_, nodeName_):
        if child_.nodeType == Node.ELEMENT_NODE and nodeName_ == 'Module':
            obj = Module.factory()
            obj.build(child_)
            self.Module.append(obj)
# end class Dependencies
class Content:
    """Model for the <Content> XML element of a module description.

    Holds five independent child collections: Property, Feature and DocObject
    model objects, plus plain-string GuiCommand and PreferencesPage entries.
    """
    subclass = None
    def __init__(self, Property=None, Feature=None, DocObject=None, GuiCommand=None, PreferencesPage=None):
        # Each list defaults to a fresh list per instance (avoids the
        # shared-mutable-default pitfall).
        if Property is None:
            self.Property = []
        else:
            self.Property = Property
        if Feature is None:
            self.Feature = []
        else:
            self.Feature = Feature
        if DocObject is None:
            self.DocObject = []
        else:
            self.DocObject = DocObject
        if GuiCommand is None:
            self.GuiCommand = []
        else:
            self.GuiCommand = GuiCommand
        if PreferencesPage is None:
            self.PreferencesPage = []
        else:
            self.PreferencesPage = PreferencesPage
    def factory(*args_, **kwargs_):
        # Instantiate the registered subclass when one is installed.
        if Content.subclass:
            return Content.subclass(*args_, **kwargs_)
        else:
            return Content(*args_, **kwargs_)
    factory = staticmethod(factory)
    # --- generated accessors: one get/set/add/insert group per child list ---
    def getProperty(self): return self.Property
    def setProperty(self, Property): self.Property = Property
    def addProperty(self, value): self.Property.append(value)
    def insertProperty(self, index, value): self.Property[index] = value
    def getFeature(self): return self.Feature
    def setFeature(self, Feature): self.Feature = Feature
    def addFeature(self, value): self.Feature.append(value)
    def insertFeature(self, index, value): self.Feature[index] = value
    def getDocobject(self): return self.DocObject
    def setDocobject(self, DocObject): self.DocObject = DocObject
    def addDocobject(self, value): self.DocObject.append(value)
    def insertDocobject(self, index, value): self.DocObject[index] = value
    def getGuicommand(self): return self.GuiCommand
    def setGuicommand(self, GuiCommand): self.GuiCommand = GuiCommand
    def addGuicommand(self, value): self.GuiCommand.append(value)
    def insertGuicommand(self, index, value): self.GuiCommand[index] = value
    def getPreferencespage(self): return self.PreferencesPage
    def setPreferencespage(self, PreferencesPage): self.PreferencesPage = PreferencesPage
    def addPreferencespage(self, value): self.PreferencesPage.append(value)
    def insertPreferencespage(self, index, value): self.PreferencesPage[index] = value
    def export(self, outfile, level, name_='Content'):
        # Write <Content> ... </Content> as indented XML.
        showIndent(outfile, level)
        outfile.write('<%s>\n' % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='Content'):
        # <Content> carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, name_='Content'):
        # Object children export themselves; string children are written as
        # simple XML-escaped text elements.
        for Property_ in self.getProperty():
            Property_.export(outfile, level)
        for Feature_ in self.getFeature():
            Feature_.export(outfile, level)
        for DocObject_ in self.getDocobject():
            DocObject_.export(outfile, level)
        for GuiCommand_ in self.getGuicommand():
            showIndent(outfile, level)
            outfile.write('<GuiCommand>%s</GuiCommand>\n' % quote_xml(GuiCommand_))
        for PreferencesPage_ in self.getPreferencespage():
            showIndent(outfile, level)
            outfile.write('<PreferencesPage>%s</PreferencesPage>\n' % quote_xml(PreferencesPage_))
    def exportLiteral(self, outfile, level, name_='Content'):
        # Write the object as a Python constructor literal.
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        # One "Name=[...]" list literal per child collection; object children
        # recurse into exportLiteral, string children use quote_python.
        showIndent(outfile, level)
        outfile.write('Property=[\n')
        level += 1
        for Property in self.Property:
            showIndent(outfile, level)
            outfile.write('Property(\n')
            Property.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('Feature=[\n')
        level += 1
        for Feature in self.Feature:
            showIndent(outfile, level)
            outfile.write('Feature(\n')
            Feature.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('DocObject=[\n')
        level += 1
        for DocObject in self.DocObject:
            showIndent(outfile, level)
            outfile.write('DocObject(\n')
            DocObject.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('GuiCommand=[\n')
        level += 1
        for GuiCommand in self.GuiCommand:
            showIndent(outfile, level)
            outfile.write('%s,\n' % quote_python(GuiCommand))
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
        showIndent(outfile, level)
        outfile.write('PreferencesPage=[\n')
        level += 1
        for PreferencesPage in self.PreferencesPage:
            showIndent(outfile, level)
            outfile.write('%s,\n' % quote_python(PreferencesPage))
        level -= 1
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node_):
        # Populate from a DOM node: attributes first, then every child.
        attrs = node_.attributes
        self.buildAttributes(attrs)
        for child_ in node_.childNodes:
            nodeName_ = child_.nodeName.split(':')[-1]
            self.buildChildren(child_, nodeName_)
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        # Object children recurse via build(); text-only children
        # (GuiCommand, PreferencesPage) concatenate their text nodes.
        if child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Property':
            obj_ = Property.factory()
            obj_.build(child_)
            self.Property.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'Feature':
            obj_ = Feature.factory()
            obj_.build(child_)
            self.Feature.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'DocObject':
            obj_ = DocObject.factory()
            obj_.build(child_)
            self.DocObject.append(obj_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'GuiCommand':
            GuiCommand_ = ''
            for text__content_ in child_.childNodes:
                GuiCommand_ += text__content_.nodeValue
            self.GuiCommand.append(GuiCommand_)
        elif child_.nodeType == Node.ELEMENT_NODE and \
            nodeName_ == 'PreferencesPage':
            PreferencesPage_ = ''
            for text__content_ in child_.childNodes:
                PreferencesPage_ += text__content_.nodeValue
            self.PreferencesPage.append(PreferencesPage_)
# end class Content
class Feature:
    """Model for the <Feature> XML element: a Name attribute plus an optional
    Documentation child, a list of Property children and an optional
    ViewProvider child."""
    subclass = None
    def __init__(self, Name='', Documentation=None, Property=None, ViewProvider=None):
        self.Name = Name
        self.Documentation = Documentation
        # Fresh list per instance so the mutable default is never shared.
        if Property is None:
            Property = []
        self.Property = Property
        self.ViewProvider = ViewProvider
    def factory(*args_, **kwargs_):
        cls = Feature.subclass or Feature
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getDocumentation(self):
        return self.Documentation
    def setDocumentation(self, Documentation):
        self.Documentation = Documentation
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        self.Property[index] = value
    def getViewprovider(self):
        return self.ViewProvider
    def setViewprovider(self, ViewProvider):
        self.ViewProvider = ViewProvider
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def export(self, outfile, level, name_='Feature'):
        """Write this element (attributes plus children) as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s' % (name_, ))
        self.exportAttributes(outfile, level, name_='Feature')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='Feature'):
        outfile.write(' Name="%s"' % (self.getName(), ))
    def exportChildren(self, outfile, level, name_='Feature'):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for prop in self.getProperty():
            prop.export(outfile, level)
        if self.ViewProvider:
            self.ViewProvider.export(outfile, level)
    def exportLiteral(self, outfile, level, name_='Feature'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write('Documentation=Documentation(\n')
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Property=[\n')
        for prop in self.Property:
            showIndent(outfile, level + 1)
            outfile.write('Property(\n')
            prop.exportLiteral(outfile, level + 1)
            showIndent(outfile, level + 1)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('],\n')
        if self.ViewProvider:
            showIndent(outfile, level)
            outfile.write('ViewProvider=ViewProvider(\n')
            self.ViewProvider.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        name_attr = attrs.get('Name')
        if name_attr:
            self.Name = name_attr.value
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Documentation':
            node = Documentation.factory()
            node.build(child_)
            self.setDocumentation(node)
        elif nodeName_ == 'Property':
            node = Property.factory()
            node.build(child_)
            self.Property.append(node)
        elif nodeName_ == 'ViewProvider':
            node = ViewProvider.factory()
            node.build(child_)
            self.setViewprovider(node)
# end class Feature
class DocObject:
    """Model for the <DocObject> XML element: a Name attribute plus an
    optional Documentation child and a list of Property children."""
    subclass = None
    def __init__(self, Name='', Documentation=None, Property=None):
        self.Name = Name
        self.Documentation = Documentation
        # Fresh list per instance so the mutable default is never shared.
        if Property is None:
            Property = []
        self.Property = Property
    def factory(*args_, **kwargs_):
        cls = DocObject.subclass or DocObject
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getDocumentation(self):
        return self.Documentation
    def setDocumentation(self, Documentation):
        self.Documentation = Documentation
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        self.Property[index] = value
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def export(self, outfile, level, name_='DocObject'):
        """Write this element (attributes plus children) as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s' % (name_, ))
        self.exportAttributes(outfile, level, name_='DocObject')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='DocObject'):
        outfile.write(' Name="%s"' % (self.getName(), ))
    def exportChildren(self, outfile, level, name_='DocObject'):
        if self.Documentation:
            self.Documentation.export(outfile, level)
        for prop in self.getProperty():
            prop.export(outfile, level)
    def exportLiteral(self, outfile, level, name_='DocObject'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write('Documentation=Documentation(\n')
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('Property=[\n')
        for prop in self.Property:
            showIndent(outfile, level + 1)
            outfile.write('Property(\n')
            prop.exportLiteral(outfile, level + 1)
            showIndent(outfile, level + 1)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        name_attr = attrs.get('Name')
        if name_attr:
            self.Name = name_attr.value
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Documentation':
            node = Documentation.factory()
            node.build(child_)
            self.setDocumentation(node)
        elif nodeName_ == 'Property':
            node = Property.factory()
            node.build(child_)
            self.Property.append(node)
# end class DocObject
class Property:
    """Model for the <Property> XML element: Type/Name/StartValue attributes
    and an optional Documentation child."""
    subclass = None
    def __init__(self, Type='', Name='', StartValue='', Documentation=None):
        self.Type = Type
        self.Name = Name
        self.StartValue = StartValue
        self.Documentation = Documentation
    def factory(*args_, **kwargs_):
        cls = Property.subclass or Property
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getDocumentation(self):
        return self.Documentation
    def setDocumentation(self, Documentation):
        self.Documentation = Documentation
    def getType(self):
        return self.Type
    def setType(self, Type):
        self.Type = Type
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def getStartvalue(self):
        return self.StartValue
    def setStartvalue(self, StartValue):
        self.StartValue = StartValue
    def export(self, outfile, level, name_='Property'):
        """Write this element (attributes plus children) as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s' % (name_, ))
        self.exportAttributes(outfile, level, name_='Property')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='Property'):
        outfile.write(' Type="%s"' % (self.getType(), ))
        outfile.write(' Name="%s"' % (self.getName(), ))
        # StartValue is written whenever it is set -- including the empty
        # string default; only a None value suppresses it.
        start = self.getStartvalue()
        if start is not None:
            outfile.write(' StartValue="%s"' % (start, ))
    def exportChildren(self, outfile, level, name_='Property'):
        if self.Documentation:
            self.Documentation.export(outfile, level)
    def exportLiteral(self, outfile, level, name_='Property'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Type = "%s",\n' % (self.getType(),))
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
        showIndent(outfile, level)
        outfile.write('StartValue = "%s",\n' % (self.getStartvalue(),))
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Documentation:
            showIndent(outfile, level)
            outfile.write('Documentation=Documentation(\n')
            self.Documentation.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        # Attribute names double as instance attribute names here.
        for key in ('Type', 'Name', 'StartValue'):
            attr = attrs.get(key)
            if attr:
                setattr(self, key, attr.value)
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Documentation':
            node = Documentation.factory()
            node.build(child_)
            self.setDocumentation(node)
# end class Property
class Documentation:
    """Model for the <Documentation> XML element: an optional Author child
    plus DeveloperDocu and UserDocu text children."""
    subclass = None
    def __init__(self, Author=None, DeveloperDocu='', UserDocu=''):
        self.Author = Author
        self.DeveloperDocu = DeveloperDocu
        self.UserDocu = UserDocu
    def factory(*args_, **kwargs_):
        cls = Documentation.subclass or Documentation
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getAuthor(self):
        return self.Author
    def setAuthor(self, Author):
        self.Author = Author
    def getDeveloperdocu(self):
        return self.DeveloperDocu
    def setDeveloperdocu(self, DeveloperDocu):
        self.DeveloperDocu = DeveloperDocu
    def getUserdocu(self):
        return self.UserDocu
    def setUserdocu(self, UserDocu):
        self.UserDocu = UserDocu
    def export(self, outfile, level, name_='Documentation'):
        """Write this element as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s>\n' % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='Documentation'):
        # This element carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, name_='Documentation'):
        if self.Author:
            self.Author.export(outfile, level)
        # Both docu strings are always emitted, XML-escaped.
        showIndent(outfile, level)
        outfile.write('<DeveloperDocu>%s</DeveloperDocu>\n' % quote_xml(self.getDeveloperdocu()))
        showIndent(outfile, level)
        outfile.write('<UserDocu>%s</UserDocu>\n' % quote_xml(self.getUserdocu()))
    def exportLiteral(self, outfile, level, name_='Documentation'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        if self.Author:
            showIndent(outfile, level)
            outfile.write('Author=Author(\n')
            self.Author.exportLiteral(outfile, level)
            showIndent(outfile, level)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('DeveloperDocu=%s,\n' % quote_python(self.getDeveloperdocu()))
        showIndent(outfile, level)
        outfile.write('UserDocu=%s,\n' % quote_python(self.getUserdocu()))
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Author':
            node = Author.factory()
            node.build(child_)
            self.setAuthor(node)
        elif nodeName_ == 'DeveloperDocu':
            parts = []
            for text_node in child_.childNodes:
                parts.append(text_node.nodeValue)
            self.DeveloperDocu = ''.join(parts)
        elif nodeName_ == 'UserDocu':
            parts = []
            for text_node in child_.childNodes:
                parts.append(text_node.nodeValue)
            self.UserDocu = ''.join(parts)
# end class Documentation
class Author:
    """Model for the <Author> XML element: Name/Licence/EMail attributes plus
    the element's raw text content (valueOf_)."""
    subclass = None
    def __init__(self, Name='', Licence='', EMail='', valueOf_=''):
        self.Name = Name
        self.Licence = Licence
        self.EMail = EMail
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        cls = Author.subclass or Author
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def getLicence(self):
        return self.Licence
    def setLicence(self, Licence):
        self.Licence = Licence
    def getEmail(self):
        return self.EMail
    def setEmail(self, EMail):
        self.EMail = EMail
    def getValueOf_(self):
        return self.valueOf_
    def setValueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def export(self, outfile, level, name_='Author'):
        """Write this element (attributes plus text content) as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s' % (name_, ))
        self.exportAttributes(outfile, level, name_='Author')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='Author'):
        outfile.write(' Name="%s"' % (self.getName(), ))
        # Licence is written whenever it is set (including the '' default);
        # only a None value suppresses it.
        licence = self.getLicence()
        if licence is not None:
            outfile.write(' Licence="%s"' % (licence, ))
        outfile.write(' EMail="%s"' % (self.getEmail(), ))
    def exportChildren(self, outfile, level, name_='Author'):
        outfile.write(self.valueOf_)
    def exportLiteral(self, outfile, level, name_='Author'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
        showIndent(outfile, level)
        outfile.write('Licence = "%s",\n' % (self.getLicence(),))
        showIndent(outfile, level)
        outfile.write('EMail = "%s",\n' % (self.getEmail(),))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        # Attribute names double as instance attribute names here.
        for key in ('Name', 'Licence', 'EMail'):
            attr = attrs.get(key)
            if attr:
                setattr(self, key, attr.value)
    def buildChildren(self, child_, nodeName_):
        # Collect the concatenated text of every text node child.
        parts = []
        for node in child_.childNodes:
            if node.nodeType == Node.TEXT_NODE:
                parts.append(node.nodeValue)
        self.valueOf_ = ''.join(parts)
# end class Author
class ViewProvider:
    """Model for the <ViewProvider> XML element: a list of Property children."""
    subclass = None
    def __init__(self, Property=None):
        # Fresh list per instance so the mutable default is never shared.
        if Property is None:
            Property = []
        self.Property = Property
    def factory(*args_, **kwargs_):
        cls = ViewProvider.subclass or ViewProvider
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getProperty(self):
        return self.Property
    def setProperty(self, Property):
        self.Property = Property
    def addProperty(self, value):
        self.Property.append(value)
    def insertProperty(self, index, value):
        self.Property[index] = value
    def export(self, outfile, level, name_='ViewProvider'):
        """Write this element as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s>\n' % name_)
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='ViewProvider'):
        # This element carries no XML attributes.
        pass
    def exportChildren(self, outfile, level, name_='ViewProvider'):
        for prop in self.getProperty():
            prop.export(outfile, level)
    def exportLiteral(self, outfile, level, name_='ViewProvider'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        pass
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Property=[\n')
        for prop in self.Property:
            showIndent(outfile, level + 1)
            outfile.write('Property(\n')
            prop.exportLiteral(outfile, level + 1)
            showIndent(outfile, level + 1)
            outfile.write('),\n')
        showIndent(outfile, level)
        outfile.write('],\n')
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        pass
    def buildChildren(self, child_, nodeName_):
        if child_.nodeType != Node.ELEMENT_NODE:
            return
        if nodeName_ == 'Property':
            node = Property.factory()
            node.build(child_)
            self.Property.append(node)
# end class ViewProvider
class Parameter:
    """Model for the <Parameter> XML element: Type/Name attributes plus the
    element's raw text content (valueOf_)."""
    subclass = None
    def __init__(self, Type='', Name='', valueOf_=''):
        self.Type = Type
        self.Name = Name
        self.valueOf_ = valueOf_
    def factory(*args_, **kwargs_):
        cls = Parameter.subclass or Parameter
        return cls(*args_, **kwargs_)
    factory = staticmethod(factory)
    def getType(self):
        return self.Type
    def setType(self, Type):
        self.Type = Type
    def getName(self):
        return self.Name
    def setName(self, Name):
        self.Name = Name
    def getValueOf_(self):
        return self.valueOf_
    def setValueOf_(self, valueOf_):
        self.valueOf_ = valueOf_
    def export(self, outfile, level, name_='Parameter'):
        """Write this element (attributes plus text content) as indented XML."""
        showIndent(outfile, level)
        outfile.write('<%s' % (name_, ))
        self.exportAttributes(outfile, level, name_='Parameter')
        outfile.write('>\n')
        self.exportChildren(outfile, level + 1, name_)
        showIndent(outfile, level)
        outfile.write('</%s>\n' % name_)
    def exportAttributes(self, outfile, level, name_='Parameter'):
        outfile.write(' Type="%s"' % (self.getType(), ))
        outfile.write(' Name="%s"' % (self.getName(), ))
    def exportChildren(self, outfile, level, name_='Parameter'):
        outfile.write(self.valueOf_)
    def exportLiteral(self, outfile, level, name_='Parameter'):
        """Write this object as a Python constructor literal."""
        level += 1
        self.exportLiteralAttributes(outfile, level, name_)
        self.exportLiteralChildren(outfile, level, name_)
    def exportLiteralAttributes(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('Type = "%s",\n' % (self.getType(),))
        showIndent(outfile, level)
        outfile.write('Name = "%s",\n' % (self.getName(),))
    def exportLiteralChildren(self, outfile, level, name_):
        showIndent(outfile, level)
        outfile.write('valueOf_ = "%s",\n' % (self.valueOf_,))
    def build(self, node_):
        """Populate this object from a DOM node."""
        self.buildAttributes(node_.attributes)
        for child in node_.childNodes:
            self.buildChildren(child, child.nodeName.split(':')[-1])
    def buildAttributes(self, attrs):
        # Attribute names double as instance attribute names here.
        for key in ('Type', 'Name'):
            attr = attrs.get(key)
            if attr:
                setattr(self, key, attr.value)
    def buildChildren(self, child_, nodeName_):
        # Collect the concatenated text of every text node child.
        parts = []
        for node in child_.childNodes:
            if node.nodeType == Node.TEXT_NODE:
                parts.append(node.nodeValue)
        self.valueOf_ = ''.join(parts)
# end class Parameter
from xml.sax import handler, make_parser
class SaxStackElement:
    """One entry on the SAX parse stack: the element's tag name, the model
    object built for it (None for text-only elements), and its accumulated
    character content."""
    def __init__(self, name='', obj=None):
        self.name = name
        self.obj = obj
        # Character data is appended here as the parser reports it.
        self.content = ''
#
# SAX handler
#
class SaxGeneratemodelHandler(handler.ContentHandler):
def __init__(self):
self.stack = []
self.root = None
def getRoot(self):
return self.root
def setDocumentLocator(self, locator):
self.locator = locator
    def showError(self, msg):
        # Report a fatal parse problem and terminate the whole process; there
        # is no recovery path once the input document is rejected.
        # NOTE(review): Python 2 print statement; `sys` must be imported at
        # module level (not visible in this chunk) -- confirm.
        print '*** (showError):', msg
        sys.exit(-1)
def startElement(self, name, attrs):
done = 0
if name == 'GenerateModel':
obj = GenerateModel.factory()
stackObj = SaxStackElement('GenerateModel', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Module':
obj = Module.factory()
stackObj = SaxStackElement('Module', obj)
self.stack.append(stackObj)
done = 1
elif name == 'PythonExport':
obj = PythonExport.factory()
val = attrs.get('FatherNamespace', None)
if val is not None:
obj.setFathernamespace(val)
val = attrs.get('RichCompare', None)
if val is not None:
if val in ('true', '1'):
obj.setRichcompare(1)
elif val in ('false', '0'):
obj.setRichcompare(0)
else:
self.reportError('"RichCompare" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
val = attrs.get('Reference', None)
if val is not None:
if val in ('true', '1'):
obj.setReference(1)
elif val in ('false', '0'):
obj.setReference(0)
else:
self.reportError('"Reference" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('FatherInclude', None)
if val is not None:
obj.setFatherinclude(val)
val = attrs.get('Father', None)
if val is not None:
obj.setFather(val)
val = attrs.get('Namespace', None)
if val is not None:
obj.setNamespace(val)
val = attrs.get('Twin', None)
if val is not None:
obj.setTwin(val)
val = attrs.get('Constructor', None)
if val is not None:
if val in ('true', '1'):
obj.setConstructor(1)
elif val in ('false', '0'):
obj.setConstructor(0)
else:
self.reportError('"Constructor" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('Initialization', None)
if val is not None:
if val in ('true', '1'):
obj.setInitialization(1)
elif val in ('false', '0'):
obj.setInitialization(0)
else:
self.reportError('"Initialization" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('TwinPointer', None)
if val is not None:
obj.setTwinpointer(val)
val = attrs.get('Include', None)
if val is not None:
obj.setInclude(val)
val = attrs.get('NumberProtocol', None)
if val is not None:
if val in ('true', '1'):
obj.setNumberprotocol(1)
elif val in ('false', '0'):
obj.setNumberprotocol(0)
else:
self.reportError('"NumberProtocol" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('Delete', None)
if val is not None:
if val in ('true', '1'):
obj.setDelete(1)
elif val in ('false', '0'):
obj.setDelete(0)
else:
self.reportError('"Delete" attribute must be boolean ("true", "1", "false", "0")')
stackObj = SaxStackElement('PythonExport', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Documentation':
obj = Documentation.factory()
stackObj = SaxStackElement('Documentation', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Methode':
obj = Methode.factory()
val = attrs.get('Const', None)
if val is not None:
if val in ('true', '1'):
obj.setConst(1)
elif val in ('false', '0'):
obj.setConst(0)
else:
self.reportError('"Const" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
val = attrs.get('Keyword', None)
if val is not None:
if val in ('true', '1'):
obj.setKeyword(1)
elif val in ('false', '0'):
obj.setKeyword(0)
else:
self.reportError('"Keyword" attribute must be boolean ("true", "1", "false", "0")')
stackObj = SaxStackElement('Methode', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Parameter':
obj = Parameter.factory()
val = attrs.get('Type', None)
if val is not None:
obj.setType(val)
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
stackObj = SaxStackElement('Parameter', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Attribute':
obj = Attribute.factory()
val = attrs.get('ReadOnly', None)
if val is not None:
if val in ('true', '1'):
obj.setReadonly(1)
elif val in ('false', '0'):
obj.setReadonly(0)
else:
self.reportError('"ReadOnly" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
stackObj = SaxStackElement('Attribute', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Sequence':
obj = Sequence.factory()
val = attrs.get('sq_slice', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_slice(1)
elif val in ('false', '0'):
obj.setSq_slice(0)
else:
self.reportError('"sq_slice" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_item', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_item(1)
elif val in ('false', '0'):
obj.setSq_item(0)
else:
self.reportError('"sq_item" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_concat', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_concat(1)
elif val in ('false', '0'):
obj.setSq_concat(0)
else:
self.reportError('"sq_concat" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_inplace_repeat', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_inplace_repeat(1)
elif val in ('false', '0'):
obj.setSq_inplace_repeat(0)
else:
self.reportError('"sq_inplace_repeat" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_ass_slice', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_ass_slice(1)
elif val in ('false', '0'):
obj.setSq_ass_slice(0)
else:
self.reportError('"sq_ass_slice" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_contains', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_contains(1)
elif val in ('false', '0'):
obj.setSq_contains(0)
else:
self.reportError('"sq_contains" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_ass_item', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_ass_item(1)
elif val in ('false', '0'):
obj.setSq_ass_item(0)
else:
self.reportError('"sq_ass_item" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_repeat', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_repeat(1)
elif val in ('false', '0'):
obj.setSq_repeat(0)
else:
self.reportError('"sq_repeat" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_length', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_length(1)
elif val in ('false', '0'):
obj.setSq_length(0)
else:
self.reportError('"sq_length" attribute must be boolean ("true", "1", "false", "0")')
val = attrs.get('sq_inplace_concat', None)
if val is not None:
if val in ('true', '1'):
obj.setSq_inplace_concat(1)
elif val in ('false', '0'):
obj.setSq_inplace_concat(0)
else:
self.reportError('"sq_inplace_concat" attribute must be boolean ("true", "1", "false", "0")')
stackObj = SaxStackElement('Sequence', obj)
self.stack.append(stackObj)
done = 1
elif name == 'CustomAttributes':
stackObj = SaxStackElement('CustomAttributes', None)
self.stack.append(stackObj)
done = 1
elif name == 'ClassDeclarations':
stackObj = SaxStackElement('ClassDeclarations', None)
self.stack.append(stackObj)
done = 1
elif name == 'Dependencies':
obj = Dependencies.factory()
stackObj = SaxStackElement('Dependencies', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Content':
obj = Content.factory()
stackObj = SaxStackElement('Content', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Property':
obj = Property.factory()
stackObj = SaxStackElement('Property', obj)
self.stack.append(stackObj)
done = 1
elif name == 'Feature':
obj = Feature.factory()
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
stackObj = SaxStackElement('Feature', obj)
self.stack.append(stackObj)
done = 1
elif name == 'ViewProvider':
obj = ViewProvider.factory()
stackObj = SaxStackElement('ViewProvider', obj)
self.stack.append(stackObj)
done = 1
elif name == 'DocObject':
obj = DocObject.factory()
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
stackObj = SaxStackElement('DocObject', obj)
self.stack.append(stackObj)
done = 1
elif name == 'GuiCommand':
stackObj = SaxStackElement('GuiCommand', None)
self.stack.append(stackObj)
done = 1
elif name == 'PreferencesPage':
stackObj = SaxStackElement('PreferencesPage', None)
self.stack.append(stackObj)
done = 1
elif name == 'Author':
obj = Author.factory()
val = attrs.get('Name', None)
if val is not None:
obj.setName(val)
val = attrs.get('Licence', None)
if val is not None:
obj.setLicence(val)
val = attrs.get('EMail', None)
if val is not None:
obj.setEmail(val)
stackObj = SaxStackElement('Author', obj)
self.stack.append(stackObj)
done = 1
elif name == 'DeveloperDocu':
stackObj = SaxStackElement('DeveloperDocu', None)
self.stack.append(stackObj)
done = 1
elif name == 'UserDocu':
stackObj = SaxStackElement('UserDocu', None)
self.stack.append(stackObj)
done = 1
if not done:
self.reportError('"%s" element not allowed here.' % name)
def endElement(self, name):
    """SAX end-tag hook: pop the finished element off the parse stack and
    attach its object (or accumulated text) to the parent element."""
    done = 0
    if name == 'GenerateModel':
        # document root: record it instead of attaching to a parent
        if len(self.stack) == 1:
            self.root = self.stack[-1].obj
            self.stack.pop()
            done = 1
    else:
        # tags whose child carries a built object
        obj_setters = {
            'Module': 'addModule',
            'PythonExport': 'addPythonexport',
            'Documentation': 'setDocumentation',
            'Methode': 'addMethode',
            'Parameter': 'addParameter',
            'Attribute': 'addAttribute',
            'Sequence': 'setSequence',
            'Dependencies': 'setDependencies',
            'Content': 'setContent',
            'Property': 'addProperty',
            'Feature': 'addFeature',
            'ViewProvider': 'setViewprovider',
            'DocObject': 'addDocobject',
            'Author': 'setAuthor',
        }
        # tags whose child carries plain character data
        content_setters = {
            'CustomAttributes': 'setCustomattributes',
            'ClassDeclarations': 'setClassdeclarations',
            'GuiCommand': 'addGuicommand',
            'PreferencesPage': 'addPreferencespage',
            'DeveloperDocu': 'setDeveloperdocu',
            'UserDocu': 'setUserdocu',
        }
        if name in obj_setters:
            if len(self.stack) >= 2:
                getattr(self.stack[-2].obj, obj_setters[name])(self.stack[-1].obj)
                self.stack.pop()
                done = 1
        elif name in content_setters:
            if len(self.stack) >= 2:
                getattr(self.stack[-2].obj, content_setters[name])(self.stack[-1].content)
                self.stack.pop()
                done = 1
    if not done:
        self.reportError('"%s" element not allowed here.' % name)
def characters(self, chrs, start, end):
    """SAX character-data hook: append the slice to the innermost open element."""
    if not self.stack:
        return
    self.stack[-1].content += chrs[start:end]
def reportError(self, mesg):
    """Write the current parse position (document, line, column) and *mesg*
    to stderr, then abort the whole program with exit status -1."""
    locator = self.locator
    sys.stderr.write('Doc: %s Line: %d Column: %d\n' % \
        (locator.getSystemId(), locator.getLineNumber(),
         locator.getColumnNumber() + 1))
    sys.stderr.write(mesg)
    sys.stderr.write('\n')
    # exit instead of raising so generated parsers fail fast on bad input
    sys.exit(-1)
    #raise RuntimeError
USAGE_TEXT = """
Usage: python <Parser>.py [ -s ] <in_xml_file>
Options:
    -s        Use the SAX parser, not the minidom parser.
"""


def usage():
    """Print the command-line usage text and exit with status -1."""
    # print(X) is valid in both Python 2 and 3 for a single argument;
    # the original bare `print USAGE_TEXT` statement is Python-2-only.
    print(USAGE_TEXT)
    sys.exit(-1)
#
# SAX handler used to determine the top level element.
#
class SaxSelectorHandler(handler.ContentHandler):
    """SAX handler that records only the document's top-level element name.

    It deliberately aborts the parse by raising StopIteration as soon as
    the first start tag is seen."""

    def __init__(self):
        self.topElementName = None

    def getTopElementName(self):
        return self.topElementName

    def startElement(self, name, attrs):
        # remember the root tag, then bail out of the parse immediately
        self.topElementName = name
        raise StopIteration
def parseSelect(inFileName):
    """Sniff the top-level element of an XML file, look up the matching
    generated class in this module, parse the file with minidom into an
    instance of that class, echo the XML to stdout and return the root.

    Raises RuntimeError if no top-level element or no matching class is found.
    """
    # open() replaces the Python-2-only file() builtin; behavior unchanged
    infile = open(inFileName, 'r')
    topElementName = None
    parser = make_parser()
    documentHandler = SaxSelectorHandler()
    parser.setContentHandler(documentHandler)
    try:
        try:
            parser.parse(infile)
        except StopIteration:
            # SaxSelectorHandler aborts the scan after the first start tag
            topElementName = documentHandler.getTopElementName()
        if topElementName is None:
            # raise E(msg) is valid in Python 2 and 3; `raise E, msg` is not
            raise RuntimeError('no top level element')
        topElementName = topElementName.replace('-', '_').replace(':', '_')
        if topElementName not in globals():
            raise RuntimeError('no class for top element: %s' % topElementName)
        topElement = globals()[topElementName]
        infile.seek(0)
        doc = minidom.parse(infile)
    finally:
        infile.close()
    rootNode = doc.childNodes[0]
    rootObj = topElement.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    rootObj.export(sys.stdout, 0)
    return rootObj
def saxParse(inFileName):
    """Parse *inFileName* with the SAX handler, echo the rebuilt XML to
    stdout and return the root object."""
    parser = make_parser()
    documentHandler = SaxGeneratemodelHandler()
    # NOTE(review): parseSelect uses setContentHandler; this older
    # setDocumentHandler API is what the generator emitted -- confirm the
    # parser in use still supports it.
    parser.setDocumentHandler(documentHandler)
    parser.parse('file:%s' % inFileName)
    root = documentHandler.getRoot()
    sys.stdout.write('<?xml version="1.0" ?>\n')
    root.export(sys.stdout, 0)
    return root
def saxParseString(inString):
    """Parse the XML document contained in *inString* with the SAX handler
    and return the root object (without re-exporting it)."""
    parser = make_parser()
    documentHandler = SaxGeneratemodelHandler()
    # NOTE(review): setDocumentHandler/feed/close is the old SAX driver
    # interface -- verify against the parser implementation actually used.
    parser.setDocumentHandler(documentHandler)
    parser.feed(inString)
    parser.close()
    rootObj = documentHandler.getRoot()
    #sys.stdout.write('<?xml version="1.0" ?>\n')
    #rootObj.export(sys.stdout, 0)
    return rootObj
def parse(inFileName):
    """Parse *inFileName* with minidom, build a GenerateModel tree from it,
    echo the XML to stdout and return the root object."""
    dom = minidom.parse(inFileName)
    model = GenerateModel.factory()
    model.build(dom.documentElement)
    # Drop the DOM so its memory can be reclaimed.
    dom = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    model.export(sys.stdout, 0, name_="GenerateModel")
    return model
def parseString(inString):
    """Parse the XML document in *inString* with minidom, build a
    GenerateModel tree, echo the XML to stdout and return the root object."""
    dom = minidom.parseString(inString)
    model = GenerateModel.factory()
    model.build(dom.documentElement)
    # Drop the DOM so its memory can be reclaimed.
    dom = None
    sys.stdout.write('<?xml version="1.0" ?>\n')
    model.export(sys.stdout, 0, name_="GenerateModel")
    return model
def parseLiteral(inFileName):
    """Parse *inFileName* with minidom and write a Python-literal
    reconstruction of the document to stdout; return the root object."""
    doc = minidom.parse(inFileName)
    rootNode = doc.documentElement
    rootObj = GenerateModel.factory()
    rootObj.build(rootNode)
    # Enable Python to collect the space used by the DOM.
    doc = None
    sys.stdout.write('from generateModel_Module import *\n\n')
    sys.stdout.write('rootObj = GenerateModel(\n')
    rootObj.exportLiteral(sys.stdout, 0, name_="GenerateModel")
    sys.stdout.write(')\n')
    return rootObj
def main():
    """Command-line entry point: dispatch to the SAX or the minidom parser."""
    argv = sys.argv[1:]
    if len(argv) == 2 and argv[0] == '-s':
        saxParse(argv[1])
    elif len(argv) == 1:
        parse(argv[0])
    else:
        usage()
# Script entry point: parse the file named on the command line.
if __name__ == '__main__':
    main()
    #import pdb
    #pdb.run('main()')
| codeparrot/github-code-clean |
"""Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox,
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import tkinter
from tkinter.constants import *
tk = tkinter.Tk()
frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
import enum
import sys
import _tkinter # If this fails your Python may not be configured for Tk
TclError = _tkinter.TclError
from tkinter.constants import *
import re
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])', re.ASCII)
def _join(value):
    """Internal function: Tcl-quote each element of *value* and join with spaces."""
    return ' '.join(_stringify(item) for item in value)
def _stringify(value):
    """Internal function: convert *value* into a string that Tcl will
    parse back as a single word."""
    if isinstance(value, (list, tuple)):
        if len(value) == 1:
            # a one-element sequence collapses to its element
            value = _stringify(value[0])
            if _magic_re.search(value):
                value = '{%s}' % value
        else:
            value = '{%s}' % _join(value)
    else:
        value = str(value)
        if not value:
            value = '{}'
        elif _magic_re.search(value):
            # add '\' before special characters and spaces
            value = _magic_re.sub(r'\\\1', value)
            value = value.replace('\n', r'\n')
            value = _space_re.sub(r'\\\1', value)
            if value[0] == '"':
                value = '\\' + value
        elif value[0] == '"' or _space_re.search(value):
            # no magic chars, but a leading quote or embedded whitespace
            # still needs brace-quoting
            value = '{%s}' % value
    return value
def _flatten(seq):
"""Internal function."""
res = ()
for item in seq:
if isinstance(item, (tuple, list)):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
# Prefer the fast C implementation from _tkinter when it is available.
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
    """Internal function: merge a sequence of configuration dicts into one.

    A single dict, a string or None is passed through unchanged."""
    if isinstance(cnfs, dict):
        return cnfs
    if isinstance(cnfs, (type(None), str)):
        return cnfs
    merged = {}
    for entry in _flatten(cnfs):
        try:
            merged.update(entry)
        except (AttributeError, TypeError) as msg:
            print("_cnfmerge: fallback due to:", msg)
            # fall back to item-by-item copying for dict-like objects
            for key, value in entry.items():
                merged[key] = value
    return merged
# Prefer the fast C implementation from _tkinter when it is available.
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
def _splitdict(tk, v, cut_minus=True, conv=None):
"""Return a properly formatted dict built from Tcl list pairs.
If cut_minus is True, the supposed '-' prefix will be removed from
keys. If conv is specified, it is used to convert values.
Tcl list is expected to contain an even number of elements.
"""
t = tk.splitlist(v)
if len(t) % 2:
raise RuntimeError('Tcl list representing a dict is expected '
'to contain an even number of elements')
it = iter(t)
dict = {}
for key, value in zip(it, it):
key = str(key)
if cut_minus and key[0] == '-':
key = key[1:]
if conv:
value = conv(value)
dict[key] = value
return dict
class EventType(str, enum.Enum):
    """X event type codes; each member's value is the numeric code as a str.

    str(member) yields the event name (e.g. str(EventType.KeyPress) ==
    'KeyPress').  The original source had stray trailing commas
    (``Key = KeyPress,``, ``MouseWheel = '38',``) which made the values
    one-element tuples that only worked because the str mixin unpacked
    them; the commas are removed here with identical resulting members.
    """
    KeyPress = '2'
    Key = KeyPress  # alias
    KeyRelease = '3'
    ButtonPress = '4'
    Button = ButtonPress  # alias
    ButtonRelease = '5'
    Motion = '6'
    Enter = '7'
    Leave = '8'
    FocusIn = '9'
    FocusOut = '10'
    Keymap = '11'  # undocumented
    Expose = '12'
    GraphicsExpose = '13'  # undocumented
    NoExpose = '14'  # undocumented
    Visibility = '15'
    Create = '16'
    Destroy = '17'
    Unmap = '18'
    Map = '19'
    MapRequest = '20'
    Reparent = '21'
    Configure = '22'
    ConfigureRequest = '23'
    Gravity = '24'
    ResizeRequest = '25'
    Circulate = '26'
    CirculateRequest = '27'
    Property = '28'
    SelectionClear = '29'  # undocumented
    SelectionRequest = '30'  # undocumented
    Selection = '31'  # undocumented
    Colormap = '32'
    ClientMessage = '33'  # undocumented
    Mapping = '34'  # undocumented
    VirtualEvent = '35'  # undocumented
    Activate = '36'
    Deactivate = '37'
    MouseWheel = '38'

    def __str__(self):
        return self.name
class Event:
    """Container for the properties of an event.

    Instances of this type are generated if one of the following events occurs:

    KeyPress, KeyRelease - for keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - for window events.

    If a callback function for one of these events is registered
    using bind, bind_all, bind_class, or tag_bind, the callback is
    called with an Event as first argument. It will have the
    following attributes (in braces are the event types for which
    the attribute is valid):

        serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
                            Enter, KeyPress, KeyRelease,
                            Leave, Motion)
    state - state as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """

    def __repr__(self):
        # '??' is the placeholder Tk uses for fields that do not apply to
        # this event type; omit those from the repr.
        attrs = {k: v for k, v in self.__dict__.items() if v != '??'}
        if not self.char:
            del attrs['char']
        elif self.char != '??':
            attrs['char'] = repr(self.char)
        if not getattr(self, 'send_event', True):
            del attrs['send_event']
        if self.state == 0:
            del attrs['state']
        elif isinstance(self.state, int):
            # decode the numeric modifier mask into symbolic names
            state = self.state
            mods = ('Shift', 'Lock', 'Control',
                    'Mod1', 'Mod2', 'Mod3', 'Mod4', 'Mod5',
                    'Button1', 'Button2', 'Button3', 'Button4', 'Button5')
            s = []
            for i, n in enumerate(mods):
                if state & (1 << i):
                    s.append(n)
            state = state & ~((1<< len(mods)) - 1)
            if state or not s:
                s.append(hex(state))
            attrs['state'] = '|'.join(s)
        if self.delta == 0:
            del attrs['delta']
        # widget usually is known
        # serial and time are not very interesting
        # keysym_num duplicates keysym
        # x_root and y_root mostly duplicate x and y
        keys = ('send_event',
                'state', 'keysym', 'keycode', 'char',
                'num', 'delta', 'focus',
                'x', 'y', 'width', 'height')
        return '<%s event%s>' % (
            self.type,
            ''.join(' %s=%s' % (k, attrs[k]) for k in keys if k in attrs)
        )
# Module-level default-root bookkeeping; see NoDefaultRoot().
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
    """Inhibit setting of default root window.

    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root
    _support_default_root = 0
    global _default_root
    _default_root = None
    # remove the global entirely so later implicit use raises NameError
    # instead of silently passing None as a master
    del _default_root
def _tkerror(err):
    """Internal function."""
    # default Tcl error callback: deliberately ignore the error
    pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit(code)
_varnum = 0
class Variable:
    """Class to define value holders for e.g. buttons.

    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    _default = ""       # value used when the Tcl variable does not exist yet
    _tk = None          # Tcl interpreter handle (None until __init__ runs)
    _tclCommands = None # names of Tcl commands registered for trace callbacks

    def __init__(self, master=None, value=None, name=None):
        """Construct a variable

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        # check for type of NAME parameter to override weird error message
        # raised from Modules/_tkinter.c:SetVar like:
        # TypeError: setvar() takes exactly 3 arguments (2 given)
        if name is not None and not isinstance(name, str):
            raise TypeError("name must be a string")
        global _varnum
        if not master:
            master = _default_root
        self._root = master._root()
        self._tk = master.tk
        if name:
            self._name = name
        else:
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.initialize(value)
        elif not self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            # only seed the default when the Tcl variable does not exist yet
            self.initialize(self._default)

    def __del__(self):
        """Unset the variable in Tcl."""
        if self._tk is None:
            return
        if self._tk.getboolean(self._tk.call("info", "exists", self._name)):
            self._tk.globalunsetvar(self._name)
        if self._tclCommands is not None:
            for name in self._tclCommands:
                #print '- Tkinter: deleted command', name
                self._tk.deletecommand(name)
            self._tclCommands = None

    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name

    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)

    initialize = set

    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)

    def _register(self, callback):
        # Wrap the callback so errors go through the root's error handler,
        # register it as a Tcl command with a unique readable name, and
        # remember the name for cleanup in __del__.
        f = CallWrapper(callback, None, self._root).__call__
        cbname = repr(id(f))
        try:
            callback = callback.__func__
        except AttributeError:
            pass
        try:
            cbname = cbname + callback.__name__
        except AttributeError:
            pass
        self._tk.createcommand(cbname, f)
        if self._tclCommands is None:
            self._tclCommands = []
        self._tclCommands.append(cbname)
        return cbname

    def trace_add(self, mode, callback):
        """Define a trace callback for the variable.

        Mode is one of "read", "write", "unset", or a list or tuple of
        such strings.
        Callback must be a function which is called when the variable is
        read, written or unset.

        Return the name of the callback.
        """
        cbname = self._register(callback)
        self._tk.call('trace', 'add', 'variable',
                      self._name, mode, (cbname,))
        return cbname

    def trace_remove(self, mode, cbname):
        """Delete the trace callback for a variable.

        Mode is one of "read", "write", "unset" or a list or tuple of
        such strings.  Must be same as were specified in trace_add().
        cbname is the name of the callback returned from trace_add().
        """
        self._tk.call('trace', 'remove', 'variable',
                      self._name, mode, cbname)
        # only delete the Tcl command once no remaining trace uses it
        for m, ca in self.trace_info():
            if self._tk.splitlist(ca)[0] == cbname:
                break
        else:
            self._tk.deletecommand(cbname)
            try:
                self._tclCommands.remove(cbname)
            except ValueError:
                pass

    def trace_info(self):
        """Return all trace callback information."""
        splitlist = self._tk.splitlist
        return [(splitlist(k), v) for k, v in map(splitlist,
            splitlist(self._tk.call('trace', 'info', 'variable', self._name)))]

    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.

        Return the name of the callback.

        This deprecated method wraps a deprecated Tcl method that will
        likely be removed in the future.  Use trace_add() instead.
        """
        # TODO: Add deprecation warning
        cbname = self._register(callback)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname

    trace = trace_variable

    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.

        This deprecated method wraps a deprecated Tcl method that will
        likely be removed in the future.  Use trace_remove() instead.
        """
        # TODO: Add deprecation warning
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        cbname = self._tk.splitlist(cbname)[0]
        # only delete the Tcl command once no remaining trace uses it
        for m, ca in self.trace_info():
            if self._tk.splitlist(ca)[0] == cbname:
                break
        else:
            self._tk.deletecommand(cbname)
            try:
                self._tclCommands.remove(cbname)
            except ValueError:
                pass

    def trace_vinfo(self):
        """Return all trace callback information.

        This deprecated method wraps a deprecated Tcl method that will
        likely be removed in the future.  Use trace_info() instead.
        """
        # TODO: Add deprecation warning
        return [self._tk.splitlist(x) for x in self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name))]

    def __eq__(self, other):
        """Comparison for equality (==).

        Note: if the Variable's master matters to behavior
        also compare self._master == other._master
        """
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
    """Value holder for string variables."""
    _default = ""

    def __init__(self, master=None, value=None, name=None):
        """Construct a string variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return value of variable as string."""
        result = self._tk.globalgetvar(self._name)
        # Tcl may hand back a non-str object; normalise to str.
        return result if isinstance(result, str) else str(result)
class IntVar(Variable):
    """Value holder for integer variables."""
    _default = 0

    def __init__(self, master=None, value=None, name=None):
        """Construct an integer variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as an integer."""
        raw = self._tk.globalgetvar(self._name)
        try:
            return self._tk.getint(raw)
        except (TypeError, TclError):
            # fall back for values Tcl holds as doubles, e.g. "1.0"
            return int(self._tk.getdouble(raw))
class DoubleVar(Variable):
    """Value holder for float variables."""
    _default = 0.0

    def __init__(self, master=None, value=None, name=None):
        """Construct a float variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0.0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a float."""
        raw = self._tk.globalgetvar(self._name)
        return self._tk.getdouble(raw)
class BooleanVar(Variable):
    """Value holder for boolean variables."""
    _default = False

    def __init__(self, master=None, value=None, name=None):
        """Construct a boolean variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to False)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def set(self, value):
        """Set the variable to VALUE."""
        # coerce through Tcl so any Tcl boolean literal is accepted
        coerced = self._tk.getboolean(value)
        return self._tk.globalsetvar(self._name, coerced)

    initialize = set

    def get(self):
        """Return the value of the variable as a bool."""
        try:
            return self._tk.getboolean(self._tk.globalgetvar(self._name))
        except TclError:
            raise ValueError("invalid literal for getboolean()")
def mainloop(n=0):
    """Run the main loop of Tcl."""
    # delegates to the default root's Tcl interpreter
    _default_root.tk.mainloop(n)
# Module-level coercion helpers; the plain builtins suffice here.
getint = int
getdouble = float
def getboolean(s):
    """Convert true and false to integer values 1 and 0."""
    # delegate to the default root's Tcl interpreter, which understands
    # all Tcl boolean literals; map Tcl errors to ValueError
    try:
        return _default_root.tk.getboolean(s)
    except TclError:
        raise ValueError("invalid literal for getboolean()")
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# used for generating child widget names
_last_child_ids = None
# XXX font command?
_tclCommands = None
def destroy(self):
    """Internal function.

    Delete all Tcl commands created for
    this widget in the Tcl interpreter."""
    if self._tclCommands is not None:
        for name in self._tclCommands:
            #print '- Tkinter: deleted command', name
            self.tk.deletecommand(name)
        self._tclCommands = None
def deletecommand(self, name):
"""Internal function.
Delete the Tcl command provided in NAME."""
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
try:
self._tclCommands.remove(name)
except ValueError:
pass
def tk_strictMotif(self, boolean=None):
    """Set Tcl internal variable, whether the look and feel
    should adhere to Motif.

    A parameter of 1 means adhere to Motif (e.g. no color
    change if mouse passes over slider).
    Returns the set value."""
    # passing None merely queries the current value of the Tcl variable
    return self.tk.getboolean(self.tk.call(
        'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
    """Change the color scheme to light brown as used in Tk 3.6 and before."""
    self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
    """Set a new color scheme for all widget elements.

    A single color as argument will cause that all colors of Tk
    widget elements are derived from this.
    Alternatively several keyword parameters and its associated
    colors can be given. The following keywords are valid:
    activeBackground, foreground, selectColor,
    activeForeground, highlightBackground, selectBackground,
    background, highlightColor, selectForeground,
    disabledForeground, insertBackground, troughColor."""
    # flatten positional and keyword arguments into one Tcl argument list
    self.tk.call(('tk_setPalette',)
        + _flatten(args) + _flatten(list(kw.items())))
def wait_variable(self, name='PY_VAR'):
    """Wait until the variable is modified.

    A parameter of type IntVar, StringVar, DoubleVar or
    BooleanVar must be given."""
    self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
def setvar(self, name='PY_VAR', value='1'):
    """Set Tcl variable NAME to VALUE."""
    self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
    """Return value of Tcl variable NAME."""
    return self.tk.getvar(name)
def getint(self, s):
try:
return self.tk.getint(s)
except TclError as exc:
raise ValueError(str(exc))
def getdouble(self, s):
try:
return self.tk.getdouble(s)
except TclError as exc:
raise ValueError(str(exc))
def getboolean(self, s):
    """Return a boolean value for Tcl boolean values true and false given as parameter."""
    # map Tcl conversion failures to ValueError for Python callers
    try:
        return self.tk.getboolean(s)
    except TclError:
        raise ValueError("invalid literal for getboolean()")
def focus_set(self):
    """Direct input focus to this widget.

    If the application currently does not have the focus
    this widget will get the focus if the application gets
    the focus through the window manager."""
    self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
    """Direct input focus to this widget even if the
    application does not have the focus. Use with
    caution!"""
    self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
    """Return the widget which has currently the focus on the
    display where this widget is located.

    Return None if the application does not have the focus."""
    name = self.tk.call('focus', '-displayof', self._w)
    if name == 'none' or not name: return None
    return self._nametowidget(name)
def focus_lastfor(self):
    """Return the widget which would have the focus if top level
    for this widget gets the focus from the window manager."""
    name = self.tk.call('focus', '-lastfor', self._w)
    if name == 'none' or not name: return None
    return self._nametowidget(name)
def tk_focusFollowsMouse(self):
    """The widget under mouse will get automatically focus. Can not
    be disabled easily."""
    self.tk.call('tk_focusFollowsMouse')
def tk_focusNext(self):
    """Return the next widget in the focus order which follows
    widget which has currently the focus.

    The focus order first goes to the next child, then to
    the children of the child recursively and then to the
    next sibling which is higher in the stacking order.  A
    widget is omitted if it has the takefocus resource set
    to 0."""
    name = self.tk.call('tk_focusNext', self._w)
    if not name: return None
    return self._nametowidget(name)
def tk_focusPrev(self):
"""Return previous widget in the focus order. See tk_focusNext for details."""
name = self.tk.call('tk_focusPrev', self._w)
if not name: return None
return self._nametowidget(name)
def after(self, ms, func=None, *args):
    """Call function once after given time.

    MS specifies the time in milliseconds. FUNC gives the
    function which shall be called. Additional parameters
    are given as parameters to the function call.  Return
    identifier to cancel scheduling with after_cancel."""
    if not func:
        # I'd rather use time.sleep(ms*0.001)
        self.tk.call('after', ms)
        return None
    else:
        def callit():
            try:
                func(*args)
            finally:
                try:
                    # unregister the one-shot Tcl command once it has fired
                    self.deletecommand(name)
                except TclError:
                    pass
        callit.__name__ = func.__name__
        name = self._register(callit)
        return self.tk.call('after', ms, name)
def after_idle(self, func, *args):
    """Call FUNC once if the Tcl main loop has no event to
    process.

    Return an identifier to cancel the scheduling with
    after_cancel."""
    return self.after('idle', func, *args)
def after_cancel(self, id):
    """Cancel scheduling of function identified with ID.

    Identifier returned by after or after_idle must be
    given as first parameter.
    """
    if not id:
        raise ValueError('id must be a valid identifier returned from '
                         'after or after_idle')
    try:
        # look up the registered Tcl command so it can be unregistered too
        data = self.tk.call('after', 'info', id)
        script = self.tk.splitlist(data)[0]
        self.deletecommand(script)
    except TclError:
        pass
    self.tk.call('after', 'cancel', id)
def bell(self, displayof=0):
    """Ring a display's bell."""
    self.tk.call(('bell',) + self._displayof(displayof))
# Clipboard handling:
def clipboard_get(self, **kw):
    """Retrieve data from the clipboard on window's display.

    The window keyword defaults to the root window of the Tkinter
    application.

    The type keyword specifies the form in which the data is
    to be returned and should be an atom name such as STRING
    or FILE_NAME.  Type defaults to STRING, except on X11, where the default
    is to try UTF8_STRING and fall back to STRING.

    This command is equivalent to:

    selection_get(CLIPBOARD)
    """
    if 'type' not in kw and self._windowingsystem == 'x11':
        try:
            kw['type'] = 'UTF8_STRING'
            return self.tk.call(('clipboard', 'get') + self._options(kw))
        except TclError:
            # fall back to the default STRING type below
            del kw['type']
    return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
    """Clear the data in the Tk clipboard.

    A widget specified for the optional displayof keyword
    argument specifies the target display."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    self.tk.call(('clipboard', 'clear') + self._options(kw))
def clipboard_append(self, string, **kw):
    """Append STRING to the Tk clipboard.

    A widget specified at the optional displayof keyword
    argument specifies the target display. The clipboard
    can be retrieved with selection_get."""
    if 'displayof' not in kw: kw['displayof'] = self._w
    # '--' terminates option parsing so STRING may begin with a dash
    self.tk.call(('clipboard', 'append') + self._options(kw)
          + ('--', string))
# XXX grab current w/o window argument
def grab_current(self):
    """Return widget which has currently the grab in this application
    or None."""
    name = self.tk.call('grab', 'current', self._w)
    if not name: return None
    return self._nametowidget(name)
def grab_release(self):
    """Release grab for this widget if currently set."""
    self.tk.call('grab', 'release', self._w)
def grab_set(self):
    """Set grab for this widget.

    A grab directs all events to this and descendant
    widgets in the application."""
    self.tk.call('grab', 'set', self._w)
def grab_set_global(self):
    """Set global grab for this widget.

    A global grab directs all events to this and
    descendant widgets on the display. Use with caution -
    other applications do not get events anymore."""
    self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
def option_add(self, pattern, value, priority = None):
    """Set a VALUE (second parameter) for an option
    PATTERN (first parameter).

    An optional third parameter gives the numeric priority
    (defaults to 80)."""
    self.tk.call('option', 'add', pattern, value, priority)
def option_clear(self):
    """Clear the option database.

    It will be reloaded if option_add is called."""
    self.tk.call('option', 'clear')
def option_get(self, name, className):
    """Return the value for an option NAME for this widget
    with CLASSNAME.

    Values with higher priority override lower values."""
    return self.tk.call('option', 'get', self._w, name, className)
def option_readfile(self, fileName, priority = None):
    """Read file FILENAME into the option database.

    An optional second parameter gives the numeric
    priority."""
    self.tk.call('option', 'readfile', fileName, priority)
def selection_clear(self, **kw):
"""Clear the current X selection."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('selection', 'clear') + self._options(kw))
    def selection_get(self, **kw):
        """Return the contents of the current X selection.
        A keyword parameter selection specifies the name of
        the selection and defaults to PRIMARY.  A keyword
        parameter displayof specifies a widget on the display
        to use. A keyword parameter type specifies the form of data to be
        fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
        before STRING."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        if 'type' not in kw and self._windowingsystem == 'x11':
            # Prefer UTF8_STRING on X11; fall back to the default type
            # if the selection owner does not support it.
            try:
                kw['type'] = 'UTF8_STRING'
                return self.tk.call(('selection', 'get') + self._options(kw))
            except TclError:
                del kw['type']
        return self.tk.call(('selection', 'get') + self._options(kw))
    def selection_handle(self, command, **kw) -> None:
        """Specify a function COMMAND to call if the X
        selection owned by this widget is queried by another
        application.
        This function must return the contents of the
        selection. The function will be called with the
        arguments OFFSET and LENGTH which allows the chunking
        of very long selections. The following keyword
        parameters can be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        # Register COMMAND as a Tcl command so Tk can invoke it on demand.
        name = self._register(command)
        self.tk.call(('selection', 'handle') + self._options(kw)
              + (self._w, name))
def selection_own(self, **kw):
"""Become owner of X selection.
A keyword parameter selection specifies the name of
the selection (default PRIMARY)."""
self.tk.call(('selection', 'own') +
self._options(kw) + (self._w,))
    def selection_own_get(self, **kw):
        """Return owner of X selection.
        The following keyword parameter can
        be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        name = self.tk.call(('selection', 'own') + self._options(kw))
        # Empty reply means nobody owns the selection.
        if not name: return None
        return self._nametowidget(name)
    def send(self, interp, cmd, *args):
        """Send Tcl command CMD to different interpreter INTERP to be executed."""
        return self.tk.call(('send', interp, cmd) + args)
    def lower(self, belowThis=None) -> None:
        """Lower this widget in the stacking order."""
        self.tk.call('lower', self._w, belowThis)
    def tkraise(self, aboveThis=None) -> None:
        """Raise this widget in the stacking order."""
        self.tk.call('raise', self._w, aboveThis)
    # Alias: 'raise' is a Python keyword, so the method is named tkraise;
    # 'lift' is the conventional synonym.
    lift = tkraise
    def winfo_atom(self, name, displayof=0) -> int:
        """Return integer which represents atom NAME."""
        args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
        return self.tk.getint(self.tk.call(args))
    def winfo_atomname(self, id, displayof=0):
        """Return name of atom with identifier ID."""
        args = ('winfo', 'atomname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return self.tk.getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
    def winfo_class(self):
        """Return window class name of this widget."""
        return self.tk.call('winfo', 'class', self._w)
    def winfo_colormapfull(self) -> bool:
        """Return True if at the last color request the colormap was full."""
        return self.tk.getboolean(
            self.tk.call('winfo', 'colormapfull', self._w))
    def winfo_containing(self, rootX, rootY, displayof=0):
        """Return the widget which is at the root coordinates ROOTX, ROOTY."""
        args = ('winfo', 'containing') \
               + self._displayof(displayof) + (rootX, rootY)
        name = self.tk.call(args)
        # Empty reply: no window of this application at those coordinates.
        if not name: return None
        return self._nametowidget(name)
    def winfo_depth(self) -> int:
        """Return the number of bits per pixel."""
        return self.tk.getint(self.tk.call('winfo', 'depth', self._w))
    def winfo_exists(self) -> int:
        """Return true if this widget exists."""
        return self.tk.getint(
            self.tk.call('winfo', 'exists', self._w))
    def winfo_fpixels(self, number) -> float:
        """Return the number of pixels for the given distance NUMBER
        (e.g. "3c") as float."""
        return self.tk.getdouble(self.tk.call(
            'winfo', 'fpixels', self._w, number))
    def winfo_geometry(self):
        """Return geometry string for this widget in the form "widthxheight+X+Y"."""
        return self.tk.call('winfo', 'geometry', self._w)
    def winfo_height(self) -> int:
        """Return height of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'height', self._w))
    def winfo_id(self) -> int:
        """Return identifier ID for this widget."""
        # base 0: Tk may report the window id in hex ('0x...') or decimal.
        return int(self.tk.call('winfo', 'id', self._w), 0)
    def winfo_interps(self, displayof=0):
        """Return the name of all Tcl interpreters for this display."""
        args = ('winfo', 'interps') + self._displayof(displayof)
        return self.tk.splitlist(self.tk.call(args))
    def winfo_ismapped(self) -> int:
        """Return true if this widget is mapped."""
        return self.tk.getint(
            self.tk.call('winfo', 'ismapped', self._w))
    def winfo_manager(self):
        """Return the window manager name for this widget."""
        return self.tk.call('winfo', 'manager', self._w)
    def winfo_name(self):
        """Return the name of this widget."""
        return self.tk.call('winfo', 'name', self._w)
    def winfo_parent(self):
        """Return the name of the parent of this widget."""
        return self.tk.call('winfo', 'parent', self._w)
    def winfo_pathname(self, id, displayof=0):
        """Return the pathname of the widget given by ID."""
        args = ('winfo', 'pathname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_pixels(self, number) -> int:
        """Rounded integer value of winfo_fpixels."""
        return self.tk.getint(
            self.tk.call('winfo', 'pixels', self._w, number))
    def winfo_pointerx(self) -> int:
        """Return the x coordinate of the pointer on the root window."""
        return self.tk.getint(
            self.tk.call('winfo', 'pointerx', self._w))
    def winfo_pointerxy(self):
        """Return a tuple of x and y coordinates of the pointer on the root window."""
        return self._getints(
            self.tk.call('winfo', 'pointerxy', self._w))
    def winfo_pointery(self) -> int:
        """Return the y coordinate of the pointer on the root window."""
        return self.tk.getint(
            self.tk.call('winfo', 'pointery', self._w))
    def winfo_reqheight(self) -> int:
        """Return requested height of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'reqheight', self._w))
    def winfo_reqwidth(self) -> int:
        """Return requested width of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'reqwidth', self._w))
    def winfo_rgb(self, color):
        """Return tuple of decimal values for red, green, blue for
        COLOR in this widget."""
        return self._getints(
            self.tk.call('winfo', 'rgb', self._w, color))
    def winfo_rootx(self) -> int:
        """Return x coordinate of upper left corner of this widget on the
        root window."""
        return self.tk.getint(
            self.tk.call('winfo', 'rootx', self._w))
    def winfo_rooty(self) -> int:
        """Return y coordinate of upper left corner of this widget on the
        root window."""
        return self.tk.getint(
            self.tk.call('winfo', 'rooty', self._w))
    def winfo_screen(self):
        """Return the screen name of this widget."""
        return self.tk.call('winfo', 'screen', self._w)
    def winfo_screencells(self) -> int:
        """Return the number of the cells in the colormap of the screen
        of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'screencells', self._w))
    def winfo_screendepth(self) -> int:
        """Return the number of bits per pixel of the root window of the
        screen of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'screendepth', self._w))
    def winfo_screenheight(self) -> int:
        """Return the number of pixels of the height of the screen of this widget
        in pixel."""
        return self.tk.getint(
            self.tk.call('winfo', 'screenheight', self._w))
    def winfo_screenmmheight(self) -> int:
        """Return the number of pixels of the height of the screen of
        this widget in mm."""
        return self.tk.getint(
            self.tk.call('winfo', 'screenmmheight', self._w))
    def winfo_screenmmwidth(self) -> int:
        """Return the number of pixels of the width of the screen of
        this widget in mm."""
        return self.tk.getint(
            self.tk.call('winfo', 'screenmmwidth', self._w))
    def winfo_screenvisual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the default
        colormodel of this screen."""
        return self.tk.call('winfo', 'screenvisual', self._w)
    def winfo_screenwidth(self) -> int:
        """Return the number of pixels of the width of the screen of
        this widget in pixel."""
        return self.tk.getint(
            self.tk.call('winfo', 'screenwidth', self._w))
    def winfo_server(self):
        """Return information of the X-Server of the screen of this widget in
        the form "XmajorRminor vendor vendorVersion"."""
        return self.tk.call('winfo', 'server', self._w)
    def winfo_toplevel(self):
        """Return the toplevel widget of this widget."""
        return self._nametowidget(self.tk.call(
            'winfo', 'toplevel', self._w))
    def winfo_viewable(self) -> int:
        """Return true if the widget and all its higher ancestors are mapped."""
        return self.tk.getint(
            self.tk.call('winfo', 'viewable', self._w))
    def winfo_visual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the
        colormodel of this widget."""
        return self.tk.call('winfo', 'visual', self._w)
    def winfo_visualid(self):
        """Return the X identifier for the visual for this widget."""
        return self.tk.call('winfo', 'visualid', self._w)
    def winfo_visualsavailable(self, includeids=False):
        """Return a list of all visuals available for the screen
        of this widget.
        Each item in the list consists of a visual name (see winfo_visual), a
        depth and if includeids is true is given also the X identifier."""
        # Passing None as the last argument drops it from the Tcl call.
        data = self.tk.call('winfo', 'visualsavailable', self._w,
                            'includeids' if includeids else None)
        data = [self.tk.splitlist(x) for x in self.tk.splitlist(data)]
        return [self.__winfo_parseitem(x) for x in data]
    def __winfo_parseitem(self, t):
        """Internal function.  Convert all but the first element of a
        visual description tuple to integers."""
        return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
    def __winfo_getint(self, x) -> int:
        """Internal function.  Parse an int, accepting hex ('0x...') too."""
        return int(x, 0)
    def winfo_vrootheight(self) -> int:
        """Return the height of the virtual root window associated with this
        widget in pixels. If there is no virtual root window return the
        height of the screen."""
        return self.tk.getint(
            self.tk.call('winfo', 'vrootheight', self._w))
    def winfo_vrootwidth(self) -> int:
        """Return the width of the virtual root window associated with this
        widget in pixel. If there is no virtual root window return the
        width of the screen."""
        return self.tk.getint(
            self.tk.call('winfo', 'vrootwidth', self._w))
    def winfo_vrootx(self) -> int:
        """Return the x offset of the virtual root relative to the root
        window of the screen of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'vrootx', self._w))
    def winfo_vrooty(self) -> int:
        """Return the y offset of the virtual root relative to the root
        window of the screen of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'vrooty', self._w))
    def winfo_width(self) -> int:
        """Return the width of this widget."""
        return self.tk.getint(
            self.tk.call('winfo', 'width', self._w))
    def winfo_x(self) -> int:
        """Return the x coordinate of the upper left corner of this widget
        in the parent."""
        return self.tk.getint(
            self.tk.call('winfo', 'x', self._w))
    def winfo_y(self) -> int:
        """Return the y coordinate of the upper left corner of this widget
        in the parent."""
        return self.tk.getint(
            self.tk.call('winfo', 'y', self._w))
    def update(self) -> None:
        """Enter event loop until all pending events have been processed by Tcl."""
        self.tk.call('update')
    def update_idletasks(self) -> None:
        """Enter event loop until all idle callbacks have been called. This
        will update the display of windows but not process events caused by
        the user."""
        self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
"""Set or get the list of bindtags for this widget.
With no argument return the list of all bindtags associated with
this widget. With a list of strings as argument the bindtags are
set to this list. The bindtags determine in which order events are
processed (see bind)."""
if tagList is None:
return self.tk.splitlist(
self.tk.call('bindtags', self._w))
else:
self.tk.call('bindtags', self._w, tagList)
    def _bind(self, what, sequence, func, add, needcleanup=1):
        """Internal function.  Shared implementation of bind, bind_all
        and bind_class: WHAT is the leading Tcl command tuple.  Creates
        a binding, queries one sequence, or lists all bound sequences,
        depending on which arguments are supplied."""
        if isinstance(func, str):
            # A raw Tcl script was passed; install it verbatim.
            self.tk.call(what + (sequence, func))
        elif func:
            funcid = self._register(func, self._substitute,
                                    needcleanup)
            # Bound script: invoke the registered command with the percent
            # substitutions; stop further event processing if it returns
            # "break".  A leading '+' appends instead of replacing.
            cmd = ('%sif {"[%s %s]" == "break"} break\n'
                   %
                   (add and '+' or '',
                    funcid, self._subst_format_str))
            self.tk.call(what + (sequence, cmd))
            return funcid
        elif sequence:
            return self.tk.call(what + (sequence,))
        else:
            return self.tk.splitlist(self.tk.call(what))
    def bind(self, sequence=None, func=None, add=None):
        """Bind to this widget at event SEQUENCE a call to function FUNC.

        SEQUENCE is a string of concatenated event
        patterns. An event pattern is of the form
        <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
        of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
        Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
        B3, Alt, Button4, B4, Double, Button5, B5 Triple,
        Mod1, M1. TYPE is one of Activate, Enter, Map,
        ButtonPress, Button, Expose, Motion, ButtonRelease
        FocusIn, MouseWheel, Circulate, FocusOut, Property,
        Colormap, Gravity Reparent, Configure, KeyPress, Key,
        Unmap, Deactivate, KeyRelease Visibility, Destroy,
        Leave and DETAIL is the button number for ButtonPress,
        ButtonRelease and DETAIL is the Keysym for KeyPress and
        KeyRelease. Examples are
        <Control-Button-1> for pressing Control and mouse button 1 or
        <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
        An event pattern can also be a virtual event of the form
        <<AString>> where AString can be arbitrary. This
        event can be generated by event_generate.
        If events are concatenated they must appear shortly
        after each other.

        FUNC will be called if the event sequence occurs with an
        instance of Event as argument. If the return value of FUNC is
        "break" no further bound function is invoked.

        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function.

        Bind will return an identifier to allow deletion of the bound function with
        unbind without memory leak.

        If FUNC or SEQUENCE is omitted the bound function or list
        of bound events are returned."""
        # All the real work happens in the shared helper _bind().
        return self._bind(('bind', self._w), sequence, func, add)
def unbind(self, sequence, funcid=None):
"""Unbind for this widget for event SEQUENCE the
function identified with FUNCID."""
self.tk.call('bind', self._w, sequence, '')
if funcid:
self.deletecommand(funcid)
    def bind_all(self, sequence=None, func=None, add=None):
        """Bind to all widgets at an event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function. See bind for the return value."""
        # needcleanup=0: 'all' bindings outlive any single widget.
        return self._bind(('bind', 'all'), sequence, func, add, 0)
    def unbind_all(self, sequence) -> None:
        """Unbind for all widgets for event SEQUENCE all functions."""
        self.tk.call('bind', 'all' , sequence, '')
    def bind_class(self, className, sequence=None, func=None, add=None):
        """Bind to widgets with bindtag CLASSNAME at event
        SEQUENCE a call of function FUNC. An additional
        boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or
        whether it will replace the previous function. See bind for
        the return value."""
        return self._bind(('bind', className), sequence, func, add, 0)
    def unbind_class(self, className, sequence) -> None:
        """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
        all functions."""
        self.tk.call('bind', className , sequence, '')
    def mainloop(self, n=0) -> None:
        """Call the mainloop of Tk."""
        self.tk.mainloop(n)
    def quit(self) -> None:
        """Quit the Tcl interpreter. All widgets will be destroyed."""
        self.tk.quit()
    def _getints(self, string):
        """Internal function.  Parse a Tcl list of integers into a tuple;
        returns None (implicitly) for an empty/falsy reply."""
        if string:
            return tuple(map(self.tk.getint, self.tk.splitlist(string)))
    def _getdoubles(self, string):
        """Internal function.  Parse a Tcl list of floats into a tuple;
        returns None (implicitly) for an empty/falsy reply."""
        if string:
            return tuple(map(self.tk.getdouble, self.tk.splitlist(string)))
    def _getboolean(self, string):
        """Internal function.  Parse a Tcl boolean; returns None
        (implicitly) for an empty/falsy reply."""
        if string:
            return self.tk.getboolean(string)
def _displayof(self, displayof):
"""Internal function."""
if displayof:
return ('-displayof', displayof)
if displayof is None:
return ('-displayof', self._w)
return ()
    @property
    def _windowingsystem(self):
        """Internal function.  Return Tk's windowing system name
        (e.g. 'x11'), cached on the root widget after the first query."""
        try:
            return self._root()._windowingsystem_cached
        except AttributeError:
            # First access: ask Tcl once and memoize the answer on the root.
            ws = self._root()._windowingsystem_cached = \
                        self.tk.call('tk', 'windowingsystem')
            return ws
    def _options(self, cnf, kw = None):
        """Internal function.  Convert a configuration dict (plus optional
        keyword dict) into a flat tuple of ('-option', value) Tcl
        arguments, registering callables as Tcl commands."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        else:
            cnf = _cnfmerge(cnf)
        res = ()
        for k, v in cnf.items():
            if v is not None:
                # A trailing underscore lets callers pass option names that
                # would otherwise clash with Python keywords (e.g. 'in_').
                if k[-1] == '_': k = k[:-1]
                if callable(v):
                    v = self._register(v)
                elif isinstance(v, (tuple, list)):
                    nv = []
                    for item in v:
                        if isinstance(item, int):
                            nv.append(str(item))
                        elif isinstance(item, str):
                            nv.append(_stringify(item))
                        else:
                            # Mixed/unknown element type: abandon conversion
                            # and pass the original sequence through (for/else
                            # below only joins when no break occurred).
                            break
                    else:
                        v = ' '.join(nv)
                res = res + ('-'+k, v)
        return res
    def nametowidget(self, name):
        """Return the Tkinter instance of a widget identified by
        its Tcl name NAME."""
        name = str(name).split('.')
        w = self
        if not name[0]:
            # Name began with '.', i.e. it is absolute: walk from the root.
            w = w._root()
            name = name[1:]
        for n in name:
            if not n:
                # Empty path component (e.g. trailing '.'): stop descending.
                break
            w = w.children[n]
        return w
    _nametowidget = nametowidget
    def _register(self, func, subst=None, needcleanup=1):
        """Return a newly created Tcl function. If this
        function is called, the Python function FUNC will
        be executed. An optional function SUBST can
        be given which will be executed before FUNC."""
        f = CallWrapper(func, subst, self).__call__
        # The Tcl command name is the wrapper's id plus, when available,
        # the Python function's name, which helps debugging.
        name = repr(id(f))
        try:
            func = func.__func__
        except AttributeError:
            pass
        try:
            name = name + func.__name__
        except AttributeError:
            pass
        self.tk.createcommand(name, f)
        if needcleanup:
            # Record the command so it can be deleted later (see
            # deletecommand callers) instead of leaking Tcl commands.
            if self._tclCommands is None:
                self._tclCommands = []
            self._tclCommands.append(name)
        return name
    register = _register
def _root(self):
"""Internal function."""
w = self
while w.master: w = w.master
return w
_subst_format = ('%#', '%b', '%f', '%h', '%k',
'%s', '%t', '%w', '%x', '%y',
'%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
    def _substitute(self, *args):
        """Internal function.  Decode the Tk percent-substitution strings
        (one per entry of _subst_format) into a single Event instance,
        returned as a 1-tuple suitable for calling the bound function."""
        if len(args) != len(self._subst_format): return args
        getboolean = self.tk.getboolean

        getint = self.tk.getint
        def getint_event(s):
            """Tk changed behavior in 8.4.2, returning "??" rather more often."""
            try:
                return getint(s)
            except (ValueError, TclError):
                return s

        nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
        # Missing: (a, c, d, m, o, v, B, R)
        e = Event()
        # serial field: valid for all events
        # number of button: ButtonPress and ButtonRelease events only
        # height field: Configure, ConfigureRequest, Create,
        # ResizeRequest, and Expose events only
        # keycode field: KeyPress and KeyRelease events only
        # time field: "valid for events that contain a time field"
        # width field: Configure, ConfigureRequest, Create, ResizeRequest,
        # and Expose events only
        # x field: "valid for events that contain an x field"
        # y field: "valid for events that contain a y field"
        # keysym as decimal: KeyPress and KeyRelease events only
        # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
        # KeyRelease, and Motion events
        e.serial = getint(nsign)
        e.num = getint_event(b)
        try: e.focus = getboolean(f)
        except TclError: pass
        e.height = getint_event(h)
        e.keycode = getint_event(k)
        e.state = getint_event(s)
        e.time = getint_event(t)
        e.width = getint_event(w)
        e.x = getint_event(x)
        e.y = getint_event(y)
        e.char = A
        try: e.send_event = getboolean(E)
        except TclError: pass
        e.keysym = K
        e.keysym_num = getint_event(N)
        try:
            e.type = EventType(T)
        except ValueError:
            # Unknown event type string: keep the raw Tcl value.
            e.type = T
        try:
            e.widget = self._nametowidget(W)
        except KeyError:
            # No Tkinter counterpart for this window: keep the raw name.
            e.widget = W
        e.x_root = getint_event(X)
        e.y_root = getint_event(Y)
        try:
            e.delta = getint(D)
        except (ValueError, TclError):
            e.delta = 0
        return (e,)
    def _report_exception(self) -> None:
        """Internal function.  Route the pending exception to the root
        widget's report_callback_exception handler."""
        exc, val, tb = sys.exc_info()
        root = self._root()
        root.report_callback_exception(exc, val, tb)
    def _getconfigure(self, *args) -> dict:
        """Call Tcl configure command and return the result as a dict,
        keyed by option name (without the leading '-')."""
        cnf = {}
        for x in self.tk.splitlist(self.tk.call(*args)):
            x = self.tk.splitlist(x)
            cnf[x[0][1:]] = (x[0][1:],) + x[1:]
        return cnf
    def _getconfigure1(self, *args):
        """Internal function.  Like _getconfigure, but for a single option:
        return one (name, ...) tuple with the leading '-' stripped."""
        x = self.tk.splitlist(self.tk.call(*args))
        return (x[0][1:],) + x[1:]
    def _configure(self, cmd, cnf, kw):
        """Internal function.  Shared implementation of configure():
        query all options (cnf None), query one option (cnf a string),
        or set options (cnf/kw a mapping)."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        if cnf is None:
            return self._getconfigure(_flatten((self._w, cmd)))
        if isinstance(cnf, str):
            return self._getconfigure1(_flatten((self._w, cmd, '-'+cnf)))
        self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
    # These used to be defined in Widget:
    def configure(self, cnf=None, **kw):
        """Configure resources of a widget.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method keys.
        """
        return self._configure('configure', cnf, kw)
    config = configure

    def cget(self, key):
        """Return the resource value for a KEY given as string."""
        return self.tk.call(self._w, 'cget', '-' + key)
    # Allow dictionary-style access: widget['option'] / widget['option'] = v.
    __getitem__ = cget

    def __setitem__(self, key, value) -> None:
        self.configure({key: value})
def keys(self):
"""Return a list of all resource names of this widget."""
splitlist = self.tk.splitlist
return [splitlist(x)[0][1:] for x in
splitlist(self.tk.call(self._w, 'configure'))]
    def __str__(self) -> str:
        """Return the window path name of this widget."""
        return self._w
    def __repr__(self) -> str:
        """Return '<module.QualName object .path.name>' for debugging."""
        return '<%s.%s object %s>' % (
            self.__class__.__module__, self.__class__.__qualname__, self._w)
    # Pack methods that apply to the master
    _noarg_ = ['_noarg_']  # unique sentinel: distinguishes "no argument" from None/False

    def pack_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given the current setting will be returned.
        """
        if flag is Misc._noarg_:
            # No argument given: query the current setting.
            return self._getboolean(self.tk.call(
                'pack', 'propagate', self._w))
        else:
            self.tk.call('pack', 'propagate', self._w, flag)
    propagate = pack_propagate
    def pack_slaves(self) -> list:
        """Return a list of all slaves of this widget
        in its packing order."""
        return [self._nametowidget(x) for x in
                self.tk.splitlist(
                   self.tk.call('pack', 'slaves', self._w))]
    slaves = pack_slaves

    # Place method that applies to the master
    def place_slaves(self) -> list:
        """Return a list of all slaves of this widget
        managed by the place geometry manager."""
        return [self._nametowidget(x) for x in
                self.tk.splitlist(
                   self.tk.call(
                       'place', 'slaves', self._w))]
    # Grid methods that apply to the master
    def grid_anchor(self, anchor=None) -> None: # new in Tk 8.5
        """The anchor value controls how to place the grid within the
        master when no row/column has any weight.

        The default anchor is nw."""
        self.tk.call('grid', 'anchor', self._w, anchor)
    anchor = grid_anchor
    def grid_bbox(self, column=None, row=None, col2=None, row2=None):
        """Return a tuple of integer coordinates for the bounding
        box of this widget controlled by the geometry manager grid.

        If COLUMN, ROW is given the bounding box applies from
        the cell with row and column 0 to the specified
        cell. If COL2 and ROW2 are given the bounding box
        extends to that cell.

        The returned integers specify the offset of the upper left
        corner in the master widget and the width and height.
        """
        args = ('grid', 'bbox', self._w)
        if column is not None and row is not None:
            args = args + (column, row)
        if col2 is not None and row2 is not None:
            args = args + (col2, row2)
        # _getints yields None for an empty reply; normalize falsy to None.
        return self._getints(self.tk.call(*args)) or None
    bbox = grid_bbox
    def _gridconvvalue(self, value):
        """Internal function.  Convert a Tcl grid option value to a Python
        int or float where possible; '' becomes None; anything that fails
        to parse is returned unchanged."""
        if isinstance(value, (str, _tkinter.Tcl_Obj)):
            try:
                svalue = str(value)
                if not svalue:
                    return None
                elif '.' in svalue:
                    return self.tk.getdouble(svalue)
                else:
                    return self.tk.getint(svalue)
            except (ValueError, TclError):
                pass
        return value
    def _grid_configure(self, command, index, cnf, kw):
        """Internal function.  Shared implementation of
        grid_rowconfigure/grid_columnconfigure: query all options,
        query a single option, or set options for row/column INDEX."""
        if isinstance(cnf, str) and not kw:
            # A bare string queries a single option; normalize it to the
            # '-option' form Tcl expects (trailing '_' stripped as usual).
            if cnf[-1:] == '_':
                cnf = cnf[:-1]
            if cnf[:1] != '-':
                cnf = '-'+cnf
            options = (cnf,)
        else:
            options = self._options(cnf, kw)
        if not options:
            # No options at all: return every option as a dict.
            return _splitdict(
                self.tk,
                self.tk.call('grid', command, self._w, index),
                conv=self._gridconvvalue)
        res = self.tk.call(
                  ('grid', command, self._w, index)
                  + options)
        if len(options) == 1:
            # Single-option query: return just the converted value.
            return self._gridconvvalue(res)
    # NOTE(review): cnf={} is a mutable default, but it is never mutated
    # here (only read by _grid_configure), so sharing is harmless.
    def grid_columnconfigure(self, index, cnf={}, **kw):
        """Configure column INDEX of a grid.

        Valid resources are minsize (minimum size of the column),
        weight (how much does additional space propagate to this column)
        and pad (how much space to let additionally)."""
        return self._grid_configure('columnconfigure', index, cnf, kw)
    columnconfigure = grid_columnconfigure
def grid_location(self, x, y):
"""Return a tuple of column and row which identify the cell
at which the pixel at position X and Y inside the master
widget is located."""
return self._getints(
self.tk.call(
'grid', 'location', self._w, x, y)) or None
    def grid_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given, the current setting will be returned.
        """
        if flag is Misc._noarg_:
            # Sentinel untouched: no argument given, so query the setting.
            return self._getboolean(self.tk.call(
                'grid', 'propagate', self._w))
        else:
            self.tk.call('grid', 'propagate', self._w, flag)
    # NOTE(review): cnf={} is a mutable default, but it is never mutated
    # here (only read by _grid_configure), so sharing is harmless.
    def grid_rowconfigure(self, index, cnf={}, **kw):
        """Configure row INDEX of a grid.

        Valid resources are minsize (minimum size of the row),
        weight (how much does additional space propagate to this row)
        and pad (how much space to let additionally)."""
        return self._grid_configure('rowconfigure', index, cnf, kw)
    rowconfigure = grid_rowconfigure
    def grid_size(self):
        """Return a tuple of the number of columns and rows in the grid."""
        return self._getints(
            self.tk.call('grid', 'size', self._w)) or None
    size = grid_size
def grid_slaves(self, row=None, column=None):
"""Return a list of all slaves of this widget
in its packing order."""
args = ()
if row is not None:
args = args + ('-row', row)
if column is not None:
args = args + ('-column', column)
return [self._nametowidget(x) for x in
self.tk.splitlist(self.tk.call(
('grid', 'slaves', self._w) + args))]
    # Support for the "event" command, new in Tk 4.2.
    # By Case Roole.

    def event_add(self, virtual, *sequences) -> None:
        """Bind a virtual event VIRTUAL (of the form <<Name>>)
        to an event SEQUENCE such that the virtual event is triggered
        whenever SEQUENCE occurs."""
        args = ('event', 'add', virtual) + sequences
        self.tk.call(args)

    def event_delete(self, virtual, *sequences) -> None:
        """Unbind a virtual event VIRTUAL from SEQUENCE."""
        args = ('event', 'delete', virtual) + sequences
        self.tk.call(args)
    def event_generate(self, sequence, **kw) -> None:
        """Generate an event SEQUENCE. Additional
        keyword arguments specify parameter of the event
        (e.g. x, y, rootx, rooty)."""
        args = ('event', 'generate', self._w, sequence)
        for k, v in kw.items():
            # Each keyword becomes a '-name value' pair; values are
            # stringified for Tcl.
            args = args + ('-%s' % k, str(v))
        self.tk.call(args)
    def event_info(self, virtual=None):
        """Return a list of all virtual events or the information
        about the SEQUENCE bound to the virtual event VIRTUAL."""
        return self.tk.splitlist(
            self.tk.call('event', 'info', virtual))

    # Image related commands

    def image_names(self):
        """Return a list of all existing image names."""
        return self.tk.splitlist(self.tk.call('image', 'names'))

    def image_types(self):
        """Return a list of all available image types (e.g. photo bitmap)."""
        return self.tk.splitlist(self.tk.call('image', 'types'))
class CallWrapper:
    """Internal class. Stores function to call when some user
    defined Tcl function is called e.g. after an event occurred."""

    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func      # Python callback to invoke
        self.subst = subst    # optional argument-preprocessing function
        self.widget = widget  # widget used for exception reporting

    def __call__(self, *args):
        """Apply first function SUBST to arguments, then FUNC."""
        try:
            if self.subst:
                args = self.subst(*args)
            return self.func(*args)
        except SystemExit:
            raise
        except:
            # Deliberately broad: any error raised by a callback is routed
            # to the root's exception reporter rather than crashing Tcl's
            # event loop.
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        reply = self.tk.call(self._w, 'xview', *args)
        if args:
            return None
        return self._getdoubles(reply)

    def xview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total width of the canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view according to NUMBER which is measured in "units"
        or "pages" (WHAT)."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        reply = self.tk.call(self._w, 'yview', *args)
        if args:
            return None
        return self._getdoubles(reply)

    def yview_moveto(self, fraction):
        """Adjusts the view in the window so that FRACTION of the
        total height of the canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view according to NUMBER which is measured in
        "units" or "pages" (WHAT)."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
"""Provides functions for the communication with the window manager."""
    def wm_aspect(self,
              minNumer=None, minDenom=None,
              maxNumer=None, maxDenom=None):
        """Instruct the window manager to set the aspect ratio (width/height)
        of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
        of the actual values if no argument is given."""
        return self._getints(
            self.tk.call('wm', 'aspect', self._w,
                         minNumer, minDenom,
                         maxNumer, maxDenom))
    aspect = wm_aspect
    def wm_attributes(self, *args):
        """This subcommand returns or sets platform specific attributes

        The first form returns a list of the platform specific flags and
        their values. The second form returns the value for the specific
        option. The third form sets one or more of the values. The values
        are as follows:

        On Windows, -disabled gets or sets whether the window is in a
        disabled state. -toolwindow gets or sets the style of the window
        to toolwindow (as defined in the MSDN). -topmost gets or sets
        whether this is a topmost window (displays above all other
        windows).

        On Macintosh, XXXXX

        On Unix, there are currently no special attribute values.
        """
        # Positional arguments are forwarded verbatim to 'wm attributes'.
        args = ('wm', 'attributes', self._w) + args
        return self.tk.call(args)
    attributes=wm_attributes
    def wm_client(self, name=None):
        """Store NAME in WM_CLIENT_MACHINE property of this widget. Return
        current value."""
        return self.tk.call('wm', 'client', self._w, name)
    client = wm_client

    def wm_colormapwindows(self, *wlist):
        """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
        of this widget. This list contains windows whose colormaps differ from their
        parents. Return current list of widgets if WLIST is empty."""
        if len(wlist) > 1:
            wlist = (wlist,) # Tk needs a list of windows here
        args = ('wm', 'colormapwindows', self._w) + wlist
        if wlist:
            self.tk.call(args)
        else:
            # Query form: translate the Tcl window names back to widgets.
            return [self._nametowidget(x)
                    for x in self.tk.splitlist(self.tk.call(args))]
    colormapwindows = wm_colormapwindows

    def wm_command(self, value=None):
        """Store VALUE in WM_COMMAND property. It is the command
        which shall be used to invoke the application. Return current
        command if VALUE is None."""
        return self.tk.call('wm', 'command', self._w, value)
    command = wm_command
def wm_deiconify(self):
"""Deiconify this widget. If it was never mapped it will not be mapped.
On Windows it will raise this widget and give it the focus."""
return self.tk.call('wm', 'deiconify', self._w)
deiconify = wm_deiconify
def wm_focusmodel(self, model=None):
    """Set the focus model to MODEL: "active" means this widget claims
    the focus itself, "passive" means the window manager shall give it
    the focus. With MODEL None, return the current focus model."""
    cmd = ('wm', 'focusmodel', self._w, model)
    return self.tk.call(*cmd)

focusmodel = wm_focusmodel
def wm_forget(self, window):  # new in Tk 8.5
    """Unmap WINDOW from the screen and stop managing it with wm.

    A toplevel will be treated like a frame once no longer managed,
    but its menu option configuration is remembered and the menus
    return once the widget is managed again."""
    self.tk.call('wm', 'forget', window)

forget = wm_forget
def wm_frame(self):
    """Return the identifier of this widget's decorative frame, if
    present."""
    cmd = ('wm', 'frame', self._w)
    return self.tk.call(*cmd)

frame = wm_frame
def wm_geometry(self, newGeometry=None):
    """Set the geometry to NEWGEOMETRY, a string of the form
    =widthxheight+x+y. With None, return the current geometry."""
    cmd = ('wm', 'geometry', self._w, newGeometry)
    return self.tk.call(*cmd)

geometry = wm_geometry
def wm_grid(self,
            baseWidth=None, baseHeight=None,
            widthInc=None, heightInc=None):
    """Instruct the window manager that this widget shall only be
    resized on grid boundaries. WIDTHINC and HEIGHTINC are the pixel
    width and height of one grid unit; BASEWIDTH and BASEHEIGHT are
    the number of grid units requested in Tk_GeometryRequest."""
    reply = self.tk.call('wm', 'grid', self._w,
                         baseWidth, baseHeight, widthInc, heightInc)
    return self._getints(reply)

grid = wm_grid
def wm_group(self, pathName=None):
    """Set the group leader widget for related widgets to PATHNAME.
    With None, return this widget's current group leader."""
    cmd = ('wm', 'group', self._w, pathName)
    return self.tk.call(*cmd)

group = wm_group
def wm_iconbitmap(self, bitmap=None, default=None):
    """Set the bitmap for the iconified widget to BITMAP; with None,
    return the current bitmap.

    Under Windows, DEFAULT can be used instead to set the icon for
    this widget and any descendants that have no explicit icon; it may
    be a relative path to a .ico file, e.g.
    root.iconbitmap(default='myicon.ico'). See the Tk documentation
    for details."""
    if default:
        return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
    return self.tk.call('wm', 'iconbitmap', self._w, bitmap)

iconbitmap = wm_iconbitmap
def wm_iconify(self):
    """Display this widget as an icon."""
    cmd = ('wm', 'iconify', self._w)
    return self.tk.call(*cmd)

iconify = wm_iconify
def wm_iconmask(self, bitmap=None):
    """Set the mask for this widget's icon bitmap; with None, return
    the current mask."""
    cmd = ('wm', 'iconmask', self._w, bitmap)
    return self.tk.call(*cmd)

iconmask = wm_iconmask
def wm_iconname(self, newName=None):
    """Set the name of this widget's icon; with None, return the
    current name."""
    cmd = ('wm', 'iconname', self._w, newName)
    return self.tk.call(*cmd)

iconname = wm_iconname
def wm_iconphoto(self, default=False, *args):  # new in Tk 8.5
    """Set the titlebar icon for this window from the named photo
    images in ARGS. If DEFAULT is True, apply to all future toplevels
    as well.

    The image data is snapshotted at call time; later changes to the
    images are not reflected in the titlebar icons. Multiple images of
    different sizes may be given and the window manager may scale them.

    On Windows the images are packed into a Windows icon structure,
    overriding any wm_iconbitmap (and vice versa). On X the images are
    placed in the _NET_WM_ICON property, which most modern window
    managers support; a wm_iconbitmap may coexist. On Macintosh this
    currently does nothing."""
    cmd = ('wm', 'iconphoto', self._w)
    if default:
        cmd += ('-default',)
    self.tk.call(*(cmd + args))

iconphoto = wm_iconphoto
def wm_iconposition(self, x=None, y=None):
    """Set the position of this widget's icon to X and Y. With no
    arguments, return the current position as a tuple."""
    reply = self.tk.call('wm', 'iconposition', self._w, x, y)
    return self._getints(reply)

iconposition = wm_iconposition
def wm_iconwindow(self, pathName=None):
    """Set widget PATHNAME to be displayed instead of the icon; with
    None, return the current value."""
    cmd = ('wm', 'iconwindow', self._w, pathName)
    return self.tk.call(*cmd)

iconwindow = wm_iconwindow
def wm_manage(self, widget):  # new in Tk 8.5
    """Make WIDGET a stand-alone top-level window, decorated with the
    window manager's title bar, etc."""
    self.tk.call('wm', 'manage', widget)

manage = wm_manage
def wm_maxsize(self, width=None, height=None):
    """Set the maximum WIDTH and HEIGHT for this widget (in grid units
    if the window is gridded). With no arguments, return the current
    values as a tuple."""
    reply = self.tk.call('wm', 'maxsize', self._w, width, height)
    return self._getints(reply)

maxsize = wm_maxsize
def wm_minsize(self, width=None, height=None):
    """Set the minimum WIDTH and HEIGHT for this widget (in grid units
    if the window is gridded). With no arguments, return the current
    values as a tuple."""
    reply = self.tk.call('wm', 'minsize', self._w, width, height)
    return self._getints(reply)

minsize = wm_minsize
def wm_overrideredirect(self, boolean=None):
    """Set or query the override-redirect flag: when BOOLEAN is 1 the
    window manager ignores this widget. With None, return the current
    value."""
    reply = self.tk.call('wm', 'overrideredirect', self._w, boolean)
    return self._getboolean(reply)

overrideredirect = wm_overrideredirect
def wm_positionfrom(self, who=None):
    """Instruct the window manager that this widget's position is
    defined by the user when WHO is "user", and by its own policy when
    WHO is "program"."""
    cmd = ('wm', 'positionfrom', self._w, who)
    return self.tk.call(*cmd)

positionfrom = wm_positionfrom
def wm_protocol(self, name=None, func=None):
    """Bind function FUNC to window-manager protocol NAME for this
    widget. With FUNC None, return the function currently bound to
    NAME. NAME could be e.g. "WM_SAVE_YOURSELF" or
    "WM_DELETE_WINDOW"."""
    # Callables must be registered as Tcl command names first.
    command = self._register(func) if callable(func) else func
    return self.tk.call('wm', 'protocol', self._w, name, command)

protocol = wm_protocol
def wm_resizable(self, width=None, height=None):
    """Instruct the window manager whether this window can be resized
    in WIDTH and/or HEIGHT. Both values are booleans."""
    cmd = ('wm', 'resizable', self._w, width, height)
    return self.tk.call(*cmd)

resizable = wm_resizable
def wm_sizefrom(self, who=None):
    """Instruct the window manager that this widget's size is defined
    by the user when WHO is "user", and by its own policy when WHO is
    "program"."""
    cmd = ('wm', 'sizefrom', self._w, who)
    return self.tk.call(*cmd)

sizefrom = wm_sizefrom
def wm_state(self, newstate=None):
    """Query or set the state of this widget: one of normal, icon,
    iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
    cmd = ('wm', 'state', self._w, newstate)
    return self.tk.call(*cmd)

state = wm_state
def wm_title(self, string=None):
    """Set the title of this widget to STRING; with None, return the
    current title."""
    cmd = ('wm', 'title', self._w, string)
    return self.tk.call(*cmd)

title = wm_title
def wm_transient(self, master=None):
    """Instruct the window manager that this widget is transient with
    regard to widget MASTER."""
    cmd = ('wm', 'transient', self._w, master)
    return self.tk.call(*cmd)

transient = wm_transient
def wm_withdraw(self):
    """Withdraw this widget from the screen so it is unmapped and
    forgotten by the window manager. Re-draw it with wm_deiconify."""
    cmd = ('wm', 'withdraw', self._w)
    return self.tk.call(*cmd)

withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    # The root window's Tk path name is always '.'.
    _w = '.'
    def __init__(self, screenName=None, baseName=None, className='Tk',
                 useTk=1, sync=0, use=None):
        """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
        be created. BASENAME will be used for the identification of the profile file (see
        readprofile).
        It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
        is the name of the widget class."""
        self.master = None
        self.children = {}
        self._tkloaded = 0
        # to avoid recursions in the getattr code in case of failure, we
        # ensure that self.tk is always _something_.
        self.tk = None
        if baseName is None:
            import os
            baseName = os.path.basename(sys.argv[0])
            baseName, ext = os.path.splitext(baseName)
            # Keep unusual extensions as part of the profile base name.
            if ext not in ('.py', '.pyc'):
                baseName = baseName + ext
        interactive = 0
        self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
        if useTk:
            self._loadtk()
        if not sys.flags.ignore_environment:
            # Issue #16248: Honor the -E flag to avoid code injection.
            self.readprofile(baseName, className)
    def loadtk(self):
        # Load the Tk subsystem on demand when the interpreter was
        # created with useTk=0 (e.g. via the Tcl() factory).
        if not self._tkloaded:
            self.tk.loadtk()
            self._loadtk()
    def _loadtk(self):
        # Internal: finish Tk initialization after the Tk subsystem
        # has been loaded into the Tcl interpreter.
        self._tkloaded = 1
        global _default_root
        # Version sanity checks
        tk_version = self.tk.getvar('tk_version')
        if tk_version != _tkinter.TK_VERSION:
            raise RuntimeError("tk.h version (%s) doesn't match libtk.a version (%s)"
                               % (_tkinter.TK_VERSION, tk_version))
        # Under unknown circumstances, tcl_version gets coerced to float
        tcl_version = str(self.tk.getvar('tcl_version'))
        if tcl_version != _tkinter.TCL_VERSION:
            raise RuntimeError("tcl.h version (%s) doesn't match libtcl.a version (%s)" \
                % (_tkinter.TCL_VERSION, tcl_version))
        # Create and register the tkerror and exit commands
        # We need to inline parts of _register here, _register
        # would register differently-named commands.
        if self._tclCommands is None:
            self._tclCommands = []
        self.tk.createcommand('tkerror', _tkerror)
        self.tk.createcommand('exit', _exit)
        self._tclCommands.append('tkerror')
        self._tclCommands.append('exit')
        if _support_default_root and not _default_root:
            _default_root = self
        self.protocol("WM_DELETE_WINDOW", self.destroy)
    def destroy(self):
        """Destroy this and all descendants widgets. This will
        end the application of this Tcl interpreter."""
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        Misc.destroy(self)
        global _default_root
        if _support_default_root and _default_root is self:
            _default_root = None
    def readprofile(self, baseName, className):
        """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
        the Tcl Interpreter and calls exec on the contents of BASENAME.py and
        CLASSNAME.py if such a file exists in the home directory."""
        import os
        if 'HOME' in os.environ: home = os.environ['HOME']
        else: home = os.curdir
        class_tcl = os.path.join(home, '.%s.tcl' % className)
        class_py = os.path.join(home, '.%s.py' % className)
        base_tcl = os.path.join(home, '.%s.tcl' % baseName)
        base_py = os.path.join(home, '.%s.py' % baseName)
        dir = {'self': self}
        # NOTE: the profile files are exec'd with full privileges; this
        # is why __init__ skips readprofile when Python runs with -E.
        exec('from tkinter import *', dir)
        if os.path.isfile(class_tcl):
            self.tk.call('source', class_tcl)
        if os.path.isfile(class_py):
            exec(open(class_py).read(), dir)
        if os.path.isfile(base_tcl):
            self.tk.call('source', base_tcl)
        if os.path.isfile(base_py):
            exec(open(base_py).read(), dir)
    def report_callback_exception(self, exc, val, tb):
        """Report callback exception on sys.stderr.
        Applications may want to override this internal function, and
        should when sys.stderr is None."""
        import traceback
        print("Exception in Tkinter callback", file=sys.stderr)
        # Mirror what the interactive interpreter records for an
        # unhandled exception.
        sys.last_type = exc
        sys.last_value = val
        sys.last_traceback = tb
        traceback.print_exception(exc, val, tb)
    def __getattr__(self, attr):
        "Delegate attribute access to the interpreter object"
        return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    """Return a Tk object created with useTk defaulting to 0, i.e. a
    Tcl interpreter without the Tk GUI subsystem loaded."""
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.

    Base class providing the pack_* methods for every widget."""

    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
        """
        opts = self._options(cnf, kw)
        self.tk.call(('pack', 'configure', self._w) + opts)
    pack = configure = config = pack_configure

    def pack_forget(self):
        """Unmap this widget and do not use it for the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget

    def pack_info(self):
        """Return information about the packing options
        for this widget."""
        info = _splitdict(self.tk, self.tk.call('pack', 'info', self._w))
        if 'in' in info:
            # Resolve the master's Tk path name to the widget object.
            info['in'] = self.nametowidget(info['in'])
        return info
    info = pack_info
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.

    Base class providing the place_* methods for every widget."""

    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        opts = self._options(cnf, kw)
        self.tk.call(('place', 'configure', self._w) + opts)
    place = configure = config = place_configure

    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget

    def place_info(self):
        """Return information about the placing options
        for this widget."""
        info = _splitdict(self.tk, self.tk.call('place', 'info', self._w))
        if 'in' in info:
            # Resolve the master's Tk path name to the widget object.
            info['in'] = self.nametowidget(info['in'])
        return info
    info = place_info
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.

    Base class providing the grid_* methods for every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)

    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        opts = self._options(cnf, kw)
        self.tk.call(('grid', 'configure', self._w) + opts)
    grid = configure = config = grid_configure
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure

    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget

    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)

    def grid_info(self):
        """Return information about the options
        for positioning this widget in a grid."""
        info = _splitdict(self.tk, self.tk.call('grid', 'info', self._w))
        if 'in' in info:
            # Resolve the master's Tk path name to the widget object.
            info['in'] = self.nametowidget(info['in'])
        return info
    info = grid_info
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class."""
    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        if _support_default_root:
            global _default_root
            # Lazily create the root window when no master is supplied.
            if not master:
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        # An explicit 'name' entry in cnf overrides the generated name.
        if 'name' in cnf:
            name = cnf['name']
            del cnf['name']
        if not name:
            # Generate a unique name '!classname' or '!classnameN'
            # using a per-master, per-class counter.
            name = self.__class__.__name__.lower()
            if master._last_child_ids is None:
                master._last_child_ids = {}
            count = master._last_child_ids.get(name, 0) + 1
            master._last_child_ids[name] = count
            if count == 1:
                name = '!%s' % (name,)
            else:
                name = '!%s%d' % (name, count)
        self._name = name
        # Build the full Tk path name; the root window itself is '.'.
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # A name collision destroys and replaces the previous child.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self
    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Options keyed by a class (e.g. Pack) are applied after widget
        # creation by calling that class's configure method.
        classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)]
        for k, v in classes:
            del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)
    def destroy(self):
        """Destroy this and all descendants widgets."""
        for c in list(self.children.values()): c.destroy()
        self.tk.call('destroy', self._w)
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)
    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.
    Base class for a widget which can be positioned with the geometry managers
    Pack, Place or Grid."""
    # All behavior comes from BaseWidget plus the geometry-manager mixins.
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        extra = ()
        # Creation-time options must be passed on the Tcl command line
        # rather than via configure; pull them out of cnf.
        for wmkey in ('screen', 'class_', 'class', 'visual', 'colormap'):
            if wmkey not in cnf:
                continue
            val = cnf[wmkey]
            # 'class_' is the keyword-argument-safe spelling of 'class'.
            opt = '-' + (wmkey[:-1] if wmkey[-1] == '_' else wmkey)
            extra = extra + (opt, val)
            del cnf[wmkey]
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        root = self._root()
        # Inherit icon name and title from the root window by default.
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)

    def flash(self):
        """Flash the button.

        This redisplays the button several times, alternating between
        active and normal colors, leaving the button in the same
        normal/active state as when the command was invoked. Ignored
        if the button's state is disabled.
        """
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Invoke the command associated with the button.

        Return the command's return value, or an empty string if there
        is no command associated with the button. Ignored if the
        button's state is disabled.
        """
        result = self.tk.call(self._w, 'invoke')
        return result
class Canvas(Widget, XView, YView):
    """Canvas widget to display graphical elements like lines or text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a canvas widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, closeenough,
        confine, cursor, height, highlightbackground, highlightcolor,
        highlightthickness, insertbackground, insertborderwidth,
        insertofftime, insertontime, insertwidth, offset, relief,
        scrollregion, selectbackground, selectborderwidth, selectforeground,
        state, takefocus, width, xscrollcommand, xscrollincrement,
        yscrollcommand, yscrollincrement."""
        Widget.__init__(self, master, 'canvas', cnf, kw)
    def addtag(self, *args):
        """Internal function."""
        self.tk.call((self._w, 'addtag') + args)
    def addtag_above(self, newtag, tagOrId):
        """Add tag NEWTAG to all items above TAGORID."""
        self.addtag(newtag, 'above', tagOrId)
    def addtag_all(self, newtag):
        """Add tag NEWTAG to all items."""
        self.addtag(newtag, 'all')
    def addtag_below(self, newtag, tagOrId):
        """Add tag NEWTAG to all items below TAGORID."""
        self.addtag(newtag, 'below', tagOrId)
    def addtag_closest(self, newtag, x, y, halo=None, start=None):
        """Add tag NEWTAG to item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closests). If START is specified the next below this tag is taken."""
        self.addtag(newtag, 'closest', x, y, halo, start)
    def addtag_enclosed(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items in the rectangle defined
        by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
    def addtag_overlapping(self, newtag, x1, y1, x2, y2):
        """Add tag NEWTAG to all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
    def addtag_withtag(self, newtag, tagOrId):
        """Add tag NEWTAG to all items with TAGORID."""
        self.addtag(newtag, 'withtag', tagOrId)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses all items with tags specified as arguments."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tag_unbind(self, tagOrId, sequence, funcid=None):
        """Unbind for all items with TAGORID for event SEQUENCE the
        function identified with FUNCID."""
        self.tk.call(self._w, 'bind', tagOrId, sequence, '')
        # Also drop the registered Tcl callback command, if given.
        if funcid:
            self.deletecommand(funcid)
    def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
        """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or whether it will
        replace the previous function. See bind for the return value."""
        return self._bind((self._w, 'bind', tagOrId),
                          sequence, func, add)
    def canvasx(self, screenx, gridspacing=None):
        """Return the canvas x coordinate of pixel position SCREENX rounded
        to nearest multiple of GRIDSPACING units."""
        return self.tk.getdouble(self.tk.call(
            self._w, 'canvasx', screenx, gridspacing))
    def canvasy(self, screeny, gridspacing=None):
        """Return the canvas y coordinate of pixel position SCREENY rounded
        to nearest multiple of GRIDSPACING units."""
        return self.tk.getdouble(self.tk.call(
            self._w, 'canvasy', screeny, gridspacing))
    def coords(self, *args):
        """Return a list of coordinates for the item given in ARGS."""
        # XXX Should use _flatten on args
        return [self.tk.getdouble(x) for x in
                self.tk.splitlist(
                    self.tk.call((self._w, 'coords') + args))]
    def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
        """Internal function."""
        args = _flatten(args)
        # The trailing positional argument may be a cnf dict/tuple of
        # item options; split it off from the coordinate values.
        cnf = args[-1]
        if isinstance(cnf, (dict, tuple)):
            args = args[:-1]
        else:
            cnf = {}
        # Returns the integer id Tk assigns to the new item.
        return self.tk.getint(self.tk.call(
            self._w, 'create', itemType,
            *(args + self._options(cnf, kw))))
    def create_arc(self, *args, **kw):
        """Create arc shaped region with coordinates x1,y1,x2,y2."""
        return self._create('arc', args, kw)
    def create_bitmap(self, *args, **kw):
        """Create bitmap with coordinates x1,y1."""
        return self._create('bitmap', args, kw)
    def create_image(self, *args, **kw):
        """Create image item with coordinates x1,y1."""
        return self._create('image', args, kw)
    def create_line(self, *args, **kw):
        """Create line with coordinates x1,y1,...,xn,yn."""
        return self._create('line', args, kw)
    def create_oval(self, *args, **kw):
        """Create oval with coordinates x1,y1,x2,y2."""
        return self._create('oval', args, kw)
    def create_polygon(self, *args, **kw):
        """Create polygon with coordinates x1,y1,...,xn,yn."""
        return self._create('polygon', args, kw)
    def create_rectangle(self, *args, **kw):
        """Create rectangle with coordinates x1,y1,x2,y2."""
        return self._create('rectangle', args, kw)
    def create_text(self, *args, **kw):
        """Create text with coordinates x1,y1."""
        return self._create('text', args, kw)
    def create_window(self, *args, **kw):
        """Create window with coordinates x1,y1,x2,y2."""
        return self._create('window', args, kw)
    def dchars(self, *args):
        """Delete characters of text items identified by tag or id in ARGS (possibly
        several times) from FIRST to LAST character (including)."""
        self.tk.call((self._w, 'dchars') + args)
    def delete(self, *args):
        """Delete items identified by all tag or ids contained in ARGS."""
        self.tk.call((self._w, 'delete') + args)
    def dtag(self, *args):
        """Delete tag or id given as last arguments in ARGS from items
        identified by first argument in ARGS."""
        self.tk.call((self._w, 'dtag') + args)
    def find(self, *args):
        """Internal function."""
        return self._getints(
            self.tk.call((self._w, 'find') + args)) or ()
    def find_above(self, tagOrId):
        """Return items above TAGORID."""
        return self.find('above', tagOrId)
    def find_all(self):
        """Return all items."""
        return self.find('all')
    def find_below(self, tagOrId):
        """Return all items below TAGORID."""
        return self.find('below', tagOrId)
    def find_closest(self, x, y, halo=None, start=None):
        """Return item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
        return self.find('closest', x, y, halo, start)
    def find_enclosed(self, x1, y1, x2, y2):
        """Return all items in rectangle defined
        by X1,Y1,X2,Y2."""
        return self.find('enclosed', x1, y1, x2, y2)
    def find_overlapping(self, x1, y1, x2, y2):
        """Return all items which overlap the rectangle
        defined by X1,Y1,X2,Y2."""
        return self.find('overlapping', x1, y1, x2, y2)
    def find_withtag(self, tagOrId):
        """Return all items with TAGORID."""
        return self.find('withtag', tagOrId)
    def focus(self, *args):
        """Set focus to the first item specified in ARGS."""
        return self.tk.call((self._w, 'focus') + args)
    def gettags(self, *args):
        """Return tags associated with the first item specified in ARGS."""
        return self.tk.splitlist(
            self.tk.call((self._w, 'gettags') + args))
    def icursor(self, *args):
        """Set cursor at position POS in the item identified by TAGORID.
        In ARGS TAGORID must be first."""
        self.tk.call((self._w, 'icursor') + args)
    def index(self, *args):
        """Return position of cursor as integer in item specified in ARGS."""
        return self.tk.getint(self.tk.call((self._w, 'index') + args))
    def insert(self, *args):
        """Insert TEXT in item TAGORID at position POS. ARGS must
        be TAGORID POS TEXT."""
        self.tk.call((self._w, 'insert') + args)
    def itemcget(self, tagOrId, option):
        """Return the resource value for an OPTION for item TAGORID."""
        return self.tk.call(
            (self._w, 'itemcget') + (tagOrId, '-'+option))
    def itemconfigure(self, tagOrId, cnf=None, **kw):
        """Configure resources of an item TAGORID.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method without arguments.
        """
        return self._configure(('itemconfigure', tagOrId), cnf, kw)
    itemconfig = itemconfigure
    # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
    # so the preferred name for them is tag_lower, tag_raise
    # (similar to tag_bind, and similar to the Text widget);
    # unfortunately can't delete the old ones yet (maybe in 1.6)
    def tag_lower(self, *args):
        """Lower an item TAGORID given in ARGS
        (optional below another item)."""
        self.tk.call((self._w, 'lower') + args)
    lower = tag_lower
    def move(self, *args):
        """Move an item TAGORID given in ARGS."""
        self.tk.call((self._w, 'move') + args)
    def postscript(self, cnf={}, **kw):
        """Print the contents of the canvas to a postscript
        file. Valid options: colormap, colormode, file, fontmap,
        height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
        return self.tk.call((self._w, 'postscript') +
                            self._options(cnf, kw))
    def tag_raise(self, *args):
        """Raise an item TAGORID given in ARGS
        (optional above another item)."""
        self.tk.call((self._w, 'raise') + args)
    lift = tkraise = tag_raise
    def scale(self, *args):
        """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
        self.tk.call((self._w, 'scale') + args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y, gain=10):
        """Adjust the view of the canvas to GAIN times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
    def select_adjust(self, tagOrId, index):
        """Adjust the end of the selection near the cursor of an item TAGORID to index."""
        self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
    def select_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'select', 'clear')
    def select_from(self, tagOrId, index):
        """Set the fixed end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'from', tagOrId, index)
    def select_item(self):
        """Return the item which has the selection."""
        return self.tk.call(self._w, 'select', 'item') or None
    def select_to(self, tagOrId, index):
        """Set the variable end of a selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'to', tagOrId, index)
    def type(self, tagOrId):
        """Return the type of the item TAGORID."""
        return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""

    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)

    def deselect(self):
        """Switch the button to its off-state."""
        self.tk.call(self._w, 'deselect')

    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')

    def invoke(self):
        """Toggle the button and invoke a command if given as resource."""
        result = self.tk.call(self._w, 'invoke')
        return result

    def select(self):
        """Switch the button to its on-state."""
        self.tk.call(self._w, 'select')

    def toggle(self):
        """Flip the current state of the button."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows displaying simple text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)
    def delete(self, first, last=None):
        """Delete text from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Return the text."""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Insert cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)
    def index(self, index):
        """Return position of cursor."""
        return self.tk.getint(self.tk.call(
            self._w, 'index', index))
    def insert(self, index, string):
        """Insert STRING at INDEX."""
        self.tk.call(self._w, 'insert', index, string)
    def scan_mark(self, x):
        """Remember the current X coordinate (anchor for scan_dragto)."""
        self.tk.call(self._w, 'scan', 'mark', x)
    def scan_dragto(self, x):
        """Adjust the view of the entry to 10 times the difference
        between X and the coordinate given in scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)
    def selection_adjust(self, index):
        """Adjust the end of the selection near the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust
    def selection_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear
    def selection_from(self, index):
        """Set the fixed end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from
    def selection_present(self):
        """Return True if there are characters selected in the entry, False
        otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present
    def selection_range(self, start, end):
        """Set the selection from START to END (not included)."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range
    def selection_to(self, index):
        """Set the variable end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Rectangular container widget, optionally drawn with a 3D border,
    used to group other widgets."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual,
        width."""
        cnf = _cnfmerge((cnf, kw))
        extra = ()
        # Tk only accepts -class at widget-creation time, so pull it out of
        # cnf; the 'class_' alias (checked first) avoids the reserved word.
        for key in ('class_', 'class'):
            if key in cnf:
                extra = ('-class', cnf.pop(key))
                break
        Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
    """Widget that displays a static text string, bitmap or image."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            height, state, width
        """
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
    """Listbox widget which can display a list of strings."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a listbox widget with the parent MASTER.
        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, height, highlightbackground,
        highlightcolor, highlightthickness, relief, selectbackground,
        selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
        width, xscrollcommand, yscrollcommand, listvariable."""
        Widget.__init__(self, master, 'listbox', cnf, kw)
    def activate(self, index):
        """Activate item identified by INDEX."""
        self.tk.call(self._w, 'activate', index)
    def bbox(self, index):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses the item identified by the given index."""
        return self._getints(self.tk.call(self._w, 'bbox', index)) or None
    def curselection(self):
        """Return the indices of currently selected item."""
        return self._getints(self.tk.call(self._w, 'curselection')) or ()
    def delete(self, first, last=None):
        """Delete items from FIRST to LAST (included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self, first, last=None):
        """Get list of items from FIRST to LAST (included).
        With only FIRST given, return that single item."""
        if last is not None:
            return self.tk.splitlist(self.tk.call(
                self._w, 'get', first, last))
        else:
            return self.tk.call(self._w, 'get', first)
    def index(self, index):
        """Return index of item identified with INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return self.tk.getint(i)
    def insert(self, index, *elements):
        """Insert ELEMENTS at INDEX."""
        self.tk.call((self._w, 'insert', index) + elements)
    def nearest(self, y):
        """Get index of item which is nearest to y coordinate Y."""
        return self.tk.getint(self.tk.call(
            self._w, 'nearest', y))
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates (anchor for scan_dragto)."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the listbox to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def see(self, index):
        """Scroll such that INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def selection_anchor(self, index):
        """Set the fixed end of the selection to INDEX."""
        self.tk.call(self._w, 'selection', 'anchor', index)
    select_anchor = selection_anchor
    def selection_clear(self, first, last=None):
        """Clear the selection from FIRST to LAST (included)."""
        self.tk.call(self._w,
                     'selection', 'clear', first, last)
    select_clear = selection_clear
    def selection_includes(self, index):
        """Return True if INDEX is part of the selection."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'selection', 'includes', index))
    select_includes = selection_includes
    def selection_set(self, first, last=None):
        """Set the selection from FIRST to LAST (included) without
        changing the currently selected elements."""
        self.tk.call(self._w, 'selection', 'set', first, last)
    select_set = selection_set
    def size(self):
        """Return the number of elements in the listbox."""
        return self.tk.getint(self.tk.call(self._w, 'size'))
    def itemcget(self, index, option):
        """Return the resource value for an ITEM and an OPTION."""
        return self.tk.call(
            (self._w, 'itemcget') + (index, '-'+option))
    def itemconfigure(self, index, cnf=None, **kw):
        """Configure resources of an ITEM.

        The values for resources are specified as keyword arguments.
        To get an overview about the allowed keyword arguments
        call the method without arguments.
        Valid resource names: background, bg, foreground, fg,
        selectbackground, selectforeground."""
        return self._configure(('itemconfigure', index), cnf, kw)
    itemconfig = itemconfigure
class Menu(Widget):
    """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.
        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)
    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)
    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)
    def add(self, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'add', itemType) +
                 self._options(cnf, kw))
    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        self.add('cascade', cnf or kw)
    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)
    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)
    def add_radiobutton(self, cnf={}, **kw):
        """Add radiobutton menu item."""
        self.add('radiobutton', cnf or kw)
    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)
    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'insert', index, itemType) +
                 self._options(cnf, kw))
    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)
    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)
    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)
    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radiobutton menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)
    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)
    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1

        num_index1, num_index2 = self.index(index1), self.index(index2)
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1

        # Unregister the Tcl command callbacks of the deleted entries first,
        # so they are not leaked in the interpreter.
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)
    def entrycget(self, index, option):
        """Return the resource value of a menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)
    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)
    entryconfig = entryconfigure
    def index(self, index):
        """Return the index of a menu item identified by INDEX."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return self.tk.getint(i)
    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)
    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)
    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)
    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')
    def xposition(self, index): # new in Tk 8.5
        """Return the x-position of the leftmost pixel of the menu item
        at INDEX."""
        return self.tk.getint(self.tk.call(self._w, 'xposition', index))
    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return self.tk.getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Button that posts an associated menu when pressed.

    Obsolete since Tk 8.0; prefer attaching a Menu to a toplevel."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a menubutton widget with the parent MASTER."""
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Widget that displays multiline, word-wrapped text.

    Obsolete, since Label handles multiline text as well."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a message widget with the parent MASTER."""
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """One button of a mutually-exclusive group; at most one is on.

    All radiobuttons sharing the same 'variable' resource form one group;
    selecting a button stores its 'value' in that variable."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a radiobutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)
    def deselect(self):
        """Switch the button to its off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button between its active and normal colors."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Select the button, then run its 'command' callback if one is set."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Switch the button to its on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scale widget with the parent MASTER.
        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)
    def get(self):
        """Get the current value as integer or float.

        An int is returned when the Tcl value parses as one; otherwise
        it falls back to float."""
        value = self.tk.call(self._w, 'get')
        try:
            return self.tk.getint(value)
        except (ValueError, TypeError, TclError):
            return self.tk.getdouble(value)
    def set(self, value):
        """Set the value to VALUE."""
        self.tk.call(self._w, 'set', value)
    def coords(self, value=None):
        """Return a tuple (X,Y) of the point along the centerline of the
        trough that corresponds to VALUE or the current value if None is
        given."""
        return self._getints(self.tk.call(self._w, 'coords', value))
    def identify(self, x, y):
        """Return where the point X,Y lies. Valid return values are "slider",
        "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scrollbar widget with the parent MASTER.
        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)
    def activate(self, index=None):
        """Marks the element indicated by index as active.
        The only index values understood by this method are "arrow1",
        "slider", or "arrow2".  If any other value is specified then no
        element of the scrollbar will be active.  If index is not specified,
        the method returns the name of the element that is currently active,
        or None if no element is active."""
        return self.tk.call(self._w, 'activate', index) or None
    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting if it
        would be moved by DELTAX or DELTAY pixels (which axis applies
        depends on the scrollbar's orientation)."""
        return self.tk.getdouble(
            self.tk.call(self._w, 'delta', deltax, deltay))
    def fraction(self, x, y):
        """Return the fractional value (between 0 and 1) which corresponds
        to a slider position of X,Y."""
        return self.tk.getdouble(self.tk.call(self._w, 'fraction', x, y))
    def identify(self, x, y):
        """Return the element under position X,Y as one of
        "arrow1","slider","arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)
    def get(self):
        """Return the current fractional values (upper and lower end)
        of the slider position."""
        return self._getdoubles(self.tk.call(self._w, 'get'))
    def set(self, first, last):
        """Set the fractional values of the slider position (upper and
        lower ends as value between 0 and 1)."""
        self.tk.call(self._w, 'set', first, last)
class Text(Widget, XView, YView):
"""Text widget which can display text in various forms."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand

        WIDGET-SPECIFIC OPTIONS

            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap
        """
        Widget.__init__(self, master, 'text', cnf, kw)
    def bbox(self, index):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the given index, or
        None if the character is not visible."""
        return self._getints(
                self.tk.call(self._w, 'bbox', index)) or None
    def compare(self, index1, op, index2):
        """Return whether between index INDEX1 and index INDEX2 the
        relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))
    def count(self, index1, index2, *args): # new in Tk 8.5
        """Counts the number of relevant things between the two indices.
        If index1 is after index2, the result will be a negative number
        (and this holds for each of the possible options).

        The actual items which are counted depends on the options given by
        args. The result is a list of integers, one for the result of each
        counting option given. Valid counting options are "chars",
        "displaychars", "displayindices", "displaylines", "indices",
        "lines", "xpixels" and "ypixels". There is an additional possible
        option "update", which if given then all subsequent options ensure
        that any possible out of date information is recalculated."""
        # Prefix each option with '-' for Tcl; already-dashed args are dropped.
        args = ['-%s' % arg for arg in args if not arg.startswith('-')]
        args += [index1, index2]
        res = self.tk.call(self._w, 'count', *args) or None
        # With a single counting option Tcl returns a bare value; wrap it in
        # a tuple so the return shape matches the multi-option case.
        if res is not None and len(args) <= 3:
            return (res, )
        else:
            return res
    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the text
        widget according to BOOLEAN.  With no argument, return the current
        setting as a boolean."""
        if boolean is None:
            return self.tk.getboolean(self.tk.call(self._w, 'debug'))
        self.tk.call(self._w, 'debug', boolean)
    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included).
        With only INDEX1 given, delete the single character at INDEX1."""
        self.tk.call(self._w, 'delete', index1, index2)
    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))
    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.

        The type of contents returned in filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.

        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                # Register the Python callable as a Tcl command; remember the
                # generated name so it can be unregistered in 'finally'.
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            if func_name:
                self.deletecommand(func_name)
    ## Undo/redo support ('edit' subcommands below): new in Tk 8.4
    def edit(self, *args):
        """Internal method

        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:

        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo

        """
        return self.tk.call(self._w, 'edit', *args)
    def edit_modified(self, arg=None):
        """Get or Set the modified flag

        If arg is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If boolean is specified, sets the
        modified flag of the widget to arg.
        """
        return self.edit("modified", arg)
    def edit_redo(self):
        """Redo the last undone edit

        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")
    def edit_reset(self):
        """Clears the undo and redo stacks
        """
        return self.edit("reset")
    def edit_separator(self):
        """Inserts a separator (boundary) on the undo stack.

        Does nothing when the undo option is false
        """
        return self.edit("separator")
def edit_undo(self):
"""Undoes the last edit action
If the undo option is true. An edit action is defined
as all the insert and delete commands that are recorded
on the undo stack in between two separators. Generates
an error when the undo stack is empty. Does nothing
when the undo option is false
"""
return | codeparrot/github-code-clean |
import math,re,sys,os,time
import random as RD
import time
try:
import netCDF4 as NC
except:
print("You no install netCDF4 for python")
print("So I do not import netCDF4")
try:
import numpy as NP
except:
print("You no install numpy")
print("Do not import numpy")
class GRIDINFORMATER:
"""
This object is the information of the input gridcells/array/map.
Using
.add_an_element to add an element/gridcell
.add_an_geo_element to add an element/gridcell
.create_resample_lat_lon to create a new map of lat and lon for resampling
.create_resample_map to create resample map as ARR_RESAMPLE_MAP
.create_reference_map to create ARR_REFERENCE_MAP to resample target map.
.export_reference_map to export ARR_REFERENCE_MAP into netCDF4 format
"""
STR_VALUE_INIT = "None"
NUM_VALUE_INIT = -9999.9
NUM_NULL = float("NaN")
ARR_RESAMPLE_X_LIM = []
ARR_RESAMPLE_Y_LIM = []
# FROM WRF: module_cam_shr_const_mod.f90
NUM_CONST_EARTH_R = 6.37122E6
NUM_CONST_PI = 3.14159265358979323846
def __init__(self, name="GRID", ARR_LAT=[], ARR_LON=[], NUM_NT=1, DIMENSIONS=2 ):
self.STR_NAME = name
self.NUM_DIMENSIONS = DIMENSIONS
self.NUM_LAST_INDEX = -1
self.ARR_GRID = []
self.NUM_NT = NUM_NT
self.ARR_LAT = ARR_LAT
self.ARR_LON = ARR_LON
self.ARR_RESAMPLE_MAP_PARA = { "EDGE": {"N" :-999, "S":-999, "E":-999, "W":-999 } }
if len(ARR_LAT) != 0 and len(ARR_LON) != 0:
NUM_ARR_NY_T1 = len(ARR_LAT)
NUM_ARR_NY_T2 = len(ARR_LON)
Y_T2 = len(ARR_LON)
NUM_ARR_NX_T1 = len(ARR_LAT[0])
NUM_ARR_NX_T2 = len(ARR_LON[0])
self.NUM_NX = NUM_ARR_NX_T1
self.NUM_NY = NUM_ARR_NY_T1
if NUM_ARR_NY_T1 - NUM_ARR_NY_T2 + NUM_ARR_NX_T1 - NUM_ARR_NX_T2 != 0:
print("The gridcell of LAT is {0:d}&{1:d}, and LON is {2:d}&{3:d} are not match"\
.format(NUM_ARR_NY_T1,NUM_ARR_NY_T2,NUM_ARR_NX_T1,NUM_ARR_NX_T2))
def index_map(self, ARR_IN=[], NUM_IN_NX=0, NUM_IN_NY=0):
if len(ARR_IN) == 0:
self.INDEX_MAP = [[ self.NUM_NULL for i in range(self.NUM_NX)] for j in range(self.NUM_NY)]
NUM_ALL_INDEX = len(self.ARR_GRID)
for n in range(NUM_ALL_INDEX):
self.INDEX_MAP[self.ARR_GRID[n]["INDEX_J"]][self.ARR_GRID[n]["INDEX_I"]] =\
self.ARR_GRID[n]["INDEX"]
else:
MAP_INDEX = [[ self.NUM_NULL for i in range(NUM_IN_NX)] for j in range(NUM_IN_NY)]
NUM_ALL_INDEX = len(ARR_IN)
for n in range(NUM_ALL_INDEX):
MAP_INDEX[ARR_IN[n]["INDEX_J"]][ARR_IN[n]["INDEX_I"]] = ARR_IN[n]["INDEX"]
return MAP_INDEX
def add_an_element(self, ARR_GRID, NUM_INDEX=0, STR_VALUE=STR_VALUE_INIT, NUM_VALUE=NUM_VALUE_INIT ):
""" Adding an element to an empty array """
OBJ_ELEMENT = {"INDEX" : NUM_INDEX, \
STR_VALUE : NUM_VALUE}
ARR_GRID.append(OBJ_ELEMENT)
    def add_an_geo_element(self, ARR_GRID, NUM_INDEX=-999, NUM_J=0, NUM_I=0, \
                           NUM_NX = 0, NUM_NY = 0, NUM_NT=0, \
                           ARR_VALUE_STR=[], ARR_VALUE_NUM=[] ):
        """ Adding an geological element to an empty array
            The information for lat and lon of center, edge, and vertex will
            be stored for further used.

            NUM_J/NUM_I      -- row/column of the cell in self.ARR_LAT/ARR_LON
            NUM_INDEX        -- explicit cell index; -999 means auto-increment
            ARR_VALUE_STR    -- variable names to attach to the cell
            ARR_VALUE_NUM    -- optional per-variable, per-timestep values;
                                assumed shaped [n_var][n_time] -- TODO confirm
        """
        NUM_NVAR = len(ARR_VALUE_STR)
        if NUM_NX == 0 or NUM_NY == 0:
            NUM_NX = self.NUM_NX
            NUM_NY = self.NUM_NY
        if NUM_NT == 0:
            NUM_NT = self.NUM_NT
        NUM_CENTER_LON = self.ARR_LON[NUM_J][NUM_I]
        NUM_CENTER_LAT = self.ARR_LAT[NUM_J][NUM_I]
        # Half-distances to the neighboring cell centers give the E/W edge
        # offsets; at the first/last column the single available neighbor is
        # mirrored to the other side.
        if NUM_I == 0:
            NUM_WE_LON = ( self.ARR_LON[NUM_J][NUM_I] - self.ARR_LON[NUM_J][NUM_I + 1] ) * 0.5
            NUM_EW_LON = -1 * ( self.ARR_LON[NUM_J][NUM_I] - self.ARR_LON[NUM_J][NUM_I + 1] ) * 0.5
        elif NUM_I == NUM_NX - 1:
            NUM_WE_LON = -1 * ( self.ARR_LON[NUM_J][NUM_I] - self.ARR_LON[NUM_J][NUM_I - 1] ) * 0.5
            NUM_EW_LON = ( self.ARR_LON[NUM_J][NUM_I] - self.ARR_LON[NUM_J][NUM_I - 1] ) * 0.5
        else:
            NUM_WE_LON = ( self.ARR_LON[NUM_J][NUM_I] - self.ARR_LON[NUM_J][NUM_I + 1] ) * 0.5
            NUM_EW_LON = ( self.ARR_LON[NUM_J][NUM_I] - self.ARR_LON[NUM_J][NUM_I - 1] ) * 0.5
        # Same scheme for the N/S edge offsets along the row axis.
        if NUM_J == 0:
            NUM_SN_LAT = -1 * ( self.ARR_LAT[NUM_J][NUM_I] - self.ARR_LAT[NUM_J + 1][NUM_I ] ) * 0.5
            NUM_NS_LAT = ( self.ARR_LAT[NUM_J][NUM_I] - self.ARR_LAT[NUM_J + 1][NUM_I ] ) * 0.5
        elif NUM_J == NUM_NY - 1:
            NUM_SN_LAT = ( self.ARR_LAT[NUM_J][NUM_I] - self.ARR_LAT[NUM_J - 1][NUM_I ] ) * 0.5
            NUM_NS_LAT = -1 * ( self.ARR_LAT[NUM_J][NUM_I] - self.ARR_LAT[NUM_J - 1][NUM_I ] ) * 0.5
        else:
            NUM_SN_LAT = ( self.ARR_LAT[NUM_J][NUM_I] - self.ARR_LAT[NUM_J - 1][NUM_I ] ) * 0.5
            NUM_NS_LAT = ( self.ARR_LAT[NUM_J][NUM_I] - self.ARR_LAT[NUM_J + 1][NUM_I ] ) * 0.5
        # Corner coordinates, each as [lon, lat].
        ARR_NE = [ NUM_CENTER_LON + NUM_EW_LON , NUM_CENTER_LAT + NUM_NS_LAT ]
        ARR_NW = [ NUM_CENTER_LON + NUM_WE_LON , NUM_CENTER_LAT + NUM_NS_LAT ]
        ARR_SE = [ NUM_CENTER_LON + NUM_EW_LON , NUM_CENTER_LAT + NUM_SN_LAT ]
        ARR_SW = [ NUM_CENTER_LON + NUM_WE_LON , NUM_CENTER_LAT + NUM_SN_LAT ]
        if NUM_INDEX == -999:
            NUM_INDEX = self.NUM_LAST_INDEX +1
            self.NUM_LAST_INDEX += 1
        OBJ_ELEMENT = {"INDEX" : NUM_INDEX,\
                       "INDEX_I" : NUM_I,\
                       "INDEX_J" : NUM_J,\
                       "CENTER" : {"LAT" : NUM_CENTER_LAT, "LON" : NUM_CENTER_LON},\
                       "VERTEX" : {"NE": ARR_NE, "SE": ARR_SE, "SW": ARR_SW, "NW": ARR_NW},\
                       "EDGE" : {"N": NUM_CENTER_LAT + NUM_NS_LAT,"S": NUM_CENTER_LAT + NUM_SN_LAT,\
                                 "E": NUM_CENTER_LON + NUM_EW_LON,"W": NUM_CENTER_LON + NUM_WE_LON}}
        # Attach one {"VALUE": ...} record per timestep for each variable name;
        # initial values come from ARR_VALUE_NUM when its length matches.
        if len(ARR_VALUE_STR) > 0:
            for I, VAR in enumerate(ARR_VALUE_STR):
                OBJ_ELEMENT[VAR] = [{ "VALUE" : 0.0} for t in range(NUM_NT) ]
                if len(ARR_VALUE_NUM) == NUM_NVAR:
                    for T in range(NUM_NT):
                        OBJ_ELEMENT[VAR][T]["VALUE"] = ARR_VALUE_NUM[I][T]
        ARR_GRID.append(OBJ_ELEMENT)
def add_an_geo_variable(self, ARR_GRID, NUM_INDEX=-999, NUM_J=0, NUM_I=0, NUM_NT=0,\
STR_VALUE=STR_VALUE_INIT, NUM_VALUE=NUM_VALUE_INIT ):
if NUM_INDEX == -999:
NUM_INDEX = self.INDEX_MAP[NUM_J][NUM_I]
if NUM_NT == 0:
NUM_NT = self.NUM_NT
ARR_GRID[NUM_INDEX][STR_VALUE] = {{"VALUE": NUM_VALUE } for t in range(NUM_NT)}
def create_resample_lat_lon(self, ARR_RANGE_LAT=[0,0],NUM_EDGE_LAT=0,\
ARR_RANGE_LON=[0,0],NUM_EDGE_LON=0 ):
self.NUM_GRIDS_LON = round((ARR_RANGE_LON[1] - ARR_RANGE_LON[0])/NUM_EDGE_LON)
self.NUM_GRIDS_LAT = round((ARR_RANGE_LAT[1] - ARR_RANGE_LAT[0])/NUM_EDGE_LAT)
self.ARR_LAT = [[ 0 for i in range(self.NUM_GRIDS_LON)] for j in range(self.NUM_GRIDS_LAT) ]
self.ARR_LON = [[ 0 for i in range(self.NUM_GRIDS_LON)] for j in range(self.NUM_GRIDS_LAT) ]
for j in range(self.NUM_GRIDS_LAT):
for i in range(self.NUM_GRIDS_LON):
NUM_LAT = ARR_RANGE_LAT[0] + NUM_EDGE_LAT * j
NUM_LON = ARR_RANGE_LON[0] + NUM_EDGE_LON * i
self.ARR_LON[j][i] = ARR_RANGE_LON[0] + NUM_EDGE_LON * i
self.ARR_LAT[j][i] = ARR_RANGE_LAT[0] + NUM_EDGE_LAT * j
def create_reference_map(self, MAP_TARGET, MAP_RESAMPLE, STR_TYPE="FIX", NUM_SHIFT=0.001, IF_PB=False):
"""Must input with OBJ_REFERENCE
WARNING: The edge of gridcells may not be included due to the unfinished algorithm
"""
self.ARR_REFERENCE_MAP = []
if STR_TYPE=="GRIDBYGEO":
NUM_OBJ_G_LEN = len(MAP_TARGET)
for OBJ_G in MAP_TARGET:
NUM_G_COOR = [OBJ_G["CENTER"]["LAT"], OBJ_G["CENTER"]["LON"]]
for OBJ_R in MAP_RESAMPLE:
NUM_CHK_IN_EW = (OBJ_R["EDGE"]["E"] - OBJ_G["CENTER"]["LON"]) *\
(OBJ_R["EDGE"]["W"] - OBJ_G["CENTER"]["LON"])
NUM_CHK_IN_SN = (OBJ_R["EDGE"]["N"] - OBJ_G["CENTER"]["LAT"]) *\
(OBJ_R["EDGE"]["S"] - OBJ_G["CENTER"]["LAT"])
if NUM_CHK_IN_EW == 0: NUM_CHK_IN_EW = (OBJ_R["EDGE"]["E"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"]) *\
(OBJ_R["EDGE"]["W"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"])
if NUM_CHK_IN_SN == 0: NUM_CHK_IN_SN = (OBJ_R["EDGE"]["E"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"]) *\
(OBJ_R["EDGE"]["W"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"])
if NUM_CHK_IN_EW < 0 and NUM_CHK_IN_SN < 0:
OBJ_ELEMENT = {"INDEX" : OBJ_G["INDEX"],\
"CENTER" : OBJ_G["CENTER"],\
"INDEX_REF" : OBJ_R["INDEX"],\
"INDEX_REF_I" : OBJ_R["INDEX_I"],\
"INDEX_REF_J" : OBJ_R["INDEX_J"],\
"CENTER_REF" : OBJ_R["CENTER"],\
}
self.ARR_REFERENCE_MAP.append(OBJ_ELEMENT)
break
if IF_PB: TOOLS.progress_bar(TOOLS.cal_loop_progress([OBJ_G["INDEX"]], [NUM_OBJ_G_LEN]), STR_DES="CREATING REFERENCE MAP")
elif STR_TYPE=="FIX":
NUM_OBJ_G_LEN = len(MAP_TARGET)
for OBJ_G in MAP_TARGET:
NUM_G_COOR = [OBJ_G["CENTER"]["LAT"], OBJ_G["CENTER"]["LON"]]
if self.ARR_RESAMPLE_MAP_PARA["EDGE"]["W"] == -999 or self.ARR_RESAMPLE_MAP_PARA["EDGE"]["E"] == -999:
NUM_CHK_EW_IN = -1
else:
NUM_CHK_EW_IN = (NUM_G_COOR[1] - self.ARR_RESAMPLE_MAP_PARA["EDGE"]["W"] ) * ( NUM_G_COOR[1] - self.ARR_RESAMPLE_MAP_PARA["EDGE"]["E"] )
if self.ARR_RESAMPLE_MAP_PARA["EDGE"]["N"] == -999 or self.ARR_RESAMPLE_MAP_PARA["EDGE"]["S"] == -999:
NUM_CHK_SN_IN = -1
else:
NUM_CHK_SN_IN = (NUM_G_COOR[0] - self.ARR_RESAMPLE_MAP_PARA["EDGE"]["S"] ) * ( NUM_G_COOR[0] - self.ARR_RESAMPLE_MAP_PARA["EDGE"]["N"] )
if NUM_CHK_EW_IN < 0 and NUM_CHK_SN_IN < 0:
for OBJ_R in MAP_RESAMPLE:
NUM_CHK_IN_EW = (OBJ_R["EDGE"]["E"] - OBJ_G["CENTER"]["LON"]) *\
(OBJ_R["EDGE"]["W"] - OBJ_G["CENTER"]["LON"])
NUM_CHK_IN_SN = (OBJ_R["EDGE"]["N"] - OBJ_G["CENTER"]["LAT"]) *\
(OBJ_R["EDGE"]["S"] - OBJ_G["CENTER"]["LAT"])
if NUM_CHK_IN_EW == 0: NUM_CHK_IN_EW = (OBJ_R["EDGE"]["E"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"]) *\
(OBJ_R["EDGE"]["W"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"])
if NUM_CHK_IN_SN == 0: NUM_CHK_IN_SN = (OBJ_R["EDGE"]["E"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"]) *\
(OBJ_R["EDGE"]["W"] + NUM_SHIFT - OBJ_G["CENTER"]["LON"])
if NUM_CHK_IN_EW < 0 and NUM_CHK_IN_SN < 0:
OBJ_ELEMENT = {"INDEX" : OBJ_G["INDEX"],\
"INDEX_I" : OBJ_G["INDEX_I"],\
"INDEX_J" : OBJ_G["INDEX_J"],\
"CENTER" : OBJ_G["CENTER"],\
"INDEX_REF" : OBJ_R["INDEX"],\
"INDEX_REF_I" : OBJ_R["INDEX_I"],\
"INDEX_REF_J" : OBJ_R["INDEX_J"],\
"CENTER_REF" : OBJ_R["CENTER"],\
}
self.ARR_REFERENCE_MAP.append(OBJ_ELEMENT)
break
if IF_PB: TOOLS.progress_bar(TOOLS.cal_loop_progress([OBJ_G["INDEX"]], [NUM_OBJ_G_LEN]), STR_DES="CREATING REFERENCE MAP")
    def export_grid_map(self, ARR_GRID_IN, STR_DIR, STR_FILENAME, ARR_VAR_STR=[],\
                        ARR_VAR_ITEM=["MEAN", "MEDIAN", "MIN", "MAX", "P95", "P75", "P25", "P05"],\
                        NUM_NX=0, NUM_NY=0, NUM_NT=0, STR_TYPE="netCDF4", IF_PB=False ):
        """Export a grid-object list as dense 2-D/3-D maps into a netCDF4 file.

        Converts ARR_GRID_IN to [NY,NX] / [NT,NY,NX] arrays via convert_grid2map
        and writes one group per entry of ARR_VAR_STR, each holding the statistic
        items in ARR_VAR_ITEM.

        Parameters:
            ARR_GRID_IN  : flat list of per-cell dicts (keys INDEX, INDEX_I/J,
                           CENTER, plus the variables named in ARR_VAR_STR).
            STR_DIR, STR_FILENAME : output location; the file is created ('w').
            ARR_VAR_STR  : variable names; one netCDF group is created per name.
            ARR_VAR_ITEM : statistic keys written inside each variable group.
            NUM_NX/NY/NT : map dimensions; 0 means "use self.NUM_NX/NY/NT".
            STR_TYPE     : only "netCDF4" is handled; other values do nothing.
            IF_PB        : accepted for signature symmetry; not used here.
        """
        TIME_NOW = time.gmtime()
        # Build creation-date strings for the file history attribute.
        STR_DATE_NOW = "{0:04d}-{1:02d}-{2:02d}".format(TIME_NOW.tm_year, TIME_NOW.tm_mon, TIME_NOW.tm_mday)
        STR_TIME_NOW = "{0:04d}:{1:02d}:{2:02d}".format(TIME_NOW.tm_hour, TIME_NOW.tm_min, TIME_NOW.tm_sec)
        if NUM_NX==0: NUM_NX = self.NUM_NX
        if NUM_NY==0: NUM_NY = self.NUM_NY
        if NUM_NT==0: NUM_NT = self.NUM_NT
        if STR_TYPE == "netCDF4":
            NCDF4_DATA = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, STR_FILENAME), 'w', format="NETCDF4")
            # CREATE ATTRIBUTEs:
            NCDF4_DATA.description = \
                "The grid information in netCDF4"
            NCDF4_DATA.history = "Create on {0:s} at {1:s}".format(STR_DATE_NOW, STR_TIME_NOW)
            # CREATE DIMENSIONs:
            # "Values" is unlimited so per-cell raw sample lists can vary in length.
            NCDF4_DATA.createDimension("Y"     , NUM_NY )
            NCDF4_DATA.createDimension("X"     , NUM_NX )
            NCDF4_DATA.createDimension("Time"  , NUM_NT )
            NCDF4_DATA.createDimension("Values", None   )
            # CREATE BASIC VARIABLES:
            NCDF4_DATA.createVariable("INDEX", "i4", ("Y", "X"))
            NCDF4_DATA.createVariable("INDEX_J", "i4", ("Y", "X"))
            NCDF4_DATA.createVariable("INDEX_I", "i4", ("Y", "X"))
            NCDF4_DATA.createVariable("CENTER_LON", "f8", ("Y", "X"))
            NCDF4_DATA.createVariable("CENTER_LAT", "f8", ("Y", "X"))
            # CREATE GROUP for Variables:
            for VAR in ARR_VAR_STR:
                NCDF4_DATA.createGroup(VAR)
                for ITEM in ARR_VAR_ITEM:
                    # "VALUE" carries the raw samples and needs the extra
                    # unlimited "Values" axis; the statistics are scalar per cell.
                    if ITEM == "VALUE" :
                        NCDF4_DATA.groups[VAR].createVariable(ITEM, "f8", ("Time", "Y", "X", "Values"))
                    else:
                        NCDF4_DATA.groups[VAR].createVariable(ITEM, "f8", ("Time", "Y", "X"))
            # WRITE IN VARIABLE
            # Index maps are written row by row from the dense arrays produced
            # by convert_grid2map.
            for V in ["INDEX", "INDEX_J", "INDEX_I"]:
                map_in = self.convert_grid2map(ARR_GRID_IN, V, NX=NUM_NX, NY=NUM_NY, NC_TYPE="INT")
                for n in range(len(map_in)):
                    NCDF4_DATA.variables[V][n] = map_in[n]
            for V1 in ["CENTER"]:
                for V2 in ["LON", "LAT"]:
                    map_in = self.convert_grid2map(ARR_GRID_IN, V1, V2, NX=NUM_NX, NY=NUM_NY, NC_TYPE="FLOAT")
                    for n in range(len(map_in)):
                        NCDF4_DATA.variables["{0:s}_{1:s}".format(V1, V2)][n] = map_in[n]
            for V1 in ARR_VAR_STR:
                for V2 in ARR_VAR_ITEM:
                    # NOTE: no NC_TYPE here, so convert_grid2map returns plain
                    # Python lists pre-filled with self.NUM_NULL.
                    map_in = self.convert_grid2map(ARR_GRID_IN, V1, V2, NX=NUM_NX, NY=NUM_NY, NT=NUM_NT)
                    for n in range(len(map_in)):
                        NCDF4_DATA.groups[V1].variables[V2][n] = map_in[n]
            NCDF4_DATA.close()
    def export_grid(self, ARR_GRID_IN, STR_DIR, STR_FILENAME, ARR_VAR_STR=[],\
                    ARR_VAR_ITEM=["VALUE", "MEAN", "MEDIAN", "MIN", "MAX", "P95", "P75", "P25", "P05"],\
                    NUM_NX=0, NUM_NY=0, NUM_NT=0, STR_TYPE="netCDF4", IF_PB=False ):
        """Export a grid-object list to netCDF4 cell by cell.

        Unlike export_grid_map (which first densifies to full maps), this walks
        ARR_GRID_IN directly and writes each cell at its (INDEX_J, INDEX_I)
        position, including the raw "VALUE" sample lists.

        Parameters:
            ARR_GRID_IN  : flat list of per-cell dicts (INDEX, INDEX_I/J, CENTER,
                           plus OBJ[VAR][T][ITEM] for every VAR/ITEM requested).
            STR_DIR, STR_FILENAME : output location; the file is created ('w').
            ARR_VAR_STR  : variable names; one netCDF group is created per name.
            ARR_VAR_ITEM : item keys written per variable (includes "VALUE").
            NUM_NX/NY/NT : dimensions; 0 means "use self.NUM_NX/NY/NT".
            STR_TYPE     : only "netCDF4" is handled; other values do nothing.
            IF_PB        : show a progress bar while writing.
        """
        TIME_NOW = time.gmtime()
        # Creation-date strings for the file history attribute.
        STR_DATE_NOW = "{0:04d}-{1:02d}-{2:02d}".format(TIME_NOW.tm_year, TIME_NOW.tm_mon, TIME_NOW.tm_mday)
        STR_TIME_NOW = "{0:04d}:{1:02d}:{2:02d}".format(TIME_NOW.tm_hour, TIME_NOW.tm_min, TIME_NOW.tm_sec)
        if NUM_NX==0: NUM_NX = self.NUM_NX
        if NUM_NY==0: NUM_NY = self.NUM_NY
        if NUM_NT==0: NUM_NT = self.NUM_NT
        if STR_TYPE == "netCDF4":
            NCDF4_DATA = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, STR_FILENAME), 'w', format="NETCDF4")
            # CREATE ATTRIBUTEs:
            NCDF4_DATA.description = \
                "The grid information in netCDF4"
            NCDF4_DATA.history = "Create on {0:s} at {1:s}".format(STR_DATE_NOW, STR_TIME_NOW)
            # CREATE DIMENSIONs:
            # "Values" is unlimited so raw sample lists can vary in length.
            NCDF4_DATA.createDimension("Y"     , NUM_NY )
            NCDF4_DATA.createDimension("X"     , NUM_NX )
            NCDF4_DATA.createDimension("Time"  , NUM_NT )
            NCDF4_DATA.createDimension("Values", None   )
            # CREATE BASIC VARIABLES (handles kept for direct indexed writes):
            INDEX      = NCDF4_DATA.createVariable("INDEX", "i4", ("Y", "X"))
            INDEX_J    = NCDF4_DATA.createVariable("INDEX_J", "i4", ("Y", "X"))
            INDEX_I    = NCDF4_DATA.createVariable("INDEX_I", "i4", ("Y", "X"))
            CENTER_LON = NCDF4_DATA.createVariable("CENTER_LON", "f8", ("Y", "X"))
            CENTER_LAT = NCDF4_DATA.createVariable("CENTER_LAT", "f8", ("Y", "X"))
            # CREATE GROUP for Variables:
            for VAR in ARR_VAR_STR:
                NCDF4_DATA.createGroup(VAR)
                for ITEM in ARR_VAR_ITEM:
                    # "VALUE" carries the raw samples (extra unlimited axis).
                    if ITEM == "VALUE" :
                        NCDF4_DATA.groups[VAR].createVariable(ITEM, "f8", ("Time", "Y", "X", "Values"))
                    else:
                        NCDF4_DATA.groups[VAR].createVariable(ITEM, "f8", ("Time", "Y", "X"))
            # WRITE IN VARIABLE
            # One pass over the flat grid list; each object lands at (j, i).
            for IND, OBJ in enumerate(ARR_GRID_IN):
                j = OBJ["INDEX_J"]
                i = OBJ["INDEX_I"]
                INDEX      [j,i] = OBJ["INDEX"]
                INDEX_J    [j,i] = OBJ["INDEX_J"]
                INDEX_I    [j,i] = OBJ["INDEX_I"]
                CENTER_LON [j,i] = OBJ["CENTER"]["LON"]
                CENTER_LAT [j,i] = OBJ["CENTER"]["LAT"]
                for VAR in ARR_VAR_STR:
                    for ITEM in ARR_VAR_ITEM:
                        for T in range(NUM_NT):
                            NCDF4_DATA.groups[VAR].variables[ITEM][T,j,i] = OBJ[VAR][T][ITEM]
                # Progress is measured against the full map size, so it only
                # reaches 100% when ARR_GRID_IN covers every cell.
                if IF_PB: TOOLS.progress_bar((IND+1)/(NUM_NX*NUM_NY), STR_DES="WRITING PROGRESS")
            NCDF4_DATA.close()
    def export_reference_map(self, STR_DIR, STR_FILENAME, STR_TYPE="netCDF4", IF_PB=False, IF_PARALLEL=False ):
        """Write self.ARR_REFERENCE_MAP (source->target cell mapping) to netCDF4.

        Appends into an existing file when present, otherwise creates it with
        the full set of INDEX*/CENTER* variables. The companion reader is
        import_reference_map.

        Parameters:
            STR_DIR, STR_FILENAME : output location.
            STR_TYPE    : only "netCDF4" is handled; other values do nothing.
            IF_PB       : show a progress bar while writing.
            IF_PARALLEL : passed through to NC.Dataset for parallel I/O
                          (requires a parallel-enabled netCDF4 build).
        """
        TIME_NOW = time.gmtime()
        # Creation-date strings kept on self so other writers can reuse them.
        self.STR_DATE_NOW = "{0:04d}-{1:02d}-{2:02d}".format(TIME_NOW.tm_year, TIME_NOW.tm_mon, TIME_NOW.tm_mday)
        self.STR_TIME_NOW = "{0:02d}:{1:02d}:{2:02d}".format(TIME_NOW.tm_hour, TIME_NOW.tm_min, TIME_NOW.tm_sec)
        STR_INPUT_FILENAME = "{0:s}/{1:s}".format(STR_DIR, STR_FILENAME)
        if STR_TYPE == "netCDF4":
            IF_FILECHK = os.path.exists(STR_INPUT_FILENAME)
            if IF_FILECHK:
                # Existing file: reuse its variables (append mode).
                NCDF4_DATA = NC.Dataset(STR_INPUT_FILENAME, 'a', format="NETCDF4", parallel=IF_PARALLEL)
                INDEX          = NCDF4_DATA.variables["INDEX"          ]
                INDEX_J        = NCDF4_DATA.variables["INDEX_J"        ]
                INDEX_I        = NCDF4_DATA.variables["INDEX_I"        ]
                CENTER_LON     = NCDF4_DATA.variables["CENTER_LON"     ]
                CENTER_LAT     = NCDF4_DATA.variables["CENTER_LAT"     ]
                INDEX_REF      = NCDF4_DATA.variables["INDEX_REF"      ]
                INDEX_REF_J    = NCDF4_DATA.variables["INDEX_REF_J"    ]
                INDEX_REF_I    = NCDF4_DATA.variables["INDEX_REF_I"    ]
                CENTER_REF_LON = NCDF4_DATA.variables["CENTER_REF_LON" ]
                CENTER_REF_LAT = NCDF4_DATA.variables["CENTER_REF_LAT" ]
            else:
                # Fresh file: create dimensions and variables from scratch.
                NCDF4_DATA = NC.Dataset(STR_INPUT_FILENAME, 'w', format="NETCDF4", parallel=IF_PARALLEL)
                # CREATE ATTRIBUTEs:
                NCDF4_DATA.description = \
                    "The netCDF4 version of reference map which contains grid information for resampling"
                NCDF4_DATA.history = "Create on {0:s} at {1:s}".format(self.STR_DATE_NOW, self.STR_TIME_NOW)
                # CREATE DIMENSIONs:
                NCDF4_DATA.createDimension("Y",self.NUM_NY)
                NCDF4_DATA.createDimension("X",self.NUM_NX)
                # CREATE_VARIABLES:
                INDEX          = NCDF4_DATA.createVariable("INDEX", "i4", ("Y", "X"))
                INDEX_J        = NCDF4_DATA.createVariable("INDEX_J", "i4", ("Y", "X"))
                INDEX_I        = NCDF4_DATA.createVariable("INDEX_I", "i4", ("Y", "X"))
                CENTER_LON     = NCDF4_DATA.createVariable("CENTER_LON", "f8", ("Y", "X"))
                CENTER_LAT     = NCDF4_DATA.createVariable("CENTER_LAT", "f8", ("Y", "X"))
                INDEX_REF      = NCDF4_DATA.createVariable("INDEX_REF", "i4", ("Y", "X"))
                INDEX_REF_J    = NCDF4_DATA.createVariable("INDEX_REF_J", "i4", ("Y", "X"))
                INDEX_REF_I    = NCDF4_DATA.createVariable("INDEX_REF_I", "i4", ("Y", "X"))
                CENTER_REF_LON = NCDF4_DATA.createVariable("CENTER_REF_LON", "f8", ("Y", "X"))
                CENTER_REF_LAT = NCDF4_DATA.createVariable("CENTER_REF_LAT", "f8", ("Y", "X"))
            NUM_TOTAL_OBJ = len(self.ARR_REFERENCE_MAP)
            NUM_MAX_I = self.NUM_NX
            for OBJ in self.ARR_REFERENCE_MAP:
                j = OBJ["INDEX_J"]
                i = OBJ["INDEX_I"]
                INDEX[j,i]       = OBJ["INDEX"]
                INDEX_J[j,i]     = OBJ["INDEX_J"]
                INDEX_I[j,i]     = OBJ["INDEX_I"]
                INDEX_REF[j,i]   = OBJ["INDEX_REF"]
                INDEX_REF_J[j,i] = OBJ["INDEX_REF_J"]
                INDEX_REF_I[j,i] = OBJ["INDEX_REF_I"]
                CENTER_LON [j,i] = OBJ["CENTER"]["LON"]
                CENTER_LAT [j,i] = OBJ["CENTER"]["LAT"]
                CENTER_REF_LON [j,i] = OBJ["CENTER_REF"]["LON"]
                CENTER_REF_LAT [j,i] = OBJ["CENTER_REF"]["LAT"]
                # NOTE(review): the fraction (i + j*NX) / len(map) assumes the
                # map densely covers the full grid; for a partial map it can
                # exceed 1.0 — confirm against the callers.
                if IF_PB: TOOLS.progress_bar((i+j*NUM_MAX_I)/float(NUM_TOTAL_OBJ), STR_DES="Exporting")
            NCDF4_DATA.close()
def import_reference_map(self, STR_DIR, STR_FILENAME, ARR_X_RANGE=[], ARR_Y_RANGE=[], STR_TYPE="netCDF4", IF_PB=False):
self.ARR_REFERENCE_MAP = []
self.NUM_MAX_INDEX_RS = 0
self.NUM_MIN_INDEX_RS = 999
if len(ARR_X_RANGE) != 0:
self.I_MIN = ARR_X_RANGE[0]
self.I_MAX = ARR_X_RANGE[1]
else:
self.I_MIN = 0
self.I_MAX = self.REFERENCE_MAP_NX
if len(ARR_Y_RANGE) != 0:
self.J_MIN = ARR_Y_RANGE[0]
self.J_MAX = ARR_Y_RANGE[1]
else:
self.J_MIN = 0
self.J_MAX = self.REFERENCE_MAP_NY
if STR_TYPE == "netCDF4":
NCDF4_DATA = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, STR_FILENAME), 'r', format="NETCDF4")
# READ DIMENSIONs:
self.REFERENCE_MAP_NY = NCDF4_DATA.dimensions["Y"].size
self.REFERENCE_MAP_NX = NCDF4_DATA.dimensions["X"].size
# CREATE_VARIABLES:
INDEX = NCDF4_DATA.variables["INDEX" ]
INDEX_J = NCDF4_DATA.variables["INDEX_J" ]
INDEX_I = NCDF4_DATA.variables["INDEX_I" ]
CENTER_LON = NCDF4_DATA.variables["CENTER_LON" ]
CENTER_LAT = NCDF4_DATA.variables["CENTER_LAT" ]
INDEX_REF = NCDF4_DATA.variables["INDEX_REF" ]
INDEX_REF_J = NCDF4_DATA.variables["INDEX_REF_J" ]
INDEX_REF_I = NCDF4_DATA.variables["INDEX_REF_I" ]
CENTER_REF_LON = NCDF4_DATA.variables["CENTER_REF_LON" ]
CENTER_REF_LAT = NCDF4_DATA.variables["CENTER_REF_LAT" ]
for j in range(self.J_MIN, self.J_MAX):
for i in range(self.I_MIN, self.I_MAX):
OBJ_ELEMENT = {"INDEX" : 0 ,\
"INDEX_I" : 0 ,\
"INDEX_J" : 0 ,\
"CENTER" : {"LAT": 0.0, "LON": 0.0} ,\
"INDEX_REF" : 0 ,\
"INDEX_REF_I" : 0 ,\
"INDEX_REF_J" : 0 ,\
"CENTER_REF" : {"LAT": 0.0, "LON": 0.0} }
if INDEX [j][i] != None:
OBJ_ELEMENT["INDEX"] = INDEX [j][i]
OBJ_ELEMENT["INDEX_J"] = INDEX_J [j][i]
OBJ_ELEMENT["INDEX_I"] = INDEX_I [j][i]
OBJ_ELEMENT["INDEX_REF"] = INDEX_REF [j][i]
OBJ_ELEMENT["INDEX_REF_J"] = INDEX_REF_J [j][i]
OBJ_ELEMENT["INDEX_REF_I"] = INDEX_REF_I [j][i]
OBJ_ELEMENT["CENTER"]["LAT"] = CENTER_LAT [j][i]
OBJ_ELEMENT["CENTER"]["LON"] = CENTER_LON [j][i]
OBJ_ELEMENT["CENTER_REF"]["LAT"] = CENTER_REF_LAT[j][i]
OBJ_ELEMENT["CENTER_REF"]["LON"] = CENTER_REF_LON[j][i]
else:
OBJ_ELEMENT["INDEX"] = INDEX [j][i]
OBJ_ELEMENT["INDEX_I"] = INDEX_J [j][i]
OBJ_ELEMENT["INDEX_J"] = INDEX_I [j][i]
OBJ_ELEMENT["INDEX_REF"] = -999
OBJ_ELEMENT["INDEX_REF_J"] = -999
OBJ_ELEMENT["INDEX_REF_I"] = -999
OBJ_ELEMENT["CENTER"]["LAT"] = CENTER_LAT [j][i]
OBJ_ELEMENT["CENTER"]["LON"] = CENTER_LON [j][i]
OBJ_ELEMENT["CENTER_REF"]["LAT"] = -999
OBJ_ELEMENT["CENTER_REF"]["LON"] = -999
self.ARR_REFERENCE_MAP.append(OBJ_ELEMENT)
self.NUM_MIN_INDEX_RS = min(self.NUM_MIN_INDEX_RS, INDEX_REF[j][i])
self.NUM_MAX_INDEX_RS = max(self.NUM_MAX_INDEX_RS, INDEX_REF[j][i])
if IF_PB: TOOLS.progress_bar((j - self.J_MIN + 1)/float(self.J_MAX - self.J_MIN), STR_DES="IMPORTING")
if self.NUM_MIN_INDEX_RS == 0:
self.NUM_MAX_RS = self.NUM_MAX_INDEX_RS + 1
NCDF4_DATA.close()
    def create_resample_map(self, ARR_REFERENCE_MAP=[], ARR_VARIABLES=["Value"], ARR_GRID_IN=[],\
                            IF_PB=False, NUM_NT=0, NUM_NX=0, NUM_NY=0, NUM_NULL=-9999.999):
        """Build self.ARR_RESAMPLE_OUT, the per-target-cell sample container.

        Two modes:
          * ARR_REFERENCE_MAP empty  -> initialize an empty resample grid from
            self.NUM_GRIDS_LAT/LON and record the map edges.
          * ARR_REFERENCE_MAP given  -> scatter the values of ARR_GRID_IN into
            the target cells named by the reference map, appending each sample
            to OUT[R_IND][VAR][T]["VALUE"].

        Parameters:
            ARR_REFERENCE_MAP : per-source-cell dicts with INDEX_REF* keys.
            ARR_VARIABLES     : variable keys copied from the grid objects.
            ARR_GRID_IN       : source grid; defaults to self.ARR_GRID.
            IF_PB             : show a progress bar.
            NUM_NT/NX/NY      : dimensions; 0 means "use self.NUM_*".
            NUM_NULL          : accepted for signature symmetry; unused here.
        """
        if NUM_NT == 0:
            NUM_NT = self.NUM_NT
        if NUM_NX == 0:
            NUM_NX = self.NUM_NX
        if NUM_NY == 0:
            NUM_NY = self.NUM_NY
        if len(ARR_REFERENCE_MAP) == 0:
            self.ARR_RESAMPLE_OUT = []
            # NOTE(review): this initializes ARR_RESAMPLE_OUT_PARA, but the edge
            # assignments below (and other methods, e.g. the "FIX" branch of the
            # reference-map builder) write/read ARR_RESAMPLE_MAP_PARA — one of
            # the two names is likely a typo; confirm which attribute callers use.
            self.ARR_RESAMPLE_OUT_PARA = {"EDGE": {"N": 0.0,"S": 0.0,"E": 0.0,"W": 0.0}}
            NUM_END_J = self.NUM_GRIDS_LAT - 1
            NUM_END_I = self.NUM_GRIDS_LON - 1
            # NOTE(review): ARR_EMPTY is built but never used below.
            ARR_EMPTY = [float("NaN") for n in range(self.NUM_NT)]
            for J in range(self.NUM_GRIDS_LAT):
                for I in range(self.NUM_GRIDS_LON):
                    NUM_IND = I + J * self.NUM_GRIDS_LON
                    self.add_an_geo_element(self.ARR_RESAMPLE_OUT, NUM_INDEX=NUM_IND, NUM_J=J, NUM_I=I, \
                                            NUM_NX= self.NUM_GRIDS_LON, NUM_NY= self.NUM_GRIDS_LAT,\
                                            ARR_VALUE_STR=ARR_VARIABLES, NUM_NT=NUM_NT)
            # NOTE(review): the W/E edges are computed from ARR_LAT — presumably
            # they should come from ARR_LON; verify before relying on them.
            self.ARR_RESAMPLE_MAP_PARA["EDGE"]["N"] = max( self.ARR_LAT[NUM_END_J][0], self.ARR_LAT[NUM_END_J][NUM_END_I] )
            self.ARR_RESAMPLE_MAP_PARA["EDGE"]["S"] = min( self.ARR_LAT[0][0],         self.ARR_LAT[0][NUM_END_I] )
            self.ARR_RESAMPLE_MAP_PARA["EDGE"]["W"] = min( self.ARR_LAT[0][0],         self.ARR_LAT[NUM_END_J][0] )
            self.ARR_RESAMPLE_MAP_PARA["EDGE"]["E"] = max( self.ARR_LAT[0][NUM_END_I], self.ARR_LAT[NUM_END_J][NUM_END_I] )
            self.NUM_MAX_INDEX_RS = NUM_IND
        else:
            if ARR_GRID_IN == []: ARR_GRID_IN = self.ARR_GRID
            # One dict per target cell; each variable holds NUM_NT sample lists.
            self.ARR_RESAMPLE_OUT = [ {} for n in range(NUM_NX * NUM_NY)]
            for IND in range(len(self.ARR_RESAMPLE_OUT)):
                for VAR in ARR_VARIABLES:
                    self.ARR_RESAMPLE_OUT[IND][VAR] = [{"VALUE" : []} for T in range(NUM_NT) ]
            #for IND in range(len(ARR_REFERENCE_MAP)):
            for IND in range(len(ARR_GRID_IN)):
                R_IND = ARR_REFERENCE_MAP[IND]["INDEX_REF"]
                R_J   = ARR_REFERENCE_MAP[IND]["INDEX_REF_J"]
                R_I   = ARR_REFERENCE_MAP[IND]["INDEX_REF_I"]
                # NOTE(review): R_IND_FIX (window-corrected index) is computed
                # but never used — the raw R_IND indexes ARR_RESAMPLE_OUT below.
                # Confirm whether R_IND_FIX was meant to replace R_IND when a
                # sub-window (ARR_RESAMPLE_LIM_X/Y) is active.
                R_IND_FIX = TOOLS.fix_ind(R_IND, R_J, R_I, ARR_XRANGE=self.ARR_RESAMPLE_LIM_X, ARR_YRANGE=self.ARR_RESAMPLE_LIM_Y, NX=NUM_NX, NY=NUM_NY)
                if R_IND != None:
                    for VAR in ARR_VARIABLES:
                        for T in range(NUM_NT):
                            #print("R_IND:{0:d}, T:{1:d}, IND:{2:d} ".format(R_IND, T, IND))
                            NUM_VAL_IN = ARR_GRID_IN[IND][VAR][T]["VALUE"]
                            self.ARR_RESAMPLE_OUT[R_IND][VAR][T]["VALUE"].append(NUM_VAL_IN)
                    # Target-cell metadata is (re)written for every contributing
                    # source cell; the last writer wins.
                    self.ARR_RESAMPLE_OUT[R_IND]["INDEX"]   = ARR_REFERENCE_MAP[IND]["INDEX_REF"]
                    self.ARR_RESAMPLE_OUT[R_IND]["INDEX_J"] = ARR_REFERENCE_MAP[IND]["INDEX_REF_J"]
                    self.ARR_RESAMPLE_OUT[R_IND]["INDEX_I"] = ARR_REFERENCE_MAP[IND]["INDEX_REF_I"]
                    self.ARR_RESAMPLE_OUT[R_IND]["CENTER"]  = {"LAT": 0.0, "LON": 0.0 }
                    self.ARR_RESAMPLE_OUT[R_IND]["CENTER"]["LAT"] = ARR_REFERENCE_MAP[IND]["CENTER"]["LAT"]
                    self.ARR_RESAMPLE_OUT[R_IND]["CENTER"]["LON"] = ARR_REFERENCE_MAP[IND]["CENTER"]["LON"]
                if IF_PB: TOOLS.progress_bar(TOOLS.cal_loop_progress([IND], [len(ARR_GRID_IN)]), STR_DES="RESAMPLING PROGRESS")
def cal_resample_map(self, ARR_VARIABLES, ARR_GRID_IN=[], NUM_NT=0, IF_PB=False, \
DIC_PERCENTILE={ "P05": 0.05, "P10": 0.1, "P25": 0.25, "P75": 0.75, "P90": 0.90, "P95": 0.95}, NUM_NULL=-9999.999):
if NUM_NT == 0:
NUM_NT = self.NUM_NT
NUM_RS_OUT_LEN = len(self.ARR_RESAMPLE_OUT)
for IND in range(NUM_RS_OUT_LEN):
for VAR in ARR_VARIABLES:
for T in range(NUM_NT):
ARR_IN = self.ARR_RESAMPLE_OUT[IND][VAR][T]["VALUE"]
if len(ARR_IN) > 0:
ARR_IN.sort()
NUM_ARR_LEN = len(ARR_IN)
NUM_ARR_MEAN = sum(ARR_IN) / float(NUM_ARR_LEN)
NUM_ARR_S2SUM = 0
if math.fmod(NUM_ARR_LEN,2) == 1:
NUM_MPOS = [int((NUM_ARR_LEN-1)/2.0), int((NUM_ARR_LEN-1)/2.0)]
else:
NUM_MPOS = [int(NUM_ARR_LEN/2.0) , int(NUM_ARR_LEN/2.0 -1) ]
self.ARR_RESAMPLE_OUT[IND][VAR][T]["MIN"] = min(ARR_IN)
self.ARR_RESAMPLE_OUT[IND][VAR][T]["MAX"] = max(ARR_IN)
self.ARR_RESAMPLE_OUT[IND][VAR][T]["MEAN"] = NUM_ARR_MEAN
self.ARR_RESAMPLE_OUT[IND][VAR][T]["MEDIAN"] = ARR_IN[NUM_MPOS[0]] *0.5 + ARR_IN[NUM_MPOS[1]] *0.5
for STVA in DIC_PERCENTILE:
self.ARR_RESAMPLE_OUT[IND][VAR][T][STVA] = ARR_IN[ round(NUM_ARR_LEN * DIC_PERCENTILE[STVA])-1]
for VAL in ARR_IN:
NUM_ARR_S2SUM += (VAL - NUM_ARR_MEAN)**2
self.ARR_RESAMPLE_OUT[IND][VAR][T]["STD"] = (NUM_ARR_S2SUM / max(1, NUM_ARR_LEN-1))**0.5
if IF_PB: TOOLS.progress_bar(TOOLS.cal_loop_progress([IND], [NUM_RS_OUT_LEN]), STR_DES="RESAMPLING CALCULATION")
def convert_grid2map(self, ARR_GRID_IN, STR_VAR, STR_VAR_TYPE="", NX=0, NY=0, NT=0, IF_PB=False, NC_TYPE=""):
if NC_TYPE == "INT":
if NT == 0:
ARR_OUT = NP.empty([NY, NX], dtype=NP.int8)
else:
ARR_OUT = NP.empty([NT, NY, NX], dtype=NP.int8)
elif NC_TYPE == "FLOAT":
if NT == 0:
ARR_OUT = NP.empty([NY, NX], dtype=NP.float64)
else:
ARR_OUT = NP.empty([NT, NY, NX], dtype=NP.float64)
else:
if NT == 0:
ARR_OUT = [[ self.NUM_NULL for i in range(NX)] for j in range(NY) ]
else:
ARR_OUT = [[[ self.NUM_NULL for i in range(NX)] for j in range(NY) ] for t in range(NT)]
if STR_VAR_TYPE == "":
for I, GRID in enumerate(ARR_GRID_IN):
if GRID["INDEX"] != -999:
if NT == 0:
#print(GRID["INDEX_J"], GRID["INDEX_I"], GRID[STR_VAR])
ARR_OUT[ GRID["INDEX_J"] ][ GRID["INDEX_I"] ] = GRID[STR_VAR]
else:
for T in range(NT):
ARR_OUT[T][ GRID["INDEX_J"] ][ GRID["INDEX_I"] ] = GRID[STR_VAR][T]
if IF_PB==True: TOOLS.progress_bar(((I+1)/(len(ARR_GRID_IN))))
else:
for I, GRID in enumerate(ARR_GRID_IN):
if GRID["INDEX"] != -999:
if NT == 0:
ARR_OUT[ GRID["INDEX_J"] ][ GRID["INDEX_I"] ] = GRID[STR_VAR][STR_VAR_TYPE]
else:
for T in range(NT):
ARR_OUT[T][ GRID["INDEX_J"] ][ GRID["INDEX_I"] ] = GRID[STR_VAR][T][STR_VAR_TYPE]
if IF_PB==True: TOOLS.progress_bar(((I+1)/(len(ARR_GRID_IN))))
return ARR_OUT
def mask_grid(self, ARR_GRID_IN, STR_VAR, STR_VAR_TYPE, NUM_NT=0, STR_MASK="MASK",\
ARR_NUM_DTM=[0,1,2], ARR_NUM_DTM_RANGE=[0,1]):
if NUM_NT == 0:
NUM_NT= self.NUM_NT
for IND, GRID in enumerate(ARR_GRID_IN):
for T in range(NUM_NT):
NUM_DTM = GEO_TOOLS.mask_dtm(GRID[STR_VAR][T][STR_VAR_TYPE], ARR_NUM_DTM=ARR_NUM_DTM, ARR_NUM_DTM_RANGE=ARR_NUM_DTM_RANGE)
ARR_GRID_IN[IND][STR_VAR][T][STR_MASK] = NUM_DTM
class MATH_TOOLS:
    """Stand-alone math helpers, invoked through the class namespace
    (e.g. MATH_TOOLS.gau_kde(...)); the methods take no `self`.

    GaussJordanEli  : Gauss-Jordan elimination (square or augmented systems)
    finding_XM_LSM  : normal-equation matrix for polynomial least squares
    cal_modelperform: RRMSE / EF / CD / RMSE model-performance scores
    cal_kappa       : Fleiss' kappa inter-rater agreement
    gau_kde         : kernel density estimate with a Gaussian kernel
    standard_dev    : sample standard deviation (NaN-aware)
    h_esti          : rule-of-thumb KDE bandwidth estimator
    data2array      : extract one item key from a 2-D dict grid
    reshape2d       : flatten a list of lists
    NormalVector / NVtoPlane / FindZatP3 : plane geometry helpers
    """
    def GaussJordanEli(arr_in):
        """Gauss-Jordan elimination on a square (n x n) or augmented (n x n+1)
        matrix.

        Returns (reduced matrix, accumulated row operations[, solution vector]);
        the solution vector is only returned for augmented systems. Returns [0]
        when the shape fits neither case. No pivoting is performed, so a zero
        on the diagonal raises ZeroDivisionError.
        """
        num_ydim = len(arr_in)
        num_xdim = len(arr_in[0])
        # Work on a float copy: the original code aliased arr_in and mutated
        # the caller's matrix in place (and integer inputs did integer math).
        arr_out = NP.array(arr_in, dtype=float)
        if num_ydim -num_xdim == 0 or num_xdim - num_ydim == 1:
            # Identity matrix accumulating the row operations applied to arr_out.
            arr_i = NP.array([[0.0 for j in range(num_ydim)] for i in range(num_ydim)])
            for ny in range(num_ydim):
                arr_i[ny][ny] = 1.0
            # Forward elimination: zero everything below the diagonal.
            for nx in range(num_xdim):
                for ny in range(nx+1, num_ydim):
                    arr_i [ny] = arr_i [ny] - arr_i [nx] * arr_out[ny][nx] / float(arr_out[nx][nx])
                    arr_out[ny] = arr_out[ny] - arr_out[nx] * arr_out[ny][nx] / float(arr_out[nx][nx])
            # Backward elimination: zero everything above the diagonal.
            # (Debug prints removed from the original loops.)
            if num_xdim - num_ydim == 1:
                for nx in range(num_xdim-1,-1,-1):
                    for ny in range(num_ydim-1,nx, -1):
                        arr_i [nx] = arr_i [nx] - arr_i [ny] * arr_out[nx][ny] / float(arr_out[ny][ny])
                        arr_out[nx] = arr_out[nx] - arr_out[ny] * arr_out[nx][ny] / float(arr_out[ny][ny])
            else:
                # nx starts at num_xdim, where the inner range is empty, so no
                # out-of-range access happens on the first iteration.
                for nx in range(num_xdim,-1,-1):
                    for ny in range(num_ydim-1, nx, -1):
                        arr_i [nx] = arr_i [nx] - arr_i [ny] * arr_out[nx][ny] / float(arr_out[ny][ny])
                        arr_out[nx] = arr_out[nx] - arr_out[ny] * arr_out[nx][ny] / float(arr_out[ny][ny])
            if num_xdim - num_ydim == 1:
                # Augmented system: extract the solution from the last column.
                arr_sol = [0.0 for n in range(num_ydim)]
                for ny in range(num_ydim):
                    arr_sol[ny] = arr_out[ny][num_xdim-1]/arr_out[ny][ny]
                return arr_out, arr_i, arr_sol
            else:
                return arr_out, arr_i
        else:
            print("Y dim: {0:d}, X dim: {1:d}: can not apply Gaussian-Jordan".format(num_ydim, num_xdim))
            return [0]
    def finding_XM_LSM(arr_in1, arr_in2, m=2):
        """Build the augmented normal-equation matrix for a degree-m polynomial
        least-squares fit of arr_in2 (y) against arr_in1 (x).

        Row j holds [sum(x^j)..sum(x^(j+m)), sum(x^j * y)]; feed the result to
        GaussJordanEli to solve for the coefficients.
        """
        arr_out=[[0.0 for i in range(m+2)] for j in range(m+1)]
        arr_x_power_m  = [0.0 for i in range(m+m+1)]
        arr_xy_power_m = [0.0 for i in range(m+1)]
        for n in range(len(arr_x_power_m)):
            for x in range(len(arr_in1)):
                arr_x_power_m[n] += arr_in1[x] ** n
        for n in range(len(arr_xy_power_m)):
            for x in range(len(arr_in1)):
                arr_xy_power_m[n] += arr_in1[x] ** n * arr_in2[x]
        for j in range(m+1):
            for i in range(j,j+m+1):
                arr_out[j][i-j] = arr_x_power_m[i]
            arr_out[j][m+1] = arr_xy_power_m[j]
        return arr_out
    def cal_modelperform (arr_obs , arr_sim , num_empty=-999.999):
        """Model-performance scores after Vazquez et al. 2002 (Hydrol. Process.).

        Observations that are NaN or equal num_empty are skipped (pair-wise).
        Returns (RRMSE, EF, CD, RMSE, total pair count); -999.999 flags scores
        that could not be computed (no valid pairs or zero denominators).
        """
        num_arr     = len(arr_obs)
        num_n_total = num_arr
        num_sum     = 0
        num_obs_sum = 0
        for n in range( num_arr ):
            if math.isnan(arr_obs[n]) or arr_obs[n] == num_empty:
                num_n_total += -1
            else:
                num_sum     = num_sum     + ( arr_sim[n] - arr_obs[n] ) ** 2
                num_obs_sum = num_obs_sum + arr_obs[n]
        if num_n_total == 0 or num_obs_sum == 0:
            RRMSE   = -999.999
            RMSE    = -999.999
            obs_avg = -999.999
        else:
            RRMSE   = ( num_sum / num_n_total ) ** 0.5 * ( num_n_total / num_obs_sum )
            RMSE    = ( num_sum / num_n_total ) ** 0.5
            obs_avg = num_obs_sum / num_n_total
        num_n_total = num_arr
        oo_sum = 0
        po_sum = 0
        for nn in range( num_arr ):
            if math.isnan(arr_obs[nn]) or arr_obs[nn] == num_empty:
                num_n_total = num_n_total - 1
            else:
                oo_sum = oo_sum + ( arr_obs[nn] - obs_avg ) ** 2
                po_sum = po_sum + ( arr_sim[nn] - arr_obs[nn] ) ** 2
        if num_n_total == 0 or oo_sum * po_sum == 0:
            EF = -999.999
            CD = -999.999
        else:
            EF = ( oo_sum - po_sum ) / oo_sum
            CD = oo_sum / po_sum
        return RRMSE,EF,CD,RMSE, num_arr
    def cal_kappa(ARR_IN, NUM_n=0, NUM_N=0, NUM_k=0):
        """ Fleiss' kappa.
            Must input with ARR_IN in the following format:
                ARR_IN = [ [ NUM for k in range(catalogue)] for N in range(Subjects)]
            Additional parameters: NUM_n is the number of raters (e.g. sim and obs results)
            Additional parameters: NUM_N is the number of subjects (e.g the outputs)
            Additional parameters: NUM_k is the number of catalogue (e.g. results )
            All three default to values inferred from ARR_IN when left at 0.
        """
        if NUM_N == 0:
            NUM_N = len(ARR_IN)
        if NUM_n == 0:
            NUM_n = sum(ARR_IN[0])
        if NUM_k == 0:
            NUM_k = len(ARR_IN[0])
        ARR_p_out = [ 0 for n in range(NUM_k)]
        ARR_P_OUT = [ 0 for n in range(NUM_N)]
        for N in range(NUM_N):
            for k in range(NUM_k):
                ARR_p_out[k] += ARR_IN[N][k]
                ARR_P_OUT[N] += ARR_IN[N][k] ** 2
            ARR_P_OUT[N] -= NUM_n
            ARR_P_OUT[N] = ARR_P_OUT[N] * (1./(NUM_n *(NUM_n - 1)))
        for k in range(NUM_k):
            ARR_p_out[k] = ARR_p_out[k] / (NUM_N * NUM_n)
        NUM_P_BAR = 0
        for N in range(NUM_N):
            NUM_P_BAR += ARR_P_OUT[N]
        NUM_P_BAR = NUM_P_BAR / float(NUM_N)
        NUM_p_bar = 0
        for k in ARR_p_out:
            NUM_p_bar += k **2
        return (NUM_P_BAR - NUM_p_bar) / (1 - NUM_p_bar)
    def gau_kde(ARR_IN_X, ARR_IN_I, NUM_BW=0.1 ):
        """Gaussian kernel density estimate.

        Evaluates the KDE of the samples ARR_IN_I at the query points ARR_IN_X
        with bandwidth NUM_BW; returns one density per query point.
        """
        NUM_SUM  = 0.
        NUM_LENG = len(ARR_IN_X)
        ARR_OUT  = [ 0. for n in range(NUM_LENG)]
        for IND_J, J in enumerate(ARR_IN_X):
            NUM_SUM = 0.0
            for I in ARR_IN_I:
                NUM_SUM += 1 / (2 * math.pi)**0.5 * math.e ** (-0.5 * ((J-I)/NUM_BW) ** 2 )
            ARR_OUT[IND_J] = NUM_SUM / len(ARR_IN_I) / NUM_BW
        return ARR_OUT
    def standard_dev(ARR_IN):
        """Sample standard deviation of ARR_IN, ignoring NaN entries.

        Raises ZeroDivisionError when fewer than two non-NaN values remain.
        """
        # BUGFIX: the original computed the mean over ALL entries (NaNs
        # poisoned it) and OVERWROTE the squared-deviation sum on every
        # iteration ("NUM_SUM2 = ..." instead of "+="), returning only the
        # last term.
        NUM_SUM   = 0.0
        NUM_VALID = 0
        for N in ARR_IN:
            if not math.isnan(N):
                NUM_SUM   += N
                NUM_VALID += 1
        NUM_MEAN = NUM_SUM / NUM_VALID
        NUM_SUM2 = 0.0
        for N in ARR_IN:
            if not math.isnan(N):
                NUM_SUM2 += (N-NUM_MEAN)**2
        return (NUM_SUM2 / (NUM_VALID-1)) ** 0.5
    def h_esti(ARR_IN):
        """A rule-of-thumb (Silverman-type) bandwidth estimator for gau_kde."""
        # BUGFIX: the original called bare `standard_dev`, which is not in
        # scope at call time (methods must be reached via the class).
        NUM_SIGMA = MATH_TOOLS.standard_dev(ARR_IN)
        NUM_N     = len(ARR_IN)
        return ((4 * NUM_SIGMA ** 5) / (3*NUM_N) ) ** 0.2
    def data2array(ARR_IN, STR_IN="MEAN"):
        """Extract item STR_IN from every dict of a 2-D grid of dicts."""
        NUM_J = len(ARR_IN)
        NUM_I = len(ARR_IN[0])
        ARR_OUT = [[ 0.0 for i in range(NUM_I)] for j in range(NUM_J) ]
        for j in range(NUM_J):
            for i in range(NUM_I):
                ARR_OUT[j][i] = ARR_IN[j][i][STR_IN]
        return ARR_OUT
    def reshape2d(ARR_IN):
        """Flatten a list of lists into one list (row-major order)."""
        ARR_OUT=[]
        for A in ARR_IN:
            for B in A:
                ARR_OUT.append(B)
        return ARR_OUT
    def NormalVector( V1, V2):
        """Cross product V1 x V2 of two 3-D vectors."""
        return [(V1[1]*V2[2] - V1[2]*V2[1]), (V1[2]*V2[0] - V1[0]*V2[2]),(V1[0]*V2[1] - V1[1]*V2[0])]
    def NVtoPlane( P0, P1, P2):
        """Plane through three 3-D points, returned as (A, B, C, D) with
        A*x + B*y + C*z = D."""
        V1 = [(P1[0]-P0[0]),(P1[1]-P0[1]),(P1[2]-P0[2])]
        V2 = [(P2[0]-P0[0]),(P2[1]-P0[1]),(P2[2]-P0[2])]
        ARR_NV = MATH_TOOLS.NormalVector(V1, V2)
        D = ARR_NV[0] * P0[0] + ARR_NV[1] * P0[1] + ARR_NV[2] * P0[2]
        return ARR_NV[0],ARR_NV[1],ARR_NV[2],D
    def FindZatP3( P0, P1, P2, P3):
        """Interpolate z at P3 = (x, y) on the plane through P0, P1, P2
        (each (x, y, z))."""
        A,B,C,D = MATH_TOOLS.NVtoPlane(P0, P1, P2)
        return (D-A*P3[0] - B*P3[1])/float(C)
class TOOLS:
    """ TOOLS contains general helpers:
        IS_LEAP           : Gregorian leap-year test
        NNARR             : drop NaN entries (optionally pair-wise)
        DATETIME2HOY      : convert a date to the hour-of-year
        timestamp         : print a UTC timestamp with a message
        fix_ind           : re-map a flat grid index into a sub-window
        progress_bar      : in-place console progress bar
        clean_arr         : drop short sub-lists
        cal_loop_progress : fraction completed of (possibly nested) loops
        calendar_cal      : add a time interval to a calendar date
    """
    # Cumulative hours at the start of each month (index 0 unusedly 0, index
    # 12 = hours in the full year) for normal and leap years.
    ARR_HOY      = [0, 744, 1416, 2160, 2880, 3624, 4344, 5088, 5832, 6552, 7296, 8016, 8760]
    ARR_HOY_LEAP = [0, 744, 1440, 2184, 2904, 3648, 4368, 5112, 5856, 6576, 7320, 8040, 8784]
    def IS_LEAP(NUM_YEAR):
        """Gregorian leap-year rule: divisible by 4, except centuries unless
        divisible by 400 (so 2000 is leap, 1900/2100 are not)."""
        return NUM_YEAR % 4 == 0 and (NUM_YEAR % 100 != 0 or NUM_YEAR % 400 == 0)
    def NNARR(ARR_IN, IF_PAIRING=False):
        """Clean the NaN value in the array.

        IF_PAIRING=False: ARR_IN is one list; returns it without NaNs.
        IF_PAIRING=True : ARR_IN is a list of equally long lists; drops every
        column in which ANY of the lists holds a NaN, keeping them aligned.
        """
        if IF_PAIRING:
            ARR_SIZE = len(ARR_IN)
            ARR_OUT  = [ [] for N in range(ARR_SIZE)]
            for ind_n, N in enumerate(ARR_IN[0]):
                IF_NAN = False
                for ind_a in range(ARR_SIZE):
                    if math.isnan(ARR_IN[ind_a][ind_n]):
                        IF_NAN = True
                        break
                if not IF_NAN:
                    for ind_a in range(ARR_SIZE):
                        ARR_OUT[ind_a].append(ARR_IN[ind_a][ind_n])
        else:
            ARR_OUT = [ ]
            for N in ARR_IN:
                if not math.isnan(N):
                    ARR_OUT.append(N)
        return ARR_OUT
    def DATETIME2HOY(ARR_TIME, ARR_HOY_IN=[]):
        """Convert [YEAR, MON, DAY, HOUR, ...] to the hour-of-year.

        A custom cumulative-hours table may be passed via ARR_HOY_IN;
        otherwise the leap/non-leap table is chosen from the year.
        """
        if len(ARR_HOY_IN) == 0:
            # BUGFIX: use the full Gregorian rule; the original year%4 test
            # wrongly treated century years such as 1900 and 2100 as leap.
            if TOOLS.IS_LEAP(ARR_TIME[0]):
                ARR_HOY_IN = TOOLS.ARR_HOY_LEAP
            else:
                ARR_HOY_IN = TOOLS.ARR_HOY
        return ARR_HOY_IN[ARR_TIME[1]-1] + (ARR_TIME[2]-1)*24 + ARR_TIME[3]
    def timestamp(STR_IN=""):
        """Print the current UTC time followed by STR_IN."""
        print("{0:04d}-{1:02d}-{2:02d}_{3:02d}:{4:02d}:{5:02d} {6:s}".format(time.gmtime().tm_year, time.gmtime().tm_mon, time.gmtime().tm_mday,\
               time.gmtime().tm_hour, time.gmtime().tm_min,  time.gmtime().tm_sec, STR_IN) )
    def fix_ind(IND_IN, IND_J, IND_I, ARR_XRANGE=[], ARR_YRANGE=[], NX=0, NY=0):
        """Re-map flat index IND_IN of a full NX-wide grid into the flat index
        of the sub-window ARR_XRANGE x ARR_YRANGE (inclusive bounds)."""
        NUM_DY   = ARR_YRANGE[0]
        NUM_NX_F = ARR_XRANGE[0]                 # columns skipped on the left
        NUM_NX_R = NX - (ARR_XRANGE[1]+1)        # columns skipped on the right
        if IND_J == ARR_YRANGE[0]:
            IND_OUT = IND_IN - NUM_DY * NX - NUM_NX_F
        else:
            IND_OUT = IND_IN - NUM_DY * NX - NUM_NX_F * (IND_J - NUM_DY +1) - NUM_NX_R * (IND_J - NUM_DY)
        return IND_OUT
    def progress_bar(NUM_PROGRESS, NUM_PROGRESS_BIN=0.05, STR_SYS_SYMBOL="=", STR_DES="Progress"):
        """Redraw an in-place console progress bar; NUM_PROGRESS in [0, 1]."""
        NUM_SYM = int(NUM_PROGRESS / NUM_PROGRESS_BIN)
        sys.stdout.write('\r')
        sys.stdout.write('[{0:20s}] {1:4.2f}% {2:s}'.format(STR_SYS_SYMBOL*NUM_SYM, NUM_PROGRESS*100, STR_DES))
        sys.stdout.flush()
    def clean_arr(ARR_IN, CRITERIA=1):
        """Keep only the sub-lists of ARR_IN longer than CRITERIA."""
        ARR_OUT=[]
        for i,n in enumerate(ARR_IN):
            if len(n)> CRITERIA:
                ARR_OUT.append(n)
        return ARR_OUT
    def cal_loop_progress(ARR_INDEX, ARR_INDEX_MAX, NUM_CUM_MAX=1, NUM_CUM_IND=1, NUM_TOTAL_MAX=1):
        """ Please list from smallest to largest, i.e.: x->y->z

        Returns the completed fraction of (possibly nested) loops, given the
        current indices and their sizes; None on mismatched input lengths.
        """
        if len(ARR_INDEX) == len(ARR_INDEX_MAX):
            for i, i_index in enumerate(ARR_INDEX):
                NUM_IND_PER   = (i_index+1)/float(ARR_INDEX_MAX[i])
                NUM_TOTAL_MAX = NUM_TOTAL_MAX * ARR_INDEX_MAX[i]
                if i >0: NUM_CUM_MAX = NUM_CUM_MAX * ARR_INDEX_MAX[i-1]
                NUM_CUM_IND = NUM_CUM_IND + NUM_CUM_MAX * i_index
            return NUM_CUM_IND / float(NUM_TOTAL_MAX)
        else:
            print("Wrong dimenstion for in put ARR_INDEX ({0:d}) and ARR_INDEX_MAX ({1:d})".format(len(ARR_INDEX), len(ARR_INDEX_MAX)))
    def calendar_cal(ARR_START_TIME, ARR_INTERVAL, ARR_END_TIME_IN=[0, 0, 0, 0, 0, 0.0], IF_LEAP=False):
        """Add ARR_INTERVAL to ARR_START_TIME (both [Y, M, D, h, m, s]) and
        normalize carries until the result is a valid calendar date.

        ARR_END_TIME_IN is accepted for backward compatibility but unused.
        IF_LEAP=True forces leap-year day limits regardless of the year.
        """
        ARR_END_TIME     = [ 0,0,0,0,0,0.0]
        ARR_DATETIME     = ["SECOND", "MINUTE", "HOUR","DAY", "MON", "YEAR"]
        NUM_ARR_DATETIME = len(ARR_DATETIME)
        IF_FERTIG  = False
        ARR_FERTIG = [0,0,0,0,0,0]
        DIC_TIME_LIM = \
            {"YEAR"  : {"START": 0 , "LIMIT": 9999 },\
             "MON"   : {"START": 1 , "LIMIT": 12   },\
             "DAY"   : {"START": 1 , "LIMIT": 31   },\
             "HOUR"  : {"START": 0 , "LIMIT": 23   },\
             "MINUTE": {"START": 0 , "LIMIT": 59   },\
             "SECOND": {"START": 0 , "LIMIT": 59   },\
            }
        for I, T in enumerate(ARR_START_TIME):
            ARR_END_TIME[I] = T + ARR_INTERVAL[I]
        while IF_FERTIG == False:
            # BUGFIX: full Gregorian rule (the original used year%4==0, which is
            # wrong for century years). A caller-forced IF_LEAP=True is honoured.
            if TOOLS.IS_LEAP(ARR_END_TIME[0]): IF_LEAP=True
            if IF_LEAP:
                ARR_DAY_LIM = [0,31,29,31,30,31,30,31,31,30,31,30,31]
            else:
                ARR_DAY_LIM = [0,31,28,31,30,31,30,31,31,30,31,30,31]
            # Carry pass: normalize from seconds up to years.
            for I, ITEM in enumerate(ARR_DATETIME):
                NUM_ARR_POS = NUM_ARR_DATETIME-I-1
                if ITEM == "DAY":
                    if ARR_END_TIME[NUM_ARR_POS] > ARR_DAY_LIM[ARR_END_TIME[1]]:
                        ARR_END_TIME[NUM_ARR_POS]     = ARR_END_TIME[NUM_ARR_POS] - ARR_DAY_LIM[ARR_END_TIME[1]]
                        ARR_END_TIME[NUM_ARR_POS - 1] += 1
                else:
                    if ARR_END_TIME[NUM_ARR_POS] > DIC_TIME_LIM[ITEM]["LIMIT"]:
                        ARR_END_TIME[NUM_ARR_POS - 1] += 1
                        ARR_END_TIME[NUM_ARR_POS]     = ARR_END_TIME[NUM_ARR_POS] - DIC_TIME_LIM[ITEM]["LIMIT"] - 1
            # Convergence check: all six fields within their limits.
            for I, ITEM in enumerate(ARR_DATETIME):
                NUM_ARR_POS = NUM_ARR_DATETIME-I-1
                if ITEM == "DAY":
                    if ARR_END_TIME[NUM_ARR_POS] <= ARR_DAY_LIM[ARR_END_TIME[1]]: ARR_FERTIG[NUM_ARR_POS] = 1
                else:
                    if ARR_END_TIME[NUM_ARR_POS] <= DIC_TIME_LIM[ITEM]["LIMIT"]:  ARR_FERTIG[NUM_ARR_POS] = 1
            if sum(ARR_FERTIG) == 6: IF_FERTIG = True
        return ARR_END_TIME
class MPI_TOOLS:
    """Helpers to split a 2-D index window across MPI ranks and gather results."""
    def __init__(self, MPI_SIZE=1, MPI_RANK=0,\
                 NUM_NX_END=1, NUM_NY_END=1, NUM_NX_START=0, NUM_NY_START=0, NUM_NX_CORES=1 ,\
                 NUM_NX_TOTAL=1, NUM_NY_TOTAL=1 ):
        """ END number follow the python philisophy: End number is not included in the list

        Parameters:
            MPI_SIZE / MPI_RANK : communicator size and this rank's id.
            NUM_NX/NY_START/END : the index window to decompose.
            NUM_NX_CORES        : ranks along X; ranks along Y is SIZE // NX_CORES.
            NUM_NX/NY_TOTAL     : full map extent (used by CPU_MAP).
        """
        self.NUM_SIZE     = MPI_SIZE
        self.NUM_RANK     = MPI_RANK
        self.NUM_NX_START = NUM_NX_START
        self.NUM_NY_START = NUM_NY_START
        self.NUM_NX_SIZE  = NUM_NX_END - NUM_NX_START
        self.NUM_NY_SIZE  = NUM_NY_END - NUM_NY_START
        self.NUM_NX_CORES = NUM_NX_CORES
        self.NUM_NY_CORES = max(1, int(self.NUM_SIZE / NUM_NX_CORES))
        # BUGFIX: the totals were accepted but never stored, so CPU_MAP
        # crashed with AttributeError.
        self.NUM_NX_TOTAL = NUM_NX_TOTAL
        self.NUM_NY_TOTAL = NUM_NY_TOTAL
        self.ARR_RANK_DESIGN = [ {} for n in range(self.NUM_SIZE)]
    def CPU_GEOMETRY_2D(self):
        """Compute each rank's [start, end) window in X and Y.

        The remainder cells are given one-per-rank to the lowest ranks, so
        window sizes differ by at most one. Stores and returns the list of
        per-rank dicts (INDEX_IN, NX_START, NX_END, NY_START, NY_END).
        """
        NUM_NX_REMAIN = self.NUM_NX_SIZE % self.NUM_NX_CORES
        NUM_NY_REMAIN = self.NUM_NY_SIZE % self.NUM_NY_CORES
        NUM_NX_DIFF   = int((self.NUM_NX_SIZE - NUM_NX_REMAIN) / self.NUM_NX_CORES )
        NUM_NY_DIFF   = int((self.NUM_NY_SIZE - NUM_NY_REMAIN) / self.NUM_NY_CORES )
        NUM_NY_DIFF_P1 = NUM_NY_DIFF + 1
        NUM_NX_DIFF_P1 = NUM_NX_DIFF + 1
        ARR_RANK_DESIGN = [ 0 for n in range(self.NUM_SIZE)]
        for ny in range(self.NUM_NY_CORES):
            for nx in range(self.NUM_NX_CORES):
                NUM_RANK = ny * self.NUM_NX_CORES + nx
                DIC_IN = {"INDEX_IN": NUM_RANK, "NX_START": 0, "NY_START": 0, "NX_END": 0, "NY_END": 0 }
                # Ranks below the remainder take the larger (DIFF+1) stripe.
                if ny < NUM_NY_REMAIN:
                    DIC_IN["NY_START"] = (ny + 0) * NUM_NY_DIFF_P1 + self.NUM_NY_START
                    DIC_IN["NY_END"  ] = (ny + 1) * NUM_NY_DIFF_P1 + self.NUM_NY_START
                else:
                    DIC_IN["NY_START"] = (ny - NUM_NY_REMAIN + 0) * NUM_NY_DIFF + NUM_NY_REMAIN * NUM_NY_DIFF_P1 + self.NUM_NY_START
                    DIC_IN["NY_END"  ] = (ny - NUM_NY_REMAIN + 1) * NUM_NY_DIFF + NUM_NY_REMAIN * NUM_NY_DIFF_P1 + self.NUM_NY_START
                if nx < NUM_NX_REMAIN:
                    DIC_IN["NX_START"] = (nx + 0) * NUM_NX_DIFF_P1 + self.NUM_NX_START
                    DIC_IN["NX_END"  ] = (nx + 1) * NUM_NX_DIFF_P1 + self.NUM_NX_START
                else:
                    DIC_IN["NX_START"] = (nx - NUM_NX_REMAIN + 0) * NUM_NX_DIFF + NUM_NX_REMAIN * NUM_NX_DIFF_P1 + self.NUM_NX_START
                    DIC_IN["NX_END"  ] = (nx - NUM_NX_REMAIN + 1) * NUM_NX_DIFF + NUM_NX_REMAIN * NUM_NX_DIFF_P1 + self.NUM_NX_START
                ARR_RANK_DESIGN[NUM_RANK] = DIC_IN
        self.ARR_RANK_DESIGN = ARR_RANK_DESIGN
        return ARR_RANK_DESIGN
    def CPU_MAP(self ):
        """Paint each rank id onto the full [NY_TOTAL][NX_TOTAL] map and
        return it; cells owned by no rank stay NaN.
        Requires CPU_GEOMETRY_2D to have been called first."""
        # BUGFIX: the original referenced an undefined global ARR_RANK_DESIGN
        # and returned the undefined name MAP_CPU.
        ARR_RANK_DESIGN = self.ARR_RANK_DESIGN
        ARR_CPU_MAP = [ [ NP.nan for i in range(self.NUM_NX_TOTAL)] for j in range(self.NUM_NY_TOTAL) ]
        for RANK in range(len(ARR_RANK_DESIGN)):
            print("DEAL WITH {0:d} {1:d}".format(RANK, ARR_RANK_DESIGN[RANK]["INDEX_IN"] ))
            for jj in range(ARR_RANK_DESIGN[RANK]["NY_START"], ARR_RANK_DESIGN[RANK]["NY_END"]):
                for ii in range(ARR_RANK_DESIGN[RANK]["NX_START"], ARR_RANK_DESIGN[RANK]["NX_END"]):
                    ARR_CPU_MAP[jj][ii] = ARR_RANK_DESIGN[RANK]["INDEX_IN"]
        return ARR_CPU_MAP
    def GATHER_ARR_2D(self, ARR_IN, ARR_IN_GATHER, ARR_RANK_DESIGN=[]):
        """Merge the per-rank sub-arrays of ARR_IN_GATHER (ranks 1..SIZE-1)
        into ARR_IN, each rank contributing only its own window; returns
        the merged array (rank 0's data in ARR_IN is left untouched)."""
        if ARR_RANK_DESIGN == []:
            ARR_RANK_DESIGN = self.ARR_RANK_DESIGN
        for N in range(1, self.NUM_SIZE):
            I_STA = ARR_RANK_DESIGN[N]["NX_START"]
            I_END = ARR_RANK_DESIGN[N]["NX_END"  ]
            J_STA = ARR_RANK_DESIGN[N]["NY_START"]
            J_END = ARR_RANK_DESIGN[N]["NY_END"  ]
            for J in range(J_STA, J_END ):
                for I in range(I_STA, I_END ):
                    ARR_IN[J][I] = ARR_IN_GATHER[N][J][I]
        return ARR_IN
    def MPI_MESSAGE(self, STR_TEXT=""):
        """Print STR_TEXT tagged with this rank and the current UTC time."""
        TIME_NOW = time.gmtime()
        print("MPI RANK: {0:5d} @ {1:02d}:{2:02d}:{3:02d} # {4:s}"\
              .format(self.NUM_RANK, TIME_NOW.tm_hour, TIME_NOW.tm_min, TIME_NOW.tm_sec, STR_TEXT ))
class GEO_TOOLS:
    """Geographic helpers: threshold classification and lat/lon grid reading."""
    def __init__(self):
        # Report the netCDF4 bindings version at construction time.
        STR_NCDF4PY = NC.__version__
        print("Using netCDF4 for Python, Version: {0:s}".format(STR_NCDF4PY))
    def mask_dtm(self, NUM, ARR_DTM=[0,1,2], ARR_DTM_RANGE=[0,1], ARR_DTM_STR=["OUT","IN","OUT"]):
        """ The determination algorithm is : x-1 < NUM <= x

        Classifies NUM against the ascending thresholds ARR_DTM_RANGE and
        returns the matching label from ARR_DTM (len(ARR_DTM) must be
        len(ARR_DTM_RANGE)+1). ARR_DTM_STR is kept for interface
        compatibility; it is not used here.
        """
        for i, n in enumerate(ARR_DTM):
            if i == 0:
                # First class: everything at or below the first threshold.
                if NUM <= ARR_DTM_RANGE[i]: NUM_OUT = n
            elif i == len(ARR_DTM_RANGE):
                # Last class: everything above the final threshold.
                if NUM > ARR_DTM_RANGE[i-1]: NUM_OUT = n
            else:
                if NUM > ARR_DTM_RANGE[i-1] and NUM <= ARR_DTM_RANGE[i]: NUM_OUT = n
        return NUM_OUT
    def mask_array(self, ARR_IN, ARR_MASK_OUT=[], ARR_DTM=[0,1,2], ARR_DTM_RANGE=[0,1], ARR_DTM_STR=["OUT","IN","OUT"], IF_2D=False):
        """Apply mask_dtm element-wise to a 1-D list (default) or a 2-D list
        (IF_2D=True) and return the classified array of the same shape."""
        if IF_2D:
            NUM_NX = len(ARR_IN[0])
            NUM_NY = len(ARR_IN)
            # BUGFIX: GEO_TOOLS defines no NUM_NULL attribute; fall back to the
            # project-wide null value when it is absent.
            ARR_OUT = [ [ getattr(self, "NUM_NULL", -9999.999) for i in range(NUM_NX)] for j in range(NUM_NY) ]
            for J in range(NUM_NY):
                # BUGFIX: the inner loop iterated range(NUM_NY) — wrong axis for
                # non-square arrays.
                for I in range(NUM_NX):
                    # BUGFIX: the original passed keyword names that do not
                    # exist (ARR_NUM_DTM, ...), raising NameError/TypeError.
                    ARR_OUT[J][I] = self.mask_dtm(ARR_IN[J][I], ARR_DTM=ARR_DTM, ARR_DTM_RANGE=ARR_DTM_RANGE, ARR_DTM_STR=ARR_DTM_STR)
        else:
            NUM_NX = len(ARR_IN)
            ARR_OUT = [0 for n in range(NUM_NX)]
            for N in range(NUM_NX):
                ARR_OUT[N] = self.mask_dtm(ARR_IN[N], ARR_DTM=ARR_DTM, ARR_DTM_RANGE=ARR_DTM_RANGE, ARR_DTM_STR=ARR_DTM_STR)
        return ARR_OUT
    def MAKE_LAT_LON_ARR(self, FILE_NC_IN, STR_LAT="lat", STR_LON="lon", source="CFC"):
        """ Reading LAT and LON from a NC file

        Broadcasts the file's 1-D lat/lon coordinate vectors to two 2-D
        [nlat][nlon] arrays and returns (lat_2d, lon_2d).
        """
        NC_DATA_IN = NC.Dataset(FILE_NC_IN, "r", format="NETCDF4")
        if source == "CFC":
            arr_lat_in = NC_DATA_IN.variables[STR_LAT]
            arr_lon_in = NC_DATA_IN.variables[STR_LON]
            num_nlat = len(arr_lat_in)
            num_nlon = len(arr_lon_in)
            arr_lon_out = [[0.0 for i in range(num_nlon)] for j in range(num_nlat)]
            arr_lat_out = [[0.0 for i in range(num_nlon)] for j in range(num_nlat)]
            for j in range(num_nlat):
                for i in range(num_nlon):
                    # BUGFIX: the original swapped the assignments, filling the
                    # longitude array with latitudes and vice versa.
                    arr_lat_out[j][i] = arr_lat_in[j]
                    arr_lon_out[j][i] = arr_lon_in[i]
            return arr_lat_out, arr_lon_out
class NETCDF4_HELPER:
    """Helpers for building WRF ensemble netCDF4 files from member outputs."""
    def __init__(self):
        # Report the netCDF4 bindings version at construction time.
        STR_NCDF4PY = NC.__version__
        print("Using netCDF4 for Python, Version: {0:s}".format(STR_NCDF4PY))
    def create_wrf_ensemble(self, STR_FILE_IN, STR_FILE_OUT, ARR_VAR=[], STR_DIR="./", NUM_ENSEMBLE_SIZE=1 ):
        """Create an empty ensemble file, copying dimensions and the WRF global
        attributes from a template member file.

        Parameters:
            STR_FILE_IN       : template WRF output file (read-only).
            STR_FILE_OUT      : ensemble file to create.
            ARR_VAR           : list of (name, "2D"|"3D") variables to declare.
            STR_DIR           : directory holding both files.
            NUM_ENSEMBLE_SIZE : size of the new "Ensembles" dimension.
        """
        FILE_OUT = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, STR_FILE_OUT), "w",format="NETCDF4")
        # BUGFIX: the input path was formatted "{1:s}/{1:s}", producing
        # "<file>/<file>" instead of "<dir>/<file>".
        FILE_IN  = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, STR_FILE_IN ), "r",format="NETCDF4")
        # CREATE DIMENSIONS: copy all template dimensions, then add "Ensembles".
        for DIM in FILE_IN.dimensions:
            FILE_OUT.createDimension(DIM, FILE_IN.dimensions[DIM].size )
        FILE_OUT.createDimension("Ensembles", NUM_ENSEMBLE_SIZE )
        # CREATE ATTRIBUTES: carry over the WRF global metadata verbatim.
        FILE_OUT.TITLE                 = FILE_IN.TITLE
        FILE_OUT.START_DATE            = FILE_IN.START_DATE
        FILE_OUT.SIMULATION_START_DATE = FILE_IN.SIMULATION_START_DATE
        FILE_OUT.DX                    = FILE_IN.DX
        FILE_OUT.DY                    = FILE_IN.DY
        FILE_OUT.SKEBS_ON              = FILE_IN.SKEBS_ON
        FILE_OUT.SPEC_BDY_FINAL_MU     = FILE_IN.SPEC_BDY_FINAL_MU
        FILE_OUT.USE_Q_DIABATIC        = FILE_IN.USE_Q_DIABATIC
        FILE_OUT.GRIDTYPE              = FILE_IN.GRIDTYPE
        FILE_OUT.DIFF_OPT              = FILE_IN.DIFF_OPT
        FILE_OUT.KM_OPT                = FILE_IN.KM_OPT
        if len(ARR_VAR) >0:
            for V in ARR_VAR:
                # Each variable gains a leading "Ensembles" axis.
                if V[1] == "2D":
                    FILE_OUT.createVariable(V[0], "f8", ("Ensembles", "Time", "south_north", "west_east" ))
                elif V[1] == "3D":
                    FILE_OUT.createVariable(V[0], "f8", ("Ensembles", "Time", "bottom_top", "south_north", "west_east" ))
        FILE_OUT.close()
        FILE_IN.close()
    def add_ensemble(self, FILE_IN, FILE_OUT, STR_VAR, STR_DIM="2D", STR_DIR="./", IND_ENSEMBLE=0):
        """Copy variable STR_VAR from a member file into ensemble slot
        IND_ENSEMBLE of the ensemble file, one time step at a time.

        STR_DIM is kept for interface compatibility: assigning a whole time
        slab copies every remaining axis, so 2D and 3D behave identically
        (the original 3D branch repeated the same full-slab write once per
        vertical level, and read the "bottom_top" dimension even for files
        that do not have it).
        """
        FILE_OUT = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, FILE_OUT), "a",format="NETCDF4")
        FILE_IN  = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, FILE_IN ), "r",format="NETCDF4")
        ARR_VAR_IN = FILE_IN.variables[STR_VAR]
        NUM_NT = len(ARR_VAR_IN)
        # Loop variable renamed from `time`, which shadowed the time module.
        for NUM_T in range(NUM_NT):
            FILE_OUT.variables[STR_VAR][IND_ENSEMBLE, NUM_T] = FILE_IN.variables[STR_VAR][NUM_T]
        FILE_OUT.close()
        FILE_IN.close()
class WRF_HELPER:
    """Helpers for reading WRF geogrid information, cropping by a lat/lon
    box, and profiling WRF run time from rsl log files.

    Remember: most arrays follow the [j, i] (row, column) rule instead of
    [x, y].
    """
    STR_DIR_ROOT = "./"   # default directory for input files
    NUM_TIME_INIT = 0     # time index used when reading static geogrid maps
    NUM_SHIFT = 0.001     # small nudge to break exact-equality ties on box edges
    def __init__(self):
        # Report the netCDF4 binding version for reproducibility.
        STR_NCDF4PY = NC.__version__
        print("Using netCDF4 for Python, Version: {0:s}".format(STR_NCDF4PY))
    def GEO_INFORMATER(self, STR_FILE="geo_em.d01.nc", STR_DIR=""):
        """Open a geogrid file and cache lat/lon maps plus grid geometry on
        self (MAP_LAT, MAP_LON, NUM_NX, NUM_NY, NUM_DX, NUM_DY)."""
        print("INPUT GEO FILE: {0:s}".format(STR_FILE))
        if STR_DIR == "":
            # BUG FIX: original used "==" (a no-op comparison), so the
            # default directory was never actually applied.
            STR_DIR = self.STR_DIR_ROOT
        self.FILE_IN = NC.Dataset("{0:s}/{1:s}".format(STR_DIR, STR_FILE), "r", format="NETCDF4")
        self.MAP_LAT = self.FILE_IN.variables["CLAT"][self.NUM_TIME_INIT]
        self.MAP_LON = self.FILE_IN.variables["CLONG"][self.NUM_TIME_INIT]
        ARR_TMP_IN = self.FILE_IN.variables["CLONG"][0]
        # Since NetCDF4 for python does not support the hyphen in attributes,
        # NX and NY are derived from a map variable in the NC file.
        self.NUM_NX = len(ARR_TMP_IN[0])
        self.NUM_NY = len(ARR_TMP_IN)
        self.NUM_DX = self.FILE_IN.DX
        # BUG FIX: original read DX into NUM_DY.
        self.NUM_DY = self.FILE_IN.DY
    def GEO_HELPER(self, ARR_LL_SW, ARR_LL_NE):
        """Build a 0/1 crop mask (self.MAP_CROP_MASK) for grid cells inside
        the lat/lon box spanned by corners ARR_LL_SW and ARR_LL_NE (each
        [lat, lon]) and record the corner indices/coordinates in
        self.DIC_CROP_INFO.  GEO_INFORMATER must have been called first.
        """
        self.MAP_CROP_MASK = [[0 for i in range(self.NUM_NX)] for j in range(self.NUM_NY)]
        self.DIC_CROP_INFO = {"NE": {"LAT": 0, "LON": 0, "I": 0, "J": 0},
                              "SW": {"LAT": 0, "LON": 0, "I": 0, "J": 0}}
        ARR_TMP_I = []
        ARR_TMP_J = []
        for j in range(self.NUM_NY):
            for i in range(self.NUM_NX):
                # Signed offsets of this cell from the two corners; exact
                # hits are nudged by NUM_SHIFT so the sign test below works.
                # NOTE(review): latitude ties are nudged inward (+) while
                # longitude ties are nudged outward (-), as in the original —
                # confirm this asymmetry is intended.
                NUM_CHK_SW_J = self.MAP_LAT[j][i] - ARR_LL_SW[0]
                if NUM_CHK_SW_J == 0:
                    NUM_CHK_SW_J = self.MAP_LAT[j][i] - ARR_LL_SW[0] + self.NUM_SHIFT
                NUM_CHK_SW_I = self.MAP_LON[j][i] - ARR_LL_SW[1]
                if NUM_CHK_SW_I == 0:
                    # BUG FIX: original nudged using MAP_LAT here although
                    # this is the longitude check.
                    NUM_CHK_SW_I = self.MAP_LON[j][i] - ARR_LL_SW[1] - self.NUM_SHIFT
                NUM_CHK_NE_J = self.MAP_LAT[j][i] - ARR_LL_NE[0]
                if NUM_CHK_NE_J == 0:
                    NUM_CHK_NE_J = self.MAP_LAT[j][i] - ARR_LL_NE[0] + self.NUM_SHIFT
                NUM_CHK_NE_I = self.MAP_LON[j][i] - ARR_LL_NE[1]
                if NUM_CHK_NE_I == 0:
                    NUM_CHK_NE_I = self.MAP_LON[j][i] - ARR_LL_NE[1] - self.NUM_SHIFT
                # Inside the box when the cell lies between the corners in
                # both directions (opposite-signed offsets).
                if NUM_CHK_SW_J * NUM_CHK_NE_J < 0 and NUM_CHK_SW_I * NUM_CHK_NE_I < 0:
                    self.MAP_CROP_MASK[j][i] = 1
                    ARR_TMP_J.append(j)
                    ARR_TMP_I.append(i)
        NUM_SW_J = min(ARR_TMP_J)
        NUM_SW_I = min(ARR_TMP_I)
        NUM_NE_J = max(ARR_TMP_J)
        NUM_NE_I = max(ARR_TMP_I)
        self.DIC_CROP_INFO["NE"]["J"] = NUM_NE_J
        self.DIC_CROP_INFO["NE"]["I"] = NUM_NE_I
        self.DIC_CROP_INFO["NE"]["LAT"] = self.MAP_LAT[NUM_NE_J][NUM_NE_I]
        self.DIC_CROP_INFO["NE"]["LON"] = self.MAP_LON[NUM_NE_J][NUM_NE_I]
        self.DIC_CROP_INFO["SW"]["J"] = NUM_SW_J
        self.DIC_CROP_INFO["SW"]["I"] = NUM_SW_I
        self.DIC_CROP_INFO["SW"]["LAT"] = self.MAP_LAT[NUM_SW_J][NUM_SW_I]
        self.DIC_CROP_INFO["SW"]["LON"] = self.MAP_LON[NUM_SW_J][NUM_SW_I]
    @staticmethod
    def PROFILE_HELPER(STR_FILE_IN, ARR_DATE_START, NUM_DOMS=3, NUM_TIMESTEPS=24, IF_PB=False):
        """Accumulate per-domain, per-hour elapsed seconds from a WRF rsl log.

        STR_FILE_IN    : path to the rsl.out / rsl.error file
        ARR_DATE_START : [year, month, day] of the simulation start
        NUM_DOMS       : number of domains profiled (rows of the result)
        NUM_TIMESTEPS  : number of simulated hours (columns of the result)
        IF_PB          : show a progress bar via TOOLS.progress_bar
        Returns a NUM_DOMS x NUM_TIMESTEPS nested list of summed seconds.
        BUG FIX: originally defined without self (and without @staticmethod),
        so calling it on an instance consumed the instance as STR_FILE_IN.
        """
        with open(STR_FILE_IN) as FILE_READ_IN:  # close the log file deterministically
            ARR_READ_IN = FILE_READ_IN.readlines()
        NUM_LEN_IN = len(ARR_READ_IN)
        ARR_TIME_PROFILE = [[0 for T in range(NUM_TIMESTEPS)] for D in range(NUM_DOMS)]
        for I, TEXT_IN in enumerate(ARR_READ_IN):
            ARR_TEXT = re.split(r"\s", TEXT_IN.strip())
            if ARR_TEXT[0] == "Timing" and ARR_TEXT[2] in ("main:", "main"):
                for ind, T in enumerate(ARR_TEXT):
                    if T == "time":
                        ind_time_text = ind + 1
                    if T == "elapsed":
                        ind_elapsed_text = ind - 1
                    if T == "domain":
                        # "domain   1:" splits into empty tokens, hence +3.
                        ind_domain_text = ind + 3
                arr_time_in = re.split("_", ARR_TEXT[ind_time_text])
                arr_date = re.split("-", arr_time_in[0])
                arr_time = re.split(":", arr_time_in[1])
                num_domain = int(re.split(":", ARR_TEXT[ind_domain_text])[0])
                num_elapsed = float(ARR_TEXT[ind_elapsed_text])
                # Hours since the start date (day rollover only; assumes the
                # run stays within one month — TODO confirm).
                NUM_HOUR = (int(arr_date[2]) - ARR_DATE_START[2]) * 24 + int(arr_time[0])
                ARR_TIME_PROFILE[num_domain - 1][NUM_HOUR] += num_elapsed
            if IF_PB:
                TOOLS.progress_bar(I / float(NUM_LEN_IN))
        return ARR_TIME_PROFILE
class DATA_READER:
    """
    The DATA_READER is based on my old work: gridtrans.py.

    Reads comma-separated numeric files where '#' lines are comments and
    STR_NULL marks a missing value (replaced by NUM_NULL).
    """
    def __init__(self, STR_NULL="noData", NUM_NULL=-999.999):
        # STR_NULL: token in the file that marks a missing value
        # NUM_NULL: numeric value substituted for missing entries
        self.STR_NULL = STR_NULL
        self.NUM_NULL = NUM_NULL
    @staticmethod
    def stripblnk(arr, *num_typ):
        """Drop empty strings from arr and cast the rest per num_typ[0]
        ('int', 'float', or '' for no cast).

        BUG FIX: originally defined without self (and no @staticmethod), so
        calling it on an instance passed the instance as `arr`.
        """
        new_arr = []
        for i in arr:
            if i == "":
                continue
            if num_typ[0] == 'int':
                new_arr.append(int(i))
            elif num_typ[0] == 'float':
                new_arr.append(float(i))
            elif num_typ[0] == '':
                new_arr.append(i)
            else:
                print("WRONG num_typ!")
        return new_arr
    def tryopen(self, sourcefile, ag):
        """Open sourcefile with mode ag; return the handle, or the string
        "error" on failure (string sentinel kept for backward compatibility).
        The except clause is narrowed from a bare except to OSError so
        programming errors are no longer silently swallowed.
        """
        try:
            return open(sourcefile, ag)
        except OSError:
            print("No such file.")
            return "error"
    def READCSV(self, sourcefile):
        """Read a CSV file into (result_arr, result_arr_text).

        result_arr      : nrows x ncols list of floats (NUM_NULL for STR_NULL)
        result_arr_text : the comment ('#') lines, verbatim
        Returns None when no data line is found (original behavior).
        BUG FIXES: the file was opened twice and never closed; a blank line
        crashed the first-character probe (indexing an empty findall result).
        Blank lines are now treated like comment lines.
        """
        opf = self.tryopen(sourcefile, 'r')
        print("reading source file {0:s}".format(sourcefile))
        chk_lines = opf.readlines()  # read once; reuse for both passes
        opf.close()
        num_totallines = len(chk_lines)
        ncols = 0
        num_notnum = 0
        # First pass: skip leading comment/blank lines, then infer the
        # column count from the first data line.
        for line_in in chk_lines:
            stripped = line_in.strip()
            if stripped == "" or stripped[0] == "#":
                num_notnum += 1
            else:
                ncols = len(re.split(",", stripped))
                break
        if ncols == 0:
            print("something wrong with the input file! (all comments?)")
            return  # preserve the original implicit-None result
        nrows = num_totallines - num_notnum
        result_arr = [[self.NUM_NULL for j in range(ncols)] for i in range(nrows)]
        result_arr_text = []
        num_pass = 0
        # Second pass: fill the numeric rows; collect comment lines verbatim.
        for j, line_in in enumerate(chk_lines):
            stripped = line_in.strip()
            if stripped == "" or stripped[0] == "#":
                result_arr_text.append(line_in)
                num_pass += 1
            else:
                arr_in = re.split(",", stripped)
                for i in range(ncols):
                    chk_val = arr_in[i]
                    if chk_val == self.STR_NULL:
                        result_arr[j - num_pass][i] = self.NUM_NULL
                    else:
                        result_arr[j - num_pass][i] = float(chk_val)
        return result_arr, result_arr_text
# | codeparrot/github-code-clean | -- dataset separator (not valid Python); the
# following section is a separate module: Python bindings for GLFW.
"""
Python bindings for GLFW.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = 'Florian Rhiem (florian.rhiem@gmail.com)'
__copyright__ = 'Copyright (c) 2013-2016 Florian Rhiem'
__license__ = 'MIT'
__version__ = '1.3.1'
# By default (ERROR_REPORTING = True), GLFW errors will be reported as Python
# exceptions. Set ERROR_REPORTING to False or set a custom error callback to
# disable this behavior.
ERROR_REPORTING = True
import ctypes
import os
import functools
import glob
import sys
import subprocess
import textwrap
# Python 3 compatibility:
try:
    # os.getcwdu exists only on Python 2 (returns unicode).
    _getcwd = os.getcwdu
except AttributeError:
    _getcwd = os.getcwd
if sys.version_info.major > 2:
    # ctypes c_char_p expects bytes on Python 3.
    _to_char_p = lambda s: s.encode('utf-8')
    def _reraise(exception, traceback):
        raise exception.with_traceback(traceback)
else:
    _to_char_p = lambda s: s
    def _reraise(exception, traceback):
        # NOTE(review): on Python 2, raising a tuple re-raises its first
        # element and drops `traceback` — confirm this is intended.
        raise (exception, None, traceback)
class GLFWError(Exception):
    """
    Exception class used for reporting GLFW errors.

    Raised by the default error callback when ERROR_REPORTING is True.
    """
    def __init__(self, message):
        # message: the formatted "(code) description" string built by the
        # default error callback.
        super(GLFWError, self).__init__(message)
def _find_library_candidates(library_names,
                             library_file_extensions,
                             library_search_paths):
    """
    Finds and returns filenames which might be the library you are looking
    for, as a set of real (symlink-resolved) paths.
    """
    found = set()
    for name in library_names:
        for directory in library_search_paths:
            pattern = os.path.join(directory, '*' + name + '*')
            for match in glob.iglob(pattern):
                match = os.path.realpath(match)
                if match in found:
                    continue
                base = os.path.basename(match)
                # Accept "lib<name>..." or "<name>..." basenames only.
                if base.startswith('lib' + name):
                    tail = base[len('lib' + name):]
                elif base.startswith(name):
                    tail = base[len(name):]
                else:
                    continue
                for extension in library_file_extensions:
                    # "<name>.so" or "<name>.so.3" style.
                    if tail.startswith(extension):
                        if tail[len(extension):][:1] in ('', '.'):
                            found.add(match)
                    # "<name>3.so" / "<name>-3.2.dylib" style: whatever sits
                    # between name and extension must be digits and dots.
                    if tail.endswith(extension):
                        middle = tail[:-len(extension)]
                        if all(ch in '0123456789.' for ch in middle):
                            found.add(match)
    return found
def _load_library(library_names, library_file_extensions,
                  library_search_paths, version_check_callback):
    """
    Finds, loads and returns the most recent version of the library,
    or None when no GLFW >= 3.0 candidate is found.
    """
    filenames = _find_library_candidates(library_names,
                                         library_file_extensions,
                                         library_search_paths)
    versioned = []
    for candidate in filenames:
        detected = version_check_callback(candidate)
        # Only GLFW 3.x libraries are usable by these bindings.
        if detected is not None and detected >= (3, 0, 0):
            versioned.append((detected, candidate))
    if not versioned:
        return None
    # Highest version tuple wins.
    versioned.sort()
    return ctypes.CDLL(versioned[-1][1])
def _glfw_get_version(filename):
    """
    Queries and returns the library version tuple or None by using a
    subprocess.

    The candidate library is loaded in a throw-away child interpreter so
    that probing broken files cannot crash or pollute this process.
    """
    import ast  # local import: only needed to parse the child's output
    version_checker_source = '''
        import sys
        import ctypes

        def get_version(library_handle):
            """
            Queries and returns the library version tuple or None.
            """
            major_value = ctypes.c_int(0)
            major = ctypes.pointer(major_value)
            minor_value = ctypes.c_int(0)
            minor = ctypes.pointer(minor_value)
            rev_value = ctypes.c_int(0)
            rev = ctypes.pointer(rev_value)
            if hasattr(library_handle, 'glfwGetVersion'):
                library_handle.glfwGetVersion(major, minor, rev)
                version = (major_value.value,
                           minor_value.value,
                           rev_value.value)
                return version
            else:
                return None

        try:
            input_func = raw_input
        except NameError:
            input_func = input
        filename = input_func().strip()

        try:
            library_handle = ctypes.CDLL(filename)
        except OSError:
            pass
        else:
            version = get_version(library_handle)
            print(version)
    '''
    args = [sys.executable, '-c', textwrap.dedent(version_checker_source)]
    process = subprocess.Popen(args, universal_newlines=True,
                               stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # The filename is passed on stdin; the child prints "(maj, min, rev)"
    # or "None", or nothing when the file cannot be loaded at all.
    out = process.communicate(filename)[0]
    out = out.strip()
    if out:
        # ROBUSTNESS FIX: the original used eval() on the child's stdout;
        # literal_eval accepts only Python literals (tuple of ints / None).
        return ast.literal_eval(out)
    else:
        return None
if sys.platform == 'win32':
    # only try glfw3.dll on windows
    try:
        _glfw = ctypes.CDLL('glfw3.dll')
    except OSError:
        _glfw = None
else:
    # Elsewhere, search the default loader path ('') and the usual install
    # locations for the newest GLFW >= 3.0 shared library.
    _glfw = _load_library(['glfw', 'glfw3'], ['.so', '.dylib'],
                          ['',
                           '/usr/lib64', '/usr/local/lib64',
                           '/usr/lib', '/usr/local/lib',
                           '/usr/lib/x86_64-linux-gnu/'], _glfw_get_version)
if _glfw is None:
    raise ImportError("Failed to load GLFW3 shared library.")
# Per-callback-type dicts (registered by the set_*_callback functions) that
# keep ctypes callback objects alive, keyed by window address.
_callback_repositories = []
class _GLFWwindow(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWwindow GLFWwindow;

    Opaque handle; only ever used behind a pointer.
    """
    # Placeholder field: the real struct layout is private to GLFW.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWmonitor(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWmonitor GLFWmonitor;

    Opaque handle; only ever used behind a pointer.
    """
    # Placeholder field: the real struct layout is private to GLFW.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWvidmode(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWvidmode GLFWvidmode;

    Python side uses the nested form ((width, height),
    (red_bits, green_bits, blue_bits), refresh_rate).
    """
    # NOTE(review): refresh_rate is declared c_uint while the C header uses
    # int; same size, sign only matters for negative values — confirm.
    _fields_ = [("width", ctypes.c_int),
                ("height", ctypes.c_int),
                ("red_bits", ctypes.c_int),
                ("green_bits", ctypes.c_int),
                ("blue_bits", ctypes.c_int),
                ("refresh_rate", ctypes.c_uint)]
    def __init__(self):
        ctypes.Structure.__init__(self)
        self.width = self.height = 0
        self.red_bits = self.green_bits = self.blue_bits = 0
        self.refresh_rate = 0
    def wrap(self, video_mode):
        """
        Wraps a nested python sequence.
        """
        (self.width, self.height), bits, self.refresh_rate = video_mode
        self.red_bits, self.green_bits, self.blue_bits = bits
    def unwrap(self):
        """
        Returns a nested python sequence.
        """
        return ((self.width, self.height),
                (self.red_bits, self.green_bits, self.blue_bits),
                self.refresh_rate)
class _GLFWgammaramp(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWgammaramp GLFWgammaramp;

    Python side uses (red, green, blue): three equal-length sequences of
    floats in [0, 1]; the C side stores unsigned shorts in [0, 65535].
    """
    _fields_ = [("red", ctypes.POINTER(ctypes.c_ushort)),
                ("green", ctypes.POINTER(ctypes.c_ushort)),
                ("blue", ctypes.POINTER(ctypes.c_ushort)),
                ("size", ctypes.c_uint)]
    def __init__(self):
        ctypes.Structure.__init__(self)
        # *_array attributes keep the underlying buffers alive while the C
        # side holds raw pointers into them.
        self.red = self.green = self.blue = None
        self.red_array = self.green_array = self.blue_array = None
        self.size = 0
    def wrap(self, gammaramp):
        """
        Wraps a nested python sequence.
        """
        red, green, blue = gammaramp
        size = min(len(red), len(green), len(blue))
        channel_type = ctypes.c_ushort * size
        self.size = ctypes.c_uint(size)
        # Scale [0, 1] floats to 16-bit integer ramp entries.
        self.red_array = channel_type(*(int(value * 65535) for value in red[:size]))
        self.green_array = channel_type(*(int(value * 65535) for value in green[:size]))
        self.blue_array = channel_type(*(int(value * 65535) for value in blue[:size]))
        as_pointer = ctypes.POINTER(ctypes.c_ushort)
        self.red = ctypes.cast(self.red_array, as_pointer)
        self.green = ctypes.cast(self.green_array, as_pointer)
        self.blue = ctypes.cast(self.blue_array, as_pointer)
    def unwrap(self):
        """
        Returns a nested python sequence.
        """
        channels = []
        for pointer in (self.red, self.green, self.blue):
            channels.append([pointer[i] / 65535.0 for i in range(self.size)])
        return tuple(channels)
class _GLFWcursor(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWcursor GLFWcursor;

    Opaque handle; only ever used behind a pointer.
    """
    # Placeholder field: the real struct layout is private to GLFW.
    _fields_ = [("dummy", ctypes.c_int)]
class _GLFWimage(ctypes.Structure):
    """
    Wrapper for:
        typedef struct GLFWimage GLFWimage;

    Python side uses (width, height, pixels) where pixels is height rows of
    width pixels, each pixel a sequence of 4 byte values (RGBA).
    """
    _fields_ = [("width", ctypes.c_int),
                ("height", ctypes.c_int),
                ("pixels", ctypes.POINTER(ctypes.c_ubyte))]
    def __init__(self):
        ctypes.Structure.__init__(self)
        self.width = 0
        self.height = 0
        self.pixels = None
        # Python-side reference that keeps the pixel buffer alive while the
        # C side holds a raw pointer into it.
        self.pixels_array = None
    def wrap(self, image):
        """
        Wraps a nested python sequence.
        """
        self.width, self.height, pixels = image
        array_type = ctypes.c_ubyte * 4 * self.width * self.height
        self.pixels_array = array_type()
        for row in range(self.height):
            for col in range(self.width):
                for channel in range(4):
                    self.pixels_array[row][col][channel] = pixels[row][col][channel]
        self.pixels = ctypes.cast(self.pixels_array,
                                  ctypes.POINTER(ctypes.c_ubyte))
    def unwrap(self):
        """
        Returns a nested python sequence.
        """
        rows = [[[int(channel) for channel in pixel] for pixel in row]
                for row in self.pixels_array]
        return self.width, self.height, rows
# GLFW header version these bindings mirror (3.2.1).
VERSION_MAJOR = 3
VERSION_MINOR = 2
VERSION_REVISION = 1
# Key / mouse-button action values.
RELEASE = 0
PRESS = 1
REPEAT = 2
# Keyboard key tokens (printable keys match ASCII; function keys start at 256).
KEY_UNKNOWN = -1
KEY_SPACE = 32
KEY_APOSTROPHE = 39
KEY_COMMA = 44
KEY_MINUS = 45
KEY_PERIOD = 46
KEY_SLASH = 47
KEY_0 = 48
KEY_1 = 49
KEY_2 = 50
KEY_3 = 51
KEY_4 = 52
KEY_5 = 53
KEY_6 = 54
KEY_7 = 55
KEY_8 = 56
KEY_9 = 57
KEY_SEMICOLON = 59
KEY_EQUAL = 61
KEY_A = 65
KEY_B = 66
KEY_C = 67
KEY_D = 68
KEY_E = 69
KEY_F = 70
KEY_G = 71
KEY_H = 72
KEY_I = 73
KEY_J = 74
KEY_K = 75
KEY_L = 76
KEY_M = 77
KEY_N = 78
KEY_O = 79
KEY_P = 80
KEY_Q = 81
KEY_R = 82
KEY_S = 83
KEY_T = 84
KEY_U = 85
KEY_V = 86
KEY_W = 87
KEY_X = 88
KEY_Y = 89
KEY_Z = 90
KEY_LEFT_BRACKET = 91
KEY_BACKSLASH = 92
KEY_RIGHT_BRACKET = 93
KEY_GRAVE_ACCENT = 96
KEY_WORLD_1 = 161
KEY_WORLD_2 = 162
KEY_ESCAPE = 256
KEY_ENTER = 257
KEY_TAB = 258
KEY_BACKSPACE = 259
KEY_INSERT = 260
KEY_DELETE = 261
KEY_RIGHT = 262
KEY_LEFT = 263
KEY_DOWN = 264
KEY_UP = 265
KEY_PAGE_UP = 266
KEY_PAGE_DOWN = 267
KEY_HOME = 268
KEY_END = 269
KEY_CAPS_LOCK = 280
KEY_SCROLL_LOCK = 281
KEY_NUM_LOCK = 282
KEY_PRINT_SCREEN = 283
KEY_PAUSE = 284
KEY_F1 = 290
KEY_F2 = 291
KEY_F3 = 292
KEY_F4 = 293
KEY_F5 = 294
KEY_F6 = 295
KEY_F7 = 296
KEY_F8 = 297
KEY_F9 = 298
KEY_F10 = 299
KEY_F11 = 300
KEY_F12 = 301
KEY_F13 = 302
KEY_F14 = 303
KEY_F15 = 304
KEY_F16 = 305
KEY_F17 = 306
KEY_F18 = 307
KEY_F19 = 308
KEY_F20 = 309
KEY_F21 = 310
KEY_F22 = 311
KEY_F23 = 312
KEY_F24 = 313
KEY_F25 = 314
KEY_KP_0 = 320
KEY_KP_1 = 321
KEY_KP_2 = 322
KEY_KP_3 = 323
KEY_KP_4 = 324
KEY_KP_5 = 325
KEY_KP_6 = 326
KEY_KP_7 = 327
KEY_KP_8 = 328
KEY_KP_9 = 329
KEY_KP_DECIMAL = 330
KEY_KP_DIVIDE = 331
KEY_KP_MULTIPLY = 332
KEY_KP_SUBTRACT = 333
KEY_KP_ADD = 334
KEY_KP_ENTER = 335
KEY_KP_EQUAL = 336
KEY_LEFT_SHIFT = 340
KEY_LEFT_CONTROL = 341
KEY_LEFT_ALT = 342
KEY_LEFT_SUPER = 343
KEY_RIGHT_SHIFT = 344
KEY_RIGHT_CONTROL = 345
KEY_RIGHT_ALT = 346
KEY_RIGHT_SUPER = 347
KEY_MENU = 348
KEY_LAST = KEY_MENU
# Modifier key bit flags.
MOD_SHIFT = 0x0001
MOD_CONTROL = 0x0002
MOD_ALT = 0x0004
MOD_SUPER = 0x0008
# Mouse buttons.
MOUSE_BUTTON_1 = 0
MOUSE_BUTTON_2 = 1
MOUSE_BUTTON_3 = 2
MOUSE_BUTTON_4 = 3
MOUSE_BUTTON_5 = 4
MOUSE_BUTTON_6 = 5
MOUSE_BUTTON_7 = 6
MOUSE_BUTTON_8 = 7
MOUSE_BUTTON_LAST = MOUSE_BUTTON_8
MOUSE_BUTTON_LEFT = MOUSE_BUTTON_1
MOUSE_BUTTON_RIGHT = MOUSE_BUTTON_2
MOUSE_BUTTON_MIDDLE = MOUSE_BUTTON_3
# Joysticks.
JOYSTICK_1 = 0
JOYSTICK_2 = 1
JOYSTICK_3 = 2
JOYSTICK_4 = 3
JOYSTICK_5 = 4
JOYSTICK_6 = 5
JOYSTICK_7 = 6
JOYSTICK_8 = 7
JOYSTICK_9 = 8
JOYSTICK_10 = 9
JOYSTICK_11 = 10
JOYSTICK_12 = 11
JOYSTICK_13 = 12
JOYSTICK_14 = 13
JOYSTICK_15 = 14
JOYSTICK_16 = 15
JOYSTICK_LAST = JOYSTICK_16
# Error codes (passed to the error callback).
NOT_INITIALIZED = 0x00010001
NO_CURRENT_CONTEXT = 0x00010002
INVALID_ENUM = 0x00010003
INVALID_VALUE = 0x00010004
OUT_OF_MEMORY = 0x00010005
API_UNAVAILABLE = 0x00010006
VERSION_UNAVAILABLE = 0x00010007
PLATFORM_ERROR = 0x00010008
FORMAT_UNAVAILABLE = 0x00010009
NO_WINDOW_CONTEXT = 0x0001000A
# Window attributes / hints.
FOCUSED = 0x00020001
ICONIFIED = 0x00020002
RESIZABLE = 0x00020003
VISIBLE = 0x00020004
DECORATED = 0x00020005
AUTO_ICONIFY = 0x00020006
FLOATING = 0x00020007
MAXIMIZED = 0x00020008
# Framebuffer hints.
RED_BITS = 0x00021001
GREEN_BITS = 0x00021002
BLUE_BITS = 0x00021003
ALPHA_BITS = 0x00021004
DEPTH_BITS = 0x00021005
STENCIL_BITS = 0x00021006
ACCUM_RED_BITS = 0x00021007
ACCUM_GREEN_BITS = 0x00021008
ACCUM_BLUE_BITS = 0x00021009
ACCUM_ALPHA_BITS = 0x0002100A
AUX_BUFFERS = 0x0002100B
STEREO = 0x0002100C
SAMPLES = 0x0002100D
SRGB_CAPABLE = 0x0002100E
REFRESH_RATE = 0x0002100F
DOUBLEBUFFER = 0x00021010
# Context hints.
CLIENT_API = 0x00022001
CONTEXT_VERSION_MAJOR = 0x00022002
CONTEXT_VERSION_MINOR = 0x00022003
CONTEXT_REVISION = 0x00022004
CONTEXT_ROBUSTNESS = 0x00022005
OPENGL_FORWARD_COMPAT = 0x00022006
OPENGL_DEBUG_CONTEXT = 0x00022007
OPENGL_PROFILE = 0x00022008
CONTEXT_RELEASE_BEHAVIOR = 0x00022009
CONTEXT_NO_ERROR = 0x0002200A
CONTEXT_CREATION_API = 0x0002200B
# Values for CLIENT_API.
NO_API = 0
OPENGL_API = 0x00030001
OPENGL_ES_API = 0x00030002
# Values for CONTEXT_ROBUSTNESS.
NO_ROBUSTNESS = 0
NO_RESET_NOTIFICATION = 0x00031001
LOSE_CONTEXT_ON_RESET = 0x00031002
# Values for OPENGL_PROFILE.
OPENGL_ANY_PROFILE = 0
OPENGL_CORE_PROFILE = 0x00032001
OPENGL_COMPAT_PROFILE = 0x00032002
# Input modes and their values.
CURSOR = 0x00033001
STICKY_KEYS = 0x00033002
STICKY_MOUSE_BUTTONS = 0x00033003
CURSOR_NORMAL = 0x00034001
CURSOR_HIDDEN = 0x00034002
CURSOR_DISABLED = 0x00034003
# Values for CONTEXT_RELEASE_BEHAVIOR.
ANY_RELEASE_BEHAVIOR = 0
RELEASE_BEHAVIOR_FLUSH = 0x00035001
RELEASE_BEHAVIOR_NONE = 0x00035002
# Values for CONTEXT_CREATION_API.
NATIVE_CONTEXT_API = 0x00036001
EGL_CONTEXT_API = 0x00036002
# Standard cursor shapes.  These numerically overlap the context-creation-API
# values above; they belong to different enum spaces in the GLFW header.
ARROW_CURSOR = 0x00036001
IBEAM_CURSOR = 0x00036002
CROSSHAIR_CURSOR = 0x00036003
HAND_CURSOR = 0x00036004
HRESIZE_CURSOR = 0x00036005
VRESIZE_CURSOR = 0x00036006
# Monitor / joystick connection events.
CONNECTED = 0x00040001
DISCONNECTED = 0x00040002
DONT_CARE = -1
# Holds sys.exc_info() captured inside a C callback until the errcheck hook
# (installed by _prepare_errcheck) can re-raise it in Python control flow.
_exc_info_from_callback = None
def _callback_exception_decorator(func):
    # Wraps a Python callback so exceptions cannot propagate into C code;
    # they are stashed in _exc_info_from_callback and re-raised later.
    @functools.wraps(func)
    def callback_wrapper(*args, **kwargs):
        global _exc_info_from_callback
        if _exc_info_from_callback is not None:
            # We are on the way back to Python after an exception was raised.
            # Do not call further callbacks and wait for the errcheck function
            # to handle the exception first.
            return
        try:
            return func(*args, **kwargs)
        except (KeyboardInterrupt, SystemExit):
            # Let interpreter-exit exceptions through untouched.
            raise
        except:
            _exc_info_from_callback = sys.exc_info()
    return callback_wrapper
def _prepare_errcheck():
    """
    This function sets the errcheck attribute of all ctypes wrapped functions
    to evaluate the _exc_info_from_callback global variable and re-raise any
    exceptions that might have been raised in callbacks.
    It also modifies all callback types to automatically wrap the function
    using the _callback_exception_decorator.
    """
    def errcheck(result, *args):
        # Runs after every glfw* call; re-raises an exception captured while
        # C code was invoking a Python callback.
        global _exc_info_from_callback
        if _exc_info_from_callback is not None:
            exc = _exc_info_from_callback
            _exc_info_from_callback = None
            _reraise(exc[1], exc[2])
        return result
    for symbol in dir(_glfw):
        if symbol.startswith('glfw'):
            getattr(_glfw, symbol).errcheck = errcheck
    _globals = globals()
    for symbol in _globals:
        if symbol.startswith('_GLFW') and symbol.endswith('fun'):
            # cfunctype is bound as a default argument to avoid the
            # late-binding-closure pitfall inside this loop.
            def wrapper_cfunctype(func, cfunctype=_globals[symbol]):
                return cfunctype(_callback_exception_decorator(func))
            _globals[symbol] = wrapper_cfunctype
# ctypes prototypes for the GLFW callback signatures (GLFWerrorfun,
# GLFWwindowposfun, ...).  _prepare_errcheck later replaces each of these
# names with a factory that also wraps the Python callback in
# _callback_exception_decorator.
_GLFWerrorfun = ctypes.CFUNCTYPE(None,
                                 ctypes.c_int,
                                 ctypes.c_char_p)
_GLFWwindowposfun = ctypes.CFUNCTYPE(None,
                                     ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int,
                                     ctypes.c_int)
_GLFWwindowsizefun = ctypes.CFUNCTYPE(None,
                                      ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int,
                                      ctypes.c_int)
_GLFWwindowclosefun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow))
_GLFWwindowrefreshfun = ctypes.CFUNCTYPE(None,
                                         ctypes.POINTER(_GLFWwindow))
_GLFWwindowfocusfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int)
_GLFWwindowiconifyfun = ctypes.CFUNCTYPE(None,
                                         ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_int)
_GLFWframebuffersizefun = ctypes.CFUNCTYPE(None,
                                           ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int,
                                           ctypes.c_int)
_GLFWmousebuttonfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int,
                                       ctypes.c_int,
                                       ctypes.c_int)
_GLFWcursorposfun = ctypes.CFUNCTYPE(None,
                                     ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_double,
                                     ctypes.c_double)
_GLFWcursorenterfun = ctypes.CFUNCTYPE(None,
                                       ctypes.POINTER(_GLFWwindow),
                                       ctypes.c_int)
_GLFWscrollfun = ctypes.CFUNCTYPE(None,
                                  ctypes.POINTER(_GLFWwindow),
                                  ctypes.c_double,
                                  ctypes.c_double)
_GLFWkeyfun = ctypes.CFUNCTYPE(None,
                               ctypes.POINTER(_GLFWwindow),
                               ctypes.c_int,
                               ctypes.c_int,
                               ctypes.c_int,
                               ctypes.c_int)
_GLFWcharfun = ctypes.CFUNCTYPE(None,
                                ctypes.POINTER(_GLFWwindow),
                                ctypes.c_int)
_GLFWmonitorfun = ctypes.CFUNCTYPE(None,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.c_int)
_GLFWdropfun = ctypes.CFUNCTYPE(None,
                                ctypes.POINTER(_GLFWwindow),
                                ctypes.c_int,
                                ctypes.POINTER(ctypes.c_char_p))
_GLFWcharmodsfun = ctypes.CFUNCTYPE(None,
                                    ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_uint,
                                    ctypes.c_int)
_GLFWjoystickfun = ctypes.CFUNCTYPE(None,
                                    ctypes.c_int,
                                    ctypes.c_int)
_glfw.glfwInit.restype = ctypes.c_int
_glfw.glfwInit.argtypes = []
def init():
    """
    Initializes the GLFW library.

    Returns a non-zero int on success, 0 on failure.
    Wrapper for:
        int glfwInit(void);
    """
    cwd = _getcwd()
    res = _glfw.glfwInit()
    # glfwInit may change the process working directory (observed on OS X);
    # restore it so callers are unaffected.
    os.chdir(cwd)
    return res
_glfw.glfwTerminate.restype = None
_glfw.glfwTerminate.argtypes = []
def terminate():
    """
    Terminates the GLFW library.

    Also drops all Python-side callback and user-data references so the
    ctypes callback objects can be garbage-collected.
    Wrapper for:
        void glfwTerminate(void);
    """
    for callback_repository in _callback_repositories:
        for window_addr in list(callback_repository.keys()):
            del callback_repository[window_addr]
    # NOTE: _window_user_data_repository is defined elsewhere in this module.
    for window_addr in list(_window_user_data_repository.keys()):
        del _window_user_data_repository[window_addr]
    _glfw.glfwTerminate()
_glfw.glfwGetVersion.restype = None
_glfw.glfwGetVersion.argtypes = [ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int),
                                 ctypes.POINTER(ctypes.c_int)]
def get_version():
    """
    Retrieves the version of the GLFW library as a (major, minor, rev) tuple.
    Wrapper for:
        void glfwGetVersion(int* major, int* minor, int* rev);
    """
    major = ctypes.c_int(0)
    minor = ctypes.c_int(0)
    rev = ctypes.c_int(0)
    _glfw.glfwGetVersion(ctypes.byref(major),
                         ctypes.byref(minor),
                         ctypes.byref(rev))
    return major.value, minor.value, rev.value
_glfw.glfwGetVersionString.restype = ctypes.c_char_p
_glfw.glfwGetVersionString.argtypes = []
def get_version_string():
    """
    Returns a string describing the compile-time configuration.

    The restype is c_char_p, so this is a bytes object on Python 3.
    Wrapper for:
        const char* glfwGetVersionString(void);
    """
    return _glfw.glfwGetVersionString()
@_callback_exception_decorator
def _raise_glfw_errors_as_exceptions(error_code, description):
    """
    Default error callback that raises GLFWError exceptions for glfw errors.
    Set an alternative error callback or set glfw.ERROR_REPORTING to False to
    disable this behavior.
    """
    global ERROR_REPORTING
    if ERROR_REPORTING:
        message = "(%d) %s" % (error_code, description)
        raise GLFWError(message)
# Keep a module-level reference to the ctypes callback object so it is not
# garbage-collected while registered with GLFW.
_default_error_callback = _GLFWerrorfun(_raise_glfw_errors_as_exceptions)
# (python_callable, ctypes_callback) pair currently registered.
_error_callback = (_raise_glfw_errors_as_exceptions, _default_error_callback)
_glfw.glfwSetErrorCallback.restype = _GLFWerrorfun
_glfw.glfwSetErrorCallback.argtypes = [_GLFWerrorfun]
_glfw.glfwSetErrorCallback(_default_error_callback)
def set_error_callback(cbfun):
    """
    Sets the error callback and returns the previously set user callback,
    or None if the default exception-raising callback was active.
    Wrapper for:
        GLFWerrorfun glfwSetErrorCallback(GLFWerrorfun cbfun);
    """
    global _error_callback
    previous_callback = _error_callback
    if cbfun is None:
        # Passing None restores the default exception-raising callback.
        cbfun = _raise_glfw_errors_as_exceptions
        c_cbfun = _default_error_callback
    else:
        c_cbfun = _GLFWerrorfun(cbfun)
    # Store both the Python callable and the ctypes object so the latter
    # stays alive while registered with GLFW.
    _error_callback = (cbfun, c_cbfun)
    cbfun = c_cbfun
    _glfw.glfwSetErrorCallback(cbfun)
    if previous_callback is not None and previous_callback[0] != _raise_glfw_errors_as_exceptions:
        return previous_callback[0]
_glfw.glfwGetMonitors.restype = ctypes.POINTER(ctypes.POINTER(_GLFWmonitor))
_glfw.glfwGetMonitors.argtypes = [ctypes.POINTER(ctypes.c_int)]
def get_monitors():
    """
    Returns the currently connected monitors as a list of monitor pointers.
    Wrapper for:
        GLFWmonitor** glfwGetMonitors(int* count);
    """
    count = ctypes.c_int(0)
    monitor_array = _glfw.glfwGetMonitors(ctypes.byref(count))
    return [monitor_array[index] for index in range(count.value)]
_glfw.glfwGetPrimaryMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetPrimaryMonitor.argtypes = []
def get_primary_monitor():
    """
    Returns the primary monitor (a pointer to an opaque GLFWmonitor).
    Wrapper for:
        GLFWmonitor* glfwGetPrimaryMonitor(void);
    """
    return _glfw.glfwGetPrimaryMonitor()
_glfw.glfwGetMonitorPos.restype = None
_glfw.glfwGetMonitorPos.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_monitor_pos(monitor):
    """
    Returns the position of the monitor's viewport on the virtual screen
    as an (xpos, ypos) tuple.
    Wrapper for:
        void glfwGetMonitorPos(GLFWmonitor* monitor, int* xpos, int* ypos);
    """
    xpos = ctypes.c_int(0)
    ypos = ctypes.c_int(0)
    _glfw.glfwGetMonitorPos(monitor, ctypes.byref(xpos), ctypes.byref(ypos))
    return xpos.value, ypos.value
_glfw.glfwGetMonitorPhysicalSize.restype = None
_glfw.glfwGetMonitorPhysicalSize.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int)]
def get_monitor_physical_size(monitor):
    """
    Returns the physical size of the monitor as a (width, height) tuple.
    Wrapper for:
        void glfwGetMonitorPhysicalSize(GLFWmonitor* monitor, int* width, int* height);
    """
    width = ctypes.c_int(0)
    height = ctypes.c_int(0)
    _glfw.glfwGetMonitorPhysicalSize(monitor,
                                     ctypes.byref(width),
                                     ctypes.byref(height))
    return width.value, height.value
_glfw.glfwGetMonitorName.restype = ctypes.c_char_p
_glfw.glfwGetMonitorName.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_monitor_name(monitor):
    """
    Returns the name of the specified monitor.

    The restype is c_char_p, so this is a bytes object on Python 3.
    Wrapper for:
        const char* glfwGetMonitorName(GLFWmonitor* monitor);
    """
    return _glfw.glfwGetMonitorName(monitor)
# (python_callable, ctypes_callback) pair for the monitor callback; keeps the
# ctypes object alive while registered.
_monitor_callback = None
_glfw.glfwSetMonitorCallback.restype = _GLFWmonitorfun
_glfw.glfwSetMonitorCallback.argtypes = [_GLFWmonitorfun]
def set_monitor_callback(cbfun):
    """
    Sets the monitor configuration callback and returns the previously set
    user callback, if any.
    Wrapper for:
        GLFWmonitorfun glfwSetMonitorCallback(GLFWmonitorfun cbfun);
    """
    global _monitor_callback
    previous_callback = _monitor_callback
    if cbfun is None:
        # 0 produces a NULL function pointer, clearing the callback.
        cbfun = 0
    c_cbfun = _GLFWmonitorfun(cbfun)
    _monitor_callback = (cbfun, c_cbfun)
    cbfun = c_cbfun
    _glfw.glfwSetMonitorCallback(cbfun)
    if previous_callback is not None and previous_callback[0] != 0:
        return previous_callback[0]
_glfw.glfwGetVideoModes.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoModes.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                    ctypes.POINTER(ctypes.c_int)]
def get_video_modes(monitor):
    """
    Returns the available video modes for the specified monitor as a list of
    unwrapped ((width, height), (r, g, b), refresh_rate) tuples.
    Wrapper for:
        const GLFWvidmode* glfwGetVideoModes(GLFWmonitor* monitor, int* count);
    """
    count = ctypes.c_int(0)
    modes = _glfw.glfwGetVideoModes(monitor, ctypes.byref(count))
    return [modes[index].unwrap() for index in range(count.value)]
_glfw.glfwGetVideoMode.restype = ctypes.POINTER(_GLFWvidmode)
_glfw.glfwGetVideoMode.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_video_mode(monitor):
    """
    Returns the current mode of the specified monitor as an unwrapped
    ((width, height), (r, g, b), refresh_rate) tuple.
    Wrapper for:
        const GLFWvidmode* glfwGetVideoMode(GLFWmonitor* monitor);
    """
    videomode = _glfw.glfwGetVideoMode(monitor).contents
    return videomode.unwrap()
_glfw.glfwSetGamma.restype = None
_glfw.glfwSetGamma.argtypes = [ctypes.POINTER(_GLFWmonitor),
                               ctypes.c_float]
def set_gamma(monitor, gamma):
    """
    Generates a gamma ramp from the exponent `gamma` and sets it for the
    specified monitor.
    Wrapper for:
        void glfwSetGamma(GLFWmonitor* monitor, float gamma);
    """
    _glfw.glfwSetGamma(monitor, gamma)
_glfw.glfwGetGammaRamp.restype = ctypes.POINTER(_GLFWgammaramp)
_glfw.glfwGetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor)]
def get_gamma_ramp(monitor):
    """
    Retrieves the current gamma ramp for the specified monitor as a
    (red, green, blue) tuple of float lists in [0, 1].
    Wrapper for:
        const GLFWgammaramp* glfwGetGammaRamp(GLFWmonitor* monitor);
    """
    gammaramp = _glfw.glfwGetGammaRamp(monitor).contents
    return gammaramp.unwrap()
_glfw.glfwSetGammaRamp.restype = None
_glfw.glfwSetGammaRamp.argtypes = [ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWgammaramp)]
def set_gamma_ramp(monitor, ramp):
    """
    Sets the current gamma ramp for the specified monitor.

    `ramp` is a (red, green, blue) tuple of equal-length float sequences
    in [0, 1] (see _GLFWgammaramp.wrap).
    Wrapper for:
        void glfwSetGammaRamp(GLFWmonitor* monitor, const GLFWgammaramp* ramp);
    """
    gammaramp = _GLFWgammaramp()
    gammaramp.wrap(ramp)
    _glfw.glfwSetGammaRamp(monitor, ctypes.pointer(gammaramp))
_glfw.glfwDefaultWindowHints.restype = None
_glfw.glfwDefaultWindowHints.argtypes = []
def default_window_hints():
    """
    Resets all window hints to their default values.
    Wrapper for:
        void glfwDefaultWindowHints(void);
    """
    _glfw.glfwDefaultWindowHints()
_glfw.glfwWindowHint.restype = None
_glfw.glfwWindowHint.argtypes = [ctypes.c_int,
                                 ctypes.c_int]
def window_hint(target, hint):
    """
    Sets the specified window hint to the desired value.

    `target` is one of the hint constants defined above (e.g. RESIZABLE).
    Wrapper for:
        void glfwWindowHint(int target, int hint);
    """
    _glfw.glfwWindowHint(target, hint)
_glfw.glfwCreateWindow.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwCreateWindow.argtypes = [ctypes.c_int,
                                   ctypes.c_int,
                                   ctypes.c_char_p,
                                   ctypes.POINTER(_GLFWmonitor),
                                   ctypes.POINTER(_GLFWwindow)]
def create_window(width, height, title, monitor, share):
    """
    Creates a window and its associated context.

    `title` may be a str; it is encoded to bytes via _to_char_p on Python 3.
    Wrapper for:
        GLFWwindow* glfwCreateWindow(int width, int height, const char* title, GLFWmonitor* monitor, GLFWwindow* share);
    """
    return _glfw.glfwCreateWindow(width, height, _to_char_p(title),
                                  monitor, share)
_glfw.glfwDestroyWindow.restype = None
_glfw.glfwDestroyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def destroy_window(window):
    """
    Destroys the specified window and its context, and drops the Python-side
    callback/user-data references registered for it.
    Wrapper for:
        void glfwDestroyWindow(GLFWwindow* window);
    """
    _glfw.glfwDestroyWindow(window)
    # NOTE(review): casting the pointer through c_ulong truncates addresses
    # on LLP64 platforms (64-bit Windows, where long is 32 bits); c_void_p
    # would be safer, but the repositories are keyed by this same scheme in
    # the callback setters elsewhere in the module, so any change must be
    # made consistently across all of them.
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_ulong)).contents.value
    for callback_repository in _callback_repositories:
        if window_addr in callback_repository:
            del callback_repository[window_addr]
    if window_addr in _window_user_data_repository:
        del _window_user_data_repository[window_addr]
_glfw.glfwWindowShouldClose.restype = ctypes.c_int
_glfw.glfwWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow)]
def window_should_close(window):
    """Return the close flag of the given window as an int.

    Wraps the C function:
        int glfwWindowShouldClose(GLFWwindow* window);
    """
    return _glfw.glfwWindowShouldClose(window)
_glfw.glfwSetWindowShouldClose.restype = None
_glfw.glfwSetWindowShouldClose.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_int]
def set_window_should_close(window, value):
    """Set the close flag of the given window.

    Wraps the C function:
        void glfwSetWindowShouldClose(GLFWwindow* window, int value);
    """
    _glfw.glfwSetWindowShouldClose(window, value)
_glfw.glfwSetWindowTitle.restype = None
_glfw.glfwSetWindowTitle.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_char_p]
def set_window_title(window, title):
    """Change the title of the given window.

    Wraps the C function:
        void glfwSetWindowTitle(GLFWwindow* window, const char* title);
    """
    c_title = _to_char_p(title)
    _glfw.glfwSetWindowTitle(window, c_title)
_glfw.glfwGetWindowPos.restype = None
_glfw.glfwGetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_int),
                                   ctypes.POINTER(ctypes.c_int)]
def get_window_pos(window):
    """Return the (x, y) position of the window's client area.

    Wraps the C function:
        void glfwGetWindowPos(GLFWwindow* window, int* xpos, int* ypos);
    """
    # Out-parameters for the C call.
    x = ctypes.c_int(0)
    y = ctypes.c_int(0)
    _glfw.glfwGetWindowPos(window, ctypes.pointer(x), ctypes.pointer(y))
    return x.value, y.value
_glfw.glfwSetWindowPos.restype = None
_glfw.glfwSetWindowPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_window_pos(window, xpos, ypos):
    """Move the client area of the given window to (xpos, ypos).

    Wraps the C function:
        void glfwSetWindowPos(GLFWwindow* window, int xpos, int ypos);
    """
    _glfw.glfwSetWindowPos(window, xpos, ypos)
_glfw.glfwGetWindowSize.restype = None
_glfw.glfwGetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(ctypes.c_int),
                                    ctypes.POINTER(ctypes.c_int)]
def get_window_size(window):
    """Return the (width, height) of the window's client area.

    Wraps the C function:
        void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);
    """
    # Out-parameters for the C call.
    w = ctypes.c_int(0)
    h = ctypes.c_int(0)
    _glfw.glfwGetWindowSize(window, ctypes.pointer(w), ctypes.pointer(h))
    return w.value, h.value
_glfw.glfwSetWindowSize.restype = None
_glfw.glfwSetWindowSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.c_int,
                                    ctypes.c_int]
def set_window_size(window, width, height):
    """Resize the client area of the given window.

    Wraps the C function:
        void glfwSetWindowSize(GLFWwindow* window, int width, int height);
    """
    _glfw.glfwSetWindowSize(window, width, height)
_glfw.glfwGetFramebufferSize.restype = None
_glfw.glfwGetFramebufferSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.POINTER(ctypes.c_int),
                                         ctypes.POINTER(ctypes.c_int)]
def get_framebuffer_size(window):
    """Return the (width, height) of the window's framebuffer in pixels.

    Wraps the C function:
        void glfwGetFramebufferSize(GLFWwindow* window, int* width, int* height);
    """
    # Out-parameters for the C call.
    w = ctypes.c_int(0)
    h = ctypes.c_int(0)
    _glfw.glfwGetFramebufferSize(window, ctypes.pointer(w), ctypes.pointer(h))
    return w.value, h.value
_glfw.glfwIconifyWindow.restype = None
_glfw.glfwIconifyWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def iconify_window(window):
    """Minimize (iconify) the given window.

    Wraps the C function:
        void glfwIconifyWindow(GLFWwindow* window);
    """
    _glfw.glfwIconifyWindow(window)
_glfw.glfwRestoreWindow.restype = None
_glfw.glfwRestoreWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def restore_window(window):
    """Restore the given window from an iconified state.

    Wraps the C function:
        void glfwRestoreWindow(GLFWwindow* window);
    """
    _glfw.glfwRestoreWindow(window)
_glfw.glfwShowWindow.restype = None
_glfw.glfwShowWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def show_window(window):
    """Make the given window visible.

    Wraps the C function:
        void glfwShowWindow(GLFWwindow* window);
    """
    _glfw.glfwShowWindow(window)
_glfw.glfwHideWindow.restype = None
_glfw.glfwHideWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
def hide_window(window):
    """Make the given window invisible.

    Wraps the C function:
        void glfwHideWindow(GLFWwindow* window);
    """
    _glfw.glfwHideWindow(window)
_glfw.glfwGetWindowMonitor.restype = ctypes.POINTER(_GLFWmonitor)
_glfw.glfwGetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_monitor(window):
    """Return the monitor the window uses in full screen mode.

    Wraps the C function:
        GLFWmonitor* glfwGetWindowMonitor(GLFWwindow* window);
    """
    return _glfw.glfwGetWindowMonitor(window)
_glfw.glfwGetWindowAttrib.restype = ctypes.c_int
_glfw.glfwGetWindowAttrib.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      ctypes.c_int]
def get_window_attrib(window, attrib):
    """Return the value of the requested attribute of the given window.

    Wraps the C function:
        int glfwGetWindowAttrib(GLFWwindow* window, int attrib);
    """
    return _glfw.glfwGetWindowAttrib(window, attrib)
_window_user_data_repository = {}
_glfw.glfwSetWindowUserPointer.restype = None
_glfw.glfwSetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.c_void_p]
def set_window_user_pointer(window, pointer):
    """
    Sets the user pointer of the specified window. You may pass a normal python object into this function and it will
    be wrapped automatically. The object will be kept in existence until the pointer is set to something else or
    until the window is destroyed.
    Wrapper for:
        void glfwSetWindowUserPointer(GLFWwindow* window, void* pointer);
    """
    data = (False, pointer)
    if not isinstance(pointer, ctypes.c_void_p):
        # BUG FIX: keep the py_object wrapper alive by storing it in the
        # repository entry. The original built the wrapper as a temporary
        # inside the cast() call, so it was garbage-collected immediately
        # and the void pointer handed to GLFW dangled.
        holder = ctypes.py_object(pointer)
        data = (True, pointer, holder)
        # Create a void pointer for the python object
        pointer = ctypes.cast(ctypes.pointer(holder), ctypes.c_void_p)
    window_addr = ctypes.cast(ctypes.pointer(window),
                              ctypes.POINTER(ctypes.c_long)).contents.value
    _window_user_data_repository[window_addr] = data
    _glfw.glfwSetWindowUserPointer(window, pointer)
_glfw.glfwGetWindowUserPointer.restype = ctypes.c_void_p
_glfw.glfwGetWindowUserPointer.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_window_user_pointer(window):
    """Return the user pointer of the given window.

    If a plain python object was stored with set_window_user_pointer, that
    object is returned directly from the wrapper's repository; otherwise
    the raw pointer is queried from GLFW.
    Wraps the C function:
        void* glfwGetWindowUserPointer(GLFWwindow* window);
    """
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    data = _window_user_data_repository.get(addr)
    if data is not None and data[0]:
        # data[0] marks entries that wrap a plain python object.
        return data[1]
    return _glfw.glfwGetWindowUserPointer(window)
_window_pos_callback_repository = {}
_callback_repositories.append(_window_pos_callback_repository)
_glfw.glfwSetWindowPosCallback.restype = _GLFWwindowposfun
_glfw.glfwSetWindowPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWwindowposfun]
def set_window_pos_callback(window, cbfun):
    """Set the position callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWwindowposfun glfwSetWindowPosCallback(GLFWwindow* window, GLFWwindowposfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _window_pos_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowposfun(cbfun)
    _window_pos_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowPosCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_window_size_callback_repository = {}
_callback_repositories.append(_window_size_callback_repository)
_glfw.glfwSetWindowSizeCallback.restype = _GLFWwindowsizefun
_glfw.glfwSetWindowSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                            _GLFWwindowsizefun]
def set_window_size_callback(window, cbfun):
    """Set the size callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWwindowsizefun glfwSetWindowSizeCallback(GLFWwindow* window, GLFWwindowsizefun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _window_size_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowsizefun(cbfun)
    _window_size_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowSizeCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_window_close_callback_repository = {}
_callback_repositories.append(_window_close_callback_repository)
_glfw.glfwSetWindowCloseCallback.restype = _GLFWwindowclosefun
_glfw.glfwSetWindowCloseCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowclosefun]
def set_window_close_callback(window, cbfun):
    """Set the close callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWwindowclosefun glfwSetWindowCloseCallback(GLFWwindow* window, GLFWwindowclosefun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _window_close_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowclosefun(cbfun)
    _window_close_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowCloseCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_window_refresh_callback_repository = {}
_callback_repositories.append(_window_refresh_callback_repository)
_glfw.glfwSetWindowRefreshCallback.restype = _GLFWwindowrefreshfun
_glfw.glfwSetWindowRefreshCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowrefreshfun]
def set_window_refresh_callback(window, cbfun):
    """Set the refresh callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWwindowrefreshfun glfwSetWindowRefreshCallback(GLFWwindow* window, GLFWwindowrefreshfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _window_refresh_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowrefreshfun(cbfun)
    _window_refresh_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowRefreshCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_window_focus_callback_repository = {}
_callback_repositories.append(_window_focus_callback_repository)
_glfw.glfwSetWindowFocusCallback.restype = _GLFWwindowfocusfun
_glfw.glfwSetWindowFocusCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWwindowfocusfun]
def set_window_focus_callback(window, cbfun):
    """Set the focus callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWwindowfocusfun glfwSetWindowFocusCallback(GLFWwindow* window, GLFWwindowfocusfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _window_focus_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowfocusfun(cbfun)
    _window_focus_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowFocusCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_window_iconify_callback_repository = {}
_callback_repositories.append(_window_iconify_callback_repository)
_glfw.glfwSetWindowIconifyCallback.restype = _GLFWwindowiconifyfun
_glfw.glfwSetWindowIconifyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               _GLFWwindowiconifyfun]
def set_window_iconify_callback(window, cbfun):
    """Set the iconify callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWwindowiconifyfun glfwSetWindowIconifyCallback(GLFWwindow* window, GLFWwindowiconifyfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _window_iconify_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWwindowiconifyfun(cbfun)
    _window_iconify_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetWindowIconifyCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_framebuffer_size_callback_repository = {}
_callback_repositories.append(_framebuffer_size_callback_repository)
_glfw.glfwSetFramebufferSizeCallback.restype = _GLFWframebuffersizefun
_glfw.glfwSetFramebufferSizeCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                                 _GLFWframebuffersizefun]
def set_framebuffer_size_callback(window, cbfun):
    """Set the framebuffer resize callback of a window; return the previous one.

    Wraps the C function:
        GLFWframebuffersizefun glfwSetFramebufferSizeCallback(GLFWwindow* window, GLFWframebuffersizefun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _framebuffer_size_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWframebuffersizefun(cbfun)
    _framebuffer_size_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetFramebufferSizeCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_glfw.glfwPollEvents.restype = None
_glfw.glfwPollEvents.argtypes = []
def poll_events():
    """Process all pending events, then return immediately.

    Wraps the C function:
        void glfwPollEvents(void);
    """
    _glfw.glfwPollEvents()
_glfw.glfwWaitEvents.restype = None
_glfw.glfwWaitEvents.argtypes = []
def wait_events():
    """Block until at least one event is pending, then process all events.

    Wraps the C function:
        void glfwWaitEvents(void);
    """
    _glfw.glfwWaitEvents()
_glfw.glfwGetInputMode.restype = ctypes.c_int
_glfw.glfwGetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int]
def get_input_mode(window, mode):
    """Return the value of an input option of the given window.

    Wraps the C function:
        int glfwGetInputMode(GLFWwindow* window, int mode);
    """
    return _glfw.glfwGetInputMode(window, mode)
_glfw.glfwSetInputMode.restype = None
_glfw.glfwSetInputMode.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_int,
                                   ctypes.c_int]
def set_input_mode(window, mode, value):
    """Set an input option of the given window.

    @param[in] window The window whose input mode to set.
    @param[in] mode One of `GLFW_CURSOR`, `GLFW_STICKY_KEYS` or
        `GLFW_STICKY_MOUSE_BUTTONS`.
    @param[in] value The new value of the specified input mode.
    Wraps the C function:
        void glfwSetInputMode(GLFWwindow* window, int mode, int value);
    """
    _glfw.glfwSetInputMode(window, mode, value)
_glfw.glfwGetKey.restype = ctypes.c_int
_glfw.glfwGetKey.argtypes = [ctypes.POINTER(_GLFWwindow),
                             ctypes.c_int]
def get_key(window, key):
    """Return the last reported state of a keyboard key for the window.

    Wraps the C function:
        int glfwGetKey(GLFWwindow* window, int key);
    """
    return _glfw.glfwGetKey(window, key)
_glfw.glfwGetMouseButton.restype = ctypes.c_int
_glfw.glfwGetMouseButton.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     ctypes.c_int]
def get_mouse_button(window, button):
    """Return the last reported state of a mouse button for the window.

    Wraps the C function:
        int glfwGetMouseButton(GLFWwindow* window, int button);
    """
    return _glfw.glfwGetMouseButton(window, button)
_glfw.glfwGetCursorPos.restype = None
_glfw.glfwGetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.POINTER(ctypes.c_double),
                                   ctypes.POINTER(ctypes.c_double)]
def get_cursor_pos(window):
    """Return the last reported cursor position as (x, y), relative to the
    client area of the window.

    Wraps the C function:
        void glfwGetCursorPos(GLFWwindow* window, double* xpos, double* ypos);
    """
    # Out-parameters for the C call.
    x = ctypes.c_double(0.0)
    y = ctypes.c_double(0.0)
    _glfw.glfwGetCursorPos(window, ctypes.pointer(x), ctypes.pointer(y))
    return x.value, y.value
_glfw.glfwSetCursorPos.restype = None
_glfw.glfwSetCursorPos.argtypes = [ctypes.POINTER(_GLFWwindow),
                                   ctypes.c_double,
                                   ctypes.c_double]
def set_cursor_pos(window, xpos, ypos):
    """Move the cursor to (xpos, ypos), relative to the window's client area.

    Wraps the C function:
        void glfwSetCursorPos(GLFWwindow* window, double xpos, double ypos);
    """
    _glfw.glfwSetCursorPos(window, xpos, ypos)
_key_callback_repository = {}
_callback_repositories.append(_key_callback_repository)
_glfw.glfwSetKeyCallback.restype = _GLFWkeyfun
_glfw.glfwSetKeyCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                     _GLFWkeyfun]
def set_key_callback(window, cbfun):
    """Set the key callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWkeyfun glfwSetKeyCallback(GLFWwindow* window, GLFWkeyfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _key_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWkeyfun(cbfun)
    _key_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetKeyCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_char_callback_repository = {}
_callback_repositories.append(_char_callback_repository)
_glfw.glfwSetCharCallback.restype = _GLFWcharfun
_glfw.glfwSetCharCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                      _GLFWcharfun]
def set_char_callback(window, cbfun):
    """Set the Unicode character callback of a window; return the previous one.

    Wraps the C function:
        GLFWcharfun glfwSetCharCallback(GLFWwindow* window, GLFWcharfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _char_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcharfun(cbfun)
    _char_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCharCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_mouse_button_callback_repository = {}
_callback_repositories.append(_mouse_button_callback_repository)
_glfw.glfwSetMouseButtonCallback.restype = _GLFWmousebuttonfun
_glfw.glfwSetMouseButtonCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWmousebuttonfun]
def set_mouse_button_callback(window, cbfun):
    """Set the mouse button callback of a window; return the previous one.

    Wraps the C function:
        GLFWmousebuttonfun glfwSetMouseButtonCallback(GLFWwindow* window, GLFWmousebuttonfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _mouse_button_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWmousebuttonfun(cbfun)
    _mouse_button_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetMouseButtonCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_cursor_pos_callback_repository = {}
_callback_repositories.append(_cursor_pos_callback_repository)
_glfw.glfwSetCursorPosCallback.restype = _GLFWcursorposfun
_glfw.glfwSetCursorPosCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           _GLFWcursorposfun]
def set_cursor_pos_callback(window, cbfun):
    """Set the cursor position callback of a window; return the previous one.

    Wraps the C function:
        GLFWcursorposfun glfwSetCursorPosCallback(GLFWwindow* window, GLFWcursorposfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _cursor_pos_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcursorposfun(cbfun)
    _cursor_pos_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorPosCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_cursor_enter_callback_repository = {}
_callback_repositories.append(_cursor_enter_callback_repository)
_glfw.glfwSetCursorEnterCallback.restype = _GLFWcursorenterfun
_glfw.glfwSetCursorEnterCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             _GLFWcursorenterfun]
def set_cursor_enter_callback(window, cbfun):
    """Set the cursor enter/exit callback of a window; return the previous one.

    Wraps the C function:
        GLFWcursorenterfun glfwSetCursorEnterCallback(GLFWwindow* window, GLFWcursorenterfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _cursor_enter_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWcursorenterfun(cbfun)
    _cursor_enter_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetCursorEnterCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_scroll_callback_repository = {}
_callback_repositories.append(_scroll_callback_repository)
_glfw.glfwSetScrollCallback.restype = _GLFWscrollfun
_glfw.glfwSetScrollCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        _GLFWscrollfun]
def set_scroll_callback(window, cbfun):
    """Set the scroll callback of a window; return the previous one, if any.

    Wraps the C function:
        GLFWscrollfun glfwSetScrollCallback(GLFWwindow* window, GLFWscrollfun cbfun);
    """
    # Key the repository by window address so the ctypes callback object
    # stays alive as long as the window does.
    addr = ctypes.cast(ctypes.pointer(window),
                       ctypes.POINTER(ctypes.c_long)).contents.value
    previous = _scroll_callback_repository.get(addr)
    if cbfun is None:
        cbfun = 0
    c_cbfun = _GLFWscrollfun(cbfun)
    _scroll_callback_repository[addr] = (cbfun, c_cbfun)
    _glfw.glfwSetScrollCallback(window, c_cbfun)
    if previous is not None and previous[0] != 0:
        return previous[0]
_glfw.glfwJoystickPresent.restype = ctypes.c_int
_glfw.glfwJoystickPresent.argtypes = [ctypes.c_int]
def joystick_present(joy):
    """Return whether the specified joystick is present (as an int).

    Wraps the C function:
        int glfwJoystickPresent(int joy);
    """
    return _glfw.glfwJoystickPresent(joy)
_glfw.glfwGetJoystickAxes.restype = ctypes.POINTER(ctypes.c_float)
_glfw.glfwGetJoystickAxes.argtypes = [ctypes.c_int,
                                      ctypes.POINTER(ctypes.c_int)]
def get_joystick_axes(joy):
    """Return (axes_pointer, count) for all axes of the specified joystick.

    Wraps the C function:
        const float* glfwGetJoystickAxes(int joy, int* count);
    """
    count = ctypes.c_int(0)
    axes = _glfw.glfwGetJoystickAxes(joy, ctypes.pointer(count))
    return axes, count.value
_glfw.glfwGetJoystickButtons.restype = ctypes.POINTER(ctypes.c_ubyte)
_glfw.glfwGetJoystickButtons.argtypes = [ctypes.c_int,
                                         ctypes.POINTER(ctypes.c_int)]
def get_joystick_buttons(joy):
    """Return (buttons_pointer, count) for all buttons of the joystick.

    Wraps the C function:
        const unsigned char* glfwGetJoystickButtons(int joy, int* count);
    """
    count = ctypes.c_int(0)
    buttons = _glfw.glfwGetJoystickButtons(joy, ctypes.pointer(count))
    return buttons, count.value
_glfw.glfwGetJoystickName.restype = ctypes.c_char_p
_glfw.glfwGetJoystickName.argtypes = [ctypes.c_int]
def get_joystick_name(joy):
    """Return the name of the specified joystick.

    Wraps the C function:
        const char* glfwGetJoystickName(int joy);
    """
    return _glfw.glfwGetJoystickName(joy)
_glfw.glfwSetClipboardString.restype = None
_glfw.glfwSetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow),
                                         ctypes.c_char_p]
def set_clipboard_string(window, string):
    """Replace the system clipboard contents with the given string.

    Wraps the C function:
        void glfwSetClipboardString(GLFWwindow* window, const char* string);
    """
    c_string = _to_char_p(string)
    _glfw.glfwSetClipboardString(window, c_string)
_glfw.glfwGetClipboardString.restype = ctypes.c_char_p
_glfw.glfwGetClipboardString.argtypes = [ctypes.POINTER(_GLFWwindow)]
def get_clipboard_string(window):
    """Return the contents of the system clipboard as a byte string.

    Wraps the C function:
        const char* glfwGetClipboardString(GLFWwindow* window);
    """
    return _glfw.glfwGetClipboardString(window)
_glfw.glfwGetTime.restype = ctypes.c_double
_glfw.glfwGetTime.argtypes = []
def get_time():
    """Return the current value of the GLFW timer, in seconds.

    Wraps the C function:
        double glfwGetTime(void);
    """
    return _glfw.glfwGetTime()
_glfw.glfwSetTime.restype = None
_glfw.glfwSetTime.argtypes = [ctypes.c_double]
def set_time(time):
    """Set the GLFW timer to the given value, in seconds.

    Wraps the C function:
        void glfwSetTime(double time);
    """
    # NOTE: the parameter name `time` shadows the stdlib module inside this
    # function; kept as-is because callers may pass it by keyword.
    _glfw.glfwSetTime(time)
_glfw.glfwMakeContextCurrent.restype = None
_glfw.glfwMakeContextCurrent.argtypes = [ctypes.POINTER(_GLFWwindow)]
def make_context_current(window):
    """Make the window's context current on the calling thread.

    Wraps the C function:
        void glfwMakeContextCurrent(GLFWwindow* window);
    """
    _glfw.glfwMakeContextCurrent(window)
_glfw.glfwGetCurrentContext.restype = ctypes.POINTER(_GLFWwindow)
_glfw.glfwGetCurrentContext.argtypes = []
def get_current_context():
    """Return the window whose context is current on the calling thread.

    Wraps the C function:
        GLFWwindow* glfwGetCurrentContext(void);
    """
    return _glfw.glfwGetCurrentContext()
_glfw.glfwSwapBuffers.restype = None
_glfw.glfwSwapBuffers.argtypes = [ctypes.POINTER(_GLFWwindow)]
def swap_buffers(window):
    """Swap the front and back buffers of the given window.

    Wraps the C function:
        void glfwSwapBuffers(GLFWwindow* window);
    """
    _glfw.glfwSwapBuffers(window)
_glfw.glfwSwapInterval.restype = None
_glfw.glfwSwapInterval.argtypes = [ctypes.c_int]
def swap_interval(interval):
    """Set the swap interval (vsync) for the current context.

    Wraps the C function:
        void glfwSwapInterval(int interval);
    """
    _glfw.glfwSwapInterval(interval)
_glfw.glfwExtensionSupported.restype = ctypes.c_int
_glfw.glfwExtensionSupported.argtypes = [ctypes.c_char_p]
def extension_supported(extension):
    """Return whether the named extension is available (as an int).

    Wraps the C function:
        int glfwExtensionSupported(const char* extension);
    """
    c_extension = _to_char_p(extension)
    return _glfw.glfwExtensionSupported(c_extension)
_glfw.glfwGetProcAddress.restype = ctypes.c_void_p
_glfw.glfwGetProcAddress.argtypes = [ctypes.c_char_p]
def get_proc_address(procname):
    """Return the address of the named function in the current context.

    Wraps the C function:
        GLFWglproc glfwGetProcAddress(const char* procname);
    """
    c_procname = _to_char_p(procname)
    return _glfw.glfwGetProcAddress(c_procname)
if hasattr(_glfw, 'glfwSetDropCallback'):
    _window_drop_callback_repository = {}
    _callback_repositories.append(_window_drop_callback_repository)
    _glfw.glfwSetDropCallback.restype = _GLFWdropfun
    _glfw.glfwSetDropCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                          _GLFWdropfun]
    def set_drop_callback(window, cbfun):
        """Set the file drop callback of a window; return the previous one.

        Wraps the C function:
            GLFWdropfun glfwSetDropCallback(GLFWwindow* window, GLFWdropfun cbfun);
        """
        addr = ctypes.cast(ctypes.pointer(window),
                           ctypes.POINTER(ctypes.c_long)).contents.value
        previous = _window_drop_callback_repository.get(addr)
        if cbfun is None:
            cbfun = 0
        else:
            # Decode the C string array into a list of python strings
            # before invoking the user callback.
            def _wrapper(window, count, c_paths, cbfun=cbfun):
                cbfun(window, [c_paths[i].decode('utf-8')
                               for i in range(count)])
            cbfun = _wrapper
        c_cbfun = _GLFWdropfun(cbfun)
        _window_drop_callback_repository[addr] = (cbfun, c_cbfun)
        _glfw.glfwSetDropCallback(window, c_cbfun)
        if previous is not None and previous[0] != 0:
            return previous[0]
if hasattr(_glfw, 'glfwSetCharModsCallback'):
    _window_char_mods_callback_repository = {}
    _callback_repositories.append(_window_char_mods_callback_repository)
    _glfw.glfwSetCharModsCallback.restype = _GLFWcharmodsfun
    _glfw.glfwSetCharModsCallback.argtypes = [ctypes.POINTER(_GLFWwindow),
                                              _GLFWcharmodsfun]
    def set_char_mods_callback(window, cbfun):
        """Set the Unicode character-with-modifiers callback of a window;
        return the previous one, if any.

        Wraps the C function:
            GLFWcharmodsfun glfwSetCharModsCallback(GLFWwindow* window, GLFWcharmodsfun cbfun);
        """
        addr = ctypes.cast(ctypes.pointer(window),
                           ctypes.POINTER(ctypes.c_long)).contents.value
        previous = _window_char_mods_callback_repository.get(addr)
        if cbfun is None:
            cbfun = 0
        c_cbfun = _GLFWcharmodsfun(cbfun)
        _window_char_mods_callback_repository[addr] = (cbfun, c_cbfun)
        _glfw.glfwSetCharModsCallback(window, c_cbfun)
        if previous is not None and previous[0] != 0:
            return previous[0]
if hasattr(_glfw, 'glfwVulkanSupported'):
    _glfw.glfwVulkanSupported.restype = ctypes.c_int
    _glfw.glfwVulkanSupported.argtypes = []
    def vulkan_supported():
        """Return True if the Vulkan loader has been found.

        Wraps the C function:
            int glfwVulkanSupported(void);
        """
        return bool(_glfw.glfwVulkanSupported())
if hasattr(_glfw, 'glfwGetRequiredInstanceExtensions'):
    _glfw.glfwGetRequiredInstanceExtensions.restype = ctypes.POINTER(ctypes.c_char_p)
    _glfw.glfwGetRequiredInstanceExtensions.argtypes = [ctypes.POINTER(ctypes.c_uint32)]
    def get_required_instance_extensions():
        """Return the Vulkan instance extensions required by GLFW as a list
        of decoded strings.

        Wraps the C function:
            const char** glfwGetRequiredInstanceExtensions(uint32_t* count);
        """
        count = ctypes.c_uint32(0)
        c_extensions = _glfw.glfwGetRequiredInstanceExtensions(
            ctypes.pointer(count))
        return [c_extensions[i].decode('utf-8')
                for i in range(count.value)]
if hasattr(_glfw, 'glfwGetTimerValue'):
    _glfw.glfwGetTimerValue.restype = ctypes.c_uint64
    _glfw.glfwGetTimerValue.argtypes = []
    def get_timer_value():
        """Return the current value of the raw timer as an int.

        Wraps the C function:
            uint64_t glfwGetTimerValue(void);
        """
        return int(_glfw.glfwGetTimerValue())
if hasattr(_glfw, 'glfwGetTimerFrequency'):
    _glfw.glfwGetTimerFrequency.restype = ctypes.c_uint64
    _glfw.glfwGetTimerFrequency.argtypes = []
    def get_timer_frequency():
        """Return the frequency of the raw timer, in Hz, as an int.

        Wraps the C function:
            uint64_t glfwGetTimerFrequency(void);
        """
        return int(_glfw.glfwGetTimerFrequency())
if hasattr(_glfw, 'glfwSetJoystickCallback'):
    _joystick_callback = None
    _glfw.glfwSetJoystickCallback.restype = _GLFWjoystickfun
    _glfw.glfwSetJoystickCallback.argtypes = [_GLFWjoystickfun]
    def set_joystick_callback(cbfun):
        """
        Sets the joystick configuration callback and returns the previously
        set callback, if any.
        Wrapper for:
            GLFWjoystickfun glfwSetJoystickCallback(GLFWjoystickfun cbfun);
        """
        global _joystick_callback
        # BUG FIX: the original read _error_callback here (a copy/paste
        # from set_error_callback), so the previous *joystick* callback
        # was never returned.
        previous_callback = _joystick_callback
        if cbfun is None:
            cbfun = 0
        c_cbfun = _GLFWjoystickfun(cbfun)
        # Keep both the python callable and its ctypes wrapper alive so the
        # wrapper is not garbage-collected while GLFW still references it.
        _joystick_callback = (cbfun, c_cbfun)
        cbfun = c_cbfun
        _glfw.glfwSetJoystickCallback(cbfun)
        if previous_callback is not None and previous_callback[0] != 0:
            return previous_callback[0]
if hasattr(_glfw, 'glfwGetKeyName'):
    _glfw.glfwGetKeyName.restype = ctypes.c_char_p
    _glfw.glfwGetKeyName.argtypes = [ctypes.c_int, ctypes.c_int]
    def get_key_name(key, scancode):
        """Return the localized name of a printable key, or None.

        Wraps the C function:
            const char* glfwGetKeyName(int key, int scancode);
        """
        name = _glfw.glfwGetKeyName(key, scancode)
        # The C function returns NULL for keys without a printable name.
        return name.decode('utf-8') if name else None
if hasattr(_glfw, 'glfwCreateCursor'):
    _glfw.glfwCreateCursor.restype = ctypes.POINTER(_GLFWcursor)
    _glfw.glfwCreateCursor.argtypes = [ctypes.POINTER(_GLFWimage),
                                       ctypes.c_int,
                                       ctypes.c_int]
    def create_cursor(image, xhot, yhot):
        """Create a custom cursor from an image with the given hot spot.

        Wraps the C function:
            GLFWcursor* glfwCreateCursor(const GLFWimage* image, int xhot, int yhot);
        """
        c_image = _GLFWimage()
        c_image.wrap(image)
        return _glfw.glfwCreateCursor(ctypes.pointer(c_image), xhot, yhot)
if hasattr(_glfw, 'glfwCreateStandardCursor'):
    _glfw.glfwCreateStandardCursor.restype = ctypes.POINTER(_GLFWcursor)
    _glfw.glfwCreateStandardCursor.argtypes = [ctypes.c_int]
    def create_standard_cursor(shape):
        """
        Creates a cursor with one of the standard shapes.
        Wrapper for:
            GLFWcursor* glfwCreateStandardCursor(int shape);
        """
        cursor = _glfw.glfwCreateStandardCursor(shape)
        return cursor
if hasattr(_glfw, 'glfwDestroyCursor'):
    _glfw.glfwDestroyCursor.restype = None
    _glfw.glfwDestroyCursor.argtypes = [ctypes.POINTER(_GLFWcursor)]
    def destroy_cursor(cursor):
        """
        Destroys a cursor previously created with create_cursor or
        create_standard_cursor.
        Wrapper for:
            void glfwDestroyCursor(GLFWcursor* cursor);
        """
        _glfw.glfwDestroyCursor(cursor)
if hasattr(_glfw, 'glfwSetCursor'):
    _glfw.glfwSetCursor.restype = None
    _glfw.glfwSetCursor.argtypes = [ctypes.POINTER(_GLFWwindow),
                                    ctypes.POINTER(_GLFWcursor)]
    def set_cursor(window, cursor):
        """
        Sets the cursor image used while the pointer is over the window.
        Wrapper for:
            void glfwSetCursor(GLFWwindow* window, GLFWcursor* cursor);
        """
        _glfw.glfwSetCursor(window, cursor)
if hasattr(_glfw, 'glfwCreateWindowSurface'):
    _glfw.glfwCreateWindowSurface.restype = ctypes.c_int
    _glfw.glfwCreateWindowSurface.argtypes = [ctypes.c_void_p,
                                              ctypes.POINTER(_GLFWwindow),
                                              ctypes.c_void_p,
                                              ctypes.c_void_p]
    def create_window_surface(instance, window, allocator, surface):
        """
        Creates a Vulkan surface for the specified window and returns
        the VkResult status code.
        Wrapper for:
            VkResult glfwCreateWindowSurface(VkInstance instance, GLFWwindow* window, const VkAllocationCallbacks* allocator, VkSurfaceKHR* surface);
        """
        result = _glfw.glfwCreateWindowSurface(
            instance, window, allocator, surface)
        return result
if hasattr(_glfw, 'glfwGetPhysicalDevicePresentationSupport'):
    _glfw.glfwGetPhysicalDevicePresentationSupport.restype = ctypes.c_int
    _glfw.glfwGetPhysicalDevicePresentationSupport.argtypes = [ctypes.c_void_p,
                                                               ctypes.c_void_p,
                                                               ctypes.c_uint32]
    def get_physical_device_presentation_support(instance, device, queuefamily):
        """
        Returns whether the specified queue family of the physical device
        supports presentation.
        (The previous docstring was copied from create_window_surface.)
        Wrapper for:
            int glfwGetPhysicalDevicePresentationSupport(VkInstance instance, VkPhysicalDevice device, uint32_t queuefamily);
        """
        return _glfw.glfwGetPhysicalDevicePresentationSupport(instance, device, queuefamily)
if hasattr(_glfw, 'glfwGetInstanceProcAddress'):
    _glfw.glfwGetInstanceProcAddress.restype = ctypes.c_void_p
    _glfw.glfwGetInstanceProcAddress.argtypes = [ctypes.c_void_p,
                                                 ctypes.c_char_p]
    def get_instance_proc_address(instance, procname):
        """
        Returns the address of the specified Vulkan instance function.
        Wrapper for:
            GLFWvkproc glfwGetInstanceProcAddress(VkInstance instance, const char* procname);
        """
        address = _glfw.glfwGetInstanceProcAddress(instance, procname)
        return address
if hasattr(_glfw, 'glfwSetWindowIcon'):
    _glfw.glfwSetWindowIcon.restype = None
    _glfw.glfwSetWindowIcon.argtypes = [ctypes.POINTER(_GLFWwindow),
                                        ctypes.c_int,
                                        ctypes.POINTER(_GLFWimage)]
    def set_window_icon(window, count, image):
        """
        Sets the icon for the specified window.
        Wrapper for:
            void glfwSetWindowIcon(GLFWwindow* window, int count, const GLFWimage* images);
        """
        icon_image = _GLFWimage()
        icon_image.wrap(image)
        _glfw.glfwSetWindowIcon(window, count, ctypes.pointer(icon_image))
if hasattr(_glfw, 'glfwSetWindowSizeLimits'):
    _glfw.glfwSetWindowSizeLimits.restype = None
    _glfw.glfwSetWindowSizeLimits.argtypes = [ctypes.POINTER(_GLFWwindow),
                                              ctypes.c_int, ctypes.c_int,
                                              ctypes.c_int, ctypes.c_int]
    def set_window_size_limits(window,
                               minwidth, minheight,
                               maxwidth, maxheight):
        """
        Sets the minimum and maximum content size of the specified window.
        Wrapper for:
            void glfwSetWindowSizeLimits(GLFWwindow* window, int minwidth, int minheight, int maxwidth, int maxheight);
        """
        _glfw.glfwSetWindowSizeLimits(window, minwidth, minheight,
                                      maxwidth, maxheight)
if hasattr(_glfw, 'glfwSetWindowAspectRatio'):
    _glfw.glfwSetWindowAspectRatio.restype = None
    _glfw.glfwSetWindowAspectRatio.argtypes = [ctypes.POINTER(_GLFWwindow),
                                               ctypes.c_int, ctypes.c_int]
    def set_window_aspect_ratio(window, numer, denom):
        """
        Sets the required aspect ratio (numer:denom) of the window's
        content area.
        Wrapper for:
            void glfwSetWindowAspectRatio(GLFWwindow* window, int numer, int denom);
        """
        _glfw.glfwSetWindowAspectRatio(window, numer, denom)
if hasattr(_glfw, 'glfwGetWindowFrameSize'):
    _glfw.glfwGetWindowFrameSize.restype = None
    _glfw.glfwGetWindowFrameSize.argtypes = [ctypes.POINTER(_GLFWwindow),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int),
                                             ctypes.POINTER(ctypes.c_int)]
    def set_get_window_frame_size(window):
        """
        Retrieves the size of the frame of the window.
        Returns a (left, top, right, bottom) tuple of edge sizes.
        Wrapper for:
            void glfwGetWindowFrameSize(GLFWwindow* window, int* left, int* top, int* right, int* bottom);
        """
        left = ctypes.c_int(0)
        top = ctypes.c_int(0)
        right = ctypes.c_int(0)
        bottom = ctypes.c_int(0)
        _glfw.glfwGetWindowFrameSize(window,
                                     ctypes.pointer(left),
                                     ctypes.pointer(top),
                                     ctypes.pointer(right),
                                     ctypes.pointer(bottom))
        return left.value, top.value, right.value, bottom.value
    # The historical name 'set_get_window_frame_size' is a misnomer: the
    # call only reads state. Expose a correctly named alias while keeping
    # the old name for backward compatibility.
    get_window_frame_size = set_get_window_frame_size
if hasattr(_glfw, 'glfwMaximizeWindow'):
    _glfw.glfwMaximizeWindow.restype = None
    _glfw.glfwMaximizeWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
    def maximize_window(window):
        """
        Maximizes the specified window.
        Wrapper for:
            void glfwMaximizeWindow(GLFWwindow* window);
        """
        _glfw.glfwMaximizeWindow(window)
if hasattr(_glfw, 'glfwFocusWindow'):
    _glfw.glfwFocusWindow.restype = None
    _glfw.glfwFocusWindow.argtypes = [ctypes.POINTER(_GLFWwindow)]
    def focus_window(window):
        """
        Brings the specified window to the front and gives it input focus.
        Wrapper for:
            void glfwFocusWindow(GLFWwindow* window);
        """
        _glfw.glfwFocusWindow(window)
if hasattr(_glfw, 'glfwSetWindowMonitor'):
    _glfw.glfwSetWindowMonitor.restype = None
    _glfw.glfwSetWindowMonitor.argtypes = [ctypes.POINTER(_GLFWwindow),
                                           ctypes.POINTER(_GLFWmonitor),
                                           ctypes.c_int,
                                           ctypes.c_int,
                                           ctypes.c_int,
                                           ctypes.c_int,
                                           ctypes.c_int]
    def set_window_monitor(window, monitor, xpos, ypos, width, height,
                           refresh_rate):
        """
        Sets the mode, monitor, video mode and placement of a window.
        Wrapper for:
            void glfwSetWindowMonitor(GLFWwindow* window, GLFWmonitor* monitor, int xpos, int ypos, int width, int height, int refreshRate);
        """
        _glfw.glfwSetWindowMonitor(
            window, monitor, xpos, ypos, width, height, refresh_rate)
if hasattr(_glfw, 'glfwWaitEventsTimeout'):
    _glfw.glfwWaitEventsTimeout.restype = None
    _glfw.glfwWaitEventsTimeout.argtypes = [ctypes.c_double]
    def wait_events_timeout(timeout):
        """
        Waits (up to `timeout` seconds) until events are queued and
        processes them.
        Wrapper for:
            void glfwWaitEventsTimeout(double timeout);
        """
        _glfw.glfwWaitEventsTimeout(timeout)
if hasattr(_glfw, 'glfwPostEmptyEvent'):
    _glfw.glfwPostEmptyEvent.restype = None
    _glfw.glfwPostEmptyEvent.argtypes = []
    def post_empty_event():
        """
        Posts an empty event to the event queue, causing wait_events /
        wait_events_timeout to return.
        Wrapper for:
            void glfwPostEmptyEvent();
        """
        _glfw.glfwPostEmptyEvent()
# Presumably installs ctypes errcheck hooks on the wrapped functions now
# that all of them are declared (defined earlier in this module) — confirm.
_prepare_errcheck()
# ---- boundary between two concatenated source files ----
# Copyright 2012 OpenStack Foundation.
# Copyright 2015 Hewlett-Packard Development Company, L.P.
# Copyright 2017 FUJITSU LIMITED
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import inspect
import itertools
import logging
import re
import time
import urllib.parse as urlparse
import debtcollector.renames
from keystoneauth1 import exceptions as ksa_exc
import requests
from neutronclient._i18n import _
from neutronclient import client
from neutronclient.common import exceptions
from neutronclient.common import extension as client_extension
from neutronclient.common import serializer
from neutronclient.common import utils
# Module-level logger for the neutron client.
_logger = logging.getLogger(__name__)
# Building blocks for matching a canonical 8-4-4-4-12 hexadecimal UUID.
HEX_ELEM = '[0-9A-Fa-f]'
UUID_PATTERN = '-'.join([HEX_ELEM + '{8}', HEX_ELEM + '{4}',
                         HEX_ELEM + '{4}', HEX_ELEM + '{4}',
                         HEX_ELEM + '{12}'])
def exception_handler_v20(status_code, error_content):
    """Exception handler for API v2.0 client.
    This routine generates the appropriate Neutron exception according to
    the contents of the response body.
    :param status_code: HTTP error status code
    :param error_content: deserialized body of error response (expected to
        be a *_WithMeta object carrying request_ids)
    :raises: a subclass of NeutronClientException matched from the error
        type or status code, or NeutronClientException as a fallback
    """
    error_dict = None
    request_ids = error_content.request_ids
    if isinstance(error_content, dict):
        error_dict = error_content.get('NeutronError')
    # Find real error type
    client_exc = None
    if error_dict:
        # The NeutronError payload should contain 'message' and 'type'
        # keys, but be defensive about malformed payloads.
        try:
            error_type = error_dict['type']
            error_message = error_dict['message']
            # BUG FIX: 'detail' is optional; indexing it directly raised
            # KeyError, which discarded the typed message and exception
            # class and fell back to the raw dict repr.
            if error_dict.get('detail'):
                error_message += "\n" + error_dict['detail']
            # If corresponding exception is defined, use it.
            client_exc = getattr(exceptions, '%sClient' % error_type, None)
        except Exception:
            error_message = "%s" % error_dict
    else:
        error_message = None
        if isinstance(error_content, dict):
            error_message = error_content.get('message')
        if not error_message:
            # If we end up here the exception was not a neutron error
            error_message = "%s-%s" % (status_code, error_content)
    # If an exception corresponding to the error type is not found,
    # look up per status-code client exception.
    if not client_exc:
        client_exc = exceptions.HTTP_EXCEPTION_MAP.get(status_code)
    # If there is no exception per status-code,
    # Use NeutronClientException as fallback.
    if not client_exc:
        client_exc = exceptions.NeutronClientException
    raise client_exc(message=error_message,
                     status_code=status_code,
                     request_ids=request_ids)
class _RequestIdMixin(object):
"""Wrapper class to expose x-openstack-request-id to the caller."""
def _request_ids_setup(self):
self._request_ids = []
@property
def request_ids(self):
return self._request_ids
def _append_request_ids(self, resp):
"""Add request_ids as an attribute to the object
:param resp: Response object or list of Response objects
"""
if isinstance(resp, list):
# Add list of request_ids if response is of type list.
for resp_obj in resp:
self._append_request_id(resp_obj)
elif resp is not None:
# Add request_ids if response contains single object.
self._append_request_id(resp)
def _append_request_id(self, resp):
if isinstance(resp, requests.Response):
# Extract 'x-openstack-request-id' from headers if
# response is a Response object.
request_id = resp.headers.get('x-openstack-request-id')
else:
# If resp is of type string.
request_id = resp
if request_id:
self._request_ids.append(request_id)
class _DictWithMeta(dict, _RequestIdMixin):
    """Dict that also carries x-openstack-request-id metadata."""
    def __init__(self, values, resp):
        dict.__init__(self, values)
        self._request_ids_setup()
        self._append_request_ids(resp)
class _TupleWithMeta(tuple, _RequestIdMixin):
    """Tuple that also carries x-openstack-request-id metadata."""
    def __new__(cls, values, resp):
        # tuple contents are fixed at construction time, hence __new__.
        return tuple.__new__(cls, values)

    def __init__(self, values, resp):
        self._request_ids_setup()
        self._append_request_ids(resp)
class _StrWithMeta(str, _RequestIdMixin):
    """String that also carries x-openstack-request-id metadata."""
    def __new__(cls, value, resp):
        # str contents are fixed at construction time, hence __new__.
        return str.__new__(cls, value)

    def __init__(self, values, resp):
        self._request_ids_setup()
        self._append_request_ids(resp)
class _GeneratorWithMeta(_RequestIdMixin):
    """Lazy page iterator that accumulates x-openstack-request-id values."""
    def __init__(self, paginate_func, collection, path, **params):
        self.paginate_func = paginate_func
        self.collection = collection
        self.path = path
        self.params = params
        # Created on first use so construction itself performs no request.
        self.generator = None
        self._request_ids_setup()

    def _paginate(self):
        for page in self.paginate_func(
                self.collection, self.path, **self.params):
            yield page, page.request_ids

    def __iter__(self):
        return self

    # Python 3 compatibility
    def __next__(self):
        return self.next()

    def next(self):
        if self.generator is None:
            self.generator = self._paginate()
        try:
            obj, req_id = next(self.generator)
        except StopIteration:
            raise StopIteration()
        self._append_request_ids(req_id)
        return obj
class ClientBase(object):
    """Client for the OpenStack Neutron v2.0 API.
    :param string username: Username for authentication. (optional)
    :param string user_id: User ID for authentication. (optional)
    :param string password: Password for authentication. (optional)
    :param string token: Token for authentication. (optional)
    :param string tenant_name: DEPRECATED! Use project_name instead.
    :param string project_name: Project name. (optional)
    :param string tenant_id: DEPRECATED! Use project_id instead.
    :param string project_id: Project id. (optional)
    :param string auth_strategy: 'keystone' by default, 'noauth' for no
                                 authentication against keystone. (optional)
    :param string auth_url: Keystone service endpoint for authorization.
    :param string service_type: Network service type to pull from the
                                keystone catalog (e.g. 'network') (optional)
    :param string endpoint_type: Network service endpoint type to pull from the
                                 keystone catalog (e.g. 'publicURL',
                                 'internalURL', or 'adminURL') (optional)
    :param string region_name: Name of a region to select when choosing an
                               endpoint from the service catalog.
    :param string endpoint_url: A user-supplied endpoint URL for the neutron
                                service. Lazy-authentication is possible for
                                API service calls if endpoint is set at
                                instantiation.(optional)
    :param integer timeout: Allows customization of the timeout for client
                            http requests. (optional)
    :param bool insecure: SSL certificate validation. (optional)
    :param bool log_credentials: Allow for logging of passwords or not.
                                 Defaults to False. (optional)
    :param string ca_cert: SSL CA bundle file to use. (optional)
    :param cert: A client certificate to pass to requests. These are of the
                 same form as requests expects. Either a single filename
                 containing both the certificate and key or a tuple containing
                 the path to the certificate then a path to the key. (optional)
    :param integer retries: How many times idempotent (GET, PUT, DELETE)
                            requests to Neutron server should be retried if
                            they fail (default: 0).
    :param bool raise_errors: If True then exceptions caused by connection
                              failure are propagated to the caller.
                              (default: True)
    :param session: Keystone client auth session to use. (optional)
    :param auth: Keystone auth plugin to use. (optional)
    Example::
        from neutronclient.v2_0 import client
        neutron = client.Client(username=USER,
                                password=PASS,
                                project_name=PROJECT_NAME,
                                auth_url=KEYSTONE_URL)
        nets = neutron.list_networks()
        ...
    """
    # API has no way to report plurals, so we have to hard code them
    # This variable should be overridden by a child class.
    EXTED_PLURALS = {}
    @debtcollector.renames.renamed_kwarg(
        'tenant_id', 'project_id', replace=True)
    def __init__(self, **kwargs):
        """Initialize a new client for the Neutron v2.0 API."""
        super(ClientBase, self).__init__()
        self.retries = kwargs.pop('retries', 0)
        self.raise_errors = kwargs.pop('raise_errors', True)
        # Remaining kwargs are forwarded to the HTTP client factory.
        self.httpclient = client.construct_http_client(**kwargs)
        self.version = '2.0'
        self.action_prefix = "/v%s" % (self.version)
        # Seconds to sleep between retry attempts.
        self.retry_interval = 1
    def _handle_fault_response(self, status_code, response_body, resp):
        """Parse an error body and raise the mapped client exception."""
        # Create exception with HTTP status code and message
        _logger.debug("Error message: %s", response_body)
        # Add deserialized error message to exception arguments
        try:
            des_error_body = self.deserialize(response_body, status_code)
        except Exception:
            # If unable to deserialized body it is probably not a
            # Neutron error
            des_error_body = {'message': response_body}
        error_body = self._convert_into_with_meta(des_error_body, resp)
        # Raise the appropriate exception
        exception_handler_v20(status_code, error_body)
    def do_request(self, method, action, body=None, headers=None, params=None):
        """Issue one HTTP request; deserialize 2xx responses, raise otherwise."""
        # Add format and project_id
        action = self.action_prefix + action
        if isinstance(params, dict) and params:
            params = utils.safe_encode_dict(params)
            action += '?' + urlparse.urlencode(params, doseq=1)
        if body:
            body = self.serialize(body)
        resp, replybody = self.httpclient.do_request(action, method, body=body,
                                                     headers=headers)
        status_code = resp.status_code
        if status_code in (requests.codes.ok,
                           requests.codes.created,
                           requests.codes.accepted,
                           requests.codes.no_content):
            data = self.deserialize(replybody, status_code)
            return self._convert_into_with_meta(data, resp)
        else:
            if not replybody:
                # Fall back to the HTTP reason phrase when the body is empty.
                replybody = resp.reason
            self._handle_fault_response(status_code, replybody, resp)
    def get_auth_info(self):
        """Return auth info from the underlying HTTP client."""
        return self.httpclient.get_auth_info()
    def serialize(self, data):
        """Serializes a dictionary into JSON.
        A dictionary with a single key can be passed and it can contain any
        structure.
        """
        if data is None:
            return None
        elif isinstance(data, dict):
            return serializer.Serializer().serialize(data)
        else:
            raise Exception(_("Unable to serialize object of type = '%s'") %
                            type(data))
    def deserialize(self, data, status_code):
        """Deserializes a JSON string into a dictionary."""
        # NOTE(review): status_code is accepted for interface compatibility
        # but is not used by the deserializer.
        if not data:
            return data
        return serializer.Serializer().deserialize(
            data)['body']
    def retry_request(self, method, action, body=None,
                      headers=None, params=None):
        """Call do_request with the default retry configuration.
        Only idempotent requests should retry failed connection attempts.
        :raises: ConnectionFailed if the maximum # of retries is exceeded
        """
        max_attempts = self.retries + 1
        for i in range(max_attempts):
            try:
                return self.do_request(method, action, body=body,
                                       headers=headers, params=params)
            except (exceptions.ConnectionFailed, ksa_exc.ConnectionError):
                # Exception has already been logged by do_request()
                if i < self.retries:
                    _logger.debug('Retrying connection to Neutron service')
                    time.sleep(self.retry_interval)
                elif self.raise_errors:
                    raise
        # Only reached when raise_errors is False.
        if self.retries:
            msg = (_("Failed to connect to Neutron server after %d attempts")
                   % max_attempts)
        else:
            msg = _("Failed to connect Neutron server")
        raise exceptions.ConnectionFailed(reason=msg)
    def delete(self, action, body=None, headers=None, params=None):
        """Issue a DELETE request (retried on connection failure)."""
        return self.retry_request("DELETE", action, body=body,
                                  headers=headers, params=params)
    def get(self, action, body=None, headers=None, params=None):
        """Issue a GET request (retried on connection failure)."""
        return self.retry_request("GET", action, body=body,
                                  headers=headers, params=params)
    def post(self, action, body=None, headers=None, params=None):
        """Issue a POST request (never retried)."""
        # Do not retry POST requests to avoid the orphan objects problem.
        return self.do_request("POST", action, body=body,
                               headers=headers, params=params)
    def put(self, action, body=None, headers=None, params=None):
        """Issue a PUT request (retried on connection failure)."""
        return self.retry_request("PUT", action, body=body,
                                  headers=headers, params=params)
    def list(self, collection, path, retrieve_all=True, **params):
        """List a collection, transparently following server pagination.
        With retrieve_all=True all pages are fetched eagerly and merged;
        otherwise a lazy generator over the pages is returned.
        """
        if retrieve_all:
            res = []
            request_ids = []
            for r in self._pagination(collection, path, **params):
                res.extend(r[collection])
                request_ids.extend(r.request_ids)
            return _DictWithMeta({collection: res}, request_ids)
        else:
            return _GeneratorWithMeta(self._pagination, collection,
                                      path, **params)
    def _pagination(self, collection, path, **params):
        """Yield result pages, following '<collection>_links' rel links."""
        if params.get('page_reverse', False):
            linkrel = 'previous'
        else:
            linkrel = 'next'
        next = True
        while next:
            res = self.get(path, params=params)
            yield res
            next = False
            try:
                for link in res['%s_links' % collection]:
                    if link['rel'] == linkrel:
                        # Continue with the query parameters from the link.
                        query_str = urlparse.urlparse(link['href']).query
                        params = urlparse.parse_qs(query_str)
                        next = True
                        break
            except KeyError:
                # No pagination links in the response: single page.
                break
    def _convert_into_with_meta(self, item, resp):
        """Wrap a deserialized body so it carries request-id metadata."""
        # NOTE(review): a truthy item that is neither dict nor str falls
        # through and returns None — confirm callers never hit that case.
        if item:
            if isinstance(item, dict):
                return _DictWithMeta(item, resp)
            elif isinstance(item, str):
                return _StrWithMeta(item, resp)
        else:
            return _TupleWithMeta((), resp)
    def get_resource_plural(self, resource):
        """Return the plural form of a resource name."""
        for k in self.EXTED_PLURALS:
            if self.EXTED_PLURALS[k] == resource:
                return k
        # Default English pluralization.
        return resource + 's'
    def find_resource_by_id(self, resource, resource_id, cmd_resource=None,
                            parent_id=None, fields=None):
        """Look up a single resource by UUID via the matching list_* call.
        :raises: exceptions.NotFound when nothing matches resource_id
        """
        if not cmd_resource:
            cmd_resource = resource
        cmd_resource_plural = self.get_resource_plural(cmd_resource)
        resource_plural = self.get_resource_plural(resource)
        # TODO(amotoki): Use show_%s instead of list_%s
        obj_lister = getattr(self, "list_%s" % cmd_resource_plural)
        # perform search by id only if we are passing a valid UUID
        match = re.match(UUID_PATTERN, resource_id)
        collection = resource_plural
        if match:
            params = {'id': resource_id}
            if fields:
                params['fields'] = fields
            if parent_id:
                data = obj_lister(parent_id, **params)
            else:
                data = obj_lister(**params)
            if data and data[collection]:
                return data[collection][0]
        not_found_message = (_("Unable to find %(resource)s with id "
                               "'%(id)s'") %
                             {'resource': resource, 'id': resource_id})
        # 404 is raised by exceptions.NotFound to simulate serverside behavior
        raise exceptions.NotFound(message=not_found_message)
    def _find_resource_by_name(self, resource, name, project_id=None,
                               cmd_resource=None, parent_id=None, fields=None):
        """Look up a single resource by (unique) name.
        :raises: exceptions.NeutronClientNoUniqueMatch on several matches,
            exceptions.NotFound on none
        """
        if not cmd_resource:
            cmd_resource = resource
        cmd_resource_plural = self.get_resource_plural(cmd_resource)
        resource_plural = self.get_resource_plural(resource)
        obj_lister = getattr(self, "list_%s" % cmd_resource_plural)
        params = {'name': name}
        if fields:
            params['fields'] = fields
        if project_id:
            params['tenant_id'] = project_id
        if parent_id:
            data = obj_lister(parent_id, **params)
        else:
            data = obj_lister(**params)
        collection = resource_plural
        info = data[collection]
        if len(info) > 1:
            raise exceptions.NeutronClientNoUniqueMatch(resource=resource,
                                                        name=name)
        elif len(info) == 0:
            not_found_message = (_("Unable to find %(resource)s with name "
                                   "'%(name)s'") %
                                 {'resource': resource, 'name': name})
            # 404 is raised by exceptions.NotFound
            # to simulate serverside behavior
            raise exceptions.NotFound(message=not_found_message)
        else:
            return info[0]
    def find_resource(self, resource, name_or_id, project_id=None,
                      cmd_resource=None, parent_id=None, fields=None):
        """Look up a resource, first by id, then by name."""
        try:
            return self.find_resource_by_id(resource, name_or_id,
                                            cmd_resource, parent_id, fields)
        except exceptions.NotFound:
            try:
                return self._find_resource_by_name(
                    resource, name_or_id, project_id,
                    cmd_resource, parent_id, fields)
            except exceptions.NotFound:
                not_found_message = (_("Unable to find %(resource)s with name "
                                       "or id '%(name_or_id)s'") %
                                     {'resource': resource,
                                      'name_or_id': name_or_id})
                raise exceptions.NotFound(
                    message=not_found_message)
class Client(ClientBase):
    """Concrete Neutron v2.0 client: URI templates for every API resource.

    Each *_path attribute is a URI template ('%s' placeholders are filled
    with resource ids) used by the CRUD helper methods of this class.
    """
    # -- core resources --
    networks_path = "/networks"
    network_path = "/networks/%s"
    ports_path = "/ports"
    port_path = "/ports/%s"
    port_bindings_path = "/ports/%s/bindings"
    port_binding_path = "/ports/%s/bindings/%s"
    port_binding_path_activate = "/ports/%s/bindings/%s/activate"
    subnets_path = "/subnets"
    subnet_path = "/subnets/%s"
    onboard_network_subnets_path = "/subnetpools/%s/onboard_network_subnets"
    subnetpools_path = "/subnetpools"
    subnetpool_path = "/subnetpools/%s"
    address_scopes_path = "/address-scopes"
    address_scope_path = "/address-scopes/%s"
    quotas_path = "/quotas"
    quota_path = "/quotas/%s"
    quota_default_path = "/quotas/%s/default"
    quota_details_path = "/quotas/%s/details.json"
    extensions_path = "/extensions"
    extension_path = "/extensions/%s"
    routers_path = "/routers"
    router_path = "/routers/%s"
    floatingips_path = "/floatingips"
    floatingip_path = "/floatingips/%s"
    security_groups_path = "/security-groups"
    security_group_path = "/security-groups/%s"
    security_group_rules_path = "/security-group-rules"
    security_group_rule_path = "/security-group-rules/%s"
    segments_path = "/segments"
    segment_path = "/segments/%s"
    # -- service function chaining (SFC) --
    sfc_flow_classifiers_path = "/sfc/flow_classifiers"
    sfc_flow_classifier_path = "/sfc/flow_classifiers/%s"
    sfc_port_pairs_path = "/sfc/port_pairs"
    sfc_port_pair_path = "/sfc/port_pairs/%s"
    sfc_port_pair_groups_path = "/sfc/port_pair_groups"
    sfc_port_pair_group_path = "/sfc/port_pair_groups/%s"
    sfc_port_chains_path = "/sfc/port_chains"
    sfc_port_chain_path = "/sfc/port_chains/%s"
    sfc_service_graphs_path = "/sfc/service_graphs"
    sfc_service_graph_path = "/sfc/service_graphs/%s"
    # -- VPNaaS --
    endpoint_groups_path = "/vpn/endpoint-groups"
    endpoint_group_path = "/vpn/endpoint-groups/%s"
    vpnservices_path = "/vpn/vpnservices"
    vpnservice_path = "/vpn/vpnservices/%s"
    ipsecpolicies_path = "/vpn/ipsecpolicies"
    ipsecpolicy_path = "/vpn/ipsecpolicies/%s"
    ikepolicies_path = "/vpn/ikepolicies"
    ikepolicy_path = "/vpn/ikepolicies/%s"
    ipsec_site_connections_path = "/vpn/ipsec-site-connections"
    ipsec_site_connection_path = "/vpn/ipsec-site-connections/%s"
    # -- LBaaS v2 --
    lbaas_loadbalancers_path = "/lbaas/loadbalancers"
    lbaas_loadbalancer_path = "/lbaas/loadbalancers/%s"
    lbaas_loadbalancer_path_stats = "/lbaas/loadbalancers/%s/stats"
    lbaas_loadbalancer_path_status = "/lbaas/loadbalancers/%s/statuses"
    lbaas_listeners_path = "/lbaas/listeners"
    lbaas_listener_path = "/lbaas/listeners/%s"
    lbaas_l7policies_path = "/lbaas/l7policies"
    lbaas_l7policy_path = lbaas_l7policies_path + "/%s"
    lbaas_l7rules_path = lbaas_l7policy_path + "/rules"
    lbaas_l7rule_path = lbaas_l7rules_path + "/%s"
    lbaas_pools_path = "/lbaas/pools"
    lbaas_pool_path = "/lbaas/pools/%s"
    lbaas_healthmonitors_path = "/lbaas/healthmonitors"
    lbaas_healthmonitor_path = "/lbaas/healthmonitors/%s"
    lbaas_members_path = lbaas_pool_path + "/members"
    lbaas_member_path = lbaas_pool_path + "/members/%s"
    # -- legacy LBaaS v1 --
    vips_path = "/lb/vips"
    vip_path = "/lb/vips/%s"
    pools_path = "/lb/pools"
    pool_path = "/lb/pools/%s"
    pool_path_stats = "/lb/pools/%s/stats"
    members_path = "/lb/members"
    member_path = "/lb/members/%s"
    health_monitors_path = "/lb/health_monitors"
    health_monitor_path = "/lb/health_monitors/%s"
    associate_pool_health_monitors_path = "/lb/pools/%s/health_monitors"
    disassociate_pool_health_monitors_path = (
        "/lb/pools/%(pool)s/health_monitors/%(health_monitor)s")
    qos_queues_path = "/qos-queues"
    qos_queue_path = "/qos-queues/%s"
    # -- agents and scheduling --
    agents_path = "/agents"
    agent_path = "/agents/%s"
    network_gateways_path = "/network-gateways"
    network_gateway_path = "/network-gateways/%s"
    gateway_devices_path = "/gateway-devices"
    gateway_device_path = "/gateway-devices/%s"
    service_providers_path = "/service-providers"
    metering_labels_path = "/metering/metering-labels"
    metering_label_path = "/metering/metering-labels/%s"
    metering_label_rules_path = "/metering/metering-label-rules"
    metering_label_rule_path = "/metering/metering-label-rules/%s"
    # Sub-paths appended to agent/resource URIs for scheduler operations.
    DHCP_NETS = '/dhcp-networks'
    DHCP_AGENTS = '/dhcp-agents'
    L3_ROUTERS = '/l3-routers'
    L3_AGENTS = '/l3-agents'
    LOADBALANCER_POOLS = '/loadbalancer-pools'
    LOADBALANCER_AGENT = '/loadbalancer-agent'
    AGENT_LOADBALANCERS = '/agent-loadbalancers'
    LOADBALANCER_HOSTING_AGENT = '/loadbalancer-hosting-agent'
    # -- FWaaS (v1 and v2) --
    firewall_rules_path = "/fw/firewall_rules"
    firewall_rule_path = "/fw/firewall_rules/%s"
    firewall_policies_path = "/fw/firewall_policies"
    firewall_policy_path = "/fw/firewall_policies/%s"
    firewall_policy_insert_path = "/fw/firewall_policies/%s/insert_rule"
    firewall_policy_remove_path = "/fw/firewall_policies/%s/remove_rule"
    firewalls_path = "/fw/firewalls"
    firewall_path = "/fw/firewalls/%s"
    fwaas_firewall_groups_path = "/fwaas/firewall_groups"
    fwaas_firewall_group_path = "/fwaas/firewall_groups/%s"
    fwaas_firewall_rules_path = "/fwaas/firewall_rules"
    fwaas_firewall_rule_path = "/fwaas/firewall_rules/%s"
    fwaas_firewall_policies_path = "/fwaas/firewall_policies"
    fwaas_firewall_policy_path = "/fwaas/firewall_policies/%s"
    fwaas_firewall_policy_insert_path = \
        "/fwaas/firewall_policies/%s/insert_rule"
    fwaas_firewall_policy_remove_path = \
        "/fwaas/firewall_policies/%s/remove_rule"
    rbac_policies_path = "/rbac-policies"
    rbac_policy_path = "/rbac-policies/%s"
    # -- QoS --
    qos_policies_path = "/qos/policies"
    qos_policy_path = "/qos/policies/%s"
    qos_bandwidth_limit_rules_path = "/qos/policies/%s/bandwidth_limit_rules"
    qos_bandwidth_limit_rule_path = "/qos/policies/%s/bandwidth_limit_rules/%s"
    qos_packet_rate_limit_rules_path = \
        "/qos/policies/%s/packet_rate_limit_rules"
    qos_packet_rate_limit_rule_path = \
        "/qos/policies/%s/packet_rate_limit_rules/%s"
    qos_dscp_marking_rules_path = "/qos/policies/%s/dscp_marking_rules"
    qos_dscp_marking_rule_path = "/qos/policies/%s/dscp_marking_rules/%s"
    qos_minimum_bandwidth_rules_path = \
        "/qos/policies/%s/minimum_bandwidth_rules"
    qos_minimum_bandwidth_rule_path = \
        "/qos/policies/%s/minimum_bandwidth_rules/%s"
    qos_minimum_packet_rate_rules_path = \
        "/qos/policies/%s/minimum_packet_rate_rules"
    qos_minimum_packet_rate_rule_path = \
        "/qos/policies/%s/minimum_packet_rate_rules/%s"
    qos_rule_types_path = "/qos/rule-types"
    qos_rule_type_path = "/qos/rule-types/%s"
    flavors_path = "/flavors"
    flavor_path = "/flavors/%s"
    service_profiles_path = "/service_profiles"
    service_profile_path = "/service_profiles/%s"
    flavor_profile_bindings_path = flavor_path + service_profiles_path
    flavor_profile_binding_path = flavor_path + service_profile_path
    availability_zones_path = "/availability_zones"
    auto_allocated_topology_path = "/auto-allocated-topology/%s"
    # -- BGP dynamic routing --
    BGP_DRINSTANCES = "/bgp-drinstances"
    BGP_DRINSTANCE = "/bgp-drinstance/%s"
    BGP_DRAGENTS = "/bgp-dragents"
    BGP_DRAGENT = "/bgp-dragents/%s"
    bgp_speakers_path = "/bgp-speakers"
    bgp_speaker_path = "/bgp-speakers/%s"
    bgp_peers_path = "/bgp-peers"
    bgp_peer_path = "/bgp-peers/%s"
    network_ip_availabilities_path = '/network-ip-availabilities'
    network_ip_availability_path = '/network-ip-availabilities/%s'
    tags_path = "/%s/%s/tags"
    tag_path = "/%s/%s/tags/%s"
    trunks_path = "/trunks"
    trunk_path = "/trunks/%s"
    subports_path = "/trunks/%s/get_subports"
    subports_add_path = "/trunks/%s/add_subports"
    subports_remove_path = "/trunks/%s/remove_subports"
    # -- BGP VPN interconnection --
    bgpvpns_path = "/bgpvpn/bgpvpns"
    bgpvpn_path = "/bgpvpn/bgpvpns/%s"
    bgpvpn_network_associations_path =\
        "/bgpvpn/bgpvpns/%s/network_associations"
    bgpvpn_network_association_path =\
        "/bgpvpn/bgpvpns/%s/network_associations/%s"
    bgpvpn_router_associations_path = "/bgpvpn/bgpvpns/%s/router_associations"
    bgpvpn_router_association_path =\
        "/bgpvpn/bgpvpns/%s/router_associations/%s"
    bgpvpn_port_associations_path = "/bgpvpn/bgpvpns/%s/port_associations"
    bgpvpn_port_association_path = "/bgpvpn/bgpvpns/%s/port_associations/%s"
    network_logs_path = "/log/logs"
    network_log_path = "/log/logs/%s"
    network_loggables_path = "/log/loggable-resources"
    # API has no way to report plurals, so we have to hard code them
    EXTED_PLURALS = {'routers': 'router',
                     'floatingips': 'floatingip',
                     'service_types': 'service_type',
                     'service_definitions': 'service_definition',
                     'security_groups': 'security_group',
                     'security_group_rules': 'security_group_rule',
                     'segments': 'segment',
                     'ipsecpolicies': 'ipsecpolicy',
                     'ikepolicies': 'ikepolicy',
                     'ipsec_site_connections': 'ipsec_site_connection',
                     'vpnservices': 'vpnservice',
                     'endpoint_groups': 'endpoint_group',
                     'vips': 'vip',
                     'pools': 'pool',
                     'members': 'member',
                     'health_monitors': 'health_monitor',
                     'quotas': 'quota',
                     'service_providers': 'service_provider',
                     'firewall_rules': 'firewall_rule',
                     'firewall_policies': 'firewall_policy',
                     'firewalls': 'firewall',
                     'fwaas_firewall_rules': 'fwaas_firewall_rule',
                     'fwaas_firewall_policies': 'fwaas_firewall_policy',
                     'fwaas_firewall_groups': 'fwaas_firewall_group',
                     'metering_labels': 'metering_label',
                     'metering_label_rules': 'metering_label_rule',
                     'loadbalancers': 'loadbalancer',
                     'listeners': 'listener',
                     'l7rules': 'l7rule',
                     'l7policies': 'l7policy',
                     'lbaas_l7policies': 'lbaas_l7policy',
                     'lbaas_pools': 'lbaas_pool',
                     'lbaas_healthmonitors': 'lbaas_healthmonitor',
                     'lbaas_members': 'lbaas_member',
                     'healthmonitors': 'healthmonitor',
                     'rbac_policies': 'rbac_policy',
                     'address_scopes': 'address_scope',
                     'qos_policies': 'qos_policy',
                     'policies': 'policy',
                     'bandwidth_limit_rules': 'bandwidth_limit_rule',
                     'packet_rate_limit_rules': 'packet_rate_limit_rule',
                     'minimum_bandwidth_rules': 'minimum_bandwidth_rule',
                     'minimum_packet_rate_rules': 'minimum_packet_rate_rule',
                     'rules': 'rule',
                     'dscp_marking_rules': 'dscp_marking_rule',
                     'rule_types': 'rule_type',
                     'flavors': 'flavor',
                     'bgp_speakers': 'bgp_speaker',
                     'bgp_peers': 'bgp_peer',
                     'network_ip_availabilities': 'network_ip_availability',
                     'trunks': 'trunk',
                     'bgpvpns': 'bgpvpn',
                     'network_associations': 'network_association',
                     'router_associations': 'router_association',
                     'port_associations': 'port_association',
                     'flow_classifiers': 'flow_classifier',
                     'port_pairs': 'port_pair',
                     'port_pair_groups': 'port_pair_group',
                     'port_chains': 'port_chain',
                     'service_graphs': 'service_graph',
                     'logs': 'log',
                     'loggable_resources': 'loggable_resource',
                     }
def list_ext(self, collection, path, retrieve_all, **_params):
"""Client extension hook for list."""
return self.list(collection, path, retrieve_all, **_params)
def show_ext(self, path, id, **_params):
"""Client extension hook for show."""
return self.get(path % id, params=_params)
def create_ext(self, path, body=None):
"""Client extension hook for create."""
return self.post(path, body=body)
def update_ext(self, path, id, body=None):
"""Client extension hook for update."""
return self.put(path % id, body=body)
def delete_ext(self, path, id):
"""Client extension hook for delete."""
return self.delete(path % id)
    # Quota operations. The renamed_kwarg decorators below accept the
    # legacy 'tenant_id' keyword as a deprecated alias for 'project_id';
    # with replace=True debtcollector rewrites the keyword and emits a
    # deprecation warning.
    def get_quotas_tenant(self, **_params):
        """Fetch project info for following quota operation."""
        return self.get(self.quota_path % 'tenant', params=_params)
    def list_quotas(self, **_params):
        """Fetch all projects' quotas."""
        return self.get(self.quotas_path, params=_params)
    @debtcollector.renames.renamed_kwarg(
        'tenant_id', 'project_id', replace=True)
    def show_quota(self, project_id, **_params):
        """Fetch information of a certain project's quotas."""
        return self.get(self.quota_path % (project_id), params=_params)
    @debtcollector.renames.renamed_kwarg(
        'tenant_id', 'project_id', replace=True)
    def show_quota_details(self, project_id, **_params):
        """Fetch information of a certain project's quota details."""
        return self.get(self.quota_details_path % (project_id),
                        params=_params)
    @debtcollector.renames.renamed_kwarg(
        'tenant_id', 'project_id', replace=True)
    def show_quota_default(self, project_id, **_params):
        """Fetch information of a certain project's default quotas."""
        return self.get(self.quota_default_path % (project_id), params=_params)
    @debtcollector.renames.renamed_kwarg(
        'tenant_id', 'project_id', replace=True)
    def update_quota(self, project_id, body=None):
        """Update a project's quotas."""
        return self.put(self.quota_path % (project_id), body=body)
    @debtcollector.renames.renamed_kwarg(
        'tenant_id', 'project_id', replace=True)
    def delete_quota(self, project_id):
        """Delete the specified project's quota values."""
        return self.delete(self.quota_path % (project_id))
    # API-extension discovery.
    def list_extensions(self, **_params):
        """Fetch a list of all extensions on server side."""
        return self.get(self.extensions_path, params=_params)
    def show_extension(self, ext_alias, **_params):
        """Fetches information of a certain extension."""
        return self.get(self.extension_path % ext_alias, params=_params)
    # Port and port-binding operations.
    def list_ports(self, retrieve_all=True, **_params):
        """Fetches a list of all ports for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('ports', self.ports_path, retrieve_all,
                         **_params)
    def show_port(self, port, **_params):
        """Fetches information of a certain port."""
        return self.get(self.port_path % (port), params=_params)
    def create_port(self, body=None):
        """Creates a new port."""
        return self.post(self.ports_path, body=body)
    def update_port(self, port, body=None, revision_number=None):
        """Updates a port."""
        # revision_number is forwarded to _update_resource (defined
        # elsewhere); presumably used for revision-matched concurrency
        # control -- confirm against _update_resource.
        return self._update_resource(self.port_path % (port), body=body,
                                     revision_number=revision_number)
    def delete_port(self, port):
        """Deletes the specified port."""
        return self.delete(self.port_path % (port))
    def create_port_binding(self, port_id, body=None):
        """Creates a new port binding."""
        return self.post(self.port_bindings_path % port_id, body=body)
    def delete_port_binding(self, port_id, host_id):
        """Deletes the specified port binding."""
        return self.delete(self.port_binding_path % (port_id, host_id))
    def show_port_binding(self, port_id, host_id, **_params):
        """Fetches information for a certain port binding."""
        return self.get(self.port_binding_path % (port_id, host_id),
                        params=_params)
    def list_port_bindings(self, port_id, retrieve_all=True, **_params):
        """Fetches a list of all bindings for a certain port."""
        return self.list('port_bindings', self.port_bindings_path % port_id,
                         retrieve_all, **_params)
    def activate_port_binding(self, port_id, host_id):
        """Activates a port binding."""
        return self.put(self.port_binding_path_activate % (port_id, host_id))
    # Network operations.
    def list_networks(self, retrieve_all=True, **_params):
        """Fetches a list of all networks for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('networks', self.networks_path, retrieve_all,
                         **_params)
    def show_network(self, network, **_params):
        """Fetches information of a certain network."""
        return self.get(self.network_path % (network), params=_params)
    def create_network(self, body=None):
        """Creates a new network."""
        return self.post(self.networks_path, body=body)
    def update_network(self, network, body=None, revision_number=None):
        """Updates a network."""
        return self._update_resource(self.network_path % (network), body=body,
                                     revision_number=revision_number)
    def delete_network(self, network):
        """Deletes the specified network."""
        return self.delete(self.network_path % (network))
    # Subnet and subnetpool operations.
    def list_subnets(self, retrieve_all=True, **_params):
        """Fetches a list of all subnets for a project."""
        return self.list('subnets', self.subnets_path, retrieve_all,
                         **_params)
    def show_subnet(self, subnet, **_params):
        """Fetches information of a certain subnet."""
        return self.get(self.subnet_path % (subnet), params=_params)
    def create_subnet(self, body=None):
        """Creates a new subnet."""
        return self.post(self.subnets_path, body=body)
    def update_subnet(self, subnet, body=None, revision_number=None):
        """Updates a subnet."""
        return self._update_resource(self.subnet_path % (subnet), body=body,
                                     revision_number=revision_number)
    def delete_subnet(self, subnet):
        """Deletes the specified subnet."""
        return self.delete(self.subnet_path % (subnet))
    def list_subnetpools(self, retrieve_all=True, **_params):
        """Fetches a list of all subnetpools for a project."""
        return self.list('subnetpools', self.subnetpools_path, retrieve_all,
                         **_params)
    def show_subnetpool(self, subnetpool, **_params):
        """Fetches information of a certain subnetpool."""
        return self.get(self.subnetpool_path % (subnetpool), params=_params)
    def create_subnetpool(self, body=None):
        """Creates a new subnetpool."""
        return self.post(self.subnetpools_path, body=body)
    def update_subnetpool(self, subnetpool, body=None, revision_number=None):
        """Updates a subnetpool."""
        return self._update_resource(self.subnetpool_path % (subnetpool),
                                     body=body,
                                     revision_number=revision_number)
    def delete_subnetpool(self, subnetpool):
        """Deletes the specified subnetpool."""
        return self.delete(self.subnetpool_path % (subnetpool))
    # Router CRUD operations.
    def list_routers(self, retrieve_all=True, **_params):
        """Fetches a list of all routers for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('routers', self.routers_path, retrieve_all,
                         **_params)
    def show_router(self, router, **_params):
        """Fetches information of a certain router."""
        return self.get(self.router_path % (router), params=_params)
    def create_router(self, body=None):
        """Creates a new router."""
        return self.post(self.routers_path, body=body)
    def update_router(self, router, body=None, revision_number=None):
        """Updates a router."""
        return self._update_resource(self.router_path % (router), body=body,
                                     revision_number=revision_number)
    def delete_router(self, router):
        """Deletes the specified router."""
        return self.delete(self.router_path % (router))
    # Address-scope operations.
    def list_address_scopes(self, retrieve_all=True, **_params):
        """Fetches a list of all address scopes for a project."""
        return self.list('address_scopes', self.address_scopes_path,
                         retrieve_all, **_params)
    def show_address_scope(self, address_scope, **_params):
        """Fetches information of a certain address scope."""
        return self.get(self.address_scope_path % (address_scope),
                        params=_params)
    def create_address_scope(self, body=None):
        """Creates a new address scope."""
        return self.post(self.address_scopes_path, body=body)
    def update_address_scope(self, address_scope, body=None):
        """Updates an address scope."""
        # NOTE(review): unlike update_network/update_port this issues a
        # plain PUT with no revision_number support.
        return self.put(self.address_scope_path % (address_scope), body=body)
    def delete_address_scope(self, address_scope):
        """Deletes the specified address scope."""
        return self.delete(self.address_scope_path % (address_scope))
def add_interface_router(self, router, body=None):
"""Adds an internal network interface to the specified router."""
return self.put((self.router_path % router) + "/add_router_interface",
body=body)
def remove_interface_router(self, router, body=None):
"""Removes an internal network interface from the specified router."""
return self.put((self.router_path % router) +
"/remove_router_interface", body=body)
def add_extra_routes_to_router(self, router, body=None):
"""Adds extra routes to the specified router."""
return self.put((self.router_path % router) + "/add_extraroutes",
body=body)
def remove_extra_routes_from_router(self, router, body=None):
"""Removes extra routes from the specified router."""
return self.put((self.router_path % router) + "/remove_extraroutes",
body=body)
def add_gateway_router(self, router, body=None):
"""Adds an external network gateway to the specified router."""
return self.put((self.router_path % router),
body={'router': {'external_gateway_info': body}})
def remove_gateway_router(self, router):
"""Removes an external network gateway from the specified router."""
return self.put((self.router_path % router),
body={'router': {'external_gateway_info': {}}})
    # Floating-IP operations.
    def list_floatingips(self, retrieve_all=True, **_params):
        """Fetches a list of all floatingips for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('floatingips', self.floatingips_path, retrieve_all,
                         **_params)
    def show_floatingip(self, floatingip, **_params):
        """Fetches information of a certain floatingip."""
        return self.get(self.floatingip_path % (floatingip), params=_params)
    def create_floatingip(self, body=None):
        """Creates a new floatingip."""
        return self.post(self.floatingips_path, body=body)
    def update_floatingip(self, floatingip, body=None, revision_number=None):
        """Updates a floatingip."""
        return self._update_resource(self.floatingip_path % (floatingip),
                                     body=body,
                                     revision_number=revision_number)
    def delete_floatingip(self, floatingip):
        """Deletes the specified floatingip."""
        return self.delete(self.floatingip_path % (floatingip))
    # Security-group and security-group-rule operations. Note that rules
    # are create/delete/show/list only -- the API has no rule update.
    def create_security_group(self, body=None):
        """Creates a new security group."""
        return self.post(self.security_groups_path, body=body)
    def update_security_group(self, security_group, body=None,
                              revision_number=None):
        """Updates a security group."""
        return self._update_resource(self.security_group_path %
                                     security_group, body=body,
                                     revision_number=revision_number)
    def list_security_groups(self, retrieve_all=True, **_params):
        """Fetches a list of all security groups for a project."""
        return self.list('security_groups', self.security_groups_path,
                         retrieve_all, **_params)
    def show_security_group(self, security_group, **_params):
        """Fetches information of a certain security group."""
        return self.get(self.security_group_path % (security_group),
                        params=_params)
    def delete_security_group(self, security_group):
        """Deletes the specified security group."""
        return self.delete(self.security_group_path % (security_group))
    def create_security_group_rule(self, body=None):
        """Creates a new security group rule."""
        return self.post(self.security_group_rules_path, body=body)
    def delete_security_group_rule(self, security_group_rule):
        """Deletes the specified security group rule."""
        return self.delete(self.security_group_rule_path %
                           (security_group_rule))
    def list_security_group_rules(self, retrieve_all=True, **_params):
        """Fetches a list of all security group rules for a project."""
        return self.list('security_group_rules',
                         self.security_group_rules_path,
                         retrieve_all, **_params)
    def show_security_group_rule(self, security_group_rule, **_params):
        """Fetches information of a certain security group rule."""
        return self.get(self.security_group_rule_path % (security_group_rule),
                        params=_params)
    # Network-segment operations.
    def create_segment(self, body=None):
        """Creates a new segment."""
        return self.post(self.segments_path, body=body)
    def update_segment(self, segment, body=None, revision_number=None):
        """Updates a segment."""
        return self._update_resource(self.segment_path % segment, body=body,
                                     revision_number=revision_number)
    def list_segments(self, retrieve_all=True, **_params):
        """Fetches a list of all segments for a project."""
        return self.list('segments', self.segments_path, retrieve_all,
                         **_params)
    def show_segment(self, segment, **_params):
        """Fetches information of a certain segment."""
        return self.get(self.segment_path % segment, params=_params)
    def delete_segment(self, segment):
        """Deletes the specified segment."""
        return self.delete(self.segment_path % segment)
    # VPN endpoint-group operations.
    def list_endpoint_groups(self, retrieve_all=True, **_params):
        """Fetches a list of all VPN endpoint groups for a project."""
        return self.list('endpoint_groups', self.endpoint_groups_path,
                         retrieve_all, **_params)
    def show_endpoint_group(self, endpointgroup, **_params):
        """Fetches information for a specific VPN endpoint group."""
        return self.get(self.endpoint_group_path % endpointgroup,
                        params=_params)
    def create_endpoint_group(self, body=None):
        """Creates a new VPN endpoint group."""
        return self.post(self.endpoint_groups_path, body=body)
    def update_endpoint_group(self, endpoint_group, body=None):
        """Updates a VPN endpoint group."""
        return self.put(self.endpoint_group_path % endpoint_group, body=body)
    def delete_endpoint_group(self, endpoint_group):
        """Deletes the specified VPN endpoint group."""
        return self.delete(self.endpoint_group_path % endpoint_group)
    # VPN-service operations.
    def list_vpnservices(self, retrieve_all=True, **_params):
        """Fetches a list of all configured VPN services for a project."""
        return self.list('vpnservices', self.vpnservices_path, retrieve_all,
                         **_params)
    def show_vpnservice(self, vpnservice, **_params):
        """Fetches information of a specific VPN service."""
        return self.get(self.vpnservice_path % (vpnservice), params=_params)
    def create_vpnservice(self, body=None):
        """Creates a new VPN service."""
        return self.post(self.vpnservices_path, body=body)
    def update_vpnservice(self, vpnservice, body=None):
        """Updates a VPN service."""
        return self.put(self.vpnservice_path % (vpnservice), body=body)
    def delete_vpnservice(self, vpnservice):
        """Deletes the specified VPN service."""
        return self.delete(self.vpnservice_path % (vpnservice))
    # IPsec site-connection operations.
    def list_ipsec_site_connections(self, retrieve_all=True, **_params):
        """Fetches all configured IPsecSiteConnections for a project."""
        return self.list('ipsec_site_connections',
                         self.ipsec_site_connections_path,
                         retrieve_all,
                         **_params)
    def show_ipsec_site_connection(self, ipsecsite_conn, **_params):
        """Fetches information of a specific IPsecSiteConnection."""
        return self.get(
            self.ipsec_site_connection_path % (ipsecsite_conn), params=_params
        )
    def create_ipsec_site_connection(self, body=None):
        """Creates a new IPsecSiteConnection."""
        return self.post(self.ipsec_site_connections_path, body=body)
    def update_ipsec_site_connection(self, ipsecsite_conn, body=None):
        """Updates an IPsecSiteConnection."""
        return self.put(
            self.ipsec_site_connection_path % (ipsecsite_conn), body=body
        )
    def delete_ipsec_site_connection(self, ipsecsite_conn):
        """Deletes the specified IPsecSiteConnection."""
        return self.delete(self.ipsec_site_connection_path % (ipsecsite_conn))
    # IKE-policy and IPsec-policy operations.
    def list_ikepolicies(self, retrieve_all=True, **_params):
        """Fetches a list of all configured IKEPolicies for a project."""
        return self.list('ikepolicies', self.ikepolicies_path, retrieve_all,
                         **_params)
    def show_ikepolicy(self, ikepolicy, **_params):
        """Fetches information of a specific IKEPolicy."""
        return self.get(self.ikepolicy_path % (ikepolicy), params=_params)
    def create_ikepolicy(self, body=None):
        """Creates a new IKEPolicy."""
        return self.post(self.ikepolicies_path, body=body)
    def update_ikepolicy(self, ikepolicy, body=None):
        """Updates an IKEPolicy."""
        return self.put(self.ikepolicy_path % (ikepolicy), body=body)
    def delete_ikepolicy(self, ikepolicy):
        """Deletes the specified IKEPolicy."""
        return self.delete(self.ikepolicy_path % (ikepolicy))
    def list_ipsecpolicies(self, retrieve_all=True, **_params):
        """Fetches a list of all configured IPsecPolicies for a project."""
        return self.list('ipsecpolicies',
                         self.ipsecpolicies_path,
                         retrieve_all,
                         **_params)
    def show_ipsecpolicy(self, ipsecpolicy, **_params):
        """Fetches information of a specific IPsecPolicy."""
        return self.get(self.ipsecpolicy_path % (ipsecpolicy), params=_params)
    def create_ipsecpolicy(self, body=None):
        """Creates a new IPsecPolicy."""
        return self.post(self.ipsecpolicies_path, body=body)
    def update_ipsecpolicy(self, ipsecpolicy, body=None):
        """Updates an IPsecPolicy."""
        return self.put(self.ipsecpolicy_path % (ipsecpolicy), body=body)
    def delete_ipsecpolicy(self, ipsecpolicy):
        """Deletes the specified IPsecPolicy."""
        return self.delete(self.ipsecpolicy_path % (ipsecpolicy))
    # LBaaS v2 load-balancer operations.
    def list_loadbalancers(self, retrieve_all=True, **_params):
        """Fetches a list of all loadbalancers for a project."""
        return self.list('loadbalancers', self.lbaas_loadbalancers_path,
                         retrieve_all, **_params)
    def show_loadbalancer(self, lbaas_loadbalancer, **_params):
        """Fetches information for a load balancer."""
        return self.get(self.lbaas_loadbalancer_path % (lbaas_loadbalancer),
                        params=_params)
    def create_loadbalancer(self, body=None):
        """Creates a new load balancer."""
        return self.post(self.lbaas_loadbalancers_path, body=body)
    def update_loadbalancer(self, lbaas_loadbalancer, body=None):
        """Updates a load balancer."""
        return self.put(self.lbaas_loadbalancer_path % (lbaas_loadbalancer),
                        body=body)
    def delete_loadbalancer(self, lbaas_loadbalancer):
        """Deletes the specified load balancer."""
        return self.delete(self.lbaas_loadbalancer_path %
                           (lbaas_loadbalancer))
    def retrieve_loadbalancer_stats(self, loadbalancer, **_params):
        """Retrieves stats for a certain load balancer."""
        return self.get(self.lbaas_loadbalancer_path_stats % (loadbalancer),
                        params=_params)
    def retrieve_loadbalancer_status(self, loadbalancer, **_params):
        """Retrieves status for a certain load balancer."""
        return self.get(self.lbaas_loadbalancer_path_status % (loadbalancer),
                        params=_params)
    # LBaaS v2 listener operations.
    def list_listeners(self, retrieve_all=True, **_params):
        """Fetches a list of all lbaas_listeners for a project."""
        return self.list('listeners', self.lbaas_listeners_path,
                         retrieve_all, **_params)
    def show_listener(self, lbaas_listener, **_params):
        """Fetches information for a lbaas_listener."""
        return self.get(self.lbaas_listener_path % (lbaas_listener),
                        params=_params)
    def create_listener(self, body=None):
        """Creates a new lbaas_listener."""
        return self.post(self.lbaas_listeners_path, body=body)
    def update_listener(self, lbaas_listener, body=None):
        """Updates a lbaas_listener."""
        return self.put(self.lbaas_listener_path % (lbaas_listener),
                        body=body)
    def delete_listener(self, lbaas_listener):
        """Deletes the specified lbaas_listener."""
        return self.delete(self.lbaas_listener_path % (lbaas_listener))
    # LBaaS v2 L7 policy and L7 rule operations. Rules are nested under
    # a policy, so rule paths take (l7policy, l7rule).
    def list_lbaas_l7policies(self, retrieve_all=True, **_params):
        """Fetches a list of all L7 policies for a listener."""
        return self.list('l7policies', self.lbaas_l7policies_path,
                         retrieve_all, **_params)
    def show_lbaas_l7policy(self, l7policy, **_params):
        """Fetches information of a certain listener's L7 policy."""
        return self.get(self.lbaas_l7policy_path % l7policy,
                        params=_params)
    def create_lbaas_l7policy(self, body=None):
        """Creates L7 policy for a certain listener."""
        return self.post(self.lbaas_l7policies_path, body=body)
    def update_lbaas_l7policy(self, l7policy, body=None):
        """Updates L7 policy."""
        return self.put(self.lbaas_l7policy_path % l7policy,
                        body=body)
    def delete_lbaas_l7policy(self, l7policy):
        """Deletes the specified L7 policy."""
        return self.delete(self.lbaas_l7policy_path % l7policy)
    def list_lbaas_l7rules(self, l7policy, retrieve_all=True, **_params):
        """Fetches a list of all rules for L7 policy."""
        return self.list('rules', self.lbaas_l7rules_path % l7policy,
                         retrieve_all, **_params)
    def show_lbaas_l7rule(self, l7rule, l7policy, **_params):
        """Fetches information of a certain L7 policy's rule."""
        return self.get(self.lbaas_l7rule_path % (l7policy, l7rule),
                        params=_params)
    def create_lbaas_l7rule(self, l7policy, body=None):
        """Creates rule for a certain L7 policy."""
        return self.post(self.lbaas_l7rules_path % l7policy, body=body)
    def update_lbaas_l7rule(self, l7rule, l7policy, body=None):
        """Updates L7 rule."""
        return self.put(self.lbaas_l7rule_path % (l7policy, l7rule),
                        body=body)
    def delete_lbaas_l7rule(self, l7rule, l7policy):
        """Deletes the specified L7 rule."""
        return self.delete(self.lbaas_l7rule_path % (l7policy, l7rule))
    # LBaaS v2 pool and health-monitor operations.
    def list_lbaas_pools(self, retrieve_all=True, **_params):
        """Fetches a list of all lbaas_pools for a project."""
        return self.list('pools', self.lbaas_pools_path,
                         retrieve_all, **_params)
    def show_lbaas_pool(self, lbaas_pool, **_params):
        """Fetches information for a lbaas_pool."""
        return self.get(self.lbaas_pool_path % (lbaas_pool),
                        params=_params)
    def create_lbaas_pool(self, body=None):
        """Creates a new lbaas_pool."""
        return self.post(self.lbaas_pools_path, body=body)
    def update_lbaas_pool(self, lbaas_pool, body=None):
        """Updates a lbaas_pool."""
        return self.put(self.lbaas_pool_path % (lbaas_pool),
                        body=body)
    def delete_lbaas_pool(self, lbaas_pool):
        """Deletes the specified lbaas_pool."""
        return self.delete(self.lbaas_pool_path % (lbaas_pool))
    def list_lbaas_healthmonitors(self, retrieve_all=True, **_params):
        """Fetches a list of all lbaas_healthmonitors for a project."""
        return self.list('healthmonitors', self.lbaas_healthmonitors_path,
                         retrieve_all, **_params)
    def show_lbaas_healthmonitor(self, lbaas_healthmonitor, **_params):
        """Fetches information for a lbaas_healthmonitor."""
        return self.get(self.lbaas_healthmonitor_path % (lbaas_healthmonitor),
                        params=_params)
    def create_lbaas_healthmonitor(self, body=None):
        """Creates a new lbaas_healthmonitor."""
        return self.post(self.lbaas_healthmonitors_path, body=body)
    def update_lbaas_healthmonitor(self, lbaas_healthmonitor, body=None):
        """Updates a lbaas_healthmonitor."""
        return self.put(self.lbaas_healthmonitor_path % (lbaas_healthmonitor),
                        body=body)
    def delete_lbaas_healthmonitor(self, lbaas_healthmonitor):
        """Deletes the specified lbaas_healthmonitor."""
        return self.delete(self.lbaas_healthmonitor_path %
                           (lbaas_healthmonitor))
    def list_lbaas_loadbalancers(self, retrieve_all=True, **_params):
        """Fetches a list of all lbaas_loadbalancers for a project."""
        # Identical call to list_loadbalancers above; kept as a
        # compatibility alias.
        return self.list('loadbalancers', self.lbaas_loadbalancers_path,
                         retrieve_all, **_params)
    # LBaaS v2 member operations. Members are nested under a pool, so
    # member paths take (lbaas_pool, lbaas_member).
    def list_lbaas_members(self, lbaas_pool, retrieve_all=True, **_params):
        """Fetches a list of all lbaas_members for a project."""
        return self.list('members', self.lbaas_members_path % lbaas_pool,
                         retrieve_all, **_params)
    def show_lbaas_member(self, lbaas_member, lbaas_pool, **_params):
        """Fetches information of a certain lbaas_member."""
        return self.get(self.lbaas_member_path % (lbaas_pool, lbaas_member),
                        params=_params)
    def create_lbaas_member(self, lbaas_pool, body=None):
        """Creates a lbaas_member."""
        return self.post(self.lbaas_members_path % lbaas_pool, body=body)
    def update_lbaas_member(self, lbaas_member, lbaas_pool, body=None):
        """Updates a lbaas_member."""
        return self.put(self.lbaas_member_path % (lbaas_pool, lbaas_member),
                        body=body)
    def delete_lbaas_member(self, lbaas_member, lbaas_pool):
        """Deletes the specified lbaas_member."""
        return self.delete(self.lbaas_member_path % (lbaas_pool, lbaas_member))
    # Legacy LBaaS v1 vip and pool operations.
    def list_vips(self, retrieve_all=True, **_params):
        """Fetches a list of all load balancer vips for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('vips', self.vips_path, retrieve_all,
                         **_params)
    def show_vip(self, vip, **_params):
        """Fetches information of a certain load balancer vip."""
        return self.get(self.vip_path % (vip), params=_params)
    def create_vip(self, body=None):
        """Creates a new load balancer vip."""
        return self.post(self.vips_path, body=body)
    def update_vip(self, vip, body=None):
        """Updates a load balancer vip."""
        return self.put(self.vip_path % (vip), body=body)
    def delete_vip(self, vip):
        """Deletes the specified load balancer vip."""
        return self.delete(self.vip_path % (vip))
    def list_pools(self, retrieve_all=True, **_params):
        """Fetches a list of all load balancer pools for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('pools', self.pools_path, retrieve_all,
                         **_params)
    def show_pool(self, pool, **_params):
        """Fetches information of a certain load balancer pool."""
        return self.get(self.pool_path % (pool), params=_params)
    def create_pool(self, body=None):
        """Creates a new load balancer pool."""
        return self.post(self.pools_path, body=body)
    def update_pool(self, pool, body=None):
        """Updates a load balancer pool."""
        return self.put(self.pool_path % (pool), body=body)
    def delete_pool(self, pool):
        """Deletes the specified load balancer pool."""
        return self.delete(self.pool_path % (pool))
    def retrieve_pool_stats(self, pool, **_params):
        """Retrieves stats for a certain load balancer pool."""
        return self.get(self.pool_path_stats % (pool), params=_params)
    # Legacy LBaaS v1 member and health-monitor operations.
    def list_members(self, retrieve_all=True, **_params):
        """Fetches a list of all load balancer members for a project."""
        # Pass filters in "params" argument to do_request
        return self.list('members', self.members_path, retrieve_all,
                         **_params)
    def show_member(self, member, **_params):
        """Fetches information of a certain load balancer member."""
        return self.get(self.member_path % (member), params=_params)
    def create_member(self, body=None):
        """Creates a new load balancer member."""
        return self.post(self.members_path, body=body)
    def update_member(self, member, body=None):
        """Updates a load balancer member."""
        return self.put(self.member_path % (member), body=body)
    def delete_member(self, member):
        """Deletes the specified load balancer member."""
        return self.delete(self.member_path % (member))
    def list_health_monitors(self, retrieve_all=True, **_params):
        """Fetches a list of all load balancer health monitors for a project.
        """
        # Pass filters in "params" argument to do_request
        return self.list('health_monitors', self.health_monitors_path,
                         retrieve_all, **_params)
    def show_health_monitor(self, health_monitor, **_params):
        """Fetches information of a certain load balancer health monitor."""
        return self.get(self.health_monitor_path % (health_monitor),
                        params=_params)
    def create_health_monitor(self, body=None):
        """Creates a new load balancer health monitor."""
        return self.post(self.health_monitors_path, body=body)
    def update_health_monitor(self, health_monitor, body=None):
        """Updates a load balancer health monitor."""
        return self.put(self.health_monitor_path % (health_monitor), body=body)
    def delete_health_monitor(self, health_monitor):
        """Deletes the specified load balancer health monitor."""
        return self.delete(self.health_monitor_path % (health_monitor))
    def associate_health_monitor(self, pool, body):
        """Associate specified load balancer health monitor and pool."""
        return self.post(self.associate_pool_health_monitors_path % (pool),
                         body=body)
    def disassociate_health_monitor(self, pool, health_monitor):
        """Disassociate specified load balancer health monitor and pool."""
        # This path template uses named substitutions, unlike the
        # positional ones above.
        path = (self.disassociate_pool_health_monitors_path %
                {'pool': pool, 'health_monitor': health_monitor})
        return self.delete(path)
    # QoS queue operations (create/list/show/delete only; no update).
    def create_qos_queue(self, body=None):
        """Creates a new queue."""
        return self.post(self.qos_queues_path, body=body)
    def list_qos_queues(self, **_params):
        """Fetches a list of all queues for a project."""
        return self.get(self.qos_queues_path, params=_params)
    def show_qos_queue(self, queue, **_params):
        """Fetches information of a certain queue."""
        return self.get(self.qos_queue_path % (queue),
                        params=_params)
    def delete_qos_queue(self, queue):
        """Deletes the specified queue."""
        return self.delete(self.qos_queue_path % (queue))
    # Agent operations (agents cannot be created through the API).
    def list_agents(self, **_params):
        """Fetches agents."""
        # Pass filters in "params" argument to do_request
        return self.get(self.agents_path, params=_params)
    def show_agent(self, agent, **_params):
        """Fetches information of a certain agent."""
        return self.get(self.agent_path % (agent), params=_params)
    def update_agent(self, agent, body=None):
        """Updates an agent."""
        return self.put(self.agent_path % (agent), body=body)
    def delete_agent(self, agent):
        """Deletes the specified agent."""
        return self.delete(self.agent_path % (agent))
    # Network-gateway and gateway-device operations.
    def list_network_gateways(self, **_params):
        """Retrieve network gateways."""
        return self.get(self.network_gateways_path, params=_params)
    def show_network_gateway(self, gateway_id, **_params):
        """Fetch a network gateway."""
        return self.get(self.network_gateway_path % gateway_id, params=_params)
    def create_network_gateway(self, body=None):
        """Create a new network gateway."""
        return self.post(self.network_gateways_path, body=body)
    def update_network_gateway(self, gateway_id, body=None):
        """Update a network gateway."""
        return self.put(self.network_gateway_path % gateway_id, body=body)
    def delete_network_gateway(self, gateway_id):
        """Delete the specified network gateway."""
        return self.delete(self.network_gateway_path % gateway_id)
    def connect_network_gateway(self, gateway_id, body=None):
        """Connect a network gateway to the specified network."""
        base_uri = self.network_gateway_path % gateway_id
        return self.put("%s/connect_network" % base_uri, body=body)
    def disconnect_network_gateway(self, gateway_id, body=None):
        """Disconnect a network from the specified gateway."""
        base_uri = self.network_gateway_path % gateway_id
        return self.put("%s/disconnect_network" % base_uri, body=body)
    def list_gateway_devices(self, **_params):
        """Retrieve gateway devices."""
        return self.get(self.gateway_devices_path, params=_params)
    def show_gateway_device(self, gateway_device_id, **_params):
        """Fetch a gateway device."""
        return self.get(self.gateway_device_path % gateway_device_id,
                        params=_params)
    def create_gateway_device(self, body=None):
        """Create a new gateway device."""
        return self.post(self.gateway_devices_path, body=body)
    def update_gateway_device(self, gateway_device_id, body=None):
        """Updates the specified gateway device."""
        return self.put(self.gateway_device_path % gateway_device_id,
                        body=body)
    def delete_gateway_device(self, gateway_device_id):
        """Delete the specified gateway device."""
        return self.delete(self.gateway_device_path % gateway_device_id)
    # Agent-scheduler operations: associate networks with DHCP agents,
    # routers with L3 agents, and BGP speakers with dynamic-routing
    # agents. Paths are built by appending an agent-type suffix
    # (DHCP_NETS, L3_ROUTERS, BGP_DRINSTANCES, ...) to a resource path.
    def list_dhcp_agent_hosting_networks(self, network, **_params):
        """Fetches a list of dhcp agents hosting a network."""
        return self.get((self.network_path + self.DHCP_AGENTS) % network,
                        params=_params)
    def list_networks_on_dhcp_agent(self, dhcp_agent, **_params):
        """Fetches a list of networks hosted on a DHCP agent."""
        return self.get((self.agent_path + self.DHCP_NETS) % dhcp_agent,
                        params=_params)
    def add_network_to_dhcp_agent(self, dhcp_agent, body=None):
        """Adds a network to dhcp agent."""
        return self.post((self.agent_path + self.DHCP_NETS) % dhcp_agent,
                         body=body)
    def remove_network_from_dhcp_agent(self, dhcp_agent, network_id):
        """Remove a network from dhcp agent."""
        return self.delete((self.agent_path + self.DHCP_NETS + "/%s") % (
            dhcp_agent, network_id))
    def list_l3_agent_hosting_routers(self, router, **_params):
        """Fetches a list of L3 agents hosting a router."""
        return self.get((self.router_path + self.L3_AGENTS) % router,
                        params=_params)
    def list_routers_on_l3_agent(self, l3_agent, **_params):
        """Fetches a list of routers hosted on an L3 agent."""
        return self.get((self.agent_path + self.L3_ROUTERS) % l3_agent,
                        params=_params)
    def add_router_to_l3_agent(self, l3_agent, body):
        """Adds a router to L3 agent."""
        return self.post((self.agent_path + self.L3_ROUTERS) % l3_agent,
                         body=body)
    def list_dragents_hosting_bgp_speaker(self, bgp_speaker, **_params):
        """Fetches a list of Dynamic Routing agents hosting a BGP speaker."""
        return self.get((self.bgp_speaker_path + self.BGP_DRAGENTS)
                        % bgp_speaker, params=_params)
    def add_bgp_speaker_to_dragent(self, bgp_dragent, body):
        """Adds a BGP speaker to Dynamic Routing agent."""
        return self.post((self.agent_path + self.BGP_DRINSTANCES)
                         % bgp_dragent, body=body)
    def remove_bgp_speaker_from_dragent(self, bgp_dragent, bgpspeaker_id):
        """Removes a BGP speaker from Dynamic Routing agent."""
        return self.delete((self.agent_path + self.BGP_DRINSTANCES + "/%s")
                           % (bgp_dragent, bgpspeaker_id))
    def list_bgp_speaker_on_dragent(self, bgp_dragent, **_params):
        """Fetches a list of BGP speakers hosted by Dynamic Routing agent."""
        return self.get((self.agent_path + self.BGP_DRINSTANCES)
                        % bgp_dragent, params=_params)
def list_firewall_rules(self, retrieve_all=True, **_params):
    """List the project's firewall rules; filters pass through _params."""
    return self.list('firewall_rules', self.firewall_rules_path,
                     retrieve_all, **_params)

def show_firewall_rule(self, firewall_rule, **_params):
    """Show a single firewall rule."""
    return self.get(self.firewall_rule_path % firewall_rule,
                    params=_params)

def create_firewall_rule(self, body=None):
    """Create a firewall rule."""
    return self.post(self.firewall_rules_path, body=body)

def update_firewall_rule(self, firewall_rule, body=None):
    """Update an existing firewall rule."""
    return self.put(self.firewall_rule_path % firewall_rule, body=body)

def delete_firewall_rule(self, firewall_rule):
    """Remove the given firewall rule."""
    return self.delete(self.firewall_rule_path % firewall_rule)

def list_firewall_policies(self, retrieve_all=True, **_params):
    """List the project's firewall policies; filters pass through _params."""
    return self.list('firewall_policies', self.firewall_policies_path,
                     retrieve_all, **_params)

def show_firewall_policy(self, firewall_policy, **_params):
    """Show a single firewall policy."""
    return self.get(self.firewall_policy_path % firewall_policy,
                    params=_params)

def create_firewall_policy(self, body=None):
    """Create a firewall policy."""
    return self.post(self.firewall_policies_path, body=body)

def update_firewall_policy(self, firewall_policy, body=None):
    """Update an existing firewall policy."""
    return self.put(self.firewall_policy_path % firewall_policy,
                    body=body)

def delete_firewall_policy(self, firewall_policy):
    """Remove the given firewall policy."""
    return self.delete(self.firewall_policy_path % firewall_policy)

def firewall_policy_insert_rule(self, firewall_policy, body=None):
    """Insert a rule into the given firewall policy."""
    return self.put(self.firewall_policy_insert_path % firewall_policy,
                    body=body)

def firewall_policy_remove_rule(self, firewall_policy, body=None):
    """Remove a rule from the given firewall policy."""
    return self.put(self.firewall_policy_remove_path % firewall_policy,
                    body=body)

def list_firewalls(self, retrieve_all=True, **_params):
    """List the project's firewalls; filters pass through _params."""
    return self.list('firewalls', self.firewalls_path, retrieve_all,
                     **_params)

def show_firewall(self, firewall, **_params):
    """Show a single firewall."""
    return self.get(self.firewall_path % firewall, params=_params)

def create_firewall(self, body=None):
    """Create a firewall."""
    return self.post(self.firewalls_path, body=body)

def update_firewall(self, firewall, body=None):
    """Update an existing firewall."""
    return self.put(self.firewall_path % firewall, body=body)

def delete_firewall(self, firewall):
    """Remove the given firewall."""
    return self.delete(self.firewall_path % firewall)
def list_fwaas_firewall_groups(self, retrieve_all=True, **_params):
    """List the project's firewall groups (FWaaS v2)."""
    return self.list('firewall_groups', self.fwaas_firewall_groups_path,
                     retrieve_all, **_params)

def show_fwaas_firewall_group(self, fwg, **_params):
    """Show a single firewall group (FWaaS v2)."""
    return self.get(self.fwaas_firewall_group_path % fwg, params=_params)

def create_fwaas_firewall_group(self, body=None):
    """Create a firewall group (FWaaS v2)."""
    return self.post(self.fwaas_firewall_groups_path, body=body)

def update_fwaas_firewall_group(self, fwg, body=None):
    """Update an existing firewall group (FWaaS v2)."""
    return self.put(self.fwaas_firewall_group_path % fwg, body=body)

def delete_fwaas_firewall_group(self, fwg):
    """Remove the given firewall group (FWaaS v2)."""
    return self.delete(self.fwaas_firewall_group_path % fwg)

def list_fwaas_firewall_rules(self, retrieve_all=True, **_params):
    """List the project's firewall rules (FWaaS v2); filters in _params."""
    return self.list('firewall_rules', self.fwaas_firewall_rules_path,
                     retrieve_all, **_params)

def show_fwaas_firewall_rule(self, firewall_rule, **_params):
    """Show a single firewall rule (FWaaS v2)."""
    return self.get(self.fwaas_firewall_rule_path % firewall_rule,
                    params=_params)

def create_fwaas_firewall_rule(self, body=None):
    """Create a firewall rule (FWaaS v2)."""
    return self.post(self.fwaas_firewall_rules_path, body=body)

def update_fwaas_firewall_rule(self, firewall_rule, body=None):
    """Update an existing firewall rule (FWaaS v2)."""
    return self.put(self.fwaas_firewall_rule_path % firewall_rule,
                    body=body)

def delete_fwaas_firewall_rule(self, firewall_rule):
    """Remove the given firewall rule (FWaaS v2)."""
    return self.delete(self.fwaas_firewall_rule_path % firewall_rule)

def list_fwaas_firewall_policies(self, retrieve_all=True, **_params):
    """List the project's firewall policies (FWaaS v2); filters in _params."""
    return self.list('firewall_policies',
                     self.fwaas_firewall_policies_path,
                     retrieve_all, **_params)

def show_fwaas_firewall_policy(self, firewall_policy, **_params):
    """Show a single firewall policy (FWaaS v2)."""
    return self.get(self.fwaas_firewall_policy_path % firewall_policy,
                    params=_params)

def create_fwaas_firewall_policy(self, body=None):
    """Create a firewall policy (FWaaS v2)."""
    return self.post(self.fwaas_firewall_policies_path, body=body)

def update_fwaas_firewall_policy(self, firewall_policy, body=None):
    """Update an existing firewall policy (FWaaS v2)."""
    return self.put(self.fwaas_firewall_policy_path % firewall_policy,
                    body=body)

def delete_fwaas_firewall_policy(self, firewall_policy):
    """Remove the given firewall policy (FWaaS v2)."""
    return self.delete(self.fwaas_firewall_policy_path % firewall_policy)

def insert_rule_fwaas_firewall_policy(self, firewall_policy, body=None):
    """Insert a rule into the given firewall policy (FWaaS v2)."""
    url = self.fwaas_firewall_policy_insert_path % firewall_policy
    return self.put(url, body=body)

def remove_rule_fwaas_firewall_policy(self, firewall_policy, body=None):
    """Remove a rule from the given firewall policy (FWaaS v2)."""
    url = self.fwaas_firewall_policy_remove_path % firewall_policy
    return self.put(url, body=body)
def remove_router_from_l3_agent(self, l3_agent, router_id):
    """Unschedule a router from the given L3 agent."""
    url = (self.agent_path + self.L3_ROUTERS + "/%s") % (l3_agent,
                                                         router_id)
    return self.delete(url)

def get_lbaas_agent_hosting_pool(self, pool, **_params):
    """Fetch the loadbalancer agent that hosts the given pool."""
    url = (self.pool_path + self.LOADBALANCER_AGENT) % pool
    return self.get(url, params=_params)

def list_pools_on_lbaas_agent(self, lbaas_agent, **_params):
    """Fetch the pools hosted by the given loadbalancer agent."""
    url = (self.agent_path + self.LOADBALANCER_POOLS) % lbaas_agent
    return self.get(url, params=_params)

def get_lbaas_agent_hosting_loadbalancer(self, loadbalancer, **_params):
    """Fetch the loadbalancer agent that hosts the given loadbalancer."""
    url = (self.lbaas_loadbalancer_path +
           self.LOADBALANCER_HOSTING_AGENT) % loadbalancer
    return self.get(url, params=_params)

def list_loadbalancers_on_lbaas_agent(self, lbaas_agent, **_params):
    """Fetch the loadbalancers hosted by the given loadbalancer agent."""
    url = (self.agent_path + self.AGENT_LOADBALANCERS) % lbaas_agent
    return self.get(url, params=_params)

def list_service_providers(self, retrieve_all=True, **_params):
    """List the available service providers; filters pass through _params."""
    return self.list('service_providers', self.service_providers_path,
                     retrieve_all, **_params)
def create_metering_label(self, body=None):
    """Create a metering label."""
    return self.post(self.metering_labels_path, body=body)

def delete_metering_label(self, label):
    """Remove the given metering label."""
    return self.delete(self.metering_label_path % label)

def list_metering_labels(self, retrieve_all=True, **_params):
    """List the project's metering labels."""
    return self.list('metering_labels', self.metering_labels_path,
                     retrieve_all, **_params)

def show_metering_label(self, metering_label, **_params):
    """Show a single metering label."""
    return self.get(self.metering_label_path % metering_label,
                    params=_params)

def create_metering_label_rule(self, body=None):
    """Create a metering label rule."""
    return self.post(self.metering_label_rules_path, body=body)

def delete_metering_label_rule(self, rule):
    """Remove the given metering label rule."""
    return self.delete(self.metering_label_rule_path % rule)

def list_metering_label_rules(self, retrieve_all=True, **_params):
    """List the metering label rules belonging to a label."""
    return self.list('metering_label_rules',
                     self.metering_label_rules_path, retrieve_all,
                     **_params)

def show_metering_label_rule(self, metering_label_rule, **_params):
    """Show a single metering label rule."""
    return self.get(self.metering_label_rule_path % metering_label_rule,
                    params=_params)

def create_rbac_policy(self, body=None):
    """Create an RBAC policy."""
    return self.post(self.rbac_policies_path, body=body)

def update_rbac_policy(self, rbac_policy_id, body=None):
    """Update an existing RBAC policy."""
    return self.put(self.rbac_policy_path % rbac_policy_id, body=body)

def list_rbac_policies(self, retrieve_all=True, **_params):
    """List the project's RBAC policies."""
    return self.list('rbac_policies', self.rbac_policies_path,
                     retrieve_all, **_params)

def show_rbac_policy(self, rbac_policy_id, **_params):
    """Show a single RBAC policy."""
    return self.get(self.rbac_policy_path % rbac_policy_id,
                    params=_params)

def delete_rbac_policy(self, rbac_policy_id):
    """Remove the given RBAC policy."""
    return self.delete(self.rbac_policy_path % rbac_policy_id)
def list_qos_policies(self, retrieve_all=True, **_params):
    """List the project's QoS policies; filters pass through _params."""
    return self.list('policies', self.qos_policies_path,
                     retrieve_all, **_params)

def show_qos_policy(self, qos_policy, **_params):
    """Show a single QoS policy."""
    return self.get(self.qos_policy_path % qos_policy, params=_params)

def create_qos_policy(self, body=None):
    """Create a QoS policy."""
    return self.post(self.qos_policies_path, body=body)

def update_qos_policy(self, qos_policy, body=None, revision_number=None):
    """Update a QoS policy, optionally guarded by a revision number."""
    return self._update_resource(self.qos_policy_path % qos_policy,
                                 body=body,
                                 revision_number=revision_number)

def delete_qos_policy(self, qos_policy):
    """Remove the given QoS policy."""
    return self.delete(self.qos_policy_path % qos_policy)

def list_qos_rule_types(self, retrieve_all=True, **_params):
    """List the QoS rule types available on the server."""
    return self.list('rule_types', self.qos_rule_types_path,
                     retrieve_all, **_params)

def list_bandwidth_limit_rules(self, policy_id,
                               retrieve_all=True, **_params):
    """List every bandwidth limit rule of the given QoS policy."""
    path = self.qos_bandwidth_limit_rules_path % policy_id
    return self.list('bandwidth_limit_rules', path, retrieve_all,
                     **_params)

def show_bandwidth_limit_rule(self, rule, policy, **_params):
    """Show a single bandwidth limit rule."""
    return self.get(self.qos_bandwidth_limit_rule_path % (policy, rule),
                    params=_params)

def create_bandwidth_limit_rule(self, policy, body=None):
    """Create a bandwidth limit rule under the given QoS policy."""
    return self.post(self.qos_bandwidth_limit_rules_path % policy,
                     body=body)

def update_bandwidth_limit_rule(self, rule, policy, body=None):
    """Update an existing bandwidth limit rule."""
    return self.put(self.qos_bandwidth_limit_rule_path % (policy, rule),
                    body=body)

def delete_bandwidth_limit_rule(self, rule, policy):
    """Remove the given bandwidth limit rule."""
    return self.delete(self.qos_bandwidth_limit_rule_path %
                       (policy, rule))

def list_dscp_marking_rules(self, policy_id,
                            retrieve_all=True, **_params):
    """List every DSCP marking rule of the given QoS policy."""
    path = self.qos_dscp_marking_rules_path % policy_id
    return self.list('dscp_marking_rules', path, retrieve_all, **_params)

def show_dscp_marking_rule(self, rule, policy, **_params):
    """Show a single DSCP marking rule."""
    return self.get(self.qos_dscp_marking_rule_path % (policy, rule),
                    params=_params)

def create_dscp_marking_rule(self, policy, body=None):
    """Create a DSCP marking rule under the given QoS policy."""
    return self.post(self.qos_dscp_marking_rules_path % policy,
                     body=body)

def update_dscp_marking_rule(self, rule, policy, body=None):
    """Update an existing DSCP marking rule."""
    return self.put(self.qos_dscp_marking_rule_path % (policy, rule),
                    body=body)

def delete_dscp_marking_rule(self, rule, policy):
    """Remove the given DSCP marking rule."""
    return self.delete(self.qos_dscp_marking_rule_path % (policy, rule))
def list_minimum_bandwidth_rules(self, policy_id, retrieve_all=True,
                                 **_params):
    """List every minimum bandwidth rule of the given QoS policy."""
    path = self.qos_minimum_bandwidth_rules_path % policy_id
    return self.list('minimum_bandwidth_rules', path, retrieve_all,
                     **_params)
def show_minimum_bandwidth_rule(self, rule, policy, body=None, **_params):
    """Fetches information of a certain minimum bandwidth rule.

    Unlike the sibling show_bandwidth_limit_rule/show_dscp_marking_rule
    calls, this method historically accepted only ``body`` (a copy-paste
    slip), so query filters such as ``fields`` could not be passed.
    ``body`` is kept for backward compatibility and is still forwarded;
    extra keyword arguments are now forwarded as query parameters like
    every other show_* method.
    """
    return self.get(self.qos_minimum_bandwidth_rule_path % (policy, rule),
                    params=_params, body=body)
def create_minimum_bandwidth_rule(self, policy, body=None):
    """Create a minimum bandwidth rule under the given QoS policy."""
    path = self.qos_minimum_bandwidth_rules_path % policy
    return self.post(path, body=body)

def list_packet_rate_limit_rules(self, policy_id, retrieve_all=True,
                                 **_params):
    """List every packet rate limit rule of the given QoS policy."""
    path = self.qos_packet_rate_limit_rules_path % policy_id
    return self.list('packet_rate_limit_rules', path, retrieve_all,
                     **_params)
def show_packet_rate_limit_rule(self, rule, policy, body=None, **_params):
    """Fetches information of a certain packet rate limit rule.

    This show call historically accepted only ``body`` (a copy-paste
    slip from the create/update variants), so query filters such as
    ``fields`` could not be passed. ``body`` is kept for backward
    compatibility and still forwarded; extra keyword arguments are now
    forwarded as query parameters like every other show_* method.
    """
    return self.get(self.qos_packet_rate_limit_rule_path % (policy, rule),
                    params=_params, body=body)
def create_packet_rate_limit_rule(self, policy, body=None):
    """Create a packet rate limit rule under the given QoS policy."""
    path = self.qos_packet_rate_limit_rules_path % policy
    return self.post(path, body=body)

def update_packet_rate_limit_rule(self, rule, policy, body=None):
    """Update an existing packet rate limit rule."""
    return self.put(self.qos_packet_rate_limit_rule_path %
                    (policy, rule), body=body)

def delete_packet_rate_limit_rule(self, rule, policy):
    """Remove the given packet rate limit rule."""
    return self.delete(self.qos_packet_rate_limit_rule_path %
                       (policy, rule))

def update_minimum_bandwidth_rule(self, rule, policy, body=None):
    """Update an existing minimum bandwidth rule."""
    return self.put(self.qos_minimum_bandwidth_rule_path %
                    (policy, rule), body=body)

def delete_minimum_bandwidth_rule(self, rule, policy):
    """Remove the given minimum bandwidth rule."""
    return self.delete(self.qos_minimum_bandwidth_rule_path %
                       (policy, rule))
def list_minimum_packet_rate_rules(self, policy_id, retrieve_all=True,
                                   **_params):
    """List every minimum packet rate rule of the given QoS policy."""
    path = self.qos_minimum_packet_rate_rules_path % policy_id
    return self.list('minimum_packet_rate_rules', path, retrieve_all,
                     **_params)
def show_minimum_packet_rate_rule(self, rule, policy, body=None, **_params):
    """Fetches information of a certain minimum packet rate rule.

    This show call historically accepted only ``body`` (a copy-paste
    slip from the create/update variants), so query filters such as
    ``fields`` could not be passed. ``body`` is kept for backward
    compatibility and still forwarded; extra keyword arguments are now
    forwarded as query parameters like every other show_* method.
    """
    return self.get(self.qos_minimum_packet_rate_rule_path %
                    (policy, rule), params=_params, body=body)
def create_minimum_packet_rate_rule(self, policy, body=None):
    """Create a minimum packet rate rule under the given QoS policy."""
    path = self.qos_minimum_packet_rate_rules_path % policy
    return self.post(path, body=body)

def update_minimum_packet_rate_rule(self, rule, policy, body=None):
    """Update an existing minimum packet rate rule."""
    return self.put(self.qos_minimum_packet_rate_rule_path %
                    (policy, rule), body=body)

def delete_minimum_packet_rate_rule(self, rule, policy):
    """Remove the given minimum packet rate rule."""
    return self.delete(self.qos_minimum_packet_rate_rule_path %
                       (policy, rule))
def create_flavor(self, body=None):
    """Create a Neutron service flavor."""
    return self.post(self.flavors_path, body=body)

def delete_flavor(self, flavor):
    """Remove the given Neutron service flavor."""
    return self.delete(self.flavor_path % flavor)

def list_flavors(self, retrieve_all=True, **_params):
    """List the project's Neutron service flavors."""
    return self.list('flavors', self.flavors_path, retrieve_all,
                     **_params)

def show_flavor(self, flavor, **_params):
    """Show a single Neutron service flavor."""
    return self.get(self.flavor_path % flavor, params=_params)

def update_flavor(self, flavor, body):
    """Update an existing Neutron service flavor."""
    return self.put(self.flavor_path % flavor, body=body)

def associate_flavor(self, flavor, body):
    """Bind a Neutron service flavor to a service profile."""
    return self.post(self.flavor_profile_bindings_path % flavor,
                     body=body)

def disassociate_flavor(self, flavor, flavor_profile):
    """Unbind a Neutron service flavor from a service profile."""
    return self.delete(self.flavor_profile_binding_path %
                       (flavor, flavor_profile))

def create_service_profile(self, body=None):
    """Create a Neutron service flavor profile."""
    return self.post(self.service_profiles_path, body=body)

def delete_service_profile(self, flavor_profile):
    """Remove the given Neutron service flavor profile."""
    return self.delete(self.service_profile_path % flavor_profile)

def list_service_profiles(self, retrieve_all=True, **_params):
    """List all Neutron service flavor profiles."""
    return self.list('service_profiles', self.service_profiles_path,
                     retrieve_all, **_params)

def show_service_profile(self, flavor_profile, **_params):
    """Show a single Neutron service flavor profile."""
    return self.get(self.service_profile_path % flavor_profile,
                    params=_params)

def update_service_profile(self, service_profile, body):
    """Update an existing Neutron service profile."""
    return self.put(self.service_profile_path % service_profile,
                    body=body)

def list_availability_zones(self, retrieve_all=True, **_params):
    """List all availability zones."""
    return self.list('availability_zones', self.availability_zones_path,
                     retrieve_all, **_params)
@debtcollector.renames.renamed_kwarg(
    'tenant_id', 'project_id', replace=True)
def get_auto_allocated_topology(self, project_id, **_params):
    """Show the auto-allocated topology of the given project."""
    url = self.auto_allocated_topology_path % project_id
    return self.get(url, params=_params)

@debtcollector.renames.renamed_kwarg(
    'tenant_id', 'project_id', replace=True)
def delete_auto_allocated_topology(self, project_id, **_params):
    """Tear down the auto-allocated topology of the given project."""
    url = self.auto_allocated_topology_path % project_id
    return self.delete(url, params=_params)

@debtcollector.renames.renamed_kwarg(
    'tenant_id', 'project_id', replace=True)
def validate_auto_allocated_topology_requirements(self, project_id):
    """Dry-run check that an auto-allocated topology could be built."""
    return self.get_auto_allocated_topology(project_id, fields=['dry-run'])
def list_bgp_speakers(self, retrieve_all=True, **_params):
    """List the project's BGP speakers."""
    return self.list('bgp_speakers', self.bgp_speakers_path, retrieve_all,
                     **_params)

def show_bgp_speaker(self, bgp_speaker_id, **_params):
    """Show a single BGP speaker."""
    return self.get(self.bgp_speaker_path % bgp_speaker_id,
                    params=_params)

def create_bgp_speaker(self, body=None):
    """Create a BGP speaker."""
    return self.post(self.bgp_speakers_path, body=body)

def update_bgp_speaker(self, bgp_speaker_id, body=None):
    """Update an existing BGP speaker."""
    return self.put(self.bgp_speaker_path % bgp_speaker_id, body=body)

def delete_bgp_speaker(self, speaker_id):
    """Remove the given BGP speaker."""
    return self.delete(self.bgp_speaker_path % speaker_id)

def add_peer_to_bgp_speaker(self, speaker_id, body=None):
    """Attach a BGP peer to the given speaker."""
    url = (self.bgp_speaker_path % speaker_id) + "/add_bgp_peer"
    return self.put(url, body=body)

def remove_peer_from_bgp_speaker(self, speaker_id, body=None):
    """Detach a BGP peer from the given speaker."""
    url = (self.bgp_speaker_path % speaker_id) + "/remove_bgp_peer"
    return self.put(url, body=body)

def add_network_to_bgp_speaker(self, speaker_id, body=None):
    """Attach a gateway network to the given BGP speaker."""
    url = (self.bgp_speaker_path % speaker_id) + "/add_gateway_network"
    return self.put(url, body=body)

def remove_network_from_bgp_speaker(self, speaker_id, body=None):
    """Detach a gateway network from the given BGP speaker."""
    url = (self.bgp_speaker_path % speaker_id) + "/remove_gateway_network"
    return self.put(url, body=body)

def list_route_advertised_from_bgp_speaker(self, speaker_id, **_params):
    """List the routes the given BGP speaker advertises."""
    url = (self.bgp_speaker_path % speaker_id) + "/get_advertised_routes"
    return self.get(url, params=_params)

def list_bgp_peers(self, **_params):
    """List all BGP peers."""
    return self.get(self.bgp_peers_path, params=_params)

def show_bgp_peer(self, peer_id, **_params):
    """Show a single BGP peer."""
    return self.get(self.bgp_peer_path % peer_id, params=_params)

def create_bgp_peer(self, body=None):
    """Create a BGP peer."""
    return self.post(self.bgp_peers_path, body=body)

def update_bgp_peer(self, bgp_peer_id, body=None):
    """Update an existing BGP peer."""
    return self.put(self.bgp_peer_path % bgp_peer_id, body=body)

def delete_bgp_peer(self, peer_id):
    """Remove the given BGP peer."""
    return self.delete(self.bgp_peer_path % peer_id)
def list_network_ip_availabilities(self, retrieve_all=True, **_params):
    """List IP availability information for every network."""
    return self.list('network_ip_availabilities',
                     self.network_ip_availabilities_path,
                     retrieve_all, **_params)

def show_network_ip_availability(self, network, **_params):
    """Show IP availability information for one network."""
    return self.get(self.network_ip_availability_path % network,
                    params=_params)

def add_tag(self, resource_type, resource_id, tag, **_params):
    """Attach a tag to the given resource."""
    return self.put(self.tag_path % (resource_type, resource_id, tag))

def replace_tag(self, resource_type, resource_id, body, **_params):
    """Replace the full tag set of the given resource."""
    return self.put(self.tags_path % (resource_type, resource_id), body)

def remove_tag(self, resource_type, resource_id, tag, **_params):
    """Detach a tag from the given resource."""
    return self.delete(self.tag_path % (resource_type, resource_id, tag))

def remove_tag_all(self, resource_type, resource_id, **_params):
    """Detach every tag from the given resource."""
    return self.delete(self.tags_path % (resource_type, resource_id))
def create_trunk(self, body=None):
    """Create a trunk port."""
    return self.post(self.trunks_path, body=body)

def update_trunk(self, trunk, body=None, revision_number=None):
    """Update a trunk port, optionally guarded by a revision number."""
    return self._update_resource(self.trunk_path % trunk, body=body,
                                 revision_number=revision_number)

def delete_trunk(self, trunk):
    """Remove the given trunk port."""
    return self.delete(self.trunk_path % trunk)

def list_trunks(self, retrieve_all=True, **_params):
    """List all trunk ports."""
    return self.list('trunks', self.trunks_path, retrieve_all,
                     **_params)

def show_trunk(self, trunk, **_params):
    """Show a single trunk port."""
    return self.get(self.trunk_path % trunk, params=_params)

def trunk_add_subports(self, trunk, body=None):
    """Attach the given subports to the trunk."""
    return self.put(self.subports_add_path % trunk, body=body)

def trunk_remove_subports(self, trunk, body=None):
    """Detach the given subports from the trunk."""
    return self.put(self.subports_remove_path % trunk, body=body)

def trunk_get_subports(self, trunk, **_params):
    """List the subports attached to the given trunk."""
    return self.get(self.subports_path % trunk, params=_params)
def list_bgpvpns(self, retrieve_all=True, **_params):
    """List the project's BGP VPNs."""
    return self.list('bgpvpns', self.bgpvpns_path, retrieve_all, **_params)

def show_bgpvpn(self, bgpvpn, **_params):
    """Show a single BGP VPN."""
    return self.get(self.bgpvpn_path % bgpvpn, params=_params)

def create_bgpvpn(self, body=None):
    """Create a BGP VPN."""
    return self.post(self.bgpvpns_path, body=body)

def update_bgpvpn(self, bgpvpn, body=None):
    """Update an existing BGP VPN."""
    return self.put(self.bgpvpn_path % bgpvpn, body=body)

def delete_bgpvpn(self, bgpvpn):
    """Remove the given BGP VPN."""
    return self.delete(self.bgpvpn_path % bgpvpn)

def list_bgpvpn_network_assocs(self, bgpvpn, retrieve_all=True, **_params):
    """List the network associations of the given BGP VPN."""
    return self.list('network_associations',
                     self.bgpvpn_network_associations_path % bgpvpn,
                     retrieve_all, **_params)

def show_bgpvpn_network_assoc(self, bgpvpn, net_assoc, **_params):
    """Show one network association of the given BGP VPN."""
    url = self.bgpvpn_network_association_path % (bgpvpn, net_assoc)
    return self.get(url, params=_params)

def create_bgpvpn_network_assoc(self, bgpvpn, body=None):
    """Create a network association on the given BGP VPN."""
    return self.post(self.bgpvpn_network_associations_path % bgpvpn,
                     body=body)

def update_bgpvpn_network_assoc(self, bgpvpn, net_assoc, body=None):
    """Update an existing BGP VPN network association."""
    url = self.bgpvpn_network_association_path % (bgpvpn, net_assoc)
    return self.put(url, body=body)

def delete_bgpvpn_network_assoc(self, bgpvpn, net_assoc):
    """Remove the given BGP VPN network association."""
    url = self.bgpvpn_network_association_path % (bgpvpn, net_assoc)
    return self.delete(url)

def list_bgpvpn_router_assocs(self, bgpvpn, retrieve_all=True, **_params):
    """List the router associations of the given BGP VPN."""
    return self.list('router_associations',
                     self.bgpvpn_router_associations_path % bgpvpn,
                     retrieve_all, **_params)

def show_bgpvpn_router_assoc(self, bgpvpn, router_assoc, **_params):
    """Show one router association of the given BGP VPN."""
    url = self.bgpvpn_router_association_path % (bgpvpn, router_assoc)
    return self.get(url, params=_params)

def create_bgpvpn_router_assoc(self, bgpvpn, body=None):
    """Create a router association on the given BGP VPN."""
    return self.post(self.bgpvpn_router_associations_path % bgpvpn,
                     body=body)

def update_bgpvpn_router_assoc(self, bgpvpn, router_assoc, body=None):
    """Update an existing BGP VPN router association."""
    url = self.bgpvpn_router_association_path % (bgpvpn, router_assoc)
    return self.put(url, body=body)

def delete_bgpvpn_router_assoc(self, bgpvpn, router_assoc):
    """Remove the given BGP VPN router association."""
    url = self.bgpvpn_router_association_path % (bgpvpn, router_assoc)
    return self.delete(url)

def list_bgpvpn_port_assocs(self, bgpvpn, retrieve_all=True, **_params):
    """List the port associations of the given BGP VPN."""
    return self.list('port_associations',
                     self.bgpvpn_port_associations_path % bgpvpn,
                     retrieve_all, **_params)

def show_bgpvpn_port_assoc(self, bgpvpn, port_assoc, **_params):
    """Show one port association of the given BGP VPN."""
    url = self.bgpvpn_port_association_path % (bgpvpn, port_assoc)
    return self.get(url, params=_params)

def create_bgpvpn_port_assoc(self, bgpvpn, body=None):
    """Create a port association on the given BGP VPN."""
    return self.post(self.bgpvpn_port_associations_path % bgpvpn,
                     body=body)

def update_bgpvpn_port_assoc(self, bgpvpn, port_assoc, body=None):
    """Update an existing BGP VPN port association."""
    url = self.bgpvpn_port_association_path % (bgpvpn, port_assoc)
    return self.put(url, body=body)

def delete_bgpvpn_port_assoc(self, bgpvpn, port_assoc):
    """Remove the given BGP VPN port association."""
    url = self.bgpvpn_port_association_path % (bgpvpn, port_assoc)
    return self.delete(url)
def create_sfc_port_pair(self, body=None):
    """Create an SFC port pair."""
    return self.post(self.sfc_port_pairs_path, body=body)

def update_sfc_port_pair(self, port_pair, body=None):
    """Update an existing SFC port pair."""
    return self.put(self.sfc_port_pair_path % port_pair, body=body)

def delete_sfc_port_pair(self, port_pair):
    """Remove the given SFC port pair."""
    return self.delete(self.sfc_port_pair_path % port_pair)

def list_sfc_port_pairs(self, retrieve_all=True, **_params):
    """List all SFC port pairs."""
    return self.list('port_pairs', self.sfc_port_pairs_path, retrieve_all,
                     **_params)

def show_sfc_port_pair(self, port_pair, **_params):
    """Show a single SFC port pair."""
    return self.get(self.sfc_port_pair_path % port_pair, params=_params)

def create_sfc_port_pair_group(self, body=None):
    """Create an SFC port pair group."""
    return self.post(self.sfc_port_pair_groups_path, body=body)

def update_sfc_port_pair_group(self, port_pair_group, body=None):
    """Update an existing SFC port pair group."""
    return self.put(self.sfc_port_pair_group_path % port_pair_group,
                    body=body)

def delete_sfc_port_pair_group(self, port_pair_group):
    """Remove the given SFC port pair group."""
    return self.delete(self.sfc_port_pair_group_path % port_pair_group)

def list_sfc_port_pair_groups(self, retrieve_all=True, **_params):
    """List all SFC port pair groups."""
    return self.list('port_pair_groups', self.sfc_port_pair_groups_path,
                     retrieve_all, **_params)

def show_sfc_port_pair_group(self, port_pair_group, **_params):
    """Show a single SFC port pair group."""
    return self.get(self.sfc_port_pair_group_path % port_pair_group,
                    params=_params)

def create_sfc_port_chain(self, body=None):
    """Create an SFC port chain."""
    return self.post(self.sfc_port_chains_path, body=body)

def update_sfc_port_chain(self, port_chain, body=None):
    """Update an existing SFC port chain."""
    return self.put(self.sfc_port_chain_path % port_chain, body=body)

def delete_sfc_port_chain(self, port_chain):
    """Remove the given SFC port chain."""
    return self.delete(self.sfc_port_chain_path % port_chain)

def list_sfc_port_chains(self, retrieve_all=True, **_params):
    """List all SFC port chains."""
    return self.list('port_chains', self.sfc_port_chains_path,
                     retrieve_all, **_params)

def show_sfc_port_chain(self, port_chain, **_params):
    """Show a single SFC port chain."""
    return self.get(self.sfc_port_chain_path % port_chain,
                    params=_params)

def create_sfc_flow_classifier(self, body=None):
    """Create an SFC flow classifier."""
    return self.post(self.sfc_flow_classifiers_path, body=body)

def update_sfc_flow_classifier(self, flow_classifier, body=None):
    """Update an existing SFC flow classifier."""
    return self.put(self.sfc_flow_classifier_path % flow_classifier,
                    body=body)

def delete_sfc_flow_classifier(self, flow_classifier):
    """Remove the given SFC flow classifier."""
    return self.delete(self.sfc_flow_classifier_path % flow_classifier)

def list_sfc_flow_classifiers(self, retrieve_all=True, **_params):
    """List all SFC flow classifiers."""
    return self.list('flow_classifiers', self.sfc_flow_classifiers_path,
                     retrieve_all, **_params)

def show_sfc_flow_classifier(self, flow_classifier, **_params):
    """Show a single SFC flow classifier."""
    return self.get(self.sfc_flow_classifier_path % flow_classifier,
                    params=_params)

def create_sfc_service_graph(self, body=None):
    """Create an SFC service graph."""
    return self.post(self.sfc_service_graphs_path, body=body)

def update_sfc_service_graph(self, service_graph, body=None):
    """Update an existing SFC service graph."""
    return self.put(self.sfc_service_graph_path % service_graph,
                    body=body)

def delete_sfc_service_graph(self, service_graph):
    """Remove the given SFC service graph."""
    return self.delete(self.sfc_service_graph_path % service_graph)

def list_sfc_service_graphs(self, retrieve_all=True, **_params):
    """List all SFC service graphs."""
    return self.list('service_graphs', self.sfc_service_graphs_path,
                     retrieve_all, **_params)

def show_sfc_service_graph(self, service_graph, **_params):
    """Show a single SFC service graph."""
    return self.get(self.sfc_service_graph_path % service_graph,
                    params=_params)
def create_network_log(self, body=None):
"""Create a network log."""
return self.post(self.network_logs_path, body=body)
def delete_network_log(self, net_log):
"""Delete a network log."""
return self.delete(self.network_log_path % net_log)
def list_network_logs(self, retrieve_all=True, **_params):
"""Fetch a list of all network logs."""
return self.list(
'logs', self.network_logs_path, retrieve_all, **_params)
def show_network_log(self, net_log, **_params):
"""Fetch information for a certain network log."""
return self.get(self.network_log_path % net_log, params=_params)
def update_network_log(self, net_log, body=None):
"""Update a network log."""
return self.put(self.network_log_path % net_log, body=body)
def list_network_loggable_resources(self, retrieve_all=True, **_params):
"""Fetch a list of supported resource types for network log."""
return self.list('loggable_resources', self.network_loggables_path,
retrieve_all, **_params)
def onboard_network_subnets(self, subnetpool, body=None):
"""Onboard the specified network's subnets into a subnet pool."""
return self.put(self.onboard_network_subnets_path % (subnetpool),
body=body)
    def __init__(self, **kwargs):
        """Initialize a new client for the Neutron v2.0 API."""
        super(Client, self).__init__(**kwargs)
        # Attach the CRUD helpers contributed by discovered client
        # extension modules for this API version.
        self._register_extensions(self.version)
def _update_resource(self, path, **kwargs):
revision_number = kwargs.pop('revision_number', None)
if revision_number:
headers = kwargs.setdefault('headers', {})
headers['If-Match'] = 'revision_number=%s' % revision_number
return self.put(path, **kwargs)
def extend_show(self, resource_singular, path, parent_resource):
def _fx(obj, **_params):
return self.show_ext(path, obj, **_params)
def _parent_fx(obj, parent_id, **_params):
return self.show_ext(path % parent_id, obj, **_params)
fn = _fx if not parent_resource else _parent_fx
setattr(self, "show_%s" % resource_singular, fn)
def extend_list(self, resource_plural, path, parent_resource):
def _fx(retrieve_all=True, **_params):
return self.list_ext(resource_plural, path,
retrieve_all, **_params)
def _parent_fx(parent_id, retrieve_all=True, **_params):
return self.list_ext(resource_plural, path % parent_id,
retrieve_all, **_params)
fn = _fx if not parent_resource else _parent_fx
setattr(self, "list_%s" % resource_plural, fn)
def extend_create(self, resource_singular, path, parent_resource):
def _fx(body=None):
return self.create_ext(path, body)
def _parent_fx(parent_id, body=None):
return self.create_ext(path % parent_id, body)
fn = _fx if not parent_resource else _parent_fx
setattr(self, "create_%s" % resource_singular, fn)
def extend_delete(self, resource_singular, path, parent_resource):
def _fx(obj):
return self.delete_ext(path, obj)
def _parent_fx(obj, parent_id):
return self.delete_ext(path % parent_id, obj)
fn = _fx if not parent_resource else _parent_fx
setattr(self, "delete_%s" % resource_singular, fn)
def extend_update(self, resource_singular, path, parent_resource):
def _fx(obj, body=None):
return self.update_ext(path, obj, body)
def _parent_fx(obj, parent_id, body=None):
return self.update_ext(path % parent_id, obj, body)
fn = _fx if not parent_resource else _parent_fx
setattr(self, "update_%s" % resource_singular, fn)
    def _extend_client_with_module(self, module, version):
        """Graft the CRUD helpers declared in one extension *module* onto
        this client.

        Each class in *module* that subclasses one of the
        ``client_extension.ClientExtension*`` marker classes contributes a
        generated ``list_/create_/update_/delete_/show_<resource>`` method;
        a plain ``NeutronClientExtension`` subclass only registers its URL
        paths as attributes.

        :param module: extension module to inspect for extension classes.
        :param version: API version string; classes declaring a ``versions``
            attribute that does not include it are skipped.
        """
        classes = inspect.getmembers(module, inspect.isclass)
        for cls_name, cls in classes:
            # Skip classes that declare supported versions excluding ours.
            if hasattr(cls, 'versions'):
                if version not in cls.versions:
                    continue
            parent_resource = getattr(cls, 'parent_resource', None)
            # NOTE: branch order matters -- the specific ClientExtension*
            # marker classes are checked before the generic
            # NeutronClientExtension fallback.
            if issubclass(cls, client_extension.ClientExtensionList):
                self.extend_list(cls.resource_plural, cls.object_path,
                                 parent_resource)
            elif issubclass(cls, client_extension.ClientExtensionCreate):
                self.extend_create(cls.resource, cls.object_path,
                                   parent_resource)
            elif issubclass(cls, client_extension.ClientExtensionUpdate):
                self.extend_update(cls.resource, cls.resource_path,
                                   parent_resource)
            elif issubclass(cls, client_extension.ClientExtensionDelete):
                self.extend_delete(cls.resource, cls.resource_path,
                                   parent_resource)
            elif issubclass(cls, client_extension.ClientExtensionShow):
                self.extend_show(cls.resource, cls.resource_path,
                                 parent_resource)
            elif issubclass(cls, client_extension.NeutronClientExtension):
                setattr(self, "%s_path" % cls.resource_plural,
                        cls.object_path)
                setattr(self, "%s_path" % cls.resource, cls.resource_path)
                # Record the plural->singular mapping so generic list
                # handling can recognize this extension resource.
                self.EXTED_PLURALS.update({cls.resource_plural: cls.resource})
def _register_extensions(self, version):
for name, module in itertools.chain(
client_extension._discover_via_entry_points()):
self._extend_client_with_module(module, version)
| codeparrot/github-code-clean |
#Copyright 2008 Orbitz WorldWide
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from datetime import date, datetime, timedelta
from functools import partial
from itertools import izip, imap
import math
import re
import random
import time
from graphite.logger import log
from graphite.render.attime import parseTimeOffset
from graphite.events import models
#XXX format_units() should go somewhere else
from os import environ
if environ.get('READTHEDOCS'):
format_units = lambda *args, **kwargs: (0,'')
else:
from graphite.render.glyph import format_units
from graphite.render.datalib import TimeSeries
from graphite.util import timestamp
NAN = float('NaN')
INF = float('inf')
DAY = 86400
HOUR = 3600
MINUTE = 60
#Utility functions
def safeSum(values):
  """Sum of the non-None entries of *values*; None if every entry is None."""
  present = [v for v in values if v is not None]
  if present:
    return sum(present)
def safeDiff(values):
  """Return the first non-None value minus the sum of the remaining
  non-None values, or None when every value is None.

  Rewritten without ``map(...).insert(...)``: under Python 3 ``map``
  returns an iterator with no ``insert`` method, so the old form raised
  AttributeError; the arithmetic is unchanged.
  """
  safeValues = [v for v in values if v is not None]
  if safeValues:
    return safeValues[0] - sum(safeValues[1:])
def safeLen(values):
  """Count of the entries of *values* that are not None."""
  return sum(1 for v in values if v is not None)
def safeDiv(a, b):
  """Float division a/b; None when a is None or b is None or zero."""
  if a is None or b is None or b == 0:
    return None
  return float(a) / float(b)
def safeMul(*factors):
  """Product of *factors* as a float; None if any factor is None.

  Uses an explicit loop instead of the bare ``reduce`` builtin, which no
  longer exists under Python 3.  As a side effect, calling with no
  arguments now returns the multiplicative identity 1.0 instead of
  raising TypeError.
  """
  if None in factors:
    return None
  product = 1.0
  for factor in factors:
    product *= float(factor)
  return product
def safeSubtract(a, b):
  """a - b as floats; None when either operand is None."""
  if a is None or b is None:
    return None
  return float(a) - float(b)
def safeAvg(a):
  """Mean of the non-None entries of *a*; None when there are none."""
  total = safeSum(a)
  count = safeLen(a)
  return safeDiv(total, count)
def safeStdDev(a):
  """Population standard deviation of the non-None entries of *a*.

  Returns None when *a* holds no usable values (the original raised
  ZeroDivisionError in that case).  Also stops shadowing the ``sum``
  builtin, which the original clobbered with its accumulator variable.
  """
  safeValues = [v for v in a if v is not None]
  if not safeValues:
    return None
  mean = float(sum(safeValues)) / len(safeValues)
  variance = sum((v - mean) ** 2 for v in safeValues) / len(safeValues)
  return math.sqrt(variance)
def safeLast(values):
  """Rightmost non-None entry of *values*, or None if there is none."""
  for candidate in reversed(values):
    if candidate is not None:
      return candidate
def safeMin(values):
  """Smallest non-None entry of *values*; None when every entry is None."""
  present = [v for v in values if v is not None]
  if present:
    return min(present)
def safeMax(values):
  """Largest non-None entry of *values*; None when every entry is None."""
  present = [v for v in values if v is not None]
  if present:
    return max(present)
def safeMap(function, values):
  """Apply *function* to the non-None entries of *values*.

  Returns None when every entry is None.  The original applied *function*
  to the raw list -- including the None entries it had just filtered
  out -- which raised TypeError for most callables; it now maps over the
  filtered values only, matching the other safe* helpers.
  """
  safeValues = [v for v in values if v is not None]
  if safeValues:
    return [function(x) for x in safeValues]
def safeAbs(value):
  """Absolute value of *value*, propagating None."""
  return None if value is None else abs(value)
# Greatest common divisor
def gcd(a, b):
  """Euclid's algorithm, written iteratively."""
  while b != 0:
    a, b = b, a % b
  return a
# Least common multiple
def lcm(a, b):
  """Least common multiple of a and b.

  Uses floor division (``//``) so the result stays an integer under
  Python 3 as well; under Python 2 ``//`` and ``/`` are identical for
  the integer step values this is called with.
  """
  if a == b:
    return a
  if a < b:
    (a, b) = (b, a)  # ensure a > b
  return a // gcd(a, b) * b
def normalize(seriesLists):
  """Flatten *seriesLists*, consolidate every series to a common step, and
  return ``(seriesList, start, end, step)`` with *end* aligned to *step*.

  Rewritten without the bare ``reduce`` builtin (removed in Python 3):
  concatenation becomes a nested comprehension and the common step is
  accumulated with an explicit loop over lcm().  Raises IndexError (was
  TypeError) when no series are supplied; sumSeries() traps either.
  """
  seriesList = [series for group in seriesLists for series in group]
  step = seriesList[0].step
  for series in seriesList[1:]:
    step = lcm(step, series.step)
  for series in seriesList:
    series.consolidate(step // series.step)
  start = min([s.start for s in seriesList])
  end = max([s.end for s in seriesList])
  # Trim the end so the interval is an exact multiple of the step.
  end -= (end - start) % step
  return (seriesList, start, end, step)
def formatPathExpressions(seriesList):
  """Comma-join the distinct pathExpression values of *seriesList*,
  preserving first-seen order.

  The original deduplicated with ``list.count`` inside a side-effect list
  comprehension -- O(n^2); this produces the same string with a seen-set
  in a single pass.
  """
  seen = set()
  pathExpressions = []
  for series in seriesList:
    expression = series.pathExpression
    if expression not in seen:
      seen.add(expression)
      pathExpressions.append(expression)
  return ','.join(pathExpressions)
# Series Functions
#NOTE: Some of the functions below use izip, which may be problematic.
#izip stops when it hits the end of the shortest series
#in practice this *shouldn't* matter because all series will cover
#the same interval, despite having possibly different steps...
def sumSeries(requestContext, *seriesLists):
  """
  Short form: sum()
  This will add metrics together and return the sum at each datapoint. (See
  integral for a sum over time)
  Example:
  .. code-block:: none
    &target=sum(company.server.application*.requestsHandled)
  This would show the sum of all requests handled per minute (provided
  requestsHandled are collected once a minute). If metrics with different
  retention rates are combined, the coarsest metric is graphed, and the sum
  of the other metrics is averaged for the metrics with finer retention rates.
  """
  try:
    (seriesList, start, end, step) = normalize(seriesLists)
  except Exception:
    # normalize() blows up when no series matched; treat that as an empty
    # result.  The original used a bare ``except:``, which also swallowed
    # SystemExit and KeyboardInterrupt.
    return []
  name = "sumSeries(%s)" % formatPathExpressions(seriesList)
  values = ( safeSum(row) for row in izip(*seriesList) )
  series = TimeSeries(name, start, end, step, values)
  series.pathExpression = name
  return [series]
def sumSeriesWithWildcards(requestContext, seriesList, *position): #XXX
  """
  Call sumSeries after inserting wildcards at the given position(s).
  Example:
  .. code-block:: none
    &target=sumSeriesWithWildcards(host.cpu-[0-7].cpu-{user,system}.value, 1)
  This would be the equivalent of
  ``target=sumSeries(host.*.cpu-user.value)&target=sumSeries(host.*.cpu-system.value)``
  """
  if isinstance(position, int):
    positions = [position]
  else:
    positions = position
  newSeries = {}
  newNames = list()
  for series in seriesList:
    # Drop the name components at the wildcarded positions.
    keptParts = [part for index, part in enumerate(series.name.split('.'))
                 if index not in positions]
    newname = '.'.join(keptParts)
    if newname in newSeries:
      newSeries[newname] = sumSeries(requestContext,
                                     (series, newSeries[newname]))[0]
    else:
      newSeries[newname] = series
      newNames.append(newname)
    newSeries[newname].name = newname
  return [newSeries[name] for name in newNames]
def averageSeriesWithWildcards(requestContext, seriesList, *position): #XXX
  """
  Call averageSeries after inserting wildcards at the given position(s).
  Example:
  .. code-block:: none
    &target=averageSeriesWithWildcards(host.cpu-[0-7].cpu-{user,system}.value, 1)
  This would be the equivalent of
  ``target=averageSeries(host.*.cpu-user.value)&target=averageSeries(host.*.cpu-system.value)``
  """
  if isinstance(position, int):
    positions = [position]
  else:
    positions = position
  result = []
  matchedList = {}
  for series in seriesList:
    # Group series by their name with the wildcarded components removed.
    keptParts = [part for index, part in enumerate(series.name.split('.'))
                 if index not in positions]
    newname = '.'.join(keptParts)
    matchedList.setdefault(newname, []).append(series)
  for name in matchedList.keys():
    averaged = averageSeries(requestContext, (matchedList[name]))[0]
    averaged.name = name
    result.append(averaged)
  return result
def diffSeries(requestContext, *seriesLists):
  """
  Can take two or more metrics, or a single metric and a constant.
  Subtracts parameters 2 through n from parameter 1.
  Example:
  .. code-block:: none
    &target=diffSeries(service.connections.total,service.connections.failed)
    &target=diffSeries(service.connections.total,5)
  """
  seriesList, start, end, step = normalize(seriesLists)
  name = "diffSeries(%s)" % formatPathExpressions(seriesList)
  pointStream = (safeDiff(points) for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, pointStream)
  result.pathExpression = name
  return [result]
def averageSeries(requestContext, *seriesLists):
  """
  Short Alias: avg()
  Takes one metric or a wildcard seriesList.
  Draws the average value of all metrics passed at each time.
  Example:
  .. code-block:: none
    &target=averageSeries(company.server.*.threads.busy)
  """
  seriesList, start, end, step = normalize(seriesLists)
  name = "averageSeries(%s)" % formatPathExpressions(seriesList)
  pointStream = (safeDiv(safeSum(points), safeLen(points))
                 for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, pointStream)
  result.pathExpression = name
  return [result]
def stddevSeries(requestContext, *seriesLists):
  """
  Takes one metric or a wildcard seriesList.
  Draws the standard deviation of all metrics passed at each time.
  Example:
  .. code-block:: none
    &target=stddevSeries(company.server.*.threads.busy)
  """
  seriesList, start, end, step = normalize(seriesLists)
  name = "stddevSeries(%s)" % formatPathExpressions(seriesList)
  pointStream = (safeStdDev(points) for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, pointStream)
  result.pathExpression = name
  return [result]
def minSeries(requestContext, *seriesLists):
  """
  Takes one metric or a wildcard seriesList.
  For each datapoint from each metric passed in, pick the minimum value and graph it.
  Example:
  .. code-block:: none
    &target=minSeries(Server*.connections.total)
  """
  seriesList, start, end, step = normalize(seriesLists)
  name = "minSeries(%s)" % formatPathExpressions(seriesList)
  pointStream = (safeMin(points) for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, pointStream)
  result.pathExpression = name
  return [result]
def maxSeries(requestContext, *seriesLists):
  """
  Takes one metric or a wildcard seriesList.
  For each datapoint from each metric passed in, pick the maximum value and graph it.
  Example:
  .. code-block:: none
    &target=maxSeries(Server*.connections.total)
  """
  seriesList, start, end, step = normalize(seriesLists)
  name = "maxSeries(%s)" % formatPathExpressions(seriesList)
  pointStream = (safeMax(points) for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, pointStream)
  result.pathExpression = name
  return [result]
def rangeOfSeries(requestContext, *seriesLists):
  """
  Takes a wildcard seriesList.
  Distills down a set of inputs into the range of the series
  Example:
  .. code-block:: none
    &target=rangeOfSeries(Server*.connections.total)
  """
  seriesList, start, end, step = normalize(seriesLists)
  name = "rangeOfSeries(%s)" % formatPathExpressions(seriesList)
  pointStream = (safeSubtract(max(points), min(points))
                 for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, pointStream)
  result.pathExpression = name
  return [result]
def percentileOfSeries(requestContext, seriesList, n, interpolate=False):
  """
  percentileOfSeries returns a single series which is composed of the n-percentile
  values taken across a wildcard series at each point. Unless `interpolate` is
  set to True, percentile values are actual values contained in one of the
  supplied series.
  """
  if n <= 0:
    raise ValueError('The requested percent is required to be greater than 0')
  name = 'percentilesOfSeries(%s,%g)' % (seriesList[0].pathExpression, n)
  start, end, step = normalize([seriesList])[1:]
  percentiles = [_getPercentile(points, n, interpolate)
                 for points in izip(*seriesList)]
  result = TimeSeries(name, start, end, step, percentiles)
  result.pathExpression = name
  return [result]
def keepLastValue(requestContext, seriesList, limit = INF):
  """
  Takes one metric or a wildcard seriesList, and optionally a limit to the number of 'None' values to skip over.
  Continues the line with the last received value when gaps ('None' values) appear in your data, rather than breaking your line.
  Example:
  .. code-block:: none
    &target=keepLastValue(Server01.connections.handled)
    &target=keepLastValue(Server01.connections.handled, 10)
  """
  for series in seriesList:
    series.name = "keepLastValue(%s)" % (series.name)
    series.pathExpression = series.name
    # Running count of the current gap's length.
    consecutiveNones = 0
    for i,value in enumerate(series):
      series[i] = value
      # No 'keeping' can be done on the first value because we have no idea
      # what came before it.
      if i == 0:
        continue
      if value is None:
        consecutiveNones += 1
      else:
        if 0 < consecutiveNones <= limit:
          # If a non-None value is seen before the limit of Nones is hit,
          # backfill all the missing datapoints with the last known value.
          for index in xrange(i - consecutiveNones, i):
            series[index] = series[i - consecutiveNones - 1]
        # Gap has ended (whether backfilled or too long to fill).
        consecutiveNones = 0
    # If the series ends with some None values, try to backfill a bit to cover it.
    # NOTE(review): this trailing-gap check uses strict '<' while the
    # in-stream check above uses '<=', so a trailing gap of exactly
    # `limit` Nones is NOT backfilled -- confirm whether that asymmetry
    # is intended.
    if 0 < consecutiveNones < limit:
      for index in xrange(len(series) - consecutiveNones, len(series)):
        series[index] = series[len(series) - consecutiveNones - 1]
  return seriesList
def asPercent(requestContext, seriesList, total=None):
  """
  Calculates a percentage of the total of a wildcard series. If `total` is specified,
  each series will be calculated as a percentage of that total. If `total` is not specified,
  the sum of all points in the wildcard series will be used instead.
  The `total` parameter may be a single series or a numeric value.
  Example:
  .. code-block:: none
    &target=asPercent(Server01.connections.{failed,succeeded}, Server01.connections.attempted)
    &target=asPercent(apache01.threads.busy,1500)
    &target=asPercent(Server01.cpu.*.jiffies)
  """
  # Consolidate the inputs to a common step (mutates the series in place);
  # the returned tuple is not needed here.
  normalize([seriesList])
  if total is None:
    # No explicit total: use the pointwise sum of the input series.
    totalValues = [ safeSum(row) for row in izip(*seriesList) ]
    totalText = None # series.pathExpression
  elif isinstance(total, list):
    if len(total) != 1:
      raise ValueError("asPercent second argument must reference exactly 1 series")
    # Re-normalize so the total series shares the inputs' step as well.
    normalize([seriesList, total])
    totalValues = total[0]
    totalText = totalValues.name
  else:
    # Numeric total: replicate the constant across every datapoint.
    totalValues = [total] * len(seriesList[0])
    totalText = str(total)
  resultList = []
  for series in seriesList:
    resultValues = [ safeMul(safeDiv(val, totalVal), 100.0) for val,totalVal in izip(series,totalValues) ]
    name = "asPercent(%s, %s)" % (series.name, totalText or series.pathExpression)
    resultSeries = TimeSeries(name,series.start,series.end,series.step,resultValues)
    resultSeries.pathExpression = name
    resultList.append(resultSeries)
  return resultList
def divideSeries(requestContext, dividendSeriesList, divisorSeriesList):
  """
  Takes a dividend metric and a divisor metric and draws the division result.
  A constant may *not* be passed. To divide by a constant, use the scale()
  function (which is essentially a multiplication operation) and use the inverse
  of the dividend. (Division by 8 = multiplication by 1/8 or 0.125)
  Example:
  .. code-block:: none
    &target=divideSeries(Series.dividends,Series.divisors)
  """
  if len(divisorSeriesList) != 1:
    raise ValueError("divideSeries second argument must reference exactly 1 series")
  divisorSeries = divisorSeriesList[0]
  results = []
  for dividendSeries in dividendSeriesList:
    name = "divideSeries(%s,%s)" % (dividendSeries.name, divisorSeries.name)
    # The block below inlines the normalize() logic for just this pair:
    # consolidate both series to their common step and align the end.
    bothSeries = (dividendSeries, divisorSeries)
    step = reduce(lcm,[s.step for s in bothSeries])
    for s in bothSeries:
      s.consolidate( step / s.step )
    start = min([s.start for s in bothSeries])
    end = max([s.end for s in bothSeries])
    end -= (end - start) % step
    values = ( safeDiv(v1,v2) for v1,v2 in izip(*bothSeries) )
    quotientSeries = TimeSeries(name, start, end, step, values)
    quotientSeries.pathExpression = name
    results.append(quotientSeries)
  return results
def multiplySeries(requestContext, *seriesLists):
  """
  Takes two or more series and multiplies their points. A constant may not be
  used. To multiply by a constant, use the scale() function.
  Example:
  .. code-block:: none
    &target=multiplySeries(Series.dividends,Series.divisors)
  """
  seriesList, start, end, step = normalize(seriesLists)
  if len(seriesList) == 1:
    return seriesList
  name = "multiplySeries(%s)" % ','.join([s.name for s in seriesList])
  products = (safeMul(*points) for points in izip(*seriesList))
  result = TimeSeries(name, start, end, step, products)
  result.pathExpression = name
  return [result]
def weightedAverage(requestContext, seriesListAvg, seriesListWeight, node):
  """
  Takes a series of average values and a series of weights and
  produces a weighted average for all values.
  The corresponding values should share a node as defined
  by the node parameter, 0-indexed.
  Example:
  .. code-block:: none
    &target=weightedAverage(*.transactions.mean,*.transactions.count,0)
  """
  # Pair up average and weight series by their name component at `node`.
  sortedSeries={}
  for seriesAvg, seriesWeight in izip(seriesListAvg , seriesListWeight):
    key = seriesAvg.name.split(".")[node]
    if key not in sortedSeries:
      sortedSeries[key]={}
    sortedSeries[key]['avg']=seriesAvg
    key = seriesWeight.name.split(".")[node]
    if key not in sortedSeries:
      sortedSeries[key]={}
    sortedSeries[key]['weight']=seriesWeight
  productList = []
  for key in sortedSeries.keys():
    # Keys matched on only one side contribute nothing.
    if 'weight' not in sortedSeries[key]:
      continue
    if 'avg' not in sortedSeries[key]:
      continue
    seriesWeight = sortedSeries[key]['weight']
    seriesAvg = sortedSeries[key]['avg']
    productValues = [ safeMul(val1, val2) for val1,val2 in izip(seriesAvg,seriesWeight) ]
    name='product(%s,%s)' % (seriesWeight.name, seriesAvg.name)
    productSeries = TimeSeries(name,seriesAvg.start,seriesAvg.end,seriesAvg.step,productValues)
    productSeries.pathExpression=name
    productList.append(productSeries)
  # weighted average = sum(avg * weight) / sum(weight), pointwise.
  sumProducts=sumSeries(requestContext, productList)[0]
  sumWeights=sumSeries(requestContext, seriesListWeight)[0]
  resultValues = [ safeDiv(val1, val2) for val1,val2 in izip(sumProducts,sumWeights) ]
  name = "weightedAverage(%s, %s)" % (','.join(set(s.pathExpression for s in seriesListAvg)) ,','.join(set(s.pathExpression for s in seriesListWeight)))
  resultSeries = TimeSeries(name,sumProducts.start,sumProducts.end,sumProducts.step,resultValues)
  resultSeries.pathExpression = name
  # NOTE(review): this returns the bare TimeSeries, not a one-element list
  # like the sibling series functions -- confirm callers expect that.
  return resultSeries
def movingMedian(requestContext, seriesList, windowSize):
  """
  Graphs the moving median of a metric (or metrics) over a fixed number of
  past points, or a time interval.
  Takes one metric or a wildcard seriesList followed by a number N of datapoints
  or a quoted string with a length of time like '1hour' or '5min' (See ``from /
  until`` in the render\_api_ for examples of time formats). Graphs the
  median of the preceeding datapoints for each point on the graph. All
  previous datapoints are set to None at the beginning of the graph.
  Example:
  .. code-block:: none
    &target=movingMedian(Server.instance01.threads.busy,10)
    &target=movingMedian(Server.instance*.threads.idle,'5min')
  """
  # A string windowSize is a time offset; convert it to seconds.
  windowInterval = None
  if isinstance(windowSize, basestring):
    delta = parseTimeOffset(windowSize)
    windowInterval = abs(delta.seconds + (delta.days * 86400))
  if windowInterval:
    bootstrapSeconds = windowInterval
  else:
    bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)
  # Fetch extra history so the first datapoints have a full window behind them.
  bootstrapList = _fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
  result = []
  for bootstrap, series in zip(bootstrapList, seriesList):
    if windowInterval:
      windowPoints = windowInterval / series.step
    else:
      windowPoints = int(windowSize)
    if isinstance(windowSize, basestring):
      newName = 'movingMedian(%s,"%s")' % (series.name, windowSize)
    else:
      newName = "movingMedian(%s,%d)" % (series.name, windowPoints)
    newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
    newSeries.pathExpression = newName
    # The bootstrap series is longer than the requested one; `offset` maps
    # requested index i onto the bootstrap index of the same timestamp.
    offset = len(bootstrap) - len(series)
    for i in range(len(series)):
      window = bootstrap[i + offset - windowPoints:i + offset]
      nonNull = [v for v in window if v is not None]
      if nonNull:
        # Upper median: for an even count the higher middle value is used.
        m_index = len(nonNull) / 2
        newSeries.append(sorted(nonNull)[m_index])
      else:
        newSeries.append(None)
    result.append(newSeries)
  return result
def scale(requestContext, seriesList, factor):
  """
  Takes one metric or a wildcard seriesList followed by a constant, and multiplies the datapoint
  by the constant provided at each point.
  Example:
  .. code-block:: none
    &target=scale(Server.instance01.threads.busy,10)
    &target=scale(Server.instance*.threads.busy,10)
  """
  for series in seriesList:
    newName = "scale(%s,%g)" % (series.name, float(factor))
    series.name = newName
    series.pathExpression = newName
    for index, datapoint in enumerate(series):
      series[index] = safeMul(datapoint, factor)
  return seriesList
def invert(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList, and inverts each datapoint (i.e. 1/x).
  Example:
  .. code-block:: none
    &target=invert(Server.instance01.threads.busy)
  """
  for series in seriesList:
    series.name = "invert(%s)" % (series.name)
    for index, datapoint in enumerate(series):
      series[index] = safeDiv(1, datapoint)
  return seriesList
def scaleToSeconds(requestContext, seriesList, seconds):
  """
  Takes one metric or a wildcard seriesList and returns "value per seconds" where
  seconds is a last argument to this functions.
  Useful in conjunction with derivative or integral function if you want
  to normalize its result to a known resolution for arbitrary retentions
  """
  for series in seriesList:
    series.name = "scaleToSeconds(%s,%d)" % (series.name, seconds)
    series.pathExpression = series.name
    # The conversion factor depends only on the series' step, so compute it
    # once per series instead of once per datapoint as before.
    factor = seconds * 1.0 / series.step
    for i, value in enumerate(series):
      series[i] = safeMul(value, factor)
  return seriesList
def absolute(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList and applies the mathematical abs function to each
  datapoint transforming it to its absolute value.
  Example:
  .. code-block:: none
    &target=absolute(Server.instance01.threads.busy)
    &target=absolute(Server.instance*.threads.busy)
  """
  for series in seriesList:
    series.name = "absolute(%s)" % (series.name)
    series.pathExpression = series.name
    for index, datapoint in enumerate(series):
      series[index] = safeAbs(datapoint)
  return seriesList
def offset(requestContext, seriesList, factor):
  """
  Takes one metric, a wildcard seriesList followed by a constant or a single
  time series, and adds the value to each datapoint.
  Example:
  .. code-block:: none
    &target=offset(Server.instance01.threads.busy,10)
    &target=scale(offset(Server.instance01.threads.*.last_change,
        scale(Server.instance01.uptime, -1)),-1)
  """
  # Validate the series-valued form once up front instead of repeating the
  # isinstance/len checks on every iteration of the loop below.
  factor_serie = None
  if isinstance(factor, list):
    if len(factor) != 1:
      raise ValueError("offset second argument must reference exactly 1 series")
    factor_serie = factor[0]
  for series in seriesList:
    if factor_serie is not None:
      series.name = "offset(%s,%s)" % (series.name, factor_serie.name)
      series.pathExpression = series.name
      for i, value in enumerate(series):
        if value is not None:
          series[i] = value + factor_serie[i]
    else:
      series.name = "offset(%s,%g)" % (series.name, float(factor))
      series.pathExpression = series.name
      for i, value in enumerate(series):
        if value is not None:
          series[i] = value + factor
  return seriesList
def offsetToZero(requestContext, seriesList):
  """
  Offsets a metric or wildcard seriesList by subtracting the minimum
  value in the series from each datapoint.
  Useful to compare different series where the values in each series
  may be higher or lower on average but you're only interested in the
  relative difference.
  An example use case is for comparing different round trip time
  results. When measuring RTT (like pinging a server), different
  devices may come back with consistently different results due to
  network latency which will be different depending on how many
  network hops between the probe and the device. To compare different
  devices in the same graph, the network latency to each has to be
  factored out of the results. This is a shortcut that takes the
  fastest response (lowest number in the series) and sets that to zero
  and then offsets all of the other datapoints in that series by that
  amount. This makes the assumption that the lowest response is the
  fastest the device can respond, of course the more datapoints that
  are in the series the more accurate this assumption is.
  Example:
  .. code-block:: none
    &target=offsetToZero(Server.instance01.responseTime)
    &target=offsetToZero(Server.instance*.responseTime)
  """
  for series in seriesList:
    series.name = "offsetToZero(%s)" % (series.name)
    baseline = safeMin(series)
    for index, datapoint in enumerate(series):
      if datapoint is not None:
        series[index] = datapoint - baseline
  return seriesList
def movingAverage(requestContext, seriesList, windowSize):
  """
  Graphs the moving average of a metric (or metrics) over a fixed number of
  past points, or a time interval.
  Takes one metric or a wildcard seriesList followed by a number N of datapoints
  or a quoted string with a length of time like '1hour' or '5min' (See ``from /
  until`` in the render\_api_ for examples of time formats). Graphs the
  average of the preceeding datapoints for each point on the graph. All
  previous datapoints are set to None at the beginning of the graph.
  Example:
  .. code-block:: none
    &target=movingAverage(Server.instance01.threads.busy,10)
    &target=movingAverage(Server.instance*.threads.idle,'5min')
  """
  # A string windowSize is a time offset; convert it to seconds.
  windowInterval = None
  if isinstance(windowSize, basestring):
    delta = parseTimeOffset(windowSize)
    windowInterval = abs(delta.seconds + (delta.days * 86400))
  if windowInterval:
    bootstrapSeconds = windowInterval
  else:
    bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)
  # Fetch extra history so the first datapoints have a full window behind them.
  bootstrapList = _fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
  result = []
  for bootstrap, series in zip(bootstrapList, seriesList):
    if windowInterval:
      windowPoints = windowInterval / series.step
    else:
      windowPoints = int(windowSize)
    if isinstance(windowSize, basestring):
      newName = 'movingAverage(%s,"%s")' % (series.name, windowSize)
    else:
      newName = "movingAverage(%s,%s)" % (series.name, windowSize)
    newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
    newSeries.pathExpression = newName
    # The bootstrap series is longer than the requested one; `offset` maps
    # requested index i onto the bootstrap index of the same timestamp.
    offset = len(bootstrap) - len(series)
    for i in range(len(series)):
      window = bootstrap[i + offset - windowPoints:i + offset]
      newSeries.append(safeAvg(window))
    result.append(newSeries)
  return result
def cumulative(requestContext, seriesList, consolidationFunc='sum'):
  """
  Takes one metric or a wildcard seriesList, and an optional function.
  Valid functions are 'sum', 'average', 'min', and 'max'
  Sets the consolidation function to 'sum' for the given metric seriesList.
  Alias for :func:`consolidateBy(series, 'sum') <graphite.render.functions.consolidateBy>`
  .. code-block:: none
    &target=cumulative(Sales.widgets.largeBlue)
  """
  # NOTE(review): `consolidationFunc` is accepted but ignored -- the call
  # below hard-codes 'sum', matching the documented alias behaviour.
  return consolidateBy(requestContext, seriesList, 'sum')
def consolidateBy(requestContext, seriesList, consolidationFunc):
  """
  Takes one metric or a wildcard seriesList and a consolidation function name.
  Valid function names are 'sum', 'average', 'min', and 'max'
  When a graph is drawn where width of the graph size in pixels is smaller than
  the number of datapoints to be graphed, Graphite consolidates the values to
  to prevent line overlap. The consolidateBy() function changes the consolidation
  function from the default of 'average' to one of 'sum', 'max', or 'min'. This is
  especially useful in sales graphs, where fractional values make no sense and a 'sum'
  of consolidated values is appropriate.
  .. code-block:: none
    &target=consolidateBy(Sales.widgets.largeBlue, 'sum')
    &target=consolidateBy(Servers.web01.sda1.free_space, 'max')
  """
  for series in seriesList:
    # datalib will throw an exception, so it's not necessary to validate here
    series.consolidationFunc = consolidationFunc
    newName = 'consolidateBy(%s,"%s")' % (series.name, series.consolidationFunc)
    series.name = newName
    series.pathExpression = newName
  return seriesList
def derivative(requestContext, seriesList):
  """
  This is the opposite of the integral function. This is useful for taking a
  running total metric and calculating the delta between subsequent data points.
  This function does not normalize for periods of time, as a true derivative would.
  Instead see the perSecond() function to calculate a rate of change over time.
  Example:
  .. code-block:: none
    &target=derivative(company.server.application01.ifconfig.TXPackets)
  Each time you run ifconfig, the RX and TXPackets are higher (assuming there
  is network traffic.) By applying the derivative function, you can get an
  idea of the packets per minute sent or received, even though you're only
  recording the total.
  """
  results = []
  for series in seriesList:
    deltas = []
    previous = None
    for current in series:
      if previous is None or current is None:
        deltas.append(None)
      else:
        deltas.append(current - previous)
      previous = current
    newName = "derivative(%s)" % series.name
    newSeries = TimeSeries(newName, series.start, series.end, series.step, deltas)
    newSeries.pathExpression = newName
    results.append(newSeries)
  return results
def perSecond(requestContext, seriesList, maxValue=None):
  """
  Derivative adjusted for the series time interval, i.e. a per-second rate.

  Useful for taking a running total metric and showing how many requests
  per second were handled.

  If a counter wraps (the value decreases) and maxValue is supplied, the rate
  across the rollover is reconstructed assuming the counter wrapped at
  maxValue; otherwise the datapoint is emitted as None.

  Example:

  .. code-block:: none

    &target=perSecond(company.server.application01.ifconfig.TXPackets)

  """
  results = []
  for series in seriesList:
    newValues = []
    prev = None
    # BUG FIX: hoisted out of the loop (it is constant per series) and made a
    # float — on Python 2, `diff / step` with two ints silently truncated the
    # rate toward zero.
    step = float(series.step)
    for val in series:
      if None in (prev, val):
        newValues.append(None)
        prev = val
        continue
      diff = val - prev
      if diff >= 0:
        newValues.append(diff / step)
      elif maxValue is not None and maxValue >= val:
        # counter wrapped: distance up to maxValue plus the part after rollover
        newValues.append(((maxValue - prev) + val + 1) / step)
      else:
        newValues.append(None)
      prev = val
    newName = "perSecond(%s)" % series.name
    newSeries = TimeSeries(newName, series.start, series.end, series.step, newValues)
    newSeries.pathExpression = newName
    results.append(newSeries)
  return results
def integral(requestContext, seriesList):
  """
  Shows the sum over time — a running cumulative total of the datapoints.
  Useful for finding totals or trends in metrics that are collected per minute.
  None datapoints stay None and do not reset the running total.

  Example:

  .. code-block:: none

    &target=integral(company.sales.perMinute)

  """
  results = []
  for series in seriesList:
    runningTotal = 0.0
    accumulated = []
    for value in series:
      if value is None:
        accumulated.append(None)
      else:
        runningTotal += value
        accumulated.append(runningTotal)
    newName = "integral(%s)" % series.name
    newSeries = TimeSeries(newName, series.start, series.end, series.step, accumulated)
    newSeries.pathExpression = newName
    results.append(newSeries)
  return results
def nonNegativeDerivative(requestContext, seriesList, maxValue=None):
  """
  Same as the derivative function, but a negative delta is treated as a
  counter wrap or reset: with maxValue given, the delta across the rollover is
  reconstructed; otherwise the datapoint becomes None.

  Example:

  .. code-block:: none

    &target=nonNegativederivative(company.server.application01.ifconfig.TXPackets)

  """
  results = []
  for series in seriesList:
    deltas = []
    previous = None
    for current in series:
      if previous is None or current is None:
        deltas.append(None)
      else:
        delta = current - previous
        if delta >= 0:
          deltas.append(delta)
        elif maxValue is not None and maxValue >= current:
          # counter wrapped at maxValue: count up to the wrap, then past it
          deltas.append((maxValue - previous) + current + 1)
        else:
          deltas.append(None)
      previous = current
    newName = "nonNegativeDerivative(%s)" % series.name
    newSeries = TimeSeries(newName, series.start, series.end, series.step, deltas)
    newSeries.pathExpression = newName
    results.append(newSeries)
  return results
def stacked(requestContext,seriesLists,stackName='__DEFAULT__'):
  """
  Changes the given metrics so they are drawn stacked, without forcing the
  global stacked area mode — a mixed stacked and non-stacked graph can be
  made. An optional stack name allows several independent stacks, e.g. for
  input and output metrics.

  Example:

  .. code-block:: none

    &target=stacked(company.server.application01.ifconfig.TXPackets, 'tx')

  """
  # Running totals per stack are carried in the request context so successive
  # stacked() targets pile on top of each other.
  if 'totalStack' in requestContext:
    totalStack = requestContext['totalStack'].get(stackName, [])
  else:
    requestContext['totalStack'] = {}
    totalStack = []
  results = []
  for series in seriesLists:
    cumulative = []
    for index, value in enumerate(series):
      if len(totalStack) <= index:
        totalStack.append(0)
      if value is None:
        cumulative.append(None)
      else:
        totalStack[index] += value
        cumulative.append(totalStack[index])
    # Work-around for the case when legend is set
    if stackName == '__DEFAULT__':
      newName = "stacked(%s)" % series.name
    else:
      newName = series.name
    newSeries = TimeSeries(newName, series.start, series.end, series.step, cumulative)
    newSeries.options['stacked'] = True
    newSeries.pathExpression = newName
    results.append(newSeries)
  requestContext['totalStack'][stackName] = totalStack
  return results
def areaBetween(requestContext, seriesList):
  """
  Draws the area in between the two series in seriesList: the lower series is
  made invisible and the upper one stacked on top of it.
  """
  assert len(seriesList) == 2, "areaBetween series argument must reference *exactly* 2 series"
  lower, upper = seriesList
  lower.options['stacked'] = True
  lower.options['invisible'] = True
  upper.options['stacked'] = True
  lower.name = upper.name = "areaBetween(%s)" % upper.pathExpression
  return seriesList
def aliasSub(requestContext, seriesList, search, replace):
  """
  Runs series names through a regex search/replace.

  .. code-block:: none

    &target=aliasSub(ip.*TCP*,"^.*TCP(\d+)","\\1")

  """
  try:
    # a single series (anything with a readable/writable .name attribute)
    seriesList.name = re.sub(search, replace, seriesList.name)
  except AttributeError:
    # a plain list of series: rewrite each name
    for series in seriesList:
      series.name = re.sub(search, replace, series.name)
  return seriesList
def alias(requestContext, seriesList, newName):
  """
  Takes one metric or a wildcard seriesList and a string in quotes.
  Prints the string instead of the metric name in the legend.

  .. code-block:: none

    &target=alias(Sales.widgets.largeBlue,"Large Blue Widgets")

  """
  try:
    # single series: set the name directly
    seriesList.name = newName
  except AttributeError:
    # plain list of series: every entry gets the same legend label
    for series in seriesList:
      series.name = newName
  return seriesList
def cactiStyle(requestContext, seriesList, system=None):
  """
  Takes a series list and modifies the aliases to provide column aligned
  output with Current, Max, and Min values in the style of cacti. Optionally
  takes a "system" value to apply unit formatting in the same style as the
  Y-axis.
  NOTE: column alignment only works with monospace fonts such as terminus.
  .. code-block:: none
    &target=cactiStyle(ganglia.*.net.bytes_out,"si")
  """
  if 0 == len(seriesList):
      return seriesList
  # fmt renders one number, with or without unit suffixes (si/binary)
  if system:
      fmt = lambda x:"%.2f%s" % format_units(x,system=system)
  else:
      fmt = lambda x:"%.2f"%x
  # Column widths: the widest value in each column across all series.
  # `or 3` substitutes a small placeholder when the safe* helper returns
  # None so len(fmt(...)) still works; +3 adds padding between columns.
  nameLen = max([0] + [len(getattr(series,"name")) for series in seriesList])
  lastLen = max([0] + [len(fmt(int(safeLast(series) or 3))) for series in seriesList]) + 3
  maxLen = max([0] + [len(fmt(int(safeMax(series) or 3))) for series in seriesList]) + 3
  minLen = max([0] + [len(fmt(int(safeMin(series) or 3))) for series in seriesList]) + 3
  for series in seriesList:
      name = series.name  # NOTE(review): assigned but never used below
      last = safeLast(series)
      maximum = safeMax(series)
      minimum = safeMin(series)
      # Missing statistics are shown as NAN rather than a formatted number
      if last is None:
        last = NAN
      else:
        last = fmt(float(last))
      if maximum is None:
        maximum = NAN
      else:
        maximum = fmt(float(maximum))
      if minimum is None:
        minimum = NAN
      else:
        minimum = fmt(float(minimum))
      # Negative field widths (%*s with a negative width) left-justify each
      # column to the widths computed above.
      series.name = "%*s Current:%*s Max:%*s Min:%*s " % \
          (-nameLen, series.name,
            -lastLen, last,
            -maxLen, maximum,
            -minLen, minimum)
  return seriesList
def aliasByNode(requestContext, seriesList, *nodes):
  """
  Takes a seriesList and applies an alias derived from one or more "node"
  portions of the target name. Node indices are 0 indexed.

  .. code-block:: none

    &target=aliasByNode(ganglia.*.cpu.load5,1)

  """
  if isinstance(nodes, int):  # defensive: *nodes normally arrives as a tuple
    nodes=[nodes]
  for series in seriesList:
    # pull the bare metric path out of any surrounding function-call syntax
    match = re.search('(?:.*\()?(?P<name>[-\w*\.]+)(?:,|\)?.*)?', series.name)
    pieces = match.groups()[0].split('.')
    series.name = '.'.join(pieces[node] for node in nodes)
  return seriesList
def aliasByMetric(requestContext, seriesList):
  """
  Takes a seriesList and applies an alias derived from the base metric name
  (the last dot-separated node).

  .. code-block:: none

    &target=aliasByMetric(carbon.agents.graphite.creates)

  """
  for series in seriesList:
    series.name = series.name.rsplit('.', 1)[-1]
  return seriesList
def legendValue(requestContext, seriesList, *valueTypes):
  """
  Takes one metric or a wildcard seriesList and a string in quotes.
  Appends a value to the metric name in the legend. Currently one or several of: `last`, `avg`,
  `total`, `min`, `max`.
  The last argument can be `si` (default) or `binary`, in that case values will be formatted in the
  corresponding system.
  .. code-block:: none
    &target=legendValue(Sales.widgets.largeBlue, 'avg', 'max', 'si')
  """
  def last(s):
    "Work-around for the missing last point"
    v = s[-1]
    if v is None:
      return s[-2]
    return v

  valueFuncs = {
    'avg': lambda s: safeDiv(safeSum(s), safeLen(s)),
    'total': safeSum,
    'min': safeMin,
    'max': safeMax,
    'last': last
  }
  system = None
  # BUG FIX: guard the empty case — calling legendValue() with no value types
  # used to raise IndexError on valueTypes[-1].
  if valueTypes and valueTypes[-1] in ('si', 'binary'):
    system = valueTypes[-1]
    valueTypes = valueTypes[:-1]
  for valueType in valueTypes:
    # unknown value types render as "(?)" rather than failing
    valueFunc = valueFuncs.get(valueType, lambda s: '(?)')
    if system is None:
      for series in seriesList:
        series.name += " (%s: %s)" % (valueType, valueFunc(series))
    else:
      for series in seriesList:
        value = valueFunc(series)
        formatted = None
        if value is not None:
          formatted = "%.2f%s" % format_units(abs(value), system=system)
        series.name = "%-20s%-5s%-10s" % (series.name, valueType, formatted)
  return seriesList
def alpha(requestContext, seriesList, alpha):
  """
  Assigns the given alpha transparency setting (a float between 0 and 1) to
  every series in the list.
  """
  for series in seriesList:
    series.options['alpha'] = alpha
  return seriesList
def color(requestContext, seriesList, theColor):
  """
  Assigns the given color to every series in the list.

  Example:

  .. code-block:: none

    &target=color(collectd.hostname.cpu.0.user, 'green')
    &target=color(collectd.hostname.cpu.0.system, 'ff0000')
    &target=color(collectd.hostname.cpu.0.idle, 'gray')
    &target=color(collectd.hostname.cpu.0.idle, '6464ffaa')

  """
  for series in seriesList:
    series.color = theColor
  return seriesList
def substr(requestContext, seriesList, start=0, stop=0):
  """
  Treats each metric name as a dot-separated list and keeps only elements
  start..stop-1 (or start..end when stop is 0). Surrounding function-call
  syntax and trailing function arguments are stripped first.

  Example:

  .. code-block:: none

    &target=substr(carbon.agents.hostname.avgUpdateTime,2,4)

  The label would be printed as "hostname.avgUpdateTime".
  """
  begin = int(start)
  end = int(stop)
  for series in seriesList:
    # strip any wrapping "func( ... )" from the rendered name
    left = series.name.rfind('(') + 1
    right = series.name.find(')')
    if right < 0:
      right = len(series.name)+1
    cleanName = series.name[left:right:]
    parts = cleanName.split('.')
    picked = parts[begin::] if end == 0 else parts[begin:end:]
    joined = '.'.join(picked)
    # substr(func(a.b,'c'),1) becomes b instead of b,'c'
    series.name = re.sub(',.*$', '', joined)
  return seriesList
def logarithm(requestContext, seriesList, base=10):
  """
  Draws log_base of each datapoint; base defaults to 10. Missing and
  non-positive values become None (the logarithm is undefined there).

  Example:

  .. code-block:: none

    &target=log(carbon.agents.hostname.avgUpdateTime,2)

  """
  results = []
  for series in seriesList:
    transformed = [
      math.log(v, base) if (v is not None and v > 0) else None
      for v in series
    ]
    newName = "log(%s, %s)" % (series.name, base)
    logSeries = TimeSeries(newName, series.start, series.end, series.step, transformed)
    logSeries.pathExpression = newName
    results.append(logSeries)
  return results
def maximumAbove(requestContext, seriesList, n):
  """
  Draws only the metrics whose maximum value is above n.

  Example:

  .. code-block:: none

    &target=maximumAbove(system.interface.eth*.packetsSent,1000)

  This would only display interfaces which sent more than 1000 packets/min.
  """
  return [series for series in seriesList if max(series) > n]
def minimumAbove(requestContext, seriesList, n):
  """
  Draws only the metrics whose minimum value is above n.

  Example:

  .. code-block:: none

    &target=minimumAbove(system.interface.eth*.packetsSent,1000)

  This would only display interfaces which sent more than 1000 packets/min.
  """
  return [series for series in seriesList if min(series) > n]
def maximumBelow(requestContext, seriesList, n):
  """
  Draws only the metrics whose maximum value is at or below n (note: the
  comparison is <=, so series exactly at n are kept).

  Example:

  .. code-block:: none

    &target=maximumBelow(system.interface.eth*.packetsSent,1000)

  This would only display interfaces which sent less than 1000 packets/min.
  """
  return [series for series in seriesList if max(series) <= n]
def highestCurrent(requestContext, seriesList, n):
  """
  Out of all metrics passed, draws only the N metrics with the highest value
  at the end of the time period specified.

  Example:

  .. code-block:: none

    &target=highestCurrent(server*.instance*.threads.busy,5)

  Draws the 5 servers with the highest busy threads.
  """
  byCurrent = sorted(seriesList, key=safeLast)
  return byCurrent[-n:]
def highestMax(requestContext, seriesList, n):
  """
  Out of all metrics passed, draws only the N metrics with the highest
  maximum value in the time period specified, ordered from highest to lowest.

  Example:

  .. code-block:: none

    &target=highestMax(server*.instance*.threads.busy,5)

  Draws the top 5 servers who have had the most busy threads during the time
  period specified.
  """
  # select the top N by maximum, then present them highest-first
  topN = sorted(seriesList, key=lambda s: max(s))[-n:]
  topN.sort(key=lambda s: max(s), reverse=True)
  return topN
def lowestCurrent(requestContext, seriesList, n):
  """
  Out of all metrics passed, draws only the N metrics with the lowest value
  at the end of the time period specified.

  Example:

  .. code-block:: none

    &target=lowestCurrent(server*.instance*.threads.busy,5)

  Draws the 5 servers with the least busy threads right now.
  """
  byCurrent = sorted(seriesList, key=safeLast)
  return byCurrent[:n]
def currentAbove(requestContext, seriesList, n):
  """
  Draws only the metrics whose value at the end of the time period is at or
  above N.

  Example:

  .. code-block:: none

    &target=currentAbove(server*.instance*.threads.busy,50)

  Draws the servers with more than 50 busy threads.
  """
  results = []
  for series in seriesList:
    if safeLast(series) >= n:
      results.append(series)
  return results
def currentBelow(requestContext, seriesList, n):
  """
  Draws only the metrics whose value at the end of the time period is at or
  below N.

  Example:

  .. code-block:: none

    &target=currentBelow(server*.instance*.threads.busy,3)

  Draws the servers with less than 3 busy threads.
  """
  results = []
  for series in seriesList:
    if safeLast(series) <= n:
      results.append(series)
  return results
def highestAverage(requestContext, seriesList, n):
  """
  Out of all metrics passed, draws only the top N metrics with the highest
  average value for the time period specified.

  Example:

  .. code-block:: none

    &target=highestAverage(server*.instance*.threads.busy,5)

  Draws the top 5 servers with the highest average value.
  """
  def seriesAverage(s):
    return safeDiv(safeSum(s), safeLen(s))
  return sorted(seriesList, key=seriesAverage)[-n:]
def lowestAverage(requestContext, seriesList, n):
  """
  Out of all metrics passed, draws only the bottom N metrics with the lowest
  average value for the time period specified.

  Example:

  .. code-block:: none

    &target=lowestAverage(server*.instance*.threads.busy,5)

  Draws the bottom 5 servers with the lowest average value.
  """
  def seriesAverage(s):
    return safeDiv(safeSum(s), safeLen(s))
  return sorted(seriesList, key=seriesAverage)[:n]
def averageAbove(requestContext, seriesList, n):
  """
  Draws only the metrics whose average value over the time period is at or
  above N.

  Example:

  .. code-block:: none

    &target=averageAbove(server*.instance*.threads.busy,25)

  Draws the servers with average values above 25.
  """
  kept = []
  for series in seriesList:
    if safeDiv(safeSum(series), safeLen(series)) >= n:
      kept.append(series)
  return kept
def averageBelow(requestContext, seriesList, n):
  """
  Draws only the metrics whose average value over the time period is at or
  below N.

  Example:

  .. code-block:: none

    &target=averageBelow(server*.instance*.threads.busy,25)

  Draws the servers with average values below 25.
  """
  kept = []
  for series in seriesList:
    if safeDiv(safeSum(series), safeLen(series)) <= n:
      kept.append(series)
  return kept
def _getPercentile(points, n, interpolate=False):
"""
Percentile is calculated using the method outlined in the NIST Engineering
Statistics Handbook:
http://www.itl.nist.gov/div898/handbook/prc/section2/prc252.htm
"""
sortedPoints = sorted([ p for p in points if p is not None])
if len(sortedPoints) == 0:
return None
fractionalRank = (n/100.0) * (len(sortedPoints) + 1)
rank = int(fractionalRank)
rankFraction = fractionalRank - rank
if not interpolate:
rank += int(math.ceil(rankFraction))
if rank == 0:
percentile = sortedPoints[0]
elif rank - 1 == len(sortedPoints):
percentile = sortedPoints[-1]
else:
percentile = sortedPoints[rank - 1] # Adjust for 0-index
if interpolate:
if rank != len(sortedPoints): # if a next value exists
nextValue = sortedPoints[rank]
percentile = percentile + rankFraction * (nextValue - percentile)
return percentile
def nPercentile(requestContext, seriesList, n):
  """Returns n-percent of each series in the seriesList."""
  assert n, 'The requested percent is required to be greater than 0'
  results = []
  for s in seriesList:
    # Sorted copy of the series with None values stripped out.
    cleaned = sorted(item for item in s if item is not None)
    s_copy = TimeSeries(s.name, s.start, s.end, s.step, cleaned)
    if not s_copy:
      continue  # Skip this series because it is empty.
    perc_val = _getPercentile(s_copy, n)
    if perc_val is None:
      continue
    # Emit a flat line at the percentile value across the whole period.
    name = 'nPercentile(%s, %g)' % (s_copy.name, n)
    point_count = int((s.end - s.start) / s.step)
    perc_series = TimeSeries(name, s_copy.start, s_copy.end, s_copy.step, [perc_val] * point_count)
    perc_series.pathExpression = name
    results.append(perc_series)
  return results
def averageOutsidePercentile(requestContext, seriesList, n):
  """
  Removes series whose average lies strictly inside the symmetric
  [100-n, n] percentile band of all the series averages.
  """
  averages = [safeDiv(safeSum(s), safeLen(s)) for s in seriesList]
  if n < 50:
    n = 100 - n
  lowPercentile = _getPercentile(averages, 100 - n)
  highPercentile = _getPercentile(averages, n)
  return [s for s in seriesList
          if not lowPercentile < safeDiv(safeSum(s), safeLen(s)) < highPercentile]
def removeBetweenPercentile(requestContext, seriesList, n):
  """
  Removes series that do not have at least one value lying outside the
  per-timestamp [100-n, n] percentile band of all the series.
  """
  if n < 50:
    n = 100 - n
  # BUG FIX: materialize the transposed columns. zip() is consumed twice
  # below; on Python 3 it is a one-shot iterator, so the second comprehension
  # silently saw no columns. list() preserves the Python 2 behavior exactly.
  transposed = list(zip(*seriesList))
  lowPercentiles = [_getPercentile(col, 100 - n) for col in transposed]
  highPercentiles = [_getPercentile(col, n) for col in transposed]
  # keep a series if any of its points falls outside the band at its timestamp
  return [l for l in seriesList
          if sum([not lowPercentiles[val_i] < val < highPercentiles[val_i]
                  for (val_i, val) in enumerate(l)]) > 0]
def removeAbovePercentile(requestContext, seriesList, n):
  """
  Removes data above the nth percentile from the series or list of series
  provided. Values above this percentile are assigned a value of None.
  """
  for s in seriesList:
    s.name = 'removeAbovePercentile(%s, %d)' % (s.name, n)
    s.pathExpression = s.name
    # flat percentile series; any single value of it is the threshold
    threshold = nPercentile(requestContext, [s], n)[0][0]
    for index, val in enumerate(s):
      if val > threshold:
        s[index] = None
  return seriesList
def removeAboveValue(requestContext, seriesList, n):
  """
  Removes data above the given threshold from the series or list of series
  provided. Values above the threshold are assigned a value of None.
  """
  for s in seriesList:
    s.name = 'removeAboveValue(%s, %d)' % (s.name, n)
    s.pathExpression = s.name
    for index, val in enumerate(s):
      if val > n:
        s[index] = None
  return seriesList
def removeBelowPercentile(requestContext, seriesList, n):
  """
  Removes data below the nth percentile from the series or list of series
  provided. Values below this percentile are assigned a value of None.
  """
  for s in seriesList:
    s.name = 'removeBelowPercentile(%s, %d)' % (s.name, n)
    s.pathExpression = s.name
    # flat percentile series; any single value of it is the threshold
    threshold = nPercentile(requestContext, [s], n)[0][0]
    for index, val in enumerate(s):
      if val < threshold:
        s[index] = None
  return seriesList
def removeBelowValue(requestContext, seriesList, n):
  """
  Removes data below the given threshold from the series or list of series
  provided. Values below the threshold are assigned a value of None.
  """
  for s in seriesList:
    s.name = 'removeBelowValue(%s, %d)' % (s.name, n)
    s.pathExpression = s.name
    for index, val in enumerate(s):
      if val < n:
        s[index] = None
  return seriesList
def limit(requestContext, seriesList, n):
  """
  Only draw the first N metrics. Useful when testing a wildcard in a metric.

  Example:

  .. code-block:: none

    &target=limit(server*.instance*.memory.free,5)

  Draws only the first 5 instance's memory free.
  """
  return seriesList[:n]
def sortByName(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList.

  Sorts the list of metrics by the metric name (ascending), in place.
  """
  # NOTE(review): a second, identical-purpose sortByName definition later in
  # this file shadows this one at import time — confirm which is intended.
  # BUG FIX: list.sort(cmp_function) is Python-2-only; sorting with a key
  # produces the same ascending-by-name order (stable) and works on Python 3.
  seriesList.sort(key=lambda series: series.name)
  return seriesList
def sortByTotal(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList.

  Sorts the list of metrics, in place, in descending order by the sum of
  values across the time period specified.
  """
  # BUG FIX: list.sort(cmp_function) is Python-2-only; key=safeSum with
  # reverse=True yields the same descending-by-total order (stable for ties).
  seriesList.sort(key=safeSum, reverse=True)
  return seriesList
def sortByMaxima(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList.

  Sorts the list of metrics, in place, in descending order by the maximum
  value across the time period specified. Useful with the &areaMode=all
  parameter, to keep the lowest value lines visible.

  Example:

  .. code-block:: none

    &target=sortByMaxima(server*.instance*.memory.free)

  """
  # BUG FIX: list.sort(cmp_function) is Python-2-only; key=max with
  # reverse=True yields the same descending-by-maximum order.
  seriesList.sort(key=max, reverse=True)
  return seriesList
def sortByMinima(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList.

  Sorts the list of metrics in ascending order by the lowest value across the
  time period specified, keeping only series whose maximum is positive
  (original behavior).

  Example:

  .. code-block:: none

    &target=sortByMinima(server*.instance*.memory.free)

  """
  newSeries = [series for series in seriesList if max(series) > 0]
  # BUG FIX: list.sort(cmp_function) is Python-2-only; key=min yields the
  # same ascending-by-minimum order and works on Python 3.
  newSeries.sort(key=min)
  return newSeries
def sortByName(requestContext, seriesList):
  """
  Takes one metric or a wildcard seriesList.

  Sorts the list of metrics by the name (ascending), in place.
  """
  # NOTE(review): this duplicates (and shadows) an earlier sortByName
  # definition in this file — confirm and remove one of them.
  # BUG FIX: the old comparator (`1 if x.name > y.name else -1`) was passed
  # positionally to list.sort (Python-2-only) and never reported equality;
  # a key sort gives the same ascending-by-name order and is stable.
  seriesList.sort(key=lambda series: series.name)
  return seriesList
def useSeriesAbove(requestContext, seriesList, value, search, replace):
  """
  Compares the maximum of each series against the given `value`. If the series
  maximum is greater than `value`, the regular expression search and replace is
  applied against the series name to plot a related metric.

  e.g. given useSeriesAbove(ganglia.metric1.reqs,10,'reqs','time'),
  the response time metric will be plotted only when the maximum value of the
  corresponding request/s metric is > 10

  .. code-block:: none

    &target=useSeriesAbove(ganglia.metric1.reqs,10,"reqs","time")

  """
  selected = []
  for series in seriesList:
    if max(series) <= value:
      continue
    relatedName = re.sub(search, replace, series.name)
    fetched = evaluateTarget(requestContext, relatedName)
    if fetched is not None and len(fetched) > 0:
      selected.append(fetched[0])
  return selected
def mostDeviant(requestContext, seriesList, n):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Draws the N most deviant metrics.

  Series are ranked by the population variance of their datapoints (no square
  root is taken, but the ranking is identical to ranking by standard
  deviation since sqrt is monotonic).

  Example:

  .. code-block:: none

    &target=mostDeviant(5, server*.instance*.memory.free)

  Draws the 5 instances furthest from the average memory free.
  """
  ranked = []
  for series in seriesList:
    mean = safeDiv(safeSum(series), safeLen(series))
    if mean is None:
      continue
    squareSum = sum([(value - mean) ** 2 for value in series if value is not None])
    sigma = safeDiv(squareSum, safeLen(series))
    if sigma is None:
      continue
    ranked.append((sigma, series))
  ranked.sort(key=lambda pair: pair[0], reverse=True)  # sort by sigma
  return [series for (sigma, series) in ranked][:n]  # the n most deviant series
def stdev(requestContext, seriesList, points, windowTolerance=0.1):
  """
  Takes one metric or a wildcard seriesList followed by an integer N.
  Draw the Standard Deviation of all metrics passed for the past N datapoints.
  If the ratio of null points in the window is greater than windowTolerance,
  skip the calculation. The default for windowTolerance is 0.1 (up to 10% of points
  in the window can be missing). Note that if this is set to 0.0, it will cause large
  gaps in the output anywhere a single point is missing.
  Example:
  .. code-block:: none
    &target=stdev(server*.instance*.threads.busy,30)
    &target=stdev(server*.instance*.cpu.system,30,0.0)
  """
  # For this we take the standard deviation in terms of the moving average
  # and the moving average of series squares.
  for (seriesIndex,series) in enumerate(seriesList):
    stddevSeries = TimeSeries("stddev(%s,%d)" % (series.name, int(points)), series.start, series.end, series.step, [])
    stddevSeries.pathExpression = "stddev(%s,%d)" % (series.name, int(points))
    # Incrementally maintained window statistics — one pass, O(len(series))
    # instead of recomputing sums for every window position.
    validPoints = 0
    currentSum = 0
    currentSumOfSquares = 0
    for (index, newValue) in enumerate(series):
      # Mark whether we've reached our window size - dont drop points out otherwise
      if index < points:
        bootstrapping = True
        droppedValue = None
      else:
        bootstrapping = False
        droppedValue = series[index - points]
      # Track non-None points in window
      if not bootstrapping and droppedValue is not None:
        validPoints -= 1
      if newValue is not None:
        validPoints += 1
      # Remove the value that just dropped out of the window
      if not bootstrapping and droppedValue is not None:
        currentSum -= droppedValue
        currentSumOfSquares -= droppedValue**2
      # Add in the value that just popped in the window
      if newValue is not None:
        currentSum += newValue
        currentSumOfSquares += newValue**2
      # Only emit a value when enough of the window is populated;
      # NOTE(review): the tolerance test uses validPoints/points >= tolerance,
      # i.e. windowTolerance is the minimum *present* fraction here, although
      # the docstring describes it as the allowed *missing* ratio — confirm.
      if validPoints > 0 and \
        float(validPoints)/points >= windowTolerance:
        try:
          # population std dev via sqrt(n*sum(x^2) - (sum x)^2) / n;
          # ValueError (negative radicand from float error) yields None
          deviation = math.sqrt(validPoints * currentSumOfSquares - currentSum**2)/validPoints
        except ValueError:
          deviation = None
        stddevSeries.append(deviation)
      else:
        stddevSeries.append(None)
    # Replace the input series in place with its windowed std dev
    seriesList[seriesIndex] = stddevSeries
  return seriesList
def secondYAxis(requestContext, seriesList):
  """
  Graph the series on the secondary Y axis.
  """
  for series in seriesList:
    series.options['secondYAxis'] = True
    series.name = 'secondYAxis(%s)' % series.name
  return seriesList
def _fetchWithBootstrap(requestContext, seriesList, **delta_kwargs):
  """
  Request the same data but with a bootstrap period at the beginning.

  delta_kwargs are forwarded to timedelta() (e.g. days=7) to size the
  bootstrap window. Returns new TimeSeries whose values are the bootstrap
  data followed by the original data, resampled to the original step when
  the steps differ.
  """
  bootstrapContext = requestContext.copy()
  bootstrapContext['startTime'] = requestContext['startTime'] - timedelta(**delta_kwargs)
  bootstrapContext['endTime'] = requestContext['startTime']
  bootstrapList = []
  for series in seriesList:
    if series.pathExpression in [b.pathExpression for b in bootstrapList]:
      # This pathExpression returns multiple series and we already fetched it
      continue
    bootstraps = evaluateTarget(bootstrapContext, series.pathExpression)
    bootstrapList.extend(bootstraps)
  newSeriesList = []
  # NOTE(review): pairing bootstrapList with seriesList positionally assumes
  # evaluateTarget returns series in the same order as the originals —
  # confirm this holds for wildcard pathExpressions.
  for bootstrap, original in zip(bootstrapList, seriesList):
    newValues = []
    if bootstrap.step != original.step:
      ratio = bootstrap.step / original.step
      for value in bootstrap:
        #XXX For series with aggregationMethod = sum this should also
        # divide by the ratio to bring counts to the same time unit
        # ...but we have no way of knowing whether that's the case
        newValues.extend([ value ] * ratio)
    else:
      newValues.extend(bootstrap)
    newValues.extend(original)
    newSeries = TimeSeries(original.name, bootstrap.start, original.end, original.step, newValues)
    # BUG FIX: previously assigned `series.pathExpression` — the leaked loop
    # variable from the fetch loop above, i.e. always the LAST input series —
    # instead of the series actually being merged here.
    newSeries.pathExpression = original.pathExpression
    newSeriesList.append(newSeries)
  return newSeriesList
def _trimBootstrap(bootstrap, original):
  """
  Trim the bootstrap period off the front of this series so it matches the
  original in covered time span.
  """
  # number of bootstrap points that correspond to the original's time span
  length_limit = (len(original) * original.step) / bootstrap.step
  trim_start = bootstrap.end - (length_limit * bootstrap.step)
  return TimeSeries(bootstrap.name, trim_start, bootstrap.end, bootstrap.step,
                    bootstrap[-length_limit:])
def holtWintersIntercept(alpha,actual,last_season,last_intercept,last_slope):
  """Level-smoothing step: blend the deseasonalized actual with the projected trend."""
  deseasonalized = actual - last_season
  projected = last_intercept + last_slope
  return alpha * deseasonalized + (1 - alpha) * projected
def holtWintersSlope(beta,intercept,last_intercept,last_slope):
  """Trend-smoothing step: blend the latest level change with the previous slope."""
  level_change = intercept - last_intercept
  return beta * level_change + (1 - beta) * last_slope
def holtWintersSeasonal(gamma,actual,intercept,last_season):
  """Seasonal-smoothing step: blend the detrended actual with last season's value."""
  detrended = actual - intercept
  return gamma * detrended + (1 - gamma) * last_season
def holtWintersDeviation(gamma,actual,prediction,last_seasonal_dev):
  """Smooth the absolute forecast error; a missing prediction counts as 0."""
  if prediction is None:
    prediction = 0
  forecast_error = math.fabs(actual - prediction)
  return gamma * forecast_error + (1 - gamma) * last_seasonal_dev
def holtWintersAnalysis(series):
  """
  Run a Holt-Winters (triple exponential smoothing) pass over `series`.

  Returns a dict with:
    'predictions' : TimeSeries of one-step-ahead forecasts
    'deviations'  : TimeSeries of smoothed absolute forecast errors
    'intercepts', 'slopes', 'seasonals' : raw per-point model state lists
  """
  # smoothing factors: level & seasonal (alpha/gamma), trend (beta)
  alpha = gamma = 0.1
  beta = 0.0035
  # season is currently one day
  season_length = (24*60*60) / series.step
  intercept = 0
  slope = 0
  pred = 0
  intercepts = list()
  slopes = list()
  seasonals = list()
  predictions = list()
  deviations = list()
  def getLastSeasonal(i):
    # seasonal component from exactly one season ago (0 before the first season)
    j = i - season_length
    if j >= 0:
      return seasonals[j]
    return 0
  def getLastDeviation(i):
    # deviation from exactly one season ago (0 before the first season)
    j = i - season_length
    if j >= 0:
      return deviations[j]
    return 0
  last_seasonal = 0
  last_seasonal_dev = 0
  next_last_seasonal = 0
  next_pred = None
  for i,actual in enumerate(series):
    if actual is None:
      # missing input values break all the math
      # do the best we can and move on
      intercepts.append(None)
      slopes.append(0)
      seasonals.append(0)
      predictions.append(next_pred)
      deviations.append(0)
      next_pred = None
      continue
    if i == 0:
      last_intercept = actual
      last_slope = 0
      # seed the first prediction as the first actual
      prediction = actual
    else:
      last_intercept = intercepts[-1]
      last_slope = slopes[-1]
      if last_intercept is None:
        # previous point was missing; restart the level at the current actual
        last_intercept = actual
      prediction = next_pred
    last_seasonal = getLastSeasonal(i)
    next_last_seasonal = getLastSeasonal(i+1)
    last_seasonal_dev = getLastDeviation(i)
    # standard Holt-Winters update equations (see the holtWinters* helpers)
    intercept = holtWintersIntercept(alpha,actual,last_seasonal
            ,last_intercept,last_slope)
    slope = holtWintersSlope(beta,intercept,last_intercept,last_slope)
    seasonal = holtWintersSeasonal(gamma,actual,intercept,last_seasonal)
    # one-step-ahead forecast for the NEXT point
    next_pred = intercept + slope + next_last_seasonal
    deviation = holtWintersDeviation(gamma,actual,prediction,last_seasonal_dev)
    intercepts.append(intercept)
    slopes.append(slope)
    seasonals.append(seasonal)
    predictions.append(prediction)
    deviations.append(deviation)
  # make the new forecast series
  forecastName = "holtWintersForecast(%s)" % series.name
  forecastSeries = TimeSeries(forecastName, series.start, series.end
    , series.step, predictions)
  forecastSeries.pathExpression = forecastName
  # make the new deviation series
  deviationName = "holtWintersDeviation(%s)" % series.name
  deviationSeries = TimeSeries(deviationName, series.start, series.end
    , series.step, deviations)
  deviationSeries.pathExpression = deviationName
  results = { 'predictions': forecastSeries
        , 'deviations': deviationSeries
        , 'intercepts': intercepts
        , 'slopes': slopes
        , 'seasonals': seasonals
        }
  return results
def holtWintersForecast(requestContext, seriesList):
  """
  Performs a Holt-Winters forecast using the series as input data. Data from
  one week previous to the series is used to bootstrap the initial forecast.
  """
  bootstrapList = _fetchWithBootstrap(requestContext, seriesList, days=7)
  forecasts = []
  for bootstrap, series in zip(bootstrapList, seriesList):
    analysis = holtWintersAnalysis(bootstrap)
    forecasts.append(_trimBootstrap(analysis['predictions'], series))
  return forecasts
def holtWintersConfidenceBands(requestContext, seriesList, delta=3):
    """
    Performs a Holt-Winters forecast using the series as input data and plots
    upper and lower bands with the predicted forecast deviations.
    """
    results = []
    bootstrapList = _fetchWithBootstrap(requestContext, seriesList, days=7)
    for bootstrap, series in zip(bootstrapList, seriesList):
        analysis = holtWintersAnalysis(bootstrap)
        forecast = _trimBootstrap(analysis['predictions'], series)
        deviation = _trimBootstrap(analysis['deviations'], series)
        upperBand = []
        lowerBand = []
        # Walk the forecast and its deviation in lockstep; a None in either
        # produces a gap in both bands.
        for forecast_item, deviation_item in zip(forecast, deviation):
            if forecast_item is None or deviation_item is None:
                upperBand.append(None)
                lowerBand.append(None)
            else:
                scaled_deviation = delta * deviation_item
                upperBand.append(forecast_item + scaled_deviation)
                lowerBand.append(forecast_item - scaled_deviation)
        upperName = "holtWintersConfidenceUpper(%s)" % series.name
        lowerName = "holtWintersConfidenceLower(%s)" % series.name
        upperSeries = TimeSeries(upperName, forecast.start, forecast.end,
                                 forecast.step, upperBand)
        lowerSeries = TimeSeries(lowerName, forecast.start, forecast.end,
                                 forecast.step, lowerBand)
        # Keep the source pathExpression so nested functions resolve it.
        upperSeries.pathExpression = series.pathExpression
        lowerSeries.pathExpression = series.pathExpression
        # Lower band first — holtWintersAberration relies on this order.
        results.append(lowerSeries)
        results.append(upperSeries)
    return results
def holtWintersAberration(requestContext, seriesList, delta=3):
    """
    Performs a Holt-Winters forecast using the series as input data and plots the
    positive or negative deviation of the series data from the forecast.
    """
    results = []
    for series in seriesList:
        # holtWintersConfidenceBands returns [lower, upper] per series.
        lowerBand, upperBand = holtWintersConfidenceBands(requestContext,
                                                          [series], delta)
        aberration = []
        for i, actual in enumerate(series):
            upper = upperBand[i]
            lower = lowerBand[i]
            if actual is None:
                aberration.append(0)
            elif upper is not None and actual > upper:
                aberration.append(actual - upper)
            elif lower is not None and actual < lower:
                aberration.append(actual - lower)
            else:
                # Within the bands: no aberration.
                aberration.append(0)
        newName = "holtWintersAberration(%s)" % series.name
        results.append(TimeSeries(newName, series.start, series.end,
                                  series.step, aberration))
    return results
def holtWintersConfidenceArea(requestContext, seriesList, delta=3):
    """
    Performs a Holt-Winters forecast using the series as input data and plots the
    area between the upper and lower bands of the predicted forecast deviations.
    """
    bands = holtWintersConfidenceBands(requestContext, seriesList, delta)
    area_series = areaBetween(requestContext, bands)
    # areaBetween names its results after itself; relabel for this function.
    for s in area_series:
        s.name = s.name.replace('areaBetween', 'holtWintersConfidenceArea')
    return area_series
def drawAsInfinite(requestContext, seriesList):
    """
    Takes one metric or a wildcard seriesList.
    If the value is zero, draw the line at 0. If the value is above zero, draw
    the line at infinity. If the value is null or less than zero, do not draw
    the line.
    Useful for displaying on/off metrics, such as exit codes. (0 = success,
    anything else = failure.)
    Example:
    .. code-block:: none
      drawAsInfinite(Testing.script.exitCode)
    """
    # Tag each series for the renderer and rename it to reflect the wrap.
    for s in seriesList:
        s.options['drawAsInfinite'] = True
        s.name = 'drawAsInfinite(%s)' % s.name
    return seriesList
def lineWidth(requestContext, seriesList, width):
    """
    Takes one metric or a wildcard seriesList, followed by a float F.
    Draw the selected metrics with a line width of F, overriding the default
    value of 1, or the &lineWidth=X.X parameter.
    Useful for highlighting a single metric out of many, or having multiple
    line widths in one graph.
    Example:
    .. code-block:: none
      &target=lineWidth(server01.instance01.memory.free,5)
    """
    # Only the draw option changes; names are left untouched.
    for s in seriesList:
        s.options['lineWidth'] = width
    return seriesList
def dashed(requestContext, *seriesList):
    """
    Takes one metric or a wildcard seriesList, followed by a float F.
    Draw the selected metrics with a dotted line with segments of length F
    If omitted, the default length of the segments is 5.0
    Example:
    .. code-block:: none
      &target=dashed(server01.instance01.memory.free,2.5)
    """
    # Optional second positional argument is the dash segment length.
    if len(seriesList) == 2:
        dashLength = seriesList[1]
    else:
        dashLength = 5
    for series in seriesList[0]:
        # %g keeps fractional dash lengths (e.g. 2.5) intact in the name;
        # the previous %d silently truncated them to an integer.
        series.name = 'dashed(%s, %g)' % (series.name, dashLength)
        series.options['dashed'] = dashLength
    return seriesList[0]
def timeStack(requestContext, seriesList, timeShiftUnit, timeShiftStart, timeShiftEnd):
    """
    Takes one metric or a wildcard seriesList, followed by a quoted string with the
    length of time (See ``from / until`` in the render\_api_ for examples of time formats).
    Also takes a start multiplier and end multiplier for the length of time
    create a seriesList which is composed the orginal metric series stacked with time shifts
    starting time shifts from the start multiplier through the end multiplier
    Useful for looking at history, or feeding into seriesAverage or seriesStdDev
    Example:
    .. code-block:: none
      &target=timeStack(Sales.widgets.largeBlue,"1d",0,7) # create a series for today and each of the previous 7 days
    """
    # Default to negative. parseTimeOffset defaults to +
    if timeShiftUnit[0].isdigit():
        timeShiftUnit = '-' + timeShiftUnit
    delta = parseTimeOffset(timeShiftUnit)
    # if len(seriesList) > 1, they will all have the same pathExpression,
    # which is all we care about.
    series = seriesList[0]
    results = []
    timeShiftStartint = int(timeShiftStart)
    timeShiftEndint = int(timeShiftEnd)
    # NOTE(review): range() excludes the end multiplier, so (0, 7) produces
    # shifts 0..6 — confirm that matches the documented example's intent.
    for shft in range(timeShiftStartint, timeShiftEndint):
        # Re-fetch the target over each shifted window.
        myContext = requestContext.copy()
        innerDelta = delta * shft
        myContext['startTime'] = requestContext['startTime'] + innerDelta
        myContext['endTime'] = requestContext['endTime'] + innerDelta
        for shiftedSeries in evaluateTarget(myContext, series.pathExpression):
            shiftedSeries.name = 'timeShift(%s, %s, %s)' % (shiftedSeries.name, timeShiftUnit, shft)
            shiftedSeries.pathExpression = shiftedSeries.name
            # Re-anchor the shifted data onto the base series' time range so
            # all shifts stack on the same x-axis.
            shiftedSeries.start = series.start
            shiftedSeries.end = series.end
            results.append(shiftedSeries)
    return results
def timeShift(requestContext, seriesList, timeShift, resetEnd=True):
    """
    Takes one metric or a wildcard seriesList, followed by a quoted string with the
    length of time (See ``from / until`` in the render\_api_ for examples of time formats).
    Draws the selected metrics shifted in time. If no sign is given, a minus sign ( - ) is
    implied which will shift the metric back in time. If a plus sign ( + ) is given, the
    metric will be shifted forward in time.
    Will reset the end date range automatically to the end of the base stat unless
    resetEnd is False. Example case is when you timeshift to last week and have the graph
    date range set to include a time in the future, will limit this timeshift to pretend
    ending at the current time. If resetEnd is False, will instead draw full range including
    future time.
    Useful for comparing a metric against itself at a past periods or correcting data
    stored at an offset.
    Example:
    .. code-block:: none
      &target=timeShift(Sales.widgets.largeBlue,"7d")
      &target=timeShift(Sales.widgets.largeBlue,"-7d")
      &target=timeShift(Sales.widgets.largeBlue,"+1h")
    """
    # Default to negative. parseTimeOffset defaults to +
    if timeShift[0].isdigit():
        timeShift = '-' + timeShift
    delta = parseTimeOffset(timeShift)
    # Re-fetch the target over the shifted window.
    myContext = requestContext.copy()
    myContext['startTime'] = requestContext['startTime'] + delta
    myContext['endTime'] = requestContext['endTime'] + delta
    results = []
    if len(seriesList) > 0:
        # if len(seriesList) > 1, they will all have the same pathExpression,
        # which is all we care about.
        series = seriesList[0]
        for shiftedSeries in evaluateTarget(myContext, series.pathExpression):
            shiftedSeries.name = 'timeShift(%s, %s)' % (shiftedSeries.name, timeShift)
            if resetEnd:
                # Clamp the shifted series to end where the base series ends.
                shiftedSeries.end = series.end
            else:
                # Preserve the shifted series' own duration, re-anchored at
                # the base series' start.
                shiftedSeries.end = shiftedSeries.end - shiftedSeries.start + series.start
            shiftedSeries.start = series.start
            results.append(shiftedSeries)
    return results
def constantLine(requestContext, value):
    """
    Takes a float F.
    Draws a horizontal line at value F across the graph.
    Example:
    .. code-block:: none
      &target=constantLine(123.456)
    """
    start = timestamp(requestContext['startTime'])
    end = timestamp(requestContext['endTime'])
    # One step spanning the whole window; the two datapoints anchor the line
    # at each end. (Dividing by 1.0 forces the step to a float.)
    step = (end - start) / 1.0
    series = TimeSeries(str(value), start, end, step, [value, value])
    return [series]
def aggregateLine(requestContext, seriesList, func='avg'):
    """
    Draws a horizontal line based the function applied to the series.
    Note: By default, the graphite renderer consolidates data points by
    averaging data points over time. If you are using the 'min' or 'max'
    function for aggregateLine, this can cause an unusual gap in the
    line drawn by this function and the data itself. To fix this, you
    should use the consolidateBy() function with the same function
    argument you are using for aggregateLine. This will ensure that the
    proper data points are retained and the graph should line up
    correctly.
    Example:
    .. code-block:: none
      &target=aggregateLineSeries(server.connections.total, 'avg')
    """
    t_funcs = {'avg': safeAvg, 'min': safeMin, 'max': safeMax}
    if func not in t_funcs:
        raise ValueError("Invalid function %s" % func)
    value = t_funcs[func](seriesList[0])
    # The safe* helpers yield None for an entirely-null series; the old
    # '%d' formatting raised TypeError on None and truncated float
    # aggregates. %g preserves fractional values.
    if value is None:
        name = 'aggregateLine(%s,None)' % seriesList[0].pathExpression
    else:
        name = 'aggregateLine(%s,%g)' % (seriesList[0].pathExpression, value)
    series = constantLine(requestContext, value)[0]
    series.name = name
    return [series]
def threshold(requestContext, value, label=None, color=None):
    """
    Takes a float F, followed by a label (in double quotes) and a color.
    (See ``bgcolor`` in the render\_api_ for valid color names & formats.)
    Draws a horizontal line at value F across the graph.
    Example:
    .. code-block:: none
      &target=threshold(123.456, "omgwtfbbq", red)
    """
    # Reuse constantLine, then override the cosmetic attributes if given.
    series = constantLine(requestContext, value)[0]
    if label:
        series.name = label
    if color:
        series.color = color
    return [series]
def transformNull(requestContext, seriesList, default=0):
    """
    Takes a metric or wild card seriesList and an optional value
    to transform Nulls to. Default is 0. This method compliments
    drawNullAsZero flag in graphical mode but also works in text only
    mode.
    Example:
    .. code-block:: none
      &target=transformNull(webapp.pages.*.views,-1)
    This would take any page that didn't have values and supply negative 1 as a default.
    Any other numeric value may be used as well.
    """
    for series in seriesList:
        series.name = "transformNull(%s,%g)" % (series.name, default)
        series.pathExpression = series.name
        # Replace the datapoints in place so the same TimeSeries object
        # (with its metadata) is returned to the caller.
        replaced = [default if v is None else v for v in series]
        series[:] = replaced
    return seriesList
def isNonNull(requestContext, seriesList):
    """
    Takes a metric or wild card seriesList and counts up how many
    non-null values are specified. This is useful for understanding
    which metrics have data at a given point in time (ie, to count
    which servers are alive).
    Example:
    .. code-block:: none
      &target=isNonNull(webapp.pages.*.views)
    Returns a seriesList where 1 is specified for non-null values, and
    0 is specified for null values.
    """
    for series in seriesList:
        series.name = "isNonNull(%s)" % (series.name)
        series.pathExpression = series.name
        # Map every datapoint to a presence flag, in place.
        flags = [0 if v is None else 1 for v in series]
        series[:] = flags
    return seriesList
def upperBound(requestContext, seriesList, boundary):
    """
    Takes a metric or wild card seriesList and returns min(value, boundary) for
    non-null values. This is useful for when you only care about the value
    up to a certain point - for example if you are logging error codes and you
    only care if the value is >= 1 and not the value itself.
    Example:
    .. code-block:: none
      &target=upperBound(application.myapp.*.exitcode, 1.0)
    Returns a seriesList where the maximum value is the boundary or lower.
    """
    for series in seriesList:
        # %g keeps fractional boundaries (e.g. 1.5) intact in the series
        # name; the previous %d silently truncated them.
        series.name = "upperBound(%s, %g)" % (series.name, boundary)
        series.pathExpression = series.name
        # Clamp in place; nulls pass through unchanged.
        values = [None if v is None else min(v, boundary) for v in series]
        series[:] = values
    return seriesList
def lowerBound(requestContext, seriesList, boundary):
    """
    Takes a metric or wild card seriesList and returns max(value, boundary) for
    non-null values. This is useful for when you only care about the value
    up to a certain point - for example if you are logging error codes and you
    only care if the value is <= -1 and not the value itself.
    Example:
    .. code-block:: none
      &target=lowerBound(application.myapp.*.exitcode, -1.0)
    Returns a seriesList where the minimum value is the boundary or greater.
    """
    for series in seriesList:
        # %g keeps fractional boundaries (e.g. -1.5) intact in the series
        # name; the previous %d silently truncated them.
        series.name = "lowerBound(%s, %g)" % (series.name, boundary)
        series.pathExpression = series.name
        # Clamp in place; nulls pass through unchanged.
        values = [None if v is None else max(v, boundary) for v in series]
        series[:] = values
    return seriesList
def identity(requestContext, name):
    """
    Identity function:
    Returns datapoints where the value equals the timestamp of the datapoint.
    Useful when you have another series where the value is a timestamp, and
    you want to compare it to the time of the datapoint, to render an age
    Example:
    .. code-block:: none
      &target=identity("The.time.series")
    This would create a series named "The.time.series" that contains points where
    x(t) == t.
    """
    step = 60
    # time.mktime returns a float, but range() requires integers (an error
    # on Python 3 and only a DeprecationWarning on Python 2) — coerce here.
    start = int(time.mktime(requestContext["startTime"].timetuple()))
    end = int(time.mktime(requestContext["endTime"].timetuple()))
    values = range(start, end, step)
    series = TimeSeries(name, start, end, step, values)
    series.pathExpression = 'identity("%s")' % name
    return [series]
def countSeries(requestContext, *seriesLists):
    """
    Draws a horizontal line representing the number of nodes found in the seriesList.
    .. code-block:: none
      &target=countSeries(carbon.agents.*.*)
    """
    # Normalize all series onto a common start/end/step before counting.
    (seriesList, start, end, step) = normalize(seriesLists)
    name = "countSeries(%s)" % formatPathExpressions(seriesList)
    # Each row is the tuple of datapoints at one timestamp; its length is
    # the number of series contributing at that point.
    values = (int(len(row)) for row in izip(*seriesList))
    series = TimeSeries(name, start, end, step, values)
    series.pathExpression = name
    return [series]
def group(requestContext, *seriesLists):
    """
    Takes an arbitrary number of seriesLists and adds them to a single seriesList. This is used
    to pass multiple seriesLists to a function which only takes one
    """
    # Flatten all argument lists into one, preserving order.
    flattened = []
    for sub in seriesLists:
        flattened += sub
    return flattened
def mapSeries(requestContext, seriesList, mapNode):
    """
    Takes a seriesList and maps it to a list of sub-seriesList. Each sub-seriesList has the
    given mapNode in common.
    Example:
    .. code-block:: none
      map(servers.*.cpu.*,1) =>
        [
          servers.server1.cpu.*,
          servers.server2.cpu.*,
          ...
          servers.serverN.cpu.*
        ]
    """
    groups = {}
    # Track first-seen order of keys so output order matches input order.
    order = []
    for s in seriesList:
        k = s.name.split(".")[mapNode]
        bucket = groups.get(k)
        if bucket is None:
            groups[k] = [s]
            order.append(k)
        else:
            bucket.append(s)
    return [groups[k] for k in order]
def reduceSeries(requestContext, seriesLists, reduceFunction, reduceNode, *reduceMatchers):
    """
    Takes a list of seriesLists and reduces it to a list of series by means of the reduceFunction.
    Reduction is performed by matching the reduceNode in each series against the list of
    reduceMatchers. The each series is then passed to the reduceFunction as arguments in the order
    given by reduceMatchers. The reduceFunction should yield a single series.
    Example:
    .. code-block:: none
      reduce(map(servers.*.disk.*,1),3,"asPercent","bytes_used","total_bytes") =>
        asPercent(servers.server1.disk.bytes_used,servers.server1.disk.total_bytes),
        asPercent(servers.server2.disk.bytes_used,servers.server2.disk.total_bytes),
        ...
        asPercent(servers.serverN.disk.bytes_used,servers.serverN.disk.total_bytes)
    The resulting list of series are aliased so that they can easily be nested in other functions.
    In the above example, the resulting series names would become:
    .. code-block:: none
      servers.server1.disk.reduce.asPercent,
      servers.server2.disk.reduce.asPercent,
      ...
      servers.serverN.disk.reduce.asPercent
    """
    metaSeries = {}
    # keys preserves first-seen group order for the returned list.
    keys = []
    for seriesList in seriesLists:
        for series in seriesList:
            nodes = series.name.split('.')
            node = nodes[reduceNode]
            # Name of the reduced output series for this group.
            reduceSeriesName = '.'.join(nodes[0:reduceNode]) + '.reduce.' + reduceFunction
            if node in reduceMatchers:
                if reduceSeriesName not in metaSeries:
                    # One slot per matcher, in matcher order, so the reduce
                    # function receives its arguments in the documented order.
                    metaSeries[reduceSeriesName] = [None] * len(reduceMatchers)
                    keys.append(reduceSeriesName)
                i = reduceMatchers.index(node)
                metaSeries[reduceSeriesName][i] = series
    # Apply the reduce function to each assembled group and relabel.
    for key in keys:
        metaSeries[key] = SeriesFunctions[reduceFunction](requestContext, metaSeries[key])[0]
        metaSeries[key].name = key
    return [metaSeries[key] for key in keys]
def groupByNode(requestContext, seriesList, nodeNum, callback):
    """
    Takes a serieslist and maps a callback to subgroups within as defined by a common node
    .. code-block:: none
      &target=groupByNode(ganglia.by-function.*.*.cpu.load5,2,"sumSeries")
    Would return multiple series which are each the result of applying the "sumSeries" function
    to groups joined on the second node (0 indexed) resulting in a list of targets like
    sumSeries(ganglia.by-function.server1.*.cpu.load5),sumSeries(ganglia.by-function.server2.*.cpu.load5),...
    """
    metaSeries = {}
    # keys preserves first-seen group order for the returned list.
    keys = []
    for series in seriesList:
        key = series.name.split(".")[nodeNum]
        # Membership test on the dict itself is O(1); the previous
        # `key not in metaSeries.keys()` built a fresh key list per series.
        if key not in metaSeries:
            metaSeries[key] = [series]
            keys.append(key)
        else:
            metaSeries[key].append(series)
    # Collapse each group with the named series function and relabel the
    # result with the grouping node.
    for key in keys:
        metaSeries[key] = SeriesFunctions[callback](requestContext,
                                                    metaSeries[key])[0]
        metaSeries[key].name = key
    return [metaSeries[key] for key in keys]
def exclude(requestContext, seriesList, pattern):
    """
    Takes a metric or a wildcard seriesList, followed by a regular expression
    in double quotes. Excludes metrics that match the regular expression.
    Example:
    .. code-block:: none
      &target=exclude(servers*.instance*.threads.busy,"server02")
    """
    regex = re.compile(pattern)
    kept = []
    for s in seriesList:
        if regex.search(s.name) is None:
            kept.append(s)
    return kept
def grep(requestContext, seriesList, pattern):
    """
    Takes a metric or a wildcard seriesList, followed by a regular expression
    in double quotes. Excludes metrics that don't match the regular expression.
    Example:
    .. code-block:: none
      &target=grep(servers*.instance*.threads.busy,"server02")
    """
    regex = re.compile(pattern)
    # Keep only the series whose name contains a match.
    return [s for s in seriesList if regex.search(s.name) is not None]
def smartSummarize(requestContext, seriesList, intervalString, func='sum', alignToFrom=False):
    """
    Smarter experimental version of summarize.
    The alignToFrom parameter has been deprecated, it no longer has any effect.
    Alignment happens automatically for days, hours, and minutes.
    """
    if alignToFrom:
        log.info("Deprecated parameter 'alignToFrom' is being ignored.")
    results = []
    delta = parseTimeOffset(intervalString)
    interval = delta.seconds + (delta.days * 86400)
    # Adjust the start time to fit an entire day for intervals >= 1 day
    requestContext = requestContext.copy()
    s = requestContext['startTime']
    if interval >= DAY:
        requestContext['startTime'] = datetime(s.year, s.month, s.day)
    elif interval >= HOUR:
        requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour)
    elif interval >= MINUTE:
        requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour, s.minute)
    # Re-fetch each series over the aligned window and splice the new data
    # into the existing TimeSeries objects in place.
    for i, series in enumerate(seriesList):
        # XXX: breaks with summarize(metric.{a,b})
        # each series.pathExpression == metric.{a,b}
        newSeries = evaluateTarget(requestContext, series.pathExpression)[0]
        series[0:len(series)] = newSeries
        series.start = newSeries.start
        series.end = newSeries.end
        series.step = newSeries.step
    for series in seriesList:
        buckets = {}  # bucket index -> list of non-null values in it
        timestamps = range(int(series.start), int(series.end), int(series.step))
        datapoints = zip(timestamps, series)
        # Populate buckets
        for (timestamp, value) in datapoints:
            bucketInterval = int((timestamp - series.start) / interval)
            if bucketInterval not in buckets:
                buckets[bucketInterval] = []
            if value is not None:
                buckets[bucketInterval].append(value)
        newValues = []
        # Aggregate each bucket with the requested function; empty buckets
        # become None (a gap).
        for timestamp in range(series.start, series.end, interval):
            bucketInterval = int((timestamp - series.start) / interval)
            bucket = buckets.get(bucketInterval, [])
            if bucket:
                if func == 'avg':
                    newValues.append(float(sum(bucket)) / float(len(bucket)))
                elif func == 'last':
                    newValues.append(bucket[len(bucket) - 1])
                elif func == 'max':
                    newValues.append(max(bucket))
                elif func == 'min':
                    newValues.append(min(bucket))
                else:
                    newValues.append(sum(bucket))
            else:
                newValues.append(None)
        newName = "smartSummarize(%s, \"%s\", \"%s\")" % (series.name, intervalString, func)
        # NOTE(review): bucketInterval here is left over from the last loop
        # iteration; if the aligned window is empty this raises NameError —
        # confirm callers never produce an empty range.
        alignedEnd = series.start + (bucketInterval * interval) + interval
        newSeries = TimeSeries(newName, series.start, alignedEnd, interval, newValues)
        newSeries.pathExpression = newName
        results.append(newSeries)
    return results
def summarize(requestContext, seriesList, intervalString, func='sum', alignToFrom=False):
    """
    Summarize the data into interval buckets of a certain size.
    By default, the contents of each interval bucket are summed together. This is
    useful for counters where each increment represents a discrete event and
    retrieving a "per X" value requires summing all the events in that interval.
    Specifying 'avg' instead will return the mean for each bucket, which can be more
    useful when the value is a gauge that represents a certain value in time.
    'max', 'min' or 'last' can also be specified.
    By default, buckets are caculated by rounding to the nearest interval. This
    works well for intervals smaller than a day. For example, 22:32 will end up
    in the bucket 22:00-23:00 when the interval=1hour.
    Passing alignToFrom=true will instead create buckets starting at the from
    time. In this case, the bucket for 22:32 depends on the from time. If
    from=6:30 then the 1hour bucket for 22:32 is 22:30-23:30.
    Example:
    .. code-block:: none
      &target=summarize(counter.errors, "1hour") # total errors per hour
      &target=summarize(nonNegativeDerivative(gauge.num_users), "1week") # new users per week
      &target=summarize(queue.size, "1hour", "avg") # average queue size per hour
      &target=summarize(queue.size, "1hour", "max") # maximum queue size during each hour
      &target=summarize(metric, "13week", "avg", true)&from=midnight+20100101 # 2010 Q1-4
    """
    results = []
    delta = parseTimeOffset(intervalString)
    interval = delta.seconds + (delta.days * 86400)
    for series in seriesList:
        buckets = {}  # bucket key -> list of non-null values in it
        timestamps = range(int(series.start), int(series.end), int(series.step))
        datapoints = zip(timestamps, series)
        for (timestamp, value) in datapoints:
            if alignToFrom:
                # Bucket index relative to the series start.
                bucketInterval = int((timestamp - series.start) / interval)
            else:
                # Bucket keyed by the timestamp rounded down to the interval.
                bucketInterval = timestamp - (timestamp % interval)
            if bucketInterval not in buckets:
                buckets[bucketInterval] = []
            if value is not None:
                buckets[bucketInterval].append(value)
        if alignToFrom:
            newStart = series.start
            newEnd = series.end
        else:
            # Round the window outward to whole intervals.
            newStart = series.start - (series.start % interval)
            newEnd = series.end - (series.end % interval) + interval
        newValues = []
        for timestamp in range(newStart, newEnd, interval):
            if alignToFrom:
                # Track the last emitted timestamp; adjusted after the loop.
                newEnd = timestamp
                bucketInterval = int((timestamp - series.start) / interval)
            else:
                bucketInterval = timestamp - (timestamp % interval)
            bucket = buckets.get(bucketInterval, [])
            if bucket:
                if func == 'avg':
                    newValues.append(float(sum(bucket)) / float(len(bucket)))
                elif func == 'last':
                    newValues.append(bucket[len(bucket) - 1])
                elif func == 'max':
                    newValues.append(max(bucket))
                elif func == 'min':
                    newValues.append(min(bucket))
                else:
                    newValues.append(sum(bucket))
            else:
                newValues.append(None)
        if alignToFrom:
            newEnd += interval
        newName = "summarize(%s, \"%s\", \"%s\"%s)" % (series.name, intervalString, func, alignToFrom and ", true" or "")
        newSeries = TimeSeries(newName, newStart, newEnd, interval, newValues)
        newSeries.pathExpression = newName
        results.append(newSeries)
    return results
def hitcount(requestContext, seriesList, intervalString, alignToInterval=False):
    """
    Estimate hit counts from a list of time series.
    This function assumes the values in each time series represent
    hits per second. It calculates hits per some larger interval
    such as per day or per hour. This function is like summarize(),
    except that it compensates automatically for different time scales
    (so that a similar graph results from using either fine-grained
    or coarse-grained records) and handles rarely-occurring events
    gracefully.
    """
    results = []
    delta = parseTimeOffset(intervalString)
    interval = int(delta.seconds + (delta.days * 86400))
    if alignToInterval:
        # Snap the request start down to a whole day/hour/minute so buckets
        # line up with calendar boundaries, then re-fetch the data in place.
        requestContext = requestContext.copy()
        s = requestContext['startTime']
        if interval >= DAY:
            requestContext['startTime'] = datetime(s.year, s.month, s.day)
        elif interval >= HOUR:
            requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour)
        elif interval >= MINUTE:
            requestContext['startTime'] = datetime(s.year, s.month, s.day, s.hour, s.minute)
        for i, series in enumerate(seriesList):
            newSeries = evaluateTarget(requestContext, series.pathExpression)[0]
            intervalCount = int((series.end - series.start) / interval)
            series[0:len(series)] = newSeries
            series.start = newSeries.start
            series.end = newSeries.start + (intervalCount * interval) + interval
            series.step = newSeries.step
    for series in seriesList:
        length = len(series)  # kept as-is (unused)
        step = int(series.step)
        bucket_count = int(math.ceil(float(series.end - series.start) / interval))
        buckets = [[] for _ in range(bucket_count)]
        # Anchor buckets at the series end so partial data lands in the
        # earliest bucket.
        newStart = int(series.end - bucket_count * interval)
        for i, value in enumerate(series):
            if value is None:
                continue
            # Each datapoint covers [start_time, start_time + step); spread
            # its hits (value hits/sec * seconds) across the buckets that
            # span overlaps.
            start_time = int(series.start + i * step)
            start_bucket, start_mod = divmod(start_time - newStart, interval)
            end_time = start_time + step
            end_bucket, end_mod = divmod(end_time - newStart, interval)
            if end_bucket >= bucket_count:
                end_bucket = bucket_count - 1
                end_mod = interval
            if start_bucket == end_bucket:
                # All of the hits go to a single bucket.
                if start_bucket >= 0:
                    buckets[start_bucket].append(value * (end_mod - start_mod))
            else:
                # Spread the hits among 2 or more buckets.
                if start_bucket >= 0:
                    buckets[start_bucket].append(value * (interval - start_mod))
                hits_per_bucket = value * interval
                for j in range(start_bucket + 1, end_bucket):
                    buckets[j].append(hits_per_bucket)
                if end_mod > 0:
                    buckets[end_bucket].append(value * end_mod)
        newValues = []
        for bucket in buckets:
            if bucket:
                newValues.append(sum(bucket))
            else:
                newValues.append(None)
        newName = 'hitcount(%s, "%s"%s)' % (series.name, intervalString, alignToInterval and ", true" or "")
        newSeries = TimeSeries(newName, newStart, series.end, interval, newValues)
        newSeries.pathExpression = newName
        results.append(newSeries)
    return results
def timeFunction(requestContext, name):
    """
    Short Alias: time()
    Just returns the timestamp for each X value. T
    Example:
    .. code-block:: none
      &target=time("The.time.series")
    This would create a series named "The.time.series" that contains in Y the same
    value (in seconds) as X.
    """
    step = 60
    delta = timedelta(seconds=step)
    values = []
    # Emit one epoch-seconds datapoint per minute across the window.
    current = requestContext["startTime"]
    while current < requestContext["endTime"]:
        values.append(time.mktime(current.timetuple()))
        current += delta
    series = TimeSeries(name,
                        int(time.mktime(requestContext["startTime"].timetuple())),
                        int(time.mktime(requestContext["endTime"].timetuple())),
                        step, values)
    series.pathExpression = name
    return [series]
def sinFunction(requestContext, name, amplitude=1):
    """
    Short Alias: sin()
    Just returns the sine of the current time. The optional amplitude parameter
    changes the amplitude of the wave.
    Example:
    .. code-block:: none
      &target=sin("The.time.series", 2)
    This would create a series named "The.time.series" that contains sin(x)*2.
    """
    step = 60
    delta = timedelta(seconds=step)
    values = []
    # One scaled sine sample per minute across the window.
    current = requestContext["startTime"]
    while current < requestContext["endTime"]:
        values.append(math.sin(time.mktime(current.timetuple())) * amplitude)
        current += delta
    return [TimeSeries(name,
                       int(time.mktime(requestContext["startTime"].timetuple())),
                       int(time.mktime(requestContext["endTime"].timetuple())),
                       step, values)]
def randomWalkFunction(requestContext, name):
    """
    Short Alias: randomWalk()
    Returns a random walk starting at 0. This is great for testing when there is
    no real data in whisper.
    Example:
    .. code-block:: none
      &target=randomWalk("The.time.series")
    This would create a series named "The.time.series" that contains points where
    x(t) == x(t-1)+random()-0.5, and x(0) == 0.
    """
    step = 60
    delta = timedelta(seconds=step)
    values = []
    # Walk one zero-mean random step per minute, starting at 0.
    level = 0
    current = requestContext["startTime"]
    while current < requestContext["endTime"]:
        values.append(level)
        level += random.random() - 0.5
        current += delta
    return [TimeSeries(name,
                       int(time.mktime(requestContext["startTime"].timetuple())),
                       int(time.mktime(requestContext["endTime"].timetuple())),
                       step, values)]
def events(requestContext, *tags):
    """
    Returns the number of events at this point in time. Usable with
    drawAsInfinite.
    Example:
    .. code-block:: none
      &target=events("tag-one", "tag-two")
      &target=events("*")
    Returns all events tagged as "tag-one" and "tag-two" and the second one
    returns all events.
    """
    def to_epoch(datetime_object):
        # Convert a timezone-naive datetime to epoch seconds.
        return int(time.mktime(datetime_object.timetuple()))
    step = 1
    name = "events(" + ", ".join(tags) + ")"
    # A single "*" tag means "all events" — the model query expects None.
    if tags == ("*",):
        tags = None
    # Django returns database timestamps in timezone-ignorant datetime objects
    # so we use epoch seconds and do the conversion ourselves
    start_timestamp = to_epoch(requestContext["startTime"])
    start_timestamp = start_timestamp - start_timestamp % step
    end_timestamp = to_epoch(requestContext["endTime"])
    end_timestamp = end_timestamp - end_timestamp % step
    points = (end_timestamp - start_timestamp) / step
    events = models.Event.find_events(datetime.fromtimestamp(start_timestamp),
                                      datetime.fromtimestamp(end_timestamp),
                                      tags=tags)
    # One slot per step; count how many events fall into each slot, with
    # None (not 0) marking slots that saw no events.
    values = [None] * points
    for event in events:
        event_timestamp = to_epoch(event.when)
        value_offset = (event_timestamp - start_timestamp) / step
        if values[value_offset] is None:
            values[value_offset] = 1
        else:
            values[value_offset] += 1
    result_series = TimeSeries(name, start_timestamp, end_timestamp, step, values, 'sum')
    result_series.pathExpression = name
    return [result_series]
def pieAverage(requestContext, series):
    # Mean of the non-null datapoints; None when the series is entirely null.
    return safeDiv(safeSum(series), safeLen(series))
def pieMaximum(requestContext, series):
    # Plain max() over the datapoints. NOTE(review): assumes the series is
    # non-empty; None handling relies on Python 2 ordering (None sorts
    # lowest) — confirm intended.
    return max(series)
def pieMinimum(requestContext, series):
    # Plain min() over the datapoints. NOTE(review): on Python 2 a None
    # datapoint sorts lowest, so a series containing any None yields None
    # here — confirm intended.
    return min(series)
# Lookup table used when rendering in pie mode: aggregation name -> function.
PieFunctions = {
    'average': pieAverage,
    'maximum': pieMaximum,
    'minimum': pieMinimum,
}
# Master registry mapping render-API function names (and their aliases) to
# implementations. The duplicate 'percentileOfSeries' and 'sortByName'
# entries have been removed — each key now appears exactly once, which is
# behavior-identical (later duplicates simply overwrote the earlier ones).
SeriesFunctions = {
    # Combine functions
    'sumSeries': sumSeries,
    'sum': sumSeries,
    'multiplySeries': multiplySeries,
    'averageSeries': averageSeries,
    'stddevSeries': stddevSeries,
    'avg': averageSeries,
    'sumSeriesWithWildcards': sumSeriesWithWildcards,
    'averageSeriesWithWildcards': averageSeriesWithWildcards,
    'minSeries': minSeries,
    'maxSeries': maxSeries,
    'rangeOfSeries': rangeOfSeries,
    'percentileOfSeries': percentileOfSeries,
    'countSeries': countSeries,
    'weightedAverage': weightedAverage,
    # Transform functions
    'scale': scale,
    'invert': invert,
    'scaleToSeconds': scaleToSeconds,
    'offset': offset,
    'offsetToZero': offsetToZero,
    'derivative': derivative,
    'perSecond': perSecond,
    'integral': integral,
    'nonNegativeDerivative': nonNegativeDerivative,
    'log': logarithm,
    'timeStack': timeStack,
    'timeShift': timeShift,
    'summarize': summarize,
    'smartSummarize': smartSummarize,
    'hitcount': hitcount,
    'absolute': absolute,
    # Calculate functions
    'movingAverage': movingAverage,
    'movingMedian': movingMedian,
    'stdev': stdev,
    'holtWintersForecast': holtWintersForecast,
    'holtWintersConfidenceBands': holtWintersConfidenceBands,
    'holtWintersConfidenceArea': holtWintersConfidenceArea,
    'holtWintersAberration': holtWintersAberration,
    'asPercent': asPercent,
    'pct': asPercent,
    'diffSeries': diffSeries,
    'divideSeries': divideSeries,
    # Series Filter functions
    'mostDeviant': mostDeviant,
    'highestCurrent': highestCurrent,
    'lowestCurrent': lowestCurrent,
    'highestMax': highestMax,
    'currentAbove': currentAbove,
    'currentBelow': currentBelow,
    'highestAverage': highestAverage,
    'lowestAverage': lowestAverage,
    'averageAbove': averageAbove,
    'averageBelow': averageBelow,
    'maximumAbove': maximumAbove,
    'minimumAbove': minimumAbove,
    'maximumBelow': maximumBelow,
    'nPercentile': nPercentile,
    'limit': limit,
    'sortByTotal': sortByTotal,
    'sortByName': sortByName,
    'averageOutsidePercentile': averageOutsidePercentile,
    'removeBetweenPercentile': removeBetweenPercentile,
    'sortByMaxima': sortByMaxima,
    'sortByMinima': sortByMinima,
    'useSeriesAbove': useSeriesAbove,
    'exclude': exclude,
    # Data Filter functions
    'removeAbovePercentile': removeAbovePercentile,
    'removeAboveValue': removeAboveValue,
    'removeBelowPercentile': removeBelowPercentile,
    'removeBelowValue': removeBelowValue,
    # Special functions
    'legendValue': legendValue,
    'alias': alias,
    'aliasSub': aliasSub,
    'aliasByNode': aliasByNode,
    'aliasByMetric': aliasByMetric,
    'cactiStyle': cactiStyle,
    'color': color,
    'alpha': alpha,
    'cumulative': cumulative,
    'consolidateBy': consolidateBy,
    'keepLastValue': keepLastValue,
    'drawAsInfinite': drawAsInfinite,
    'secondYAxis': secondYAxis,
    'lineWidth': lineWidth,
    'dashed': dashed,
    'substr': substr,
    'group': group,
    'map': mapSeries,
    'reduce': reduceSeries,
    'groupByNode': groupByNode,
    'constantLine': constantLine,
    'stacked': stacked,
    'areaBetween': areaBetween,
    'threshold': threshold,
    'transformNull': transformNull,
    'isNonNull': isNonNull,
    'identity': identity,
    'aggregateLine': aggregateLine,
    'upperBound': upperBound,
    'lowerBound': lowerBound,
    # test functions
    'time': timeFunction,
    'sin': sinFunction,
    'randomWalk': randomWalkFunction,
    'timeFunction': timeFunction,
    'sinFunction': sinFunction,
    'randomWalkFunction': randomWalkFunction,
    # events
    'events': events,
}
# Avoid import circularity: the evaluator module depends on the function
# registry defined above, so import it only here at module bottom.  Skipped
# on ReadTheDOCS documentation builds (READTHEDOCS env var is set there).
if not environ.get('READTHEDOCS'):
    from graphite.render.evaluator import evaluateTarget
# ---- file boundary marker (dataset artifact: "codeparrot/github-code-clean");
# ---- the code below belongs to a different project (nova unit tests)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from keystoneauth1 import exceptions as ks_exc
import mock
from six.moves.urllib import parse
import nova.conf
from nova import context
from nova import exception
from nova import objects
from nova import rc_fields as fields
from nova.scheduler.client import report
from nova.scheduler import utils as scheduler_utils
from nova import test
from nova.tests.unit import fake_requests
from nova.tests import uuidsentinel as uuids
CONF = nova.conf.CONF
class SafeConnectedTestCase(test.NoDBTestCase):
    """Test the safe_connect decorator for the scheduler client."""

    def setUp(self):
        super(SafeConnectedTestCase, self).setUp()
        self.context = context.get_admin_context()

        with mock.patch('keystoneauth1.loading.load_auth_from_conf_options'):
            self.client = report.SchedulerReportClient()

    def _assert_survives_and_retries(self, req, side_effect):
        """Shared body for the transient-failure tests below.

        Asserts that a request failing with ``side_effect`` does not
        explode, and that a subsequent request still reaches the session.

        :param req: the mocked ``keystoneauth1.session.Session.request``
        :param side_effect: exception instance the first call should raise
        """
        req.side_effect = side_effect
        self.client._get_resource_provider(self.context, "fake")

        # reset the call count to demonstrate that future calls still
        # work
        req.reset_mock()
        self.client._get_resource_provider(self.context, "fake")
        self.assertTrue(req.called)

    @mock.patch('keystoneauth1.session.Session.request')
    def test_missing_endpoint(self, req):
        """A missing endpoint entry should not explode."""
        self._assert_survives_and_retries(req, ks_exc.EndpointNotFound())

    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_client')
    @mock.patch('keystoneauth1.session.Session.request')
    def test_missing_endpoint_create_client(self, req, create_client):
        """Test EndpointNotFound retry behavior.

        A missing endpoint should cause _create_client to be called.
        """
        req.side_effect = ks_exc.EndpointNotFound()
        self.client._get_resource_provider(self.context, "fake")

        # This is the second time _create_client is called, but the first
        # since the mock was created.
        self.assertTrue(create_client.called)

    @mock.patch('keystoneauth1.session.Session.request')
    def test_missing_auth(self, req):
        """A missing auth configuration should not explode."""
        self._assert_survives_and_retries(req, ks_exc.MissingAuthPlugin())

    @mock.patch('keystoneauth1.session.Session.request')
    def test_unauthorized(self, req):
        """An unauthorized configuration should not explode."""
        self._assert_survives_and_retries(req, ks_exc.Unauthorized())

    @mock.patch('keystoneauth1.session.Session.request')
    def test_connect_fail(self, req):
        """Test Connect Failure handled correctly.

        If we get a connect failure, this is transient, and we expect
        that this will end up working correctly later.
        """
        self._assert_survives_and_retries(req, ks_exc.ConnectFailure())

    @mock.patch.object(report, 'LOG')
    def test_warning_limit(self, mock_log):
        # Assert that __init__ initializes _warn_count as we expect
        self.assertEqual(0, self.client._warn_count)
        mock_self = mock.MagicMock()
        mock_self._warn_count = 0
        for i in range(0, report.WARN_EVERY + 3):
            report.warn_limit(mock_self, 'warning')
        # Only one warning is emitted per WARN_EVERY calls, so the
        # WARN_EVERY + 3 iterations above yield exactly two warnings.
        mock_log.warning.assert_has_calls([mock.call('warning'),
                                           mock.call('warning')])

    @mock.patch('keystoneauth1.session.Session.request')
    def test_failed_discovery(self, req):
        """Failed discovery should not blow up."""
        self._assert_survives_and_retries(req, ks_exc.DiscoveryFailure())
class TestConstructor(test.NoDBTestCase):
    """Verify how SchedulerReportClient wires up its keystoneauth pieces."""

    def _verify_wiring(self, client, auth_mock, sess_mock, interfaces):
        # Auth and session must both be loaded from the [placement] config
        # group, and the adapter must ask for JSON on the given interfaces.
        auth_mock.assert_called_once_with(CONF, 'placement')
        sess_mock.assert_called_once_with(CONF, 'placement',
                                          auth=auth_mock.return_value)
        self.assertEqual(interfaces, client._client.interface)
        self.assertEqual({'accept': 'application/json'},
                         client._client.additional_headers)

    @mock.patch('keystoneauth1.loading.load_session_from_conf_options')
    @mock.patch('keystoneauth1.loading.load_auth_from_conf_options')
    def test_constructor(self, load_auth_mock, load_sess_mock):
        """Default config selects the internal and public interfaces."""
        client = report.SchedulerReportClient()
        self._verify_wiring(client, load_auth_mock, load_sess_mock,
                            ['internal', 'public'])

    @mock.patch('keystoneauth1.loading.load_session_from_conf_options')
    @mock.patch('keystoneauth1.loading.load_auth_from_conf_options')
    def test_constructor_admin_interface(self, load_auth_mock, load_sess_mock):
        """valid_interfaces=admin restricts the client to the admin API."""
        self.flags(valid_interfaces='admin', group='placement')
        client = report.SchedulerReportClient()
        self._verify_wiring(client, load_auth_mock, load_sess_mock,
                            ['admin'])
class SchedulerReportClientTestCase(test.NoDBTestCase):
    """Base fixture for SchedulerReportClient tests.

    Builds a client whose keystoneauth adapter is replaced by a plain Mock
    (``self.ks_adap_mock``) so subclasses can script placement API responses
    without any HTTP traffic.
    """

    def setUp(self):
        super(SchedulerReportClientTestCase, self).setUp()
        self.context = context.get_admin_context()
        self.ks_adap_mock = mock.Mock()
        # Minimal compute node used by tests that seed the provider tree.
        self.compute_node = objects.ComputeNode(
            uuid=uuids.compute_node,
            hypervisor_hostname='foo',
            vcpus=8,
            cpu_allocation_ratio=16.0,
            memory_mb=1024,
            ram_allocation_ratio=1.5,
            local_gb=10,
            disk_allocation_ratio=1.0,
        )

        # Patch the adapter class *before* constructing the client so every
        # placement call the client makes goes through ks_adap_mock.
        with test.nested(
                mock.patch('keystoneauth1.adapter.Adapter',
                           return_value=self.ks_adap_mock),
                mock.patch('keystoneauth1.loading.load_auth_from_conf_options')
        ):
            self.client = report.SchedulerReportClient()

    def _init_provider_tree(self, generation_override=None,
                            resources_override=None):
        """Seed the client's provider tree with one root provider for
        ``self.compute_node``, carrying standard VCPU/MEMORY_MB/DISK_GB
        inventory unless ``resources_override`` is supplied.

        :param generation_override: provider generation to use instead of 1.
        :param resources_override: full inventory dict to use instead of the
            default built from the compute node.
        """
        cn = self.compute_node
        resources = resources_override
        if resources_override is None:
            resources = {
                'VCPU': {
                    'total': cn.vcpus,
                    'reserved': 0,
                    'min_unit': 1,
                    'max_unit': cn.vcpus,
                    'step_size': 1,
                    'allocation_ratio': cn.cpu_allocation_ratio,
                },
                'MEMORY_MB': {
                    'total': cn.memory_mb,
                    'reserved': 512,
                    'min_unit': 1,
                    'max_unit': cn.memory_mb,
                    'step_size': 1,
                    'allocation_ratio': cn.ram_allocation_ratio,
                },
                'DISK_GB': {
                    'total': cn.local_gb,
                    'reserved': 0,
                    'min_unit': 1,
                    'max_unit': cn.local_gb,
                    'step_size': 1,
                    'allocation_ratio': cn.disk_allocation_ratio,
                },
            }
        # NOTE(review): `or 1` means an explicit generation_override of 0
        # is treated like None and becomes 1 — appears intentional here.
        generation = generation_override or 1
        rp_uuid = self.client._provider_tree.new_root(
            cn.hypervisor_hostname,
            cn.uuid,
            generation=generation,
        )
        self.client._provider_tree.update_inventory(rp_uuid, resources)

    def _validate_provider(self, name_or_uuid, **kwargs):
        """Validates existence and values of a provider in this client's
        _provider_tree.

        :param name_or_uuid: The name or UUID of the provider to validate.
        :param kwargs: Optional keyword arguments of ProviderData attributes
            whose values are to be validated.
        """
        found = self.client._provider_tree.data(name_or_uuid)
        # If kwargs provided, their names indicate ProviderData attributes.
        # Only getattr can raise AttributeError here; assertEqual raises
        # AssertionError, which propagates normally.
        for attr, expected in kwargs.items():
            try:
                self.assertEqual(getattr(found, attr), expected)
            except AttributeError:
                self.fail("Provider with name or UUID %s doesn't have "
                          "attribute %s (expected value: %s)" %
                          (name_or_uuid, attr, expected))
class TestPutAllocations(SchedulerReportClientTestCase):
@mock.patch('nova.scheduler.client.report.SchedulerReportClient.put')
def test_put_allocations(self, mock_put):
mock_put.return_value.status_code = 204
mock_put.return_value.text = "cool"
rp_uuid = mock.sentinel.rp
consumer_uuid = mock.sentinel.consumer
data = {"MEMORY_MB": 1024}
expected_url = "/allocations/%s" % consumer_uuid
resp = self.client.put_allocations(self.context, rp_uuid,
consumer_uuid, data,
mock.sentinel.project_id,
mock.sentinel.user_id)
self.assertTrue(resp)
mock_put.assert_called_once_with(
expected_url, mock.ANY, version='1.8',
global_request_id=self.context.global_id)
@mock.patch.object(report.LOG, 'warning')
@mock.patch('nova.scheduler.client.report.SchedulerReportClient.put')
def test_put_allocations_fail(self, mock_put, mock_warn):
mock_put.return_value.status_code = 400
mock_put.return_value.text = "not cool"
rp_uuid = mock.sentinel.rp
consumer_uuid = mock.sentinel.consumer
data = {"MEMORY_MB": 1024}
expected_url = "/allocations/%s" % consumer_uuid
resp = self.client.put_allocations(self.context, rp_uuid,
consumer_uuid, data,
mock.sentinel.project_id,
mock.sentinel.user_id)
self.assertFalse(resp)
mock_put.assert_called_once_with(
expected_url, mock.ANY, version='1.8',
global_request_id=self.context.global_id)
log_msg = mock_warn.call_args[0][0]
self.assertIn("Unable to submit allocation for instance", log_msg)
@mock.patch('nova.scheduler.client.report.SchedulerReportClient.put')
def test_put_allocations_retries_conflict(self, mock_put):
failed = mock.MagicMock()
failed.status_code = 409
failed.text = "concurrently updated"
succeeded = mock.MagicMock()
succeeded.status_code = 204
mock_put.side_effect = (failed, succeeded)
rp_uuid = mock.sentinel.rp
consumer_uuid = mock.sentinel.consumer
data = {"MEMORY_MB": 1024}
expected_url = "/allocations/%s" % consumer_uuid
resp = self.client.put_allocations(self.context, rp_uuid,
consumer_uuid, data,
mock.sentinel.project_id,
mock.sentinel.user_id)
self.assertTrue(resp)
mock_put.assert_has_calls([
mock.call(expected_url, mock.ANY, version='1.8',
global_request_id=self.context.global_id)] * 2)
@mock.patch('nova.scheduler.client.report.SchedulerReportClient.put')
def test_put_allocations_retry_gives_up(self, mock_put):
failed = mock.MagicMock()
failed.status_code = 409
failed.text = "concurrently updated"
mock_put.return_value = failed
rp_uuid = mock.sentinel.rp
consumer_uuid = mock.sentinel.consumer
data = {"MEMORY_MB": 1024}
expected_url = "/allocations/%s" % consumer_uuid
resp = self.client.put_allocations(self.context, rp_uuid,
consumer_uuid, data,
mock.sentinel.project_id,
mock.sentinel.user_id)
self.assertFalse(resp)
mock_put.assert_has_calls([
mock.call(expected_url, mock.ANY, version='1.8',
global_request_id=self.context.global_id)] * 3)
    def test_claim_resources_success_with_old_version(self):
        """Claim with the legacy list-form allocation request (no
        allocation_request_version passed); the list of
        {resource_provider, resources} entries is converted to the
        dict-keyed-by-provider-uuid payload before being PUT.
        """
        get_resp_mock = mock.Mock(status_code=200)
        get_resp_mock.json.return_value = {
            'allocations': {},  # build instance, not move
        }
        self.ks_adap_mock.get.return_value = get_resp_mock
        resp_mock = mock.Mock(status_code=204)
        self.ks_adap_mock.put.return_value = resp_mock
        consumer_uuid = uuids.consumer_uuid
        alloc_req = {
            'allocations': [
                {
                    'resource_provider': {
                        'uuid': uuids.cn1
                    },
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                    }
                },
            ],
        }

        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.claim_resources(
            self.context, consumer_uuid, alloc_req, project_id, user_id)

        expected_url = "/allocations/%s" % consumer_uuid
        # The list entries are re-keyed by provider UUID for the PUT body.
        expected_payload = {
            'allocations': {
                alloc['resource_provider']['uuid']: {
                    'resources': alloc['resources']
                }
                for alloc in alloc_req['allocations']
            }
        }
        expected_payload['project_id'] = project_id
        expected_payload['user_id'] = user_id
        self.ks_adap_mock.put.assert_called_once_with(
            expected_url, microversion='1.12', json=expected_payload,
            raise_exc=False,
            headers={'X-Openstack-Request-Id': self.context.global_id})

        self.assertTrue(res)
def test_claim_resources_success(self):
get_resp_mock = mock.Mock(status_code=200)
get_resp_mock.json.return_value = {
'allocations': {}, # build instance, not move
}
self.ks_adap_mock.get.return_value = get_resp_mock
resp_mock = mock.Mock(status_code=204)
self.ks_adap_mock.put.return_value = resp_mock
consumer_uuid = uuids.consumer_uuid
alloc_req = {
'allocations': {
uuids.cn1: {
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
}
},
},
}
project_id = uuids.project_id
user_id = uuids.user_id
res = self.client.claim_resources(self.context, consumer_uuid,
alloc_req, project_id, user_id,
allocation_request_version='1.12')
expected_url = "/allocations/%s" % consumer_uuid
expected_payload = {'allocations': {
rp_uuid: alloc
for rp_uuid, alloc in alloc_req['allocations'].items()}}
expected_payload['project_id'] = project_id
expected_payload['user_id'] = user_id
self.ks_adap_mock.put.assert_called_once_with(
expected_url, microversion='1.12', json=expected_payload,
raise_exc=False,
headers={'X-Openstack-Request-Id': self.context.global_id})
self.assertTrue(res)
    def test_claim_resources_success_move_operation_no_shared(self):
        """Tests that when a move operation is detected (existing allocations
        for the same instance UUID) that we end up constructing an appropriate
        allocation that contains the original resources on the source host
        as well as the resources on the destination host.
        """
        get_resp_mock = mock.Mock(status_code=200)
        # An existing allocation against the source provider is what marks
        # this claim as a move rather than a fresh build.
        get_resp_mock.json.return_value = {
            'allocations': {
                uuids.source: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                    },
                },
            },
        }
        self.ks_adap_mock.get.return_value = get_resp_mock
        resp_mock = mock.Mock(status_code=204)
        self.ks_adap_mock.put.return_value = resp_mock
        consumer_uuid = uuids.consumer_uuid
        alloc_req = {
            'allocations': {
                uuids.destination: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024
                    }
                },
            },
        }

        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.claim_resources(self.context, consumer_uuid,
                                          alloc_req, project_id, user_id,
                                          allocation_request_version='1.12')

        expected_url = "/allocations/%s" % consumer_uuid
        # New allocation should include resources claimed on both the source
        # and destination hosts
        expected_payload = {
            'allocations': {
                uuids.source: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024
                    }
                },
                uuids.destination: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024
                    }
                },
            },
        }
        expected_payload['project_id'] = project_id
        expected_payload['user_id'] = user_id
        self.ks_adap_mock.put.assert_called_once_with(
            expected_url, microversion='1.12', json=mock.ANY,
            raise_exc=False,
            headers={'X-Openstack-Request-Id': self.context.global_id})

        # We have to pull the json body from the mock call_args to validate
        # it separately otherwise hash seed issues get in the way.
        actual_payload = self.ks_adap_mock.put.call_args[1]['json']
        self.assertEqual(expected_payload, actual_payload)

        self.assertTrue(res)
    def test_claim_resources_success_move_operation_with_shared(self):
        """Tests that when a move operation is detected (existing allocations
        for the same instance UUID) that we end up constructing an appropriate
        allocation that contains the original resources on the source host
        as well as the resources on the destination host but that when a shared
        storage provider is claimed against in both the original allocation as
        well as the new allocation request, we don't double that allocation
        resource request up.
        """
        get_resp_mock = mock.Mock(status_code=200)
        # Existing allocations: compute source plus a shared storage provider.
        get_resp_mock.json.return_value = {
            'allocations': {
                uuids.source: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                    },
                },
                uuids.shared_storage: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'DISK_GB': 100,
                    },
                },
            },
        }
        self.ks_adap_mock.get.return_value = get_resp_mock
        resp_mock = mock.Mock(status_code=204)
        self.ks_adap_mock.put.return_value = resp_mock
        consumer_uuid = uuids.consumer_uuid
        # The new request also claims DISK_GB=100 against the same shared
        # storage provider; it must NOT end up doubled in the merged payload.
        alloc_req = {
            'allocations': {
                uuids.destination: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                    }
                },
                uuids.shared_storage: {
                    'resources': {
                        'DISK_GB': 100,
                    }
                },
            }
        }

        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.claim_resources(self.context, consumer_uuid,
                                          alloc_req, project_id, user_id,
                                          allocation_request_version='1.12')

        expected_url = "/allocations/%s" % consumer_uuid
        # New allocation should include resources claimed on both the source
        # and destination hosts but not have a doubled-up request for the disk
        # resources on the shared provider
        expected_payload = {
            'allocations': {
                uuids.source: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024
                    }
                },
                uuids.shared_storage: {
                    'resources': {
                        'DISK_GB': 100
                    }
                },
                uuids.destination: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024
                    }
                },
            },
        }
        expected_payload['project_id'] = project_id
        expected_payload['user_id'] = user_id
        self.ks_adap_mock.put.assert_called_once_with(
            expected_url, microversion='1.12', json=mock.ANY,
            raise_exc=False,
            headers={'X-Openstack-Request-Id': self.context.global_id})

        # We have to pull the allocations from the json body from the
        # mock call_args to validate it separately otherwise hash seed
        # issues get in the way.
        actual_payload = self.ks_adap_mock.put.call_args[1]['json']
        self.assertEqual(expected_payload, actual_payload)

        self.assertTrue(res)
    def test_claim_resources_success_resize_to_same_host_no_shared(self):
        """Tests that when a resize to the same host operation is detected
        (existing allocations for the same instance UUID and same resource
        provider) that we end up constructing an appropriate allocation that
        contains the original resources on the source host as well as the
        resources on the destination host, which in this case are the same.
        """
        get_current_allocations_resp_mock = mock.Mock(status_code=200)
        get_current_allocations_resp_mock.json.return_value = {
            'allocations': {
                uuids.same_host: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                        'DISK_GB': 20
                    },
                },
            },
        }
        self.ks_adap_mock.get.return_value = get_current_allocations_resp_mock
        put_allocations_resp_mock = mock.Mock(status_code=204)
        self.ks_adap_mock.put.return_value = put_allocations_resp_mock
        consumer_uuid = uuids.consumer_uuid
        # This is the resize-up allocation where VCPU, MEMORY_MB and DISK_GB
        # are all being increased but on the same host. We also throw a custom
        # resource class in the new allocation to make sure it's not lost and
        # that we don't have a KeyError when merging the allocations.
        alloc_req = {
            'allocations': {
                uuids.same_host: {
                    'resources': {
                        'VCPU': 2,
                        'MEMORY_MB': 2048,
                        'DISK_GB': 40,
                        'CUSTOM_FOO': 1
                    }
                },
            },
        }

        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.claim_resources(self.context, consumer_uuid,
                                          alloc_req, project_id, user_id,
                                          allocation_request_version='1.12')

        expected_url = "/allocations/%s" % consumer_uuid
        # New allocation should include doubled resources claimed on the same
        # host: old (1/1024/20) summed with new (2/2048/40), plus CUSTOM_FOO.
        expected_payload = {
            'allocations': {
                uuids.same_host: {
                    'resources': {
                        'VCPU': 3,
                        'MEMORY_MB': 3072,
                        'DISK_GB': 60,
                        'CUSTOM_FOO': 1
                    }
                },
            },
        }
        expected_payload['project_id'] = project_id
        expected_payload['user_id'] = user_id
        self.ks_adap_mock.put.assert_called_once_with(
            expected_url, microversion='1.12', json=mock.ANY, raise_exc=False,
            headers={'X-Openstack-Request-Id': self.context.global_id})

        # We have to pull the json body from the mock call_args to validate
        # it separately otherwise hash seed issues get in the way.
        actual_payload = self.ks_adap_mock.put.call_args[1]['json']
        self.assertEqual(expected_payload, actual_payload)

        self.assertTrue(res)
    def test_claim_resources_success_resize_to_same_host_with_shared(self):
        """Tests that when a resize to the same host operation is detected
        (existing allocations for the same instance UUID and same resource
        provider) that we end up constructing an appropriate allocation that
        contains the original resources on the source host as well as the
        resources on the destination host, which in this case are the same.
        This test adds the fun wrinkle of throwing a shared storage provider
        in the mix when doing resize to the same host.
        """
        get_current_allocations_resp_mock = mock.Mock(status_code=200)
        get_current_allocations_resp_mock.json.return_value = {
            'allocations': {
                uuids.same_host: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024
                    },
                },
                uuids.shared_storage: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'DISK_GB': 20,
                    },
                },
            },
        }
        self.ks_adap_mock.get.return_value = get_current_allocations_resp_mock
        put_allocations_resp_mock = mock.Mock(status_code=204)
        self.ks_adap_mock.put.return_value = put_allocations_resp_mock
        consumer_uuid = uuids.consumer_uuid
        # This is the resize-up allocation where VCPU, MEMORY_MB and DISK_GB
        # are all being increased but DISK_GB is on a shared storage provider.
        alloc_req = {
            'allocations': {
                uuids.same_host: {
                    'resources': {
                        'VCPU': 2,
                        'MEMORY_MB': 2048
                    }
                },
                uuids.shared_storage: {
                    'resources': {
                        'DISK_GB': 40,
                    }
                },
            },
        }

        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.claim_resources(self.context, consumer_uuid,
                                          alloc_req, project_id, user_id,
                                          allocation_request_version='1.12')

        expected_url = "/allocations/%s" % consumer_uuid
        # New allocation should include doubled resources claimed on the same
        # host: old + new are summed per provider, including the shared one.
        expected_payload = {
            'allocations': {
                uuids.same_host: {
                    'resources': {
                        'VCPU': 3,
                        'MEMORY_MB': 3072
                    }
                },
                uuids.shared_storage: {
                    'resources': {
                        'DISK_GB': 60
                    }
                },
            },
        }
        expected_payload['project_id'] = project_id
        expected_payload['user_id'] = user_id
        self.ks_adap_mock.put.assert_called_once_with(
            expected_url, microversion='1.12', json=mock.ANY, raise_exc=False,
            headers={'X-Openstack-Request-Id': self.context.global_id})

        # We have to pull the json body from the mock call_args to validate
        # it separately otherwise hash seed issues get in the way.
        actual_payload = self.ks_adap_mock.put.call_args[1]['json']
        self.assertEqual(expected_payload, actual_payload)

        self.assertTrue(res)
    def test_claim_resources_fail_retry_success(self):
        """A 409 'concurrently updated' conflict is retried; the second,
        identical PUT succeeds so the claim reports True.
        """
        get_resp_mock = mock.Mock(status_code=200)
        get_resp_mock.json.return_value = {
            'allocations': {},  # build instance, not move
        }
        self.ks_adap_mock.get.return_value = get_resp_mock
        # First PUT conflicts (retryable text), second succeeds.
        resp_mocks = [
            mock.Mock(
                status_code=409,
                text='Inventory changed while attempting to allocate: '
                     'Another thread concurrently updated the data. '
                     'Please retry your update'),
            mock.Mock(status_code=204),
        ]
        self.ks_adap_mock.put.side_effect = resp_mocks
        consumer_uuid = uuids.consumer_uuid
        alloc_req = {
            'allocations': {
                uuids.cn1: {
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                    }
                },
            },
        }

        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.claim_resources(self.context, consumer_uuid,
                                          alloc_req, project_id, user_id,
                                          allocation_request_version='1.12')

        expected_url = "/allocations/%s" % consumer_uuid
        expected_payload = {
            'allocations':
                {rp_uuid: res
                 for rp_uuid, res in alloc_req['allocations'].items()}
        }
        expected_payload['project_id'] = project_id
        expected_payload['user_id'] = user_id
        # We should have exactly two calls to the placement API that look
        # identical since we're retrying the same HTTP request
        expected_calls = [
            mock.call(expected_url, microversion='1.12',
                      json=expected_payload,
                      raise_exc=False,
                      headers={'X-Openstack-Request-Id':
                               self.context.global_id})] * 2
        self.assertEqual(len(expected_calls),
                         self.ks_adap_mock.put.call_count)
        self.ks_adap_mock.put.assert_has_calls(expected_calls)

        self.assertTrue(res)
@mock.patch.object(report.LOG, 'warning')
def test_claim_resources_failure(self, mock_log):
get_resp_mock = mock.Mock(status_code=200)
get_resp_mock.json.return_value = {
'allocations': {}, # build instance, not move
}
self.ks_adap_mock.get.return_value = get_resp_mock
resp_mock = mock.Mock(status_code=409, text='not cool')
self.ks_adap_mock.put.return_value = resp_mock
consumer_uuid = uuids.consumer_uuid
alloc_req = {
'allocations': {
uuids.cn1: {
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
}
},
},
}
project_id = uuids.project_id
user_id = uuids.user_id
res = self.client.claim_resources(self.context, consumer_uuid,
alloc_req, project_id, user_id,
allocation_request_version='1.12')
expected_url = "/allocations/%s" % consumer_uuid
expected_payload = {
'allocations':
{rp_uuid: res
for rp_uuid, res in alloc_req['allocations'].items()}
}
expected_payload['project_id'] = project_id
expected_payload['user_id'] = user_id
self.ks_adap_mock.put.assert_called_once_with(
expected_url, microversion='1.12', json=expected_payload,
raise_exc=False,
headers={'X-Openstack-Request-Id': self.context.global_id})
self.assertFalse(res)
self.assertTrue(mock_log.called)
def test_remove_provider_from_inst_alloc_no_shared(self):
"""Tests that the method which manipulates an existing doubled-up
allocation for a move operation to remove the source host results in
sending placement the proper payload to PUT
/allocations/{consumer_uuid} call.
"""
get_resp_mock = mock.Mock(status_code=200)
get_resp_mock.json.return_value = {
'allocations': {
uuids.source: {
'resource_provider_generation': 42,
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
},
},
uuids.destination: {
'resource_provider_generation': 42,
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
},
},
},
}
self.ks_adap_mock.get.return_value = get_resp_mock
resp_mock = mock.Mock(status_code=204)
self.ks_adap_mock.put.return_value = resp_mock
consumer_uuid = uuids.consumer_uuid
project_id = uuids.project_id
user_id = uuids.user_id
res = self.client.remove_provider_from_instance_allocation(
self.context, consumer_uuid, uuids.source, user_id, project_id,
mock.Mock())
expected_url = "/allocations/%s" % consumer_uuid
# New allocations should only include the destination...
expected_payload = {
'allocations': [
{
'resource_provider': {
'uuid': uuids.destination,
},
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
},
},
],
}
expected_payload['project_id'] = project_id
expected_payload['user_id'] = user_id
# We have to pull the json body from the mock call_args to validate
# it separately otherwise hash seed issues get in the way.
actual_payload = self.ks_adap_mock.put.call_args[1]['json']
sort_by_uuid = lambda x: x['resource_provider']['uuid']
expected_allocations = sorted(expected_payload['allocations'],
key=sort_by_uuid)
actual_allocations = sorted(actual_payload['allocations'],
key=sort_by_uuid)
self.assertEqual(expected_allocations, actual_allocations)
self.ks_adap_mock.put.assert_called_once_with(
expected_url, microversion='1.10', json=mock.ANY, raise_exc=False,
headers={'X-Openstack-Request-Id': self.context.global_id})
self.assertTrue(res)
def test_remove_provider_from_inst_alloc_with_shared(self):
"""Tests that the method which manipulates an existing doubled-up
allocation with DISK_GB being consumed from a shared storage provider
for a move operation to remove the source host results in sending
placement the proper payload to PUT /allocations/{consumer_uuid}
call.
"""
get_resp_mock = mock.Mock(status_code=200)
get_resp_mock.json.return_value = {
'allocations': {
uuids.source: {
'resource_provider_generation': 42,
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
},
},
uuids.shared_storage: {
'resource_provider_generation': 42,
'resources': {
'DISK_GB': 100,
},
},
uuids.destination: {
'resource_provider_generation': 42,
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
},
},
},
}
self.ks_adap_mock.get.return_value = get_resp_mock
resp_mock = mock.Mock(status_code=204)
self.ks_adap_mock.put.return_value = resp_mock
consumer_uuid = uuids.consumer_uuid
project_id = uuids.project_id
user_id = uuids.user_id
res = self.client.remove_provider_from_instance_allocation(
self.context, consumer_uuid, uuids.source, user_id, project_id,
mock.Mock())
expected_url = "/allocations/%s" % consumer_uuid
# New allocations should only include the destination...
expected_payload = {
'allocations': [
{
'resource_provider': {
'uuid': uuids.shared_storage,
},
'resources': {
'DISK_GB': 100,
},
},
{
'resource_provider': {
'uuid': uuids.destination,
},
'resources': {
'VCPU': 1,
'MEMORY_MB': 1024,
},
},
],
}
expected_payload['project_id'] = project_id
expected_payload['user_id'] = user_id
# We have to pull the json body from the mock call_args to validate
# it separately otherwise hash seed issues get in the way.
actual_payload = self.ks_adap_mock.put.call_args[1]['json']
sort_by_uuid = lambda x: x['resource_provider']['uuid']
expected_allocations = sorted(expected_payload['allocations'],
key=sort_by_uuid)
actual_allocations = sorted(actual_payload['allocations'],
key=sort_by_uuid)
self.assertEqual(expected_allocations, actual_allocations)
self.ks_adap_mock.put.assert_called_once_with(
expected_url, microversion='1.10', json=mock.ANY, raise_exc=False,
headers={'X-Openstack-Request-Id': self.context.global_id})
self.assertTrue(res)
    def test_remove_provider_from_inst_alloc_no_source(self):
        """Tests that if remove_provider_from_instance_allocation() fails to
        find any allocations for the source host, it just returns True and
        does not attempt to rewrite the allocation for the consumer.
        """
        get_resp_mock = mock.Mock(status_code=200)
        # Act like the allocations already did not include the source host for
        # some reason
        get_resp_mock.json.return_value = {
            'allocations': {
                uuids.shared_storage: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'DISK_GB': 100,
                    },
                },
                uuids.destination: {
                    'resource_provider_generation': 42,
                    'resources': {
                        'VCPU': 1,
                        'MEMORY_MB': 1024,
                    },
                },
            },
        }
        self.ks_adap_mock.get.return_value = get_resp_mock
        consumer_uuid = uuids.consumer_uuid
        project_id = uuids.project_id
        user_id = uuids.user_id
        res = self.client.remove_provider_from_instance_allocation(
            self.context, consumer_uuid, uuids.source, user_id, project_id,
            mock.Mock())

        self.ks_adap_mock.get.assert_called()
        # Nothing to remove, so no PUT rewrite should have been attempted.
        self.ks_adap_mock.put.assert_not_called()

        self.assertTrue(res)
def test_remove_provider_from_inst_alloc_fail_get_allocs(self):
"""Tests that we gracefully exit with False from
remove_provider_from_instance_allocation() if the call to get the
existing allocations fails for some reason
"""
get_resp_mock = mock.Mock(status_code=500)
self.ks_adap_mock.get.return_value = get_resp_mock
consumer_uuid = uuids.consumer_uuid
project_id = uuids.project_id
user_id = uuids.user_id
res = self.client.remove_provider_from_instance_allocation(
self.context, consumer_uuid, uuids.source, user_id, project_id,
mock.Mock())
self.ks_adap_mock.get.assert_called()
self.ks_adap_mock.put.assert_not_called()
self.assertFalse(res)
class TestSetAndClearAllocations(SchedulerReportClientTestCase):
    """Tests for SchedulerReportClient.set_and_clear_allocations().

    That method POSTs a single payload that sets allocations for one
    consumer and (optionally) clears them for another consumer in the
    same request (in these tests the cleared consumer is a migration
    sentinel).
    """
    def setUp(self):
        super(TestSetAndClearAllocations, self).setUp()
        # We want to reuse the mock throughout the class, but with
        # different return values.
        self.mock_post = mock.patch(
            'nova.scheduler.client.report.SchedulerReportClient.post').start()
        self.addCleanup(self.mock_post.stop)
        # Default to a successful POST (204 No Content); individual tests
        # override status_code/text to exercise failure paths.
        self.mock_post.return_value.status_code = 204
        self.rp_uuid = mock.sentinel.rp
        self.consumer_uuid = mock.sentinel.consumer
        self.data = {"MEMORY_MB": 1024}
        self.project_id = mock.sentinel.project_id
        self.user_id = mock.sentinel.user_id
        self.expected_url = '/allocations'
    def test_url_microversion(self):
        """The POST targets /allocations with placement microversion 1.13."""
        expected_microversion = '1.13'
        resp = self.client.set_and_clear_allocations(
            self.context, self.rp_uuid, self.consumer_uuid, self.data,
            self.project_id, self.user_id)
        self.assertTrue(resp)
        self.mock_post.assert_called_once_with(
            self.expected_url, mock.ANY,
            version=expected_microversion,
            global_request_id=self.context.global_id)
    def test_payload_no_clear(self):
        """Without consumer_to_clear only the target consumer is in the payload."""
        expected_payload = {
            self.consumer_uuid: {
                'user_id': self.user_id,
                'project_id': self.project_id,
                'allocations': {
                    self.rp_uuid: {
                        'resources': {
                            'MEMORY_MB': 1024
                        }
                    }
                }
            }
        }
        resp = self.client.set_and_clear_allocations(
            self.context, self.rp_uuid, self.consumer_uuid, self.data,
            self.project_id, self.user_id)
        self.assertTrue(resp)
        # The payload is the second positional argument to post().
        args, kwargs = self.mock_post.call_args
        payload = args[1]
        self.assertEqual(expected_payload, payload)
    def test_payload_with_clear(self):
        """consumer_to_clear appears in the payload with empty 'allocations'."""
        expected_payload = {
            self.consumer_uuid: {
                'user_id': self.user_id,
                'project_id': self.project_id,
                'allocations': {
                    self.rp_uuid: {
                        'resources': {
                            'MEMORY_MB': 1024
                        }
                    }
                }
            },
            mock.sentinel.migration_uuid: {
                'user_id': self.user_id,
                'project_id': self.project_id,
                'allocations': {}
            }
        }
        resp = self.client.set_and_clear_allocations(
            self.context, self.rp_uuid, self.consumer_uuid, self.data,
            self.project_id, self.user_id,
            consumer_to_clear=mock.sentinel.migration_uuid)
        self.assertTrue(resp)
        args, kwargs = self.mock_post.call_args
        payload = args[1]
        self.assertEqual(expected_payload, payload)
    @mock.patch('time.sleep')
    def test_409_concurrent_update(self, mock_sleep):
        """A 409 'concurrently updated' response is retried before giving up.

        time.sleep is patched so the retry backoff doesn't slow the test.
        """
        self.mock_post.return_value.status_code = 409
        self.mock_post.return_value.text = 'concurrently updated'
        resp = self.client.set_and_clear_allocations(
            self.context, self.rp_uuid, self.consumer_uuid, self.data,
            self.project_id, self.user_id,
            consumer_to_clear=mock.sentinel.migration_uuid)
        self.assertFalse(resp)
        # Post was attempted four times.
        self.assertEqual(4, self.mock_post.call_count)
    @mock.patch('nova.scheduler.client.report.LOG.warning')
    def test_not_409_failure(self, mock_log):
        """A non-409 failure returns False and logs a warning with the text."""
        error_message = 'placement not there'
        self.mock_post.return_value.status_code = 503
        self.mock_post.return_value.text = error_message
        resp = self.client.set_and_clear_allocations(
            self.context, self.rp_uuid, self.consumer_uuid, self.data,
            self.project_id, self.user_id,
            consumer_to_clear=mock.sentinel.migration_uuid)
        self.assertFalse(resp)
        # LOG.warning(msg, args_dict): check both the message and the
        # interpolation dict carry the failure details.
        args, kwargs = mock_log.call_args
        log_message = args[0]
        log_args = args[1]
        self.assertIn('Unable to post allocations', log_message)
        self.assertEqual(error_message, log_args['text'])
class TestProviderOperations(SchedulerReportClientTestCase):
    """Tests for SchedulerReportClient resource-provider operations:

    _ensure_resource_provider, get_allocation_candidates,
    _get_resource_provider, _get_sharing_providers,
    _get_providers_in_tree, _create_resource_provider,
    put/delete helpers and aggregate updates.

    NOTE: with stacked @mock.patch decorators the mock arguments arrive
    in bottom-up order, so the parameter order of each test must match
    the reversed decorator order.
    """
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_aggregates')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_traits')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_sharing_providers')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    def test_ensure_resource_provider_get(self, get_rpt_mock, get_shr_mock,
            get_trait_mock, get_agg_mock, get_inv_mock, create_rp_mock):
        # No resource provider exists in the client's cache, so validate that
        # if we get the resource provider from the placement API that we don't
        # try to create the resource provider.
        get_rpt_mock.return_value = [{
            'uuid': uuids.compute_node,
            'name': mock.sentinel.name,
            'generation': 1,
        }]
        get_inv_mock.return_value = None
        get_agg_mock.return_value = set([uuids.agg1])
        get_trait_mock.return_value = set(['CUSTOM_GOLD'])
        get_shr_mock.return_value = []
        self.client._ensure_resource_provider(self.context, uuids.compute_node)
        get_rpt_mock.assert_called_once_with(self.context, uuids.compute_node)
        self.assertTrue(self.client._provider_tree.exists(uuids.compute_node))
        get_agg_mock.assert_called_once_with(self.context, uuids.compute_node)
        # The fetched aggregates/traits are cached in the provider tree.
        self.assertTrue(
            self.client._provider_tree.in_aggregates(uuids.compute_node,
                                                     [uuids.agg1]))
        self.assertFalse(
            self.client._provider_tree.in_aggregates(uuids.compute_node,
                                                     [uuids.agg2]))
        get_trait_mock.assert_called_once_with(self.context,
                                               uuids.compute_node)
        self.assertTrue(
            self.client._provider_tree.has_traits(uuids.compute_node,
                                                  ['CUSTOM_GOLD']))
        self.assertFalse(
            self.client._provider_tree.has_traits(uuids.compute_node,
                                                  ['CUSTOM_SILVER']))
        get_shr_mock.assert_called_once_with(self.context, set([uuids.agg1]))
        self.assertTrue(self.client._provider_tree.exists(uuids.compute_node))
        self.assertFalse(create_rp_mock.called)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_associations')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    def test_ensure_resource_provider_create_fail(self, get_rpt_mock,
            refresh_mock, create_rp_mock):
        # No resource provider exists in the client's cache, and
        # _create_provider raises, indicating there was an error with the
        # create call. Ensure we don't populate the resource provider cache
        get_rpt_mock.return_value = []
        create_rp_mock.side_effect = exception.ResourceProviderCreationFailed(
            name=uuids.compute_node)
        self.assertRaises(
            exception.ResourceProviderCreationFailed,
            self.client._ensure_resource_provider, self.context,
            uuids.compute_node)
        get_rpt_mock.assert_called_once_with(self.context, uuids.compute_node)
        create_rp_mock.assert_called_once_with(
            self.context, uuids.compute_node, uuids.compute_node,
            parent_provider_uuid=None)
        self.assertFalse(self.client._provider_tree.exists(uuids.compute_node))
        self.assertFalse(refresh_mock.called)
        # The provider tree raises ValueError for unknown providers, proving
        # nothing was cached.
        self.assertRaises(
            ValueError,
            self.client._provider_tree.in_aggregates, uuids.compute_node, [])
        self.assertRaises(
            ValueError,
            self.client._provider_tree.has_traits, uuids.compute_node, [])
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_resource_provider', return_value=None)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_associations')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    def test_ensure_resource_provider_create_no_placement(self, get_rpt_mock,
            refresh_mock, create_rp_mock):
        # No resource provider exists in the client's cache, and
        # @safe_connect on _create_resource_provider returns None because
        # Placement isn't running yet. Ensure we don't populate the resource
        # provider cache.
        get_rpt_mock.return_value = []
        self.assertRaises(
            exception.ResourceProviderCreationFailed,
            self.client._ensure_resource_provider, self.context,
            uuids.compute_node)
        get_rpt_mock.assert_called_once_with(self.context, uuids.compute_node)
        create_rp_mock.assert_called_once_with(
            self.context, uuids.compute_node, uuids.compute_node,
            parent_provider_uuid=None)
        self.assertFalse(self.client._provider_tree.exists(uuids.compute_node))
        refresh_mock.assert_not_called()
        self.assertRaises(
            ValueError,
            self.client._provider_tree.in_aggregates, uuids.compute_node, [])
        self.assertRaises(
            ValueError,
            self.client._provider_tree.has_traits, uuids.compute_node, [])
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_and_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_associations')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    def test_ensure_resource_provider_create(self, get_rpt_mock,
                                             refresh_inv_mock,
                                             refresh_assoc_mock,
                                             create_rp_mock):
        # No resource provider exists in the client's cache and no resource
        # provider was returned from the placement API, so verify that in this
        # case we try to create the resource provider via the placement API.
        get_rpt_mock.return_value = []
        create_rp_mock.return_value = {
            'uuid': uuids.compute_node,
            'name': 'compute-name',
            'generation': 1,
        }
        self.assertEqual(
            uuids.compute_node,
            self.client._ensure_resource_provider(self.context,
                                                  uuids.compute_node))
        self._validate_provider(uuids.compute_node, name='compute-name',
                                generation=1, parent_uuid=None,
                                aggregates=set(), traits=set())
        # We don't refresh for a just-created provider
        refresh_inv_mock.assert_not_called()
        refresh_assoc_mock.assert_not_called()
        get_rpt_mock.assert_called_once_with(self.context, uuids.compute_node)
        create_rp_mock.assert_called_once_with(
            self.context,
            uuids.compute_node,
            uuids.compute_node,  # name param defaults to UUID if None
            parent_provider_uuid=None,
        )
        self.assertTrue(self.client._provider_tree.exists(uuids.compute_node))
        create_rp_mock.reset_mock()
        # Validate the path where we specify a name (don't default to the UUID)
        self.client._ensure_resource_provider(
            self.context, uuids.cn2, 'a-name')
        create_rp_mock.assert_called_once_with(
            self.context, uuids.cn2, 'a-name', parent_provider_uuid=None)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_associations', new=mock.Mock())
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    def test_ensure_resource_provider_tree(self, get_rpt_mock, create_rp_mock):
        """Test _ensure_resource_provider with a tree of providers."""
        def _create_resource_provider(context, uuid, name,
                                      parent_provider_uuid=None):
            """Mock side effect for creating the RP with the specified args."""
            return {
                'uuid': uuid,
                'name': name,
                'generation': 0,
                'parent_provider_uuid': parent_provider_uuid
            }
        create_rp_mock.side_effect = _create_resource_provider
        # Not initially in the placement database, so we have to create it.
        get_rpt_mock.return_value = []
        # Create the root
        root = self.client._ensure_resource_provider(self.context, uuids.root)
        self.assertEqual(uuids.root, root)
        # Now create a child
        child1 = self.client._ensure_resource_provider(
            self.context, uuids.child1, name='junior',
            parent_provider_uuid=uuids.root)
        self.assertEqual(uuids.child1, child1)
        # If we re-ensure the child, we get the object from the tree, not a
        # newly-created one - i.e. the early .find() works like it should.
        self.assertIs(child1,
                      self.client._ensure_resource_provider(self.context,
                                                            uuids.child1))
        # Make sure we can create a grandchild
        grandchild = self.client._ensure_resource_provider(
            self.context, uuids.grandchild,
            parent_provider_uuid=uuids.child1)
        self.assertEqual(uuids.grandchild, grandchild)
        # Now create a second child of the root and make sure it doesn't wind
        # up in some crazy wrong place like under child1 or grandchild
        child2 = self.client._ensure_resource_provider(
            self.context, uuids.child2, parent_provider_uuid=uuids.root)
        self.assertEqual(uuids.child2, child2)
        # At this point we should get all the providers.
        self.assertEqual(
            set([uuids.root, uuids.child1, uuids.child2, uuids.grandchild]),
            set(self.client._provider_tree.get_provider_uuids()))
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_and_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_associations')
    def test_ensure_resource_provider_refresh_fetch(self, mock_ref_assoc,
                                                    mock_ref_inv, mock_gpit):
        """Make sure refreshes are called with the appropriate UUIDs and flags
        when we fetch the provider tree from placement.
        """
        tree_uuids = set([uuids.root, uuids.one, uuids.two])
        mock_gpit.return_value = [{'uuid': u, 'name': u, 'generation': 42}
                                  for u in tree_uuids]
        self.assertEqual(uuids.root,
                         self.client._ensure_resource_provider(self.context,
                                                               uuids.root))
        mock_gpit.assert_called_once_with(self.context, uuids.root)
        # Every provider in the fetched tree gets its inventory and
        # associations refreshed (associations forced at generation 42).
        mock_ref_inv.assert_has_calls([mock.call(self.context, uuid)
                                       for uuid in tree_uuids])
        mock_ref_assoc.assert_has_calls(
            [mock.call(self.context, uuid, generation=42, force=True)
             for uuid in tree_uuids])
        self.assertEqual(tree_uuids,
                         set(self.client._provider_tree.get_provider_uuids()))
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_providers_in_tree')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_create_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_refresh_associations')
    def test_ensure_resource_provider_refresh_create(self, mock_refresh,
                                                     mock_create, mock_gpit):
        """Make sure refresh is not called when we create the RP."""
        mock_gpit.return_value = []
        mock_create.return_value = {'name': 'cn', 'uuid': uuids.cn,
                                    'generation': 42}
        # The return value is the uuid we asked to ensure, while the tree is
        # populated from the created record (uuids.cn).
        self.assertEqual(uuids.root,
                         self.client._ensure_resource_provider(self.context,
                                                               uuids.root))
        mock_gpit.assert_called_once_with(self.context, uuids.root)
        mock_create.assert_called_once_with(self.context, uuids.root,
                                            uuids.root,
                                            parent_provider_uuid=None)
        mock_refresh.assert_not_called()
        self.assertEqual([uuids.cn],
                         self.client._provider_tree.get_provider_uuids())
    def test_get_allocation_candidates(self):
        """A full ResourceRequest (granular groups, required/forbidden traits,
        member_of aggregates) is translated into the expected querystring.
        """
        resp_mock = mock.Mock(status_code=200)
        json_data = {
            'allocation_requests': mock.sentinel.alloc_reqs,
            'provider_summaries': mock.sentinel.p_sums,
        }
        resources = scheduler_utils.ResourceRequest.from_extra_specs({
            'resources:VCPU': '1',
            'resources:MEMORY_MB': '1024',
            'trait:HW_CPU_X86_AVX': 'required',
            'trait:CUSTOM_TRAIT1': 'required',
            'trait:CUSTOM_TRAIT2': 'preferred',
            'trait:CUSTOM_TRAIT3': 'forbidden',
            'trait:CUSTOM_TRAIT4': 'forbidden',
            'resources1:DISK_GB': '30',
            'trait1:STORAGE_DISK_SSD': 'required',
            'resources2:VGPU': '2',
            'trait2:HW_GPU_RESOLUTION_W2560H1600': 'required',
            'trait2:HW_GPU_API_VULKAN': 'required',
            'resources3:SRIOV_NET_VF': '1',
            'resources3:CUSTOM_NET_EGRESS_BYTES_SEC': '125000',
            'group_policy': 'isolate',
            # These are ignored because misspelled, bad value, etc.
            'resources02:CUSTOM_WIDGET': '123',
            'trait:HW_NIC_OFFLOAD_LRO': 'preferred',
            'group_policy3': 'none',
        })
        resources.get_request_group(None).member_of = [
            ('agg1', 'agg2', 'agg3'), ('agg1', 'agg2')]
        expected_path = '/allocation_candidates'
        expected_query = [
            ('group_policy', 'isolate'),
            ('limit', '1000'),
            ('member_of', 'in:agg1,agg2'),
            ('member_of', 'in:agg1,agg2,agg3'),
            ('required', 'CUSTOM_TRAIT1,HW_CPU_X86_AVX,!CUSTOM_TRAIT3,'
                         '!CUSTOM_TRAIT4'),
            ('required1', 'STORAGE_DISK_SSD'),
            ('required2', 'HW_GPU_API_VULKAN,HW_GPU_RESOLUTION_W2560H1600'),
            ('resources', 'MEMORY_MB:1024,VCPU:1'),
            ('resources1', 'DISK_GB:30'),
            ('resources2', 'VGPU:2'),
            ('resources3', 'CUSTOM_NET_EGRESS_BYTES_SEC:125000,SRIOV_NET_VF:1')
        ]
        resp_mock.json.return_value = json_data
        self.ks_adap_mock.get.return_value = resp_mock
        alloc_reqs, p_sums, allocation_request_version = (
            self.client.get_allocation_candidates(self.context, resources))
        # Validate the query piecewise (order-insensitive via parse_qsl
        # comparison) and then the full URL.
        url = self.ks_adap_mock.get.call_args[0][0]
        split_url = parse.urlsplit(url)
        query = parse.parse_qsl(split_url.query)
        self.assertEqual(expected_path, split_url.path)
        self.assertEqual(expected_query, query)
        expected_url = '/allocation_candidates?%s' % parse.urlencode(
            expected_query)
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.25',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(mock.sentinel.alloc_reqs, alloc_reqs)
        self.assertEqual(mock.sentinel.p_sums, p_sums)
    def test_get_ac_no_trait_bogus_group_policy_custom_limit(self):
        """A bogus group_policy value is dropped from the query and the
        configured max_placement_results is used as the limit.
        """
        self.flags(max_placement_results=42, group='scheduler')
        resp_mock = mock.Mock(status_code=200)
        json_data = {
            'allocation_requests': mock.sentinel.alloc_reqs,
            'provider_summaries': mock.sentinel.p_sums,
        }
        resources = scheduler_utils.ResourceRequest.from_extra_specs({
            'resources:VCPU': '1',
            'resources:MEMORY_MB': '1024',
            'resources1:DISK_GB': '30',
            'group_policy': 'bogus',
        })
        expected_path = '/allocation_candidates'
        expected_query = [
            ('limit', '42'),
            ('resources', 'MEMORY_MB:1024,VCPU:1'),
            ('resources1', 'DISK_GB:30'),
        ]
        resp_mock.json.return_value = json_data
        self.ks_adap_mock.get.return_value = resp_mock
        alloc_reqs, p_sums, allocation_request_version = (
            self.client.get_allocation_candidates(self.context, resources))
        url = self.ks_adap_mock.get.call_args[0][0]
        split_url = parse.urlsplit(url)
        query = parse.parse_qsl(split_url.query)
        self.assertEqual(expected_path, split_url.path)
        self.assertEqual(expected_query, query)
        expected_url = '/allocation_candidates?%s' % parse.urlencode(
            expected_query)
        self.assertEqual(mock.sentinel.alloc_reqs, alloc_reqs)
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.25',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(mock.sentinel.p_sums, p_sums)
    def test_get_allocation_candidates_not_found(self):
        # Ensure get_allocation_candidates() returns None for the allocation
        # requests when the placement API responds 404.
        resp_mock = mock.Mock(status_code=404)
        self.ks_adap_mock.get.return_value = resp_mock
        expected_path = '/allocation_candidates'
        expected_query = {'resources': ['MEMORY_MB:1024'],
                          'limit': ['100']}
        # Make sure we're also honoring the configured limit
        self.flags(max_placement_results=100, group='scheduler')
        resources = scheduler_utils.ResourceRequest.from_extra_specs(
            {'resources:MEMORY_MB': '1024'})
        res = self.client.get_allocation_candidates(self.context, resources)
        self.ks_adap_mock.get.assert_called_once_with(
            mock.ANY, raise_exc=False, microversion='1.25',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        url = self.ks_adap_mock.get.call_args[0][0]
        split_url = parse.urlsplit(url)
        query = parse.parse_qs(split_url.query)
        self.assertEqual(expected_path, split_url.path)
        self.assertEqual(expected_query, query)
        self.assertIsNone(res[0])
    def test_get_resource_provider_found(self):
        # Ensure _get_resource_provider() returns a dict of resource provider
        # if it finds a resource provider record from the placement API
        uuid = uuids.compute_node
        resp_mock = mock.Mock(status_code=200)
        json_data = {
            'uuid': uuid,
            'name': uuid,
            'generation': 42,
            'parent_provider_uuid': None,
        }
        resp_mock.json.return_value = json_data
        self.ks_adap_mock.get.return_value = resp_mock
        result = self.client._get_resource_provider(self.context, uuid)
        expected_provider_dict = dict(
            uuid=uuid,
            name=uuid,
            generation=42,
            parent_provider_uuid=None,
        )
        expected_url = '/resource_providers/' + uuid
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.14',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(expected_provider_dict, result)
    def test_get_resource_provider_not_found(self):
        # Ensure _get_resource_provider() just returns None when the placement
        # API doesn't find a resource provider matching a UUID
        resp_mock = mock.Mock(status_code=404)
        self.ks_adap_mock.get.return_value = resp_mock
        uuid = uuids.compute_node
        result = self.client._get_resource_provider(self.context, uuid)
        expected_url = '/resource_providers/' + uuid
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.14',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertIsNone(result)
    @mock.patch.object(report.LOG, 'error')
    def test_get_resource_provider_error(self, logging_mock):
        # Ensure _get_resource_provider() raises when trying to
        # communicate with the placement API and not getting an error we can
        # deal with
        resp_mock = mock.Mock(status_code=503)
        self.ks_adap_mock.get.return_value = resp_mock
        self.ks_adap_mock.get.return_value.headers = {
            'x-openstack-request-id': uuids.request_id}
        uuid = uuids.compute_node
        self.assertRaises(
            exception.ResourceProviderRetrievalFailed,
            self.client._get_resource_provider, self.context, uuid)
        expected_url = '/resource_providers/' + uuid
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.14',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        # A 503 Service Unavailable should trigger an error log that
        # includes the placement request id and raise
        # ResourceProviderRetrievalFailed from _get_resource_provider()
        self.assertTrue(logging_mock.called)
        self.assertEqual(uuids.request_id,
                         logging_mock.call_args[0][1]['placement_req_id'])
    def test_get_sharing_providers(self):
        """_get_sharing_providers queries providers in the given aggregates
        that have the MISC_SHARES_VIA_AGGREGATE trait.
        """
        resp_mock = mock.Mock(status_code=200)
        rpjson = [
            {
                'uuid': uuids.sharing1,
                'name': 'bandwidth_provider',
                'generation': 42,
                'parent_provider_uuid': None,
                'root_provider_uuid': None,
                'links': [],
            },
            {
                'uuid': uuids.sharing2,
                'name': 'storage_provider',
                'generation': 42,
                'parent_provider_uuid': None,
                'root_provider_uuid': None,
                'links': [],
            },
        ]
        resp_mock.json.return_value = {'resource_providers': rpjson}
        self.ks_adap_mock.get.return_value = resp_mock
        result = self.client._get_sharing_providers(
            self.context, [uuids.agg1, uuids.agg2])
        expected_url = ('/resource_providers?member_of=in:' +
                        ','.join((uuids.agg1, uuids.agg2)) +
                        '&required=MISC_SHARES_VIA_AGGREGATE')
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, microversion='1.18', raise_exc=False,
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(rpjson, result)
    def test_get_sharing_providers_emptylist(self):
        """An empty aggregate list short-circuits without calling the API."""
        self.assertEqual(
            [], self.client._get_sharing_providers(self.context, []))
        self.ks_adap_mock.get.assert_not_called()
    @mock.patch.object(report.LOG, 'error')
    def test_get_sharing_providers_error(self, logging_mock):
        # Ensure _get_sharing_providers() logs an error and raises if the
        # placement API call doesn't respond 200
        resp_mock = mock.Mock(status_code=503)
        self.ks_adap_mock.get.return_value = resp_mock
        self.ks_adap_mock.get.return_value.headers = {
            'x-openstack-request-id': uuids.request_id}
        uuid = uuids.agg
        self.assertRaises(exception.ResourceProviderRetrievalFailed,
                          self.client._get_sharing_providers,
                          self.context, [uuid])
        expected_url = ('/resource_providers?member_of=in:' + uuid +
                        '&required=MISC_SHARES_VIA_AGGREGATE')
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.18',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        # A 503 Service Unavailable should trigger an error log that
        # includes the placement request id
        self.assertTrue(logging_mock.called)
        self.assertEqual(uuids.request_id,
                         logging_mock.call_args[0][1]['placement_req_id'])
    def test_get_providers_in_tree(self):
        # Ensure _get_providers_in_tree() returns a list of resource
        # provider dicts if it finds a resource provider record from the
        # placement API
        root = uuids.compute_node
        child = uuids.child
        resp_mock = mock.Mock(status_code=200)
        rpjson = [
            {
                'uuid': root,
                'name': 'daddy', 'generation': 42,
                'parent_provider_uuid': None,
            },
            {
                'uuid': child,
                'name': 'junior',
                'generation': 42,
                'parent_provider_uuid': root,
            },
        ]
        resp_mock.json.return_value = {'resource_providers': rpjson}
        self.ks_adap_mock.get.return_value = resp_mock
        result = self.client._get_providers_in_tree(self.context, root)
        expected_url = '/resource_providers?in_tree=' + root
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.14',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(rpjson, result)
    @mock.patch.object(report.LOG, 'error')
    def test_get_providers_in_tree_error(self, logging_mock):
        # Ensure _get_providers_in_tree() logs an error and raises if the
        # placement API call doesn't respond 200
        resp_mock = mock.Mock(status_code=503)
        self.ks_adap_mock.get.return_value = resp_mock
        self.ks_adap_mock.get.return_value.headers = {
            'x-openstack-request-id': 'req-' + uuids.request_id}
        uuid = uuids.compute_node
        self.assertRaises(exception.ResourceProviderRetrievalFailed,
                          self.client._get_providers_in_tree, self.context,
                          uuid)
        expected_url = '/resource_providers?in_tree=' + uuid
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.14',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        # A 503 Service Unavailable should trigger an error log that includes
        # the placement request id
        self.assertTrue(logging_mock.called)
        self.assertEqual('req-' + uuids.request_id,
                         logging_mock.call_args[0][1]['placement_req_id'])
    def test_create_resource_provider(self):
        """Test that _create_resource_provider() sends a dict of resource
        provider information without a parent provider UUID.
        """
        uuid = uuids.compute_node
        name = 'computehost'
        resp_mock = mock.Mock(status_code=200)
        self.ks_adap_mock.post.return_value = resp_mock
        self.assertEqual(
            resp_mock.json.return_value,
            self.client._create_resource_provider(self.context, uuid, name))
        expected_payload = {
            'uuid': uuid,
            'name': name,
        }
        expected_url = '/resource_providers'
        self.ks_adap_mock.post.assert_called_once_with(
            expected_url, json=expected_payload, raise_exc=False,
            microversion='1.20',
            headers={'X-Openstack-Request-Id': self.context.global_id})
    def test_create_resource_provider_with_parent(self):
        """Test that when specifying a parent provider UUID, that the
        parent_provider_uuid part of the payload is properly specified.
        """
        parent_uuid = uuids.parent
        uuid = uuids.compute_node
        name = 'computehost'
        resp_mock = mock.Mock(status_code=200)
        self.ks_adap_mock.post.return_value = resp_mock
        self.assertEqual(
            resp_mock.json.return_value,
            self.client._create_resource_provider(
                self.context,
                uuid,
                name,
                parent_provider_uuid=parent_uuid,
            )
        )
        expected_payload = {
            'uuid': uuid,
            'name': name,
            'parent_provider_uuid': parent_uuid,
        }
        expected_url = '/resource_providers'
        self.ks_adap_mock.post.assert_called_once_with(
            expected_url, json=expected_payload, raise_exc=False,
            microversion='1.20',
            headers={'X-Openstack-Request-Id': self.context.global_id})
    @mock.patch.object(report.LOG, 'info')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_resource_provider')
    def test_create_resource_provider_concurrent_create(self, get_rp_mock,
                                                        logging_mock):
        # Ensure _create_resource_provider() returns a dict of resource
        # provider gotten from _get_resource_provider() if the call to create
        # the resource provider in the placement API returned a 409 Conflict,
        # indicating another thread concurrently created the resource provider
        # record.
        uuid = uuids.compute_node
        name = 'computehost'
        self.ks_adap_mock.post.return_value = fake_requests.FakeResponse(
            409, content='not a name conflict',
            headers={'x-openstack-request-id': uuids.request_id})
        get_rp_mock.return_value = mock.sentinel.get_rp
        result = self.client._create_resource_provider(self.context, uuid,
                                                       name)
        expected_payload = {
            'uuid': uuid,
            'name': name,
        }
        expected_url = '/resource_providers'
        self.ks_adap_mock.post.assert_called_once_with(
            expected_url, json=expected_payload, raise_exc=False,
            microversion='1.20',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(mock.sentinel.get_rp, result)
        # The 409 response will produce a message to the info log.
        self.assertTrue(logging_mock.called)
        self.assertEqual(uuids.request_id,
                         logging_mock.call_args[0][1]['placement_req_id'])
    def test_create_resource_provider_name_conflict(self):
        # When the API call to create the resource provider fails 409 with a
        # name conflict, we raise an exception.
        self.ks_adap_mock.post.return_value = fake_requests.FakeResponse(
            409, content='<stuff>Conflicting resource provider name: foo '
                         'already exists.</stuff>')
        self.assertRaises(
            exception.ResourceProviderCreationFailed,
            self.client._create_resource_provider, self.context,
            uuids.compute_node, 'foo')
    @mock.patch.object(report.LOG, 'error')
    def test_create_resource_provider_error(self, logging_mock):
        # Ensure _create_resource_provider() raises when trying to
        # communicate with the placement API and not getting an error we can
        # deal with
        uuid = uuids.compute_node
        name = 'computehost'
        self.ks_adap_mock.post.return_value = fake_requests.FakeResponse(
            503, headers={'x-openstack-request-id': uuids.request_id})
        self.assertRaises(
            exception.ResourceProviderCreationFailed,
            self.client._create_resource_provider, self.context, uuid, name)
        expected_payload = {
            'uuid': uuid,
            'name': name,
        }
        expected_url = '/resource_providers'
        self.ks_adap_mock.post.assert_called_once_with(
            expected_url, json=expected_payload, raise_exc=False,
            microversion='1.20',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        # A 503 Service Unavailable should log an error that
        # includes the placement request id and
        # _create_resource_provider() should raise
        # ResourceProviderCreationFailed
        self.assertTrue(logging_mock.called)
        self.assertEqual(uuids.request_id,
                         logging_mock.call_args[0][1]['placement_req_id'])
    def test_put_empty(self):
        # A simple put with an empty (not None) payload should send the empty
        # payload through.
        # Bug #1744786
        url = '/resource_providers/%s/aggregates' % uuids.foo
        self.client.put(url, [])
        self.ks_adap_mock.put.assert_called_once_with(
            url, json=[], raise_exc=False, microversion=None, headers={})
    def test_delete_provider(self):
        """Both 204 and 404 count as a successful delete and evict the
        provider from the tree and association-refresh caches.
        """
        delete_mock = fake_requests.FakeResponse(None)
        self.ks_adap_mock.delete.return_value = delete_mock
        for status_code in (204, 404):
            delete_mock.status_code = status_code
            # Seed the caches
            self.client._provider_tree.new_root('compute', uuids.root,
                                                generation=0)
            self.client._association_refresh_time[uuids.root] = 1234
            self.client._delete_provider(uuids.root, global_request_id='gri')
            self.ks_adap_mock.delete.assert_called_once_with(
                '/resource_providers/' + uuids.root,
                headers={'X-Openstack-Request-Id': 'gri'}, microversion=None,
                raise_exc=False)
            self.assertFalse(self.client._provider_tree.exists(uuids.root))
            self.assertNotIn(uuids.root, self.client._association_refresh_time)
            self.ks_adap_mock.delete.reset_mock()
    def test_delete_provider_fail(self):
        """409 maps to ResourceProviderInUse; other failures (503) map to
        ResourceProviderDeletionFailed.
        """
        delete_mock = fake_requests.FakeResponse(None)
        self.ks_adap_mock.delete.return_value = delete_mock
        resp_exc_map = {409: exception.ResourceProviderInUse,
                        503: exception.ResourceProviderDeletionFailed}
        for status_code, exc in resp_exc_map.items():
            delete_mock.status_code = status_code
            self.assertRaises(exc, self.client._delete_provider, uuids.root)
            self.ks_adap_mock.delete.assert_called_once_with(
                '/resource_providers/' + uuids.root, microversion=None,
                headers={}, raise_exc=False)
            self.ks_adap_mock.delete.reset_mock()
    def test_set_aggregates_for_provider(self):
        """A successful PUT updates placement and the provider tree cache."""
        aggs = [uuids.agg1, uuids.agg2]
        resp_mock = mock.Mock(status_code=200)
        resp_mock.json.return_value = {
            'aggregates': aggs,
        }
        self.ks_adap_mock.put.return_value = resp_mock
        # Prime the provider tree cache
        self.client._provider_tree.new_root('rp', uuids.rp, generation=0)
        self.assertEqual(set(),
                         self.client._provider_tree.data(uuids.rp).aggregates)
        self.client.set_aggregates_for_provider(self.context, uuids.rp, aggs)
        self.ks_adap_mock.put.assert_called_once_with(
            '/resource_providers/%s/aggregates' % uuids.rp, json=aggs,
            raise_exc=False, microversion='1.1',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        # Cache was updated
        self.assertEqual(set(aggs),
                         self.client._provider_tree.data(uuids.rp).aggregates)
    def test_set_aggregates_for_provider_fail(self):
        """A failed PUT raises and leaves the cached aggregates untouched."""
        self.ks_adap_mock.put.return_value = mock.Mock(status_code=503)
        # Prime the provider tree cache
        self.client._provider_tree.new_root('rp', uuids.rp, generation=0)
        self.assertRaises(
            exception.ResourceProviderUpdateFailed,
            self.client.set_aggregates_for_provider,
            self.context, uuids.rp, [uuids.agg])
        # The cache wasn't updated
        self.assertEqual(set(),
                         self.client._provider_tree.data(uuids.rp).aggregates)
class TestAggregates(SchedulerReportClientTestCase):
    """Tests for SchedulerReportClient._get_provider_aggregates()."""
    def test_get_provider_aggregates_found(self):
        """A 200 response yields the provider's aggregates as a set."""
        uuid = uuids.compute_node
        resp_mock = mock.Mock(status_code=200)
        aggs = [
            uuids.agg1,
            uuids.agg2,
        ]
        resp_mock.json.return_value = {'aggregates': aggs}
        self.ks_adap_mock.get.return_value = resp_mock
        result = self.client._get_provider_aggregates(self.context, uuid)
        expected_url = '/resource_providers/' + uuid + '/aggregates'
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url, raise_exc=False, microversion='1.1',
            headers={'X-Openstack-Request-Id': self.context.global_id})
        self.assertEqual(set(aggs), result)
    @mock.patch.object(report.LOG, 'error')
    def test_get_provider_aggregates_error(self, log_mock):
        """Test that when the placement API returns any error when looking up a
        provider's aggregates, we raise an exception.
        """
        uuid = uuids.compute_node
        resp_mock = mock.Mock(headers={
            'x-openstack-request-id': uuids.request_id})
        self.ks_adap_mock.get.return_value = resp_mock
        # Exercise several non-200 status codes; each must raise and log an
        # error that includes the placement request id.
        for status_code in (400, 404, 503):
            resp_mock.status_code = status_code
            self.assertRaises(
                exception.ResourceProviderAggregateRetrievalFailed,
                self.client._get_provider_aggregates, self.context, uuid)
            expected_url = '/resource_providers/' + uuid + '/aggregates'
            self.ks_adap_mock.get.assert_called_once_with(
                expected_url, raise_exc=False, microversion='1.1',
                headers={'X-Openstack-Request-Id': self.context.global_id})
            self.assertTrue(log_mock.called)
            self.assertEqual(uuids.request_id,
                             log_mock.call_args[0][1]['placement_req_id'])
            # Reset the mocks so assert_called_once_with holds per iteration.
            self.ks_adap_mock.get.reset_mock()
            log_mock.reset_mock()
class TestTraits(SchedulerReportClientTestCase):
    """Tests for the report client's trait retrieval/creation/set methods."""
    # Keyword arguments every traits-related placement call is expected to
    # pass; microversion 1.6 is where the traits API appeared.
    trait_api_kwargs = {'raise_exc': False, 'microversion': '1.6'}
    def test_get_provider_traits_found(self):
        """A 200 response yields the provider's traits as a set."""
        uuid = uuids.compute_node
        resp_mock = mock.Mock(status_code=200)
        traits = [
            'CUSTOM_GOLD',
            'CUSTOM_SILVER',
        ]
        resp_mock.json.return_value = {'traits': traits}
        self.ks_adap_mock.get.return_value = resp_mock
        result = self.client._get_provider_traits(self.context, uuid)
        expected_url = '/resource_providers/' + uuid + '/traits'
        self.ks_adap_mock.get.assert_called_once_with(
            expected_url,
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        self.assertEqual(set(traits), result)
    @mock.patch.object(report.LOG, 'error')
    def test_get_provider_traits_error(self, log_mock):
        """Test that when the placement API returns any error when looking up a
        provider's traits, we raise an exception.
        """
        uuid = uuids.compute_node
        resp_mock = mock.Mock(headers={
            'x-openstack-request-id': uuids.request_id})
        self.ks_adap_mock.get.return_value = resp_mock
        # Every error status should surface as the same exception type,
        # with the placement request id passed to the log call.
        for status_code in (400, 404, 503):
            resp_mock.status_code = status_code
            self.assertRaises(
                exception.ResourceProviderTraitRetrievalFailed,
                self.client._get_provider_traits, self.context, uuid)
            expected_url = '/resource_providers/' + uuid + '/traits'
            self.ks_adap_mock.get.assert_called_once_with(
                expected_url,
                headers={'X-Openstack-Request-Id': self.context.global_id},
                **self.trait_api_kwargs)
            self.assertTrue(log_mock.called)
            self.assertEqual(uuids.request_id,
                             log_mock.call_args[0][1]['placement_req_id'])
            # Reset between iterations so assert_called_once_with holds.
            self.ks_adap_mock.get.reset_mock()
            log_mock.reset_mock()
    def test_ensure_traits(self):
        """Successful paths, various permutations of traits existing or needing
        to be created.
        """
        standard_traits = ['HW_NIC_OFFLOAD_UCS', 'HW_NIC_OFFLOAD_RDMA']
        custom_traits = ['CUSTOM_GOLD', 'CUSTOM_SILVER']
        all_traits = standard_traits + custom_traits
        get_mock = mock.Mock(status_code=200)
        self.ks_adap_mock.get.return_value = get_mock
        # Request all traits; custom traits need to be created
        get_mock.json.return_value = {'traits': standard_traits}
        self.client._ensure_traits(self.context, all_traits)
        self.ks_adap_mock.get.assert_called_once_with(
            '/traits?name=in:' + ','.join(all_traits),
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        # Only the traits missing from the GET response get a PUT.
        self.ks_adap_mock.put.assert_has_calls(
            [mock.call('/traits/' + trait,
                       headers={'X-Openstack-Request-Id':
                                self.context.global_id},
                       **self.trait_api_kwargs)
             for trait in custom_traits], any_order=True)
        self.ks_adap_mock.reset_mock()
        # Request standard traits; no traits need to be created
        get_mock.json.return_value = {'traits': standard_traits}
        self.client._ensure_traits(self.context, standard_traits)
        self.ks_adap_mock.get.assert_called_once_with(
            '/traits?name=in:' + ','.join(standard_traits),
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        self.ks_adap_mock.put.assert_not_called()
        self.ks_adap_mock.reset_mock()
        # Request no traits - short circuit
        self.client._ensure_traits(self.context, None)
        self.client._ensure_traits(self.context, [])
        self.ks_adap_mock.get.assert_not_called()
        self.ks_adap_mock.put.assert_not_called()
    def test_ensure_traits_fail_retrieval(self):
        """A failed traits GET raises TraitRetrievalFailed; no PUT happens."""
        self.ks_adap_mock.get.return_value = mock.Mock(status_code=400)
        self.assertRaises(exception.TraitRetrievalFailed,
                          self.client._ensure_traits,
                          self.context, ['FOO'])
        self.ks_adap_mock.get.assert_called_once_with(
            '/traits?name=in:FOO',
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        self.ks_adap_mock.put.assert_not_called()
    def test_ensure_traits_fail_creation(self):
        """A failed trait PUT raises TraitCreationFailed."""
        get_mock = mock.Mock(status_code=200)
        # No existing traits, so 'FOO' must be created via PUT.
        get_mock.json.return_value = {'traits': []}
        self.ks_adap_mock.get.return_value = get_mock
        self.ks_adap_mock.put.return_value = fake_requests.FakeResponse(400)
        self.assertRaises(exception.TraitCreationFailed,
                          self.client._ensure_traits,
                          self.context, ['FOO'])
        self.ks_adap_mock.get.assert_called_once_with(
            '/traits?name=in:FOO',
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        self.ks_adap_mock.put.assert_called_once_with(
            '/traits/FOO',
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
    def test_set_traits_for_provider(self):
        """Happy path: traits exist, the provider PUT succeeds, and the
        provider tree cache picks up the new traits and generation.
        """
        traits = ['HW_NIC_OFFLOAD_UCS', 'HW_NIC_OFFLOAD_RDMA']
        # Make _ensure_traits succeed without PUTting
        get_mock = mock.Mock(status_code=200)
        get_mock.json.return_value = {'traits': traits}
        self.ks_adap_mock.get.return_value = get_mock
        # Prime the provider tree cache
        self.client._provider_tree.new_root('rp', uuids.rp, generation=0)
        # Mock the /rp/{u}/traits PUT to succeed
        put_mock = mock.Mock(status_code=200)
        put_mock.json.return_value = {'traits': traits,
                                      'resource_provider_generation': 1}
        self.ks_adap_mock.put.return_value = put_mock
        # Invoke
        self.client.set_traits_for_provider(self.context, uuids.rp, traits)
        # Verify API calls
        self.ks_adap_mock.get.assert_called_once_with(
            '/traits?name=in:' + ','.join(traits),
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        self.ks_adap_mock.put.assert_called_once_with(
            '/resource_providers/%s/traits' % uuids.rp,
            json={'traits': traits, 'resource_provider_generation': 0},
            headers={'X-Openstack-Request-Id': self.context.global_id},
            **self.trait_api_kwargs)
        # And ensure the provider tree cache was updated appropriately
        self.assertFalse(
            self.client._provider_tree.have_traits_changed(uuids.rp, traits))
        # Validate the generation
        self.assertEqual(
            1, self.client._provider_tree.data(uuids.rp).generation)
    def test_set_traits_for_provider_fail(self):
        """Error paths: _ensure_traits failure bubbles up; a 409 on the
        provider PUT raises a conflict; any other error raises
        ResourceProviderUpdateFailed.
        """
        traits = ['HW_NIC_OFFLOAD_UCS', 'HW_NIC_OFFLOAD_RDMA']
        get_mock = mock.Mock()
        self.ks_adap_mock.get.return_value = get_mock
        # Prime the provider tree cache
        self.client._provider_tree.new_root('rp', uuids.rp, generation=0)
        # _ensure_traits exception bubbles up
        get_mock.status_code = 400
        self.assertRaises(
            exception.TraitRetrievalFailed,
            self.client.set_traits_for_provider,
            self.context, uuids.rp, traits)
        self.ks_adap_mock.put.assert_not_called()
        get_mock.status_code = 200
        get_mock.json.return_value = {'traits': traits}
        # Conflict
        self.ks_adap_mock.put.return_value = mock.Mock(status_code=409)
        self.assertRaises(
            exception.ResourceProviderUpdateConflict,
            self.client.set_traits_for_provider,
            self.context, uuids.rp, traits)
        # Other error
        self.ks_adap_mock.put.return_value = mock.Mock(status_code=503)
        self.assertRaises(
            exception.ResourceProviderUpdateFailed,
            self.client.set_traits_for_provider,
            self.context, uuids.rp, traits)
class TestAssociations(SchedulerReportClientTestCase):
    """Tests for _refresh_associations: aggregates, traits and sharing
    providers, including the staleness timer behavior.
    """
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_aggregates')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_traits')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_sharing_providers')
    def test_refresh_associations_no_last(self, mock_shr_get, mock_trait_get,
                                          mock_agg_get):
        """Test that associations are refreshed when stale."""
        uuid = uuids.compute_node
        # Seed the provider tree so _refresh_associations finds the provider
        self.client._provider_tree.new_root('compute', uuid, generation=1)
        mock_agg_get.return_value = set([uuids.agg1])
        mock_trait_get.return_value = set(['CUSTOM_GOLD'])
        self.client._refresh_associations(self.context, uuid)
        mock_agg_get.assert_called_once_with(self.context, uuid)
        mock_trait_get.assert_called_once_with(self.context, uuid)
        # Sharing providers are looked up using the freshly-fetched
        # aggregates.
        mock_shr_get.assert_called_once_with(
            self.context, mock_agg_get.return_value)
        # A refresh timestamp was recorded for the provider.
        self.assertIn(uuid, self.client._association_refresh_time)
        self.assertTrue(
            self.client._provider_tree.in_aggregates(uuid, [uuids.agg1]))
        self.assertFalse(
            self.client._provider_tree.in_aggregates(uuid, [uuids.agg2]))
        self.assertTrue(
            self.client._provider_tree.has_traits(uuid, ['CUSTOM_GOLD']))
        self.assertFalse(
            self.client._provider_tree.has_traits(uuid, ['CUSTOM_SILVER']))
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_aggregates')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_traits')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_sharing_providers')
    def test_refresh_associations_no_refresh_sharing(self, mock_shr_get,
                                                     mock_trait_get,
                                                     mock_agg_get):
        """Test refresh_sharing=False."""
        uuid = uuids.compute_node
        # Seed the provider tree so _refresh_associations finds the provider
        self.client._provider_tree.new_root('compute', uuid, generation=1)
        mock_agg_get.return_value = set([uuids.agg1])
        mock_trait_get.return_value = set(['CUSTOM_GOLD'])
        self.client._refresh_associations(self.context, uuid,
                                          refresh_sharing=False)
        mock_agg_get.assert_called_once_with(self.context, uuid)
        mock_trait_get.assert_called_once_with(self.context, uuid)
        # With refresh_sharing=False the sharing-provider lookup is skipped.
        mock_shr_get.assert_not_called()
        self.assertIn(uuid, self.client._association_refresh_time)
        self.assertTrue(
            self.client._provider_tree.in_aggregates(uuid, [uuids.agg1]))
        self.assertFalse(
            self.client._provider_tree.in_aggregates(uuid, [uuids.agg2]))
        self.assertTrue(
            self.client._provider_tree.has_traits(uuid, ['CUSTOM_GOLD']))
        self.assertFalse(
            self.client._provider_tree.has_traits(uuid, ['CUSTOM_SILVER']))
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_aggregates')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_traits')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_sharing_providers')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_associations_stale')
    def test_refresh_associations_not_stale(self, mock_stale, mock_shr_get,
                                            mock_trait_get, mock_agg_get):
        """Test that refresh associations is not called when the map is
        not stale.
        """
        mock_stale.return_value = False
        uuid = uuids.compute_node
        self.client._refresh_associations(self.context, uuid)
        mock_agg_get.assert_not_called()
        mock_trait_get.assert_not_called()
        mock_shr_get.assert_not_called()
        # No refresh happened, so no timestamp was recorded either.
        self.assertFalse(self.client._association_refresh_time)
    @mock.patch.object(report.LOG, 'debug')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_aggregates')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_provider_traits')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_sharing_providers')
    def test_refresh_associations_time(self, mock_shr_get, mock_trait_get,
                                       mock_agg_get, log_mock):
        """Test that refresh associations is called when the map is stale."""
        uuid = uuids.compute_node
        # Seed the provider tree so _refresh_associations finds the provider
        self.client._provider_tree.new_root('compute', uuid, generation=1)
        mock_agg_get.return_value = set([])
        mock_trait_get.return_value = set([])
        mock_shr_get.return_value = []
        # Called a first time because association_refresh_time is empty.
        now = time.time()
        self.client._refresh_associations(self.context, uuid)
        mock_agg_get.assert_called_once_with(self.context, uuid)
        mock_trait_get.assert_called_once_with(self.context, uuid)
        mock_shr_get.assert_called_once_with(self.context, set())
        log_mock.assert_has_calls([
            mock.call('Refreshing aggregate associations for resource '
                      'provider %s, aggregates: %s', uuid, 'None'),
            mock.call('Refreshing trait associations for resource '
                      'provider %s, traits: %s', uuid, 'None')
        ])
        self.assertIn(uuid, self.client._association_refresh_time)
        # Clear call count.
        mock_agg_get.reset_mock()
        mock_trait_get.reset_mock()
        mock_shr_get.reset_mock()
        # Drive the staleness check by faking the wall clock.
        with mock.patch('time.time') as mock_future:
            # Not called a second time because not enough time has passed.
            mock_future.return_value = (now +
                CONF.compute.resource_provider_association_refresh / 2)
            self.client._refresh_associations(self.context, uuid)
            mock_agg_get.assert_not_called()
            mock_trait_get.assert_not_called()
            mock_shr_get.assert_not_called()
            # Called because time has passed.
            mock_future.return_value = (now +
                CONF.compute.resource_provider_association_refresh + 1)
            self.client._refresh_associations(self.context, uuid)
            mock_agg_get.assert_called_once_with(self.context, uuid)
            mock_trait_get.assert_called_once_with(self.context, uuid)
            mock_shr_get.assert_called_once_with(self.context, set())
class TestComputeNodeToInventoryDict(test.NoDBTestCase):
    """Tests for the _compute_node_to_inventory_dict helper."""
    def test_compute_node_inventory(self):
        """A fully-populated compute node maps to VCPU/MEMORY_MB/DISK_GB
        inventory records that honor the reserved_host_* config options.
        """
        cn = objects.ComputeNode(uuid=uuids.compute_node,
                                 hypervisor_hostname='computehost',
                                 vcpus=2,
                                 cpu_allocation_ratio=16.0,
                                 memory_mb=1024,
                                 ram_allocation_ratio=1.5,
                                 local_gb=10,
                                 disk_allocation_ratio=1.0)
        self.flags(reserved_host_memory_mb=1000)
        self.flags(reserved_host_disk_mb=200)
        self.flags(reserved_host_cpus=1)
        expected = {
            'VCPU': {
                'total': cn.vcpus,
                'reserved': CONF.reserved_host_cpus,
                'min_unit': 1,
                'max_unit': cn.vcpus,
                'step_size': 1,
                'allocation_ratio': cn.cpu_allocation_ratio,
            },
            'MEMORY_MB': {
                'total': cn.memory_mb,
                'reserved': CONF.reserved_host_memory_mb,
                'min_unit': 1,
                'max_unit': cn.memory_mb,
                'step_size': 1,
                'allocation_ratio': cn.ram_allocation_ratio,
            },
            'DISK_GB': {
                'total': cn.local_gb,
                # 200 reserved MB rounds up to 1 GB: ceil(200/1024)
                'reserved': 1,
                'min_unit': 1,
                'max_unit': cn.local_gb,
                'step_size': 1,
                'allocation_ratio': cn.disk_allocation_ratio,
            },
        }
        self.assertEqual(expected,
                         report._compute_node_to_inventory_dict(cn))
    def test_compute_node_inventory_empty(self):
        """A compute node with zero resources yields an empty inventory."""
        cn = objects.ComputeNode(uuid=uuids.compute_node,
                                 hypervisor_hostname='computehost',
                                 vcpus=0,
                                 cpu_allocation_ratio=16.0,
                                 memory_mb=0,
                                 ram_allocation_ratio=1.5,
                                 local_gb=0,
                                 disk_allocation_ratio=1.0)
        self.assertEqual({}, report._compute_node_to_inventory_dict(cn))
class TestInventory(SchedulerReportClientTestCase):
    """Tests for pushing compute node inventory to placement."""
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory')
    def test_update_compute_node(self, mock_ui, mock_erp):
        """update_compute_node ensures the provider exists and pushes the
        inventory derived from the compute node's resources.
        """
        cn = self.compute_node
        self.client.update_compute_node(self.context, cn)
        mock_erp.assert_called_once_with(self.context, cn.uuid,
                                         cn.hypervisor_hostname)
        expected_inv_data = {
            'VCPU': {
                'total': 8,
                'reserved': CONF.reserved_host_cpus,
                'min_unit': 1,
                'max_unit': 8,
                'step_size': 1,
                'allocation_ratio': 16.0,
            },
            'MEMORY_MB': {
                'total': 1024,
                'reserved': 512,
                'min_unit': 1,
                'max_unit': 1024,
                'step_size': 1,
                'allocation_ratio': 1.5,
            },
            'DISK_GB': {
                'total': 10,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 10,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
        }
        mock_ui.assert_called_once_with(
            self.context,
            cn.uuid,
            expected_inv_data,
        )
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory')
    def test_update_compute_node_no_inv(self, mock_ui, mock_erp):
        """Ensure that if there are no inventory records, we still call
        _update_inventory().
        """
        cn = self.compute_node
        cn.vcpus = 0
        cn.memory_mb = 0
        cn.local_gb = 0
        self.client.update_compute_node(self.context, cn)
        mock_erp.assert_called_once_with(self.context, cn.uuid,
                                         cn.hypervisor_hostname)
        mock_ui.assert_called_once_with(self.context, cn.uuid, {})
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'get')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    def test_update_inventory_initial_empty(self, mock_put, mock_get):
        """With an empty local cache, _update_inventory_attempt GETs the
        current inventory, PUTs the computed one using the generation from
        the GET, and caches the generation from the PUT response.
        """
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider; start with no cached resources.
        self._init_provider_tree(resources_override={})
        mock_get.return_value.json.return_value = {
            'resource_provider_generation': 43,
            'inventories': {
                'VCPU': {'total': 16},
                'MEMORY_MB': {'total': 1024},
                'DISK_GB': {'total': 10},
            }
        }
        mock_put.return_value.status_code = 200
        mock_put.return_value.json.return_value = {
            'resource_provider_generation': 44,
            'inventories': {
                'VCPU': {'total': 16},
                'MEMORY_MB': {'total': 1024},
                'DISK_GB': {'total': 10},
            }
        }
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        result = self.client._update_inventory_attempt(
            self.context, compute_node.uuid, inv_data
        )
        self.assertTrue(result)
        exp_url = '/resource_providers/%s/inventories' % uuid
        mock_get.assert_called_once_with(
            exp_url, global_request_id=self.context.global_id)
        # Updated with the new inventory from the PUT call
        self._validate_provider(uuid, generation=44)
        expected = {
            # Called with the newly-found generation from the existing
            # inventory
            'resource_provider_generation': 43,
            'inventories': {
                'VCPU': {
                    'total': 8,
                    'reserved': CONF.reserved_host_cpus,
                    'min_unit': 1,
                    'max_unit': compute_node.vcpus,
                    'step_size': 1,
                    'allocation_ratio': compute_node.cpu_allocation_ratio,
                },
                'MEMORY_MB': {
                    'total': 1024,
                    'reserved': CONF.reserved_host_memory_mb,
                    'min_unit': 1,
                    'max_unit': compute_node.memory_mb,
                    'step_size': 1,
                    'allocation_ratio': compute_node.ram_allocation_ratio,
                },
                'DISK_GB': {
                    'total': 10,
                    'reserved': 0,  # reserved_host_disk_mb is 0 by default
                    'min_unit': 1,
                    'max_unit': compute_node.local_gb,
                    'step_size': 1,
                    'allocation_ratio': compute_node.disk_allocation_ratio,
                },
            }
        }
        mock_put.assert_called_once_with(
            exp_url, expected, global_request_id=self.context.global_id)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'get')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    def test_update_inventory(self, mock_put, mock_get):
        """A changed inventory triggers a PUT with the placement-side
        generation and updates the cached generation on success.
        """
        self.flags(reserved_host_disk_mb=1000)
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider.
        self._init_provider_tree()
        new_vcpus_total = 240
        mock_get.return_value.json.return_value = {
            'resource_provider_generation': 43,
            'inventories': {
                'VCPU': {'total': 16},
                'MEMORY_MB': {'total': 1024},
                'DISK_GB': {'total': 10},
            }
        }
        mock_put.return_value.status_code = 200
        mock_put.return_value.json.return_value = {
            'resource_provider_generation': 44,
            'inventories': {
                'VCPU': {'total': new_vcpus_total},
                'MEMORY_MB': {'total': 1024},
                'DISK_GB': {'total': 10},
            }
        }
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        # Make a change to trigger the update...
        inv_data['VCPU']['total'] = new_vcpus_total
        result = self.client._update_inventory_attempt(
            self.context, compute_node.uuid, inv_data
        )
        self.assertTrue(result)
        exp_url = '/resource_providers/%s/inventories' % uuid
        mock_get.assert_called_once_with(
            exp_url, global_request_id=self.context.global_id)
        # Updated with the new inventory from the PUT call
        self._validate_provider(uuid, generation=44)
        expected = {
            # Called with the newly-found generation from the existing
            # inventory
            'resource_provider_generation': 43,
            'inventories': {
                'VCPU': {
                    'total': new_vcpus_total,
                    'reserved': 0,
                    'min_unit': 1,
                    'max_unit': compute_node.vcpus,
                    'step_size': 1,
                    'allocation_ratio': compute_node.cpu_allocation_ratio,
                },
                'MEMORY_MB': {
                    'total': 1024,
                    'reserved': CONF.reserved_host_memory_mb,
                    'min_unit': 1,
                    'max_unit': compute_node.memory_mb,
                    'step_size': 1,
                    'allocation_ratio': compute_node.ram_allocation_ratio,
                },
                'DISK_GB': {
                    'total': 10,
                    'reserved': 1,  # ceil(1000 MB / 1024) rounds up to 1 GB
                    'min_unit': 1,
                    'max_unit': compute_node.local_gb,
                    'step_size': 1,
                    'allocation_ratio': compute_node.disk_allocation_ratio,
                },
            }
        }
        mock_put.assert_called_once_with(
            exp_url, expected, global_request_id=self.context.global_id)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'get')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    def test_update_inventory_no_update(self, mock_put, mock_get):
        """Simulate situation where scheduler client is first starting up and
        ends up loading information from the placement API via a GET against
        the resource provider's inventory but has no local cached inventory
        information for a resource provider.
        """
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider; no cached resources yet.
        self._init_provider_tree(generation_override=42, resources_override={})
        mock_get.return_value.json.return_value = {
            'resource_provider_generation': 43,
            'inventories': {
                'VCPU': {
                    'total': 8,
                    'reserved': CONF.reserved_host_cpus,
                    'min_unit': 1,
                    'max_unit': compute_node.vcpus,
                    'step_size': 1,
                    'allocation_ratio': compute_node.cpu_allocation_ratio,
                },
                'MEMORY_MB': {
                    'total': 1024,
                    'reserved': CONF.reserved_host_memory_mb,
                    'min_unit': 1,
                    'max_unit': compute_node.memory_mb,
                    'step_size': 1,
                    'allocation_ratio': compute_node.ram_allocation_ratio,
                },
                'DISK_GB': {
                    'total': 10,
                    'reserved': 0,
                    'min_unit': 1,
                    'max_unit': compute_node.local_gb,
                    'step_size': 1,
                    'allocation_ratio': compute_node.disk_allocation_ratio,
                },
            }
        }
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        result = self.client._update_inventory_attempt(
            self.context, compute_node.uuid, inv_data
        )
        self.assertTrue(result)
        exp_url = '/resource_providers/%s/inventories' % uuid
        mock_get.assert_called_once_with(
            exp_url, global_request_id=self.context.global_id)
        # No update so put should not be called
        self.assertFalse(mock_put.called)
        # Make sure we updated the generation from the inventory records
        self._validate_provider(uuid, generation=43)
    @mock.patch.object(report.LOG, 'info')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    def test_update_inventory_concurrent_update(self, mock_ensure,
                                                mock_put, mock_get,
                                                mock_info):
        """A 409 generation conflict invalidates the cache, re-ensures the
        provider and logs the placement request id.
        """
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider.
        self.client._provider_tree.new_root(
            compute_node.hypervisor_hostname,
            compute_node.uuid,
            generation=42,
        )
        mock_get.return_value = {
            'resource_provider_generation': 42,
            'inventories': {},
        }
        mock_put.return_value.status_code = 409
        mock_put.return_value.text = 'Does not match inventory in use'
        mock_put.return_value.headers = {'x-openstack-request-id':
                                         uuids.request_id}
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        result = self.client._update_inventory_attempt(
            self.context, compute_node.uuid, inv_data
        )
        self.assertFalse(result)
        # Invalidated the cache
        self.assertFalse(self.client._provider_tree.exists(uuid))
        # Refreshed our resource provider
        mock_ensure.assert_called_once_with(self.context, uuid)
        # Logged the request id in the log message
        self.assertEqual(uuids.request_id,
                         mock_info.call_args[0][1]['placement_req_id'])
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    def test_update_inventory_inventory_in_use(self, mock_put, mock_get):
        """A 409 'inventory in use' conflict raises InventoryInUse and does
        NOT invalidate the provider cache.
        """
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider.
        self.client._provider_tree.new_root(
            compute_node.hypervisor_hostname,
            compute_node.uuid,
            generation=42,
        )
        mock_get.return_value = {
            'resource_provider_generation': 42,
            'inventories': {},
        }
        mock_put.return_value.status_code = 409
        mock_put.return_value.text = (
            "update conflict: Inventory for VCPU on "
            "resource provider 123 in use"
        )
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        self.assertRaises(
            exception.InventoryInUse,
            self.client._update_inventory_attempt,
            self.context,
            compute_node.uuid,
            inv_data,
        )
        # Did NOT invalidate the cache
        self.assertTrue(self.client._provider_tree.exists(uuid))
    @mock.patch.object(report.LOG, 'info')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    def test_update_inventory_unknown_response(self, mock_put, mock_get,
                                               mock_info):
        """An unexpected (non-2xx-success, non-409) status returns False
        without invalidating the provider cache.
        """
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider.
        self.client._provider_tree.new_root(
            compute_node.hypervisor_hostname,
            compute_node.uuid,
            generation=42,
        )
        mock_get.return_value = {
            'resource_provider_generation': 42,
            'inventories': {},
        }
        mock_put.return_value.status_code = 234
        mock_put.return_value.headers = {'x-openstack-request-id':
                                         uuids.request_id}
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        result = self.client._update_inventory_attempt(
            self.context, compute_node.uuid, inv_data
        )
        self.assertFalse(result)
        # No cache invalidation
        self.assertTrue(self.client._provider_tree.exists(uuid))
    @mock.patch.object(report.LOG, 'warning')
    @mock.patch.object(report.LOG, 'debug')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_get_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    def test_update_inventory_failed(self, mock_put, mock_get,
                                     mock_debug, mock_warn):
        """A 4xx failure returns False, keeps the cache and logs the
        placement request id at debug and warning level.
        """
        uuid = uuids.compute_node
        compute_node = self.compute_node
        # Pre-seed the provider tree so no API call is made to create the
        # provider.
        self.client._provider_tree.new_root(
            compute_node.hypervisor_hostname,
            compute_node.uuid,
            generation=42,
        )
        mock_get.return_value = {
            'resource_provider_generation': 42,
            'inventories': {},
        }
        mock_put.return_value = fake_requests.FakeResponse(
            400, headers={'x-openstack-request-id': uuids.request_id})
        inv_data = report._compute_node_to_inventory_dict(compute_node)
        result = self.client._update_inventory_attempt(
            self.context, compute_node.uuid, inv_data
        )
        self.assertFalse(result)
        # No cache invalidation
        self.assertTrue(self.client._provider_tree.exists(uuid))
        # Logged the request id in the log messages
        self.assertEqual(uuids.request_id,
                         mock_debug.call_args[0][1]['placement_req_id'])
        self.assertEqual(uuids.request_id,
                         mock_warn.call_args[0][1]['placement_req_id'])
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory_attempt')
    @mock.patch('time.sleep')
    def test_update_inventory_fails_and_then_succeeds(self, mock_sleep,
                                                      mock_update,
                                                      mock_ensure):
        # Ensure _update_inventory() fails if we have a conflict when updating
        # but retries correctly.
        cn = self.compute_node
        mock_update.side_effect = (False, True)
        self.client._provider_tree.new_root(
            cn.hypervisor_hostname,
            cn.uuid,
            generation=42,
        )
        result = self.client._update_inventory(
            self.context, cn.uuid, mock.sentinel.inv_data
        )
        self.assertTrue(result)
        # Only slept once
        mock_sleep.assert_called_once_with(1)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory_attempt')
    @mock.patch('time.sleep')
    def test_update_inventory_never_succeeds(self, mock_sleep,
                                             mock_update,
                                             mock_ensure):
        # Ensure _update_inventory() gives up and returns False once its
        # retry attempts are exhausted.
        cn = self.compute_node
        mock_update.side_effect = (False, False, False)
        self.client._provider_tree.new_root(
            cn.hypervisor_hostname,
            cn.uuid,
            generation=42,
        )
        result = self.client._update_inventory(
            self.context, cn.uuid, mock.sentinel.inv_data
        )
        self.assertFalse(result)
        # Slept three times
        mock_sleep.assert_has_calls([mock.call(1), mock.call(1), mock.call(1)])
        # Three attempts to update
        mock_update.assert_has_calls([
            mock.call(self.context, cn.uuid, mock.sentinel.inv_data),
            mock.call(self.context, cn.uuid, mock.sentinel.inv_data),
            mock.call(self.context, cn.uuid, mock.sentinel.inv_data),
        ])
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_classes')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    def test_set_inventory_for_provider_no_custom(self, mock_erp, mock_erc,
                                                  mock_upd):
        """Tests that inventory records of all standard resource classes are
        passed to the report client's _update_inventory() method.
        """
        inv_data = {
            'VCPU': {
                'total': 24,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 24,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
            'MEMORY_MB': {
                'total': 1024,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 1024,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
            'DISK_GB': {
                'total': 100,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 100,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
        }
        self.client.set_inventory_for_provider(
            self.context,
            mock.sentinel.rp_uuid,
            mock.sentinel.rp_name,
            inv_data,
        )
        mock_erp.assert_called_once_with(
            self.context,
            mock.sentinel.rp_uuid,
            mock.sentinel.rp_name,
            parent_provider_uuid=None,
        )
        # No custom resource classes to ensure...
        mock_erc.assert_called_once_with(self.context,
                                         set(['VCPU', 'MEMORY_MB', 'DISK_GB']))
        mock_upd.assert_called_once_with(
            self.context,
            mock.sentinel.rp_uuid,
            inv_data,
        )
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_classes')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    def test_set_inventory_for_provider_no_inv(self, mock_erp, mock_erc,
                                               mock_upd):
        """Tests that passing empty set of inventory records triggers a delete
        of inventory for the provider.
        """
        inv_data = {}
        self.client.set_inventory_for_provider(
            self.context,
            mock.sentinel.rp_uuid,
            mock.sentinel.rp_name,
            inv_data,
        )
        mock_erp.assert_called_once_with(
            self.context,
            mock.sentinel.rp_uuid,
            mock.sentinel.rp_name,
            parent_provider_uuid=None,
        )
        mock_erc.assert_called_once_with(self.context, set())
        mock_upd.assert_called_once_with(
            self.context, mock.sentinel.rp_uuid, {})
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_update_inventory')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_classes')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    def test_set_inventory_for_provider_with_custom(self, mock_erp, mock_erc,
                                                    mock_upd):
        """Tests that inventory records that include a custom resource class
        are passed to the report client's _update_inventory() method and that
        the custom resource class is auto-created.
        """
        inv_data = {
            'VCPU': {
                'total': 24,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 24,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
            'MEMORY_MB': {
                'total': 1024,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 1024,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
            'DISK_GB': {
                'total': 100,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 100,
                'step_size': 1,
                'allocation_ratio': 1.0,
            },
            'CUSTOM_IRON_SILVER': {
                'total': 1,
                'reserved': 0,
                'min_unit': 1,
                'max_unit': 1,
                'step_size': 1,
                'allocation_ratio': 1.0,
            }
        }
        self.client.set_inventory_for_provider(
            self.context,
            mock.sentinel.rp_uuid,
            mock.sentinel.rp_name,
            inv_data,
        )
        mock_erp.assert_called_once_with(
            self.context,
            mock.sentinel.rp_uuid,
            mock.sentinel.rp_name,
            parent_provider_uuid=None,
        )
        # The custom class is included in the set to auto-create.
        mock_erc.assert_called_once_with(
            self.context,
            set(['VCPU', 'MEMORY_MB', 'DISK_GB', 'CUSTOM_IRON_SILVER']))
        mock_upd.assert_called_once_with(
            self.context,
            mock.sentinel.rp_uuid,
            inv_data,
        )
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_classes', new=mock.Mock())
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                '_ensure_resource_provider')
    def test_set_inventory_for_provider_with_parent(self, mock_erp):
        """Ensure parent UUID is sent through."""
        self.client.set_inventory_for_provider(
            self.context, uuids.child, 'junior', {},
            parent_provider_uuid=uuids.parent)
        mock_erp.assert_called_once_with(
            self.context, uuids.child, 'junior',
            parent_provider_uuid=uuids.parent)
class TestAllocations(SchedulerReportClientTestCase):
    """Tests for the report client's allocation handling: translating an
    instance's flavor into an allocations dict, and creating, updating and
    deleting allocations and resource providers against placement.
    """
    @mock.patch('nova.compute.utils.is_volume_backed_instance')
    def test_instance_to_allocations_dict(self, mock_vbi):
        """A local-disk instance reports VCPU, MEMORY_MB and DISK_GB."""
        mock_vbi.return_value = False
        inst = objects.Instance(
            uuid=uuids.inst,
            flavor=objects.Flavor(root_gb=10,
                                  swap=1023,
                                  ephemeral_gb=100,
                                  memory_mb=1024,
                                  vcpus=2,
                                  extra_specs={}))
        result = report._instance_to_allocations_dict(inst)
        # DISK_GB 111 = root_gb (10) + ephemeral_gb (100) + swap
        # (1023 MB, apparently rounded up to 1 GB) -- see expected below.
        expected = {
            'MEMORY_MB': 1024,
            'VCPU': 2,
            'DISK_GB': 111,
        }
        self.assertEqual(expected, result)
    @mock.patch('nova.compute.utils.is_volume_backed_instance')
    def test_instance_to_allocations_dict_overrides(self, mock_vbi):
        """Test that resource overrides in an instance's flavor extra_specs
        are reported to placement.
        """
        mock_vbi.return_value = False
        # Only well-formed 'resources:<CLASS>' keys with integer values for
        # standard or CUSTOM_* classes are honored; per the expected dict
        # below, the malformed/unknown entries are ignored.
        specs = {
            'resources:CUSTOM_DAN': '123',
            'resources:%s' % fields.ResourceClass.VCPU: '4',
            'resources:NOTATHING': '456',
            'resources:NOTEVENANUMBER': 'catfood',
            'resources:': '7',
            'resources:ferret:weasel': 'smelly',
            'foo': 'bar',
        }
        inst = objects.Instance(
            uuid=uuids.inst,
            flavor=objects.Flavor(root_gb=10,
                                  swap=1023,
                                  ephemeral_gb=100,
                                  memory_mb=1024,
                                  vcpus=2,
                                  extra_specs=specs))
        result = report._instance_to_allocations_dict(inst)
        expected = {
            'MEMORY_MB': 1024,
            'VCPU': 4,
            'DISK_GB': 111,
            'CUSTOM_DAN': 123,
        }
        self.assertEqual(expected, result)
    @mock.patch('nova.compute.utils.is_volume_backed_instance')
    def test_instance_to_allocations_dict_boot_from_volume(self, mock_vbi):
        """A volume-backed instance does not consume root_gb locally."""
        mock_vbi.return_value = True
        inst = objects.Instance(
            uuid=uuids.inst,
            flavor=objects.Flavor(root_gb=10,
                                  swap=1,
                                  ephemeral_gb=100,
                                  memory_mb=1024,
                                  vcpus=2,
                                  extra_specs={}))
        result = report._instance_to_allocations_dict(inst)
        # 101 = ephemeral (100) + swap (1); root_gb is excluded.
        expected = {
            'MEMORY_MB': 1024,
            'VCPU': 2,
            'DISK_GB': 101,
        }
        self.assertEqual(expected, result)
    @mock.patch('nova.compute.utils.is_volume_backed_instance')
    def test_instance_to_allocations_dict_zero_disk(self, mock_vbi):
        """With no local disk at all, DISK_GB is omitted entirely."""
        mock_vbi.return_value = True
        inst = objects.Instance(
            uuid=uuids.inst,
            flavor=objects.Flavor(root_gb=10,
                                  swap=0,
                                  ephemeral_gb=0,
                                  memory_mb=1024,
                                  vcpus=2,
                                  extra_specs={}))
        result = report._instance_to_allocations_dict(inst)
        expected = {
            'MEMORY_MB': 1024,
            'VCPU': 2,
        }
        self.assertEqual(expected, result)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'get')
    @mock.patch('nova.scheduler.client.report.'
                '_instance_to_allocations_dict')
    def test_update_instance_allocation_new(self, mock_a, mock_get,
                                            mock_put):
        """When no allocation exists yet, a PUT is issued at microversion
        1.8, which carries the project_id/user_id fields.
        """
        cn = objects.ComputeNode(uuid=uuids.cn)
        inst = objects.Instance(uuid=uuids.inst, project_id=uuids.project,
                                user_id=uuids.user)
        # GET reports no existing allocations for this instance.
        mock_get.return_value.json.return_value = {'allocations': {}}
        expected = {
            'allocations': [
                {'resource_provider': {'uuid': cn.uuid},
                 'resources': mock_a.return_value}],
            'project_id': inst.project_id,
            'user_id': inst.user_id,
        }
        self.client.update_instance_allocation(self.context, cn, inst, 1)
        mock_put.assert_called_once_with(
            '/allocations/%s' % inst.uuid,
            expected, version='1.8',
            global_request_id=self.context.global_id)
        self.assertTrue(mock_get.called)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'get')
    @mock.patch('nova.scheduler.client.report.'
                '_instance_to_allocations_dict')
    def test_update_instance_allocation_existing(self, mock_a, mock_get,
                                                 mock_put):
        """If the existing allocation already matches, no PUT is made."""
        cn = objects.ComputeNode(uuid=uuids.cn)
        inst = objects.Instance(uuid=uuids.inst)
        mock_get.return_value.json.return_value = {'allocations': {
            cn.uuid: {
                'generation': 2,
                'resources': {
                    'DISK_GB': 123,
                    'MEMORY_MB': 456,
                }
            }}
        }
        # The computed allocations equal what placement already has.
        mock_a.return_value = {
            'DISK_GB': 123,
            'MEMORY_MB': 456,
        }
        self.client.update_instance_allocation(self.context, cn, inst, 1)
        self.assertFalse(mock_put.called)
        mock_get.assert_called_once_with(
            '/allocations/%s' % inst.uuid,
            global_request_id=self.context.global_id)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'get')
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'put')
    @mock.patch('nova.scheduler.client.report.'
                '_instance_to_allocations_dict')
    @mock.patch.object(report.LOG, 'warning')
    def test_update_instance_allocation_new_failed(self, mock_warn, mock_a,
                                                   mock_put, mock_get):
        """A failed (400) PUT of a new allocation logs a warning."""
        cn = objects.ComputeNode(uuid=uuids.cn)
        inst = objects.Instance(uuid=uuids.inst, project_id=uuids.project,
                                user_id=uuids.user)
        mock_put.return_value = fake_requests.FakeResponse(400)
        self.client.update_instance_allocation(self.context, cn, inst, 1)
        self.assertTrue(mock_warn.called)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'delete')
    def test_update_instance_allocation_delete(self, mock_delete):
        """A negative sign deletes the instance's allocations."""
        cn = objects.ComputeNode(uuid=uuids.cn)
        inst = objects.Instance(uuid=uuids.inst)
        self.client.update_instance_allocation(self.context, cn, inst, -1)
        mock_delete.assert_called_once_with(
            '/allocations/%s' % inst.uuid,
            global_request_id=self.context.global_id)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'delete')
    @mock.patch.object(report.LOG, 'warning')
    def test_update_instance_allocation_delete_failed(self, mock_warn,
                                                      mock_delete):
        """A failed (400) allocation DELETE logs a warning."""
        cn = objects.ComputeNode(uuid=uuids.cn)
        inst = objects.Instance(uuid=uuids.inst)
        mock_delete.return_value = fake_requests.FakeResponse(400)
        self.client.update_instance_allocation(self.context, cn, inst, -1)
        self.assertTrue(mock_warn.called)
    @mock.patch('nova.scheduler.client.report.SchedulerReportClient.'
                'delete')
    @mock.patch('nova.scheduler.client.report.LOG')
    def test_delete_allocation_for_instance_ignore_404(self, mock_log,
                                                       mock_delete):
        """Tests that we don't log a warning on a 404 response when trying to
        delete an allocation record.
        """
        mock_delete.return_value = fake_requests.FakeResponse(404)
        self.client.delete_allocation_for_instance(self.context, uuids.rp_uuid)
        # make sure we didn't screw up the logic or the mock
        mock_log.info.assert_not_called()
        # make sure warning wasn't called for the 404
        mock_log.warning.assert_not_called()
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient."
                "delete")
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient."
                "delete_allocation_for_instance")
    @mock.patch("nova.objects.InstanceList.get_by_host_and_node")
    def test_delete_resource_provider_cascade(self, mock_by_host,
                                              mock_del_alloc, mock_delete):
        """cascade=True removes each instance's allocations before the
        provider itself, and drops the provider from the local tree cache.
        """
        self.client._provider_tree.new_root(uuids.cn, uuids.cn, generation=1)
        cn = objects.ComputeNode(uuid=uuids.cn, host="fake_host",
                                 hypervisor_hostname="fake_hostname", )
        inst1 = objects.Instance(uuid=uuids.inst1)
        inst2 = objects.Instance(uuid=uuids.inst2)
        mock_by_host.return_value = objects.InstanceList(
            objects=[inst1, inst2])
        resp_mock = mock.Mock(status_code=204)
        mock_delete.return_value = resp_mock
        self.client.delete_resource_provider(self.context, cn, cascade=True)
        # One allocation delete per instance on the host/node.
        self.assertEqual(2, mock_del_alloc.call_count)
        exp_url = "/resource_providers/%s" % uuids.cn
        mock_delete.assert_called_once_with(
            exp_url, global_request_id=self.context.global_id)
        self.assertFalse(self.client._provider_tree.exists(uuids.cn))
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient."
                "delete")
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient."
                "delete_allocation_for_instance")
    @mock.patch("nova.objects.InstanceList.get_by_host_and_node")
    def test_delete_resource_provider_no_cascade(self, mock_by_host,
                                                 mock_del_alloc, mock_delete):
        """Without cascade, instance allocations are left untouched and the
        cached association-refresh timestamp is discarded.
        """
        self.client._provider_tree.new_root(uuids.cn, uuids.cn, generation=1)
        self.client._association_refresh_time[uuids.cn] = mock.Mock()
        cn = objects.ComputeNode(uuid=uuids.cn, host="fake_host",
                                 hypervisor_hostname="fake_hostname", )
        inst1 = objects.Instance(uuid=uuids.inst1)
        inst2 = objects.Instance(uuid=uuids.inst2)
        mock_by_host.return_value = objects.InstanceList(
            objects=[inst1, inst2])
        resp_mock = mock.Mock(status_code=204)
        mock_delete.return_value = resp_mock
        self.client.delete_resource_provider(self.context, cn)
        mock_del_alloc.assert_not_called()
        exp_url = "/resource_providers/%s" % uuids.cn
        mock_delete.assert_called_once_with(
            exp_url, global_request_id=self.context.global_id)
        self.assertNotIn(uuids.cn, self.client._association_refresh_time)
    @mock.patch("nova.scheduler.client.report.SchedulerReportClient."
                "delete")
    @mock.patch('nova.scheduler.client.report.LOG')
    def test_delete_resource_provider_log_calls(self, mock_log, mock_delete):
        """204 logs info, 404 logs nothing, 409 logs an error."""
        # First, check a successful call
        self.client._provider_tree.new_root(uuids.cn, uuids.cn, generation=1)
        cn = objects.ComputeNode(uuid=uuids.cn, host="fake_host",
                                 hypervisor_hostname="fake_hostname", )
        resp_mock = fake_requests.FakeResponse(204)
        mock_delete.return_value = resp_mock
        self.client.delete_resource_provider(self.context, cn)
        # With a 204, only the info should be called
        self.assertEqual(1, mock_log.info.call_count)
        self.assertEqual(0, mock_log.warning.call_count)
        # Now check a 404 response
        mock_log.reset_mock()
        resp_mock.status_code = 404
        self.client.delete_resource_provider(self.context, cn)
        # With a 404, neither log message should be called
        self.assertEqual(0, mock_log.info.call_count)
        self.assertEqual(0, mock_log.warning.call_count)
        # Finally, check a 409 response
        mock_log.reset_mock()
        resp_mock.status_code = 409
        self.client.delete_resource_provider(self.context, cn)
        # With a 409, only the error should be called
        self.assertEqual(0, mock_log.info.call_count)
        self.assertEqual(1, mock_log.error.call_count)
class TestResourceClass(SchedulerReportClientTestCase):
    """Tests for SchedulerReportClient._ensure_resource_classes."""

    def setUp(self):
        super(TestResourceClass, self).setUp()
        put_patcher = mock.patch(
            "nova.scheduler.client.report.SchedulerReportClient.put")
        self.addCleanup(put_patcher.stop)
        self.mock_put = put_patcher.start()

    def test_ensure_resource_classes(self):
        """Only the CUSTOM_* classes are PUT to placement."""
        resource_classes = ['VCPU', 'CUSTOM_FOO', 'MEMORY_MB', 'CUSTOM_BAR']
        self.client._ensure_resource_classes(self.context, resource_classes)
        expected_calls = [
            mock.call('/resource_classes/%s' % custom, None, version='1.7',
                      global_request_id=self.context.global_id)
            for custom in ('CUSTOM_FOO', 'CUSTOM_BAR')]
        self.mock_put.assert_has_calls(expected_calls, any_order=True)

    def test_ensure_resource_classes_none(self):
        """Any flavor of empty input results in no placement calls."""
        for no_classes in ([], (), set(), {}):
            self.client._ensure_resource_classes(self.context, no_classes)
        self.mock_put.assert_not_called()

    def test_ensure_resource_classes_put_fail(self):
        """A failing PUT surfaces as InvalidResourceClass."""
        self.mock_put.return_value = fake_requests.FakeResponse(503)
        resource_classes = ['VCPU', 'MEMORY_MB', 'CUSTOM_BAD']
        self.assertRaises(
            exception.InvalidResourceClass,
            self.client._ensure_resource_classes, self.context,
            resource_classes)
        # Only called with the "bad" one
        self.mock_put.assert_called_once_with(
            '/resource_classes/CUSTOM_BAD', None, version='1.7',
            global_request_id=self.context.global_id)
# codeparrot/github-code-clean
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import copy
import eventlet
import mock
import netaddr
from oslo.config import cfg
from oslo import messaging
from testtools import matchers
from neutron.agent.common import config as agent_config
from neutron.agent.l3 import agent as l3_agent
from neutron.agent.l3 import config as l3_config
from neutron.agent.l3 import dvr
from neutron.agent.l3 import dvr_router
from neutron.agent.l3 import ha
from neutron.agent.l3 import link_local_allocator as lla
from neutron.agent.l3 import router_info as l3router
from neutron.agent.linux import interface
from neutron.agent.linux import ra
from neutron.agent.metadata import driver as metadata_driver
from neutron.common import config as base_config
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.common import utils as common_utils
from neutron.i18n import _LE
from neutron.openstack.common import log
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants as p_const
from neutron.tests import base
# Shorthand used throughout this module to generate unique identifiers.
_uuid = uuidutils.generate_uuid
# Hostname the fake L3 agent is instantiated with in these tests.
HOSTNAME = 'myhost'
# Pre-generated ids for reuse across test cases.
FAKE_ID = _uuid()
FAKE_ID_2 = _uuid()
# NOTE(review): presumably the ip-rule priority used for floating IPs;
# not referenced in this chunk — verify at call sites.
FIP_PRI = 32768
class FakeDev(object):
    """Minimal stand-in for a network device object carrying only a name."""

    def __init__(self, name):
        # The device name, e.g. 'tap1234'.
        self.name = name

    def __repr__(self):
        # Makes failed-assertion output identify the device instead of
        # printing an opaque object address.
        return "%s(name=%r)" % (type(self).__name__, self.name)
def router_append_interface(router, count=1, ip_version=4, ra_mode=None,
                            addr_mode=None):
    """Append *count* fake internal interfaces of *ip_version* to *router*.

    Numbering continues after any interfaces of the same IP version that
    the router already has, and each new port gets a consecutive MAC.
    """
    address_templates = {
        4: ('35.4.%i.4', '35.4.%i.0/24', '35.4.%i.1'),
        6: ('fd01:%x::6', 'fd01:%x::/64', 'fd01:%x::1'),
    }
    if ip_version not in address_templates:
        raise ValueError("Invalid ip_version: %s" % ip_version)
    ip_pool, cidr_pool, gw_pool = address_templates[ip_version]
    interfaces = router[l3_constants.INTERFACE_KEY]
    # Start numbering after the existing interfaces of this IP version.
    current = sum(netaddr.IPNetwork(p['subnet']['cidr']).version == ip_version
                  for p in interfaces)
    mac_address = netaddr.EUI('ca:fe:de:ad:be:ef')
    mac_address.dialect = netaddr.mac_unix
    for i in range(current, current + count):
        interfaces.append(
            {'id': _uuid(),
             'network_id': _uuid(),
             'admin_state_up': True,
             'fixed_ips': [{'ip_address': ip_pool % i,
                            'subnet_id': _uuid()}],
             'mac_address': str(mac_address),
             'subnet': {'cidr': cidr_pool % i,
                        'gateway_ip': gw_pool % i,
                        'ipv6_ra_mode': ra_mode,
                        'ipv6_address_mode': addr_mode}})
        mac_address.value += 1
def prepare_router_data(ip_version=4, enable_snat=None, num_internal_ports=1,
                        enable_floating_ip=False, enable_ha=False,
                        extra_routes=False):
    """Build a fake router dict with a gateway port and optional extras.

    Optional flags add a floating IP, HA fields, extra routes, or an
    explicit enable_snat value to the returned router.
    """
    gw_addresses = {
        4: ('19.4.4.4', '19.4.4.0/24', '19.4.4.1'),
        6: ('fd00::4', 'fd00::/64', 'fd00::1'),
    }
    if ip_version not in gw_addresses:
        raise ValueError("Invalid ip_version: %s" % ip_version)
    ip_addr, cidr, gateway_ip = gw_addresses[ip_version]
    router_id = _uuid()
    ex_gw_port = {'id': _uuid(),
                  'mac_address': 'ca:fe:de:ad:be:ee',
                  'network_id': _uuid(),
                  'fixed_ips': [{'ip_address': ip_addr,
                                 'subnet_id': _uuid()}],
                  'subnet': {'cidr': cidr,
                             'gateway_ip': gateway_ip}}
    routes = ([{'destination': '8.8.8.0/24', 'nexthop': ip_addr}]
              if extra_routes else [])
    router = {
        'id': router_id,
        'distributed': False,
        l3_constants.INTERFACE_KEY: [],
        'routes': routes,
        'gw_port': ex_gw_port}
    if enable_floating_ip:
        router[l3_constants.FLOATINGIP_KEY] = [{
            'id': _uuid(),
            'port_id': _uuid(),
            'floating_ip_address': '19.4.4.2',
            'fixed_ip_address': '10.0.0.1'}]
    router_append_interface(router, count=num_internal_ports,
                            ip_version=ip_version)
    if enable_ha:
        router['ha'] = True
        router['ha_vr_id'] = 1
        router[l3_constants.HA_INTERFACE_KEY] = get_ha_interface()
    if enable_snat is not None:
        router['enable_snat'] = enable_snat
    return router
def _get_subnet_id(port):
return port['fixed_ips'][0]['subnet_id']
# TODO(jschwarz): This function is shared between the unit tests and
# the functional tests; it should be moved elsewhere (probably
# neutron/tests/common/).
def get_ha_interface(ip='169.254.192.1', mac='12:34:56:78:2b:5d'):
    """Return a fake L3 HA admin port dict on the 169.254.192.0/18 subnet."""
    ha_port = {
        'admin_state_up': True,
        'device_id': _uuid(),
        'device_owner': 'network:router_ha_interface',
        'fixed_ips': [{'ip_address': ip,
                       'subnet_id': _uuid()}],
        'id': _uuid(),
        'mac_address': mac,
        'name': u'L3 HA Admin port 0',
        'network_id': _uuid(),
        'status': u'ACTIVE',
        'subnet': {'cidr': '169.254.192.0/18',
                   'gateway_ip': '169.254.255.254',
                   'id': _uuid()},
        'tenant_id': '',
        'agent_id': _uuid(),
        'agent_host': 'aaa',
        'priority': 1,
    }
    return ha_port
class TestBasicRouterOperations(base.BaseTestCase):
    """Unit tests for the L3 NAT agent's core router operations."""
    def setUp(self):
        """Build the agent config and patch out every external touchpoint
        (ip_lib, utils.execute, interface driver, plugin RPC, ...) so the
        tests exercise only the agent's own logic.
        """
        super(TestBasicRouterOperations, self).setUp()
        # Register all option groups the L3 agent expects on its conf.
        self.conf = agent_config.setup_conf()
        self.conf.register_opts(base_config.core_opts)
        self.conf.register_cli_opts(log.common_cli_opts)
        self.conf.register_cli_opts(log.logging_cli_opts)
        self.conf.register_opts(l3_config.OPTS)
        self.conf.register_opts(ha.OPTS)
        agent_config.register_interface_driver_opts_helper(self.conf)
        agent_config.register_use_namespaces_opts_helper(self.conf)
        agent_config.register_root_helper(self.conf)
        self.conf.register_opts(interface.OPTS)
        self.conf.set_override('router_id', 'fake_id')
        self.conf.set_override('interface_driver',
                               'neutron.agent.linux.interface.NullDriver')
        self.conf.set_override('send_arp_for_ha', 1)
        self.conf.set_override('state_path', '')
        self.conf.root_helper = 'sudo'
        # Patch out anything that would touch the real system.
        self.device_exists_p = mock.patch(
            'neutron.agent.linux.ip_lib.device_exists')
        self.device_exists = self.device_exists_p.start()
        mock.patch('neutron.agent.l3.ha.AgentMixin'
                   '._init_ha_conf_path').start()
        mock.patch('neutron.agent.linux.keepalived.KeepalivedNotifierMixin'
                   '._get_full_config_file_path').start()
        self.utils_exec_p = mock.patch(
            'neutron.agent.linux.utils.execute')
        self.utils_exec = self.utils_exec_p.start()
        self.utils_replace_file_p = mock.patch(
            'neutron.agent.linux.utils.replace_file')
        self.utils_replace_file = self.utils_replace_file_p.start()
        self.external_process_p = mock.patch(
            'neutron.agent.linux.external_process.ProcessManager')
        self.external_process = self.external_process_p.start()
        self.send_arp_p = mock.patch(
            'neutron.agent.linux.ip_lib.send_gratuitous_arp')
        self.send_arp = self.send_arp_p.start()
        self.send_arp_proxyarp_p = mock.patch(
            'neutron.agent.linux.ip_lib.send_garp_for_proxyarp')
        self.send_arp_proxyarp = self.send_arp_proxyarp_p.start()
        # The NullDriver configured above is replaced by a MagicMock so
        # tests can assert on plug/unplug/init_l3 calls.
        self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
        driver_cls = self.dvr_cls_p.start()
        self.mock_driver = mock.MagicMock()
        self.mock_driver.DEV_NAME_LEN = (
            interface.LinuxInterfaceDriver.DEV_NAME_LEN)
        driver_cls.return_value = self.mock_driver
        self.ip_cls_p = mock.patch('neutron.agent.linux.ip_lib.IPWrapper')
        ip_cls = self.ip_cls_p.start()
        self.mock_ip = mock.MagicMock()
        ip_cls.return_value = self.mock_ip
        ip_rule = mock.patch('neutron.agent.linux.ip_lib.IpRule').start()
        self.mock_rule = mock.MagicMock()
        ip_rule.return_value = self.mock_rule
        ip_dev = mock.patch('neutron.agent.linux.ip_lib.IPDevice').start()
        self.mock_ip_dev = mock.MagicMock()
        ip_dev.return_value = self.mock_ip_dev
        # Plugin RPC API is fully mocked; tests set return values on it.
        self.l3pluginApi_cls_p = mock.patch(
            'neutron.agent.l3.agent.L3PluginApi')
        l3pluginApi_cls = self.l3pluginApi_cls_p.start()
        self.plugin_api = mock.MagicMock()
        l3pluginApi_cls.return_value = self.plugin_api
        self.looping_call_p = mock.patch(
            'neutron.openstack.common.loopingcall.FixedIntervalLoopingCall')
        self.looping_call_p.start()
        # Two fake centralized-SNAT ports used by the DVR gateway tests.
        self.snat_ports = [{'subnet': {'cidr': '152.2.0.0/16',
                                       'gateway_ip': '152.2.0.1',
                                       'id': _uuid()},
                            'network_id': _uuid(),
                            'device_owner': 'network:router_centralized_snat',
                            'ip_cidr': '152.2.0.13/16',
                            'mac_address': 'fa:16:3e:80:8d:80',
                            'fixed_ips': [{'subnet_id': _uuid(),
                                           'ip_address': '152.2.0.13'}],
                            'id': _uuid(), 'device_id': _uuid()},
                           {'subnet': {'cidr': '152.10.0.0/16',
                                       'gateway_ip': '152.10.0.1',
                                       'id': _uuid()},
                            'network_id': _uuid(),
                            'device_owner': 'network:router_centralized_snat',
                            'ip_cidr': '152.10.0.13/16',
                            'mac_address': 'fa:16:3e:80:8d:80',
                            'fixed_ips': [{'subnet_id': _uuid(),
                                           'ip_address': '152.10.0.13'}],
                            'id': _uuid(), 'device_id': _uuid()}]
def _prepare_internal_network_data(self):
port_id = _uuid()
router_id = _uuid()
network_id = _uuid()
router = prepare_router_data(num_internal_ports=2)
router_id = router['id']
ri = l3router.RouterInfo(router_id, self.conf.root_helper,
router=router)
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
cidr = '99.0.1.9/24'
mac = 'ca:fe:de:ad:be:ef'
port = {'network_id': network_id,
'id': port_id, 'ip_cidr': cidr,
'mac_address': mac}
return agent, ri, port
def test_periodic_sync_routers_task_raise_exception(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.plugin_api.get_routers.side_effect = ValueError()
with mock.patch.object(agent, '_cleanup_namespaces') as f:
self.assertRaises(ValueError, agent.periodic_sync_routers_task,
agent.context)
self.assertTrue(agent.fullsync)
self.assertFalse(f.called)
def test_l3_initial_full_sync_done(self):
with mock.patch.object(l3_agent.L3NATAgent,
'periodic_sync_routers_task') as router_sync:
with mock.patch.object(eventlet, 'spawn_n'):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.after_start()
router_sync.assert_called_once_with(agent.context)
def test_periodic_sync_routers_task_call_clean_stale_namespaces(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
self.plugin_api.get_routers.return_value = []
with mock.patch.object(agent, '_cleanup_namespaces') as f:
agent.periodic_sync_routers_task(agent.context)
self.assertTrue(f.called)
def test_router_info_create(self):
id = _uuid()
ns = "ns-" + id
ri = l3router.RouterInfo(id, self.conf.root_helper, {}, ns_name=ns)
self.assertTrue(ri.ns_name.endswith(id))
def test_router_info_create_with_router(self):
id = _uuid()
ex_gw_port = {'id': _uuid(),
'network_id': _uuid(),
'fixed_ips': [{'ip_address': '19.4.4.4',
'subnet_id': _uuid()}],
'subnet': {'cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.1'}}
router = {
'id': _uuid(),
'enable_snat': True,
'routes': [],
'gw_port': ex_gw_port}
ns = "ns-" + id
ri = l3router.RouterInfo(id, self.conf.root_helper, router, ns_name=ns)
self.assertTrue(ri.ns_name.endswith(id))
self.assertEqual(ri.router, router)
def test_agent_create(self):
l3_agent.L3NATAgent(HOSTNAME, self.conf)
    def _test_internal_network_action(self, action):
        """Drive internal_network_added/removed and check driver calls.

        :param action: 'add' or 'remove'; anything else is a test bug.
        """
        agent, ri, port = self._prepare_internal_network_data()
        interface_name = agent.get_internal_device_name(port['id'])
        if action == 'add':
            # Pretend the device is absent so a plug is attempted.
            self.device_exists.return_value = False
            agent.internal_network_added(ri, port)
            self.assertEqual(self.mock_driver.plug.call_count, 1)
            self.assertEqual(self.mock_driver.init_l3.call_count, 1)
            # 99.0.1.9 is the port address from
            # _prepare_internal_network_data; a gratuitous ARP announces it.
            self.send_arp.assert_called_once_with(ri.ns_name, interface_name,
                                                  '99.0.1.9',
                                                  mock.ANY, mock.ANY)
        elif action == 'remove':
            self.device_exists.return_value = True
            agent.internal_network_removed(ri, port)
            self.assertEqual(self.mock_driver.unplug.call_count, 1)
        else:
            raise Exception("Invalid action %s" % action)
    def _test_internal_network_action_dist(self, action):
        """Like _test_internal_network_action, but for a distributed router
        on a dvr_snat node: the add path must also set up SNAT redirection
        and plug the port into the SNAT namespace.
        """
        agent, ri, port = self._prepare_internal_network_data()
        ri.router['distributed'] = True
        ri.router['gw_port_host'] = HOSTNAME
        agent.host = HOSTNAME
        agent.conf.agent_mode = 'dvr_snat'
        # Fake centralized-SNAT port returned by _map_internal_interfaces.
        sn_port = {'fixed_ips': [{'ip_address': '20.0.0.31',
                                  'subnet_id': _uuid()}],
                   'subnet': {'gateway_ip': '20.0.0.1'},
                   'extra_subnets': [{'cidr': '172.16.0.0/24'}],
                   'id': _uuid(),
                   'network_id': _uuid(),
                   'mac_address': 'ca:fe:de:ad:be:ef',
                   'ip_cidr': '20.0.0.31/24'}
        if action == 'add':
            self.device_exists.return_value = False
            agent._map_internal_interfaces = mock.Mock(return_value=sn_port)
            agent._snat_redirect_add = mock.Mock()
            agent._set_subnet_info = mock.Mock()
            agent._internal_network_added = mock.Mock()
            agent.internal_network_added(ri, port)
            self.assertEqual(agent._snat_redirect_add.call_count, 1)
            self.assertEqual(agent._set_subnet_info.call_count, 1)
            # Called twice: once for the router port, once for the SNAT
            # port; the last call (asserted below) is the SNAT one.
            self.assertEqual(agent._internal_network_added.call_count, 2)
            agent._internal_network_added.assert_called_with(
                agent.get_snat_ns_name(ri.router['id']),
                sn_port['network_id'],
                sn_port['id'],
                sn_port['ip_cidr'],
                sn_port['mac_address'],
                agent.get_snat_int_device_name(sn_port['id']),
                dvr.SNAT_INT_DEV_PREFIX)
def test_agent_add_internal_network(self):
self._test_internal_network_action('add')
def test_agent_add_internal_network_dist(self):
self._test_internal_network_action_dist('add')
def test_agent_remove_internal_network(self):
self._test_internal_network_action('remove')
    def _test_external_gateway_action(self, action, router):
        """Drive external_gateway_added/removed and verify driver calls.

        For distributed routers, gateway creation is delegated to
        _create_dvr_gateway (mocked here) instead of the interface driver.

        :param action: 'add' or 'remove'; anything else is a test bug.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router,
                                 ns_name=agent.get_ns_name(router['id']))
        # Special setup for dvr routers
        if router.get('distributed'):
            agent.conf.agent_mode = 'dvr_snat'
            agent.host = HOSTNAME
            agent._create_dvr_gateway = mock.Mock()
            agent.get_snat_interfaces = mock.Mock(return_value=self.snat_ports)
        ex_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                     'subnet_id': _uuid()}],
                      'subnet': {'gateway_ip': '20.0.0.1'},
                      'extra_subnets': [{'cidr': '172.16.0.0/24'}],
                      'id': _uuid(),
                      'network_id': _uuid(),
                      'mac_address': 'ca:fe:de:ad:be:ef',
                      'ip_cidr': '20.0.0.30/24'}
        interface_name = agent.get_external_device_name(ex_gw_port['id'])
        if action == 'add':
            # Force a plug by pretending the device is absent.
            self.device_exists.return_value = False
            fake_fip = {'floatingips': [{'id': _uuid(),
                                         'floating_ip_address': '192.168.1.34',
                                         'fixed_ip_address': '192.168.0.1',
                                         'port_id': _uuid()}]}
            router[l3_constants.FLOATINGIP_KEY] = fake_fip['floatingips']
            agent.external_gateway_added(ri, ex_gw_port, interface_name)
            if not router.get('distributed'):
                self.assertEqual(self.mock_driver.plug.call_count, 1)
                self.assertEqual(self.mock_driver.init_l3.call_count, 1)
                self.send_arp.assert_called_once_with(ri.ns_name,
                                                      interface_name,
                                                      '20.0.0.30',
                                                      mock.ANY, mock.ANY)
                # The floating IP must be preserved on the gateway device.
                kwargs = {'preserve_ips': ['192.168.1.34/32'],
                          'namespace': 'qrouter-' + router['id'],
                          'gateway': '20.0.0.1',
                          'extra_subnets': [{'cidr': '172.16.0.0/24'}]}
                self.mock_driver.init_l3.assert_called_with(interface_name,
                                                            ['20.0.0.30/24'],
                                                            **kwargs)
            else:
                agent._create_dvr_gateway.assert_called_once_with(
                    ri, ex_gw_port, interface_name,
                    self.snat_ports)
        elif action == 'remove':
            self.device_exists.return_value = True
            agent.external_gateway_removed(ri, ex_gw_port, interface_name)
            self.assertEqual(self.mock_driver.unplug.call_count, 1)
        else:
            raise Exception("Invalid action %s" % action)
def _prepare_ext_gw_test(self, agent):
ex_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
'subnet_id': _uuid()}],
'subnet': {'gateway_ip': '20.0.0.1'},
'extra_subnets': [{'cidr': '172.16.0.0/24'}],
'id': _uuid(),
'network_id': _uuid(),
'mac_address': 'ca:fe:de:ad:be:ef',
'ip_cidr': '20.0.0.30/24'}
interface_name = agent.get_external_device_name(ex_gw_port['id'])
self.device_exists.return_value = True
return interface_name, ex_gw_port
    def test_external_gateway_updated(self):
        """Updating an existing gateway re-runs init_l3 (preserving the
        floating IP) without re-plugging the device.
        """
        router = prepare_router_data(num_internal_ports=2)
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router,
                                 ns_name=agent.get_ns_name(router['id']))
        interface_name, ex_gw_port = self._prepare_ext_gw_test(agent)
        fake_fip = {'floatingips': [{'id': _uuid(),
                                     'floating_ip_address': '192.168.1.34',
                                     'fixed_ip_address': '192.168.0.1',
                                     'port_id': _uuid()}]}
        router[l3_constants.FLOATINGIP_KEY] = fake_fip['floatingips']
        agent.external_gateway_updated(ri, ex_gw_port,
                                       interface_name)
        # No plug: the device already exists (see _prepare_ext_gw_test).
        self.assertEqual(self.mock_driver.plug.call_count, 0)
        self.assertEqual(self.mock_driver.init_l3.call_count, 1)
        self.send_arp.assert_called_once_with(ri.ns_name, interface_name,
                                              '20.0.0.30', mock.ANY, mock.ANY)
        kwargs = {'preserve_ips': ['192.168.1.34/32'],
                  'namespace': 'qrouter-' + router['id'],
                  'gateway': '20.0.0.1',
                  'extra_subnets': [{'cidr': '172.16.0.0/24'}]}
        self.mock_driver.init_l3.assert_called_with(interface_name,
                                                    ['20.0.0.30/24'],
                                                    **kwargs)
    def _test_ext_gw_updated_dvr_agent_mode(self, host,
                                            agent_mode, expected_call_count):
        """Check how many times a dvr gateway update adds the gateway.

        :param host: the gw_port_host set on the router
        :param agent_mode: 'dvr' or 'dvr_snat' for this agent
        :param expected_call_count: expected _external_gateway_added calls
        """
        router = prepare_router_data(num_internal_ports=2)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        interface_name, ex_gw_port = self._prepare_ext_gw_test(agent)
        agent._external_gateway_added = mock.Mock()
        # test agent mode = dvr (compute node)
        router['distributed'] = True
        router['gw_port_host'] = host
        agent.conf.agent_mode = agent_mode
        agent.external_gateway_updated(ri, ex_gw_port,
                                       interface_name)
        # no gateway should be added on dvr node
        self.assertEqual(expected_call_count,
                         agent._external_gateway_added.call_count)
def test_ext_gw_updated_dvr_agent_mode(self):
# no gateway should be added on dvr node
self._test_ext_gw_updated_dvr_agent_mode('any-foo', 'dvr', 0)
def test_ext_gw_updated_dvr_snat_agent_mode_no_host(self):
# no gateway should be added on dvr_snat node without host match
self._test_ext_gw_updated_dvr_agent_mode('any-foo', 'dvr_snat', 0)
def test_ext_gw_updated_dvr_snat_agent_mode_host(self):
# gateway should be added on dvr_snat node
self._test_ext_gw_updated_dvr_agent_mode(self.conf.host,
'dvr_snat', 1)
def test_agent_add_external_gateway(self):
router = prepare_router_data(num_internal_ports=2)
self._test_external_gateway_action('add', router)
def test_agent_add_external_gateway_dist(self):
router = prepare_router_data(num_internal_ports=2)
router['distributed'] = True
router['gw_port_host'] = HOSTNAME
self._test_external_gateway_action('add', router)
def test_agent_remove_external_gateway(self):
router = prepare_router_data(num_internal_ports=2)
self._test_external_gateway_action('remove', router)
def test_agent_remove_external_gateway_dist(self):
router = prepare_router_data(num_internal_ports=2)
router['distributed'] = True
router['gw_port_host'] = HOSTNAME
self._test_external_gateway_action('remove', router)
def _check_agent_method_called(self, agent, calls, namespace):
self.mock_ip.netns.execute.assert_has_calls(
[mock.call(call, check_exit_code=False) for call in calls],
any_order=True)
    def _test_routing_table_update(self, namespace):
        """Replace and delete two routes, checking the exact 'ip route'
        commands issued each time.

        :param namespace: when False, namespaces are disabled in the conf
        """
        if not namespace:
            self.conf.set_override('use_namespaces', False)
        router_id = _uuid()
        ri = l3router.RouterInfo(router_id, self.conf.root_helper, {})
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        fake_route1 = {'destination': '135.207.0.0/16',
                       'nexthop': '1.2.3.4'}
        fake_route2 = {'destination': '135.207.111.111/32',
                       'nexthop': '1.2.3.4'}
        agent._update_routing_table(ri, 'replace', fake_route1)
        expected = [['ip', 'route', 'replace', 'to', '135.207.0.0/16',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(agent, expected, namespace)
        agent._update_routing_table(ri, 'delete', fake_route1)
        expected = [['ip', 'route', 'delete', 'to', '135.207.0.0/16',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(agent, expected, namespace)
        agent._update_routing_table(ri, 'replace', fake_route2)
        expected = [['ip', 'route', 'replace', 'to', '135.207.111.111/32',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(agent, expected, namespace)
        agent._update_routing_table(ri, 'delete', fake_route2)
        expected = [['ip', 'route', 'delete', 'to', '135.207.111.111/32',
                     'via', '1.2.3.4']]
        self._check_agent_method_called(agent, expected, namespace)
def test_agent_routing_table_updated(self):
self._test_routing_table_update(namespace=True)
def test_agent_routing_table_updated_no_namespace(self):
self._test_routing_table_update(namespace=False)
def test_routes_updated(self):
self._test_routes_updated(namespace=True)
def test_routes_updated_no_namespace(self):
self._test_routes_updated(namespace=False)
    def _test_routes_updated(self, namespace=True):
        """Reconcile router routes three times (add two, drop one, drop
        the last) and check the 'ip route' commands issued each pass.

        :param namespace: when False, namespaces are disabled in the conf
        """
        if not namespace:
            self.conf.set_override('use_namespaces', False)
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router_id = _uuid()
        ri = l3router.RouterInfo(router_id, self.conf.root_helper, {})
        ri.router = {}
        fake_old_routes = []
        fake_new_routes = [{'destination': "110.100.31.0/24",
                            'nexthop': "10.100.10.30"},
                           {'destination': "110.100.30.0/24",
                            'nexthop': "10.100.10.30"}]
        ri.routes = fake_old_routes
        ri.router['routes'] = fake_new_routes
        agent.routes_updated(ri)
        # Both new routes get a 'replace'.
        expected = [['ip', 'route', 'replace', 'to', '110.100.30.0/24',
                     'via', '10.100.10.30'],
                    ['ip', 'route', 'replace', 'to', '110.100.31.0/24',
                     'via', '10.100.10.30']]
        self._check_agent_method_called(agent, expected, namespace)
        fake_new_routes = [{'destination': "110.100.30.0/24",
                            'nexthop': "10.100.10.30"}]
        ri.router['routes'] = fake_new_routes
        agent.routes_updated(ri)
        # The dropped route gets a 'delete'.
        expected = [['ip', 'route', 'delete', 'to', '110.100.31.0/24',
                     'via', '10.100.10.30']]
        self._check_agent_method_called(agent, expected, namespace)
        fake_new_routes = []
        ri.router['routes'] = fake_new_routes
        agent.routes_updated(ri)
        expected = [['ip', 'route', 'delete', 'to', '110.100.30.0/24',
                     'via', '10.100.10.30']]
        self._check_agent_method_called(agent, expected, namespace)
def _verify_snat_rules(self, rules, router, negate=False):
interfaces = router[l3_constants.INTERFACE_KEY]
source_cidrs = []
for iface in interfaces:
prefix = iface['subnet']['cidr'].split('/')[1]
source_cidr = "%s/%s" % (iface['fixed_ips'][0]['ip_address'],
prefix)
source_cidrs.append(source_cidr)
source_nat_ip = router['gw_port']['fixed_ips'][0]['ip_address']
interface_name = ('qg-%s' % router['gw_port']['id'])[:14]
expected_rules = [
'! -i %s ! -o %s -m conntrack ! --ctstate DNAT -j ACCEPT' %
(interface_name, interface_name),
'-o %s -j SNAT --to-source %s' % (interface_name, source_nat_ip)]
for r in rules:
if negate:
self.assertNotIn(r.rule, expected_rules)
else:
self.assertIn(r.rule, expected_rules)
def test__get_snat_idx_ipv4(self):
ip_cidr = '101.12.13.00/24'
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
snat_idx = agent._get_snat_idx(ip_cidr)
# 0x650C0D00 is numerical value of 101.12.13.00
self.assertEqual(0x650C0D00, snat_idx)
def test__get_snat_idx_ipv6(self):
ip_cidr = '2620:0:a03:e100::/64'
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
snat_idx = agent._get_snat_idx(ip_cidr)
# 0x3D345705 is 30 bit xor folded crc32 of the ip_cidr
self.assertEqual(0x3D345705, snat_idx)
def test__get_snat_idx_ipv6_below_32768(self):
ip_cidr = 'd488::/30'
# crc32 of this ip_cidr is 0x1BD7
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
snat_idx = agent._get_snat_idx(ip_cidr)
# 0x1BD7 + 0x3FFFFFFF = 0x40001BD6
self.assertEqual(0x40001BD6, snat_idx)
    def test__map_internal_interfaces(self):
        """_map_internal_interfaces matches ports by subnet_id.

        A port whose fixed IP shares the interface's subnet is returned;
        a port on an unknown subnet yields None.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(num_internal_ports=4)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        test_port = {
            'mac_address': '00:12:23:34:45:56',
            'fixed_ips': [{'subnet_id': _get_subnet_id(
                router[l3_constants.INTERFACE_KEY][0]),
                'ip_address': '101.12.13.14'}]}
        internal_ports = ri.router.get(l3_constants.INTERFACE_KEY, [])
        # test valid case
        res_port = agent._map_internal_interfaces(ri,
                                                  internal_ports[0],
                                                  [test_port])
        self.assertEqual(test_port, res_port)
        # test invalid case
        test_port['fixed_ips'][0]['subnet_id'] = 1234
        res_ip = agent._map_internal_interfaces(ri,
                                                internal_ports[0],
                                                [test_port])
        self.assertNotEqual(test_port, res_ip)
        self.assertIsNone(res_ip)
    def test_get_internal_port(self):
        """get_internal_port returns the port on a given subnet, or None."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(num_internal_ports=4)
        subnet_ids = [_get_subnet_id(port) for port in
                      router[l3_constants.INTERFACE_KEY]]
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        # Test Basic cases
        port = agent.get_internal_port(ri, subnet_ids[0])
        fips = port.get('fixed_ips', [])
        subnet_id = fips[0]['subnet_id']
        self.assertEqual(subnet_ids[0], subnet_id)
        port = agent.get_internal_port(ri, subnet_ids[1])
        fips = port.get('fixed_ips', [])
        subnet_id = fips[0]['subnet_id']
        self.assertEqual(subnet_ids[1], subnet_id)
        port = agent.get_internal_port(ri, subnet_ids[3])
        fips = port.get('fixed_ips', [])
        subnet_id = fips[0]['subnet_id']
        self.assertEqual(subnet_ids[3], subnet_id)
        # Test miss cases: an unknown subnet id returns None, and a hit on
        # one subnet must not return a port belonging to a different one.
        no_port = agent.get_internal_port(ri, FAKE_ID)
        self.assertIsNone(no_port)
        port = agent.get_internal_port(ri, subnet_ids[0])
        fips = port.get('fixed_ips', [])
        subnet_id = fips[0]['subnet_id']
        self.assertNotEqual(subnet_ids[3], subnet_id)
def test__set_subnet_arp_info(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
router = prepare_router_data(num_internal_ports=2)
router['distributed'] = True
ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
router=router)
ports = ri.router.get(l3_constants.INTERFACE_KEY, [])
test_ports = [{'mac_address': '00:11:22:33:44:55',
'device_owner': 'network:dhcp',
'subnet_id': _get_subnet_id(ports[0]),
'fixed_ips': [{'ip_address': '1.2.3.4'}]}]
self.plugin_api.get_ports_by_subnet.return_value = test_ports
# Test basic case
ports[0]['subnet']['id'] = _get_subnet_id(ports[0])
agent._set_subnet_arp_info(ri, ports[0])
self.mock_ip_dev.neigh.add.assert_called_once_with(
4, '1.2.3.4', '00:11:22:33:44:55')
# Test negative case
router['distributed'] = False
agent._set_subnet_arp_info(ri, ports[0])
self.mock_ip_dev.neigh.add.never_called()
    def test_add_arp_entry(self):
        """add_arp_entry installs a neighbor entry for a known router."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(num_internal_ports=2)
        subnet_id = _get_subnet_id(router[l3_constants.INTERFACE_KEY][0])
        arp_table = {'ip_address': '1.7.23.11',
                     'mac_address': '00:11:22:33:44:55',
                     'subnet_id': subnet_id}
        payload = {'arp_table': arp_table, 'router_id': router['id']}
        # Register the router so the RPC payload can be resolved to it.
        agent._router_added(router['id'], router)
        agent.add_arp_entry(None, payload)
        agent.router_deleted(None, router['id'])
        self.mock_ip_dev.neigh.add.assert_called_once_with(
            4, '1.7.23.11', '00:11:22:33:44:55')
    def test_add_arp_entry_no_routerinfo(self):
        """add_arp_entry is a no-op when the router is unknown to the agent."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(num_internal_ports=2)
        subnet_id = _get_subnet_id(router[l3_constants.INTERFACE_KEY][0])
        arp_table = {'ip_address': '1.7.23.11',
                     'mac_address': '00:11:22:33:44:55',
                     'subnet_id': subnet_id}
        payload = {'arp_table': arp_table, 'router_id': router['id']}
        agent._update_arp_entry = mock.Mock()
        # The router was never added to the agent, so nothing should happen.
        agent.add_arp_entry(None, payload)
        self.assertFalse(agent._update_arp_entry.called)
    def test__update_arp_entry_with_no_subnet(self):
        """_update_arp_entry touches no device when the subnet is unknown."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = l3router.RouterInfo(
            'foo_router_id', mock.ANY,
            {'distributed': True, 'gw_port_host': HOSTNAME})
        with mock.patch.object(l3_agent.ip_lib, 'IPDevice') as f:
            agent._update_arp_entry(ri, mock.ANY, mock.ANY,
                                    'foo_subnet_id', 'add')
        # No IPDevice should ever have been constructed.
        self.assertFalse(f.call_count)
    def test_del_arp_entry(self):
        """del_arp_entry removes a previously added neighbor entry."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(num_internal_ports=2)
        subnet_id = _get_subnet_id(router[l3_constants.INTERFACE_KEY][0])
        arp_table = {'ip_address': '1.5.25.15',
                     'mac_address': '00:44:33:22:11:55',
                     'subnet_id': subnet_id}
        payload = {'arp_table': arp_table, 'router_id': router['id']}
        agent._router_added(router['id'], router)
        # first add the entry
        agent.add_arp_entry(None, payload)
        # now delete it
        agent.del_arp_entry(None, payload)
        self.mock_ip_dev.neigh.delete.assert_called_once_with(
            4, '1.5.25.15', '00:44:33:22:11:55')
        agent.router_deleted(None, router['id'])
    @mock.patch('neutron.agent.linux.ip_lib.IPDevice')
    def _test_scan_fip_ports(self, ri, ip_list, IPDevice):
        """Run scan_fip_ports with the FIP device reporting *ip_list*.

        Callers then assert on the resulting ri.dist_fip_count.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self.device_exists.return_value = True
        IPDevice.return_value = device = mock.Mock()
        device.addr.list.return_value = ip_list
        agent.scan_fip_ports(ri)
    def test_scan_fip_ports_restart_fips(self):
        """After a restart, existing FIP addresses are counted."""
        router = prepare_router_data()
        ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                                  router=router)
        ri.router['distributed'] = True
        ip_list = [{'cidr': '111.2.3.4/32'}, {'cidr': '111.2.3.5/32'}]
        self._test_scan_fip_ports(ri, ip_list)
        self.assertEqual(ri.dist_fip_count, 2)
    def test_scan_fip_ports_restart_none(self):
        """After a restart with no FIP addresses, the count is zero."""
        router = prepare_router_data()
        ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                                  router=router)
        ri.router['distributed'] = True
        ip_list = []
        self._test_scan_fip_ports(ri, ip_list)
        self.assertEqual(ri.dist_fip_count, 0)
    def test_scan_fip_ports_restart_zero(self):
        """An already-zero dist_fip_count is preserved when no IPs exist."""
        router = prepare_router_data()
        ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                                  router=router)
        ri.router['distributed'] = True
        ri.dist_fip_count = 0
        ip_list = None
        self._test_scan_fip_ports(ri, ip_list)
        self.assertEqual(ri.dist_fip_count, 0)
    def test_process_cent_router(self):
        """process_router works for a centralized (legacy) router."""
        router = prepare_router_data()
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        self._test_process_router(ri)
    def test_process_dist_router(self):
        """process_router works for a distributed (DVR) router."""
        router = prepare_router_data()
        ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                                  router=router)
        subnet_id = _get_subnet_id(router[l3_constants.INTERFACE_KEY][0])
        ri.router['distributed'] = True
        # DVR routers carry snat interfaces and a gateway host hint.
        ri.router['_snat_router_interfaces'] = [{
            'fixed_ips': [{'subnet_id': subnet_id,
                           'ip_address': '1.2.3.4'}]}]
        ri.router['gw_port_host'] = None
        self._test_process_router(ri)
    def _test_process_router(self, ri):
        """Drive process_router through a full lifecycle.

        Sequence: initial processing, floating IP remap, gateway IP change
        (expects external_gateway_updated), floating IP removal, and finally
        teardown once all ports are gone.  Floating IP helpers are mocked so
        only the call pattern is verified.
        """
        router = ri.router
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.host = HOSTNAME
        fake_fip_id = 'fake_fip_id'
        agent.create_dvr_fip_interfaces = mock.Mock()
        agent.process_router_floating_ip_addresses = mock.Mock()
        agent.process_router_floating_ip_nat_rules = mock.Mock()
        agent.process_router_floating_ip_addresses.return_value = {
            fake_fip_id: 'ACTIVE'}
        agent.external_gateway_added = mock.Mock()
        agent.external_gateway_updated = mock.Mock()
        fake_floatingips1 = {'floatingips': [
            {'id': fake_fip_id,
             'floating_ip_address': '8.8.8.8',
             'fixed_ip_address': '7.7.7.7',
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        # Initial processing: FIP address/NAT helpers are invoked.
        agent.process_router(ri)
        ex_gw_port = agent._get_ex_gw_port(ri)
        agent.process_router_floating_ip_addresses.assert_called_with(
            ri, ex_gw_port)
        agent.process_router_floating_ip_addresses.reset_mock()
        agent.process_router_floating_ip_nat_rules.assert_called_with(ri)
        agent.process_router_floating_ip_nat_rules.reset_mock()
        agent.external_gateway_added.reset_mock()
        # remap floating IP to a new fixed ip
        fake_floatingips2 = copy.deepcopy(fake_floatingips1)
        fake_floatingips2['floatingips'][0]['fixed_ip_address'] = '7.7.7.8'
        router[l3_constants.FLOATINGIP_KEY] = fake_floatingips2['floatingips']
        agent.process_router(ri)
        ex_gw_port = agent._get_ex_gw_port(ri)
        agent.process_router_floating_ip_addresses.assert_called_with(
            ri, ex_gw_port)
        agent.process_router_floating_ip_addresses.reset_mock()
        agent.process_router_floating_ip_nat_rules.assert_called_with(ri)
        agent.process_router_floating_ip_nat_rules.reset_mock()
        # A FIP remap must not re-add or update the external gateway.
        self.assertEqual(agent.external_gateway_added.call_count, 0)
        self.assertEqual(agent.external_gateway_updated.call_count, 0)
        agent.external_gateway_added.reset_mock()
        agent.external_gateway_updated.reset_mock()
        # change the ex_gw_port a bit to test gateway update
        new_gw_port = copy.deepcopy(ri.router['gw_port'])
        ri.router['gw_port'] = new_gw_port
        old_ip = (netaddr.IPAddress(ri.router['gw_port']
                                    ['fixed_ips'][0]['ip_address']))
        ri.router['gw_port']['fixed_ips'][0]['ip_address'] = str(old_ip + 1)
        agent.process_router(ri)
        ex_gw_port = agent._get_ex_gw_port(ri)
        agent.process_router_floating_ip_addresses.reset_mock()
        agent.process_router_floating_ip_nat_rules.reset_mock()
        # A changed gateway IP triggers an update, never a re-add.
        self.assertEqual(agent.external_gateway_added.call_count, 0)
        self.assertEqual(agent.external_gateway_updated.call_count, 1)
        # remove just the floating ips
        del router[l3_constants.FLOATINGIP_KEY]
        agent.process_router(ri)
        ex_gw_port = agent._get_ex_gw_port(ri)
        agent.process_router_floating_ip_addresses.assert_called_with(
            ri, ex_gw_port)
        agent.process_router_floating_ip_addresses.reset_mock()
        agent.process_router_floating_ip_nat_rules.assert_called_with(ri)
        agent.process_router_floating_ip_nat_rules.reset_mock()
        # now no ports so state is torn down
        del router[l3_constants.INTERFACE_KEY]
        del router['gw_port']
        agent.process_router(ri)
        self.assertEqual(self.send_arp.call_count, 1)
        # With no gateway, the FIP helpers only run for distributed routers.
        distributed = ri.router.get('distributed', False)
        self.assertEqual(agent.process_router_floating_ip_addresses.called,
                         distributed)
        self.assertEqual(agent.process_router_floating_ip_nat_rules.called,
                         distributed)
    @mock.patch('neutron.agent.linux.ip_lib.IPDevice')
    def _test_process_router_floating_ip_addresses_add(self, ri,
                                                       agent, IPDevice):
        """Adding a floating IP configures it on the device as ACTIVE.

        Works for both centralized and distributed routers; for the latter
        the DVR FIP interfaces are created first.
        """
        floating_ips = agent.get_floating_ips(ri)
        fip_id = floating_ips[0]['id']
        IPDevice.return_value = device = mock.Mock()
        # The device starts with no addresses, so the FIP must be added.
        device.addr.list.return_value = []
        ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
        ex_gw_port = {'id': _uuid()}
        with mock.patch.object(lla.LinkLocalAllocator, '_write'):
            if ri.router['distributed']:
                agent.create_dvr_fip_interfaces(ri, ex_gw_port)
            fip_statuses = agent.process_router_floating_ip_addresses(
                ri, ex_gw_port)
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE},
                         fip_statuses)
        device.addr.add.assert_called_once_with(4, '15.1.2.3/32', '15.1.2.3')
    def test_process_router_floating_ip_nat_rules_add(self):
        """New floating IPs get their forward rules installed in the NAT table."""
        fip = {
            'id': _uuid(), 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.1'
        }
        ri = mock.MagicMock()
        # Force the 'distributed' flag to evaluate falsy on the MagicMock.
        ri.router['distributed'].__nonzero__ = lambda self: False
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.get_floating_ips = mock.Mock(return_value=[fip])
        agent.process_router_floating_ip_nat_rules(ri)
        nat = ri.iptables_manager.ipv4['nat']
        # Old rules are flushed before the new ones are added.
        nat.clear_rules_by_tag.assert_called_once_with('floating_ip')
        rules = agent.floating_forward_rules('15.1.2.3', '192.168.0.1')
        for chain, rule in rules:
            nat.add_rule.assert_any_call(chain, rule, tag='floating_ip')
    def test_process_router_cent_floating_ip_add(self):
        """Floating IP add works on a centralized router."""
        fake_floatingips = {'floatingips': [
            {'id': _uuid(),
             'floating_ip_address': '15.1.2.3',
             'fixed_ip_address': '192.168.0.1',
             'floating_network_id': _uuid(),
             'port_id': _uuid(),
             'host': HOSTNAME}]}
        router = prepare_router_data(enable_snat=True)
        router[l3_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        self._test_process_router_floating_ip_addresses_add(ri, agent)
    def test_process_router_dist_floating_ip_add(self):
        """Floating IP add on a DVR router only configures this host's FIPs.

        The second floating IP is bound to another host and must be ignored
        by this agent.
        """
        fake_floatingips = {'floatingips': [
            {'id': _uuid(),
             'host': HOSTNAME,
             'floating_ip_address': '15.1.2.3',
             'fixed_ip_address': '192.168.0.1',
             'floating_network_id': _uuid(),
             'port_id': _uuid()},
            {'id': _uuid(),
             'host': 'some-other-host',
             'floating_ip_address': '15.1.2.4',
             'fixed_ip_address': '192.168.0.10',
             'floating_network_id': _uuid(),
             'port_id': _uuid()}]}
        router = prepare_router_data(enable_snat=True)
        router[l3_constants.FLOATINGIP_KEY] = fake_floatingips['floatingips']
        router['distributed'] = True
        ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                                  router=router)
        ri.iptables_manager.ipv4['nat'] = mock.MagicMock()
        ri.dist_fip_count = 0
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.host = HOSTNAME
        # Pre-provisioned FIP agent gateway port for this host.
        agent.agent_gateway_port = (
            {'fixed_ips': [{'ip_address': '20.0.0.30',
                            'subnet_id': _uuid()}],
             'subnet': {'gateway_ip': '20.0.0.1'},
             'id': _uuid(),
             'network_id': _uuid(),
             'mac_address': 'ca:fe:de:ad:be:ef',
             'ip_cidr': '20.0.0.30/24'}
        )
        self._test_process_router_floating_ip_addresses_add(ri, agent)
def test_get_router_cidrs_returns_cidrs(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = mock.MagicMock()
ri.is_ha = False
addresses = ['15.1.2.2/24', '15.1.2.3/32']
device = mock.MagicMock()
device.addr.list.return_value = [{'cidr': addresses[0]},
{'cidr': addresses[1]}]
self.assertEqual(set(addresses), agent._get_router_cidrs(ri, device))
def test_get_router_cidrs_returns_ha_cidrs(self):
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
ri = mock.MagicMock()
ri.is_ha = True
device = mock.MagicMock()
device.name.return_value = 'eth2'
addresses = ['15.1.2.2/24', '15.1.2.3/32']
agent._ha_get_existing_cidrs = mock.MagicMock()
agent._ha_get_existing_cidrs.return_value = addresses
self.assertEqual(set(addresses), agent._get_router_cidrs(ri, device))
    # TODO(mrsmith): refactor for DVR cases
    @mock.patch('neutron.agent.linux.ip_lib.IPDevice')
    def test_process_router_floating_ip_addresses_remove(self, IPDevice):
        """A FIP no longer on the router is removed from the device,
        including its conntrack state."""
        IPDevice.return_value = device = mock.Mock()
        # The device still carries the address, but the router has no FIPs.
        device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
        ri = mock.MagicMock()
        ri.router.get.return_value = []
        type(ri).is_ha = mock.PropertyMock(return_value=False)
        ri.router['distributed'].__nonzero__ = lambda self: False
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        fip_statuses = agent.process_router_floating_ip_addresses(
            ri, {'id': _uuid()})
        self.assertEqual({}, fip_statuses)
        device.addr.delete.assert_called_once_with(4, '15.1.2.3/32')
        self.mock_driver.delete_conntrack_state.assert_called_once_with(
            root_helper=self.conf.root_helper,
            namespace=ri.ns_name,
            ip='15.1.2.3/32')
def test_process_router_floating_ip_nat_rules_remove(self):
ri = mock.MagicMock()
ri.router.get.return_value = []
agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
agent.process_router_floating_ip_nat_rules(ri)
nat = ri.iptables_manager.ipv4['nat']
nat = ri.iptables_manager.ipv4['nat`']
nat.clear_rules_by_tag.assert_called_once_with('floating_ip')
    @mock.patch('neutron.agent.linux.ip_lib.IPDevice')
    def test_process_router_floating_ip_addresses_remap(self, IPDevice):
        """A FIP already configured on the device is left untouched
        even when its fixed IP mapping changed."""
        fip_id = _uuid()
        fip = {
            'id': fip_id, 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.2'
        }
        IPDevice.return_value = device = mock.Mock()
        # The floating address is already present on the device.
        device.addr.list.return_value = [{'cidr': '15.1.2.3/32'}]
        ri = mock.MagicMock()
        ri.router['distributed'].__nonzero__ = lambda self: False
        type(ri).is_ha = mock.PropertyMock(return_value=False)
        ri.router.get.return_value = [fip]
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        fip_statuses = agent.process_router_floating_ip_addresses(
            ri, {'id': _uuid()})
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE},
                         fip_statuses)
        # No device-level add/delete: only NAT rules change on a remap.
        self.assertFalse(device.addr.add.called)
        self.assertFalse(device.addr.delete.called)
    @mock.patch('neutron.agent.linux.ip_lib.IPDevice')
    def test_process_router_with_disabled_floating_ip(self, IPDevice):
        """A FIP absent from the router data gets no status entry."""
        fip_id = _uuid()
        fip = {
            'id': fip_id, 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.2'
        }
        ri = mock.MagicMock()
        # The FIP is cached on the router info but no longer reported
        # by the router data itself.
        ri.floating_ips = [fip]
        ri.router.get.return_value = []
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        fip_statuses = agent.process_router_floating_ip_addresses(
            ri, {'id': _uuid()})
        self.assertIsNone(fip_statuses.get(fip_id))
    @mock.patch('neutron.agent.linux.ip_lib.IPDevice')
    def test_process_router_floating_ip_with_device_add_error(self, IPDevice):
        """A failure to plumb the FIP address reports FLOATINGIP_STATUS_ERROR."""
        IPDevice.return_value = device = mock.Mock()
        # Simulate the ip command failing when the address is added.
        device.addr.add.side_effect = RuntimeError()
        device.addr.list.return_value = []
        fip_id = _uuid()
        fip = {
            'id': fip_id, 'port_id': _uuid(),
            'floating_ip_address': '15.1.2.3',
            'fixed_ip_address': '192.168.0.2'
        }
        ri = mock.MagicMock()
        type(ri).is_ha = mock.PropertyMock(return_value=False)
        ri.router.get.return_value = [fip]
        ri.router['distributed'].__nonzero__ = lambda self: False
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        fip_statuses = agent.process_router_floating_ip_addresses(
            ri, {'id': _uuid()})
        self.assertEqual({fip_id: l3_constants.FLOATINGIP_STATUS_ERROR},
                         fip_statuses)
    def test_process_router_snat_disabled(self):
        """Disabling SNAT removes exactly the two SNAT rules."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(enable_snat=True)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # Process with NAT
        agent.process_router(ri)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        # Reprocess without NAT
        router['enable_snat'] = False
        # Reassign the router object to RouterInfo
        ri.router = router
        agent.process_router(ri)
        # For some reason set logic does not work well with
        # IpTablesRule instances
        nat_rules_delta = [r for r in orig_nat_rules
                           if r not in ri.iptables_manager.ipv4['nat'].rules]
        self.assertEqual(len(nat_rules_delta), 2)
        self._verify_snat_rules(nat_rules_delta, router)
        self.assertEqual(self.send_arp.call_count, 1)
    def test_process_router_snat_enabled(self):
        """Enabling SNAT adds exactly the two SNAT rules."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(enable_snat=False)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # Process without NAT
        agent.process_router(ri)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        # Reprocess with NAT
        router['enable_snat'] = True
        # Reassign the router object to RouterInfo
        ri.router = router
        agent.process_router(ri)
        # For some reason set logic does not work well with
        # IpTablesRule instances
        nat_rules_delta = [r for r in ri.iptables_manager.ipv4['nat'].rules
                           if r not in orig_nat_rules]
        self.assertEqual(len(nat_rules_delta), 2)
        self._verify_snat_rules(nat_rules_delta, router)
        self.assertEqual(self.send_arp.call_count, 1)
    def test_process_router_interface_added(self):
        """Adding an interface triggers gratuitous ARP on reprocessing."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data()
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # Process with NAT
        agent.process_router(ri)
        # Add an interface and reprocess
        router_append_interface(router)
        # Reassign the router object to RouterInfo
        ri.router = router
        agent.process_router(ri)
        # send_arp is called both times process_router is called
        self.assertEqual(self.send_arp.call_count, 2)
    def test_process_ipv6_only_gw(self):
        """An IPv6-only gateway must not generate any IPv4 NAT rules."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(ip_version=6)
        # Get NAT rules without the gw_port
        gw_port = router['gw_port']
        router['gw_port'] = None
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        agent.process_router(ri)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        # Get NAT rules with the gw_port
        router['gw_port'] = gw_port
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        with mock.patch.object(
                agent,
                'external_gateway_nat_rules') as external_gateway_nat_rules:
            agent.process_router(ri)
            new_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
            # There should be no change with the NAT rules
            self.assertFalse(external_gateway_nat_rules.called)
            self.assertEqual(orig_nat_rules, new_nat_rules)
    def _process_router_ipv6_interface_added(
            self, router, ra_mode=None, addr_mode=None):
        """Add an IPv6 interface, reprocess, and return the router info.

        Also asserts that the IPv4 NAT rules are untouched by the IPv6
        interface addition.
        """
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # Process with NAT
        agent.process_router(ri)
        orig_nat_rules = ri.iptables_manager.ipv4['nat'].rules[:]
        # Add an IPv6 interface and reprocess
        router_append_interface(router, count=1, ip_version=6, ra_mode=ra_mode,
                                addr_mode=addr_mode)
        # Reassign the router object to RouterInfo
        ri.router = router
        agent.process_router(ri)
        # IPv4 NAT rules should not be changed by adding an IPv6 interface
        nat_rules_delta = [r for r in ri.iptables_manager.ipv4['nat'].rules
                           if r not in orig_nat_rules]
        self.assertFalse(nat_rules_delta)
        return ri
def _expected_call_lookup_ri_process(self, ri, process):
"""Expected call if a process is looked up in a router instance."""
return [mock.call(cfg.CONF,
ri.router['id'],
self.conf.root_helper,
ri.ns_name,
process)]
def _assert_ri_process_enabled(self, ri, process):
"""Verify that process was enabled for a router instance."""
expected_calls = self._expected_call_lookup_ri_process(ri, process)
expected_calls.append(mock.call().enable(mock.ANY, True))
self.assertEqual(expected_calls, self.external_process.mock_calls)
def _assert_ri_process_disabled(self, ri, process):
"""Verify that process was disabled for a router instance."""
expected_calls = self._expected_call_lookup_ri_process(ri, process)
expected_calls.append(mock.call().disable())
self.assertEqual(expected_calls, self.external_process.mock_calls)
    def test_process_router_ipv6_interface_added(self):
        """A plain IPv6 interface enables radvd without a prefix stanza."""
        router = prepare_router_data()
        ri = self._process_router_ipv6_interface_added(router)
        self._assert_ri_process_enabled(ri, 'radvd')
        # Expect radvd configured without prefix
        self.assertNotIn('prefix',
                         self.utils_replace_file.call_args[0][1].split())
    def test_process_router_ipv6_slaac_interface_added(self):
        """An IPv6 SLAAC interface enables radvd with a prefix stanza."""
        router = prepare_router_data()
        ri = self._process_router_ipv6_interface_added(
            router, ra_mode=l3_constants.IPV6_SLAAC)
        self._assert_ri_process_enabled(ri, 'radvd')
        # Expect radvd configured with prefix
        self.assertIn('prefix',
                      self.utils_replace_file.call_args[0][1].split())
    def test_process_router_ipv6v4_interface_added(self):
        """Adding both IPv4 and IPv6 interfaces still enables radvd."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data()
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # Process with NAT
        agent.process_router(ri)
        # Add an IPv4 and IPv6 interface and reprocess
        router_append_interface(router, count=1, ip_version=4)
        router_append_interface(router, count=1, ip_version=6)
        # Reassign the router object to RouterInfo
        ri.router = router
        agent.process_router(ri)
        self._assert_ri_process_enabled(ri, 'radvd')
    def test_process_router_interface_removed(self):
        """Removing an interface still sends gratuitous ARP on reprocessing."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data(num_internal_ports=2)
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # Process with NAT
        agent.process_router(ri)
        # Add an interface and reprocess
        del router[l3_constants.INTERFACE_KEY][1]
        # Reassign the router object to RouterInfo
        ri.router = router
        agent.process_router(ri)
        # send_arp is called both times process_router is called
        self.assertEqual(self.send_arp.call_count, 2)
    def test_process_router_ipv6_interface_removed(self):
        """Removing the last IPv6 interface disables radvd."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data()
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        ri.router = router
        agent.process_router(ri)
        # Add an IPv6 interface and reprocess
        router_append_interface(router, count=1, ip_version=6)
        agent.process_router(ri)
        self._assert_ri_process_enabled(ri, 'radvd')
        # Reset the calls so we can check for disable radvd
        self.external_process.reset_mock()
        # Remove the IPv6 interface and reprocess
        del router[l3_constants.INTERFACE_KEY][1]
        agent.process_router(ri)
        self._assert_ri_process_disabled(ri, 'radvd')
    def test_process_router_internal_network_added_unexpected_error(self):
        """A failed interface add is retried on the next processing pass."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data()
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        with mock.patch.object(
                l3_agent.L3NATAgent,
                'internal_network_added') as internal_network_added:
            # raise RuntimeError to simulate that an unexpected exception
            # occurs
            internal_network_added.side_effect = RuntimeError
            self.assertRaises(RuntimeError, agent.process_router, ri)
            # The failed port must not be recorded as processed.
            self.assertNotIn(
                router[l3_constants.INTERFACE_KEY][0], ri.internal_ports)
            # The unexpected exception has been fixed manually
            internal_network_added.side_effect = None
            # periodic_sync_routers_task finds out that _rpc_loop failed to
            # process the router last time, it will retry in the next run.
            agent.process_router(ri)
            # We were able to add the port to ri.internal_ports
            self.assertIn(
                router[l3_constants.INTERFACE_KEY][0], ri.internal_ports)
    def test_process_router_internal_network_removed_unexpected_error(self):
        """A failed interface removal is retried on the next processing pass."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        router = prepare_router_data()
        ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                 router=router)
        agent.external_gateway_added = mock.Mock()
        # add an internal port
        agent.process_router(ri)
        with mock.patch.object(
                l3_agent.L3NATAgent,
                'internal_network_removed') as internal_net_removed:
            # raise RuntimeError to simulate that an unexpected exception
            # occurs
            internal_net_removed.side_effect = RuntimeError
            ri.internal_ports[0]['admin_state_up'] = False
            # The above port is set to down state, remove it.
            self.assertRaises(RuntimeError, agent.process_router, ri)
            # The port stays recorded because the removal did not complete.
            self.assertIn(
                router[l3_constants.INTERFACE_KEY][0], ri.internal_ports)
            # The unexpected exception has been fixed manually
            internal_net_removed.side_effect = None
            # periodic_sync_routers_task finds out that _rpc_loop failed to
            # process the router last time, it will retry in the next run.
            agent.process_router(ri)
            # We were able to remove the port from ri.internal_ports
            self.assertNotIn(
                router[l3_constants.INTERFACE_KEY][0], ri.internal_ports)
    def test_process_router_floatingip_disabled(self):
        """A removed floating IP is reported DOWN to the plugin."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        with mock.patch.object(
            agent.plugin_rpc,
            'update_floatingip_statuses') as mock_update_fip_status:
            fip_id = _uuid()
            router = prepare_router_data(num_internal_ports=1)
            router[l3_constants.FLOATINGIP_KEY] = [
                {'id': fip_id,
                 'floating_ip_address': '8.8.8.8',
                 'fixed_ip_address': '7.7.7.7',
                 'port_id': router[l3_constants.INTERFACE_KEY][0]['id']}]
            ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                     router=router)
            agent.external_gateway_added = mock.Mock()
            agent.process_router(ri)
            # Assess the call for putting the floating IP up was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: l3_constants.FLOATINGIP_STATUS_ACTIVE})
            mock_update_fip_status.reset_mock()
            # Process the router again, this time without floating IPs
            router[l3_constants.FLOATINGIP_KEY] = []
            ri.router = router
            agent.process_router(ri)
            # Assess the call for putting the floating IP down was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: l3_constants.FLOATINGIP_STATUS_DOWN})
    def test_process_router_floatingip_exception(self):
        """A FIP processing failure reports FLOATINGIP_STATUS_ERROR."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        agent.process_router_floating_ip_addresses = mock.Mock()
        # Simulate an unexpected failure while plumbing floating IPs.
        agent.process_router_floating_ip_addresses.side_effect = RuntimeError
        with mock.patch.object(
            agent.plugin_rpc,
            'update_floatingip_statuses') as mock_update_fip_status:
            fip_id = _uuid()
            router = prepare_router_data(num_internal_ports=1)
            router[l3_constants.FLOATINGIP_KEY] = [
                {'id': fip_id,
                 'floating_ip_address': '8.8.8.8',
                 'fixed_ip_address': '7.7.7.7',
                 'port_id': router[l3_constants.INTERFACE_KEY][0]['id']}]
            ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                                     router=router)
            agent.external_gateway_added = mock.Mock()
            agent.process_router(ri)
            # Assess the call for putting the floating IP into Error
            # was performed
            mock_update_fip_status.assert_called_once_with(
                mock.ANY, ri.router_id,
                {fip_id: l3_constants.FLOATINGIP_STATUS_ERROR})
    def test_handle_router_snat_rules_distributed_without_snat_manager(self):
        """A DVR router lacking a snat iptables manager logs and bails out."""
        ri = dvr_router.DvrRouter(
            'foo_router_id', mock.ANY, {'distributed': True})
        ri.iptables_manager = mock.Mock()
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        with mock.patch.object(l3_agent.LOG, 'debug') as log_debug:
            agent._handle_router_snat_rules(
                ri, mock.ANY, mock.ANY, mock.ANY)
        # Nothing is configured; a debug message is emitted instead.
        self.assertIsNone(ri.snat_iptables_manager)
        self.assertFalse(ri.iptables_manager.called)
        self.assertTrue(log_debug.called)
    def test_handle_router_snat_rules_add_back_jump(self):
        """The float-snat jump is (re)installed as the first snat rule."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = mock.MagicMock()
        port = {'fixed_ips': [{'ip_address': '192.168.1.4'}]}
        ri.router = {'distributed': False}
        agent._handle_router_snat_rules(ri, port, "iface", "add_rules")
        nat = ri.iptables_manager.ipv4['nat']
        nat.empty_chain.assert_any_call('snat')
        nat.add_rule.assert_any_call('snat', '-j $float-snat')
        # The float-snat jump must be the FIRST rule added to the chain,
        # hence the break after inspecting the first add_rule call.
        for call in nat.mock_calls:
            name, args, kwargs = call
            if name == 'add_rule':
                self.assertEqual(args, ('snat', '-j $float-snat'))
                self.assertEqual(kwargs, {})
                break
    def test_handle_router_snat_rules_add_rules(self):
        """SNAT rule and float-snat jump are added, jump first."""
        agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
        ri = l3router.RouterInfo(_uuid(), self.conf.root_helper, {})
        ex_gw_port = {'fixed_ips': [{'ip_address': '192.168.1.4'}]}
        ri.router = {'distributed': False}
        agent._handle_router_snat_rules(ri, ex_gw_port,
                                        "iface", "add_rules")
        nat_rules = map(str, ri.iptables_manager.ipv4['nat'].rules)
        wrap_name = ri.iptables_manager.wrap_name
        jump_float_rule = "-A %s-snat -j %s-float-snat" % (wrap_name,
                                                           wrap_name)
        snat_rule = ("-A %s-snat -o iface -j SNAT --to-source %s") % (
            wrap_name, ex_gw_port['fixed_ips'][0]['ip_address'])
        self.assertIn(jump_float_rule, nat_rules)
        self.assertIn(snat_rule, nat_rules)
        # The float-snat jump must precede the SNAT rule in the chain.
        self.assertThat(nat_rules.index(jump_float_rule),
                        matchers.LessThan(nat_rules.index(snat_rule)))
def test_process_router_delete_stale_internal_devices(self):
    """process_router unplugs qr- devices that no longer match any of the
    router's internal ports, while still wiring up the current port."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    # Two devices exist in the namespace but are absent from router data.
    stale_devlist = [FakeDev('qr-a1b2c3d4-e5'),
                     FakeDev('qr-b2c3d4e5-f6')]
    stale_devnames = [dev.name for dev in stale_devlist]
    get_devices_return = []
    get_devices_return.extend(stale_devlist)
    self.mock_ip.get_devices.return_value = get_devices_return
    router = prepare_router_data(enable_snat=True, num_internal_ports=1)
    ri = l3router.RouterInfo(router['id'],
                             self.conf.root_helper,
                             router=router)
    internal_ports = ri.router.get(l3_constants.INTERFACE_KEY, [])
    self.assertEqual(len(internal_ports), 1)
    internal_port = internal_ports[0]
    with contextlib.nested(mock.patch.object(l3_agent.L3NATAgent,
                                             'internal_network_removed'),
                           mock.patch.object(l3_agent.L3NATAgent,
                                             'internal_network_added'),
                           mock.patch.object(l3_agent.L3NATAgent,
                                             'external_gateway_removed'),
                           mock.patch.object(l3_agent.L3NATAgent,
                                             'external_gateway_added')
                           ) as (internal_network_removed,
                                 internal_network_added,
                                 external_gateway_removed,
                                 external_gateway_added):
        agent.process_router(ri)
        self.assertEqual(external_gateway_added.call_count, 1)
        self.assertFalse(external_gateway_removed.called)
        # Stale device cleanup happens via driver.unplug, not via
        # internal_network_removed.
        self.assertFalse(internal_network_removed.called)
        internal_network_added.assert_called_once_with(
            ri, internal_port)
        self.assertEqual(self.mock_driver.unplug.call_count,
                         len(stale_devnames))
        calls = [mock.call(stale_devname,
                           namespace=ri.ns_name,
                           prefix=l3_agent.INTERNAL_DEV_PREFIX)
                 for stale_devname in stale_devnames]
        self.mock_driver.unplug.assert_has_calls(calls, any_order=True)
def test_process_router_delete_stale_external_devices(self):
    """A qg- device left behind after the gateway port is gone must be
    unplugged from the external bridge."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    stale_devlist = [FakeDev('qg-a1b2c3d4-e5')]
    stale_devnames = [dev.name for dev in stale_devlist]
    router = prepare_router_data(enable_snat=True, num_internal_ports=1)
    # Remove the gateway so the existing qg- device counts as stale.
    del router['gw_port']
    ri = l3router.RouterInfo(router['id'],
                             self.conf.root_helper,
                             router=router)
    self.mock_ip.get_devices.return_value = stale_devlist
    agent.process_router(ri)
    self.mock_driver.unplug.assert_called_with(
        stale_devnames[0],
        bridge="br-ex",
        namespace=ri.ns_name,
        prefix=l3_agent.EXTERNAL_DEV_PREFIX)
def test_router_deleted(self):
    """router_deleted RPC enqueues exactly one update on the agent queue."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._queue = mock.Mock()
    agent.router_deleted(None, FAKE_ID)
    self.assertEqual(1, agent._queue.add.call_count)
def test_routers_updated(self):
    """routers_updated RPC enqueues exactly one update on the agent queue."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._queue = mock.Mock()
    agent.routers_updated(None, [FAKE_ID])
    self.assertEqual(1, agent._queue.add.call_count)
def test_removed_from_agent(self):
    """router_removed_from_agent RPC enqueues one update on the queue."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._queue = mock.Mock()
    agent.router_removed_from_agent(None, {'router_id': FAKE_ID})
    self.assertEqual(1, agent._queue.add.call_count)
def test_added_to_agent(self):
    """router_added_to_agent RPC enqueues one update on the queue."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._queue = mock.Mock()
    agent.router_added_to_agent(None, [FAKE_ID])
    self.assertEqual(1, agent._queue.add.call_count)
def test_destroy_fip_namespace(self):
    """Destroying a FIP namespace unplugs the fg- device from br-ex and
    deletes the fpr- veth pair."""
    namespaces = ['qrouter-foo', 'qrouter-bar']
    self.mock_ip.get_namespaces.return_value = namespaces
    self.mock_ip.get_devices.return_value = [FakeDev('fpr-aaaa'),
                                             FakeDev('fg-aaaa')]
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._destroy_fip_namespace(namespaces[0])
    self.mock_driver.unplug.assert_called_once_with('fg-aaaa',
                                                    bridge='br-ex',
                                                    prefix='fg-',
                                                    namespace='qrouter'
                                                    '-foo')
    self.mock_ip.del_veth.assert_called_once_with('fpr-aaaa')
def test_destroy_namespace(self):
    """Destroying a router namespace unplugs qr- devices and removes the
    rfp- veth pair."""
    namespace = 'qrouter-bar'
    self.mock_ip.get_namespaces.return_value = [namespace]
    self.mock_ip.get_devices.return_value = [FakeDev('qr-aaaa'),
                                             FakeDev('rfp-aaaa')]
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._destroy_namespace(namespace)
    self.mock_driver.unplug.assert_called_once_with('qr-aaaa',
                                                    prefix='qr-',
                                                    namespace='qrouter'
                                                    '-bar')
    self.mock_ip.del_veth.assert_called_once_with('rfp-aaaa')
def test_destroy_router_namespace_skips_ns_removal(self):
    """With router_delete_namespaces at its default (False), the
    namespace itself is not deleted."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._destroy_router_namespace("fakens")
    self.assertEqual(self.mock_ip.netns.delete.call_count, 0)
def test_destroy_router_namespace_removes_ns(self):
    """With router_delete_namespaces=True the namespace is deleted."""
    self.conf.set_override('router_delete_namespaces', True)
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent._destroy_router_namespace("fakens")
    self.mock_ip.netns.delete.assert_called_once_with("fakens")
def _configure_metadata_proxy(self, enableflag=True):
    """Shared body for the metadata-proxy tests.

    :param enableflag: when False, 'enable_metadata_proxy' is turned off
        and neither spawn nor destroy of the proxy may be attempted.
    """
    if not enableflag:
        self.conf.set_override('enable_metadata_proxy', False)
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router_id = _uuid()
    router = {'id': router_id,
              'external_gateway_info': {},
              'routes': [],
              'distributed': False}
    driver = metadata_driver.MetadataDriver
    with mock.patch.object(
        driver, '_destroy_metadata_proxy') as destroy_proxy:
        with mock.patch.object(
            driver, '_spawn_metadata_proxy') as spawn_proxy:
            # Adding a router should spawn the proxy only when enabled.
            agent._process_added_router(router)
            if enableflag:
                spawn_proxy.assert_called_with(router_id,
                                               mock.ANY,
                                               mock.ANY)
            else:
                self.assertFalse(spawn_proxy.call_count)
            # Removing the router should destroy it only when enabled.
            agent._router_removed(router_id)
            if enableflag:
                destroy_proxy.assert_called_with(router_id,
                                                 mock.ANY,
                                                 mock.ANY)
            else:
                self.assertFalse(destroy_proxy.call_count)
def test_enable_metadata_proxy(self):
    """Metadata proxy is spawned/destroyed when the feature is enabled."""
    self._configure_metadata_proxy()
def test_disable_metadata_proxy_spawn(self):
    """No proxy management happens when the feature is disabled."""
    self._configure_metadata_proxy(enableflag=False)
def test_router_id_specified_in_conf(self):
    """Without namespaces a router_id is mandatory: empty id aborts
    startup, a real id is accepted and disables stale-ns cleanup."""
    self.conf.set_override('use_namespaces', False)
    self.conf.set_override('router_id', '')
    self.assertRaises(SystemExit, l3_agent.L3NATAgent,
                      HOSTNAME, self.conf)
    self.conf.set_override('router_id', '1234')
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.assertEqual('1234', agent.conf.router_id)
    self.assertFalse(agent._clean_stale_namespaces)
def test_process_router_if_compatible_with_no_ext_net_in_conf(self):
    """Without a configured ext net, the agent queries the plugin for the
    external network id and then accepts the router."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.plugin_api.get_external_network_id.return_value = 'aaa'
    router = {'id': _uuid(),
              'routes': [],
              'admin_state_up': True,
              'external_gateway_info': {'network_id': 'aaa'}}
    agent._process_router_if_compatible(router)
    self.assertIn(router['id'], agent.router_info)
    self.plugin_api.get_external_network_id.assert_called_with(
        agent.context)
def test_process_router_if_compatible_with_cached_ext_net(self):
    """A cached matching target ext net id avoids the plugin RPC."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.plugin_api.get_external_network_id.return_value = 'aaa'
    agent.target_ex_net_id = 'aaa'
    router = {'id': _uuid(),
              'routes': [],
              'admin_state_up': True,
              'external_gateway_info': {'network_id': 'aaa'}}
    agent._process_router_if_compatible(router)
    self.assertIn(router['id'], agent.router_info)
    # Cache hit: no RPC round-trip needed.
    self.assertFalse(self.plugin_api.get_external_network_id.called)
def test_process_router_if_compatible_with_stale_cached_ext_net(self):
    """A stale cached ext net id forces a fresh plugin lookup."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.plugin_api.get_external_network_id.return_value = 'aaa'
    agent.target_ex_net_id = 'bbb'
    router = {'id': _uuid(),
              'routes': [],
              'admin_state_up': True,
              'external_gateway_info': {'network_id': 'aaa'}}
    agent._process_router_if_compatible(router)
    self.assertIn(router['id'], agent.router_info)
    self.plugin_api.get_external_network_id.assert_called_with(
        agent.context)
def test_process_router_if_compatible_w_no_ext_net_and_2_net_plugin(self):
    """TooManyExternalNetworks from the plugin propagates and the router
    is not registered."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router = {'id': _uuid(),
              'routes': [],
              'admin_state_up': True,
              'external_gateway_info': {'network_id': 'aaa'}}
    agent.router_info = {}
    self.plugin_api.get_external_network_id.side_effect = (
        n_exc.TooManyExternalNetworks())
    self.assertRaises(n_exc.TooManyExternalNetworks,
                      agent._process_router_if_compatible,
                      router)
    self.assertNotIn(router['id'], agent.router_info)
def test_process_router_if_compatible_with_ext_net_in_conf(self):
    """A router whose gateway net differs from the configured external
    network is rejected as incompatible."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.plugin_api.get_external_network_id.return_value = 'aaa'
    router = {'id': _uuid(),
              'routes': [],
              'admin_state_up': True,
              'external_gateway_info': {'network_id': 'bbb'}}
    agent.router_info = {}
    self.conf.set_override('gateway_external_network_id', 'aaa')
    self.assertRaises(n_exc.RouterNotCompatibleWithAgent,
                      agent._process_router_if_compatible,
                      router)
    self.assertNotIn(router['id'], agent.router_info)
def test_process_router_if_compatible_with_no_bridge_no_ext_net(self):
    """With no external bridge and no configured ext net, any gateway
    network is accepted."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.plugin_api.get_external_network_id.return_value = 'aaa'
    router = {'id': _uuid(),
              'routes': [],
              'admin_state_up': True,
              'external_gateway_info': {'network_id': 'aaa'}}
    agent.router_info = {}
    self.conf.set_override('external_network_bridge', '')
    agent._process_router_if_compatible(router)
    self.assertIn(router['id'], agent.router_info)
def test_nonexistent_interface_driver(self):
    """Missing or unimportable interface_driver aborts agent startup with
    an explanatory error log."""
    self.conf.set_override('interface_driver', None)
    with mock.patch.object(l3_agent, 'LOG') as log:
        self.assertRaises(SystemExit, l3_agent.L3NATAgent,
                          HOSTNAME, self.conf)
        msg = 'An interface driver must be specified'
        log.error.assert_called_once_with(msg)
    self.conf.set_override('interface_driver', 'wrong_driver')
    with mock.patch.object(l3_agent, 'LOG') as log:
        self.assertRaises(SystemExit, l3_agent.L3NATAgent,
                          HOSTNAME, self.conf)
        msg = _LE("Error importing interface driver '%s'")
        log.error.assert_called_once_with(msg, 'wrong_driver')
def _cleanup_namespace_test(self,
                            stale_namespace_list,
                            router_list,
                            other_namespaces):
    """Shared body for the namespace-cleanup tests.

    Namespaces belonging to routers in ``router_list`` (qrouter- and
    snat- prefixed) must survive; every entry of
    ``stale_namespace_list`` must be destroyed; ``other_namespaces``
    (non-l3 prefixes) must be ignored.
    """
    self.conf.set_override('router_delete_namespaces', True)
    good_namespace_list = [l3_agent.NS_PREFIX + r['id']
                           for r in router_list]
    good_namespace_list += [dvr.SNAT_NS_PREFIX + r['id']
                            for r in router_list]
    self.mock_ip.get_namespaces.return_value = (stale_namespace_list +
                                                good_namespace_list +
                                                other_namespaces)
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.assertTrue(agent._clean_stale_namespaces)
    pm = self.external_process.return_value
    pm.reset_mock()
    agent._destroy_router_namespace = mock.MagicMock()
    agent._destroy_snat_namespace = mock.MagicMock()
    ns_list = agent._list_namespaces()
    agent._cleanup_namespaces(ns_list, [r['id'] for r in router_list])
    # Expect process manager to disable metadata proxy per qrouter ns
    qrouters = [n for n in stale_namespace_list
                if n.startswith(l3_agent.NS_PREFIX)]
    self.assertEqual(agent._destroy_router_namespace.call_count,
                     len(qrouters))
    # Whatever isn't a qrouter namespace must have been a snat namespace.
    self.assertEqual(agent._destroy_snat_namespace.call_count,
                     len(stale_namespace_list) - len(qrouters))
    expected_args = [mock.call(ns) for ns in qrouters]
    agent._destroy_router_namespace.assert_has_calls(expected_args,
                                                     any_order=True)
    # Cleanup is a one-shot operation per agent start.
    self.assertFalse(agent._clean_stale_namespaces)
def test_cleanup_namespace(self):
    """With no known routers, all l3 namespaces are considered stale."""
    self.conf.set_override('router_id', None)
    stale_namespaces = [l3_agent.NS_PREFIX + 'foo',
                        l3_agent.NS_PREFIX + 'bar',
                        dvr.SNAT_NS_PREFIX + 'foo']
    other_namespaces = ['unknown']
    self._cleanup_namespace_test(stale_namespaces,
                                 [],
                                 other_namespaces)
def test_cleanup_namespace_with_registered_router_ids(self):
    """Namespaces of registered routers survive; others are cleaned up."""
    self.conf.set_override('router_id', None)
    stale_namespaces = [l3_agent.NS_PREFIX + 'cccc',
                        l3_agent.NS_PREFIX + 'eeeee',
                        dvr.SNAT_NS_PREFIX + 'fffff']
    router_list = [{'id': 'foo', 'distributed': False},
                   {'id': 'aaaa', 'distributed': False}]
    other_namespaces = ['qdhcp-aabbcc', 'unknown']
    self._cleanup_namespace_test(stale_namespaces,
                                 router_list,
                                 other_namespaces)
def test_cleanup_namespace_with_conf_router_id(self):
    """The namespace of the conf-configured router_id is also treated as
    eligible for cleanup handling alongside other stale namespaces."""
    self.conf.set_override('router_id', 'bbbbb')
    stale_namespaces = [l3_agent.NS_PREFIX + 'cccc',
                        l3_agent.NS_PREFIX + 'eeeee',
                        l3_agent.NS_PREFIX + self.conf.router_id]
    router_list = [{'id': 'foo', 'distributed': False},
                   {'id': 'aaaa', 'distributed': False}]
    other_namespaces = ['qdhcp-aabbcc', 'unknown']
    self._cleanup_namespace_test(stale_namespaces,
                                 router_list,
                                 other_namespaces)
def test_create_dvr_gateway(self):
    """Creating a DVR gateway plugs and configures the SNAT ports plus
    the external gateway port (3 plug/init_l3 calls in total)."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router = prepare_router_data()
    ri = l3router.RouterInfo(router['id'], self.conf.root_helper,
                             router=router)
    port_id = _uuid()
    dvr_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                  'subnet_id': _uuid()}],
                   'subnet': {'gateway_ip': '20.0.0.1'},
                   'id': port_id,
                   'network_id': _uuid(),
                   'mac_address': 'ca:fe:de:ad:be:ef',
                   'ip_cidr': '20.0.0.30/24'}
    interface_name = agent.get_snat_int_device_name(port_id)
    self.device_exists.return_value = False
    agent._create_dvr_gateway(ri, dvr_gw_port, interface_name,
                              self.snat_ports)
    # check 2 internal ports are plugged
    # check 1 ext-gw-port is plugged
    self.assertEqual(self.mock_driver.plug.call_count, 3)
    self.assertEqual(self.mock_driver.init_l3.call_count, 3)
def test_agent_gateway_added(self):
    """Adding the agent gateway port plugs/configures one device and
    announces it via gratuitous ARP (when namespaces are in use)."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    network_id = _uuid()
    port_id = _uuid()
    agent_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                    'subnet_id': _uuid()}],
                     'subnet': {'gateway_ip': '20.0.0.1'},
                     'id': port_id,
                     'network_id': network_id,
                     'mac_address': 'ca:fe:de:ad:be:ef',
                     'ip_cidr': '20.0.0.30/24'}
    fip_ns_name = (
        agent.get_fip_ns_name(str(network_id)))
    interface_name = (
        agent.get_fip_ext_device_name(port_id))
    self.device_exists.return_value = False
    agent.agent_gateway_added(fip_ns_name, agent_gw_port,
                              interface_name)
    self.assertEqual(self.mock_driver.plug.call_count, 1)
    self.assertEqual(self.mock_driver.init_l3.call_count, 1)
    if self.conf.use_namespaces:
        self.send_arp.assert_called_once_with(fip_ns_name, interface_name,
                                              '20.0.0.30',
                                              mock.ANY, mock.ANY)
    else:
        self.utils_exec.assert_any_call(
            check_exit_code=True, root_helper=self.conf.root_helper)
def test_create_rtr_2_fip_link(self):
    """Creating the router<->FIP-namespace link adds a veth pair and a
    default route in the FIP routing table (table 16)."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router = prepare_router_data()
    fip = {'id': _uuid(),
           'host': HOSTNAME,
           'floating_ip_address': '15.1.2.3',
           'fixed_ip_address': '192.168.0.1',
           'floating_network_id': _uuid(),
           'port_id': _uuid()}
    ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                              router=router)
    rtr_2_fip_name = agent.get_rtr_int_device_name(ri.router_id)
    fip_2_rtr_name = agent.get_fip_int_device_name(ri.router_id)
    fip_ns_name = agent.get_fip_ns_name(str(fip['floating_network_id']))
    with mock.patch.object(lla.LinkLocalAllocator, '_write'):
        self.device_exists.return_value = False
        agent.create_rtr_2_fip_link(ri, fip['floating_network_id'])
        self.mock_ip.add_veth.assert_called_with(rtr_2_fip_name,
                                                 fip_2_rtr_name, fip_ns_name)
        # TODO(mrsmith): add more aasserts -
        self.mock_ip_dev.route.add_gateway.assert_called_once_with(
            '169.254.31.29', table=16)
    # TODO(mrsmith): test _create_agent_gateway_port
def test_create_rtr_2_fip_link_already_exists(self):
    """No veth pair is added when the link device already exists."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router = prepare_router_data()
    ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                              router=router)
    self.device_exists.return_value = True
    with mock.patch.object(lla.LinkLocalAllocator, '_write'):
        agent.create_rtr_2_fip_link(ri, {})
    self.assertFalse(self.mock_ip.add_veth.called)
def test_floating_ip_added_dist(self):
    """Adding a distributed floating IP installs an ip rule for the
    fixed address pointing at the FIP routing table."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router = prepare_router_data()
    ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                              router=router)
    agent_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                    'subnet_id': _uuid()}],
                     'subnet': {'gateway_ip': '20.0.0.1'},
                     'id': _uuid(),
                     'network_id': _uuid(),
                     'mac_address': 'ca:fe:de:ad:be:ef',
                     'ip_cidr': '20.0.0.30/24'}
    fip = {'id': _uuid(),
           'host': HOSTNAME,
           'floating_ip_address': '15.1.2.3',
           'fixed_ip_address': '192.168.0.1',
           'floating_network_id': _uuid(),
           'port_id': _uuid()}
    agent.agent_gateway_port = agent_gw_port
    ri.rtr_fip_subnet = lla.LinkLocalAddressPair('169.254.30.42/31')
    ri.dist_fip_count = 0
    ip_cidr = common_utils.ip_to_cidr(fip['floating_ip_address'])
    agent.floating_ip_added_dist(ri, fip, ip_cidr)
    self.mock_rule.add_rule_from.assert_called_with('192.168.0.1',
                                                    16, FIP_PRI)
    # TODO(mrsmith): add more asserts
@mock.patch.object(l3_agent.L3NATAgent, '_fip_ns_unsubscribe')
@mock.patch.object(lla.LinkLocalAllocator, '_write')
def test_floating_ip_removed_dist(self, write, unsubscribe):
    """Removing a distributed FIP deletes its ip rule and route; when the
    last FIP goes, the whole FIP namespace plumbing is torn down."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router = prepare_router_data()
    agent_gw_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                    'subnet_id': _uuid()}],
                     'subnet': {'gateway_ip': '20.0.0.1'},
                     'id': _uuid(),
                     'network_id': _uuid(),
                     'mac_address': 'ca:fe:de:ad:be:ef',
                     'ip_cidr': '20.0.0.30/24'}
    fip_cidr = '11.22.33.44/24'
    ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper,
                              router=router)
    ri.dist_fip_count = 2
    agent.fip_ns_subscribers.add(ri.router_id)
    ri.floating_ips_dict['11.22.33.44'] = FIP_PRI
    ri.fip_2_rtr = '11.22.33.42'
    ri.rtr_2_fip = '11.22.33.40'
    agent.agent_gateway_port = agent_gw_port
    s = lla.LinkLocalAddressPair('169.254.30.42/31')
    ri.rtr_fip_subnet = s
    # Not the last FIP: rule/route removed but namespace kept.
    agent.floating_ip_removed_dist(ri, fip_cidr)
    self.mock_rule.delete_rule_priority.assert_called_with(FIP_PRI)
    self.mock_ip_dev.route.delete_route.assert_called_with(fip_cidr,
                                                           str(s.ip))
    self.assertFalse(unsubscribe.called, '_fip_ns_unsubscribe called!')
    with mock.patch.object(agent, '_destroy_fip_namespace') as f:
        # Last FIP on this router: full teardown expected.
        ri.dist_fip_count = 1
        fip_ns_name = agent.get_fip_ns_name(
            str(agent._fetch_external_net_id()))
        ri.rtr_fip_subnet = agent.local_subnets.allocate(ri.router_id)
        _, fip_to_rtr = ri.rtr_fip_subnet.get_pair()
        agent.floating_ip_removed_dist(ri, fip_cidr)
        self.mock_ip.del_veth.assert_called_once_with(
            agent.get_fip_int_device_name(router['id']))
        self.mock_ip_dev.route.delete_gateway.assert_called_once_with(
            str(fip_to_rtr.ip), table=16)
        f.assert_called_once_with(fip_ns_name)
        unsubscribe.assert_called_once_with(ri.router_id)
def test_get_service_plugin_list(self):
    """The plugin's service-plugin list is fetched and stored at init."""
    service_plugins = [p_const.L3_ROUTER_NAT]
    self.plugin_api.get_service_plugin_list.return_value = service_plugins
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.assertEqual(service_plugins, agent.neutron_service_plugins)
    self.assertTrue(self.plugin_api.get_service_plugin_list.called)
def test_get_service_plugin_list_failed(self):
    """A RemoteError (e.g. old server) leaves the plugin list as None."""
    raise_rpc = messaging.RemoteError()
    self.plugin_api.get_service_plugin_list.side_effect = raise_rpc
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.assertIsNone(agent.neutron_service_plugins)
    self.assertTrue(self.plugin_api.get_service_plugin_list.called)
def test_get_service_plugin_list_retried(self):
    """Transient RPC timeouts are retried until the call succeeds."""
    raise_timeout = messaging.MessagingTimeout()
    # Raise a timeout the first 2 times it calls
    # get_service_plugin_list then return a empty tuple
    self.plugin_api.get_service_plugin_list.side_effect = (
        raise_timeout, raise_timeout, tuple()
    )
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    self.assertEqual(agent.neutron_service_plugins, tuple())
def test_get_service_plugin_list_retried_max(self):
    """After exhausting retries, the timeout propagates out of init."""
    raise_timeout = messaging.MessagingTimeout()
    # Raise a timeout 5 times
    self.plugin_api.get_service_plugin_list.side_effect = (
        (raise_timeout, ) * 5
    )
    self.assertRaises(messaging.MessagingTimeout, l3_agent.L3NATAgent,
                      HOSTNAME, self.conf)
def test__fip_ns_subscribe_is_first_true(self):
    """The first subscriber to the FIP namespace is reported as first."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router_id = _uuid()
    is_first = agent._fip_ns_subscribe(router_id)
    self.assertTrue(is_first)
    self.assertEqual(len(agent.fip_ns_subscribers), 1)
def test__fip_ns_subscribe_is_first_false(self):
    """A second subscriber is not reported as first."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router_id = _uuid()
    router2_id = _uuid()
    agent._fip_ns_subscribe(router_id)
    is_first = agent._fip_ns_subscribe(router2_id)
    self.assertFalse(is_first)
    self.assertEqual(len(agent.fip_ns_subscribers), 2)
def test__fip_ns_unsubscribe_is_last_true(self):
    """Unsubscribing the only subscriber reports 'last' (ns can go)."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router_id = _uuid()
    agent.fip_ns_subscribers.add(router_id)
    is_last = agent._fip_ns_unsubscribe(router_id)
    self.assertTrue(is_last)
    self.assertEqual(len(agent.fip_ns_subscribers), 0)
def test__fip_ns_unsubscribe_is_last_false(self):
    """Unsubscribing while others remain does not report 'last'."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    router_id = _uuid()
    router2_id = _uuid()
    agent.fip_ns_subscribers.add(router_id)
    agent.fip_ns_subscribers.add(router2_id)
    is_last = agent._fip_ns_unsubscribe(router_id)
    self.assertFalse(is_last)
    self.assertEqual(len(agent.fip_ns_subscribers), 1)
def test_external_gateway_removed_ext_gw_port_and_fip(self):
    """On a DVR host, removing the external gateway with the last FIP
    present tears down the FIP namespace plumbing as well: veth deleted,
    FIP-table gateway route removed, namespace deleted, and the cached
    agent gateway port cleared."""
    self.conf.set_override('state_path', '/tmp')
    self.conf.set_override('router_delete_namespaces', True)
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent.conf.agent_mode = 'dvr'
    agent.agent_gateway_port = {'fixed_ips': [{'ip_address': '20.0.0.30',
                                               'subnet_id': _uuid()}],
                                'subnet': {'gateway_ip': '20.0.0.1'},
                                'id': _uuid(),
                                'network_id': _uuid(),
                                'mac_address': 'ca:fe:de:ad:be:ef',
                                'ip_cidr': '20.0.0.30/24'}
    external_net_id = _uuid()
    agent._fetch_external_net_id = mock.Mock(return_value=external_net_id)
    router = prepare_router_data(num_internal_ports=2)
    router['distributed'] = True
    router['gw_port_host'] = HOSTNAME
    ri = dvr_router.DvrRouter(router['id'], self.conf.root_helper, router)
    vm_floating_ip = '19.4.4.2'
    ri.floating_ips_dict[vm_floating_ip] = FIP_PRI
    # Exactly one FIP left, so removal triggers full teardown.
    ri.dist_fip_count = 1
    ri.ex_gw_port = ri.router['gw_port']
    del ri.router['gw_port']
    ri.rtr_fip_subnet = agent.local_subnets.allocate(ri.router_id)
    _, fip_to_rtr = ri.rtr_fip_subnet.get_pair()
    nat = ri.iptables_manager.ipv4['nat']
    nat.clear_rules_by_tag = mock.Mock()
    nat.add_rule = mock.Mock()
    self.mock_ip.get_devices.return_value = [
        FakeDev(agent.get_fip_ext_device_name(_uuid()))]
    self.mock_ip_dev.addr.list.return_value = [
        {'cidr': vm_floating_ip + '/32'},
        {'cidr': '19.4.4.1/24'}]
    self.device_exists.return_value = True
    agent.external_gateway_removed(
        ri, ri.ex_gw_port,
        agent.get_external_device_name(ri.ex_gw_port['id']))
    self.mock_ip.del_veth.assert_called_once_with(
        agent.get_fip_int_device_name(ri.router['id']))
    self.mock_ip_dev.route.delete_gateway.assert_called_once_with(
        str(fip_to_rtr.ip), table=dvr.FIP_RT_TBL)
    self.assertEqual(ri.dist_fip_count, 0)
    self.assertEqual(len(agent.fip_ns_subscribers), 0)
    self.assertEqual(self.mock_driver.unplug.call_count, 1)
    self.assertIsNone(agent.agent_gateway_port)
    self.mock_ip.netns.delete.assert_called_once_with(
        agent.get_fip_ns_name(external_net_id))
    self.assertFalse(nat.add_rule.called)
    nat.clear_rules_by_tag.assert_called_once_with('floating_ip')
def test_spawn_radvd(self):
    """radvd is launched with the expected -C/-p/-m arguments."""
    router = prepare_router_data()
    conffile = '/fake/radvd.conf'
    pidfile = '/fake/radvd.pid'
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    # we don't want the whole process manager to be mocked to be
    # able to catch execute() calls
    self.external_process_p.stop()
    self.ip_cls_p.stop()
    get_pid_file_name = ('neutron.agent.linux.external_process.'
                         'ProcessManager.get_pid_file_name')
    with mock.patch('neutron.agent.linux.utils.execute') as execute:
        with mock.patch(get_pid_file_name) as get_pid:
            get_pid.return_value = pidfile
            ra._spawn_radvd(router['id'],
                            conffile,
                            agent.get_ns_name(router['id']),
                            self.conf.root_helper)
        cmd = execute.call_args[0][0]
    self.assertIn('radvd', cmd)
    # Verify flag/value adjacency by joining argv into one string.
    _join = lambda *args: ' '.join(args)
    cmd = _join(*cmd)
    self.assertIn(_join('-C', conffile), cmd)
    self.assertIn(_join('-p', pidfile), cmd)
    self.assertIn(_join('-m', 'syslog'), cmd)
def test_generate_radvd_conf_other_flag(self):
    """AdvOtherConfigFlag is emitted only for dhcpv6-stateless mode."""
    # we don't check other flag for stateful since it's redundant
    # for this mode and can be ignored by clients, as per RFC4861
    expected = {l3_constants.IPV6_SLAAC: False,
                l3_constants.DHCPV6_STATELESS: True}
    for ra_mode, flag_set in expected.iteritems():
        router = prepare_router_data()
        ri = self._process_router_ipv6_interface_added(router,
                                                       ra_mode=ra_mode)
        ra._generate_radvd_conf(ri.router['id'],
                                router[l3_constants.INTERFACE_KEY],
                                mock.Mock())
        asserter = self.assertIn if flag_set else self.assertNotIn
        asserter('AdvOtherConfigFlag on;',
                 self.utils_replace_file.call_args[0][1])
def test__put_fips_in_error_state(self):
    """Every floating IP on the router is reported with ERROR status.

    _put_fips_in_error_state returns a mapping of fip id -> status, so
    the expected value is that dict and the comparison is equality.
    The previous version wrapped the dict in a list and asserted
    assertNotEqual, which passes for ANY return value and verified
    nothing.
    """
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    ri = mock.Mock()
    ri.router.get.return_value = [{'id': mock.sentinel.id1},
                                  {'id': mock.sentinel.id2}]
    statuses = agent._put_fips_in_error_state(ri)
    expected = {mock.sentinel.id1: l3_constants.FLOATINGIP_STATUS_ERROR,
                mock.sentinel.id2: l3_constants.FLOATINGIP_STATUS_ERROR}
    self.assertEqual(expected, statuses)
def test__process_snat_dnat_for_fip(self):
    """A failure while installing FIP NAT rules is wrapped in
    FloatingIpSetupException."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent.process_router_floating_ip_nat_rules = mock.Mock(
        side_effect=Exception)
    self.assertRaises(n_exc.FloatingIpSetupException,
                      agent._process_snat_dnat_for_fip,
                      mock.sentinel.ri)
    agent.process_router_floating_ip_nat_rules.assert_called_with(
        mock.sentinel.ri)
def test__configure_fip_addresses(self):
    """A failure while configuring FIP addresses is wrapped in
    FloatingIpSetupException."""
    agent = l3_agent.L3NATAgent(HOSTNAME, self.conf)
    agent.process_router_floating_ip_addresses = mock.Mock(
        side_effect=Exception)
    self.assertRaises(n_exc.FloatingIpSetupException,
                      agent._configure_fip_addresses,
                      mock.sentinel.ri,
                      mock.sentinel.ex_gw_port)
    agent.process_router_floating_ip_addresses.assert_called_with(
        mock.sentinel.ri,
        mock.sentinel.ex_gw_port)
# ---- (dataset artifact removed: boundary between two concatenated source files) ----
#!/usr/bin/env python
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Run xDS integration tests on GCP using Traffic Director."""
import argparse
import datetime
import json
import logging
import os
import random
import shlex
import socket
import subprocess
import sys
import tempfile
import time
import uuid
from google.protobuf import json_format
import googleapiclient.discovery
import grpc
from oauth2client.client import GoogleCredentials
import python_utils.jobset as jobset
import python_utils.report_utils as report_utils
from src.proto.grpc.health.v1 import health_pb2
from src.proto.grpc.health.v1 import health_pb2_grpc
from src.proto.grpc.testing import empty_pb2
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import test_pb2_grpc
# Envoy protos provided by PyPI package xds-protos
# Needs to import the generated Python file to load descriptors
try:
from envoy.extensions.filters.common.fault.v3 import fault_pb2
from envoy.extensions.filters.http.fault.v3 import fault_pb2
from envoy.extensions.filters.http.router.v3 import router_pb2
from envoy.extensions.filters.network.http_connection_manager.v3 import \
http_connection_manager_pb2
from envoy.service.status.v3 import csds_pb2
from envoy.service.status.v3 import csds_pb2_grpc
except ImportError:
# These protos are required by CSDS test. We should not fail the entire
# script for one test case.
pass
# Configure the root logger with a single timestamped console handler.
logger = logging.getLogger()
console_handler = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(asctime)s: %(levelname)-8s %(message)s')
console_handler.setFormatter(formatter)
# Replace any handlers installed by imported modules with ours only.
logger.handlers = []
logger.addHandler(console_handler)
logger.setLevel(logging.WARNING)
# Suppress excessive logs for gRPC Python; the originals are saved so
# they can be restored for the client subprocess environment.
original_grpc_trace = os.environ.pop('GRPC_TRACE', None)
original_grpc_verbosity = os.environ.pop('GRPC_VERBOSITY', None)
# Suppress not-essential logs for GCP clients
logging.getLogger('google_auth_httplib2').setLevel(logging.WARNING)
logging.getLogger('googleapiclient.discovery').setLevel(logging.WARNING)
# Test cases run by default when '--test_case=all' is selected.
_TEST_CASES = [
    'backends_restart',
    'change_backend_service',
    'gentle_failover',
    'load_report_based_failover',
    'ping_pong',
    'remove_instance_group',
    'round_robin',
    'secondary_locality_gets_no_requests_on_partial_primary_failure',
    'secondary_locality_gets_requests_on_primary_failure',
    'traffic_splitting',
    'path_matching',
    'header_matching',
    'api_listener',
    'forwarding_rule_port_match',
    'forwarding_rule_default_port',
    'metadata_filter',
]
# Valid test cases, but not in all. So the tests can only run manually, and
# aren't enabled automatically for all languages.
#
# TODO: Move them into _TEST_CASES when support is ready in all languages.
_ADDITIONAL_TEST_CASES = [
    'circuit_breaking',
    'timeout',
    'fault_injection',
    'csds',
]
# Test cases that require the V3 API. Skipped in older runs.
_V3_TEST_CASES = frozenset(['timeout', 'fault_injection', 'csds'])
# Test cases that require the alpha API. Skipped for stable API runs.
_ALPHA_TEST_CASES = frozenset(['timeout'])
def parse_test_cases(arg):
    """Parse a comma-separated list of test case names.

    'all' expands to every entry of _TEST_CASES. An empty string yields
    an empty list. Raises Exception if any name is unknown. The result
    preserves the canonical _TEST_CASES + _ADDITIONAL_TEST_CASES order.
    """
    if arg == '':
        return []
    all_test_cases = _TEST_CASES + _ADDITIONAL_TEST_CASES
    test_cases = set()
    # NOTE: the loop variable must not be named 'arg' — the previous
    # version shadowed the parameter, so the error message below could
    # only ever report the last comma-separated token instead of the
    # full argument.
    for token in arg.split(','):
        if token == "all":
            test_cases = test_cases.union(_TEST_CASES)
        else:
            test_cases.add(token)
    if not all([test_case in all_test_cases for test_case in test_cases]):
        raise Exception('Failed to parse test cases %s' % arg)
    # Preserve order.
    return [x for x in all_test_cases if x in test_cases]
def parse_port_range(port_arg):
    """Parse the --service_port_range flag into a list of candidate ports.

    Args:
      port_arg: Either a single port ('8080') or an inclusive range in the
        form 'min:max' ('8080:8280').

    Returns:
      A list of consecutive candidate ports (a single-element list for the
      single-port form).
    """
    try:
        port = int(port_arg)
        return [port]
    except ValueError:
        # Not a bare integer: fall back to the 'min:max' range form.
        # (The original bare 'except:' also swallowed unrelated errors.)
        port_min, port_max = port_arg.split(':')
        return list(range(int(port_min), int(port_max) + 1))
# Command-line interface. Values are read once into the module-level 'args'
# object immediately after this block; most timing/size constants below
# derive from these flags.
argp = argparse.ArgumentParser(description='Run xDS interop tests on GCP')
# TODO(zdapeng): remove default value of project_id and project_num
argp.add_argument('--project_id', default='grpc-testing', help='GCP project id')
argp.add_argument('--project_num',
                  default='830293263384',
                  help='GCP project number')
argp.add_argument(
    '--gcp_suffix',
    default='',
    help='Optional suffix for all generated GCP resource names. Useful to '
    'ensure distinct names across test runs.')
argp.add_argument(
    '--test_case',
    default='ping_pong',
    type=parse_test_cases,
    help='Comma-separated list of test cases to run. Available tests: %s, '
    '(or \'all\' to run every test). '
    'Alternative tests not included in \'all\': %s' %
    (','.join(_TEST_CASES), ','.join(_ADDITIONAL_TEST_CASES)))
argp.add_argument(
    '--bootstrap_file',
    default='',
    help='File to reference via GRPC_XDS_BOOTSTRAP. Disables built-in '
    'bootstrap generation')
argp.add_argument(
    '--xds_v3_support',
    default=False,
    action='store_true',
    help='Support xDS v3 via GRPC_XDS_EXPERIMENTAL_V3_SUPPORT. '
    'If a pre-created bootstrap file is provided via the --bootstrap_file '
    'parameter, it should include xds_v3 in its server_features field.')
argp.add_argument(
    '--client_cmd',
    default=None,
    help='Command to launch xDS test client. {server_uri}, {stats_port} and '
    '{qps} references will be replaced using str.format(). GRPC_XDS_BOOTSTRAP '
    'will be set for the command')
argp.add_argument(
    '--client_hosts',
    default=None,
    help='Comma-separated list of hosts running client processes. If set, '
    '--client_cmd is ignored and client processes are assumed to be running on '
    'the specified hosts.')
# GCP zone/locality selection for the primary and secondary instance groups.
argp.add_argument('--zone', default='us-central1-a')
argp.add_argument('--secondary_zone',
                  default='us-west1-b',
                  help='Zone to use for secondary TD locality tests')
argp.add_argument('--qps', default=100, type=int, help='Client QPS')
argp.add_argument(
    '--wait_for_backend_sec',
    default=1200,
    type=int,
    help='Time limit for waiting for created backend services to report '
    'healthy when launching or updated GCP resources')
argp.add_argument(
    '--use_existing_gcp_resources',
    default=False,
    action='store_true',
    help=
    'If set, find and use already created GCP resources instead of creating new'
    ' ones.')
argp.add_argument(
    '--keep_gcp_resources',
    default=False,
    action='store_true',
    help=
    'Leave GCP VMs and configuration running after test. Default behavior is '
    'to delete when tests complete.')
argp.add_argument('--halt_after_fail',
                  action='store_true',
                  help='Halt and save the resources when test failed.')
argp.add_argument(
    '--compute_discovery_document',
    default=None,
    type=str,
    help=
    'If provided, uses this file instead of retrieving via the GCP discovery '
    'API')
argp.add_argument(
    '--alpha_compute_discovery_document',
    default=None,
    type=str,
    help='If provided, uses this file instead of retrieving via the alpha GCP '
    'discovery API')
argp.add_argument('--network',
                  default='global/networks/default',
                  help='GCP network to use')
_DEFAULT_PORT_RANGE = '8080:8280'
argp.add_argument('--service_port_range',
                  default=_DEFAULT_PORT_RANGE,
                  type=parse_port_range,
                  help='Listening port for created gRPC backends. Specified as '
                  'either a single int or as a range in the format min:max, in '
                  'which case an available port p will be chosen s.t. min <= p '
                  '<= max')
argp.add_argument(
    '--stats_port',
    default=8079,
    type=int,
    help='Local port for the client process to expose the LB stats service')
argp.add_argument('--xds_server',
                  default='trafficdirector.googleapis.com:443',
                  help='xDS server')
# VM/image configuration for the backend instances created by the test.
argp.add_argument('--source_image',
                  default='projects/debian-cloud/global/images/family/debian-9',
                  help='Source image for VMs created during the test')
argp.add_argument('--path_to_server_binary',
                  default=None,
                  type=str,
                  help='If set, the server binary must already be pre-built on '
                  'the specified source image')
argp.add_argument('--machine_type',
                  default='e2-standard-2',
                  help='Machine type for VMs created during the test')
argp.add_argument(
    '--instance_group_size',
    default=2,
    type=int,
    help='Number of VMs to create per instance group. Certain test cases (e.g., '
    'round_robin) may not give meaningful results if this is set to a value '
    'less than 2.')
argp.add_argument('--verbose',
                  help='verbose log output',
                  default=False,
                  action='store_true')
# TODO(ericgribkoff) Remove this param once the sponge-formatted log files are
# visible in all test environments.
argp.add_argument('--log_client_output',
                  help='Log captured client output',
                  default=False,
                  action='store_true')
# TODO(ericgribkoff) Remove this flag once all test environments are verified to
# have access to the alpha compute APIs.
argp.add_argument('--only_stable_gcp_apis',
                  help='Do not use alpha compute APIs. Some tests may be '
                  'incompatible with this option (gRPC health checks are '
                  'currently alpha and required for simulating server failure',
                  default=False,
                  action='store_true')
# Parse flags once at import time; the rest of the module reads 'args'.
args = argp.parse_args()
if args.verbose:
    logger.setLevel(logging.DEBUG)
# Hosts on which test clients run. Empty list means a single local client.
CLIENT_HOSTS = []
if args.client_hosts:
    CLIENT_HOSTS = args.client_hosts.split(',')
# Each of the config propagation in the control plane should finish within 600s.
# Otherwise, it indicates a bug in the control plane. The config propagation
# includes all kinds of traffic config update, like updating urlMap, creating
# the resources for the first time, updating BackendService, and changing the
# status of endpoints in BackendService.
_WAIT_FOR_URL_MAP_PATCH_SEC = 600
# In general, fetching load balancing stats only takes ~10s. However, slow
# config update could lead to empty EDS or similar symptoms causing the
# connection to hang for a long period of time. So, we want to extend the stats
# wait time to be the same as urlMap patch time.
_WAIT_FOR_STATS_SEC = _WAIT_FOR_URL_MAP_PATCH_SEC
# Port assumed when a backend service reports no explicit port.
_DEFAULT_SERVICE_PORT = 80
_WAIT_FOR_BACKEND_SEC = args.wait_for_backend_sec
_WAIT_FOR_OPERATION_SEC = 1200
_INSTANCE_GROUP_SIZE = args.instance_group_size
# Number of RPCs sampled per stats request: 10 seconds' worth of traffic.
_NUM_TEST_RPCS = 10 * args.qps
_CONNECTION_TIMEOUT_SEC = 60
_GCP_API_RETRIES = 5
# xDS bootstrap file template. Literal '{{'/'}}' survive str.format(); the
# network/zone/server fields are filled in now via %, while {node_id} and
# {server_features} are left for a later str.format() call.
_BOOTSTRAP_TEMPLATE = """
{{
  "node": {{
    "id": "{node_id}",
    "metadata": {{
      "TRAFFICDIRECTOR_NETWORK_NAME": "%s",
      "com.googleapis.trafficdirector.config_time_trace": "TRUE"
    }},
    "locality": {{
      "zone": "%s"
    }}
  }},
  "xds_servers": [{{
    "server_uri": "%s",
    "channel_creds": [
      {{
        "type": "google_default",
        "config": {{}}
      }}
    ],
    "server_features": {server_features}
  }}]
}}""" % (args.network.split('/')[-1], args.zone, args.xds_server)
# TODO(ericgribkoff) Add change_backend_service to this list once TD no longer
# sends an update with no localities when adding the MIG to the backend service
# can race with the URL map patch.
_TESTS_TO_FAIL_ON_RPC_FAILURE = ['ping_pong', 'round_robin']
# Tests that run UnaryCall and EmptyCall.
_TESTS_TO_RUN_MULTIPLE_RPCS = ['path_matching', 'header_matching']
# Tests that make UnaryCall with test metadata.
_TESTS_TO_SEND_METADATA = ['header_matching']
_TEST_METADATA_KEY = 'xds_md'
_TEST_METADATA_VALUE_UNARY = 'unary_yranu'
_TEST_METADATA_VALUE_EMPTY = 'empty_ytpme'
# Extra RPC metadata whose value is a number, sent with UnaryCall only.
_TEST_METADATA_NUMERIC_KEY = 'xds_md_numeric'
_TEST_METADATA_NUMERIC_VALUE = '159'
# Base names for generated GCP resources (combined with --gcp_suffix).
_PATH_MATCHER_NAME = 'path-matcher'
_BASE_TEMPLATE_NAME = 'test-template'
_BASE_INSTANCE_GROUP_NAME = 'test-ig'
_BASE_HEALTH_CHECK_NAME = 'test-hc'
_BASE_FIREWALL_RULE_NAME = 'test-fw-rule'
_BASE_BACKEND_SERVICE_NAME = 'test-backend-service'
_BASE_URL_MAP_NAME = 'test-map'
_BASE_SERVICE_HOST = 'grpc-test'
_BASE_TARGET_PROXY_NAME = 'test-target-proxy'
_BASE_FORWARDING_RULE_NAME = 'test-forwarding-rule'
# Log/report output locations.
_TEST_LOG_BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  '../../reports')
_SPONGE_LOG_NAME = 'sponge_log.log'
_SPONGE_XML_NAME = 'sponge_log.xml'
def get_client_stats(num_rpcs, timeout_sec):
    """Fetch per-backend load-balancing stats from the first test client.

    Args:
      num_rpcs: number of RPCs the client should observe before responding.
      timeout_sec: how long the client may wait to observe them.

    Returns:
      The LoadBalancerStatsResponse from the first configured client host.
    """
    targets = CLIENT_HOSTS or ['localhost']
    for target in targets:
        address = '%s:%d' % (target, args.stats_port)
        with grpc.insecure_channel(address) as channel:
            stub = test_pb2_grpc.LoadBalancerStatsServiceStub(channel)
            request = messages_pb2.LoadBalancerStatsRequest()
            request.num_rpcs = num_rpcs
            request.timeout_sec = timeout_sec
            # Allow the server its full stats window plus connection slack.
            deadline = timeout_sec + _CONNECTION_TIMEOUT_SEC
            logger.debug('Invoking GetClientStats RPC to %s:%d:', target,
                         args.stats_port)
            response = stub.GetClientStats(request,
                                           wait_for_ready=True,
                                           timeout=deadline)
            logger.debug('Invoked GetClientStats RPC to %s: %s', target,
                         json_format.MessageToJson(response))
            # Only the first host is ever queried.
            return response
def get_client_accumulated_stats():
    """Fetch accumulated per-method RPC stats from the first test client.

    Returns:
      The LoadBalancerAccumulatedStatsResponse from the first configured
      client host.
    """
    targets = CLIENT_HOSTS or ['localhost']
    for target in targets:
        address = '%s:%d' % (target, args.stats_port)
        with grpc.insecure_channel(address) as channel:
            stub = test_pb2_grpc.LoadBalancerStatsServiceStub(channel)
            request = messages_pb2.LoadBalancerAccumulatedStatsRequest()
            logger.debug('Invoking GetClientAccumulatedStats RPC to %s:%d:',
                         target, args.stats_port)
            response = stub.GetClientAccumulatedStats(
                request, wait_for_ready=True, timeout=_CONNECTION_TIMEOUT_SEC)
            logger.debug('Invoked GetClientAccumulatedStats RPC to %s: %s',
                         target, response)
            # Only the first host is ever queried.
            return response
def get_client_xds_config_dump():
    """Fetch the test client's xDS config dump via the CSDS service.

    Returns:
      The single ClientConfig converted to a dict (proto field names
      preserved), or None if the response does not contain exactly one
      ClientConfig.
    """
    targets = CLIENT_HOSTS or ['localhost']
    for target in targets:
        server_address = '%s:%d' % (target, args.stats_port)
        with grpc.insecure_channel(server_address) as channel:
            stub = csds_pb2_grpc.ClientStatusDiscoveryServiceStub(channel)
            logger.debug('Fetching xDS config dump from %s', server_address)
            response = stub.FetchClientStatus(csds_pb2.ClientStatusRequest(),
                                              wait_for_ready=True,
                                              timeout=_CONNECTION_TIMEOUT_SEC)
            logger.debug('Fetched xDS config dump from %s', server_address)
            if len(response.config) != 1:
                logger.error('Unexpected number of ClientConfigs %d: %s',
                             len(response.config), response)
                return None
            # Convert the ClientStatusResponse into a JSON-style dict: many
            # fields are packed in google.protobuf.Any, and unpacking each
            # proto message by hand would duplicate a lot of code.
            return json_format.MessageToDict(
                response.config[0], preserving_proto_field_name=True)
def configure_client(rpc_types, metadata=(), timeout_sec=None):
    """Reconfigure the RPCs the test client sends, via its Configure service.

    Args:
      rpc_types: iterable of ClientConfigureRequest.RpcType values the client
        should start sending.
      metadata: iterable of (rpc_type, key, value) tuples to attach to RPCs
        of the given type. Default changed from a shared mutable list ([])
        to an immutable tuple; behavior is unchanged since it is only
        iterated, never mutated.
      timeout_sec: per-RPC timeout to configure, applied only when truthy.
    """
    if CLIENT_HOSTS:
        hosts = CLIENT_HOSTS
    else:
        hosts = ['localhost']
    for host in hosts:
        with grpc.insecure_channel('%s:%d' %
                                   (host, args.stats_port)) as channel:
            stub = test_pb2_grpc.XdsUpdateClientConfigureServiceStub(channel)
            request = messages_pb2.ClientConfigureRequest()
            request.types.extend(rpc_types)
            for rpc_type, md_key, md_value in metadata:
                md = request.metadata.add()
                md.type = rpc_type
                md.key = md_key
                md.value = md_value
            if timeout_sec:
                request.timeout_sec = timeout_sec
            logger.debug(
                'Invoking XdsUpdateClientConfigureService RPC to %s:%d: %s',
                host, args.stats_port, request)
            stub.Configure(request,
                           wait_for_ready=True,
                           timeout=_CONNECTION_TIMEOUT_SEC)
            logger.debug('Invoked XdsUpdateClientConfigureService RPC to %s',
                         host)
class RpcDistributionError(Exception):
    """Raised when RPC load is not distributed across backends as expected."""
def _verify_rpcs_to_given_backends(backends, timeout_sec, num_rpcs,
                                   allow_failures):
    """Poll client stats until RPCs land on exactly the given backends.

    Args:
      backends: backend instance names that must all receive load.
      timeout_sec: how long to keep retrying before giving up.
      num_rpcs: number of RPCs sampled per stats request.
      allow_failures: when False, any failed RPC is treated as an error.

    Raises:
      RpcDistributionError: describing the last mismatch seen at timeout.
    """

    def _attempt():
        # Returns None on success, otherwise a description of the mismatch.
        # A failed-RPC problem deliberately takes precedence over a
        # distribution problem, matching the reporting order below.
        stats = get_client_stats(num_rpcs, timeout_sec)
        rpcs_by_peer = stats.rpcs_by_peer
        problem = None
        for backend in backends:
            if backend not in rpcs_by_peer:
                problem = 'Backend %s did not receive load' % backend
                break
        if not problem and len(rpcs_by_peer) > len(backends):
            problem = 'Unexpected backend received load: %s' % rpcs_by_peer
        if not allow_failures and stats.num_failures > 0:
            problem = '%d RPCs failed' % stats.num_failures
        return problem

    logger.debug('Waiting for %d sec until backends %s receive load' %
                 (timeout_sec, backends))
    deadline = time.time() + timeout_sec
    error_msg = None
    while time.time() <= deadline:
        error_msg = _attempt()
        if not error_msg:
            return
    raise RpcDistributionError(error_msg)
def wait_until_all_rpcs_go_to_given_backends_or_fail(backends,
                                                     timeout_sec,
                                                     num_rpcs=_NUM_TEST_RPCS):
    """Wait until load goes only to the given backends, tolerating RPC failures."""
    _verify_rpcs_to_given_backends(backends, timeout_sec, num_rpcs,
                                   allow_failures=True)
def wait_until_all_rpcs_go_to_given_backends(backends,
                                             timeout_sec,
                                             num_rpcs=_NUM_TEST_RPCS):
    """Wait until load goes only to the given backends with zero RPC failures."""
    _verify_rpcs_to_given_backends(backends, timeout_sec, num_rpcs,
                                   allow_failures=False)
def wait_until_no_rpcs_go_to_given_backends(backends, timeout_sec):
    """Wait until none of the given backends receive any RPC load.

    Polls client stats until a whole stats window passes in which no RPC
    lands on any backend in `backends`, or until timeout_sec elapses.

    Args:
      backends: backend instance names that must receive no load.
      timeout_sec: how long to keep retrying before giving up.

    Raises:
      Exception: if some listed backend still receives load at timeout.
    """
    start_time = time.time()
    error_msg = None
    while time.time() - start_time <= timeout_sec:
        stats = get_client_stats(_NUM_TEST_RPCS, timeout_sec)
        error_msg = None
        rpcs_by_peer = stats.rpcs_by_peer
        for backend in backends:
            if backend in rpcs_by_peer:
                error_msg = 'Unexpected backend %s receives load' % backend
                break
        if not error_msg:
            return
    # Surface the last observed offender; the original discarded error_msg
    # and raised a generic message.
    raise Exception('Unexpected RPCs going to given backends: %s' % error_msg)
def wait_until_rpcs_in_flight(rpc_type, timeout_sec, num_rpcs, threshold):
    """Block until roughly num_rpcs RPCs of rpc_type are stably outstanding.

    Args:
      rpc_type: RPC method to check, either 'UnaryCall' or 'EmptyCall'.
      timeout_sec: maximum seconds to wait for the desired state.
      num_rpcs: expected number of in-flight RPCs.
      threshold: tolerance in percent (0-100) on the in-flight count.

    Raises:
      ValueError: if threshold is outside [0, 100].
      Exception: if the in-flight count never settles near num_rpcs.
    """
    if threshold < 0 or threshold > 100:
        raise ValueError('Value error: Threshold should be between 0 to 100')
    threshold_fraction = threshold / 100.0
    logger.debug(
        'Waiting for %d sec until %d %s RPCs (with %d%% tolerance) in-flight' %
        (timeout_sec, num_rpcs, rpc_type, threshold))
    deadline = time.time() + timeout_sec
    error_msg = None
    while time.time() <= deadline:
        error_msg = _check_rpcs_in_flight(rpc_type, num_rpcs, threshold,
                                          threshold_fraction)
        if not error_msg:
            break
        logger.debug('Progress: %s', error_msg)
        time.sleep(2)
    if not error_msg:
        # Re-check after a pause to make sure the in-flight count is stable.
        time.sleep(5)
        error_msg = _check_rpcs_in_flight(rpc_type, num_rpcs, threshold,
                                          threshold_fraction)
    if error_msg:
        raise Exception("Wrong number of %s RPCs in-flight: %s" %
                        (rpc_type, error_msg))
def _check_rpcs_in_flight(rpc_type, num_rpcs, threshold, threshold_fraction):
    """Return None if the in-flight count is within tolerance, else an error string."""
    stats = get_client_accumulated_stats()
    started = stats.num_rpcs_started_by_method[rpc_type]
    succeeded = stats.num_rpcs_succeeded_by_method[rpc_type]
    failed = stats.num_rpcs_failed_by_method[rpc_type]
    # In-flight = started but not yet finished (either way).
    in_flight = started - succeeded - failed
    if in_flight < (num_rpcs * (1 - threshold_fraction)):
        return ('actual(%d) < expected(%d - %d%%)' %
                (in_flight, num_rpcs, threshold))
    if in_flight > (num_rpcs * (1 + threshold_fraction)):
        return ('actual(%d) > expected(%d + %d%%)' %
                (in_flight, num_rpcs, threshold))
    return None
def compare_distributions(actual_distribution, expected_distribution,
                          threshold):
    """Check that two load distributions agree within a percentage threshold.

    Args:
      actual_distribution: list of floats, the observed distribution.
      expected_distribution: list of floats, the expected distribution.
      threshold: percentage in [0, 100] by which each actual entry may
        deviate from its expected counterpart.

    Returns:
      True when every actual entry lies within threshold% of the expected one.

    Raises:
      ValueError: if threshold is not within [0, 100].
      Exception: if the sizes differ, or an entry deviates beyond threshold.
    """
    expected_len = len(expected_distribution)
    actual_len = len(actual_distribution)
    if expected_len != actual_len:
        raise Exception(
            'Error: expected and actual distributions have different size (%d vs %d)'
            % (expected_len, actual_len))
    if threshold < 0 or threshold > 100:
        raise ValueError('Value error: Threshold should be between 0 to 100')
    tolerance = threshold / 100.0
    for expected, actual in zip(expected_distribution, actual_distribution):
        if actual < expected * (1 - tolerance):
            raise Exception("actual(%f) < expected(%f-%d%%)" %
                            (actual, expected, threshold))
        if actual > expected * (1 + tolerance):
            raise Exception("actual(%f) > expected(%f+%d%%)" %
                            (actual, expected, threshold))
    return True
def compare_expected_instances(stats, expected_instances):
    """Compare if stats have expected instances for each type of RPC.

    Args:
      stats: LoadBalancerStatsResponse reported by interop client.
      expected_instances: a dict with key as the RPC type (string), value as
        the expected backend instances (list of strings).

    Returns:
      Returns true if the instances are expected. False if not.
    """
    for rpc_type, expected_peers in list(expected_instances.items()):
        rpcs_by_peer_for_type = stats.rpcs_by_method[rpc_type]
        # An empty/absent per-method entry previously became None and crashed
        # on .keys() below; treat it as "no peers observed" instead.
        rpcs_by_peer = rpcs_by_peer_for_type.rpcs_by_peer if rpcs_by_peer_for_type else {}
        logger.debug('rpc: %s, by_peer: %s', rpc_type, rpcs_by_peer)
        peers = list(rpcs_by_peer.keys())
        if set(peers) != set(expected_peers):
            logger.info('unexpected peers for %s, got %s, want %s', rpc_type,
                        peers, expected_peers)
            return False
    return True
def test_backends_restart(gcp, backend_service, instance_group):
    """Verify traffic drains when all backends stop and recovers on restart.

    Resizes the instance group to 0 (expecting load to leave the original
    instances), then restores the original size and checks the recreated
    instances receive all RPCs again.
    """
    logger.info('Running test_backends_restart')
    instance_names = get_instance_names(gcp, instance_group)
    num_instances = len(instance_names)
    # NOTE(review): start_time is never read; appears to be dead code.
    start_time = time.time()
    wait_until_all_rpcs_go_to_given_backends(instance_names,
                                             _WAIT_FOR_STATS_SEC)
    try:
        resize_instance_group(gcp, instance_group, 0)
        wait_until_all_rpcs_go_to_given_backends_or_fail([],
                                                         _WAIT_FOR_BACKEND_SEC)
    finally:
        # Always restore the group size, then confirm traffic recovery on the
        # (possibly renamed) new instances.
        resize_instance_group(gcp, instance_group, num_instances)
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        new_instance_names = get_instance_names(gcp, instance_group)
        wait_until_all_rpcs_go_to_given_backends(new_instance_names,
                                                 _WAIT_FOR_BACKEND_SEC)
def test_change_backend_service(gcp, original_backend_service, instance_group,
                                alternate_backend_service,
                                same_zone_instance_group):
    """Verify traffic fully switches when the URL map's backend service changes.

    Starts with all traffic on the original backend service, patches the URL
    map to point at the alternate service, and expects all RPCs to move to
    the alternate instances.
    """
    logger.info('Running test_change_backend_service')
    original_backend_instances = get_instance_names(gcp, instance_group)
    alternate_backend_instances = get_instance_names(gcp,
                                                     same_zone_instance_group)
    patch_backend_service(gcp, alternate_backend_service,
                          [same_zone_instance_group])
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    wait_for_healthy_backends(gcp, alternate_backend_service,
                              same_zone_instance_group)
    wait_until_all_rpcs_go_to_given_backends(original_backend_instances,
                                             _WAIT_FOR_STATS_SEC)
    passed = True
    try:
        patch_url_map_backend_service(gcp, alternate_backend_service)
        wait_until_all_rpcs_go_to_given_backends(alternate_backend_instances,
                                                 _WAIT_FOR_URL_MAP_PATCH_SEC)
    except Exception:
        passed = False
        raise
    finally:
        # Restore the original routing unless we are halting to preserve the
        # failed state for debugging (--halt_after_fail).
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_gentle_failover(gcp,
                         backend_service,
                         primary_instance_group,
                         secondary_instance_group,
                         swapped_primary_and_secondary=False):
    """Verify partial failover when most (but not all) primaries stop serving.

    Stops all but one primary instance and expects traffic on the remaining
    primary plus the secondary locality. If Traffic Director treats the
    "secondary" group as primary (zone ordering), the test retries once with
    the groups swapped (swapped_primary_and_secondary guards recursion).
    """
    logger.info('Running test_gentle_failover')
    num_primary_instances = len(get_instance_names(gcp, primary_instance_group))
    min_instances_for_gentle_failover = 3  # Need >50% failure to start failover
    passed = True
    try:
        if num_primary_instances < min_instances_for_gentle_failover:
            resize_instance_group(gcp, primary_instance_group,
                                  min_instances_for_gentle_failover)
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        secondary_instance_names = get_instance_names(gcp,
                                                      secondary_instance_group)
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        # Stop every primary except the last one (>50% failure triggers
        # gentle failover).
        instances_to_stop = primary_instance_names[:-1]
        remaining_instances = primary_instance_names[-1:]
        try:
            set_serving_status(instances_to_stop,
                               gcp.service_port,
                               serving=False)
            wait_until_all_rpcs_go_to_given_backends(
                remaining_instances + secondary_instance_names,
                _WAIT_FOR_BACKEND_SEC)
        finally:
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=True)
    except RpcDistributionError as e:
        if not swapped_primary_and_secondary and is_primary_instance_group(
                gcp, secondary_instance_group):
            # Swap expectation of primary and secondary instance groups.
            test_gentle_failover(gcp,
                                 backend_service,
                                 secondary_instance_group,
                                 primary_instance_group,
                                 swapped_primary_and_secondary=True)
        else:
            passed = False
            raise e
    except Exception:
        passed = False
        raise
    finally:
        # Restore original topology unless halting to preserve a failed state.
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
            resize_instance_group(gcp, primary_instance_group,
                                  num_primary_instances)
            instance_names = get_instance_names(gcp, primary_instance_group)
            wait_until_all_rpcs_go_to_given_backends(instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
def test_load_report_based_failover(gcp, backend_service,
                                    primary_instance_group,
                                    secondary_instance_group):
    """Verify failover driven by RATE balancing-mode capacity limits.

    With the primary locality capped below the client QPS, the secondary
    locality must absorb overflow; with the cap above the client QPS, all
    traffic must return to the primary locality.
    """
    logger.info('Running test_load_report_based_failover')
    passed = True
    try:
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        secondary_instance_names = get_instance_names(gcp,
                                                      secondary_instance_group)
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        # Set primary locality's balance mode to RATE, and RPS to 20% of the
        # client's QPS. The secondary locality will be used.
        max_rate = int(args.qps * 1 / 5)
        logger.info('Patching backend service to RATE with %d max_rate',
                    max_rate)
        patch_backend_service(
            gcp,
            backend_service, [primary_instance_group, secondary_instance_group],
            balancing_mode='RATE',
            max_rate=max_rate)
        wait_until_all_rpcs_go_to_given_backends(
            primary_instance_names + secondary_instance_names,
            _WAIT_FOR_BACKEND_SEC)
        # Set primary locality's balance mode to RATE, and RPS to 120% of the
        # client's QPS. Only the primary locality will be used.
        max_rate = int(args.qps * 6 / 5)
        logger.info('Patching backend service to RATE with %d max_rate',
                    max_rate)
        patch_backend_service(
            gcp,
            backend_service, [primary_instance_group, secondary_instance_group],
            balancing_mode='RATE',
            max_rate=max_rate)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_BACKEND_SEC)
        logger.info("success")
    except Exception:
        passed = False
        raise
    finally:
        # Restore original topology unless halting to preserve a failed state.
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
            instance_names = get_instance_names(gcp, primary_instance_group)
            wait_until_all_rpcs_go_to_given_backends(instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
def test_ping_pong(gcp, backend_service, instance_group):
    """Smoke test: every backend in the instance group must receive traffic."""
    logger.info('Running test_ping_pong')
    wait_for_healthy_backends(gcp, backend_service, instance_group)
    backends = get_instance_names(gcp, instance_group)
    wait_until_all_rpcs_go_to_given_backends(backends, _WAIT_FOR_STATS_SEC)
def test_remove_instance_group(gcp, backend_service, instance_group,
                               same_zone_instance_group):
    """Verify traffic moves when one of two instance groups is removed.

    Attaches both groups, determines which group(s) actually receive load,
    then patches the backend service down to the group NOT currently serving
    and expects all traffic to switch to it.
    """
    logger.info('Running test_remove_instance_group')
    passed = True
    try:
        patch_backend_service(gcp,
                              backend_service,
                              [instance_group, same_zone_instance_group],
                              balancing_mode='RATE')
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  same_zone_instance_group)
        instance_names = get_instance_names(gcp, instance_group)
        same_zone_instance_names = get_instance_names(gcp,
                                                      same_zone_instance_group)
        try:
            # Expected case: both groups serve; remove instance_group and
            # expect traffic on same_zone_instance_group.
            wait_until_all_rpcs_go_to_given_backends(
                instance_names + same_zone_instance_names,
                _WAIT_FOR_OPERATION_SEC)
            remaining_instance_group = same_zone_instance_group
            remaining_instance_names = same_zone_instance_names
        except RpcDistributionError as e:
            # If connected to TD in a different zone, we may route traffic to
            # only one instance group. Determine which group that is to continue
            # with the remainder of the test case.
            try:
                # Only instance_group serves: keep same_zone as the group we
                # patch down to.
                wait_until_all_rpcs_go_to_given_backends(
                    instance_names, _WAIT_FOR_STATS_SEC)
                remaining_instance_group = same_zone_instance_group
                remaining_instance_names = same_zone_instance_names
            except RpcDistributionError as e:
                # Only same_zone serves: patch down to instance_group instead.
                wait_until_all_rpcs_go_to_given_backends(
                    same_zone_instance_names, _WAIT_FOR_STATS_SEC)
                remaining_instance_group = instance_group
                remaining_instance_names = instance_names
        patch_backend_service(gcp,
                              backend_service, [remaining_instance_group],
                              balancing_mode='RATE')
        wait_until_all_rpcs_go_to_given_backends(remaining_instance_names,
                                                 _WAIT_FOR_BACKEND_SEC)
    except Exception:
        passed = False
        raise
    finally:
        # Restore original topology unless halting to preserve a failed state.
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service, [instance_group])
            wait_until_all_rpcs_go_to_given_backends(instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
def test_round_robin(gcp, backend_service, instance_group):
    """Verify RPCs are spread evenly (round robin) across all backends.

    Each backend's request count must be within `threshold` of the exact
    even share of _NUM_TEST_RPCS.
    """
    logger.info('Running test_round_robin')
    wait_for_healthy_backends(gcp, backend_service, instance_group)
    instance_names = get_instance_names(gcp, instance_group)
    # Max absolute deviation allowed from a perfectly even split.
    threshold = 1
    wait_until_all_rpcs_go_to_given_backends(instance_names,
                                             _WAIT_FOR_STATS_SEC)
    # TODO(ericgribkoff) Delayed config propagation from earlier tests
    # may result in briefly receiving an empty EDS update, resulting in failed
    # RPCs. Retry distribution validation if this occurs; long-term fix is
    # creating new backend resources for each individual test case.
    # Each attempt takes 10 seconds. Config propagation can take several
    # minutes.
    max_attempts = 40
    for i in range(max_attempts):
        stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
        requests_received = [stats.rpcs_by_peer[x] for x in stats.rpcs_by_peer]
        total_requests_received = sum(requests_received)
        if total_requests_received != _NUM_TEST_RPCS:
            # Some RPCs failed; distribution cannot be validated — retry.
            logger.info('Unexpected RPC failures, retrying: %s', stats)
            continue
        expected_requests = total_requests_received / len(instance_names)
        for instance in instance_names:
            if abs(stats.rpcs_by_peer[instance] -
                   expected_requests) > threshold:
                raise Exception(
                    'RPC peer distribution differs from expected by more than %d '
                    'for instance %s (%s)' % (threshold, instance, stats))
        return
    raise Exception('RPC failures persisted through %d retries' % max_attempts)
def test_secondary_locality_gets_no_requests_on_partial_primary_failure(
        gcp,
        backend_service,
        primary_instance_group,
        secondary_instance_group,
        swapped_primary_and_secondary=False):
    """Verify the secondary locality stays idle when one primary fails.

    Stops a single primary instance (minority failure) and expects all
    traffic to stay on the remaining primary instances — none on the
    secondary. Retries once with groups swapped if TD treats the secondary
    group as primary (swapped_primary_and_secondary guards recursion).
    """
    logger.info(
        'Running secondary_locality_gets_no_requests_on_partial_primary_failure'
    )
    passed = True
    try:
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        # Stop just the first primary instance (a minority of the group).
        instances_to_stop = primary_instance_names[:1]
        remaining_instances = primary_instance_names[1:]
        try:
            set_serving_status(instances_to_stop,
                               gcp.service_port,
                               serving=False)
            wait_until_all_rpcs_go_to_given_backends(remaining_instances,
                                                     _WAIT_FOR_BACKEND_SEC)
        finally:
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=True)
    except RpcDistributionError as e:
        if not swapped_primary_and_secondary and is_primary_instance_group(
                gcp, secondary_instance_group):
            # Swap expectation of primary and secondary instance groups.
            test_secondary_locality_gets_no_requests_on_partial_primary_failure(
                gcp,
                backend_service,
                secondary_instance_group,
                primary_instance_group,
                swapped_primary_and_secondary=True)
        else:
            passed = False
            raise e
    finally:
        # Restore original topology unless halting to preserve a failed state.
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
def test_secondary_locality_gets_requests_on_primary_failure(
        gcp,
        backend_service,
        primary_instance_group,
        secondary_instance_group,
        swapped_primary_and_secondary=False):
    """Verify full failover to the secondary locality when all primaries fail.

    Stops every primary instance and expects all traffic on the secondary
    locality. Retries once with groups swapped if TD treats the secondary
    group as primary (swapped_primary_and_secondary guards recursion).
    """
    logger.info('Running secondary_locality_gets_requests_on_primary_failure')
    passed = True
    try:
        patch_backend_service(
            gcp, backend_service,
            [primary_instance_group, secondary_instance_group])
        wait_for_healthy_backends(gcp, backend_service, primary_instance_group)
        wait_for_healthy_backends(gcp, backend_service,
                                  secondary_instance_group)
        primary_instance_names = get_instance_names(gcp, primary_instance_group)
        secondary_instance_names = get_instance_names(gcp,
                                                      secondary_instance_group)
        wait_until_all_rpcs_go_to_given_backends(primary_instance_names,
                                                 _WAIT_FOR_STATS_SEC)
        try:
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=False)
            wait_until_all_rpcs_go_to_given_backends(secondary_instance_names,
                                                     _WAIT_FOR_BACKEND_SEC)
        finally:
            set_serving_status(primary_instance_names,
                               gcp.service_port,
                               serving=True)
    except RpcDistributionError as e:
        if not swapped_primary_and_secondary and is_primary_instance_group(
                gcp, secondary_instance_group):
            # Swap expectation of primary and secondary instance groups.
            test_secondary_locality_gets_requests_on_primary_failure(
                gcp,
                backend_service,
                secondary_instance_group,
                primary_instance_group,
                swapped_primary_and_secondary=True)
        else:
            passed = False
            raise e
    finally:
        # Restore original topology unless halting to preserve a failed state.
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, backend_service,
                                  [primary_instance_group])
def prepare_services_for_urlmap_tests(gcp, original_backend_service,
                                      instance_group, alternate_backend_service,
                                      same_zone_instance_group):
    """Prepare both backend services for tests that modify URL maps.

    Attaches the alternate instance group, waits until both backend services
    are healthy, and waits until all traffic goes to the original backends —
    the common starting state for URL-map routing tests.

    Returns:
      A (original_backend_instances, alternate_backend_instances) pair of
      instance-name lists (each a list of strings).
    """
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)

    patch_backend_service(gcp, alternate_backend_service,
                          [same_zone_instance_group])
    logger.info('waiting for alternate to become healthy')
    wait_for_healthy_backends(gcp, alternate_backend_service,
                              same_zone_instance_group)

    original_backend_instances = get_instance_names(gcp, instance_group)
    logger.info('original backends instances: %s', original_backend_instances)

    alternate_backend_instances = get_instance_names(gcp,
                                                     same_zone_instance_group)
    logger.info('alternate backends instances: %s', alternate_backend_instances)

    # Start with all traffic going to original_backend_service.
    logger.info('waiting for traffic to all go to original backends')
    wait_until_all_rpcs_go_to_given_backends(original_backend_instances,
                                             _WAIT_FOR_STATS_SEC)
    return original_backend_instances, alternate_backend_instances
def test_metadata_filter(gcp, original_backend_service, instance_group,
                         alternate_backend_service, same_zone_instance_group):
    """Verify url-map metadataFilters route traffic by xDS node metadata.

    Builds several route-rule sets whose metadataFilters use MATCH_ALL /
    MATCH_ANY against labels read from the client's bootstrap node metadata
    (guaranteed to match) and against a fake label (which can never match).
    For every rule set, all traffic must shift from the original backends to
    the alternate backends.  The url map and the alternate backend service
    are restored in the finally block (unless halting after a failure).
    """
    logger.info("Running test_metadata_filter")
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    original_backend_instances = get_instance_names(gcp, instance_group)
    alternate_backend_instances = get_instance_names(gcp,
                                                     same_zone_instance_group)
    patch_backend_service(gcp, alternate_backend_service,
                          [same_zone_instance_group])
    wait_for_healthy_backends(gcp, alternate_backend_service,
                              same_zone_instance_group)
    passed = True
    try:
        # Labels copied verbatim from the client's bootstrap node metadata
        # always match that client; the fake label never does.
        with open(bootstrap_path) as f:
            md = json.load(f)['node']['metadata']
            match_labels = []
            for k, v in list(md.items()):
                match_labels.append({'name': k, 'value': v})
        not_match_labels = [{'name': 'fake', 'value': 'fail'}]
        # Each entry is a complete routeRules list; in every set the rule
        # routing to alternate_backend_service is the one expected to match.
        test_route_rules = [
            # test MATCH_ALL
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': not_match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
            ],
            # test mixing MATCH_ALL and MATCH_ANY
            # test MATCH_ALL: super set labels won't match
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': not_match_labels + match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': not_match_labels + match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
            ],
            # test MATCH_ANY
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': not_match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': not_match_labels + match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
            ],
            # test match multiple route rules
            [
                {
                    'priority': 0,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ANY',
                            'filterLabels': match_labels
                        }]
                    }],
                    'service': alternate_backend_service.url
                },
                {
                    'priority': 1,
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'metadataFilters': [{
                            'filterMatchCriteria': 'MATCH_ALL',
                            'filterLabels': match_labels
                        }]
                    }],
                    'service': original_backend_service.url
                },
            ]
        ]
        for route_rules in test_route_rules:
            # Ensure a clean baseline, apply the rules, then require a full
            # shift to the alternate backends before resetting the url map.
            wait_until_all_rpcs_go_to_given_backends(original_backend_instances,
                                                     _WAIT_FOR_STATS_SEC)
            patch_url_map_backend_service(gcp,
                                          original_backend_service,
                                          route_rules=route_rules)
            wait_until_no_rpcs_go_to_given_backends(original_backend_instances,
                                                    _WAIT_FOR_STATS_SEC)
            wait_until_all_rpcs_go_to_given_backends(
                alternate_backend_instances, _WAIT_FOR_STATS_SEC)
            patch_url_map_backend_service(gcp, original_backend_service)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_backend_service(gcp, alternate_backend_service, [])
def test_api_listener(gcp, backend_service, instance_group,
                      alternate_backend_service):
    """Verify a duplicate API listener with the same host name is harmless.

    Creates a second suite of url-map + target-proxy + forwarding-rule that
    shares the host name with the original suite, checks that traffic keeps
    flowing, deletes the original suite, and confirms RPCs still reach the
    backends through the remaining listener.  The second suite is torn down
    and the original resources are recreated in the finally block.

    Returns:
      The server URI clients should target after the original resources are
      restored (only when cleanup runs; see NOTE on the finally block).
    """
    logger.info("Running api_listener")
    passed = True
    new_config_suffix = ''
    try:
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        backend_instances = get_instance_names(gcp, instance_group)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # create a second suite of map+tp+fr with the same host name in host rule
        # and we have to disable proxyless validation because it needs `0.0.0.0`
        # ip address in fr for proxyless and also we violate ip:port uniqueness
        # for test purpose. See https://github.com/grpc/grpc-java/issues/8009
        new_config_suffix = '2'
        create_url_map(gcp, url_map_name + new_config_suffix, backend_service,
                       service_host_name)
        create_target_proxy(gcp, target_proxy_name + new_config_suffix, False)
        if not gcp.service_port:
            raise Exception(
                'Failed to find a valid port for the forwarding rule')
        # Offer several random candidate IPs so forwarding-rule creation can
        # fall back when an address is already in use.
        max_attempts = 10
        potential_ip_addresses = [
            '10.10.10.%d' % random.randint(0, 255)
            for _ in range(max_attempts)
        ]
        create_global_forwarding_rule(gcp,
                                      forwarding_rule_name + new_config_suffix,
                                      [gcp.service_port],
                                      potential_ip_addresses)
        if gcp.service_port != _DEFAULT_SERVICE_PORT:
            patch_url_map_host_rule_with_port(gcp,
                                              url_map_name + new_config_suffix,
                                              backend_service,
                                              service_host_name)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # Remove the original suite; the duplicate listener must keep serving.
        delete_global_forwarding_rule(gcp, forwarding_rule_name)
        delete_target_proxy(gcp, target_proxy_name)
        delete_url_map(gcp, url_map_name)
        # Keep checking for roughly as long as a url-map change takes to
        # propagate, to be confident traffic is stable.
        verify_attempts = int(_WAIT_FOR_URL_MAP_PATCH_SEC / _NUM_TEST_RPCS *
                              args.qps)
        for i in range(verify_attempts):
            wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                     _WAIT_FOR_STATS_SEC)
        # delete host rule for the original host name
        patch_url_map_backend_service(gcp, alternate_backend_service)
        wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                _WAIT_FOR_STATS_SEC)
    except Exception:
        passed = False
        raise
    finally:
        # NOTE(review): the `return` below sits inside `finally`, so when
        # cleanup runs after a failure (halt_after_fail unset) it suppresses
        # the propagating exception. Kept as-is to preserve behavior.
        if passed or not args.halt_after_fail:
            delete_global_forwarding_rule(
                gcp, forwarding_rule_name + new_config_suffix)
            delete_target_proxy(gcp, target_proxy_name + new_config_suffix)
            delete_url_map(gcp, url_map_name + new_config_suffix)
            create_url_map(gcp, url_map_name, backend_service,
                           service_host_name)
            create_target_proxy(gcp, target_proxy_name)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          potential_service_ports)
            if gcp.service_port != _DEFAULT_SERVICE_PORT:
                patch_url_map_host_rule_with_port(gcp, url_map_name,
                                                  backend_service,
                                                  service_host_name)
                server_uri = service_host_name + ':' + str(gcp.service_port)
            else:
                server_uri = service_host_name
            return server_uri
def test_forwarding_rule_port_match(gcp, backend_service, instance_group):
    """Check that RPCs stop once the forwarding rule excludes the serving port.

    Recreates the global forwarding rule on every port of the default range
    except the one the backends actually serve, then verifies that no RPCs
    reach the backends any more.  The original forwarding rule is restored
    in the finally block (unless halting after a failure).
    """
    logger.info("Running test_forwarding_rule_port_match")
    passed = True
    try:
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        instances = get_instance_names(gcp, instance_group)
        wait_until_all_rpcs_go_to_given_backends(instances,
                                                 _WAIT_FOR_STATS_SEC)
        delete_global_forwarding_rule(gcp)
        # Every default-range port except the one actually being served.
        non_matching_ports = [
            port for port in parse_port_range(_DEFAULT_PORT_RANGE)
            if port != gcp.service_port
        ]
        create_global_forwarding_rule(gcp, forwarding_rule_name,
                                      non_matching_ports)
        wait_until_no_rpcs_go_to_given_backends(instances,
                                                _WAIT_FOR_STATS_SEC)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            # Restore the shared forwarding rule and report the server URI
            # clients should use from now on.
            delete_global_forwarding_rule(gcp)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          potential_service_ports)
            if gcp.service_port == _DEFAULT_SERVICE_PORT:
                server_uri = service_host_name
            else:
                patch_url_map_host_rule_with_port(gcp, url_map_name,
                                                  backend_service,
                                                  service_host_name)
                server_uri = service_host_name + ':' + str(gcp.service_port)
            return server_uri
def test_forwarding_rule_default_port(gcp, backend_service, instance_group):
    """Check client behavior when the server URI carries no explicit port.

    When the shared resources landed on the default service port, first
    confirms that a forwarding rule spanning the whole default port range
    combined with a port in the url-map host rule stops traffic.  Then, with
    fresh resources, confirms that a port-80 forwarding rule and no port in
    the url map serves traffic, and that re-adding a port to the url map
    stops it again.  All shared resources are recreated in the finally block.
    """
    logger.info("Running test_forwarding_rule_default_port")
    passed = True
    try:
        wait_for_healthy_backends(gcp, backend_service, instance_group)
        backend_instances = get_instance_names(gcp, instance_group)
        if gcp.service_port == _DEFAULT_SERVICE_PORT:
            wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                     _WAIT_FOR_STATS_SEC)
            delete_global_forwarding_rule(gcp)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          parse_port_range(_DEFAULT_PORT_RANGE))
            patch_url_map_host_rule_with_port(gcp, url_map_name,
                                              backend_service,
                                              service_host_name)
            wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                    _WAIT_FOR_STATS_SEC)
        # expect success when no port in client request service uri, and no port in url-map
        delete_global_forwarding_rule(gcp)
        delete_target_proxy(gcp)
        delete_url_map(gcp)
        create_url_map(gcp, url_map_name, backend_service, service_host_name)
        # NOTE(review): other restore paths pass the module-level
        # target_proxy_name here; confirm gcp.target_proxy is still populated
        # after delete_target_proxy() above, or this raises AttributeError.
        create_target_proxy(gcp, gcp.target_proxy.name, False)
        # Offer several random candidate IPs so forwarding-rule creation can
        # fall back when an address is already in use.
        potential_ip_addresses = []
        max_attempts = 10
        for i in range(max_attempts):
            potential_ip_addresses.append('10.10.10.%d' %
                                          (random.randint(0, 255)))
        create_global_forwarding_rule(gcp, forwarding_rule_name, [80],
                                      potential_ip_addresses)
        wait_until_all_rpcs_go_to_given_backends(backend_instances,
                                                 _WAIT_FOR_STATS_SEC)
        # expect failure when no port in client request uri, but specify port in url-map
        patch_url_map_host_rule_with_port(gcp, url_map_name, backend_service,
                                          service_host_name)
        wait_until_no_rpcs_go_to_given_backends(backend_instances,
                                                _WAIT_FOR_STATS_SEC)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            # Recreate the canonical shared resources and report the server
            # URI clients should use from now on.
            delete_global_forwarding_rule(gcp)
            delete_target_proxy(gcp)
            delete_url_map(gcp)
            create_url_map(gcp, url_map_name, backend_service,
                           service_host_name)
            create_target_proxy(gcp, target_proxy_name)
            create_global_forwarding_rule(gcp, forwarding_rule_name,
                                          potential_service_ports)
            if gcp.service_port != _DEFAULT_SERVICE_PORT:
                patch_url_map_host_rule_with_port(gcp, url_map_name,
                                                  backend_service,
                                                  service_host_name)
                server_uri = service_host_name + ':' + str(gcp.service_port)
            else:
                server_uri = service_host_name
            return server_uri
def test_traffic_splitting(gcp, original_backend_service, instance_group,
                           alternate_backend_service, same_zone_instance_group):
    """Verify weighted traffic splitting between two backend services.

    This test starts with all traffic going to original_backend_service.
    Then it updates the URL-map to set the default action to traffic
    splitting between original and alternate.  It waits for all backends in
    both services to receive traffic, then verifies that the observed
    per-instance distribution matches the configured weights.  The url map
    and the alternate backend service are restored in the finally block.
    """
    logger.info('Running test_traffic_splitting')
    original_backend_instances, alternate_backend_instances = prepare_services_for_urlmap_tests(
        gcp, original_backend_service, instance_group,
        alternate_backend_service, same_zone_instance_group)
    passed = True
    try:
        # Patch urlmap, change route action to traffic splitting between
        # original and alternate.
        logger.info('patching url map with traffic splitting')
        original_service_percentage, alternate_service_percentage = 20, 80
        patch_url_map_backend_service(
            gcp,
            services_with_weights={
                original_backend_service: original_service_percentage,
                alternate_backend_service: alternate_service_percentage,
            })
        # Split percentage between instances: [20,80] -> [10,10,40,40].
        expected_instance_percentage = [
            original_service_percentage * 1.0 / len(original_backend_instances)
        ] * len(original_backend_instances) + [
            alternate_service_percentage * 1.0 /
            len(alternate_backend_instances)
        ] * len(alternate_backend_instances)
        # Wait for traffic to go to both services.
        logger.info(
            'waiting for traffic to go to all backends (including alternate)')
        wait_until_all_rpcs_go_to_given_backends(
            original_backend_instances + alternate_backend_instances,
            _WAIT_FOR_STATS_SEC)
        # Verify that weights between two services are expected.
        retry_count = 10
        # Each attempt takes about 10 seconds, 10 retries is equivalent to 100
        # seconds timeout.
        for i in range(retry_count):
            stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
            got_instance_count = [
                stats.rpcs_by_peer[i] for i in original_backend_instances
            ] + [stats.rpcs_by_peer[i] for i in alternate_backend_instances]
            total_count = sum(got_instance_count)
            got_instance_percentage = [
                x * 100.0 / total_count for x in got_instance_count
            ]
            # compare_distributions raises on mismatch; retry until the last
            # attempt, then surface the failure.
            try:
                compare_distributions(got_instance_percentage,
                                      expected_instance_percentage, 5)
            except Exception as e:
                logger.info('attempt %d', i)
                logger.info('got percentage: %s', got_instance_percentage)
                logger.info('expected percentage: %s',
                            expected_instance_percentage)
                logger.info(e)
                if i == retry_count - 1:
                    raise Exception(
                        'RPC distribution (%s) differs from expected (%s)' %
                        (got_instance_percentage, expected_instance_percentage))
            else:
                logger.info("success")
                break
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_path_matching(gcp, original_backend_service, instance_group,
                       alternate_backend_service, same_zone_instance_group):
    """Verify url-map path matching (full path, prefix, regex, ignoreCase).

    This test starts with all traffic (UnaryCall and EmptyCall) going to
    original_backend_service.  It then updates the URL-map to add routes
    that send UnaryCall and EmptyCall to different backends, waits for all
    backends in both services to receive traffic, and verifies that traffic
    goes to the expected backends.  The url map and the alternate backend
    service are restored in the finally block.
    """
    logger.info('Running test_path_matching')
    original_backend_instances, alternate_backend_instances = prepare_services_for_urlmap_tests(
        gcp, original_backend_service, instance_group,
        alternate_backend_service, same_zone_instance_group)
    passed = True
    try:
        # A list of tuples (route_rules, expected_instances).
        test_cases = [
            (
                [{
                    'priority': 0,
                    # FullPath EmptyCall -> alternate_backend_service.
                    'matchRules': [{
                        'fullPathMatch': '/grpc.testing.TestService/EmptyCall'
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Prefix UnaryCall -> alternate_backend_service.
                    'matchRules': [{
                        'prefixMatch': '/grpc.testing.TestService/Unary'
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "UnaryCall": alternate_backend_instances,
                    "EmptyCall": original_backend_instances
                }),
            (
                # This test case is similar to the one above (but with route
                # services swapped). This test has two routes (full_path and
                # the default) to match EmptyCall, and both routes set
                # alternative_backend_service as the action. This forces the
                # client to handle duplicate Clusters in the RDS response.
                [
                    {
                        'priority': 0,
                        # Prefix UnaryCall -> original_backend_service.
                        'matchRules': [{
                            'prefixMatch': '/grpc.testing.TestService/Unary'
                        }],
                        'service': original_backend_service.url
                    },
                    {
                        'priority': 1,
                        # FullPath EmptyCall -> alternate_backend_service.
                        'matchRules': [{
                            'fullPathMatch':
                                '/grpc.testing.TestService/EmptyCall'
                        }],
                        'service': alternate_backend_service.url
                    }
                ],
                {
                    "UnaryCall": original_backend_instances,
                    "EmptyCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Regex UnaryCall -> alternate_backend_service.
                    'matchRules': [{
                        # Raw string: '\/' in a plain literal is an invalid
                        # string escape (SyntaxWarning on Python 3.12+); the
                        # raw form keeps the exact same pattern bytes.
                        'regexMatch':
                            r'^\/.*\/UnaryCall$'  # Unary methods with any services.
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "UnaryCall": alternate_backend_instances,
                    "EmptyCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # ignoreCase EmptyCall -> alternate_backend_service.
                    'matchRules': [{
                        # Case insensitive matching.
                        'fullPathMatch': '/gRpC.tEsTinG.tEstseRvice/empTycaLl',
                        'ignoreCase': True,
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "UnaryCall": original_backend_instances,
                    "EmptyCall": alternate_backend_instances
                }),
        ]
        for (route_rules, expected_instances) in test_cases:
            logger.info('patching url map with %s', route_rules)
            patch_url_map_backend_service(gcp,
                                          original_backend_service,
                                          route_rules=route_rules)
            # Wait for traffic to go to both services.
            logger.info(
                'waiting for traffic to go to all backends (including alternate)'
            )
            wait_until_all_rpcs_go_to_given_backends(
                original_backend_instances + alternate_backend_instances,
                _WAIT_FOR_STATS_SEC)
            retry_count = 80
            # Each attempt takes about 5 seconds, 80 retries is equivalent to 400
            # seconds timeout.
            for i in range(retry_count):
                stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
                if not stats.rpcs_by_method:
                    raise ValueError(
                        'stats.rpcs_by_method is None, the interop client stats service does not support this test case'
                    )
                logger.info('attempt %d', i)
                if compare_expected_instances(stats, expected_instances):
                    logger.info("success")
                    break
                elif i == retry_count - 1:
                    raise Exception(
                        'timeout waiting for RPCs to the expected instances: %s'
                        % expected_instances)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_header_matching(gcp, original_backend_service, instance_group,
                         alternate_backend_service, same_zone_instance_group):
    """Verify url-map header matching (exact/prefix/suffix/present/invert/
    range/regex).

    This test starts with all traffic (UnaryCall and EmptyCall) going to
    original_backend_service.  It then updates the URL-map to add routes
    that send RPCs carrying the test metadata to different backends, waits
    for all backends in both services to receive traffic, and verifies that
    traffic goes to the expected backends.  The url map and the alternate
    backend service are restored in the finally block.
    """
    logger.info('Running test_header_matching')
    original_backend_instances, alternate_backend_instances = prepare_services_for_urlmap_tests(
        gcp, original_backend_service, instance_group,
        alternate_backend_service, same_zone_instance_group)
    passed = True
    try:
        # A list of tuples (route_rules, expected_instances).  The client
        # sends EmptyCall with _TEST_METADATA_VALUE_EMPTY and UnaryCall with
        # _TEST_METADATA_VALUE_UNARY / _TEST_METADATA_NUMERIC_KEY, so each
        # matcher below only fires for one RPC type.
        test_cases = [
            (
                [{
                    'priority': 0,
                    # Header ExactMatch -> alternate_backend_service.
                    # EmptyCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'exactMatch': _TEST_METADATA_VALUE_EMPTY
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header PrefixMatch -> alternate_backend_service.
                    # UnaryCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'prefixMatch': _TEST_METADATA_VALUE_UNARY[:2]
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": original_backend_instances,
                    "UnaryCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header SuffixMatch -> alternate_backend_service.
                    # EmptyCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'suffixMatch': _TEST_METADATA_VALUE_EMPTY[-2:]
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header 'xds_md_numeric' present -> alternate_backend_service.
                    # UnaryCall is sent with the metadata, so will be sent to alternative.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_NUMERIC_KEY,
                            'presentMatch': True
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": original_backend_instances,
                    "UnaryCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header invert ExactMatch -> alternate_backend_service.
                    # UnaryCall is sent with the metadata, so will be sent to
                    # original. EmptyCall will be sent to alternative.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_KEY,
                            'exactMatch': _TEST_METADATA_VALUE_UNARY,
                            'invertMatch': True
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header 'xds_md_numeric' range [100,200] -> alternate_backend_service.
                    # UnaryCall is sent with the metadata in range.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName': _TEST_METADATA_NUMERIC_KEY,
                            'rangeMatch': {
                                'rangeStart': '100',
                                'rangeEnd': '200'
                            }
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": original_backend_instances,
                    "UnaryCall": alternate_backend_instances
                }),
            (
                [{
                    'priority': 0,
                    # Header RegexMatch -> alternate_backend_service.
                    # EmptyCall is sent with the metadata.
                    'matchRules': [{
                        'prefixMatch':
                            '/',
                        'headerMatches': [{
                            'headerName':
                                _TEST_METADATA_KEY,
                            'regexMatch':
                                "^%s.*%s$" % (_TEST_METADATA_VALUE_EMPTY[:2],
                                              _TEST_METADATA_VALUE_EMPTY[-2:])
                        }]
                    }],
                    'service': alternate_backend_service.url
                }],
                {
                    "EmptyCall": alternate_backend_instances,
                    "UnaryCall": original_backend_instances
                }),
        ]
        for (route_rules, expected_instances) in test_cases:
            logger.info('patching url map with %s -> alternative',
                        route_rules[0]['matchRules'])
            patch_url_map_backend_service(gcp,
                                          original_backend_service,
                                          route_rules=route_rules)
            # Wait for traffic to go to both services.
            logger.info(
                'waiting for traffic to go to all backends (including alternate)'
            )
            wait_until_all_rpcs_go_to_given_backends(
                original_backend_instances + alternate_backend_instances,
                _WAIT_FOR_STATS_SEC)
            retry_count = 80
            # Each attempt takes about 5 seconds, 80 retries is equivalent to 400
            # seconds timeout.
            for i in range(retry_count):
                stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
                if not stats.rpcs_by_method:
                    raise ValueError(
                        'stats.rpcs_by_method is None, the interop client stats service does not support this test case'
                    )
                logger.info('attempt %d', i)
                if compare_expected_instances(stats, expected_instances):
                    logger.info("success")
                    break
                elif i == retry_count - 1:
                    raise Exception(
                        'timeout waiting for RPCs to the expected instances: %s'
                        % expected_instances)
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, alternate_backend_service, [])
def test_circuit_breaking(gcp, original_backend_service, instance_group,
                          same_zone_instance_group):
    '''
    Since backend service circuit_breakers configuration cannot be unset,
    which causes trouble for restoring validate_for_proxy flag in target
    proxy/global forwarding rule. This test uses dedicated backend services.
    The url_map and backend services undergo the following state changes:

    Before test:
       original_backend_service -> [instance_group]
       extra_backend_service -> []
       more_extra_backend_service -> []

       url_map -> [original_backend_service]

    In test:
       extra_backend_service (with circuit_breakers) -> [instance_group]
       more_extra_backend_service (with circuit_breakers) -> [same_zone_instance_group]

       url_map -> [extra_backend_service, more_extra_backend_service]

    After test:
       original_backend_service -> [instance_group]
       extra_backend_service (with circuit_breakers) -> []
       more_extra_backend_service (with circuit_breakers) -> []

       url_map -> [original_backend_service]
    '''
    logger.info('Running test_circuit_breaking')
    additional_backend_services = []
    passed = True
    try:
        # TODO(chengyuanzhang): Dedicated backend services created for circuit
        # breaking test. Once the issue for unsetting backend service circuit
        # breakers is resolved or configuring backend service circuit breakers is
        # enabled for config validation, these dedicated backend services can be
        # eliminated.
        extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-extra' + gcp_suffix
        more_extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-more-extra' + gcp_suffix
        extra_backend_service = add_backend_service(gcp,
                                                    extra_backend_service_name)
        additional_backend_services.append(extra_backend_service)
        more_extra_backend_service = add_backend_service(
            gcp, more_extra_backend_service_name)
        additional_backend_services.append(more_extra_backend_service)
        # The config validation for proxyless doesn't allow setting
        # circuit_breakers. Disable validate validate_for_proxyless
        # for this test. This can be removed when validation
        # accepts circuit_breakers.
        logger.info('disabling validate_for_proxyless in target proxy')
        set_validate_for_proxyless(gcp, False)
        extra_backend_service_max_requests = 500
        more_extra_backend_service_max_requests = 1000
        patch_backend_service(gcp,
                              extra_backend_service, [instance_group],
                              circuit_breakers={
                                  'maxRequests':
                                      extra_backend_service_max_requests
                              })
        logger.info('Waiting for extra backends to become healthy')
        wait_for_healthy_backends(gcp, extra_backend_service, instance_group)
        patch_backend_service(gcp,
                              more_extra_backend_service,
                              [same_zone_instance_group],
                              circuit_breakers={
                                  'maxRequests':
                                      more_extra_backend_service_max_requests
                              })
        logger.info('Waiting for more extra backend to become healthy')
        wait_for_healthy_backends(gcp, more_extra_backend_service,
                                  same_zone_instance_group)
        extra_backend_instances = get_instance_names(gcp, instance_group)
        more_extra_backend_instances = get_instance_names(
            gcp, same_zone_instance_group)
        # Split the two RPC types across the two circuit-broken services so
        # each limit can be observed independently.
        route_rules = [
            {
                'priority': 0,
                # UnaryCall -> extra_backend_service
                'matchRules': [{
                    'fullPathMatch': '/grpc.testing.TestService/UnaryCall'
                }],
                'service': extra_backend_service.url
            },
            {
                'priority': 1,
                # EmptyCall -> more_extra_backend_service
                'matchRules': [{
                    'fullPathMatch': '/grpc.testing.TestService/EmptyCall'
                }],
                'service': more_extra_backend_service.url
            },
        ]
        # Make client send UNARY_CALL and EMPTY_CALL.
        configure_client([
            messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
            messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL
        ])
        logger.info('Patching url map with %s', route_rules)
        patch_url_map_backend_service(gcp,
                                      extra_backend_service,
                                      route_rules=route_rules)
        logger.info('Waiting for traffic to go to all backends')
        wait_until_all_rpcs_go_to_given_backends(
            extra_backend_instances + more_extra_backend_instances,
            _WAIT_FOR_STATS_SEC)
        # Make all calls keep-open so in-flight RPCs accumulate until the
        # circuit breaker limits are reached.
        configure_client([
            messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
            messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL
        ], [(messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
             'rpc-behavior', 'keep-open'),
            (messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL,
             'rpc-behavior', 'keep-open')])
        wait_until_rpcs_in_flight(
            'UNARY_CALL', (_WAIT_FOR_BACKEND_SEC +
                           int(extra_backend_service_max_requests / args.qps)),
            extra_backend_service_max_requests, 1)
        logger.info('UNARY_CALL reached stable state (%d)',
                    extra_backend_service_max_requests)
        wait_until_rpcs_in_flight(
            'EMPTY_CALL',
            (_WAIT_FOR_BACKEND_SEC +
             int(more_extra_backend_service_max_requests / args.qps)),
            more_extra_backend_service_max_requests, 1)
        logger.info('EMPTY_CALL reached stable state (%d)',
                    more_extra_backend_service_max_requests)
        # Increment circuit breakers max_requests threshold.
        extra_backend_service_max_requests = 800
        patch_backend_service(gcp,
                              extra_backend_service, [instance_group],
                              circuit_breakers={
                                  'maxRequests':
                                      extra_backend_service_max_requests
                              })
        wait_until_rpcs_in_flight(
            'UNARY_CALL', (_WAIT_FOR_BACKEND_SEC +
                           int(extra_backend_service_max_requests / args.qps)),
            extra_backend_service_max_requests, 1)
        logger.info('UNARY_CALL reached stable state after increase (%d)',
                    extra_backend_service_max_requests)
        logger.info('success')
        # Avoid new RPCs being outstanding (some test clients create threads
        # for sending RPCs) after restoring backend services.
        configure_client(
            [messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL])
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            patch_backend_service(gcp, original_backend_service,
                                  [instance_group])
            for backend_service in additional_backend_services:
                delete_backend_service(gcp, backend_service)
            set_validate_for_proxyless(gcp, True)
def test_timeout(gcp, original_backend_service, instance_group):
    """Verify url-map maxStreamDuration and application deadlines.

    Configures UnaryCall with a 3s maxStreamDuration route action, then
    drives the client through three scenarios (route timeout exceeded,
    application timeout exceeded, no timeout) and checks the accumulated
    per-status RPC counts against the expected rate.  The url map is
    restored in the finally block.
    """
    logger.info('Running test_timeout')
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)
    # UnaryCall -> maxStreamDuration:3s
    route_rules = [{
        'priority': 0,
        'matchRules': [{
            'fullPathMatch': '/grpc.testing.TestService/UnaryCall'
        }],
        'service': original_backend_service.url,
        'routeAction': {
            'maxStreamDuration': {
                'seconds': 3,
            },
        },
    }]
    patch_url_map_backend_service(gcp,
                                  original_backend_service,
                                  route_rules=route_rules)
    # A list of tuples (testcase_name, {client_config}, {expected_results}).
    # expected_results maps RPC type to the gRPC status code every RPC is
    # expected to finish with (0 = OK, 4 = DEADLINE_EXCEEDED).
    test_cases = [
        (
            'timeout_exceeded (UNARY_CALL), timeout_different_route (EMPTY_CALL)',
            # UnaryCall and EmptyCall both sleep-4.
            # UnaryCall timeouts, EmptyCall succeeds.
            {
                'rpc_types': [
                    messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                    messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL,
                ],
                'metadata': [
                    (messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                     'rpc-behavior', 'sleep-4'),
                    (messages_pb2.ClientConfigureRequest.RpcType.EMPTY_CALL,
                     'rpc-behavior', 'sleep-4'),
                ],
            },
            {
                'UNARY_CALL': 4,  # DEADLINE_EXCEEDED
                'EMPTY_CALL': 0,
            },
        ),
        (
            'app_timeout_exceeded',
            # UnaryCall only with sleep-2; timeout=1s; calls timeout.
            {
                'rpc_types': [
                    messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                ],
                'metadata': [
                    (messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                     'rpc-behavior', 'sleep-2'),
                ],
                'timeout_sec': 1,
            },
            {
                'UNARY_CALL': 4,  # DEADLINE_EXCEEDED
            },
        ),
        (
            'timeout_not_exceeded',
            # UnaryCall only with no sleep; calls succeed.
            {
                'rpc_types': [
                    messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                ],
            },
            {
                'UNARY_CALL': 0,
            },
        )
    ]
    passed = True
    try:
        # The first case gets extra attempts because the url-map patch above
        # may still be propagating when the loop starts.
        first_case = True
        for (testcase_name, client_config, expected_results) in test_cases:
            logger.info('starting case %s', testcase_name)
            configure_client(**client_config)
            # wait a second to help ensure the client stops sending RPCs with
            # the old config. We will make multiple attempts if it is failing,
            # but this improves confidence that the test is valid if the
            # previous client_config would lead to the same results.
            time.sleep(1)
            # Each attempt takes 10 seconds; 20 attempts is equivalent to 200
            # second timeout.
            attempt_count = 20
            if first_case:
                attempt_count = 120
                first_case = False
            before_stats = get_client_accumulated_stats()
            if not before_stats.stats_per_method:
                raise ValueError(
                    'stats.stats_per_method is None, the interop client stats service does not support this test case'
                )
            for i in range(attempt_count):
                logger.info('%s: attempt %d', testcase_name, i)
                test_runtime_secs = 10
                time.sleep(test_runtime_secs)
                after_stats = get_client_accumulated_stats()
                success = True
                for rpc, status in list(expected_results.items()):
                    # RPCs that finished with `status` during this attempt.
                    qty = (after_stats.stats_per_method[rpc].result[status] -
                           before_stats.stats_per_method[rpc].result[status])
                    want = test_runtime_secs * args.qps
                    # Allow 10% deviation from expectation to reduce flakiness
                    if qty < (want * .9) or qty > (want * 1.1):
                        logger.info('%s: failed due to %s[%s]: got %d want ~%d',
                                    testcase_name, rpc, status, qty, want)
                        success = False
                if success:
                    logger.info('success')
                    break
                logger.info('%s attempt %d failed', testcase_name, i)
                before_stats = after_stats
            else:
                # for/else: every attempt was exhausted without a break.
                raise Exception(
                    '%s: timeout waiting for expected results: %s; got %s' %
                    (testcase_name, expected_results,
                     after_stats.stats_per_method))
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
def test_fault_injection(gcp, original_backend_service, instance_group):
    """Verify url-map faultInjectionPolicy (abort and delay).

    Installs per-test-case routes keyed on a request header, then drives the
    client through abort/delay scenarios at 0%, 50%, and 100% and checks the
    accumulated per-status RPC counts against the expected proportions.  The
    url map and proxyless validation flag are restored in the finally block.
    """
    logger.info('Running test_fault_injection')
    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)

    testcase_header = 'fi_testcase'

    def _route(pri, name, fi_policy):
        # Route requests whose testcase_header equals `name` through fi_policy.
        return {
            'priority': pri,
            'matchRules': [{
                'prefixMatch':
                    '/',
                'headerMatches': [{
                    'headerName': testcase_header,
                    'exactMatch': name,
                }],
            }],
            'service': original_backend_service.url,
            'routeAction': {
                'faultInjectionPolicy': fi_policy
            },
        }

    def _abort(pct):
        # Abort pct% of requests with HTTP 401 (maps to gRPC UNAUTHENTICATED).
        return {
            'abort': {
                'httpStatus': 401,
                'percentage': pct,
            }
        }

    def _delay(pct):
        # Delay pct% of requests by a fixed 20 seconds.
        return {
            'delay': {
                'fixedDelay': {
                    'seconds': '20'
                },
                'percentage': pct,
            }
        }

    zero_route = _abort(0)
    zero_route.update(_delay(0))
    route_rules = [
        _route(0, 'zero_percent_fault_injection', zero_route),
        _route(1, 'always_delay', _delay(100)),
        _route(2, 'always_abort', _abort(100)),
        _route(3, 'delay_half', _delay(50)),
        _route(4, 'abort_half', _abort(50)),
        {
            'priority': 5,
            'matchRules': [{
                'prefixMatch': '/'
            }],
            'service': original_backend_service.url,
        },
    ]
    # NOTE(review): presumably disabled because proxyless config validation
    # rejects faultInjectionPolicy (mirrors the circuit-breakers case in
    # test_circuit_breaking) — confirm.
    set_validate_for_proxyless(gcp, False)
    patch_url_map_backend_service(gcp,
                                  original_backend_service,
                                  route_rules=route_rules)
    # A list of tuples (testcase_name, {client_config}, {code: percent}). Each
    # test case will set the testcase_header with the testcase_name for routing
    # to the appropriate config for the case, defined above.
    test_cases = [
        (
            'zero_percent_fault_injection',
            {},
            {
                0: 1
            },  # OK
        ),
        (
            'non_matching_fault_injection',  # Not in route_rules, above.
            {},
            {
                0: 1
            },  # OK
        ),
        (
            'always_delay',
            {
                'timeout_sec': 2
            },
            {
                4: 1
            },  # DEADLINE_EXCEEDED
        ),
        (
            'always_abort',
            {},
            {
                16: 1
            },  # UNAUTHENTICATED
        ),
        (
            'delay_half',
            {
                'timeout_sec': 2
            },
            {
                4: .5,
                0: .5
            },  # DEADLINE_EXCEEDED / OK: 50% / 50%
        ),
        (
            'abort_half',
            {},
            {
                16: .5,
                0: .5
            },  # UNAUTHENTICATED / OK: 50% / 50%
        )
    ]
    passed = True
    try:
        # The first case gets extra attempts because the url-map patch above
        # may still be propagating when the loop starts.
        first_case = True
        for (testcase_name, client_config, expected_results) in test_cases:
            logger.info('starting case %s', testcase_name)

            client_config['metadata'] = [
                (messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
                 testcase_header, testcase_name)
            ]
            client_config['rpc_types'] = [
                messages_pb2.ClientConfigureRequest.RpcType.UNARY_CALL,
            ]
            configure_client(**client_config)
            # wait a second to help ensure the client stops sending RPCs with
            # the old config. We will make multiple attempts if it is failing,
            # but this improves confidence that the test is valid if the
            # previous client_config would lead to the same results.
            time.sleep(1)
            # Each attempt takes 10 seconds; 20 attempts is equivalent to 200
            # second timeout.
            attempt_count = 20
            if first_case:
                attempt_count = 120
                first_case = False
            before_stats = get_client_accumulated_stats()
            if not before_stats.stats_per_method:
                raise ValueError(
                    'stats.stats_per_method is None, the interop client stats service does not support this test case'
                )
            for i in range(attempt_count):
                logger.info('%s: attempt %d', testcase_name, i)
                test_runtime_secs = 10
                time.sleep(test_runtime_secs)
                after_stats = get_client_accumulated_stats()
                success = True
                for status, pct in list(expected_results.items()):
                    rpc = 'UNARY_CALL'
                    # RPCs that finished with `status` during this attempt.
                    qty = (after_stats.stats_per_method[rpc].result[status] -
                           before_stats.stats_per_method[rpc].result[status])
                    want = pct * args.qps * test_runtime_secs
                    # Allow 10% deviation from expectation to reduce flakiness
                    VARIANCE_ALLOWED = 0.1
                    if abs(qty - want) > want * VARIANCE_ALLOWED:
                        logger.info('%s: failed due to %s[%s]: got %d want ~%d',
                                    testcase_name, rpc, status, qty, want)
                        success = False
                if success:
                    logger.info('success')
                    break
                logger.info('%s attempt %d failed', testcase_name, i)
                before_stats = after_stats
            else:
                # for/else: every attempt was exhausted without a break.
                raise Exception(
                    '%s: timeout waiting for expected results: %s; got %s' %
                    (testcase_name, expected_results,
                     after_stats.stats_per_method))
    except Exception:
        passed = False
        raise
    finally:
        if passed or not args.halt_after_fail:
            patch_url_map_backend_service(gcp, original_backend_service)
            set_validate_for_proxyless(gcp, True)
def test_csds(gcp, original_backend_service, instance_group, server_uri):
    """Validate the client's CSDS xDS config dump.

    Polls get_client_xds_config_dump() for up to 5 minutes. A dump passes
    when the node's locality zone matches --zone and the dump contains one
    valid resource of each xDS type:
      - LDS: listener name equals server_uri
      - RDS: at least one virtual host in the route config
      - CDS: cluster type is EDS
      - EDS: endpoint sub_zone contains --zone

    Raises:
        RuntimeError: if no valid config dump is observed before timeout.
    """
    test_csds_timeout_s = datetime.timedelta(minutes=5).total_seconds()
    sleep_interval_between_attempts_s = datetime.timedelta(
        seconds=2).total_seconds()
    logger.info('Running test_csds')

    logger.info('waiting for original backends to become healthy')
    wait_for_healthy_backends(gcp, original_backend_service, instance_group)

    # Test case timeout: 5 minutes
    deadline = time.time() + test_csds_timeout_s
    cnt = 0
    while time.time() <= deadline:
        client_config = get_client_xds_config_dump()
        logger.info('test_csds attempt %d: received xDS config %s', cnt,
                    json.dumps(client_config, indent=2))
        if client_config is not None:
            # Got the xDS config dump, now validate it
            ok = True
            try:
                if client_config['node']['locality']['zone'] != args.zone:
                    logger.info('Invalid zone %s != %s',
                                client_config['node']['locality']['zone'],
                                args.zone)
                    ok = False
                seen = set()
                for xds_config in client_config['xds_config']:
                    if 'listener_config' in xds_config:
                        listener_name = xds_config['listener_config'][
                            'dynamic_listeners'][0]['active_state']['listener'][
                                'name']
                        if listener_name != server_uri:
                            logger.info('Invalid Listener name %s != %s',
                                        listener_name, server_uri)
                            ok = False
                        else:
                            seen.add('lds')
                    elif 'route_config' in xds_config:
                        num_vh = len(
                            xds_config['route_config']['dynamic_route_configs']
                            [0]['route_config']['virtual_hosts'])
                        if num_vh <= 0:
                            logger.info('Invalid number of VirtualHosts %s',
                                        num_vh)
                            ok = False
                        else:
                            seen.add('rds')
                    elif 'cluster_config' in xds_config:
                        cluster_type = xds_config['cluster_config'][
                            'dynamic_active_clusters'][0]['cluster']['type']
                        if cluster_type != 'EDS':
                            logger.info('Invalid cluster type %s != EDS',
                                        cluster_type)
                            ok = False
                        else:
                            seen.add('cds')
                    elif 'endpoint_config' in xds_config:
                        sub_zone = xds_config["endpoint_config"][
                            "dynamic_endpoint_configs"][0]["endpoint_config"][
                                "endpoints"][0]["locality"]["sub_zone"]
                        if args.zone not in sub_zone:
                            logger.info('Invalid endpoint sub_zone %s',
                                        sub_zone)
                            ok = False
                        else:
                            seen.add('eds')
                want = {'lds', 'rds', 'cds', 'eds'}
                if seen != want:
                    logger.info('Incomplete xDS config dump, seen=%s', seen)
                    ok = False
            except Exception:
                # Fix: this was a bare `except:`, which also swallows
                # SystemExit/KeyboardInterrupt and made the poll loop
                # un-interruptible. Malformed dumps (missing keys, wrong
                # shapes) still count as a failed attempt and are retried.
                logger.exception('Error in xDS config dump:')
                ok = False
            finally:
                if ok:
                    # Successfully fetched xDS config, and they looks good.
                    logger.info('success')
                    return
        logger.info('test_csds attempt %d failed', cnt)
        # Give the client some time to fetch xDS resources
        time.sleep(sleep_interval_between_attempts_s)
        cnt += 1

    raise RuntimeError('failed to receive a valid xDS config in %s seconds' %
                       test_csds_timeout_s)
def set_validate_for_proxyless(gcp, validate_for_proxyless):
    """Recreate the target proxy with the given validateForProxyless value.

    No-op (with a debug log) when alpha compute APIs are unavailable, since
    validateForProxyless is only set on the alpha targetGrpcProxies path.
    """
    if not gcp.alpha_compute:
        logger.debug(
            'Not setting validateForProxy because alpha is not enabled')
        return
    # This function deletes global_forwarding_rule and target_proxy, then
    # recreate target_proxy with validateForProxyless=False. This is necessary
    # because patching target_grpc_proxy isn't supported.
    # Deletion order matters: the forwarding rule references the proxy.
    delete_global_forwarding_rule(gcp)
    delete_target_proxy(gcp)
    create_target_proxy(gcp, gcp.target_proxy.name, validate_for_proxyless)
    create_global_forwarding_rule(gcp, gcp.global_forwarding_rule.name,
                                  [gcp.service_port])
def get_serving_status(instance, service_port):
    """Return the gRPC health-check response from instance:service_port."""
    target = '%s:%d' % (instance, service_port)
    with grpc.insecure_channel(target) as channel:
        stub = health_pb2_grpc.HealthStub(channel)
        return stub.Check(health_pb2.HealthCheckRequest())
def set_serving_status(instances, service_port, serving):
    """Toggle serving status on each instance's XdsUpdateHealthService.

    For every instance, calls SetServing/SetNotServing and then verifies the
    new state via the health service, retrying up to 5 times per instance.

    Raises:
        Exception: if an instance's status does not converge after the
            retries.

    Fix: the loop previously iterated `range(5)` while the exhaustion check
    used `retry_count - 1`; it now iterates `range(retry_count)` so the two
    cannot drift apart.
    """
    logger.info('setting %s serving status to %s', instances, serving)
    # The desired health status is the same for every instance; hoist it.
    want_status = (health_pb2.HealthCheckResponse.SERVING
                   if serving else health_pb2.HealthCheckResponse.NOT_SERVING)
    for instance in instances:
        with grpc.insecure_channel('%s:%d' %
                                   (instance, service_port)) as channel:
            logger.info('setting %s serving status to %s', instance, serving)
            stub = test_pb2_grpc.XdsUpdateHealthServiceStub(channel)
            retry_count = 5
            for i in range(retry_count):
                if serving:
                    stub.SetServing(empty_pb2.Empty())
                else:
                    stub.SetNotServing(empty_pb2.Empty())
                serving_status = get_serving_status(instance, service_port)
                logger.info('got instance service status %s', serving_status)
                if serving_status.status == want_status:
                    break
                if i == retry_count - 1:
                    raise Exception(
                        'failed to set instance service status after %d retries'
                        % retry_count)
def is_primary_instance_group(gcp, instance_group):
    """Return True if every RPC peer reported by the client is a member of
    instance_group."""
    # Clients may connect to a TD instance in a different region than the
    # client, in which case primary/secondary assignments may not be based on
    # the client's actual locality.
    members = set(get_instance_names(gcp, instance_group))
    stats = get_client_stats(_NUM_TEST_RPCS, _WAIT_FOR_STATS_SEC)
    return all(peer in members for peer in stats.rpcs_by_peer)
def get_startup_script(path_to_server_binary, service_port):
    """Return the VM startup script.

    With a prebuilt binary path, the script just launches it in the
    background; otherwise it builds and runs the grpc-java xDS interop
    server from source.
    """
    if not path_to_server_binary:
        # No prebuilt binary: build the Java interop server on first boot.
        return """#!/bin/bash
sudo apt update
sudo apt install -y git default-jdk
mkdir java_server
pushd java_server
git clone https://github.com/grpc/grpc-java.git
pushd grpc-java
pushd interop-testing
../gradlew installDist -x test -PskipCodegen=true -PskipAndroid=true

nohup build/install/grpc-interop-testing/bin/xds-test-server \
  --port=%d 1>/dev/null &""" % service_port
    return 'nohup %s --port=%d 1>/dev/null &' % (path_to_server_binary,
                                                 service_port)
def create_instance_template(gcp, name, network, source_image, machine_type,
                             startup_script):
    """Create a GCE instance template and record it on gcp.instance_template."""
    # Assemble the template properties separately from the top-level body.
    properties = {
        'tags': {
            'items': ['allow-health-checks']
        },
        'machineType': machine_type,
        'serviceAccounts': [{
            'email': 'default',
            'scopes': ['https://www.googleapis.com/auth/cloud-platform',]
        }],
        'networkInterfaces': [{
            'accessConfigs': [{
                'type': 'ONE_TO_ONE_NAT'
            }],
            'network': network
        }],
        'disks': [{
            'boot': True,
            'initializeParams': {
                'sourceImage': source_image
            },
            'autoDelete': True
        }],
        'metadata': {
            'items': [{
                'key': 'startup-script',
                'value': startup_script
            }]
        }
    }
    config = {'name': name, 'properties': properties}
    logger.debug('Sending GCP request with body=%s', config)
    op = gcp.compute.instanceTemplates().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
    gcp.instance_template = GcpResource(name, op['targetLink'])
def add_instance_group(gcp, zone, name, size):
    """Create a managed instance group from gcp.instance_template, wait for
    it to reach the requested size, and return its InstanceGroup record."""
    config = {
        'name': name,
        'instanceTemplate': gcp.instance_template.url,
        'targetSize': size,
        'namedPorts': [{
            'name': 'grpc',
            'port': gcp.service_port
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    create_op = gcp.compute.instanceGroupManagers().insert(
        project=gcp.project, zone=zone,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_zone_operation(gcp, zone, create_op['name'])
    # Fetch the manager back to learn the URL of the instance group it owns.
    manager = gcp.compute.instanceGroupManagers().get(
        project=gcp.project, zone=zone,
        instanceGroupManager=name).execute(num_retries=_GCP_API_RETRIES)
    instance_group = InstanceGroup(name, manager['instanceGroup'], zone)
    gcp.instance_groups.append(instance_group)
    wait_for_instance_group_to_reach_expected_size(gcp, instance_group, size,
                                                   _WAIT_FOR_OPERATION_SEC)
    return instance_group
def create_health_check(gcp, name):
    """Create a health check and record it on gcp.health_check.

    Uses a gRPC health check when alpha compute APIs are available, a plain
    TCP check on the 'grpc' named port otherwise.
    """
    if gcp.alpha_compute:
        compute_to_use = gcp.alpha_compute
        check_spec = {
            'type': 'GRPC',
            'grpcHealthCheck': {
                'portSpecification': 'USE_SERVING_PORT'
            }
        }
    else:
        compute_to_use = gcp.compute
        check_spec = {
            'type': 'TCP',
            'tcpHealthCheck': {
                'portName': 'grpc'
            }
        }
    config = {'name': name}
    config.update(check_spec)
    logger.debug('Sending GCP request with body=%s', config)
    op = compute_to_use.healthChecks().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
    gcp.health_check = GcpResource(name, op['targetLink'])
def create_health_check_firewall_rule(gcp, name):
    """Create an ingress firewall rule admitting health-check probes to VMs
    tagged 'allow-health-checks'; record it on gcp.health_check_firewall_rule."""
    config = {
        'name': name,
        'direction': 'INGRESS',
        'allowed': [{
            'IPProtocol': 'tcp'
        }],
        # 35.191.0.0/16 and 130.211.0.0/22 are GCP's health-checking
        # source ranges.
        'sourceRanges': ['35.191.0.0/16', '130.211.0.0/22'],
        'targetTags': ['allow-health-checks'],
    }
    logger.debug('Sending GCP request with body=%s', config)
    op = gcp.compute.firewalls().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
    gcp.health_check_firewall_rule = GcpResource(name, op['targetLink'])
def add_backend_service(gcp, name):
    """Create an INTERNAL_SELF_MANAGED backend service, append it to
    gcp.backend_services, and return its GcpResource."""
    use_alpha = bool(gcp.alpha_compute)
    compute_to_use = gcp.alpha_compute if use_alpha else gcp.compute
    # GRPC protocol requires the alpha API surface; fall back to HTTP2.
    protocol = 'GRPC' if use_alpha else 'HTTP2'
    config = {
        'name': name,
        'loadBalancingScheme': 'INTERNAL_SELF_MANAGED',
        'healthChecks': [gcp.health_check.url],
        'portName': 'grpc',
        'protocol': protocol
    }
    logger.debug('Sending GCP request with body=%s', config)
    op = compute_to_use.backendServices().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
    backend_service = GcpResource(name, op['targetLink'])
    gcp.backend_services.append(backend_service)
    return backend_service
def create_url_map(gcp, name, backend_service, host_name):
    """Create a url map routing host_name to backend_service and record it
    on gcp.url_map."""
    path_matcher = {
        'name': _PATH_MATCHER_NAME,
        'defaultService': backend_service.url,
    }
    host_rule = {'hosts': [host_name], 'pathMatcher': _PATH_MATCHER_NAME}
    config = {
        'name': name,
        'defaultService': backend_service.url,
        'pathMatchers': [path_matcher],
        'hostRules': [host_rule]
    }
    logger.debug('Sending GCP request with body=%s', config)
    op = gcp.compute.urlMaps().insert(
        project=gcp.project, body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
    gcp.url_map = GcpResource(name, op['targetLink'])
def patch_url_map_host_rule_with_port(gcp, name, backend_service, host_name):
    """Patch the url map's host rule to match host_name:gcp.service_port.

    backend_service is unused here; it is kept to preserve the signature
    callers already use.
    """
    host_with_port = '%s:%d' % (host_name, gcp.service_port)
    config = {
        'hostRules': [{
            'hosts': [host_with_port],
            'pathMatcher': _PATH_MATCHER_NAME
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    op = gcp.compute.urlMaps().patch(
        project=gcp.project, urlMap=name,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
def create_target_proxy(gcp, name, validate_for_proxyless=True):
    """Create the target proxy and record it on gcp.target_proxy.

    Uses a target gRPC proxy (with validate_for_proxyless) when alpha APIs
    are available; otherwise a target HTTP proxy, where the flag is ignored.
    """
    if gcp.alpha_compute:
        config = {
            'name': name,
            'url_map': gcp.url_map.url,
            'validate_for_proxyless': validate_for_proxyless
        }
        logger.debug('Sending GCP request with body=%s', config)
        op = gcp.alpha_compute.targetGrpcProxies().insert(
            project=gcp.project,
            body=config).execute(num_retries=_GCP_API_RETRIES)
    else:
        # validate_for_proxyless is an alpha-only knob; not sent here.
        config = {
            'name': name,
            'url_map': gcp.url_map.url,
        }
        logger.debug('Sending GCP request with body=%s', config)
        op = gcp.compute.targetHttpProxies().insert(
            project=gcp.project,
            body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, op['name'])
    gcp.target_proxy = GcpResource(name, op['targetLink'])
def create_global_forwarding_rule(gcp,
                                  name,
                                  potential_ports,
                                  potential_ip_addresses=('0.0.0.0',)):
    """Create a global forwarding rule, trying port/IP combinations in order.

    Tries each (port, ip) pair until an insert succeeds, then records the
    rule on gcp.global_forwarding_rule and the chosen port on
    gcp.service_port. If every combination fails with an HttpError the
    function returns silently with gcp.service_port unset; callers check
    gcp.service_port afterwards.

    Fixes: the default for potential_ip_addresses was a mutable list (a
    classic Python pitfall) and is now an equivalent tuple; logger.warning
    formatted its message eagerly with % and now passes lazy arguments like
    the rest of the file.
    """
    if gcp.alpha_compute:
        compute_to_use = gcp.alpha_compute
    else:
        compute_to_use = gcp.compute
    for port in potential_ports:
        for ip_address in potential_ip_addresses:
            try:
                config = {
                    'name': name,
                    'loadBalancingScheme': 'INTERNAL_SELF_MANAGED',
                    'portRange': str(port),
                    'IPAddress': ip_address,
                    'network': args.network,
                    'target': gcp.target_proxy.url,
                }
                logger.debug('Sending GCP request with body=%s', config)
                result = compute_to_use.globalForwardingRules().insert(
                    project=gcp.project,
                    body=config).execute(num_retries=_GCP_API_RETRIES)
                wait_for_global_operation(gcp, result['name'])
                gcp.global_forwarding_rule = GcpResource(
                    config['name'], result['targetLink'])
                gcp.service_port = port
                return
            except googleapiclient.errors.HttpError as http_error:
                logger.warning(
                    'Got error %s when attempting to create forwarding rule to '
                    '%s:%d. Retrying with another port.', http_error,
                    ip_address, port)
def get_health_check(gcp, health_check_name):
    """Look up an existing health check; on failure record the error on
    gcp.errors and store a GcpResource with a None url."""
    url = None
    try:
        resp = gcp.compute.healthChecks().get(
            project=gcp.project, healthCheck=health_check_name).execute()
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.health_check = GcpResource(health_check_name, url)
def get_health_check_firewall_rule(gcp, firewall_name):
    """Look up an existing firewall rule; on failure record the error on
    gcp.errors and store a GcpResource with a None url."""
    url = None
    try:
        resp = gcp.compute.firewalls().get(project=gcp.project,
                                           firewall=firewall_name).execute()
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.health_check_firewall_rule = GcpResource(firewall_name, url)
def get_backend_service(gcp, backend_service_name, record_error=True):
    """Look up an existing backend service, append it to gcp.backend_services
    and return it.

    On lookup failure the returned GcpResource has a None url, and the error
    is appended to gcp.errors only when record_error is True.
    """
    url = None
    try:
        resp = gcp.compute.backendServices().get(
            project=gcp.project, backendService=backend_service_name).execute()
        url = resp['selfLink']
    except Exception as e:
        if record_error:
            gcp.errors.append(e)
    backend_service = GcpResource(backend_service_name, url)
    gcp.backend_services.append(backend_service)
    return backend_service
def get_url_map(gcp, url_map_name):
    """Look up an existing url map; on failure record the error on
    gcp.errors and store a GcpResource with a None url."""
    url = None
    try:
        resp = gcp.compute.urlMaps().get(project=gcp.project,
                                         urlMap=url_map_name).execute()
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.url_map = GcpResource(url_map_name, url)
def get_target_proxy(gcp, target_proxy_name):
    """Look up the existing target proxy (gRPC flavor on alpha, HTTP
    otherwise); on failure record the error and store a None url."""
    url = None
    try:
        if gcp.alpha_compute:
            resp = gcp.alpha_compute.targetGrpcProxies().get(
                project=gcp.project,
                targetGrpcProxy=target_proxy_name).execute()
        else:
            resp = gcp.compute.targetHttpProxies().get(
                project=gcp.project,
                targetHttpProxy=target_proxy_name).execute()
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.target_proxy = GcpResource(target_proxy_name, url)
def get_global_forwarding_rule(gcp, forwarding_rule_name):
    """Look up an existing global forwarding rule; on failure record the
    error on gcp.errors and store a GcpResource with a None url."""
    url = None
    try:
        resp = gcp.compute.globalForwardingRules().get(
            project=gcp.project, forwardingRule=forwarding_rule_name).execute()
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.global_forwarding_rule = GcpResource(forwarding_rule_name, url)
def get_instance_template(gcp, template_name):
    """Look up an existing instance template; on failure record the error on
    gcp.errors and store a GcpResource with a None url."""
    url = None
    try:
        resp = gcp.compute.instanceTemplates().get(
            project=gcp.project, instanceTemplate=template_name).execute()
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    gcp.instance_template = GcpResource(template_name, url)
def get_instance_group(gcp, zone, instance_group_name):
    """Look up an existing instance group, append it to gcp.instance_groups
    and return it.

    On success also records the group's first named port on gcp.service_port.
    On failure the error goes to gcp.errors and the returned record has a
    None url.
    """
    url = None
    try:
        resp = gcp.compute.instanceGroups().get(
            project=gcp.project, zone=zone,
            instanceGroup=instance_group_name).execute()
        gcp.service_port = resp['namedPorts'][0]['port']
        url = resp['selfLink']
    except Exception as e:
        gcp.errors.append(e)
    instance_group = InstanceGroup(instance_group_name, url, zone)
    gcp.instance_groups.append(instance_group)
    return instance_group
def delete_global_forwarding_rule(gcp, name=None):
    """Delete a global forwarding rule (by name, or the one recorded on
    gcp); failures are logged and swallowed — cleanup is best-effort."""
    rule = name or gcp.global_forwarding_rule.name
    try:
        logger.debug('Deleting forwarding rule %s', rule)
        op = gcp.compute.globalForwardingRules().delete(
            project=gcp.project,
            forwardingRule=rule).execute(num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_target_proxy(gcp, name=None):
    """Delete a target proxy (by name, or the one recorded on gcp), using
    the gRPC flavor when alpha APIs are available; failures are logged and
    swallowed."""
    proxy = name or gcp.target_proxy.name
    try:
        if gcp.alpha_compute:
            logger.debug('Deleting grpc proxy %s', proxy)
            op = gcp.alpha_compute.targetGrpcProxies().delete(
                project=gcp.project, targetGrpcProxy=proxy).execute(
                    num_retries=_GCP_API_RETRIES)
        else:
            logger.debug('Deleting http proxy %s', proxy)
            op = gcp.compute.targetHttpProxies().delete(
                project=gcp.project, targetHttpProxy=proxy).execute(
                    num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_url_map(gcp, name=None):
    """Delete a url map (by name, or the one recorded on gcp); failures are
    logged and swallowed."""
    url_map = name or gcp.url_map.name
    try:
        logger.debug('Deleting url map %s', url_map)
        op = gcp.compute.urlMaps().delete(
            project=gcp.project,
            urlMap=url_map).execute(num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_backend_service(gcp, backend_service):
    """Delete one backend service; failures are logged and swallowed."""
    try:
        logger.debug('Deleting backend service %s', backend_service.name)
        op = gcp.compute.backendServices().delete(
            project=gcp.project, backendService=backend_service.name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_backend_services(gcp):
    """Delete every backend service recorded on gcp."""
    for service in gcp.backend_services:
        delete_backend_service(gcp, service)
def delete_firewall(gcp):
    """Delete the health-check firewall rule recorded on gcp; failures are
    logged and swallowed."""
    rule_name = gcp.health_check_firewall_rule.name
    try:
        logger.debug('Deleting firewall %s', rule_name)
        op = gcp.compute.firewalls().delete(
            project=gcp.project,
            firewall=rule_name).execute(num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_health_check(gcp):
    """Delete the health check recorded on gcp; failures are logged and
    swallowed."""
    try:
        logger.debug('Deleting health check %s', gcp.health_check.name)
        op = gcp.compute.healthChecks().delete(
            project=gcp.project, healthCheck=gcp.health_check.name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def delete_instance_groups(gcp):
    """Delete every managed instance group recorded on gcp; per-group
    failures are logged and swallowed."""
    for group in gcp.instance_groups:
        try:
            logger.debug('Deleting instance group %s %s', group.name,
                         group.zone)
            op = gcp.compute.instanceGroupManagers().delete(
                project=gcp.project,
                zone=group.zone,
                instanceGroupManager=group.name).execute(
                    num_retries=_GCP_API_RETRIES)
            # Group deletion tears down VMs; allow the longer backend timeout.
            wait_for_zone_operation(gcp,
                                    group.zone,
                                    op['name'],
                                    timeout_sec=_WAIT_FOR_BACKEND_SEC)
        except googleapiclient.errors.HttpError as http_error:
            logger.info('Delete failed: %s', http_error)
def delete_instance_template(gcp):
    """Delete the instance template recorded on gcp; failures are logged and
    swallowed."""
    template_name = gcp.instance_template.name
    try:
        logger.debug('Deleting instance template %s', template_name)
        op = gcp.compute.instanceTemplates().delete(
            project=gcp.project,
            instanceTemplate=template_name).execute(
                num_retries=_GCP_API_RETRIES)
        wait_for_global_operation(gcp, op['name'])
    except googleapiclient.errors.HttpError as http_error:
        logger.info('Delete failed: %s', http_error)
def patch_backend_service(gcp,
                          backend_service,
                          instance_groups,
                          balancing_mode='UTILIZATION',
                          max_rate=1,
                          circuit_breakers=None):
    """Patch a backend service to point at instance_groups.

    maxRate is only meaningful for RATE balancing mode and is sent as None
    otherwise. Waits for the patch operation with the backend timeout.
    """
    compute_to_use = gcp.alpha_compute if gcp.alpha_compute else gcp.compute
    backends = []
    for group in instance_groups:
        backends.append({
            'group': group.url,
            'balancingMode': balancing_mode,
            'maxRate': max_rate if balancing_mode == 'RATE' else None
        })
    config = {
        'backends': backends,
        'circuitBreakers': circuit_breakers,
    }
    logger.debug('Sending GCP request with body=%s', config)
    op = compute_to_use.backendServices().patch(
        project=gcp.project, backendService=backend_service.name,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp,
                              op['name'],
                              timeout_sec=_WAIT_FOR_BACKEND_SEC)
def resize_instance_group(gcp,
                          instance_group,
                          new_size,
                          timeout_sec=_WAIT_FOR_OPERATION_SEC):
    """Resize a managed instance group and wait until it reaches new_size.

    Args:
        gcp: GcpState with the compute client and project.
        instance_group: InstanceGroup to resize.
        new_size: target number of instances.
        timeout_sec: timeout for the group to reach new_size (the resize
            operation itself waits with a fixed 360s, see below).

    Raises:
        Exception: if the zone operation or the size convergence times out.
    """
    result = gcp.compute.instanceGroupManagers().resize(
        project=gcp.project,
        zone=instance_group.zone,
        instanceGroupManager=instance_group.name,
        size=new_size).execute(num_retries=_GCP_API_RETRIES)
    # NOTE(review): this wait hard-codes 360s instead of using timeout_sec —
    # presumably extra headroom for slow resizes, but worth confirming.
    wait_for_zone_operation(gcp,
                            instance_group.zone,
                            result['name'],
                            timeout_sec=360)
    wait_for_instance_group_to_reach_expected_size(gcp, instance_group,
                                                   new_size, timeout_sec)
def patch_url_map_backend_service(gcp,
                                  backend_service=None,
                                  services_with_weights=None,
                                  route_rules=None):
    '''Change the url map's path matcher to point at new backend service(s).

    At most one of backend_service (single default service) and
    services_with_weights (dict of service -> weight for a weighted traffic
    split) may be non-None; passing both raises ValueError. route_rules, if
    given, is installed as the path matcher's routeRules.

    Fix: the ValueError message said 'service_with_weights', which does not
    match the actual parameter name 'services_with_weights'.
    '''
    if gcp.alpha_compute:
        compute_to_use = gcp.alpha_compute
    else:
        compute_to_use = gcp.compute

    if backend_service and services_with_weights:
        raise ValueError(
            'both backend_service and services_with_weights are not None.')

    default_service = backend_service.url if backend_service else None
    default_route_action = {
        'weightedBackendServices': [{
            'backendService': service.url,
            'weight': w,
        } for service, w in list(services_with_weights.items())]
    } if services_with_weights else None

    config = {
        'pathMatchers': [{
            'name': _PATH_MATCHER_NAME,
            'defaultService': default_service,
            'defaultRouteAction': default_route_action,
            'routeRules': route_rules,
        }]
    }
    logger.debug('Sending GCP request with body=%s', config)
    result = compute_to_use.urlMaps().patch(
        project=gcp.project, urlMap=gcp.url_map.name,
        body=config).execute(num_retries=_GCP_API_RETRIES)
    wait_for_global_operation(gcp, result['name'])
def wait_for_instance_group_to_reach_expected_size(gcp, instance_group,
                                                   expected_size, timeout_sec):
    """Poll (every 2s) until instance_group has exactly expected_size
    instances; raise Exception on timeout."""
    deadline = time.time() + timeout_sec
    while True:
        actual_size = len(get_instance_names(gcp, instance_group))
        if actual_size == expected_size:
            return
        if time.time() > deadline:
            raise Exception(
                'Instance group had expected size %d but actual size %d' %
                (expected_size, actual_size))
        time.sleep(2)
def wait_for_global_operation(gcp,
                              operation,
                              timeout_sec=_WAIT_FOR_OPERATION_SEC):
    """Poll a global compute operation every 2s until DONE.

    Raises Exception with the operation's error payload if it finished with
    an error, or with a timeout message if it never reached DONE.
    """
    deadline = time.time() + timeout_sec
    while time.time() <= deadline:
        status = gcp.compute.globalOperations().get(
            project=gcp.project,
            operation=operation).execute(num_retries=_GCP_API_RETRIES)
        if status['status'] == 'DONE':
            if 'error' in status:
                raise Exception(status['error'])
            return
        time.sleep(2)
    raise Exception('Operation %s did not complete within %d' %
                    (operation, timeout_sec))
def wait_for_zone_operation(gcp,
                            zone,
                            operation,
                            timeout_sec=_WAIT_FOR_OPERATION_SEC):
    """Poll a zonal compute operation every 2s until DONE.

    Raises Exception with the operation's error payload if it finished with
    an error, or with a timeout message if it never reached DONE.
    """
    deadline = time.time() + timeout_sec
    while time.time() <= deadline:
        status = gcp.compute.zoneOperations().get(
            project=gcp.project, zone=zone,
            operation=operation).execute(num_retries=_GCP_API_RETRIES)
        if status['status'] == 'DONE':
            if 'error' in status:
                raise Exception(status['error'])
            return
        time.sleep(2)
    raise Exception('Operation %s did not complete within %d' %
                    (operation, timeout_sec))
def wait_for_healthy_backends(gcp,
                              backend_service,
                              instance_group,
                              timeout_sec=_WAIT_FOR_BACKEND_SEC):
    """Poll (every 5s) until GCP reports every instance in instance_group
    HEALTHY for backend_service.

    Returns once all instances are HEALTHY and the health report covers the
    full group. Raises Exception with the last getHealth response on timeout.
    """
    start_time = time.time()
    config = {'group': instance_group.url}
    instance_names = get_instance_names(gcp, instance_group)
    expected_size = len(instance_names)
    while time.time() - start_time <= timeout_sec:
        # Diagnostic-only pass: probe each instance's gRPC health service
        # and log the result; RPC failures are logged, not fatal.
        for instance_name in instance_names:
            try:
                status = get_serving_status(instance_name, gcp.service_port)
                logger.info('serving status response from %s: %s',
                            instance_name, status)
            except grpc.RpcError as rpc_error:
                logger.info('checking serving status of %s failed: %s',
                            instance_name, rpc_error)
        # Authoritative check: ask GCP for the backend service's health view.
        result = gcp.compute.backendServices().getHealth(
            project=gcp.project,
            backendService=backend_service.name,
            body=config).execute(num_retries=_GCP_API_RETRIES)
        if 'healthStatus' in result:
            logger.info('received GCP healthStatus: %s', result['healthStatus'])
            healthy = True
            for instance in result['healthStatus']:
                if instance['healthState'] != 'HEALTHY':
                    healthy = False
                    break
            # Also require the report to cover every instance in the group.
            if healthy and expected_size == len(result['healthStatus']):
                return
        else:
            logger.info('no healthStatus received from GCP')
        time.sleep(5)
    # `result` is the last getHealth response; the loop always runs at least
    # once for any non-negative timeout_sec, so it is bound here.
    raise Exception('Not all backends became healthy within %d seconds: %s' %
                    (timeout_sec, result))
def get_instance_names(gcp, instance_group):
    """Return the short names of all instances in instance_group (empty list
    if the group has none)."""
    response = gcp.compute.instanceGroups().listInstances(
        project=gcp.project,
        zone=instance_group.zone,
        instanceGroup=instance_group.name,
        body={
            'instanceState': 'ALL'
        }).execute(num_retries=_GCP_API_RETRIES)
    if 'items' not in response:
        return []
    # listInstances() returns full instance URLs, but compute.instances().get()
    # wants bare names, so keep only the last path segment of each URL.
    instance_names = [
        item['instance'].split('/')[-1] for item in response['items']
    ]
    logger.info('retrieved instance names: %s', instance_names)
    return instance_names
def clean_up(gcp):
    """Best-effort teardown of every GCP resource recorded on gcp.

    Resources are deleted in reverse creation order (forwarding rule ->
    target proxy -> url map -> backend services -> firewall -> health check
    -> instance groups -> instance template) — presumably because GCP
    rejects deleting a resource that is still referenced by another.
    Each delete_* helper logs and swallows its own failures.
    """
    if gcp.global_forwarding_rule:
        delete_global_forwarding_rule(gcp)
    if gcp.target_proxy:
        delete_target_proxy(gcp)
    if gcp.url_map:
        delete_url_map(gcp)
    delete_backend_services(gcp)
    if gcp.health_check_firewall_rule:
        delete_firewall(gcp)
    if gcp.health_check:
        delete_health_check(gcp)
    delete_instance_groups(gcp)
    if gcp.instance_template:
        delete_instance_template(gcp)
class InstanceGroup(object):
    """Record of a GCE instance group: its name, resource URL, and zone."""

    def __init__(self, name, url, zone):
        # url is None when lookup of an existing group failed.
        self.name = name
        self.url = url
        self.zone = zone
class GcpResource(object):
    """Record of a named global GCP resource and its resource URL."""

    def __init__(self, name, url):
        # url is None when lookup of an existing resource failed.
        self.name = name
        self.url = url
class GcpState(object):
    """Mutable record of the GCP clients and every resource this run
    creates or discovers, plus any lookup errors."""

    def __init__(self, compute, alpha_compute, project, project_num):
        # API clients (alpha_compute may be None when alpha is disabled).
        self.compute = compute
        self.alpha_compute = alpha_compute
        # Project identifiers.
        self.project = project
        self.project_num = project_num
        # Resources, filled in lazily as they are created/looked up.
        self.health_check = None
        self.health_check_firewall_rule = None
        self.backend_services = []
        self.url_map = None
        self.target_proxy = None
        self.global_forwarding_rule = None
        self.service_port = None
        self.instance_template = None
        self.instance_groups = []
        # Lookup errors accumulated by the get_* helpers.
        self.errors = []
# Log the script start time (UTC rendered in the local timezone) and the
# local timezone itself, to help correlate with GCP-side logs.
logging.debug(
    "script start time: %s",
    datetime.datetime.now(
        datetime.timezone.utc).astimezone().strftime("%Y-%m-%dT%H:%M:%S %Z"))
logging.debug("logging local timezone: %s",
              datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo)
alpha_compute = None
# Build the compute API clients: from local discovery documents when
# provided (pinned/offline schema), otherwise from the live discovery
# service. The alpha client is only built when alpha APIs are allowed.
if args.compute_discovery_document:
    with open(args.compute_discovery_document, 'r') as discovery_doc:
        compute = googleapiclient.discovery.build_from_document(
            discovery_doc.read())
    if not args.only_stable_gcp_apis and args.alpha_compute_discovery_document:
        with open(args.alpha_compute_discovery_document, 'r') as discovery_doc:
            alpha_compute = googleapiclient.discovery.build_from_document(
                discovery_doc.read())
else:
    compute = googleapiclient.discovery.build('compute', 'v1')
    if not args.only_stable_gcp_apis:
        alpha_compute = googleapiclient.discovery.build('compute', 'alpha')
# Per-test outcomes, populated by the main test loop below.
test_results = {}
failed_tests = []
try:
gcp = GcpState(compute, alpha_compute, args.project_id, args.project_num)
gcp_suffix = args.gcp_suffix
health_check_name = _BASE_HEALTH_CHECK_NAME + gcp_suffix
if not args.use_existing_gcp_resources:
if args.keep_gcp_resources:
# Auto-generating a unique suffix in case of conflict should not be
# combined with --keep_gcp_resources, as the suffix actually used
# for GCP resources will not match the provided --gcp_suffix value.
num_attempts = 1
else:
num_attempts = 5
for i in range(num_attempts):
try:
logger.info('Using GCP suffix %s', gcp_suffix)
create_health_check(gcp, health_check_name)
break
except googleapiclient.errors.HttpError as http_error:
gcp_suffix = '%s-%04d' % (gcp_suffix, random.randint(0, 9999))
health_check_name = _BASE_HEALTH_CHECK_NAME + gcp_suffix
logger.exception('HttpError when creating health check')
if gcp.health_check is None:
raise Exception('Failed to create health check name after %d '
'attempts' % num_attempts)
firewall_name = _BASE_FIREWALL_RULE_NAME + gcp_suffix
backend_service_name = _BASE_BACKEND_SERVICE_NAME + gcp_suffix
alternate_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-alternate' + gcp_suffix
extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-extra' + gcp_suffix
more_extra_backend_service_name = _BASE_BACKEND_SERVICE_NAME + '-more-extra' + gcp_suffix
url_map_name = _BASE_URL_MAP_NAME + gcp_suffix
service_host_name = _BASE_SERVICE_HOST + gcp_suffix
target_proxy_name = _BASE_TARGET_PROXY_NAME + gcp_suffix
forwarding_rule_name = _BASE_FORWARDING_RULE_NAME + gcp_suffix
template_name = _BASE_TEMPLATE_NAME + gcp_suffix
instance_group_name = _BASE_INSTANCE_GROUP_NAME + gcp_suffix
same_zone_instance_group_name = _BASE_INSTANCE_GROUP_NAME + '-same-zone' + gcp_suffix
secondary_zone_instance_group_name = _BASE_INSTANCE_GROUP_NAME + '-secondary-zone' + gcp_suffix
potential_service_ports = list(args.service_port_range)
random.shuffle(potential_service_ports)
if args.use_existing_gcp_resources:
logger.info('Reusing existing GCP resources')
get_health_check(gcp, health_check_name)
get_health_check_firewall_rule(gcp, firewall_name)
backend_service = get_backend_service(gcp, backend_service_name)
alternate_backend_service = get_backend_service(
gcp, alternate_backend_service_name)
extra_backend_service = get_backend_service(gcp,
extra_backend_service_name,
record_error=False)
more_extra_backend_service = get_backend_service(
gcp, more_extra_backend_service_name, record_error=False)
get_url_map(gcp, url_map_name)
get_target_proxy(gcp, target_proxy_name)
get_global_forwarding_rule(gcp, forwarding_rule_name)
get_instance_template(gcp, template_name)
instance_group = get_instance_group(gcp, args.zone, instance_group_name)
same_zone_instance_group = get_instance_group(
gcp, args.zone, same_zone_instance_group_name)
secondary_zone_instance_group = get_instance_group(
gcp, args.secondary_zone, secondary_zone_instance_group_name)
if gcp.errors:
raise Exception(gcp.errors)
else:
create_health_check_firewall_rule(gcp, firewall_name)
backend_service = add_backend_service(gcp, backend_service_name)
alternate_backend_service = add_backend_service(
gcp, alternate_backend_service_name)
create_url_map(gcp, url_map_name, backend_service, service_host_name)
create_target_proxy(gcp, target_proxy_name)
create_global_forwarding_rule(gcp, forwarding_rule_name,
potential_service_ports)
if not gcp.service_port:
raise Exception(
'Failed to find a valid ip:port for the forwarding rule')
if gcp.service_port != _DEFAULT_SERVICE_PORT:
patch_url_map_host_rule_with_port(gcp, url_map_name,
backend_service,
service_host_name)
startup_script = get_startup_script(args.path_to_server_binary,
gcp.service_port)
create_instance_template(gcp, template_name, args.network,
args.source_image, args.machine_type,
startup_script)
instance_group = add_instance_group(gcp, args.zone, instance_group_name,
_INSTANCE_GROUP_SIZE)
patch_backend_service(gcp, backend_service, [instance_group])
same_zone_instance_group = add_instance_group(
gcp, args.zone, same_zone_instance_group_name, _INSTANCE_GROUP_SIZE)
secondary_zone_instance_group = add_instance_group(
gcp, args.secondary_zone, secondary_zone_instance_group_name,
_INSTANCE_GROUP_SIZE)
wait_for_healthy_backends(gcp, backend_service, instance_group)
if args.test_case:
client_env = dict(os.environ)
if original_grpc_trace:
client_env['GRPC_TRACE'] = original_grpc_trace
if original_grpc_verbosity:
client_env['GRPC_VERBOSITY'] = original_grpc_verbosity
bootstrap_server_features = []
if gcp.service_port == _DEFAULT_SERVICE_PORT:
server_uri = service_host_name
else:
server_uri = service_host_name + ':' + str(gcp.service_port)
if args.xds_v3_support:
client_env['GRPC_XDS_EXPERIMENTAL_V3_SUPPORT'] = 'true'
bootstrap_server_features.append('xds_v3')
if args.bootstrap_file:
bootstrap_path = os.path.abspath(args.bootstrap_file)
else:
with tempfile.NamedTemporaryFile(delete=False) as bootstrap_file:
bootstrap_file.write(
_BOOTSTRAP_TEMPLATE.format(
node_id='projects/%s/networks/%s/nodes/%s' %
(gcp.project_num, args.network.split('/')[-1],
uuid.uuid1()),
server_features=json.dumps(
bootstrap_server_features)).encode('utf-8'))
bootstrap_path = bootstrap_file.name
client_env['GRPC_XDS_BOOTSTRAP'] = bootstrap_path
client_env['GRPC_XDS_EXPERIMENTAL_CIRCUIT_BREAKING'] = 'true'
client_env['GRPC_XDS_EXPERIMENTAL_ENABLE_TIMEOUT'] = 'true'
client_env['GRPC_XDS_EXPERIMENTAL_FAULT_INJECTION'] = 'true'
for test_case in args.test_case:
if test_case in _V3_TEST_CASES and not args.xds_v3_support:
logger.info('skipping test %s due to missing v3 support',
test_case)
continue
if test_case in _ALPHA_TEST_CASES and not gcp.alpha_compute:
logger.info('skipping test %s due to missing alpha support',
test_case)
continue
if test_case in [
'api_listener', 'forwarding_rule_port_match',
'forwarding_rule_default_port'
] and CLIENT_HOSTS:
logger.info(
'skipping test %s because test configuration is'
'not compatible with client processes on existing'
'client hosts', test_case)
continue
if test_case == 'forwarding_rule_default_port':
server_uri = service_host_name
result = jobset.JobResult()
log_dir = os.path.join(_TEST_LOG_BASE_DIR, test_case)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
test_log_filename = os.path.join(log_dir, _SPONGE_LOG_NAME)
test_log_file = open(test_log_filename, 'w+')
client_process = None
if test_case in _TESTS_TO_RUN_MULTIPLE_RPCS:
rpcs_to_send = '--rpc="UnaryCall,EmptyCall"'
else:
rpcs_to_send = '--rpc="UnaryCall"'
if test_case in _TESTS_TO_SEND_METADATA:
metadata_to_send = '--metadata="EmptyCall:{keyE}:{valueE},UnaryCall:{keyU}:{valueU},UnaryCall:{keyNU}:{valueNU}"'.format(
keyE=_TEST_METADATA_KEY,
valueE=_TEST_METADATA_VALUE_EMPTY,
keyU=_TEST_METADATA_KEY,
valueU=_TEST_METADATA_VALUE_UNARY,
keyNU=_TEST_METADATA_NUMERIC_KEY,
valueNU=_TEST_METADATA_NUMERIC_VALUE)
else:
# Setting the arg explicitly to empty with '--metadata=""'
# makes C# client fail
# (see https://github.com/commandlineparser/commandline/issues/412),
# so instead we just rely on clients using the default when
# metadata arg is not specified.
metadata_to_send = ''
# TODO(ericgribkoff) Temporarily disable fail_on_failed_rpc checks
# in the client. This means we will ignore intermittent RPC
# failures (but this framework still checks that the final result
# is as expected).
#
# Reason for disabling this is, the resources are shared by
# multiple tests, and a change in previous test could be delayed
# until the second test starts. The second test may see
# intermittent failures because of that.
#
# A fix is to not share resources between tests (though that does
# mean the tests will be significantly slower due to creating new
# resources).
fail_on_failed_rpc = ''
try:
if not CLIENT_HOSTS:
client_cmd_formatted = args.client_cmd.format(
server_uri=server_uri,
stats_port=args.stats_port,
qps=args.qps,
fail_on_failed_rpc=fail_on_failed_rpc,
rpcs_to_send=rpcs_to_send,
metadata_to_send=metadata_to_send)
logger.debug('running client: %s', client_cmd_formatted)
client_cmd = shlex.split(client_cmd_formatted)
client_process = subprocess.Popen(client_cmd,
env=client_env,
stderr=subprocess.STDOUT,
stdout=test_log_file)
if test_case == 'backends_restart':
test_backends_restart(gcp, backend_service, instance_group)
elif test_case == 'change_backend_service':
test_change_backend_service(gcp, backend_service,
instance_group,
alternate_backend_service,
same_zone_instance_group)
elif test_case == 'gentle_failover':
test_gentle_failover(gcp, backend_service, instance_group,
secondary_zone_instance_group)
elif test_case == 'load_report_based_failover':
test_load_report_based_failover(
gcp, backend_service, instance_group,
secondary_zone_instance_group)
elif test_case == 'ping_pong':
test_ping_pong(gcp, backend_service, instance_group)
elif test_case == 'remove_instance_group':
test_remove_instance_group(gcp, backend_service,
instance_group,
same_zone_instance_group)
elif test_case == 'round_robin':
test_round_robin(gcp, backend_service, instance_group)
elif test_case == 'secondary_locality_gets_no_requests_on_partial_primary_failure':
test_secondary_locality_gets_no_requests_on_partial_primary_failure(
gcp, backend_service, instance_group,
secondary_zone_instance_group)
elif test_case == 'secondary_locality_gets_requests_on_primary_failure':
test_secondary_locality_gets_requests_on_primary_failure(
gcp, backend_service, instance_group,
secondary_zone_instance_group)
elif test_case == 'traffic_splitting':
test_traffic_splitting(gcp, backend_service, instance_group,
alternate_backend_service,
same_zone_instance_group)
elif test_case == 'path_matching':
test_path_matching(gcp, backend_service, instance_group,
alternate_backend_service,
same_zone_instance_group)
elif test_case == 'header_matching':
test_header_matching(gcp, backend_service, instance_group,
alternate_backend_service,
same_zone_instance_group)
elif test_case == 'circuit_breaking':
test_circuit_breaking(gcp, backend_service, instance_group,
same_zone_instance_group)
elif test_case == 'timeout':
test_timeout(gcp, backend_service, instance_group)
elif test_case == 'fault_injection':
test_fault_injection(gcp, backend_service, instance_group)
elif test_case == 'api_listener':
server_uri = test_api_listener(gcp, backend_service,
instance_group,
alternate_backend_service)
elif test_case == 'forwarding_rule_port_match':
server_uri = test_forwarding_rule_port_match(
gcp, backend_service, instance_group)
elif test_case == 'forwarding_rule_default_port':
server_uri = test_forwarding_rule_default_port(
gcp, backend_service, instance_group)
elif test_case == 'metadata_filter':
test_metadata_filter(gcp, backend_service, instance_group,
alternate_backend_service,
same_zone_instance_group)
elif test_case == 'csds':
test_csds(gcp, backend_service, instance_group, server_uri)
else:
logger.error('Unknown test case: %s', test_case)
sys.exit(1)
if client_process and client_process.poll() is not None:
raise Exception(
'Client process exited prematurely with exit code %d' %
client_process.returncode)
result.state = 'PASSED'
result.returncode = 0
except Exception as e:
logger.exception('Test case %s failed', test_case)
failed_tests.append(test_case)
result.state = 'FAILED'
result.message = str(e)
if args.halt_after_fail:
# Stop the test suite if one case failed.
raise
finally:
if client_process:
if client_process.returncode:
logger.info('Client exited with code %d' %
client_process.returncode)
else:
client_process.terminate()
test_log_file.close()
# Workaround for Python 3, as report_utils will invoke decode() on
# result.message, which has a default value of ''.
result.message = result.message.encode('UTF-8')
test_results[test_case] = [result]
if args.log_client_output:
logger.info('Client output:')
with open(test_log_filename, 'r') as client_output:
logger.info(client_output.read())
if not os.path.exists(_TEST_LOG_BASE_DIR):
os.makedirs(_TEST_LOG_BASE_DIR)
report_utils.render_junit_xml_report(test_results,
os.path.join(
_TEST_LOG_BASE_DIR,
_SPONGE_XML_NAME),
suite_name='xds_tests',
multi_target=True)
if failed_tests:
logger.error('Test case(s) %s failed', failed_tests)
sys.exit(1)
finally:
keep_resources = args.keep_gcp_resources
if args.halt_after_fail and failed_tests:
logger.info(
'Halt after fail triggered, exiting without cleaning up resources')
keep_resources = True
if not keep_resources:
logger.info('Cleaning up GCP resources. This may take some time.')
clean_up(gcp)
| codeparrot/github-code-clean |
"""
Forest of trees-based ensemble methods.
Those methods include random forests and extremely randomized trees.
The module structure is the following:
- The ``BaseForest`` base class implements a common ``fit`` method for all
the estimators in the module. The ``fit`` method of the base ``Forest``
class calls the ``fit`` method of each sub-estimator on random samples
(with replacement, a.k.a. bootstrap) of the training set.
The init of the sub-estimator is further delegated to the
``BaseEnsemble`` constructor.
- The ``ForestClassifier`` and ``ForestRegressor`` base classes further
implement the prediction logic by computing an average of the predicted
outcomes of the sub-estimators.
- The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived
classes provide the user with concrete implementations of
the forest ensemble method using classical, deterministic
``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as
sub-estimator implementations.
- The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived
classes provide the user with concrete implementations of the
forest ensemble method using the extremely randomized trees
``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as
sub-estimator implementations.
Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Joly Arnaud <arnaud.v.joly@gmail.com>
# Fares Hedayati <fares.hedayati@gmail.com>
#
# License: BSD 3 clause
import numbers
from warnings import catch_warnings, simplefilter, warn
import threading
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import issparse
from scipy.sparse import hstack as sparse_hstack
from joblib import Parallel
from ..base import is_classifier
from ..base import ClassifierMixin, RegressorMixin, MultiOutputMixin
from ..metrics import accuracy_score, r2_score
from ..preprocessing import OneHotEncoder
from ..tree import (DecisionTreeClassifier, DecisionTreeRegressor,
ExtraTreeClassifier, ExtraTreeRegressor)
from ..tree._tree import DTYPE, DOUBLE
from ..utils import check_random_state, compute_sample_weight, deprecated
from ..exceptions import DataConversionWarning
from ._base import BaseEnsemble, _partition_estimators
from ..utils.fixes import delayed
from ..utils.fixes import _joblib_parallel_args
from ..utils.multiclass import check_classification_targets, type_of_target
from ..utils.validation import check_is_fitted, _check_sample_weight
# Public estimators exported by this module.
__all__ = ["RandomForestClassifier",
           "RandomForestRegressor",
           "ExtraTreesClassifier",
           "ExtraTreesRegressor",
           "RandomTreesEmbedding"]

# Upper bound for per-tree random seeds (largest value fitting in int32).
MAX_INT = np.iinfo(np.int32).max
def _get_n_samples_bootstrap(n_samples, max_samples):
"""
Get the number of samples in a bootstrap sample.
Parameters
----------
n_samples : int
Number of samples in the dataset.
max_samples : int or float
The maximum number of samples to draw from the total available:
- if float, this indicates a fraction of the total and should be
the interval `(0.0, 1.0]`;
- if int, this indicates the exact number of samples;
- if None, this indicates the total number of samples.
Returns
-------
n_samples_bootstrap : int
The total number of samples to draw for the bootstrap sample.
"""
if max_samples is None:
return n_samples
if isinstance(max_samples, numbers.Integral):
if not (1 <= max_samples <= n_samples):
msg = "`max_samples` must be in range 1 to {} but got value {}"
raise ValueError(msg.format(n_samples, max_samples))
return max_samples
if isinstance(max_samples, numbers.Real):
if not (0 < max_samples <= 1):
msg = "`max_samples` must be in range (0.0, 1.0] but got value {}"
raise ValueError(msg.format(max_samples))
return round(n_samples * max_samples)
msg = "`max_samples` should be int or float, but got type '{}'"
raise TypeError(msg.format(type(max_samples)))
def _generate_sample_indices(random_state, n_samples, n_samples_bootstrap):
    """
    Draw the bootstrap indices for a single tree.

    Samples ``n_samples_bootstrap`` indices uniformly with replacement
    from ``[0, n_samples)``; used by ``_parallel_build_trees``.
    """
    rng = check_random_state(random_state)
    return rng.randint(0, n_samples, n_samples_bootstrap)
def _generate_unsampled_indices(random_state, n_samples, n_samples_bootstrap):
    """
    Return the out-of-bag indices for a single tree.

    Re-draws the tree's exact bootstrap sample (the RNG is seeded the
    same way as during fitting) and keeps the indices that were never
    selected; used by the forest OOB-score machinery.
    """
    drawn = _generate_sample_indices(random_state, n_samples,
                                     n_samples_bootstrap)
    hit_counts = np.bincount(drawn, minlength=n_samples)
    return np.arange(n_samples)[hit_counts == 0]
def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees,
                          verbose=0, class_weight=None,
                          n_samples_bootstrap=None):
    """
    Fit one tree of the ensemble; intended to run inside a joblib worker.

    When ``forest.bootstrap`` is set, the bootstrap draw is encoded as
    per-sample weights (via ``np.bincount`` of the drawn indices) rather
    than materializing a resampled copy of ``X`` and ``y``.
    """
    if verbose > 1:
        print("building tree %d of %d" % (tree_idx + 1, n_trees))

    if not forest.bootstrap:
        # No resampling: fit directly on the full training set.
        tree.fit(X, y, sample_weight=sample_weight, check_input=False)
        return tree

    n_samples = X.shape[0]
    if sample_weight is None:
        curr_sample_weight = np.ones((n_samples,), dtype=np.float64)
    else:
        curr_sample_weight = sample_weight.copy()

    indices = _generate_sample_indices(tree.random_state, n_samples,
                                       n_samples_bootstrap)
    # Weight each sample by how many times the bootstrap drew it.
    curr_sample_weight *= np.bincount(indices, minlength=n_samples)

    if class_weight == 'subsample':
        # This legacy preset goes through a deprecated path in
        # compute_sample_weight; silence the DeprecationWarning it emits.
        with catch_warnings():
            simplefilter('ignore', DeprecationWarning)
            curr_sample_weight *= compute_sample_weight('auto', y,
                                                        indices=indices)
    elif class_weight == 'balanced_subsample':
        curr_sample_weight *= compute_sample_weight('balanced', y,
                                                    indices=indices)

    tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False)
    return tree
class BaseForest(MultiOutputMixin, BaseEnsemble, metaclass=ABCMeta):
    """
    Base class for forests of trees.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=100, *,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 max_samples=None):
        super().__init__(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params)
        # Parameters are stored unvalidated, following the scikit-learn
        # convention that validation happens in `fit`.
        self.bootstrap = bootstrap
        self.oob_score = oob_score
        self.n_jobs = n_jobs
        self.random_state = random_state
        self.verbose = verbose
        self.warm_start = warm_start
        self.class_weight = class_weight
        self.max_samples = max_samples

    def apply(self, X):
        """
        Apply trees in the forest to X, return leaf indices.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        X_leaves : ndarray of shape (n_samples, n_estimators)
            For each datapoint x in X and for each tree in the forest,
            return the index of the leaf x ends up in.
        """
        X = self._validate_X_predict(X)
        results = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                           **_joblib_parallel_args(prefer="threads"))(
            delayed(tree.apply)(X, check_input=False)
            for tree in self.estimators_)

        # Transpose so rows correspond to samples and columns to estimators.
        return np.array(results).T

    def decision_path(self, X):
        """
        Return the decision path in the forest.

        .. versionadded:: 0.18

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        indicator : sparse matrix of shape (n_samples, n_nodes)
            Return a node indicator matrix where non zero elements indicates
            that the samples goes through the nodes. The matrix is of CSR
            format.

        n_nodes_ptr : ndarray of shape (n_estimators + 1,)
            The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]]
            gives the indicator value for the i-th estimator.
        """
        X = self._validate_X_predict(X)
        indicators = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                              **_joblib_parallel_args(prefer='threads'))(
            delayed(tree.decision_path)(X, check_input=False)
            for tree in self.estimators_)

        # Cumulative node counts delimit each tree's columns in the
        # horizontally stacked indicator matrix.
        n_nodes = [0]
        n_nodes.extend([i.shape[1] for i in indicators])
        n_nodes_ptr = np.array(n_nodes).cumsum()

        return sparse_hstack(indicators).tocsr(), n_nodes_ptr

    def fit(self, X, y, sample_weight=None):
        """
        Build a forest of trees from the training set (X, y).

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The training input samples. Internally, its dtype will be converted
            to ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csc_matrix``.

        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            The target values (class labels in classification, real numbers in
            regression).

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.

        Returns
        -------
        self : object
        """
        # Validate or convert input data
        if issparse(y):
            raise ValueError(
                "sparse multilabel-indicator for y is not supported."
            )
        X, y = self._validate_data(X, y, multi_output=True,
                                   accept_sparse="csc", dtype=DTYPE)
        if sample_weight is not None:
            sample_weight = _check_sample_weight(sample_weight, X)

        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
            X.sort_indices()

        y = np.atleast_1d(y)
        if y.ndim == 2 and y.shape[1] == 1:
            warn("A column-vector y was passed when a 1d array was"
                 " expected. Please change the shape of y to "
                 "(n_samples,), for example using ravel().",
                 DataConversionWarning, stacklevel=2)

        if y.ndim == 1:
            # reshape is necessary to preserve the data contiguity against vs
            # [:, np.newaxis] that does not.
            y = np.reshape(y, (-1, 1))

        if self.criterion == "poisson":
            # The Poisson criterion is only defined for non-negative targets
            # with a strictly positive sum (see the error messages below).
            if np.any(y < 0):
                raise ValueError("Some value(s) of y are negative which is "
                                 "not allowed for Poisson regression.")
            if np.sum(y) <= 0:
                raise ValueError("Sum of y is not strictly positive which "
                                 "is necessary for Poisson regression.")

        self.n_outputs_ = y.shape[1]

        y, expanded_class_weight = self._validate_y_class_weight(y)

        # The tree code expects C-contiguous DOUBLE targets; convert once here
        # instead of in every tree.
        if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
            y = np.ascontiguousarray(y, dtype=DOUBLE)

        if expanded_class_weight is not None:
            # Fold class weights into the per-sample weights.
            if sample_weight is not None:
                sample_weight = sample_weight * expanded_class_weight
            else:
                sample_weight = expanded_class_weight

        # Get bootstrap sample size
        n_samples_bootstrap = _get_n_samples_bootstrap(
            n_samples=X.shape[0],
            max_samples=self.max_samples
        )

        # Check parameters
        self._validate_estimator()
        # TODO: Remove in v1.2
        if isinstance(self, (RandomForestRegressor, ExtraTreesRegressor)):
            if self.criterion == "mse":
                warn(
                    "Criterion 'mse' was deprecated in v1.0 and will be "
                    "removed in version 1.2. Use `criterion='squared_error'` "
                    "which is equivalent.",
                    FutureWarning
                )
            elif self.criterion == "mae":
                warn(
                    "Criterion 'mae' was deprecated in v1.0 and will be "
                    "removed in version 1.2. Use `criterion='absolute_error'` "
                    "which is equivalent.",
                    FutureWarning
                )

        if not self.bootstrap and self.oob_score:
            raise ValueError("Out of bag estimation only available"
                             " if bootstrap=True")

        random_state = check_random_state(self.random_state)

        if not self.warm_start or not hasattr(self, "estimators_"):
            # Free allocated memory, if any
            self.estimators_ = []

        n_more_estimators = self.n_estimators - len(self.estimators_)

        if n_more_estimators < 0:
            raise ValueError('n_estimators=%d must be larger or equal to '
                             'len(estimators_)=%d when warm_start==True'
                             % (self.n_estimators, len(self.estimators_)))

        elif n_more_estimators == 0:
            warn("Warm-start fitting without increasing n_estimators does not "
                 "fit new trees.")
        else:
            if self.warm_start and len(self.estimators_) > 0:
                # We draw from the random state to get the random state we
                # would have got if we hadn't used a warm_start.
                random_state.randint(MAX_INT, size=len(self.estimators_))

            trees = [self._make_estimator(append=False,
                                          random_state=random_state)
                     for i in range(n_more_estimators)]

            # Parallel loop: we prefer the threading backend as the Cython code
            # for fitting the trees is internally releasing the Python GIL
            # making threading more efficient than multiprocessing in
            # that case. However, for joblib 0.12+ we respect any
            # parallel_backend contexts set at a higher level,
            # since correctness does not rely on using threads.
            trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                             **_joblib_parallel_args(prefer='threads'))(
                delayed(_parallel_build_trees)(
                    t, self, X, y, sample_weight, i, len(trees),
                    verbose=self.verbose, class_weight=self.class_weight,
                    n_samples_bootstrap=n_samples_bootstrap)
                for i, t in enumerate(trees))

            # Collect newly grown trees
            self.estimators_.extend(trees)

        if self.oob_score:
            y_type = type_of_target(y)
            if y_type in ("multiclass-multioutput", "unknown"):
                # FIXME: we could consider to support multiclass-multioutput if
                # we introduce or reuse a constructor parameter (e.g.
                # oob_score) allowing our user to pass a callable defining the
                # scoring strategy on OOB sample.
                raise ValueError(
                    f"The type of target cannot be used to compute OOB "
                    f"estimates. Got {y_type} while only the following are "
                    f"supported: continuous, continuous-multioutput, binary, "
                    f"multiclass, multilabel-indicator."
                )
            self._set_oob_score_and_attributes(X, y)

        # Decapsulate classes_ attributes
        if hasattr(self, "classes_") and self.n_outputs_ == 1:
            # Single-output problems expose classes_/n_classes_ directly
            # instead of as one-element lists.
            self.n_classes_ = self.n_classes_[0]
            self.classes_ = self.classes_[0]

        return self

    @abstractmethod
    def _set_oob_score_and_attributes(self, X, y):
        """Compute and set the OOB score and attributes.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data matrix.

        y : ndarray of shape (n_samples, n_outputs)
            The target matrix.
        """

    def _compute_oob_predictions(self, X, y):
        """Compute and set the OOB score.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data matrix.

        y : ndarray of shape (n_samples, n_outputs)
            The target matrix.

        Returns
        -------
        oob_pred : ndarray of shape (n_samples, n_classes, n_outputs) or \
                (n_samples, 1, n_outputs)
            The OOB predictions.
        """
        X = self._validate_data(X, dtype=DTYPE, accept_sparse='csr',
                                reset=False)

        n_samples = y.shape[0]
        n_outputs = self.n_outputs_
        if is_classifier(self) and hasattr(self, "n_classes_"):
            # n_classes_ is a ndarray at this stage
            # all the supported type of target will have the same number of
            # classes in all outputs
            oob_pred_shape = (n_samples, self.n_classes_[0], n_outputs)
        else:
            # for regression, n_classes_ does not exist and we create an empty
            # axis to be consistent with the classification case and make
            # the array operations compatible with the 2 settings
            oob_pred_shape = (n_samples, 1, n_outputs)

        oob_pred = np.zeros(shape=oob_pred_shape, dtype=np.float64)
        n_oob_pred = np.zeros((n_samples, n_outputs), dtype=np.int64)

        n_samples_bootstrap = _get_n_samples_bootstrap(
            n_samples, self.max_samples,
        )
        for estimator in self.estimators_:
            # Re-derive this tree's bootstrap draw from its stored seed to
            # identify the samples it never saw during fitting.
            unsampled_indices = _generate_unsampled_indices(
                estimator.random_state, n_samples, n_samples_bootstrap,
            )

            y_pred = self._get_oob_predictions(
                estimator, X[unsampled_indices, :]
            )
            oob_pred[unsampled_indices, ...] += y_pred
            n_oob_pred[unsampled_indices, :] += 1

        for k in range(n_outputs):
            if (n_oob_pred == 0).any():
                warn(
                    "Some inputs do not have OOB scores. This probably means "
                    "too few trees were used to compute any reliable OOB "
                    "estimates.", UserWarning
                )
                # Avoid dividing by zero for samples that appeared in every
                # bootstrap sample.
                n_oob_pred[n_oob_pred == 0] = 1
            oob_pred[..., k] /= n_oob_pred[..., [k]]

        return oob_pred

    def _validate_y_class_weight(self, y):
        # Default implementation
        return y, None

    def _validate_X_predict(self, X):
        """
        Validate X whenever one tries to predict, apply, predict_proba."""
        check_is_fitted(self)
        # Delegate validation to the first fitted tree.
        return self.estimators_[0]._validate_X_predict(X, check_input=True)

    @property
    def feature_importances_(self):
        """
        The impurity-based feature importances.

        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature. It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

        Returns
        -------
        feature_importances_ : ndarray of shape (n_features,)
            The values of this array sum to 1, unless all trees are single node
            trees consisting of only the root node, in which case it will be an
            array of zeros.
        """
        check_is_fitted(self)

        # Single-node trees (root only) are skipped: they made no splits
        # and therefore carry no importance information.
        all_importances = Parallel(n_jobs=self.n_jobs,
                                   **_joblib_parallel_args(prefer='threads'))(
            delayed(getattr)(tree, 'feature_importances_')
            for tree in self.estimators_ if tree.tree_.node_count > 1)

        if not all_importances:
            return np.zeros(self.n_features_in_, dtype=np.float64)

        all_importances = np.mean(all_importances,
                                  axis=0, dtype=np.float64)
        return all_importances / np.sum(all_importances)

    # TODO: Remove in 1.2
    # mypy error: Decorated property not supported
    @deprecated(  # type: ignore
        "Attribute n_features_ was deprecated in version 1.0 and will be "
        "removed in 1.2. Use 'n_features_in_' instead."
    )
    @property
    def n_features_(self):
        # Deprecated alias kept for backward compatibility.
        return self.n_features_in_
def _accumulate_prediction(predict, X, out, lock):
"""
This is a utility function for joblib's Parallel.
It can't go locally in ForestClassifier or ForestRegressor, because joblib
complains that it cannot pickle it when placed there.
"""
prediction = predict(X, check_input=False)
with lock:
if len(out) == 1:
out[0] += prediction
else:
for i in range(len(out)):
out[i] += prediction[i]
class ForestClassifier(ClassifierMixin, BaseForest, metaclass=ABCMeta):
"""
Base class for forest of trees-based classifiers.
Warning: This class should not be used directly. Use derived classes
instead.
"""
@abstractmethod
def __init__(self,
             base_estimator,
             n_estimators=100, *,
             estimator_params=tuple(),
             bootstrap=False,
             oob_score=False,
             n_jobs=None,
             random_state=None,
             verbose=0,
             warm_start=False,
             class_weight=None,
             max_samples=None):
    # Pure pass-through: all configuration is forwarded unchanged to
    # BaseForest.__init__, which stores it without validation.
    super().__init__(
        base_estimator,
        n_estimators=n_estimators,
        estimator_params=estimator_params,
        bootstrap=bootstrap,
        oob_score=oob_score,
        n_jobs=n_jobs,
        random_state=random_state,
        verbose=verbose,
        warm_start=warm_start,
        class_weight=class_weight,
        max_samples=max_samples)
@staticmethod
def _get_oob_predictions(tree, X):
    """Compute the OOB predictions for an individual tree.

    Parameters
    ----------
    tree : DecisionTreeClassifier object
        A single decision tree classifier.

    X : ndarray of shape (n_samples, n_features)
        The OOB samples.

    Returns
    -------
    y_pred : ndarray of shape (n_samples, n_classes, n_outputs)
        The OOB associated predictions.
    """
    proba = np.array(tree.predict_proba(X, check_input=False), copy=False)
    if proba.ndim == 2:
        # Single output (binary/multiclass): append a trailing
        # n_outputs axis of size 1.
        return proba[..., np.newaxis]
    # Multi-output: move the leading n_outputs axis to the end, going
    # from (n_outputs, n_samples, n_classes) to
    # (n_samples, n_classes, n_outputs).
    return np.rollaxis(proba, axis=0, start=3)
def _set_oob_score_and_attributes(self, X, y):
    """Compute and set the OOB score and attributes.

    Sets ``oob_decision_function_`` from the aggregated out-of-bag
    probabilities and ``oob_score_`` as the OOB accuracy.

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        The data matrix.

    y : ndarray of shape (n_samples, n_outputs)
        The target matrix.
    """
    oob_proba = super()._compute_oob_predictions(X, y)
    if oob_proba.shape[-1] == 1:
        # Single output: drop the trailing n_outputs axis.
        oob_proba = oob_proba.squeeze(axis=-1)
    self.oob_decision_function_ = oob_proba
    self.oob_score_ = accuracy_score(
        y, np.argmax(self.oob_decision_function_, axis=1)
    )
def _validate_y_class_weight(self, y):
    """Encode targets per output and expand ``class_weight`` into
    per-sample weights.

    Returns the integer-encoded targets together with the expanded
    class weights (or None), and fills in ``classes_``/``n_classes_``.
    """
    check_classification_targets(y)

    y = np.copy(y)
    expanded_class_weight = None

    # Keep the raw labels: class-weight expansion must see the original
    # classes, not the integer-encoded values computed below.
    if self.class_weight is not None:
        y_original = np.copy(y)

    self.classes_ = []
    self.n_classes_ = []

    # Re-encode every output column as indices into its sorted classes.
    encoded = np.zeros(y.shape, dtype=int)
    for k in range(self.n_outputs_):
        classes_k, encoded[:, k] = np.unique(y[:, k], return_inverse=True)
        self.classes_.append(classes_k)
        self.n_classes_.append(classes_k.shape[0])
    y = encoded

    if self.class_weight is not None:
        if isinstance(self.class_weight, str):
            if self.class_weight not in ('balanced', 'balanced_subsample'):
                raise ValueError('Valid presets for class_weight include '
                                 '"balanced" and "balanced_subsample".'
                                 'Given "%s".'
                                 % self.class_weight)
            if self.warm_start:
                warn('class_weight presets "balanced" or '
                     '"balanced_subsample" are '
                     'not recommended for warm_start if the fitted data '
                     'differs from the full dataset. In order to use '
                     '"balanced" weights, use compute_class_weight '
                     '("balanced", classes, y). In place of y you can use '
                     'a large enough sample of the full training set '
                     'target to properly estimate the class frequency '
                     'distributions. Pass the resulting weights as the '
                     'class_weight parameter.')

        # 'balanced_subsample' with bootstrap is handled per-tree in
        # _parallel_build_trees rather than expanded once here.
        if (self.class_weight != 'balanced_subsample' or
                not self.bootstrap):
            class_weight = ("balanced"
                            if self.class_weight == "balanced_subsample"
                            else self.class_weight)
            expanded_class_weight = compute_sample_weight(class_weight,
                                                          y_original)

    return y, expanded_class_weight
def predict(self, X):
"""
Predict class for X.
The predicted class of an input sample is a vote by the trees in
the forest, weighted by their probability estimates. That is,
the predicted class is the one with highest mean probability
estimate across the trees.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
y : ndarray of shape (n_samples,) or (n_samples, n_outputs)
The predicted classes.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return self.classes_.take(np.argmax(proba, axis=1), axis=0)
else:
n_samples = proba[0].shape[0]
# all dtypes should be the same, so just take the first
class_type = self.classes_[0].dtype
predictions = np.empty((n_samples, self.n_outputs_),
dtype=class_type)
for k in range(self.n_outputs_):
predictions[:, k] = self.classes_[k].take(np.argmax(proba[k],
axis=1),
axis=0)
return predictions
def predict_proba(self, X):
"""
Predict class probabilities for X.
The predicted class probabilities of an input sample are computed as
the mean predicted class probabilities of the trees in the forest.
The class probability of a single tree is the fraction of samples of
the same class in a leaf.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
p : ndarray of shape (n_samples, n_classes), or a list of such arrays
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute :term:`classes_`.
"""
check_is_fitted(self)
# Check data
X = self._validate_X_predict(X)
# Assign chunk of trees to jobs
n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
# avoid storing the output of every estimator by summing them here
all_proba = [np.zeros((X.shape[0], j), dtype=np.float64)
for j in np.atleast_1d(self.n_classes_)]
lock = threading.Lock()
Parallel(n_jobs=n_jobs, verbose=self.verbose,
**_joblib_parallel_args(require="sharedmem"))(
delayed(_accumulate_prediction)(e.predict_proba, X, all_proba,
lock)
for e in self.estimators_)
for proba in all_proba:
proba /= len(self.estimators_)
if len(all_proba) == 1:
return all_proba[0]
else:
return all_proba
def predict_log_proba(self, X):
"""
Predict class log-probabilities for X.
The predicted class log-probabilities of an input sample is computed as
the log of the mean predicted class probabilities of the trees in the
forest.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
p : ndarray of shape (n_samples, n_classes), or a list of such arrays
The class probabilities of the input samples. The order of the
classes corresponds to that in the attribute :term:`classes_`.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return np.log(proba)
else:
for k in range(self.n_outputs_):
proba[k] = np.log(proba[k])
return proba
class ForestRegressor(RegressorMixin, BaseForest, metaclass=ABCMeta):
    """
    Base class for forest of trees-based regressors.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=100, *,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 max_samples=None):
        # Pure pass-through to BaseForest; subclasses add tree-specific
        # hyper-parameters on top of this common set.
        super().__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples)

    def predict(self, X):
        """
        Predict regression target for X.

        The predicted regression target of an input sample is computed as the
        mean predicted regression targets of the trees in the forest.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        y : ndarray of shape (n_samples,) or (n_samples, n_outputs)
            The predicted values.
        """
        check_is_fitted(self)
        # Check data
        X = self._validate_X_predict(X)
        # Assign chunk of trees to jobs
        n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
        # avoid storing the output of every estimator by summing them here
        if self.n_outputs_ > 1:
            y_hat = np.zeros((X.shape[0], self.n_outputs_), dtype=np.float64)
        else:
            y_hat = np.zeros((X.shape[0]), dtype=np.float64)
        # Parallel loop: each worker adds its tree's prediction into the
        # shared y_hat buffer, serialized by `lock` (sharedmem backend).
        lock = threading.Lock()
        Parallel(n_jobs=n_jobs, verbose=self.verbose,
                 **_joblib_parallel_args(require="sharedmem"))(
            delayed(_accumulate_prediction)(e.predict, X, [y_hat], lock)
            for e in self.estimators_)
        # Convert the accumulated sum into the forest average.
        y_hat /= len(self.estimators_)
        return y_hat

    @staticmethod
    def _get_oob_predictions(tree, X):
        """Compute the OOB predictions for an individual tree.

        Parameters
        ----------
        tree : DecisionTreeRegressor object
            A single decision tree regressor.
        X : ndarray of shape (n_samples, n_features)
            The OOB samples.

        Returns
        -------
        y_pred : ndarray of shape (n_samples, 1, n_outputs)
            The OOB associated predictions.
        """
        y_pred = tree.predict(X, check_input=False)
        # Normalize to the 3-D (n_samples, 1, n_outputs) layout expected by
        # the OOB accumulation machinery, whatever the tree returned.
        if y_pred.ndim == 1:
            # single output regression
            y_pred = y_pred[:, np.newaxis, np.newaxis]
        else:
            # multioutput regression
            y_pred = y_pred[:, np.newaxis, :]
        return y_pred

    def _set_oob_score_and_attributes(self, X, y):
        """Compute and set the OOB score and attributes.

        Sets ``oob_prediction_`` and ``oob_score_`` (R^2 on the training
        set using out-of-bag predictions).

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data matrix.
        y : ndarray of shape (n_samples, n_outputs)
            The target matrix.
        """
        # _compute_oob_predictions returns (n_samples, 1, n_outputs);
        # drop the middle singleton axis first.
        self.oob_prediction_ = super()._compute_oob_predictions(X, y).squeeze(
            axis=1
        )
        if self.oob_prediction_.shape[-1] == 1:
            # drop the n_outputs axis if there is a single output
            self.oob_prediction_ = self.oob_prediction_.squeeze(axis=-1)
        self.oob_score_ = r2_score(y, self.oob_prediction_)

    def _compute_partial_dependence_recursion(self, grid, target_features):
        """Fast partial dependence computation.

        Parameters
        ----------
        grid : ndarray of shape (n_samples, n_target_features)
            The grid points on which the partial dependence should be
            evaluated.
        target_features : ndarray of shape (n_target_features)
            The set of target features for which the partial dependence
            should be evaluated.

        Returns
        -------
        averaged_predictions : ndarray of shape (n_samples,)
            The value of the partial dependence function on each grid point.
        """
        grid = np.asarray(grid, dtype=DTYPE, order='C')
        averaged_predictions = np.zeros(shape=grid.shape[0],
                                        dtype=np.float64, order='C')
        for tree in self.estimators_:
            # Note: we don't sum in parallel because the GIL isn't released in
            # the fast method.
            tree.tree_.compute_partial_dependence(
                grid, target_features, averaged_predictions)
        # Average over the forest
        averaged_predictions /= len(self.estimators_)
        return averaged_predictions
class RandomForestClassifier(ForestClassifier):
    """
    A random forest classifier.

    A random forest is a meta estimator that fits a number of decision tree
    classifiers on various sub-samples of the dataset and uses averaging to
    improve the predictive accuracy and control over-fitting.
    The sub-sample size is controlled with the `max_samples` parameter if
    `bootstrap=True` (default), otherwise the whole dataset is used to build
    each tree.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.

        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.

    criterion : {"gini", "entropy"}, default="gini"
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.
        Note: this parameter is tree-specific.

    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)` (same as "auto").
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.

    bootstrap : bool, default=True
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.

    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls both the randomness of the bootstrapping of the samples used
        when building trees (if ``bootstrap=True``) and the sampling of the
        features to consider when looking for the best split at each node
        (if ``max_features < n_features``).
        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    class_weight : {"balanced", "balanced_subsample"}, dict or list of dicts, \
            default=None
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.

        Note that for multioutput (including multilabel) weights should be
        defined for each class of every column in its own dict. For example,
        for four-class multilabel classification weights should be
        [{0: 1, 1: 1}, {0: 1, 1: 5}, {0: 1, 1: 1}, {0: 1, 1: 1}] instead of
        [{1:1}, {2:5}, {3:1}, {4:1}].

        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``

        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.

        For multi-output, the weights of each column of y will be multiplied.

        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.

    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.

        .. versionadded:: 0.22

    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.

        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.

        .. versionadded:: 0.22

    Attributes
    ----------
    base_estimator_ : DecisionTreeClassifier
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    classes_ : ndarray of shape (n_classes,) or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).

    n_classes_ : int or list
        The number of classes (single output problem), or a list containing the
        number of classes for each output (multi-output problem).

    n_features_ : int
        The number of features when ``fit`` is performed.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.

    oob_decision_function_ : ndarray of shape (n_samples, n_classes) or \
            (n_samples, n_classes, n_outputs)
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN. This attribute exists
        only when ``oob_score`` is True.

    See Also
    --------
    DecisionTreeClassifier, ExtraTreesClassifier

    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.

    The features are always randomly permuted at each split. Therefore,
    the best found split may vary, even with the same training data,
    ``max_features=n_features`` and ``bootstrap=False``, if the improvement
    of the criterion is identical for several splits enumerated during the
    search of the best split. To obtain a deterministic behaviour during
    fitting, ``random_state`` has to be fixed.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    Examples
    --------
    >>> from sklearn.ensemble import RandomForestClassifier
    >>> from sklearn.datasets import make_classification
    >>> X, y = make_classification(n_samples=1000, n_features=4,
    ...                            n_informative=2, n_redundant=0,
    ...                            random_state=0, shuffle=False)
    >>> clf = RandomForestClassifier(max_depth=2, random_state=0)
    >>> clf.fit(X, y)
    RandomForestClassifier(...)
    >>> print(clf.predict([[0, 0, 0, 0]]))
    [1]
    """
    def __init__(self,
                 n_estimators=100, *,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 ccp_alpha=0.0,
                 max_samples=None):
        # estimator_params names the attributes of this estimator that are
        # copied onto each DecisionTreeClassifier when the forest is built.
        super().__init__(
            base_estimator=DecisionTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight,
            max_samples=max_samples)

        # Store the tree-specific hyper-parameters unmodified, as required
        # by the scikit-learn get_params/set_params contract.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class RandomForestRegressor(ForestRegressor):
    """
    A random forest regressor.

    A random forest is a meta estimator that fits a number of classifying
    decision trees on various sub-samples of the dataset and uses averaging
    to improve the predictive accuracy and control over-fitting.
    The sub-sample size is controlled with the `max_samples` parameter if
    `bootstrap=True` (default), otherwise the whole dataset is used to build
    each tree.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.

        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.

    criterion : {"squared_error", "mse", "absolute_error", "poisson"}, \
            default="squared_error"
        The function to measure the quality of a split. Supported criteria
        are "squared_error" for the mean squared error, which is equal to
        variance reduction as feature selection criterion, "absolute_error"
        for the mean absolute error, and "poisson" which uses reduction in
        Poisson deviance to find splits.
        Training using "absolute_error" is significantly slower
        than when using "squared_error".

        .. versionadded:: 0.18
           Mean Absolute Error (MAE) criterion.

        .. versionadded:: 1.0
           Poisson criterion.

        .. deprecated:: 1.0
            Criterion "mse" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="squared_error"` which is equivalent.

        .. deprecated:: 1.0
            Criterion "mae" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="absolute_error"` which is equivalent.

    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.

    bootstrap : bool, default=True
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.

    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls both the randomness of the bootstrapping of the samples used
        when building trees (if ``bootstrap=True``) and the sampling of the
        features to consider when looking for the best split at each node
        (if ``max_features < n_features``).
        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.

        .. versionadded:: 0.22

    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.

        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.

        .. versionadded:: 0.22

    Attributes
    ----------
    base_estimator_ : DecisionTreeRegressor
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of DecisionTreeRegressor
        The collection of fitted sub-estimators.

    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

    n_features_ : int
        The number of features when ``fit`` is performed.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.

    oob_prediction_ : ndarray of shape (n_samples,) or (n_samples, n_outputs)
        Prediction computed with out-of-bag estimate on the training set.
        This attribute exists only when ``oob_score`` is True.

    See Also
    --------
    DecisionTreeRegressor, ExtraTreesRegressor

    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.

    The features are always randomly permuted at each split. Therefore,
    the best found split may vary, even with the same training data,
    ``max_features=n_features`` and ``bootstrap=False``, if the improvement
    of the criterion is identical for several splits enumerated during the
    search of the best split. To obtain a deterministic behaviour during
    fitting, ``random_state`` has to be fixed.

    The default value ``max_features="auto"`` uses ``n_features``
    rather than ``n_features / 3``. The latter was originally suggested in
    [1], whereas the former was more recently justified empirically in [2].

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    .. [2] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    Examples
    --------
    >>> from sklearn.ensemble import RandomForestRegressor
    >>> from sklearn.datasets import make_regression
    >>> X, y = make_regression(n_features=4, n_informative=2,
    ...                        random_state=0, shuffle=False)
    >>> regr = RandomForestRegressor(max_depth=2, random_state=0)
    >>> regr.fit(X, y)
    RandomForestRegressor(...)
    >>> print(regr.predict([[0, 0, 0, 0]]))
    [-8.32987858]
    """
    def __init__(self,
                 n_estimators=100, *,
                 criterion="squared_error",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 ccp_alpha=0.0,
                 max_samples=None):
        # estimator_params names the attributes of this estimator that are
        # copied onto each DecisionTreeRegressor when the forest is built.
        super().__init__(
            base_estimator=DecisionTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples)

        # Store the tree-specific hyper-parameters unmodified, as required
        # by the scikit-learn get_params/set_params contract.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class ExtraTreesClassifier(ForestClassifier):
    """
    An extra-trees classifier.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.

        .. versionchanged:: 0.22
            The default value of ``n_estimators`` changed from 10 to 100
            in 0.22.

    criterion : {"gini", "entropy"}, default="gini"
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.

    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
            Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
            Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
            ``min_impurity_split`` has been deprecated in favor of
            ``min_impurity_decrease`` in 0.19. The default value of
            ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
            will be removed in 1.0 (renaming of 0.25).
            Use ``min_impurity_decrease`` instead.

    bootstrap : bool, default=False
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.

    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls 3 sources of randomness:

        - the bootstrapping of the samples used when building trees
          (if ``bootstrap=True``)
        - the sampling of the features to consider when looking for the best
          split at each node (if ``max_features < n_features``)
        - the draw of the splits for each of the `max_features`

        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    class_weight : {"balanced", "balanced_subsample"}, dict or list of dicts, \
            default=None
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.

        Note that for multioutput (including multilabel) weights should be
        defined for each class of every column in its own dict. For example,
        for four-class multilabel classification weights should be
        [{0: 1, 1: 1}, {0: 1, 1: 5}, {0: 1, 1: 1}, {0: 1, 1: 1}] instead of
        [{1:1}, {2:5}, {3:1}, {4:1}].

        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``

        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.

        For multi-output, the weights of each column of y will be multiplied.

        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.

    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.

        .. versionadded:: 0.22

    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.

        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.

        .. versionadded:: 0.22

    Attributes
    ----------
    base_estimator_ : ExtraTreeClassifier
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of ExtraTreeClassifier
        The collection of fitted sub-estimators.

    classes_ : ndarray of shape (n_classes,) or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).

    n_classes_ : int or list
        The number of classes (single output problem), or a list containing the
        number of classes for each output (multi-output problem).

    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature. It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

    n_features_ : int
        The number of features when ``fit`` is performed.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.

    oob_decision_function_ : ndarray of shape (n_samples, n_classes) or \
            (n_samples, n_classes, n_outputs)
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN. This attribute exists
        only when ``oob_score`` is True.

    See Also
    --------
    sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
    RandomForestClassifier : Ensemble Classifier based on trees with optimal
        splits.

    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    Examples
    --------
    >>> from sklearn.ensemble import ExtraTreesClassifier
    >>> from sklearn.datasets import make_classification
    >>> X, y = make_classification(n_features=4, random_state=0)
    >>> clf = ExtraTreesClassifier(n_estimators=100, random_state=0)
    >>> clf.fit(X, y)
    ExtraTreesClassifier(random_state=0)
    >>> clf.predict([[0, 0, 0, 0]])
    array([1])
    """
    def __init__(self,
                 n_estimators=100, *,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 ccp_alpha=0.0,
                 max_samples=None):
        super().__init__(
            base_estimator=ExtraTreeClassifier(),
            n_estimators=n_estimators,
            # Constructor parameters copied onto every tree in the ensemble.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight,
            max_samples=max_samples)
        # Store each hyperparameter under its public name.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class ExtraTreesRegressor(ForestRegressor):
    """
    An extra-trees regressor.

    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.

        .. versionchanged:: 0.22
            The default value of ``n_estimators`` changed from 10 to 100
            in 0.22.

    criterion : {"squared_error", "mse", "absolute_error", "mae"}, \
            default="squared_error"
        The function to measure the quality of a split. Supported criteria
        are "squared_error" for the mean squared error, which is equal to
        variance reduction as feature selection criterion, and "absolute_error"
        for the mean absolute error.

        .. versionadded:: 0.18
            Mean Absolute Error (MAE) criterion.

        .. deprecated:: 1.0
            Criterion "mse" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="squared_error"` which is equivalent.

        .. deprecated:: 1.0
            Criterion "mae" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="absolute_error"` which is equivalent.

    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
            Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
            Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
            ``min_impurity_split`` has been deprecated in favor of
            ``min_impurity_decrease`` in 0.19. The default value of
            ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
            will be removed in 1.0 (renaming of 0.25).
            Use ``min_impurity_decrease`` instead.

    bootstrap : bool, default=False
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.

    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls 3 sources of randomness:

        - the bootstrapping of the samples used when building trees
          (if ``bootstrap=True``)
        - the sampling of the features to consider when looking for the best
          split at each node (if ``max_features < n_features``)
        - the draw of the splits for each of the `max_features`

        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.

        .. versionadded:: 0.22

    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.

        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.

        .. versionadded:: 0.22

    Attributes
    ----------
    base_estimator_ : ExtraTreeRegressor
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of ExtraTreeRegressor
        The collection of fitted sub-estimators.

    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature. It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

    n_features_ : int
        The number of features.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.

    oob_prediction_ : ndarray of shape (n_samples,) or (n_samples, n_outputs)
        Prediction computed with out-of-bag estimate on the training set.
        This attribute exists only when ``oob_score`` is True.

    See Also
    --------
    sklearn.tree.ExtraTreeRegressor : Base estimator for this ensemble.
    RandomForestRegressor : Ensemble regressor using trees with optimal splits.

    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.

    Examples
    --------
    >>> from sklearn.datasets import load_diabetes
    >>> from sklearn.model_selection import train_test_split
    >>> from sklearn.ensemble import ExtraTreesRegressor
    >>> X, y = load_diabetes(return_X_y=True)
    >>> X_train, X_test, y_train, y_test = train_test_split(
    ...     X, y, random_state=0)
    >>> reg = ExtraTreesRegressor(n_estimators=100, random_state=0).fit(
    ...     X_train, y_train)
    >>> reg.score(X_test, y_test)
    0.2708...
    """
    def __init__(self,
                 n_estimators=100, *,
                 criterion="squared_error",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 ccp_alpha=0.0,
                 max_samples=None):
        super().__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            # Constructor parameters copied onto every tree in the ensemble.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples)
        # Store each hyperparameter under its public name.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class RandomTreesEmbedding(BaseForest):
    """
    An ensemble of totally random trees.

    An unsupervised transformation of a dataset to a high-dimensional
    sparse representation. A datapoint is coded according to which leaf of
    each tree it is sorted into. Using a one-hot encoding of the leaves,
    this leads to a binary coding with as many ones as there are trees in
    the forest.

    The dimensionality of the resulting representation is
    ``n_out <= n_estimators * max_leaf_nodes``. If ``max_leaf_nodes == None``,
    the number of leaf nodes is at most ``n_estimators * 2 ** max_depth``.

    Read more in the :ref:`User Guide <random_trees_embedding>`.

    Parameters
    ----------
    n_estimators : int, default=100
        Number of trees in the forest.

        .. versionchanged:: 0.22
            The default value of ``n_estimators`` changed from 10 to 100
            in 0.22.

    max_depth : int, default=5
        The maximum depth of each tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` is the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
            Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` is the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
            Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
            ``min_impurity_split`` has been deprecated in favor of
            ``min_impurity_decrease`` in 0.19. The default value of
            ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
            will be removed in 1.0 (renaming of 0.25).
            Use ``min_impurity_decrease`` instead.

    sparse_output : bool, default=True
        Whether or not to return a sparse CSR matrix, as default behavior,
        or to return a dense array compatible with dense pipeline operators.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`transform`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls the generation of the random `y` used to fit the trees
        and the draw of the splits for each feature at the trees' nodes.
        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    Attributes
    ----------
    base_estimator_ : ExtraTreeRegressor instance
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of ExtraTreeRegressor instances
        The collection of fitted sub-estimators.

    feature_importances_ : ndarray of shape (n_features,)
        The feature importances (the higher, the more important the feature).

    n_features_ : int
        The number of features when ``fit`` is performed.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    one_hot_encoder_ : OneHotEncoder instance
        One-hot encoder used to create the sparse embedding.

    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.
    .. [2] Moosmann, F. and Triggs, B. and Jurie, F. "Fast discriminative
           visual codebooks using randomized clustering forests"
           NIPS 2007

    Examples
    --------
    >>> from sklearn.ensemble import RandomTreesEmbedding
    >>> X = [[0,0], [1,0], [0,1], [-1,0], [0,-1]]
    >>> random_trees = RandomTreesEmbedding(
    ...     n_estimators=5, random_state=0, max_depth=1).fit(X)
    >>> X_sparse_embedding = random_trees.transform(X)
    >>> X_sparse_embedding.toarray()
    array([[0., 1., 1., 0., 1., 0., 0., 1., 1., 0.],
           [0., 1., 1., 0., 1., 0., 0., 1., 1., 0.],
           [0., 1., 0., 1., 0., 1., 0., 1., 0., 1.],
           [1., 0., 1., 0., 1., 0., 1., 0., 1., 0.],
           [0., 1., 1., 0., 1., 0., 0., 1., 1., 0.]])
    """
    # Fixed per-tree settings, forwarded to each tree through
    # ``estimator_params`` below (they are not constructor arguments).
    criterion = "squared_error"
    max_features = 1
    def __init__(self,
                 n_estimators=100, *,
                 max_depth=5,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 sparse_output=True,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        super().__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            # Parameters copied onto every tree in the ensemble.
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state"),
            bootstrap=False,
            oob_score=False,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=None)
        # Store each hyperparameter under its public name.
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.sparse_output = sparse_output
    def _set_oob_score_and_attributes(self, X, y):
        # This estimator never bootstraps (``bootstrap=False`` above), so an
        # out-of-bag score is meaningless here.
        raise NotImplementedError("OOB score not supported by tree embedding")
    def fit(self, X, y=None, sample_weight=None):
        """
        Fit estimator.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Use ``dtype=np.float32`` for maximum
            efficiency. Sparse matrices are also supported, use sparse
            ``csc_matrix`` for maximum efficiency.

        y : Ignored
            Not used, present for API consistency by convention.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.

        Returns
        -------
        self : object
        """
        self.fit_transform(X, y, sample_weight=sample_weight)
        return self
    def fit_transform(self, X, y=None, sample_weight=None):
        """
        Fit estimator and transform dataset.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input data used to build forests. Use ``dtype=np.float32`` for
            maximum efficiency.

        y : Ignored
            Not used, present for API consistency by convention.

        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.

        Returns
        -------
        X_transformed : sparse matrix of shape (n_samples, n_out)
            Transformed dataset.
        """
        X = self._validate_data(X, accept_sparse=['csc'])
        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
            X.sort_indices()
        # The trees are grown against uniformly random targets; the caller's
        # ``y`` is ignored, only the resulting tree structure is used.
        rnd = check_random_state(self.random_state)
        y = rnd.uniform(size=X.shape[0])
        super().fit(X, y, sample_weight=sample_weight)
        # One-hot encode each sample's leaf index per tree into the embedding.
        self.one_hot_encoder_ = OneHotEncoder(sparse=self.sparse_output)
        return self.one_hot_encoder_.fit_transform(self.apply(X))
    def transform(self, X):
        """
        Transform dataset.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input data to be transformed. Use ``dtype=np.float32`` for maximum
            efficiency. Sparse matrices are also supported, use sparse
            ``csr_matrix`` for maximum efficiency.

        Returns
        -------
        X_transformed : sparse matrix of shape (n_samples, n_out)
            Transformed dataset.
        """
        check_is_fitted(self)
        return self.one_hot_encoder_.transform(self.apply(X))
| codeparrot/github-code-clean |
#
# actions.py: routines that actually run the svn client.
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
import os, shutil, re, sys, errno
import difflib, pprint
import xml.parsers.expat
from xml.dom.minidom import parseString
import svntest
from svntest import main, verify, tree, wc
from svntest import Failure
def no_sleep_for_timestamps():
  """Disable the post-operation timestamp sleep via the environment."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'
  os.environ[key] = 'yes'
def do_sleep_for_timestamps():
  """Re-enable the post-operation timestamp sleep via the environment."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'
  os.environ[key] = 'no'
def no_relocate_validation():
  """Disable relocate validation via the environment."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'
  os.environ[key] = 'yes'
def do_relocate_validation():
  """Re-enable relocate validation via the environment."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'
  os.environ[key] = 'no'
def setup_pristine_greek_repository():
  """Create the pristine repository and 'svn import' the greek tree.

  Exits the test run (sys.exit) if the import fails or produces
  unexpected output.  On success, installs failing hooks in the pristine
  repository so that any later attempt to modify it is rejected."""
  # these directories don't exist out of the box, so we may have to create them
  if not os.path.exists(main.general_wc_dir):
    os.makedirs(main.general_wc_dir)

  if not os.path.exists(main.general_repo_dir):
    os.makedirs(main.general_repo_dir) # this also creates all the intermediate dirs

  # If there's no pristine repos, create one.
  if not os.path.exists(main.pristine_greek_repos_dir):
    main.create_repos(main.pristine_greek_repos_dir)

    # if this is dav, gives us access rights to import the greek tree.
    if main.is_ra_type_dav():
      authz_file = os.path.join(main.work_dir, "authz")
      main.file_write(authz_file, "[/]\n* = rw\n")

    # dump the greek tree to disk.
    main.greek_state.write_to_disk(main.greek_dump_dir)

    # import the greek tree, using l:foo/p:bar
    ### todo: svn should not be prompting for auth info when using
    ### repositories with no auth/auth requirements
    exit_code, output, errput = main.run_svn(None, 'import', '-m',
                                             'Log message for revision 1.',
                                             main.greek_dump_dir,
                                             main.pristine_greek_repos_url)

    # check for any errors from the import
    if len(errput):
      display_lines("Errors during initial 'svn import':",
                    'STDERR', None, errput)
      sys.exit(1)

    # verify the printed output of 'svn import'.
    lastline = output.pop().strip()
    # Use a raw string and escape the trailing dot: the original pattern's
    # bare '.' matched any character, not just a literal period.
    match = re.search(r"(Committed|Imported) revision [0-9]+\.", lastline)
    if not match:
      print("ERROR: import did not succeed, while creating greek repos.")
      print("The final line from 'svn import' was:")
      print(lastline)
      sys.exit(1)
    output_tree = wc.State.from_commit(output)

    expected_output_tree = main.greek_state.copy(main.greek_dump_dir)
    expected_output_tree.tweak(verb='Adding',
                               contents=None)

    try:
      expected_output_tree.compare_and_display('output', output_tree)
    except tree.SVNTreeUnequal:
      verify.display_trees("ERROR: output of import command is unexpected.",
                           "OUTPUT TREE",
                           expected_output_tree.old_tree(),
                           output_tree.old_tree())
      sys.exit(1)

    # Finally, disallow any changes to the "pristine" repos.
    error_msg = "Don't modify the pristine repository"
    create_failing_hook(main.pristine_greek_repos_dir, 'start-commit', error_msg)
    create_failing_hook(main.pristine_greek_repos_dir, 'pre-lock', error_msg)
    create_failing_hook(main.pristine_greek_repos_dir, 'pre-revprop-change', error_msg)
######################################################################
def guarantee_empty_repository(path):
  """Ensure PATH holds a freshly created, empty svn repository.

  Refuses (and aborts the test run) if PATH is the shared pristine
  repository, which must never be modified."""
  if path == main.pristine_greek_repos_dir:
    print("ERROR: attempt to overwrite the pristine repos! Aborting.")
    sys.exit(1)

  # Wipe whatever is there and start from scratch.
  main.safe_rmtree(path)
  main.create_repos(path)
# Used by every test, so that they can run independently of one
# another. Every time this routine is called, it recursively copies
# the `pristine repos' to a new location.
# Note: make sure setup_pristine_greek_repository was called once before
# using this function.
def guarantee_greek_repository(path):
  """Guarantee that a local svn repository exists at PATH, containing
  nothing but the greek-tree at revision 1."""
  # Never clobber the shared pristine repository; every test copies
  # from it, so corrupting it would poison the whole run.
  if path == main.pristine_greek_repos_dir:
    print("ERROR: attempt to overwrite the pristine repos! Aborting.")
    sys.exit(1)

  # copy the pristine repository to PATH.
  main.safe_rmtree(path)
  if main.copy_repos(main.pristine_greek_repos_dir, path, 1):
    print("ERROR: copying repository failed.")
    sys.exit(1)

  # make the repos world-writeable, for mod_dav_svn's sake.
  main.chmod_tree(path, 0666, 0666)
def run_and_verify_atomic_ra_revprop_change(message,
                                            expected_stdout,
                                            expected_stderr,
                                            expected_exit,
                                            url, revision, propname,
                                            old_propval, propval,
                                            want_error):
  """Run the atomic-ra-revprop-change helper and check its output and
  exit code.  OLD_PROPVAL and PROPVAL are encoded as a proplist skel
  before being handed to the helper.
  For HTTP, the default HTTP library is used."""
  def atom(word):
    # A skel atom is "<length> <bytes>".
    return "%d %s" % (len(word), word)

  def skel_pair(nick, val):
    # A missing value contributes nothing to the skel.
    if val is None:
      return ""
    return "%s %s" % (atom(nick), atom(val))

  skel = "( %s %s )" % (skel_pair("old_value_p", old_propval),
                        skel_pair("value", propval))

  exit_code, out, err = main.run_atomic_ra_revprop_change(url, revision,
                                                          propname, skel,
                                                          want_error)
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_svnlook(message, expected_stdout,
                           expected_stderr, *varargs):
  """Like run_and_verify_svnlook2, but the expected exit code is
  assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
  # Any expected stderr output implies a failing exit code.
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
                                 expected_exit, *varargs)
def run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
                            expected_exit, *varargs):
  """Run svnlook command and check its output and exit code."""
  exit_code, stdout_lines, stderr_lines = main.run_svnlook(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, stdout_lines, stderr_lines
def run_and_verify_svnadmin(message, expected_stdout,
                            expected_stderr, *varargs):
  """Like run_and_verify_svnadmin2, but the expected exit code is
  assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
  # Any expected stderr output implies a failing exit code.
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
                                  expected_exit, *varargs)
def run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
                             expected_exit, *varargs):
  """Run svnadmin command and check its output and exit code."""
  exit_code, stdout_lines, stderr_lines = main.run_svnadmin(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, stdout_lines, stderr_lines
def run_and_verify_svnversion(message, wc_dir, trail_url,
                              expected_stdout, expected_stderr, *varargs):
  """like run_and_verify_svnversion2, but the expected exit code is
  assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
  # Any expected stderr output implies a failing exit code.
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnversion2(message, wc_dir, trail_url,
                                    expected_stdout, expected_stderr,
                                    expected_exit, *varargs)
def run_and_verify_svnversion2(message, wc_dir, trail_url,
                               expected_stdout, expected_stderr,
                               expected_exit, *varargs):
  """Run svnversion command and check its output and exit code."""
  # Build the argument list once rather than duplicating the call.
  cmd_args = [wc_dir]
  if trail_url is not None:
    cmd_args.append(trail_url)
  cmd_args.extend(varargs)

  exit_code, out, err = main.run_svnversion(*cmd_args)
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_svn(message, expected_stdout, expected_stderr, *varargs):
  """like run_and_verify_svn2, but the expected exit code is assumed to
  be 0 if no output is expected on stderr, and 1 otherwise."""
  # Decide whether the caller expects any stderr output at all; an
  # ExpectedOutput instance counts only if it would not match emptiness.
  err_expected = False
  if expected_stderr is not None:
    if isinstance(expected_stderr, verify.ExpectedOutput):
      err_expected = not expected_stderr.matches([])
    elif expected_stderr != []:
      err_expected = True

  if err_expected:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svn2(message, expected_stdout, expected_stderr,
                             expected_exit, *varargs)
def run_and_verify_svn2(message, expected_stdout, expected_stderr,
                        expected_exit, *varargs):
  """Invoke main.run_svn() with *VARARGS. Return exit code as int; stdout,
  stderr as lists of lines (including line terminators). For both
  EXPECTED_STDOUT and EXPECTED_STDERR, create an appropriate instance of
  verify.ExpectedOutput (if necessary):

  - If it is an array of strings, create a vanilla ExpectedOutput.
  - If it is a single string, create a RegexOutput that must match every
    line (for stdout) or any line (for stderr) of the expected output.
  - If it is already an instance of ExpectedOutput
    (e.g. UnorderedOutput), leave it alone.

  ...and invoke compare_and_display_lines() on MESSAGE, a label based
  on the name of the stream being compared (e.g. STDOUT), the
  ExpectedOutput instance, and the actual output.

  If EXPECTED_STDOUT is None, do not check stdout.
  EXPECTED_STDERR may not be None.

  If output checks pass, the expected and actual codes are compared.

  If a comparison fails, a Failure will be raised."""
  if expected_stderr is None:
    raise verify.SVNIncorrectDatatype("expected_stderr must not be None")

  # Work out up front whether stderr output is anticipated; this flag is
  # passed straight through as the first argument of main.run_svn().
  want_err = None
  if isinstance(expected_stderr, verify.ExpectedOutput):
    if not expected_stderr.matches([]):
      want_err = True
  elif expected_stderr != []:
    want_err = True

  exit_code, out, err = main.run_svn(want_err, *varargs)
  verify.verify_outputs(message, out, err, expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_load(repo_dir, dump_file_content,
                        bypass_prop_validation = False):
  "Runs 'svnadmin load' and reports any errors."
  if not isinstance(dump_file_content, list):
    raise TypeError("dump_file_content argument should have list type")

  expected_stderr = []
  # Assemble the subcommand arguments once; the optional flag goes
  # before the repository path.
  load_args = ['load', '--force-uuid', '--quiet']
  if bypass_prop_validation:
    load_args.append('--bypass-prop-validation')
  load_args.append(repo_dir)

  exit_code, output, errput = main.run_command_stdin(
    main.svnadmin_binary, expected_stderr, 0, 1, dump_file_content,
    *load_args)

  verify.verify_outputs("Unexpected stderr output", None, errput,
                        None, expected_stderr)
def run_and_verify_dump(repo_dir, deltas=False):
  "Runs 'svnadmin dump' and reports any errors, returning the dump content."
  # Build the argument list instead of duplicating the call.
  dump_args = ['dump']
  if deltas:
    dump_args.append('--deltas')
  dump_args.append(repo_dir)

  exit_code, output, errput = main.run_svnadmin(*dump_args)
  verify.verify_outputs("Missing expected output(s)", output, errput,
                        verify.AnyOutput, verify.AnyOutput)
  return output
def run_and_verify_svnrdump(dumpfile_content, expected_stdout,
                            expected_stderr, expected_exit, *varargs):
  """Runs 'svnrdump dump|load' depending on dumpfile_content and
  reports any errors.  Returns the stdout lines."""
  exit_code, output, err = main.run_svnrdump(dumpfile_content, *varargs)

  # Since main.run_svnrdump() uses binary mode, normalize the stderr
  # line endings on Windows ourselves.
  if sys.platform == 'win32':
    err = [x.replace('\r\n', '\n') for x in err]

  # Ignore "W200007" warning lines.  The previous implementation deleted
  # matching lines by index while walking a pre-deletion copy of the list;
  # after the first deletion those indices were stale, so a second match
  # deleted the wrong line.  Filtering builds the result in one pass.
  err = [line for line in err if not re.search("warning: W200007", line)]

  verify.verify_outputs("Unexpected output", output, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
  return output
def run_and_verify_svnmucc(message, expected_stdout, expected_stderr,
                           *varargs):
  """Run svnmucc command and check its output"""
  # Any expected stderr output implies a failing exit code.
  if expected_stderr is not None and expected_stderr != []:
    expected_exit = 1
  else:
    expected_exit = 0
  return run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
                                 expected_exit, *varargs)
def run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
                            expected_exit, *varargs):
  """Run svnmucc command and check its output and exit code."""
  exit_code, stdout_lines, stderr_lines = main.run_svnmucc(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, stdout_lines, stderr_lines
def load_repo(sbox, dumpfile_path = None, dump_str = None,
              bypass_prop_validation = False):
  """Load DUMP_STR (or, if it is empty/None, the contents of
  DUMPFILE_PATH) into a freshly created repository in SBOX and check out
  a working copy from it.  Returns the dump text that was loaded."""
  if not dump_str:
    # Read the dump file and close the handle explicitly instead of
    # leaking it until the garbage collector gets around to it.
    dump_fp = open(dumpfile_path, "rb")
    try:
      dump_str = dump_fp.read()
    finally:
      dump_fp.close()

  # Create a virgin repos and working copy
  main.safe_rmtree(sbox.repo_dir, 1)
  main.safe_rmtree(sbox.wc_dir, 1)
  main.create_repos(sbox.repo_dir)

  # Load the mergetracking dumpfile into the repos, and check it out the repo
  run_and_verify_load(sbox.repo_dir, dump_str.splitlines(True),
                      bypass_prop_validation)
  run_and_verify_svn(None, None, [], "co", sbox.repo_url, sbox.wc_dir)

  return dump_str
def expected_noop_update_output(rev):
  """Return an ExpectedOutput object describing what we'd expect to
  see from an update to revision REV that was effectively a no-op (no
  server changes transmitted)."""
  pattern = "Updating '.*':|At revision %d." % (rev)
  return verify.createExpectedOutput(pattern, "no-op update")
######################################################################
# Subversion Actions
#
# These are all routines that invoke 'svn' in particular ways, and
# then verify the results by comparing expected trees with actual
# trees.
#
def run_and_verify_checkout2(do_remove,
                             URL, wc_dir_name, output_tree, disk_tree,
                             singleton_handler_a = None,
                             a_baton = None,
                             singleton_handler_b = None,
                             b_baton = None,
                             *args):
  """Checkout the URL into a new directory WC_DIR_NAME. *ARGS are any
  extra optional args to the checkout subcommand.

  The subcommand output will be verified against OUTPUT_TREE,
  and the working copy itself will be verified against DISK_TREE.
  For the latter comparison, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details. Return if successful, raise
  on failure.

  WC_DIR_NAME is deleted if DO_REMOVE is True.
  """
  # Accept wc.State inputs; the comparisons below operate on old-style
  # trees.
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(disk_tree, wc.State):
    disk_tree = disk_tree.old_tree()

  # Remove dir if it's already there, unless this is a forced checkout.
  # In that case assume we want to test a forced checkout's toleration
  # of obstructing paths.
  if do_remove:
    main.safe_rmtree(wc_dir_name)

  # Checkout and make a tree of the output, using l:foo/p:bar
  ### todo: svn should not be prompting for auth info when using
  ### repositories with no auth/auth requirements
  exit_code, output, errput = main.run_svn(None, 'co',
                                           URL, wc_dir_name, *args)
  actual = tree.build_tree_from_checkout(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise

  # Create a tree by scanning the working copy
  actual = tree.build_tree_from_wc(wc_dir_name)

  # Verify expected disk against actual disk.
  try:
    tree.compare_trees("disk", actual, disk_tree,
                       singleton_handler_a, a_baton,
                       singleton_handler_b, b_baton)
  except tree.SVNTreeUnequal:
    print("ACTUAL DISK TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree,
                            singleton_handler_a = None,
                            a_baton = None,
                            singleton_handler_b = None,
                            b_baton = None,
                            *args):
  """Same as run_and_verify_checkout2(), but without the DO_REMOVE arg.
  WC_DIR_NAME is deleted if present unless the '--force' option is passed
  in *ARGS."""
  # A forced checkout must tolerate obstructing paths, so only clear the
  # target directory for ordinary checkouts.
  do_remove = '--force' not in args
  return run_and_verify_checkout2(do_remove,
                                  URL, wc_dir_name, output_tree, disk_tree,
                                  singleton_handler_a, a_baton,
                                  singleton_handler_b, b_baton,
                                  *args)
def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree,
                          *args):
  """Export the URL into a new directory WC_DIR_NAME.

  The subcommand output will be verified against OUTPUT_TREE,
  and the exported copy itself will be verified against DISK_TREE.
  Return if successful, raise on failure.
  """
  # Unlike run_and_verify_checkout2(), callers must pass wc.State
  # instances here; convert them to old-style trees for comparison.
  assert isinstance(output_tree, wc.State)
  assert isinstance(disk_tree, wc.State)

  disk_tree = disk_tree.old_tree()
  output_tree = output_tree.old_tree()

  # Export and make a tree of the output, using l:foo/p:bar
  ### todo: svn should not be prompting for auth info when using
  ### repositories with no auth/auth requirements
  exit_code, output, errput = main.run_svn(None, 'export',
                                           URL, export_dir_name, *args)
  actual = tree.build_tree_from_checkout(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, export_dir_name + os.sep)
    raise

  # Create a tree by scanning the working copy. Don't ignore
  # the .svn directories so that we generate an error if they
  # happen to show up.
  actual = tree.build_tree_from_wc(export_dir_name, ignore_svn=False)

  # Verify expected disk against actual disk.
  try:
    tree.compare_trees("disk", actual, disk_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL DISK TREE:")
    tree.dump_tree_script(actual, export_dir_name + os.sep)
    raise
# run_and_verify_log_xml
class LogEntry:
  """One log entry parsed from 'svn log --xml' output."""
  def __init__(self, revision, changed_paths=None, revprops=None):
    self.revision = revision
    # Default to fresh empty dicts rather than sharing a mutable default.
    self.changed_paths = {} if changed_paths is None else changed_paths
    self.revprops = {} if revprops is None else revprops

  def assert_changed_paths(self, changed_paths):
    """Assert that changed_paths is the same as this entry's changed_paths
    Raises svntest.Failure if not.
    """
    raise Failure('NOT IMPLEMENTED')

  def assert_revprops(self, revprops):
    """Assert that the dict revprops is the same as this entry's revprops.
    Raises svntest.Failure if not.
    """
    if self.revprops != revprops:
      diff = difflib.ndiff(pprint.pformat(revprops).splitlines(),
                           pprint.pformat(self.revprops).splitlines())
      raise Failure('\n' + '\n'.join(diff))
class LogParser:
  # Expat-based parser for 'svn log --xml' output, producing LogEntry
  # objects.  Element handlers are looked up dynamically by name:
  # handle_start_element dispatches to '<name>_start' and
  # handle_end_element to '<name>_end'.
  def parse(self, data):
    """Return a list of LogEntrys parsed from the sequence of strings data.

    This is the only method of interest to callers.
    """
    try:
      for i in data:
        self.parser.Parse(i)
      # Signal end-of-input to expat.
      self.parser.Parse('', True)
    except xml.parsers.expat.ExpatError, e:
      raise verify.SVNUnexpectedStdout('%s\n%s\n' % (e, ''.join(data),))
    return self.entries

  def __init__(self):
    # for expat
    self.parser = xml.parsers.expat.ParserCreate()
    self.parser.StartElementHandler = self.handle_start_element
    self.parser.EndElementHandler = self.handle_end_element
    self.parser.CharacterDataHandler = self.handle_character_data
    # Ignore some things.
    self.ignore_elements('log', 'paths', 'path', 'revprops')
    self.ignore_tags('logentry_end', 'author_start', 'date_start', 'msg_start')
    # internal state
    self.cdata = []
    self.property = None
    # the result
    self.entries = []

  def ignore(self, *args, **kwargs):
    # Shared no-op handler: just discard any accumulated character data.
    del self.cdata[:]
  def ignore_tags(self, *args):
    # Bind the no-op handler under each given handler name.
    for tag in args:
      setattr(self, tag, self.ignore)
  def ignore_elements(self, *args):
    # Ignore both the start and end tags of each named element.
    for element in args:
      self.ignore_tags(element + '_start', element + '_end')

  # expat handlers
  def handle_start_element(self, name, attrs):
    getattr(self, name + '_start')(attrs)
  def handle_end_element(self, name):
    getattr(self, name + '_end')()
  def handle_character_data(self, data):
    # Expat may deliver an element's text in several pieces; accumulate.
    self.cdata.append(data)

  # element handler utilities
  def use_cdata(self):
    # Return the accumulated text (stripped) and reset the buffer.
    result = ''.join(self.cdata).strip()
    del self.cdata[:]
    return result
  def svn_prop(self, name):
    self.entries[-1].revprops['svn:' + name] = self.use_cdata()

  # element handlers
  def logentry_start(self, attrs):
    self.entries.append(LogEntry(int(attrs['revision'])))
  def author_end(self):
    self.svn_prop('author')
  def msg_end(self):
    self.svn_prop('log')
  def date_end(self):
    # svn:date could be anything, so just note its presence.
    self.cdata[:] = ['']
    self.svn_prop('date')
  def property_start(self, attrs):
    self.property = attrs['name']
  def property_end(self):
    self.entries[-1].revprops[self.property] = self.use_cdata()
def run_and_verify_log_xml(message=None, expected_paths=None,
                           expected_revprops=None, expected_stdout=None,
                           expected_stderr=None, args=None):
  """Call run_and_verify_svn with log --xml and args (optional) as command
  arguments, and pass along message, expected_stdout, and expected_stderr.

  If message is None, pass the svn log command as message.

  expected_paths checking is not yet implemented.

  expected_revprops is an optional list of dicts, compared to each
  revision's revprops.  The list must be in the same order the log entries
  come in.  Any svn:date revprops in the dicts must be '' in order to
  match, as the actual dates could be anything.

  expected_paths and expected_revprops are ignored if expected_stdout or
  expected_stderr is specified.
  """
  # Normalize ARGS here rather than using a mutable default argument.
  if args is None:
    args = []

  if message is None:
    message = ' '.join(args)

  # We'll parse the output unless the caller specifies expected_stderr or
  # expected_stdout for run_and_verify_svn.
  parse = True
  if expected_stderr is None:
    expected_stderr = []
  else:
    parse = False
  if expected_stdout is not None:
    parse = False

  log_args = list(args)
  if expected_paths is not None:
    log_args.append('-v')

  (exit_code, stdout, stderr) = run_and_verify_svn(
    message, expected_stdout, expected_stderr,
    'log', '--xml', *log_args)
  if not parse:
    return

  # Pair each parsed entry with the caller's expectations by position.
  for index, entry in enumerate(LogParser().parse(stdout)):
    if expected_revprops is not None:
      entry.assert_revprops(expected_revprops[index])
    if expected_paths is not None:
      entry.assert_changed_paths(expected_paths[index])
def verify_update(actual_output,
                  actual_mergeinfo_output,
                  actual_elision_output,
                  wc_dir_name,
                  output_tree,
                  mergeinfo_output_tree,
                  elision_output_tree,
                  disk_tree,
                  status_tree,
                  singleton_handler_a=None,
                  a_baton=None,
                  singleton_handler_b=None,
                  b_baton=None,
                  check_props=False):
  """Verify update of WC_DIR_NAME.

  The subcommand output (found in ACTUAL_OUTPUT, ACTUAL_MERGEINFO_OUTPUT,
  and ACTUAL_ELISION_OUTPUT) will be verified against OUTPUT_TREE,
  MERGEINFO_OUTPUT_TREE, and ELISION_OUTPUT_TREE respectively (if any of
  these is provided, they may be None in which case a comparison is not
  done).  The working copy itself will be verified against DISK_TREE (if
  provided), and the working copy's 'svn status' output will be verified
  against STATUS_TREE (if provided).  (This is a good way to check that
  revision numbers were bumped.)

  Return if successful, raise on failure.

  For the comparison with DISK_TREE, pass SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B to tree.compare_trees -- see that function's doc
  string for more details.  If CHECK_PROPS is set, then disk
  comparison will examine props."""
  # Normalize every wc.State argument to an old-style tree before
  # comparing.
  if isinstance(actual_output, wc.State):
    actual_output = actual_output.old_tree()
  if isinstance(actual_mergeinfo_output, wc.State):
    actual_mergeinfo_output = actual_mergeinfo_output.old_tree()
  if isinstance(actual_elision_output, wc.State):
    actual_elision_output = actual_elision_output.old_tree()
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(mergeinfo_output_tree, wc.State):
    mergeinfo_output_tree = mergeinfo_output_tree.old_tree()
  if isinstance(elision_output_tree, wc.State):
    elision_output_tree = elision_output_tree.old_tree()
  if isinstance(disk_tree, wc.State):
    disk_tree = disk_tree.old_tree()
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()

  # Verify actual output against expected output.
  if output_tree:
    try:
      tree.compare_trees("output", actual_output, output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL OUTPUT TREE:")
      tree.dump_tree_script(actual_output, wc_dir_name + os.sep)
      raise

  # Verify actual mergeinfo recording output against expected output.
  if mergeinfo_output_tree:
    try:
      tree.compare_trees("mergeinfo_output", actual_mergeinfo_output,
                         mergeinfo_output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL MERGEINFO OUTPUT TREE:")
      tree.dump_tree_script(actual_mergeinfo_output,
                            wc_dir_name + os.sep)
      raise

  # Verify actual mergeinfo elision output against expected output.
  if elision_output_tree:
    try:
      tree.compare_trees("elision_output", actual_elision_output,
                         elision_output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL ELISION OUTPUT TREE:")
      tree.dump_tree_script(actual_elision_output,
                            wc_dir_name + os.sep)
      raise

  # Create a tree by scanning the working copy, and verify it
  if disk_tree:
    actual_disk = tree.build_tree_from_wc(wc_dir_name, check_props)
    try:
      tree.compare_trees("disk", actual_disk, disk_tree,
                         singleton_handler_a, a_baton,
                         singleton_handler_b, b_baton)
    except tree.SVNTreeUnequal:
      print("EXPECTED DISK TREE:")
      tree.dump_tree_script(disk_tree)
      print("ACTUAL DISK TREE:")
      tree.dump_tree_script(actual_disk)
      raise

  # Verify via 'status' command too, if possible.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
def verify_disk(wc_dir_name, disk_tree, check_props=False):
  """Verify WC_DIR_NAME against DISK_TREE.  If CHECK_PROPS is set,
  the comparison will examine props.  Returns if successful, raises on
  failure."""
  # Delegate to verify_update(), requesting only the disk comparison.
  verify_update(None, None, None, wc_dir_name,
                None, None, None,
                disk_tree, None,
                check_props=check_props)
def run_and_verify_update(wc_dir_name,
                          output_tree, disk_tree, status_tree,
                          error_re_string = None,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None,
                          check_props = False,
                          *args):
  """Update WC_DIR_NAME.  *ARGS are any extra optional args to the
  update subcommand.  NOTE: If *ARGS is specified at all, explicit
  target paths must be passed in *ARGS as well (or a default `.' will
  be chosen by the 'svn' binary).  This allows the caller to update
  many items in a single working copy dir, but still verify the entire
  working copy dir.

  If ERROR_RE_STRING, the update must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  If OUTPUT_TREE is not None, the subcommand output will be verified
  against OUTPUT_TREE.  If DISK_TREE is not None, the working copy
  itself will be verified against DISK_TREE.  If STATUS_TREE is not
  None, the 'svn status' output will be verified against STATUS_TREE.
  (This is a good way to check that revision numbers were bumped.)

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  Return if successful, raise on failure."""
  # Update and make a tree of the output.
  if len(args):
    exit_code, output, errput = main.run_svn(error_re_string, 'up', *args)
  else:
    exit_code, output, errput = main.run_svn(error_re_string,
                                             'up', wc_dir_name,
                                             *args)

  if error_re_string:
    # The caller expects the update to fail: succeed only if some stderr
    # line matches the supplied pattern.
    rm = re.compile(error_re_string)
    for line in errput:
      match = rm.search(line)
      if match:
        return
    raise main.SVNUnmatchedError

  actual = wc.State.from_checkout(output)
  verify_update(actual, None, None, wc_dir_name,
                output_tree, None, None, disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def run_and_parse_info(*args):
  """Run 'svn info ARGS' and parse its output into a list of dicts,
  one dict per reported node."""
  # the returned array
  all_infos = []

  # per-target variables
  iter_info = {}
  prev_key = None
  lock_comment_lines = 0
  lock_comments = []

  exit_code, output, errput = main.run_svn(None, 'info', *args)

  for line in output:
    line = line[:-1] # trim '\n'

    if lock_comment_lines > 0:
      # mop up any lock comment lines
      lock_comments.append(line)
      lock_comment_lines = lock_comment_lines - 1
      if lock_comment_lines == 0:
        # All comment lines collected; store them under the key that
        # announced the comment.
        iter_info[prev_key] = lock_comments
    elif len(line) == 0:
      # separator line between items
      all_infos.append(iter_info)
      iter_info = {}
      prev_key = None
      lock_comment_lines = 0
      lock_comments = []
    elif line[0].isspace():
      # continuation line (for tree conflicts)
      iter_info[prev_key] += line[1:]
    else:
      # normal line
      key, value = line.split(':', 1)

      if re.search(' \(\d+ lines?\)$', key):
        # numbered continuation lines, e.g. "Lock Comment (2 lines):";
        # remember how many following lines belong to this key.
        match = re.match('^(.*) \((\d+) lines?\)$', key)
        key = match.group(1)
        lock_comment_lines = int(match.group(2))
      elif len(value) > 1:
        # normal normal line
        iter_info[key] = value[1:]
      else:
        ### originally added for "Tree conflict:\n" lines;
        ### tree-conflicts output format has changed since then
        # continuation lines are implicit (prefixed by whitespace)
        iter_info[key] = ''

      prev_key = key

  return all_infos
def run_and_verify_info(expected_infos, *args):
  """Run 'svn info' with the arguments in *ARGS and verify the results
  against expected_infos.  The latter should be a list of dicts, one dict
  per reported node, in the order in which the 'Path' fields of the output
  will appear after sorting them as Python strings.  (The dicts in
  EXPECTED_INFOS, however, need not have a 'Path' key.)

  In the dicts, each key is the before-the-colon part of the 'svn info' output,
  and each value is either None (meaning that the key should *not* appear in
  the 'svn info' output) or a regex matching the output value.  Output lines
  not matching a key in the dict are ignored.

  Return if successful, raise on failure."""
  actual_infos = run_and_parse_info(*args)
  # Sort by path so the ordering matches the contract described above.
  actual_infos.sort(key=lambda info: info['Path'])

  try:
    # zip() won't complain, so check this manually
    if len(actual_infos) != len(expected_infos):
      raise verify.SVNUnexpectedStdout(
        "Expected %d infos, found %d infos"
        % (len(expected_infos), len(actual_infos)))

    for actual, expected in zip(actual_infos, expected_infos):
      # compare dicts
      for key, value in expected.items():
        assert ':' not in key # caller passed impossible expectations?
        if value is None and key in actual:
          raise main.SVNLineUnequal("Found unexpected key '%s' with value '%s'"
                                    % (key, actual[key]))
        if value is not None and key not in actual:
          raise main.SVNLineUnequal("Expected key '%s' (with value '%s') "
                                    "not found" % (key, value))
        if value is not None and not re.match(value, actual[key]):
          raise verify.SVNUnexpectedStdout("Values of key '%s' don't match:\n"
                                           "  Expected: '%s' (regex)\n"
                                           "  Found:    '%s' (string)\n"
                                           % (key, value, actual[key]))

  except:
    # Dump both sides for diagnosis, then re-raise the original error.
    sys.stderr.write("Bad 'svn info' output:\n"
                     "  Received: %s\n"
                     "  Expected: %s\n"
                     % (actual_infos, expected_infos))
    raise
def run_and_verify_merge(dir, rev1, rev2, url1, url2,
                         output_tree,
                         mergeinfo_output_tree,
                         elision_output_tree,
                         disk_tree, status_tree, skip_tree,
                         error_re_string = None,
                         singleton_handler_a = None,
                         a_baton = None,
                         singleton_handler_b = None,
                         b_baton = None,
                         check_props = False,
                         dry_run = True,
                         *args):
  """Run 'svn merge URL1@REV1 URL2@REV2 DIR' if URL2 is not None
  (for a three-way merge between URLs and WC).

  If URL2 is None, run 'svn merge -rREV1:REV2 URL1 DIR'.  If both REV1
  and REV2 are None, leave off the '-r' argument.

  If ERROR_RE_STRING, the merge must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  The subcommand output will be verified against OUTPUT_TREE.  Output
  related to mergeinfo notifications will be verified against
  MERGEINFO_OUTPUT_TREE if that is not None.  Output related to mergeinfo
  elision will be verified against ELISION_OUTPUT_TREE if that is not None.
  The working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be compared.  The
  'skipped' merge output will be compared to SKIP_TREE.

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  If DRY_RUN is set then a --dry-run merge will be carried out first and
  the output compared with that of the full merge.

  Return if successful, raise on failure.

  *ARGS are any extra optional args to the merge subcommand.
  NOTE: If *ARGS is specified at all, an explicit target path must be passed
  in *ARGS as well. This allows the caller to merge into single items inside
  the working copy, but still verify the entire working copy dir. """
  # Build the merge command: a three-way merge names two URL@REV sources,
  # a two-way merge names one URL, optionally with a '-rREV1:REV2' range.
  merge_command = [ "merge" ]
  if url2:
    merge_command.extend((url1 + "@" + str(rev1), url2 + "@" + str(rev2)))
  else:
    if not (rev1 is None and rev2 is None):
      merge_command.append("-r" + str(rev1) + ":" + str(rev2))
    merge_command.append(url1)
  # Only default the target to DIR when no explicit extra args were
  # given (see the NOTE in the docstring).
  if len(args) == 0:
    merge_command.append(dir)
  merge_command = tuple(merge_command)

  if dry_run:
    # Snapshot the WC, run the merge with --dry-run, then verify that
    # the dry run did not modify the working copy at all.
    pre_disk = tree.build_tree_from_wc(dir)
    dry_run_command = merge_command + ('--dry-run',)
    dry_run_command = dry_run_command + args
    exit_code, out_dry, err_dry = main.run_svn(error_re_string,
                                               *dry_run_command)
    post_disk = tree.build_tree_from_wc(dir)
    try:
      tree.compare_trees("disk", post_disk, pre_disk)
    except tree.SVNTreeError:
      print("=============================================================")
      print("Dry-run merge altered working copy")
      print("=============================================================")
      raise

  # Update and make a tree of the output.
  merge_command = merge_command + args
  exit_code, out, err = main.run_svn(error_re_string, *merge_command)

  if error_re_string:
    # An error was expected: verify stderr against the regex and stop.
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
    return
  elif err:
    raise verify.SVNUnexpectedStderr(err)

  # Split the output into that related to application of the actual diff
  # and that related to the recording of mergeinfo describing the merge.
  merge_diff_out = []
  mergeinfo_notification_out = []
  mergeinfo_elision_out = []
  mergeinfo_notifications = False
  elision_notifications = False
  for line in out:
    # '--- Recording'/'--- Eliding' headers switch the classification
    # state; any other merge header or conflict/skip summary switches
    # back to plain diff output.
    if line.startswith('--- Recording'):
      mergeinfo_notifications = True
      elision_notifications = False
    elif line.startswith('--- Eliding'):
      mergeinfo_notifications = False
      elision_notifications = True
    elif line.startswith('--- Merging') or \
         line.startswith('--- Reverse-merging') or \
         line.startswith('Summary of conflicts') or \
         line.startswith('Skipped missing target'):
      mergeinfo_notifications = False
      elision_notifications = False

    if mergeinfo_notifications:
      mergeinfo_notification_out.append(line)
    elif elision_notifications:
      mergeinfo_elision_out.append(line)
    else:
      merge_diff_out.append(line)

  if dry_run and merge_diff_out != out_dry:
    # Due to the way ra_serf works, it's possible that the dry-run and
    # real merge operations did the same thing, but the output came in
    # a different order. Let's see if maybe that's the case by comparing
    # the outputs as unordered sets rather than as lists.
    #
    # This now happens for other RA layers with modern APR because the
    # hash order now varies.
    #
    # The different orders of the real and dry-run merges may cause
    # the "Merging rX through rY into" lines to be duplicated a
    # different number of times in the two outputs. The list-set
    # conversion removes duplicates so these differences are ignored.
    # It also removes "U some/path" duplicate lines. Perhaps we
    # should avoid that?
    out_copy = set(merge_diff_out[:])
    out_dry_copy = set(out_dry[:])

    if out_copy != out_dry_copy:
      print("=============================================================")
      print("Merge outputs differ")
      print("The dry-run merge output:")
      for x in out_dry:
        sys.stdout.write(x)
      print("The full merge output:")
      for x in out:
        sys.stdout.write(x)
      print("=============================================================")
      raise main.SVNUnmatchedError

  # Singleton handlers for the skip-tree comparison below: any node on
  # only one side of the comparison is a hard failure.
  def missing_skip(a, b):
    print("=============================================================")
    print("Merge failed to skip: " + a.path)
    print("=============================================================")
    raise Failure

  def extra_skip(a, b):
    print("=============================================================")
    print("Merge unexpectedly skipped: " + a.path)
    print("=============================================================")
    raise Failure

  # Verify the skipped paths against SKIP_TREE.
  myskiptree = tree.build_tree_from_skipped(out)
  if isinstance(skip_tree, wc.State):
    skip_tree = skip_tree.old_tree()
  try:
    tree.compare_trees("skip", myskiptree, skip_tree,
                       extra_skip, None, missing_skip, None)
  except tree.SVNTreeUnequal:
    print("ACTUAL SKIP TREE:")
    tree.dump_tree_script(myskiptree, dir + os.sep)
    raise

  # Convert the three output classes into states and delegate the
  # output/mergeinfo/elision/disk/status verification to verify_update().
  actual_diff = svntest.wc.State.from_checkout(merge_diff_out, False)
  actual_mergeinfo = svntest.wc.State.from_checkout(mergeinfo_notification_out,
                                                    False)
  actual_elision = svntest.wc.State.from_checkout(mergeinfo_elision_out,
                                                  False)
  verify_update(actual_diff, actual_mergeinfo, actual_elision, dir,
                output_tree, mergeinfo_output_tree, elision_output_tree,
                disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def run_and_verify_patch(dir, patch_path,
                         output_tree, disk_tree, status_tree, skip_tree,
                         error_re_string=None,
                         check_props=False,
                         dry_run=True,
                         *args):
  """Run 'svn patch patch_path DIR'.

  If ERROR_RE_STRING, 'svn patch' must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  The subcommand output will be verified against OUTPUT_TREE, and the
  working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be compared.
  The 'skipped' merge output will be compared to SKIP_TREE.

  If CHECK_PROPS is set, then disk comparison will examine props.

  If DRY_RUN is set then a --dry-run patch will be carried out first and
  the output compared with that of the full patch application.

  Returns if successful, raises on failure."""
  patch_command = [ "patch" ]
  patch_command.append(patch_path)
  patch_command.append(dir)
  patch_command = tuple(patch_command)

  if dry_run:
    # Snapshot the WC, run with --dry-run, and verify that the dry run
    # left the working copy untouched.
    pre_disk = tree.build_tree_from_wc(dir)
    dry_run_command = patch_command + ('--dry-run',)
    dry_run_command = dry_run_command + args
    exit_code, out_dry, err_dry = main.run_svn(error_re_string,
                                               *dry_run_command)
    post_disk = tree.build_tree_from_wc(dir)
    try:
      tree.compare_trees("disk", post_disk, pre_disk)
    except tree.SVNTreeError:
      print("=============================================================")
      print("'svn patch --dry-run' altered working copy")
      print("=============================================================")
      raise

  # Update and make a tree of the output.
  patch_command = patch_command + args
  # Pass True as the error expectation: stderr is checked by hand below
  # rather than by run_svn itself.
  exit_code, out, err = main.run_svn(True, *patch_command)

  if error_re_string:
    # An error was expected: scan stderr for any line matching the regex.
    rm = re.compile(error_re_string)
    match = None
    for line in err:
      match = rm.search(line)
      if match:
        break
    if not match:
      raise main.SVNUnmatchedError
  elif err:
    print("UNEXPECTED STDERR:")
    for x in err:
      sys.stdout.write(x)
    raise verify.SVNUnexpectedStderr

  if dry_run and out != out_dry:
    # APR hash order means the output order can vary, assume everything is OK
    # if only the order changes.
    out_dry_expected = svntest.verify.UnorderedOutput(out)
    verify.compare_and_display_lines('dry-run patch output not as expected',
                                     '', out_dry_expected, out_dry)

  # Singleton handlers for the skip-tree comparison below: any node on
  # only one side of the comparison is a hard failure.
  def missing_skip(a, b):
    print("=============================================================")
    print("'svn patch' failed to skip: " + a.path)
    print("=============================================================")
    raise Failure

  def extra_skip(a, b):
    print("=============================================================")
    print("'svn patch' unexpectedly skipped: " + a.path)
    print("=============================================================")
    raise Failure

  # Verify the skipped paths against SKIP_TREE.
  myskiptree = tree.build_tree_from_skipped(out)
  if isinstance(skip_tree, wc.State):
    skip_tree = skip_tree.old_tree()
  tree.compare_trees("skip", myskiptree, skip_tree,
                     extra_skip, None, missing_skip, None)

  mytree = tree.build_tree_from_checkout(out, 0)

  # when the expected output is a list, we want a line-by-line
  # comparison to happen instead of a tree comparison
  if (isinstance(output_tree, list)
      or isinstance(output_tree, verify.UnorderedOutput)):
    verify.verify_outputs(None, out, err, output_tree, error_re_string)
    output_tree = None

  verify_update(mytree, None, None, dir,
                output_tree, None, None, disk_tree, status_tree,
                check_props=check_props)
def run_and_verify_mergeinfo(error_re_string = None,
                             expected_output = [],
                             *args):
  """Run 'svn mergeinfo ARGS', and compare the result against
  EXPECTED_OUTPUT, a list of string representations of revisions
  expected in the output.  Raise an exception if an unexpected
  output is encountered."""
  mergeinfo_command = ["mergeinfo"]
  mergeinfo_command.extend(args)
  exit_code, out, err = main.run_svn(error_re_string, *mergeinfo_command)

  if error_re_string:
    # An error was expected: verify stderr against the regex and stop.
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
    return

  # Each output line is a revision prefixed by a one-character marker;
  # strip the marker and trailing whitespace, and drop empty entries.
  out = sorted([_f for _f in [x.rstrip()[1:] for x in out] if _f])
  # Sort a copy: the previous in-place .sort() mutated the caller's
  # list AND the shared mutable default argument.
  expected_output = sorted(expected_output)
  extra_out = []
  if out != expected_output:
    # Compute the two difference sets for the error message.
    exp_hash = dict.fromkeys(expected_output)
    for rev in out:
      if rev in exp_hash:
        del(exp_hash[rev])
      else:
        extra_out.append(rev)
    extra_exp = list(exp_hash.keys())
    raise Exception("Unexpected 'svn mergeinfo' output:\n"
                    "  expected but not found: %s\n"
                    "  found but not expected: %s"
                    % (', '.join([str(x) for x in extra_exp]),
                       ', '.join([str(x) for x in extra_out])))
def run_and_verify_switch(wc_dir_name,
                          wc_target,
                          switch_url,
                          output_tree, disk_tree, status_tree,
                          error_re_string = None,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None,
                          check_props = False,
                          *args):
  """Switch WC_TARGET (in working copy dir WC_DIR_NAME) to SWITCH_URL.

  If ERROR_RE_STRING, the switch must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  The subcommand output will be verified against OUTPUT_TREE, and the
  working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be
  compared.  (This is a good way to check that revision numbers were
  bumped.)

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  Return if successful, raise on failure."""

  # Update and make a tree of the output.
  exit_code, output, errput = main.run_svn(error_re_string, 'switch',
                                           switch_url, wc_target, *args)

  if error_re_string:
    # An error was expected: verify stderr against the regex and stop.
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, errput, None, expected_err)
    return
  elif errput:
    # Bug fix: this previously raised with the undefined name 'err'
    # (a NameError); the captured stderr lines are in 'errput'.
    raise verify.SVNUnexpectedStderr(errput)

  actual = wc.State.from_checkout(output)

  verify_update(actual, None, None, wc_dir_name,
                output_tree, None, None, disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def process_output_for_commit(output):
  """Helper for run_and_verify_commit(), also used in the factory."""
  # Strip the trailing commit-summary lines off OUTPUT (in place) and
  # verify that the commit actually succeeded; return the remainder.
  trailing_externals = []
  final = ""

  def is_external_removal(line):
    return line.startswith('Removing external') \
           or line.startswith('Removed external')

  if output:
    final = output.pop().strip()
    # Skip over any external-removal lines that trail the commit
    # notification, remembering them so they can be re-appended later.
    while output and is_external_removal(final):
      trailing_externals.append(final)
      final = output.pop().strip()
    if re.search("(Committed|Imported) revision [0-9]+.", final) is None:
      print("ERROR: commit did not succeed.")
      print("The final line from 'svn ci' was:")
      print(final)
      raise main.SVNCommitFailure

  # The new 'final' line is either a regular {Adding, Deleting,
  # Sending, ...} line, or a "Transmitting file data ..." line.  The
  # latter is noise for tree building, so drop it; anything else is
  # real output and must be restored.
  if output:
    candidate = output.pop()
    if re.search("Transmitting file data.+", candidate) is None:
      # whoops, it was important output, put it back.
      output.append(candidate)

  if trailing_externals:
    output.extend(trailing_externals)

  return output
def run_and_verify_commit(wc_dir_name, output_tree, status_tree,
                          error_re_string = None,
                          *args):
  """Commit within working copy WC_DIR_NAME, passing ARGS to the commit
  subcommand, and verify the results.

  The commit output is checked against OUTPUT_TREE.  When STATUS_TREE
  is given, 'svn status' output is checked as well (a good way to
  confirm that revision numbers were bumped).

  When ERROR_RE_STRING is None the commit must not exit with error;
  otherwise the commit must fail and its stderr must match the regular
  expression ERROR_RE_STRING.

  Return if successful, raise on failure."""
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()

  # Supply a default log message unless the caller provided one.
  if '-m' not in args and '-F' not in args:
    args = list(args) + ['-m', 'log msg']

  # Commit.
  exit_code, output, errput = main.run_svn(error_re_string, 'ci',
                                           *args)

  if error_re_string:
    # Expected failure: only stderr needs checking.
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, errput, None, expected_err)
    return

  # Expected success: turn the commit output into a tree and compare it
  # against the expected output tree.
  actual = tree.build_tree_from_commit(process_output_for_commit(output))
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeError:
    verify.display_trees("Output of commit is unexpected",
                         "OUTPUT TREE", output_tree, actual)
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise

  # Double-check via 'svn status' when a status tree was supplied.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
# This function always passes '-q' to the status command, which
# suppresses the printing of any unversioned or nonexistent items.
def run_and_verify_status(wc_dir_name, output_tree,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected OUTPUT_TREE.  SINGLETON_HANDLER_A and SINGLETON_HANDLER_B will
  be passed to tree.compare_trees - see that function's doc string for
  more details.
  Returns on success, raises on failure."""
  # Remember the original State (if any) for the entries-based
  # cross-check at the end; compare_trees needs the old-style tree.
  if isinstance(output_tree, wc.State):
    output_state = output_tree
    output_tree = output_tree.old_tree()
  else:
    output_state = None
  # '-q' suppresses unversioned/nonexistent items (see the comment
  # above this function); '-v -u' gives full, repository-aware status.
  exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', '-q',
                                           wc_dir_name)

  actual = tree.build_tree_from_status(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("status", actual, output_tree,
                       singleton_handler_a, a_baton,
                       singleton_handler_b, b_baton)
  except tree.SVNTreeError:
    verify.display_trees(None, 'STATUS OUTPUT TREE', output_tree, actual)
    print("ACTUAL STATUS TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise

  # if we have an output State, and we can/are-allowed to create an
  # entries-based State, then compare the two.
  if output_state:
    entries_state = wc.State.from_entries(wc_dir_name)
    if entries_state:
      tweaked = output_state.copy()
      tweaked.tweak_for_entries_compare()
      try:
        tweaked.compare_and_display('entries', entries_state)
      except tree.SVNTreeUnequal:
        ### do something more
        raise
# A variant of previous func, but doesn't pass '-q'. This allows us
# to verify unversioned or nonexistent items in the list.
def run_and_verify_unquiet_status(wc_dir_name, status_tree):
  """Run 'svn status -v -u' (without '-q') on WC_DIR_NAME and compare
  the result with the expected STATUS_TREE, so that unversioned and
  nonexistent items are included in the comparison.
  Returns on success, raises on failure."""
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()

  exit_code, output, errput = main.run_svn(None, 'status', '-v',
                                           '-u', wc_dir_name)
  actual = tree.build_tree_from_status(output)

  try:
    tree.compare_trees("UNQUIET STATUS", actual, status_tree)
  except tree.SVNTreeError:
    # Dump the actual tree as a script to ease diagnosing the mismatch.
    print("ACTUAL UNQUIET STATUS TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
def run_and_verify_status_xml(expected_entries = [],
                              *args):
  """ Run 'status --xml' with arguments *ARGS.  On success the output is
  parsed as an XML document and checked against EXPECTED_ENTRIES, a dict
  mapping each path to a dict of its expected status attributes.
  """
  exit_code, output, errput = run_and_verify_svn(None, None, [],
                                                 'status', '--xml', *args)
  if len(errput) > 0:
    raise Failure

  def collect_text(nodelist):
    # Concatenate the character data of all text children.
    return ''.join([node.data for node in nodelist
                    if node.nodeType == node.TEXT_NODE])

  doc = parseString(''.join(output))
  actual_entries = {}
  for entry in doc.getElementsByTagName('entry'):
    wcstatus = entry.getElementsByTagName('wc-status')[0]
    parsed = {'wcprops' : wcstatus.getAttribute('props'),
              'wcitem' : wcstatus.getAttribute('item'),
              }
    if wcstatus.hasAttribute('revision'):
      parsed['wcrev'] = wcstatus.getAttribute('revision')
    commit = entry.getElementsByTagName('commit')
    if commit:
      parsed['crev'] = commit[0].getAttribute('revision')
    author = entry.getElementsByTagName('author')
    if author:
      parsed['author'] = collect_text(author[0].childNodes)
    rstatus = entry.getElementsByTagName('repos-status')
    if rstatus:
      parsed['rprops'] = rstatus[0].getAttribute('props')
      parsed['ritem'] = rstatus[0].getAttribute('item')
    actual_entries[entry.getAttribute('path')] = parsed

  if expected_entries != actual_entries:
    # Show a readable diff of the two entry dicts.
    raise Failure('\n' + '\n'.join(difflib.ndiff(
                  pprint.pformat(expected_entries).splitlines(),
                  pprint.pformat(actual_entries).splitlines())))
def run_and_verify_diff_summarize_xml(error_re_string = [],
                                      expected_prefix = None,
                                      expected_paths = [],
                                      expected_items = [],
                                      expected_props = [],
                                      expected_kinds = [],
                                      *args):
  """Run 'diff --summarize --xml' with the arguments *ARGS, which should
  contain all arguments beyond for your 'diff --summarize --xml' omitting
  said arguments.  EXPECTED_PREFIX will store a "common" path prefix
  expected to be at the beginning of each summarized path.  If
  EXPECTED_PREFIX is None, then EXPECTED_PATHS will need to be exactly
  as 'svn diff --summarize --xml' will output.  If ERROR_RE_STRING, the
  command must exit with error, and the error message must match regular
  expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, the subcommand output will be parsed
  into an XML document and will then be verified by comparing the parsed
  output to the contents in the EXPECTED_PATHS, EXPECTED_ITEMS,
  EXPECTED_PROPS and EXPECTED_KINDS.  Returns on success, raises
  on failure."""
  exit_code, output, errput = run_and_verify_svn(None, None, error_re_string,
                                                 'diff', '--summarize',
                                                 '--xml', *args)

  # Return if errors are present since they were expected
  if len(errput) > 0:
    return

  doc = parseString(''.join(output))
  paths = doc.getElementsByTagName("path")
  items = expected_items
  kinds = expected_kinds

  for path in paths:
    modified_path = path.childNodes[0].data

    if (expected_prefix is not None
        and modified_path.find(expected_prefix) == 0):
      # Strip the common prefix plus the path separator that follows it.
      modified_path = modified_path.replace(expected_prefix, '')[1:].strip()

    # Workaround single-object diff
    if len(modified_path) == 0:
      modified_path = path.childNodes[0].data.split(os.sep)[-1]

    # From here on, we use '/' as path separator.
    if os.sep != "/":
      modified_path = modified_path.replace(os.sep, "/")

    if modified_path not in expected_paths:
      print("ERROR: %s not expected in the changed paths." % modified_path)
      raise Failure

    # The expected item/kind/prop lists are parallel to EXPECTED_PATHS,
    # so the path's index selects the matching expectations.
    index = expected_paths.index(modified_path)
    expected_item = items[index]
    expected_kind = kinds[index]
    expected_prop = expected_props[index]
    actual_item = path.getAttribute('item')
    actual_kind = path.getAttribute('kind')
    actual_prop = path.getAttribute('props')

    if expected_item != actual_item:
      print("ERROR: expected: %s actual: %s" % (expected_item, actual_item))
      raise Failure

    if expected_kind != actual_kind:
      print("ERROR: expected: %s actual: %s" % (expected_kind, actual_kind))
      raise Failure

    if expected_prop != actual_prop:
      print("ERROR: expected: %s actual: %s" % (expected_prop, actual_prop))
      raise Failure
def run_and_verify_diff_summarize(output_tree, *args):
  """Run 'svn diff --summarize' with the arguments *ARGS and compare the
  result against the expected OUTPUT_TREE.
  Returns on success, raises on failure."""
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()

  exit_code, output, errput = main.run_svn(None, 'diff', '--summarize',
                                           *args)
  actual = tree.build_tree_from_diff_summarize(output)

  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeError:
    # Show both trees, plus the actual one as a script, for diagnosis.
    verify.display_trees(None, 'DIFF OUTPUT TREE', output_tree, actual)
    print("ACTUAL DIFF OUTPUT TREE:")
    tree.dump_tree_script(actual)
    raise
def run_and_validate_lock(path, username):
  """`svn lock' the given path and validate the contents of the lock.
  Use the given username. This is important because locks are
  user specific."""
  comment = "Locking path:%s." % path

  # lock the path
  run_and_verify_svn(None, ".*locked by user", [], 'lock',
                     '--username', username,
                     '-m', comment, path)

  # Run info and check that we get the lock fields.
  exit_code, output, err = run_and_verify_svn(None, None, [],
                                              'info','-R',
                                              path)

  ### TODO: Leverage RegexOuput([...], match_all=True) here.
  # prepare the regexs to compare against
  token_re = re.compile(".*?Lock Token: opaquelocktoken:.*?", re.DOTALL)
  author_re = re.compile(".*?Lock Owner: %s\n.*?" % username, re.DOTALL)
  created_re = re.compile(".*?Lock Created:.*?", re.DOTALL)
  comment_re = re.compile(".*?%s\n.*?" % re.escape(comment), re.DOTALL)
  # join all output lines into one
  output = "".join(output)
  # Fail even if one regex does not match
  if ( not (token_re.match(output) and
            author_re.match(output) and
            created_re.match(output) and
            comment_re.match(output))):
    raise Failure
def _run_and_verify_resolve(cmd, expected_paths, *args):
  """Run "svn CMD" (where CMD is 'resolve' or 'resolved') with arguments
  ARGS and check that exactly the paths in EXPECTED_PATHS are reported
  as resolved.  With no ARGS, the elements of EXPECTED_PATHS are used as
  the arguments."""
  # TODO: verify that the status of PATHS changes accordingly.
  if not args:
    args = expected_paths
  expected_output = verify.UnorderedOutput(
    ["Resolved conflicted state of '%s'\n" % path
     for path in expected_paths])
  run_and_verify_svn(None, expected_output, [],
                     cmd, *args)
def run_and_verify_resolve(expected_paths, *args):
  """Run "svn resolve" with arguments ARGS and check that exactly the
  paths in EXPECTED_PATHS are resolved.  With no ARGS, the elements of
  EXPECTED_PATHS are used as the arguments."""
  _run_and_verify_resolve('resolve', expected_paths, *args)
def run_and_verify_resolved(expected_paths, *args):
  """Run "svn resolved" with arguments ARGS and check that exactly the
  paths in EXPECTED_PATHS are resolved.  With no ARGS, the elements of
  EXPECTED_PATHS are used as the arguments."""
  _run_and_verify_resolve('resolved', expected_paths, *args)
def run_and_verify_revert(expected_paths, *args):
  """Run "svn revert" with arguments ARGS and check that exactly the
  paths in EXPECTED_PATHS are reverted.  With no ARGS, the elements of
  EXPECTED_PATHS are used as the arguments."""
  if not args:
    args = expected_paths
  expected_output = verify.UnorderedOutput(
    ["Reverted '%s'\n" % path for path in expected_paths])
  run_and_verify_svn(None, expected_output, [],
                     "revert", *args)
######################################################################
# Other general utilities
# This allows a test to *quickly* bootstrap itself.
def make_repo_and_wc(sbox, create_wc = True, read_only = False):
  """Create a fresh 'Greek Tree' repository and check out a WC from it.

  If READ_ONLY is False, a dedicated repository will be created, at the path
  SBOX.repo_dir.  If READ_ONLY is True, the pristine repository will be used.
  In either case, SBOX.repo_url is assumed to point to the repository that
  will be used.

  If CREATE_WC is True, a dedicated working copy will be checked out from
  the repository, at the path SBOX.wc_dir.

  Returns on success, raises on failure."""
  # Create (or copy afresh) a new repos with a greek tree in it.
  if not read_only:
    guarantee_greek_repository(sbox.repo_dir)

  if create_wc:
    # Generate the expected output tree.
    expected_output = main.greek_state.copy()
    expected_output.wc_dir = sbox.wc_dir
    expected_output.tweak(status='A ', contents=None)

    # Generate an expected wc tree.
    expected_wc = main.greek_state

    # Do a checkout, and verify the resulting output and disk contents.
    run_and_verify_checkout(sbox.repo_url,
                            sbox.wc_dir,
                            expected_output,
                            expected_wc)
  else:
    # just make sure the parent folder of our working copy is created
    try:
      os.mkdir(main.general_wc_dir)
    # Fixed: use 'except ... as ...' (valid on Python 2.6+ and required
    # by Python 3) instead of the old 'except OSError, err:' syntax.
    except OSError as err:
      # An already-existing directory is fine; re-raise anything else.
      if err.errno != errno.EEXIST:
        raise
# Duplicate a working copy or other dir.
def duplicate_dir(wc_name, wc_copy_name):
  """Copy the working copy WC_NAME to WC_COPY_NAME, replacing any tree
  already present at the destination."""
  # Remove the destination first: shutil.copytree requires that the
  # target directory not exist.
  main.safe_rmtree(wc_copy_name)
  shutil.copytree(wc_name, wc_copy_name)
def get_virginal_state(wc_dir, rev):
  "Return a virginal greek tree state for a WC and repos at revision REV."
  rev = str(rev) ### maybe switch rev to an integer?

  # Start from a copy of the pristine greek tree, relocate it to WC_DIR,
  # add a root element, then normalise every item's status/contents/rev.
  state = main.greek_state.copy()
  state.wc_dir = wc_dir
  state.desc[''] = wc.StateItem()
  state.tweak(contents=None, status=' ', wc_rev=rev)

  return state
# Cheap administrative directory locking
def lock_admin_dir(wc_dir, recursive=False):
  "Lock a SVN administrative directory"
  # Open the wc.db handle for this working copy.  The returned handle
  # and paths are unused here; presumably the call is kept for its side
  # effects on the test environment -- TODO confirm it is still needed.
  db, root_path, relpath = wc.open_wc_db(wc_dir)
  # The actual locking is delegated to the external wc-lock tester tool.
  svntest.main.run_wc_lock_tester(recursive, wc_dir)
def set_incomplete(wc_dir, revision):
  "Make wc_dir incomplete at revision"
  # Delegates to the external wc-incomplete tester helper tool.
  svntest.main.run_wc_incomplete_tester(wc_dir, revision)
def get_wc_uuid(wc_dir):
  "Return the UUID of the working copy at WC_DIR."
  # Parsed from the 'Repository UUID' field of 'svn info' output.
  return run_and_parse_info(wc_dir)[0]['Repository UUID']
def get_wc_base_rev(wc_dir):
  "Return the BASE revision of the working copy at WC_DIR."
  # Parsed from the 'Revision' field of 'svn info' output.
  return run_and_parse_info(wc_dir)[0]['Revision']
def hook_failure_message(hook_name):
  """Return the error message that the client prints for failure of the
  specified hook HOOK_NAME.  The wording changed with Subversion 1.5."""
  if svntest.main.options.server_minor_version < 5:
    return "'%s' hook failed with error output:\n" % hook_name

  # 1.5+ wording: look up the action name the client uses for this hook;
  # unknown hooks fall back to a generic phrasing.
  action = {"start-commit": "Commit",
            "pre-commit": "Commit",
            "pre-revprop-change": "Revprop change",
            "pre-lock": "Lock",
            "pre-unlock": "Unlock"}.get(hook_name)
  if action is None:
    message = "%s hook failed (exit code 1)" % (hook_name,)
  else:
    message = "%s blocked by %s hook (exit code 1)" % (action, hook_name)
  return message + " with output:\n"
def create_failing_hook(repo_dir, hook_name, text):
  """Install a HOOK_NAME hook in the repository at REPO_DIR that writes
  TEXT to stderr and exits with an error."""
  hook_path = os.path.join(repo_dir, 'hooks', hook_name)
  # Embed the text carefully: it might include characters like "%" and "'".
  # (repr() is the argument to the single %s, so any '%' inside TEXT is
  # never interpreted as a format directive.)
  script = 'import sys\n' \
           'sys.stderr.write(%s)\n' \
           'sys.exit(1)\n' % (repr(text),)
  main.create_python_hook_script(hook_path, script)
def enable_revprop_changes(repo_dir):
  """Enable revprop changes in the repository at REPO_DIR by creating a
  pre-revprop-change hook script and (if appropriate) making it executable."""
  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  # A hook that unconditionally exits 0 permits every revprop change.
  main.create_python_hook_script(hook_path, 'import sys; sys.exit(0)')
def disable_revprop_changes(repo_dir):
  """Disable revprop changes in the repository at REPO_DIR by creating a
  pre-revprop-change hook script that prints "pre-revprop-change" followed
  by its arguments, and returns an error."""
  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  # Echo the first five hook arguments to stderr (so tests can assert on
  # what the hook was invoked with), then fail the revprop change.
  main.create_python_hook_script(hook_path,
                                 'import sys\n'
                                 'sys.stderr.write("pre-revprop-change %s" % " ".join(sys.argv[1:6]))\n'
                                 'sys.exit(1)\n')
def create_failing_post_commit_hook(repo_dir):
  """Create a post-commit hook script in the repository at REPO_DIR that always
  reports an error."""
  hook_path = main.get_post_commit_hook_path(repo_dir)
  # Exit code 1 plus a stderr message makes every commit report a
  # post-commit hook failure.
  main.create_python_hook_script(hook_path, 'import sys\n'
                                 'sys.stderr.write("Post-commit hook failed")\n'
                                 'sys.exit(1)')
# set_prop can be used for properties with NULL characters which are not
# handled correctly when passed to subprocess.Popen() and values like "*"
# which are not handled correctly on Windows.
def set_prop(name, value, path, expected_re_string=None):
  """Set property NAME to VALUE on PATH.

  Values that start with '-', contain NUL bytes, or are set on Windows
  are passed via a temporary file ('propset -F') because they are not
  handled correctly when passed directly to subprocess.Popen() (and
  values like "*" break on Windows).

  If EXPECTED_RE_STRING is given, stderr must match that regex."""
  if value and (value[0] == '-' or '\x00' in value or sys.platform == 'win32'):
    from tempfile import mkstemp
    (fd, value_file_path) = mkstemp()
    try:
      # Wrap the fd returned by mkstemp() directly instead of opening
      # the path a second time; closing the file object closes the fd.
      value_file = os.fdopen(fd, 'wb')
      value_file.write(value)
      value_file.flush()
      value_file.close()
      exit_code, out, err = main.run_svn(expected_re_string, 'propset',
                                         '-F', value_file_path, name, path)
    finally:
      # Always clean up the temporary file, even if run_svn() raises
      # (previously the file was leaked on that path).
      os.remove(value_file_path)
  else:
    exit_code, out, err = main.run_svn(expected_re_string, 'propset',
                                       name, value, path)
  if expected_re_string:
    if not expected_re_string.startswith(".*"):
      expected_re_string = ".*(" + expected_re_string + ")"
    expected_err = verify.RegexOutput(expected_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
def check_prop(name, path, exp_out, revprop=None):
  """Verify that property NAME on PATH has a value of EXP_OUT.
  If REVPROP is not None, then it is a revision number and
  a revision property is sought."""
  if revprop is not None:
    revprop_options = ['--revprop', '-r', revprop]
  else:
    revprop_options = []
  # Not using run_svn because binary_mode must be set
  exit_code, out, err = main.run_command(main.svn_binary, None, 1, 'pg',
                                         '--strict', name, path,
                                         '--config-dir',
                                         main.default_config_dir,
                                         '--username', main.wc_author,
                                         '--password', main.wc_passwd,
                                         *revprop_options)
  if out != exp_out:
    # Diagnose the mismatch before failing.
    print("svn pg --strict %s output does not match expected." % name)
    print("Expected standard output: %s\n" % exp_out)
    print("Actual standard output: %s\n" % out)
    raise Failure
def fill_file_with_lines(wc_path, line_nbr, line_descrip=None,
                         append=True):
  """Change the file at WC_PATH (adding some lines), and return its
  new contents. LINE_NBR indicates the line number at which the new
  contents should assume that it's being appended. LINE_DESCRIP is
  something like 'This is line' (the default) or 'Conflicting line'."""
  if line_descrip is None:
    line_descrip = "This is line"
  base_name = os.path.basename(wc_path)
  # Build three numbered lines: LINE_NBR .. LINE_NBR + 2.
  contents = "".join(line_descrip + " " + repr(n) + " in '"
                     + base_name + "'.\n"
                     for n in range(line_nbr, line_nbr + 3))
  # Either append to or overwrite the file, as requested.
  if append:
    main.file_append(wc_path, contents)
  else:
    main.file_write(wc_path, contents)
  return contents
def inject_conflict_into_wc(sbox, state_path, file_path,
                            expected_disk, expected_status, merged_rev):
  """Create a conflict at FILE_PATH by replacing its contents,
  committing the change, backdating it to its previous revision,
  changing its contents again, then updating it to merge in the
  previous change."""
  wc_dir = sbox.wc_dir
  # Make a change to the file.
  contents = fill_file_with_lines(file_path, 1, "This is line", append=False)
  # Commit the changed file, first taking note of the current revision.
  # NOTE(review): expected_status is dereferenced here unconditionally,
  # before the 'if expected_status:' guards below — a falsy expected_status
  # would crash on this line; confirm callers always pass one.
  prev_rev = expected_status.desc[state_path].wc_rev
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(verb='Sending'),
    })
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
  run_and_verify_commit(wc_dir, expected_output, expected_status,
                        None, file_path)
  # Backdate the file to the revision it had before the commit above.
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(prev_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=prev_rev)
  # Make a conflicting change to the file, and backdate the file.
  conflicting_contents = fill_file_with_lines(file_path, 1, "Conflicting line",
                                              append=False)
  # Merge the previous change into the file to produce a conflict.
  # The on-disk contents are reset, then rebuilt with conflict markers
  # by inject_conflict_into_expected_state().
  if expected_disk:
    expected_disk.tweak(state_path, contents="")
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(status='C '),
    })
  inject_conflict_into_expected_state(state_path,
                                      expected_disk, expected_status,
                                      conflicting_contents, contents,
                                      merged_rev)
  # Update forward again; this merges the committed change into the
  # conflicting local edit, producing the text conflict.
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(merged_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
def inject_conflict_into_expected_state(state_path,
                                        expected_disk, expected_status,
                                        wc_text, merged_text, merged_rev):
  """Mark STATE_PATH as conflicted in EXPECTED_DISK and EXPECTED_STATUS
  (either tree may be None and is then left untouched).  WC_TEXT,
  MERGED_TEXT, and MERGED_REV determine the conflict-marker region that
  is appended to the expected disk contents; the text parameters should
  be newline-terminated."""
  if expected_disk:
    marker = make_conflict_marker_text(wc_text, merged_text, merged_rev)
    previous = expected_disk.desc[state_path].contents or ""
    expected_disk.tweak(state_path, contents=previous + marker)
  if expected_status:
    expected_status.tweak(state_path, status='C ')
def make_conflict_marker_text(wc_text, merged_text, merged_rev):
  """Return the conflict marker region produced when WC_TEXT (the
  current working-copy text) conflicts with MERGED_TEXT (the text merged
  in) coming from revision MERGED_REV."""
  parts = ["<<<<<<< .working\n",
           wc_text,
           "=======\n",
           merged_text,
           ">>>>>>> .merge-right.r",
           str(merged_rev),
           "\n"]
  return "".join(parts)
def build_greek_tree_conflicts(sbox):
  """Create a working copy that has tree-conflict markings.
  After this function has been called, sbox.wc_dir is a working
  copy that has specific tree-conflict markings.

  In particular, this does two conflicting sets of edits and performs an
  update so that tree conflicts appear.

  Note that this function calls sbox.build() because it needs a clean sbox.
  So, there is no need to call sbox.build() before this.

  The conflicts are the result of an 'update' on the following changes:

                Incoming       Local
    A/D/G/pi    text-mod       del
    A/D/G/rho   del            text-mod
    A/D/G/tau   del            del

  This function is useful for testing that tree-conflicts are handled
  properly once they have appeared, e.g. that commits are blocked, that the
  info output is correct, etc.

  See also the tree-conflicts tests using deep_trees in various other
  .py files, and tree_conflict_tests.py.
  """
  sbox.build()
  wc_dir = sbox.wc_dir
  j = os.path.join
  G = j(wc_dir, 'A', 'D', 'G')
  pi = j(G, 'pi')
  rho = j(G, 'rho')
  tau = j(G, 'tau')
  # Make incoming changes and "store them away" with a commit.
  main.file_append(pi, "Incoming edit.\n")
  main.run_svn(None, 'del', rho)
  main.run_svn(None, 'del', tau)
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi' : Item(verb='Sending'),
    'A/D/G/rho' : Item(verb='Deleting'),
    'A/D/G/tau' : Item(verb='Deleting'),
    })
  expected_status = get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/G/pi', wc_rev='2')
  expected_status.remove('A/D/G/rho', 'A/D/G/tau')
  run_and_verify_commit(wc_dir, expected_output, expected_status, None,
                        '-m', 'Incoming changes.', wc_dir )
  # Update back to the pristine state ("time-warp" to r1).
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi' : Item(status='U '),
    'A/D/G/rho' : Item(status='A '),
    'A/D/G/tau' : Item(status='A '),
    })
  expected_disk = main.greek_state
  expected_status = get_virginal_state(wc_dir, 1)
  run_and_verify_update(wc_dir, expected_output, expected_disk,
                        expected_status, None, None, None, None, None, False,
                        '-r', '1', wc_dir)
  # Make local changes (the opposite operation on each node).
  main.run_svn(None, 'del', pi)
  main.file_append(rho, "Local edit.\n")
  main.run_svn(None, 'del', tau)
  # Update, receiving the incoming changes on top of the local changes,
  # causing tree conflicts. Don't check for any particular result: that is
  # the job of other tests.
  run_and_verify_svn(None, verify.AnyOutput, [], 'update', wc_dir)
def make_deep_trees(base):
  """Helper function for deep trees conflicts.  Create one "container"
  dir per tree depth (F, D, DF, DD, DDF, DDD) under BASE, so that any
  conflicts can be tested separately in each container.
  """
  j = os.path.join
  # Container dirs: each holds a directory chain of the given depth.
  os.makedirs(j(base, 'F'))
  os.makedirs(j(base, 'D', 'D1'))
  os.makedirs(j(base, 'DF', 'D1'))
  os.makedirs(j(base, 'DD', 'D1', 'D2'))
  os.makedirs(j(base, 'DDF', 'D1', 'D2'))
  os.makedirs(j(base, 'DDD', 'D1', 'D2', 'D3'))
  # Seed the file-bearing containers with one file each.
  main.file_append(j(base, 'F', 'alpha'),
                   "This is the file 'alpha'.\n")
  main.file_append(j(base, 'DF', 'D1', 'beta'),
                   "This is the file 'beta'.\n")
  main.file_append(j(base, 'DDF', 'D1', 'D2', 'gamma'),
                   "This is the file 'gamma'.\n")
def add_deep_trees(sbox, base_dir_name):
  """Prepare a "deep_trees" set within <sbox.wc_dir>/<base_dir_name>.

  The directory is created, a deep_trees set is built inside it (see
  make_deep_trees()), and everything is scheduled for addition with
  'svn add'.  The caller is responsible for committing, if needed.
  """
  base = os.path.join(sbox.wc_dir, base_dir_name)
  make_deep_trees(base)
  main.run_svn(None, 'add', base)
# Local shorthand used by all the expected-state trees below.
Item = wc.StateItem

# initial deep trees state: exactly what make_deep_trees() creates
# (six container dirs, with one file in each file-bearing container).
deep_trees_virginal_state = wc.State('', {
  'F' : Item(),
  'F/alpha' : Item("This is the file 'alpha'.\n"),
  'D' : Item(),
  'D/D1' : Item(),
  'DF' : Item(),
  'DF/D1' : Item(),
  'DF/D1/beta' : Item("This is the file 'beta'.\n"),
  'DD' : Item(),
  'DD/D1' : Item(),
  'DD/D1/D2' : Item(),
  'DDF' : Item(),
  'DDF/D1' : Item(),
  'DDF/D1/D2' : Item(),
  'DDF/D1/D2/gamma' : Item("This is the file 'gamma'.\n"),
  'DDD' : Item(),
  'DDD/D1' : Item(),
  'DDD/D1/D2' : Item(),
  'DDD/D1/D2/D3' : Item(),
  })
# Many actions on deep trees and their resulting states...
def deep_trees_leaf_edit(base):
  """Helper function for deep trees test cases.  Append text to the
  existing files, set a property on every leaf node, and create one new
  file in each previously-empty leaf directory."""
  j = os.path.join
  alpha = j(base, 'F', 'alpha')
  beta = j(base, 'DF', 'D1', 'beta')
  gamma = j(base, 'DDF', 'D1', 'D2', 'gamma')
  main.file_append(alpha, "More text for file alpha.\n")
  main.file_append(beta, "More text for file beta.\n")
  main.file_append(gamma, "More text for file gamma.\n")
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'propset', 'prop1', '1', alpha, beta, gamma)
  # Change a property on each leaf directory as well.
  d_leaf = j(base, 'D', 'D1')
  dd_leaf = j(base, 'DD', 'D1', 'D2')
  ddd_leaf = j(base, 'DDD', 'D1', 'D2', 'D3')
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'propset', 'prop1', '1', d_leaf, dd_leaf, ddd_leaf)
  # Create and add a new file in each previously-empty leaf dir.
  delta = j(base, 'D', 'D1', 'delta')
  epsilon = j(base, 'DD', 'D1', 'D2', 'epsilon')
  zeta = j(base, 'DDD', 'D1', 'D2', 'D3', 'zeta')
  main.file_append(delta, "This is the file 'delta'.\n")
  main.file_append(epsilon, "This is the file 'epsilon'.\n")
  main.file_append(zeta, "This is the file 'zeta'.\n")
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'add', delta, epsilon, zeta)
# deep trees state after a call to deep_trees_leaf_edit:
# every existing file grows one line, and a new file appears in each
# previously-empty leaf directory (delta, epsilon, zeta).
deep_trees_after_leaf_edit = wc.State('', {
  'F' : Item(),
  'F/alpha' : Item("This is the file 'alpha'.\nMore text for file alpha.\n"),
  'D' : Item(),
  'D/D1' : Item(),
  'D/D1/delta' : Item("This is the file 'delta'.\n"),
  'DF' : Item(),
  'DF/D1' : Item(),
  'DF/D1/beta' : Item("This is the file 'beta'.\nMore text for file beta.\n"),
  'DD' : Item(),
  'DD/D1' : Item(),
  'DD/D1/D2' : Item(),
  'DD/D1/D2/epsilon' : Item("This is the file 'epsilon'.\n"),
  'DDF' : Item(),
  'DDF/D1' : Item(),
  'DDF/D1/D2' : Item(),
  'DDF/D1/D2/gamma' : Item("This is the file 'gamma'.\nMore text for file gamma.\n"),
  'DDD' : Item(),
  'DDD/D1' : Item(),
  'DDD/D1/D2' : Item(),
  'DDD/D1/D2/D3' : Item(),
  'DDD/D1/D2/D3/zeta' : Item("This is the file 'zeta'.\n"),
  })
def deep_trees_leaf_del(base):
  """Helper function for deep trees test cases.  Schedule the leaf
  files and the (empty) leaf directories for deletion."""
  j = os.path.join
  targets = [j(base, 'F', 'alpha'),
             j(base, 'D', 'D1'),
             j(base, 'DF', 'D1', 'beta'),
             j(base, 'DD', 'D1', 'D2'),
             j(base, 'DDF', 'D1', 'D2', 'gamma'),
             j(base, 'DDD', 'D1', 'D2', 'D3')]
  main.run_svn(None, 'rm', *targets)
# deep trees state after a call to deep_trees_leaf_del:
# the deleted leaves are gone; their parent directories remain.
deep_trees_after_leaf_del = wc.State('', {
  'F' : Item(),
  'D' : Item(),
  'DF' : Item(),
  'DF/D1' : Item(),
  'DD' : Item(),
  'DD/D1' : Item(),
  'DDF' : Item(),
  'DDF/D1' : Item(),
  'DDF/D1/D2' : Item(),
  'DDD' : Item(),
  'DDD/D1' : Item(),
  'DDD/D1/D2' : Item(),
  })
# deep trees state after a call to deep_trees_leaf_del with no commit
def deep_trees_after_leaf_del_no_ci(wc_dir):
  """Return the expected on-disk state after an uncommitted leaf
  deletion; the answer differs between single-db and older working-copy
  formats."""
  if svntest.main.wc_is_singledb(wc_dir):
    return deep_trees_after_leaf_del
  return deep_trees_empty_dirs
def deep_trees_tree_del(base):
  """Helper function for deep trees test cases.  Schedule the top-level
  dirs (and F's file) for deletion."""
  j = os.path.join
  targets = [j(base, 'F', 'alpha'),
             j(base, 'D', 'D1'),
             j(base, 'DF', 'D1'),
             j(base, 'DD', 'D1'),
             j(base, 'DDF', 'D1'),
             j(base, 'DDD', 'D1')]
  main.run_svn(None, 'rm', *targets)
def deep_trees_rmtree(base):
  """Helper function for deep trees test cases.  Remove the top-level
  dirs with OS calls (unlink/rmtree) instead of 'svn del'."""
  j = os.path.join
  # 'F' holds a plain file; every other container holds a subtree.
  os.unlink(j(base, 'F', 'alpha'))
  for container in ('D', 'DF', 'DD', 'DDF', 'DDD'):
    main.safe_rmtree(j(base, container, 'D1'))
# deep trees state after a call to deep_trees_tree_del:
# only the (now empty) container dirs are left.
deep_trees_after_tree_del = wc.State('', {
  'F' : Item(),
  'D' : Item(),
  'DF' : Item(),
  'DD' : Item(),
  'DDF' : Item(),
  'DDD' : Item(),
  })
# deep trees state without any files: the full directory skeleton, but
# with alpha/beta/gamma removed.
deep_trees_empty_dirs = wc.State('', {
  'F' : Item(),
  'D' : Item(),
  'D/D1' : Item(),
  'DF' : Item(),
  'DF/D1' : Item(),
  'DD' : Item(),
  'DD/D1' : Item(),
  'DD/D1/D2' : Item(),
  'DDF' : Item(),
  'DDF/D1' : Item(),
  'DDF/D1/D2' : Item(),
  'DDD' : Item(),
  'DDD/D1' : Item(),
  'DDD/D1/D2' : Item(),
  'DDD/D1/D2/D3' : Item(),
  })
# deep trees state after a call to deep_trees_tree_del with no commit
def deep_trees_after_tree_del_no_ci(wc_dir):
  """Return the expected on-disk state after an uncommitted tree
  deletion; the answer differs between single-db and older working-copy
  formats."""
  if svntest.main.wc_is_singledb(wc_dir):
    return deep_trees_after_tree_del
  return deep_trees_empty_dirs
def deep_trees_tree_del_repos(base):
  """Helper function for deep trees test cases. Delete top-level dirs,
  directly in the repository."""
  # NOTE(review): despite the docstring, the command actually run is
  # 'svn mkdir' (URL-based, so committed immediately), not a delete.
  # Presumably this just creates a fresh revision touching each target
  # path — deep_trees_skipping_on_update uses it as further_action to
  # make r4 — but confirm the intent before renaming or "fixing" it.
  j = '/'.join
  F = j([base, 'F', 'alpha'])
  D = j([base, 'D', 'D1'])
  DF = j([base, 'DF', 'D1'])
  DD = j([base, 'DD', 'D1'])
  DDF = j([base, 'DDF', 'D1'])
  DDD = j([base, 'DDD', 'D1'])
  main.run_svn(None, 'mkdir', '-m', '', F, D, DF, DD, DDF, DDD)
# Expected merge/update/switch output.
# One tree conflict ('C' in the treeconflict column) is reported at the
# victim node of each container.
deep_trees_conflict_output = wc.State('', {
  'F/alpha' : Item(status=' ', treeconflict='C'),
  'D/D1' : Item(status=' ', treeconflict='C'),
  'DF/D1' : Item(status=' ', treeconflict='C'),
  'DD/D1' : Item(status=' ', treeconflict='C'),
  'DDF/D1' : Item(status=' ', treeconflict='C'),
  'DDD/D1' : Item(status=' ', treeconflict='C'),
  })

# Same victims, but reported as 'Skipped' (used when the operation runs
# into already-existing conflicts).
deep_trees_conflict_output_skipped = wc.State('', {
  'D/D1' : Item(verb='Skipped'),
  'F/alpha' : Item(verb='Skipped'),
  'DD/D1' : Item(verb='Skipped'),
  'DF/D1' : Item(verb='Skipped'),
  'DDD/D1' : Item(verb='Skipped'),
  'DDF/D1' : Item(verb='Skipped'),
  })
# Expected status output after merge/update/switch.
# Local tree-del vs incoming change: every locally deleted node is still
# scheduled for deletion ('D') at its old revision (2), with a tree
# conflict recorded on each container's victim node.
deep_trees_status_local_tree_del = wc.State('', {
  '' : Item(status=' ', wc_rev=3),
  'D' : Item(status=' ', wc_rev=3),
  'D/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DD' : Item(status=' ', wc_rev=3),
  'DD/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DD/D1/D2' : Item(status='D ', wc_rev=2),
  'DDD' : Item(status=' ', wc_rev=3),
  'DDD/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DDD/D1/D2' : Item(status='D ', wc_rev=2),
  'DDD/D1/D2/D3' : Item(status='D ', wc_rev=2),
  'DDF' : Item(status=' ', wc_rev=3),
  'DDF/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DDF/D1/D2' : Item(status='D ', wc_rev=2),
  'DDF/D1/D2/gamma' : Item(status='D ', wc_rev=2),
  'DF' : Item(status=' ', wc_rev=3),
  'DF/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
  'DF/D1/beta' : Item(status='D ', wc_rev=2),
  'F' : Item(status=' ', wc_rev=3),
  'F/alpha' : Item(status='D ', wc_rev=2, treeconflict='C'),
  })

# Local leaf-edit vs incoming change: edited files show text+prop mods
# ('MM'), new files are scheduled for addition ('A ', wc_rev=0), and the
# victim node of each container carries the tree conflict.
deep_trees_status_local_leaf_edit = wc.State('', {
  '' : Item(status=' ', wc_rev=3),
  'D' : Item(status=' ', wc_rev=3),
  'D/D1' : Item(status=' M', wc_rev=2, treeconflict='C'),
  'D/D1/delta' : Item(status='A ', wc_rev=0),
  'DD' : Item(status=' ', wc_rev=3),
  'DD/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DD/D1/D2' : Item(status=' M', wc_rev=2),
  'DD/D1/D2/epsilon' : Item(status='A ', wc_rev=0),
  'DDD' : Item(status=' ', wc_rev=3),
  'DDD/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DDD/D1/D2' : Item(status=' ', wc_rev=2),
  'DDD/D1/D2/D3' : Item(status=' M', wc_rev=2),
  'DDD/D1/D2/D3/zeta' : Item(status='A ', wc_rev=0),
  'DDF' : Item(status=' ', wc_rev=3),
  'DDF/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DDF/D1/D2' : Item(status=' ', wc_rev=2),
  'DDF/D1/D2/gamma' : Item(status='MM', wc_rev=2),
  'DF' : Item(status=' ', wc_rev=3),
  'DF/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
  'DF/D1/beta' : Item(status='MM', wc_rev=2),
  'F' : Item(status=' ', wc_rev=3),
  'F/alpha' : Item(status='MM', wc_rev=2, treeconflict='C'),
  })
class DeepTreesTestCase:
  """Describes one tree-conflicts test case.
  See deep_trees_run_tests_scheme_for_update(), ..._switch(), ..._merge().

  NAME is the subdirectory name in which the test should be run.
  LOCAL_ACTION and INCOMING_ACTION are the functions that construct the
  local changes and the incoming changes, respectively (see
  deep_trees_leaf_edit, deep_trees_tree_del, etc.).

  The EXPECTED_* and ERROR_RE_STRING arguments are as described for
  run_and_verify_[update|switch|merge], except EXPECTED_INFO, which is a
  dict mapping a path to the per-path dict that is passed on to
  run_and_verify_info(), e.g.:

      expected_info = {
        'F/alpha' : {
          'Revision' : '3',
          'Tree conflict' :
            '^local delete, incoming edit upon update'
            + ' Source  left: .file.*/F/alpha@2'
            + ' Source right: .file.*/F/alpha@3$',
        },
        ...
      }

  EXPECTED_SKIP is only used by the merge scheme
  (deep_trees_run_tests_scheme_for_merge).  COMMIT_BLOCK_STRING is the
  error regex expected when a commit is blocked by the conflict.
  """

  def __init__(self, name, local_action, incoming_action,
               expected_output=None, expected_disk=None,
               expected_status=None, expected_skip=None,
               error_re_string=None,
               commit_block_string=".*remains in conflict.*",
               expected_info=None):
    # Plain value object: record every argument on the instance.
    self.name = name
    self.local_action = local_action
    self.incoming_action = incoming_action
    self.error_re_string = error_re_string
    self.commit_block_string = commit_block_string
    self.expected_output = expected_output
    self.expected_disk = expected_disk
    self.expected_status = expected_status
    self.expected_skip = expected_skip
    self.expected_info = expected_info
def deep_trees_run_tests_scheme_for_update(sbox, greater_scheme):
  """
  Runs a given list of tests for conflicts occuring at an update operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     again contains one set of "deep_trees", being separate container
     dirs for different depths of trees (F, D, DF, DD, DDF, DDD).

  2) A commit is performed across all test cases and depths.
     (our initial state, -r2)

  3) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
     its *incoming* action is performed (e.g. "deep_trees_leaf_edit"), in
     each of the different depth trees (F, D, DF, ... DDD).

  4) A commit is performed across all test cases and depths:
     our "incoming" state is "stored away in the repository for now",
     -r3.

  5) All test case dirs and contained deep_trees are time-warped
     (updated) back to -r2, the initial state containing deep_trees.

  6) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
     its *local* action is performed (e.g. "deep_trees_leaf_del"), in
     each of the different depth trees (F, D, DF, ... DDD).

  7) An update to -r3 is performed across all test cases and depths.
     This causes tree-conflicts between the "local" state in the working
     copy and the "incoming" state from the repository, -r3.

  8) A commit is performed in each separate container, to verify
     that each tree-conflict indeed blocks a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """
  j = os.path.join
  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir
  # 1) create directories
  for test_case in greater_scheme:
    try:
      add_deep_trees(sbox, test_case.name)
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while setting up deep trees in '%s'" % test_case.name)
      raise
  # 2) commit initial state
  main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)
  # 3) apply incoming changes
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name))
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while performing incoming action in '%s'" % test_case.name)
      raise
  # 4) commit incoming changes
  main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)
  # 5) time-warp back to -r2
  main.run_svn(None, 'update', '-r2', wc_dir)
  # 6) apply local changes
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(wc_dir, test_case.name))
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while performing local action in '%s'" % test_case.name)
      raise
  # 7) update to -r3, conflicting with incoming changes.
  #    A lot of different things are expected.
  #    Do separate update operations for each test case.
  for test_case in greater_scheme:
    try:
      base = j(wc_dir, test_case.name)
      x_out = test_case.expected_output
      if x_out != None:
        x_out = x_out.copy()
        x_out.wc_dir = base
      x_disk = test_case.expected_disk
      x_status = test_case.expected_status
      if x_status != None:
        # NOTE(review): the result of .copy() is discarded here, so the
        # test case's own expected_status object is mutated below —
        # compare deep_trees_skipping_on_update, which assigns the copy.
        # Confirm whether this sharing is intentional before changing.
        x_status.copy()
        x_status.wc_dir = base
      run_and_verify_update(base, x_out, x_disk, None,
                            error_re_string = test_case.error_re_string)
      if x_status:
        run_and_verify_unquiet_status(base, x_status)
      x_info = test_case.expected_info or {}
      for path in x_info:
        run_and_verify_info([x_info[path]], j(base, path))
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while verifying in '%s'" % test_case.name)
      raise
  # 8) Verify that commit fails.
  for test_case in greater_scheme:
    try:
      base = j(wc_dir, test_case.name)
      x_status = test_case.expected_status
      if x_status != None:
        # NOTE(review): same discarded .copy() as in step 7.
        x_status.copy()
        x_status.wc_dir = base
      run_and_verify_commit(base, None, x_status,
                            test_case.commit_block_string,
                            base)
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while checking commit-blocking in '%s'" % test_case.name)
      raise
def deep_trees_skipping_on_update(sbox, test_case, skip_paths,
                                  chdir_skip_paths):
  """
  Create tree conflicts, then update again, expecting the existing tree
  conflicts to be skipped.
  SKIP_PATHS is a list of paths, relative to the "base dir", for which
  "update" on the "base dir" should report as skipped.
  CHDIR_SKIP_PATHS is a list of (target-path, skipped-path) pairs for which
  an update of "target-path" (relative to the "base dir") should result in
  "skipped-path" (relative to "target-path") being reported as skipped.
  """
  """FURTHER_ACTION is a function that will make a further modification to
  each target, this being the modification that we expect to be skipped. The
  function takes the "base dir" (the WC path to the test case directory) as
  its only argument."""
  further_action = deep_trees_tree_del_repos
  j = os.path.join
  wc_dir = sbox.wc_dir
  base = j(wc_dir, test_case.name)
  # Initialize: generate conflicts. (We do not check anything here.)
  setup_case = DeepTreesTestCase(test_case.name,
                                 test_case.local_action,
                                 test_case.incoming_action,
                                 None,
                                 None,
                                 None)
  deep_trees_run_tests_scheme_for_update(sbox, [setup_case])
  # Make a further change to each target in the repository so there is a new
  # revision to update to. (This is r4.)
  further_action(sbox.repo_url + '/' + test_case.name)
  # Update whole working copy, expecting the nodes still in conflict to be
  # skipped.
  x_out = test_case.expected_output
  if x_out != None:
    x_out = x_out.copy()
    x_out.wc_dir = base
  x_disk = test_case.expected_disk
  x_status = test_case.expected_status
  if x_status != None:
    x_status = x_status.copy()
    x_status.wc_dir = base
    # Account for nodes that were updated by further_action
    x_status.tweak('', 'D', 'F', 'DD', 'DF', 'DDD', 'DDF', wc_rev=4)
  run_and_verify_update(base, x_out, x_disk, None,
                        error_re_string = test_case.error_re_string)
  run_and_verify_unquiet_status(base, x_status)
  # Try to update each in-conflict subtree. Expect a 'Skipped' output for
  # each, and the WC status to be unchanged.
  for path in skip_paths:
    run_and_verify_update(j(base, path),
                          wc.State(base, {path : Item(verb='Skipped')}),
                          None, None)
  run_and_verify_unquiet_status(base, x_status)
  # Try to update each in-conflict subtree. Expect a 'Skipped' output for
  # each, and the WC status to be unchanged.
  # This time, cd to the subdir before updating it.
  # NOTE(review): no os.chdir into the subdir actually happens in this
  # loop any more (targets are passed as absolute-ish paths instead);
  # the os.chdir(was_cwd) below is a harmless leftover restore.
  was_cwd = os.getcwd()
  for path, skipped in chdir_skip_paths:
    # 'skipped' may be a single path or a list of paths.
    if isinstance(skipped, list):
      expected_skip = {}
      for p in skipped:
        expected_skip[p] = Item(verb='Skipped')
    else:
      expected_skip = {skipped : Item(verb='Skipped')}
    p = j(base, path)
    run_and_verify_update(p,
                          wc.State(p, expected_skip),
                          None, None)
  os.chdir(was_cwd)
  run_and_verify_unquiet_status(base, x_status)
  # Verify that commit still fails.
  for path, skipped in chdir_skip_paths:
    run_and_verify_commit(j(base, path), None, None,
                          test_case.commit_block_string,
                          base)
  run_and_verify_unquiet_status(base, x_status)
def deep_trees_run_tests_scheme_for_switch(sbox, greater_scheme):
  """
  Runs a given list of tests for conflicts occuring at a switch operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     again contains two subdirs: one "local" and one "incoming" for
     the switch operation. These contain a set of deep_trees each.

  2) A commit is performed across all test cases and depths.
     (our initial state, -r2)

  3) In each test case subdir's incoming subdir, the
     incoming actions are performed.

  4) A commit is performed across all test cases and depths. (-r3)

  5) In each test case subdir's local subdir, the local actions are
     performed. They remain uncommitted in the working copy.

  6) In each test case subdir's local dir, a switch is performed to its
     corresponding incoming dir.
     This causes conflicts between the "local" state in the working
     copy and the "incoming" state from the incoming subdir (still -r3).

  7) A commit is performed in each separate container, to verify
     that each tree-conflict indeed blocks a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """
  j = os.path.join
  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir
  # 1) Create directories.
  for test_case in greater_scheme:
    try:
      base = j(sbox.wc_dir, test_case.name)
      os.makedirs(base)
      make_deep_trees(j(base, "local"))
      make_deep_trees(j(base, "incoming"))
      main.run_svn(None, 'add', base)
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while setting up deep trees in '%s'" % test_case.name)
      raise
  # 2) Commit initial state (-r2).
  main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)
  # 3) Apply incoming changes
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while performing incoming action in '%s'" % test_case.name)
      raise
  # 4) Commit all changes (-r3).
  main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)
  # 5) Apply local changes in their according subdirs.
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while performing local action in '%s'" % test_case.name)
      raise
  # 6) switch the local dir to the incoming url, conflicting with incoming
  #    changes. A lot of different things are expected.
  #    Do separate switch operations for each test case.
  for test_case in greater_scheme:
    try:
      local = j(wc_dir, test_case.name, "local")
      incoming = sbox.repo_url + "/" + test_case.name + "/incoming"
      x_out = test_case.expected_output
      if x_out != None:
        x_out = x_out.copy()
        x_out.wc_dir = local
      x_disk = test_case.expected_disk
      x_status = test_case.expected_status
      if x_status != None:
        # NOTE(review): .copy() result discarded — the shared
        # expected_status object is mutated (same pattern as the update
        # scheme); confirm intent before changing.
        x_status.copy()
        x_status.wc_dir = local
      run_and_verify_switch(local, local, incoming, x_out, x_disk, None,
                            test_case.error_re_string, None, None, None,
                            None, False, '--ignore-ancestry')
      run_and_verify_unquiet_status(local, x_status)
      x_info = test_case.expected_info or {}
      for path in x_info:
        run_and_verify_info([x_info[path]], j(local, path))
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while verifying in '%s'" % test_case.name)
      raise
  # 7) Verify that commit fails.
  for test_case in greater_scheme:
    try:
      local = j(wc_dir, test_case.name, 'local')
      x_status = test_case.expected_status
      if x_status != None:
        x_status.copy()
        x_status.wc_dir = local
      run_and_verify_commit(local, None, x_status,
                            test_case.commit_block_string,
                            local)
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while checking commit-blocking in '%s'" % test_case.name)
      raise
def deep_trees_run_tests_scheme_for_merge(sbox, greater_scheme,
                                          do_commit_local_changes,
                                          do_commit_conflicts=True,
                                          ignore_ancestry=False):
  """
  Runs a given list of tests for conflicts occuring at a merge operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     initially contains another subdir, called "incoming", which
     contains a set of deep_trees.

  2) A commit is performed across all test cases and depths.
     (a pre-initial state)

  3) In each test case subdir, the "incoming" subdir is copied to "local",
     via the `svn copy' command. Each test case's subdir now has two sub-
     dirs: "local" and "incoming", initial states for the merge operation.

  4) An update is performed across all test cases and depths, so that the
     copies made in 3) are pulled into the wc.

  5) In each test case's "incoming" subdir, the incoming action is
     performed.

  6) A commit is performed across all test cases and depths, to commit
     the incoming changes.
     If do_commit_local_changes is True, this becomes step 7 (swap steps).

  7) In each test case's "local" subdir, the local_action is performed.
     If do_commit_local_changes is True, this becomes step 6 (swap steps).
     Then, in effect, the local changes are committed as well.

  8) In each test case subdir, the "incoming" subdir is merged into the
     "local" subdir. If ignore_ancestry is True, then the merge is done
     with the --ignore-ancestry option, so mergeinfo is neither considered
     nor recorded. This causes conflicts between the "local" state in the
     working copy and the "incoming" state from the incoming subdir.

  9) If do_commit_conflicts is True, then a commit is performed in each
     separate container, to verify that each tree-conflict indeed blocks
     a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """
  j = os.path.join
  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir
  # 1) Create directories.
  for test_case in greater_scheme:
    try:
      base = j(sbox.wc_dir, test_case.name)
      os.makedirs(base)
      make_deep_trees(j(base, "incoming"))
      main.run_svn(None, 'add', base)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while setting up deep trees in '%s'" % test_case.name)
      raise
  # 2) Commit pre-initial state (-r2).
  main.run_svn(None, 'commit', '-m', 'pre-initial state', wc_dir)
  # 3) Copy "incoming" to "local" (repository-side copy, committed as r3).
  for test_case in greater_scheme:
    try:
      base_url = sbox.repo_url + "/" + test_case.name
      incoming_url = base_url + "/incoming"
      local_url = base_url + "/local"
      main.run_svn(None, 'cp', incoming_url, local_url, '-m',
                   'copy incoming to local')
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while copying deep trees in '%s'" % test_case.name)
      raise
  # 4) Update to load all of the "/local" subdirs into the working copies.
  try:
    main.run_svn(None, 'up', sbox.wc_dir)
  except:
    print("ERROR IN: Tests scheme for merge: "
        + "while updating local subdirs")
    raise
  # 5) Perform incoming actions
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while performing incoming action in '%s'" % test_case.name)
      raise
  # 6) or 7) Commit all incoming actions
  if not do_commit_local_changes:
    try:
      main.run_svn(None, 'ci', '-m', 'Committing incoming actions',
                   sbox.wc_dir)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while committing incoming actions")
      raise
  # 7) or 6) Perform all local actions.
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while performing local action in '%s'" % test_case.name)
      raise
  # 6) or 7) Commit all incoming actions (and, this time, the local ones).
  if do_commit_local_changes:
    try:
      main.run_svn(None, 'ci', '-m', 'Committing incoming and local actions',
                   sbox.wc_dir)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while committing incoming and local actions")
      raise
  # 8) Merge all "incoming" subdirs to their respective "local" subdirs.
  #    This creates conflicts between the local changes in the "local" wc
  #    subdirs and the incoming states committed in the "incoming" subdirs.
  for test_case in greater_scheme:
    try:
      local = j(sbox.wc_dir, test_case.name, "local")
      incoming = sbox.repo_url + "/" + test_case.name + "/incoming"
      x_out = test_case.expected_output
      if x_out != None:
        x_out = x_out.copy()
        x_out.wc_dir = local
      x_disk = test_case.expected_disk
      x_status = test_case.expected_status
      if x_status != None:
        # NOTE(review): .copy() result discarded — the shared
        # expected_status object is mutated; same pattern as the update
        # and switch schemes (cf. deep_trees_skipping_on_update, which
        # assigns the copy). Confirm intent before changing.
        x_status.copy()
        x_status.wc_dir = local
      x_skip = test_case.expected_skip
      if x_skip != None:
        # NOTE(review): same discarded .copy() here.
        x_skip.copy()
        x_skip.wc_dir = local
      varargs = (local,)
      if ignore_ancestry:
        varargs = varargs + ('--ignore-ancestry',)
      run_and_verify_merge(local, None, None, incoming, None,
                           x_out, None, None, x_disk, None, x_skip,
                           test_case.error_re_string,
                           None, None, None, None,
                           False, False, *varargs)
      run_and_verify_unquiet_status(local, x_status)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while verifying in '%s'" % test_case.name)
      raise
  # 9) Verify that commit fails.
  if do_commit_conflicts:
    for test_case in greater_scheme:
      try:
        local = j(wc_dir, test_case.name, 'local')
        x_status = test_case.expected_status
        if x_status != None:
          x_status.copy()
          x_status.wc_dir = local
        run_and_verify_commit(local, None, x_status,
                              test_case.commit_block_string,
                              local)
      except:
        print("ERROR IN: Tests scheme for merge: "
            + "while checking commit-blocking in '%s'" % test_case.name)
        raise
# ---- unrelated file boundary: the code below belongs to a different module (scipy.signal signaltools) ----
# Author: Travis Oliphant
# 1999 -- 2002
from __future__ import division, print_function, absolute_import
import warnings
import threading
from . import sigtools
from scipy._lib.six import callable
from scipy._lib._version import NumpyVersion
from scipy import linalg
from scipy.fftpack import (fft, ifft, ifftshift, fft2, ifft2, fftn,
ifftn, fftfreq)
from numpy.fft import rfftn, irfftn
from numpy import (allclose, angle, arange, argsort, array, asarray,
atleast_1d, atleast_2d, cast, dot, exp, expand_dims,
iscomplexobj, isscalar, mean, ndarray, newaxis, ones, pi,
poly, polyadd, polyder, polydiv, polymul, polysub, polyval,
prod, product, r_, ravel, real_if_close, reshape,
roots, sort, sum, take, transpose, unique, where, zeros,
zeros_like)
import numpy as np
from scipy.special import factorial
from .windows import get_window
from ._arraytools import axis_slice, axis_reverse, odd_ext, even_ext, const_ext
# Public API of this signal-processing module.
__all__ = ['correlate', 'fftconvolve', 'convolve', 'convolve2d', 'correlate2d',
           'order_filter', 'medfilt', 'medfilt2d', 'wiener', 'lfilter',
           'lfiltic', 'sosfilt', 'deconvolve', 'hilbert', 'hilbert2',
           'cmplx_sort', 'unique_roots', 'invres', 'invresz', 'residue',
           'residuez', 'resample', 'detrend', 'lfilter_zi', 'sosfilt_zi',
           'filtfilt', 'decimate', 'vectorstrength']
# Integer flags understood by sigtools for the output-size mode.
_modedict = {'valid': 0, 'same': 1, 'full': 2}
# Integer flags for 2-D boundary handling; shifted left by 2 bits before
# being handed to sigtools (see _bvalfromboundary).
_boundarydict = {'fill': 0, 'pad': 0, 'wrap': 2, 'circular': 2, 'symm': 1,
                 'symmetric': 1, 'reflect': 4}
# numpy.fft's rfftn/irfftn are threadsafe from 1.9 on; for older NumPy,
# fftconvolve serializes calls through _rfft_lock.
_rfft_mt_safe = (NumpyVersion(np.__version__) >= '1.9.0.dev-e24486e')
_rfft_lock = threading.Lock()
def _valfrommode(mode):
    """Translate a mode flag ('valid'/'same'/'full' or 0/1/2) to the
    integer value sigtools expects; raise ValueError otherwise."""
    if mode in _modedict:
        return _modedict[mode]
    if mode in (0, 1, 2):
        return mode
    raise ValueError("Acceptable mode flags are 'valid' (0),"
                     " 'same' (1), or 'full' (2).")
def _bvalfromboundary(boundary):
    """Translate a 2-D boundary flag ('fill'/'wrap'/'symm' or an accepted
    numeric value) to the bit-shifted integer sigtools expects.

    Raises ValueError for unrecognized flags.
    """
    try:
        val = _boundarydict[boundary] << 2
    except KeyError:
        # Bug fix: the original tested the still-unbound local `val` here,
        # which raised UnboundLocalError instead of the intended ValueError
        # for any unrecognized (non-numeric) boundary flag.
        if boundary not in [0, 1, 2]:
            raise ValueError("Acceptable boundary flags are 'fill', 'wrap'"
                             " (or 'circular'), \n and 'symm'"
                             " (or 'symmetric').")
        val = boundary << 2
    return val
def _check_valid_mode_shapes(shape1, shape2):
for d1, d2 in zip(shape1, shape2):
if not d1 >= d2:
raise ValueError(
"in1 should have at least as many items as in2 in "
"every dimension for 'valid' mode.")
def correlate(in1, in2, mode='full'):
    """
    Cross-correlate two N-dimensional arrays.
    Cross-correlate `in1` and `in2`, with the output size determined by the
    `mode` argument.
    Parameters
    ----------
    in1 : array_like
        First input.
    in2 : array_like
        Second input. Should have the same number of dimensions as `in1`;
        if sizes of `in1` and `in2` are not equal then `in1` has to be the
        larger array.
    mode : str {'full', 'valid', 'same'}, optional
        A string indicating the size of the output:
        ``full``
           The output is the full discrete linear cross-correlation
           of the inputs. (Default)
        ``valid``
           The output consists only of those elements that do not
           rely on the zero-padding.
        ``same``
           The output is the same size as `in1`, centered
           with respect to the 'full' output.
    Returns
    -------
    correlate : array
        An N-dimensional array containing a subset of the discrete linear
        cross-correlation of `in1` with `in2`.
    Raises
    ------
    ValueError
        If `mode` is not one of 'valid', 'same', 'full', or if the inputs
        have different dimensionality.
    Notes
    -----
    The correlation z of two d-dimensional arrays x and y is defined as:
      z[...,k,...] = sum[..., i_l, ...]
                         x[..., i_l,...] * conj(y[..., i_l + k,...])
    Examples
    --------
    Implement a matched filter using cross-correlation, to recover a signal
    that has passed through a noisy channel.
    >>> from scipy import signal
    >>> sig = np.repeat([0., 1., 1., 0., 1., 0., 0., 1.], 128)
    >>> sig_noise = sig + np.random.randn(len(sig))
    >>> corr = signal.correlate(sig_noise, np.ones(128), mode='same') / 128
    >>> import matplotlib.pyplot as plt
    >>> clock = np.arange(64, len(sig), 128)
    >>> fig, (ax_orig, ax_noise, ax_corr) = plt.subplots(3, 1, sharex=True)
    >>> ax_orig.plot(sig)
    >>> ax_orig.plot(clock, sig[clock], 'ro')
    >>> ax_orig.set_title('Original signal')
    >>> ax_noise.plot(sig_noise)
    >>> ax_noise.set_title('Signal with noise')
    >>> ax_corr.plot(corr)
    >>> ax_corr.plot(clock, corr[clock], 'ro')
    >>> ax_corr.axhline(0.5, ls=':')
    >>> ax_corr.set_title('Cross-correlated with rectangular pulse')
    >>> ax_orig.margins(0, 0.1)
    >>> fig.show()
    """
    in1 = asarray(in1)
    in2 = asarray(in2)
    # Don't use _valfrommode, since correlate should not accept numeric modes
    try:
        val = _modedict[mode]
    except KeyError:
        raise ValueError("Acceptable mode flags are 'valid',"
                         " 'same', or 'full'.")
    # Scalar (0-d) inputs degenerate to a plain product.
    if in1.ndim == in2.ndim == 0:
        return in1 * in2
    elif not in1.ndim == in2.ndim:
        raise ValueError("in1 and in2 should have the same dimensionality")
    if mode == 'valid':
        _check_valid_mode_shapes(in1.shape, in2.shape)
        # Output shape per axis for 'valid': len(in1) - len(in2) + 1.
        ps = [i - j + 1 for i, j in zip(in1.shape, in2.shape)]
        out = np.empty(ps, in1.dtype)
        z = sigtools._correlateND(in1, in2, out, val)
    else:
        # _correlateND is far slower when in2.size > in1.size, so swap them
        # and then undo the effect afterward
        swapped_inputs = (mode == 'full') and (in2.size > in1.size)
        if swapped_inputs:
            in1, in2 = in2, in1
        # Full-correlation output shape per axis: len(in1) + len(in2) - 1.
        ps = [i + j - 1 for i, j in zip(in1.shape, in2.shape)]
        # zero pad input
        in1zpadded = np.zeros(ps, in1.dtype)
        # NOTE(review): indexing with a list of slices is deprecated in
        # newer NumPy releases; a tuple would be required there — confirm
        # against the supported NumPy versions.
        sc = [slice(0, i) for i in in1.shape]
        in1zpadded[sc] = in1.copy()
        if mode == 'full':
            out = np.empty(ps, in1.dtype)
        elif mode == 'same':
            out = np.empty(in1.shape, in1.dtype)
        z = sigtools._correlateND(in1zpadded, in2, out, val)
        # Reverse and conjugate to undo the effect of swapping inputs
        if swapped_inputs:
            slice_obj = [slice(None, None, -1)] * len(z.shape)
            z = z[slice_obj].conj()
    return z
def _centered(arr, newsize):
# Return the center newsize portion of the array.
newsize = asarray(newsize)
currsize = array(arr.shape)
startind = (currsize - newsize) // 2
endind = startind + newsize
myslice = [slice(startind[k], endind[k]) for k in range(len(endind))]
return arr[tuple(myslice)]
def _next_regular(target):
"""
Find the next regular number greater than or equal to target.
Regular numbers are composites of the prime factors 2, 3, and 5.
Also known as 5-smooth numbers or Hamming numbers, these are the optimal
size for inputs to FFTPACK.
Target must be a positive integer.
"""
if target <= 6:
return target
# Quickly check if it's already a power of 2
if not (target & (target-1)):
return target
match = float('inf') # Anything found will be smaller
p5 = 1
while p5 < target:
p35 = p5
while p35 < target:
# Ceiling integer division, avoiding conversion to float
# (quotient = ceil(target / p35))
quotient = -(-target // p35)
# Quickly find next power of 2 >= quotient
try:
p2 = 2**((quotient - 1).bit_length())
except AttributeError:
# Fallback for Python <2.7
p2 = 2**(len(bin(quotient - 1)) - 2)
N = p2 * p35
if N == target:
return N
elif N < match:
match = N
p35 *= 3
if p35 == target:
return p35
if p35 < match:
match = p35
p5 *= 5
if p5 == target:
return p5
if p5 < match:
match = p5
return match
def fftconvolve(in1, in2, mode="full"):
    """Convolve two N-dimensional arrays using FFT.
    Convolve `in1` and `in2` using the fast Fourier transform method, with
    the output size determined by the `mode` argument.
    This is generally much faster than `convolve` for large arrays (n > ~500),
    but can be slower when only a few output values are needed, and can only
    output float arrays (int or object array inputs will be cast to float).
    Parameters
    ----------
    in1 : array_like
        First input.
    in2 : array_like
        Second input. Should have the same number of dimensions as `in1`;
        if sizes of `in1` and `in2` are not equal then `in1` has to be the
        larger array.
    mode : str {'full', 'valid', 'same'}, optional
        A string indicating the size of the output:
        ``full``
           The output is the full discrete linear convolution
           of the inputs. (Default)
        ``valid``
           The output consists only of those elements that do not
           rely on the zero-padding.
        ``same``
           The output is the same size as `in1`, centered
           with respect to the 'full' output.
    Returns
    -------
    out : array
        An N-dimensional array containing a subset of the discrete linear
        convolution of `in1` with `in2`.
    Raises
    ------
    ValueError
        If `mode` is invalid or the inputs have different dimensionality.
    Examples
    --------
    Autocorrelation of white noise is an impulse.  (This is at least 100 times
    as fast as `convolve`.)
    >>> from scipy import signal
    >>> sig = np.random.randn(1000)
    >>> autocorr = signal.fftconvolve(sig, sig[::-1], mode='full')
    """
    in1 = asarray(in1)
    in2 = asarray(in2)
    if in1.ndim == in2.ndim == 0:  # scalar inputs
        return in1 * in2
    elif not in1.ndim == in2.ndim:
        raise ValueError("in1 and in2 should have the same dimensionality")
    elif in1.size == 0 or in2.size == 0:  # empty arrays
        return array([])
    # Validate the mode up front so a bad flag fails before any FFT work
    # (the original only raised after computing the full convolution).
    if mode not in ("full", "same", "valid"):
        raise ValueError("Acceptable mode flags are 'valid',"
                         " 'same', or 'full'.")
    s1 = array(in1.shape)
    s2 = array(in2.shape)
    # Bug fix: np.complex is merely an alias of the builtin complex
    # (complex128), so the original test misclassified complex64 inputs;
    # np.complexfloating covers every complex dtype.
    complex_result = (np.issubdtype(in1.dtype, np.complexfloating) or
                      np.issubdtype(in2.dtype, np.complexfloating))
    shape = s1 + s2 - 1
    if mode == "valid":
        _check_valid_mode_shapes(s1, s2)
    # Speed up FFT by padding to optimal size for FFTPACK
    fshape = [_next_regular(int(d)) for d in shape]
    fslice = tuple([slice(0, int(sz)) for sz in shape])
    # Pre-1.9 NumPy FFT routines are not threadsafe. For older NumPys, make
    # sure we only call rfftn/irfftn from one thread at a time.
    if not complex_result and (_rfft_mt_safe or _rfft_lock.acquire(False)):
        try:
            ret = irfftn(rfftn(in1, fshape) *
                         rfftn(in2, fshape), fshape)[fslice].copy()
        finally:
            if not _rfft_mt_safe:
                _rfft_lock.release()
    else:
        # If we're here, it's either because we need a complex result, or we
        # failed to acquire _rfft_lock (meaning rfftn isn't threadsafe and
        # is already in use by another thread). In either case, use the
        # (threadsafe but slower) SciPy complex-FFT routines instead.
        ret = ifftn(fftn(in1, fshape) * fftn(in2, fshape))[fslice].copy()
        if not complex_result:
            ret = ret.real
    if mode == "full":
        return ret
    elif mode == "same":
        return _centered(ret, s1)
    else:  # mode == "valid", guaranteed by the check at function entry
        return _centered(ret, s1 - s2 + 1)
def convolve(in1, in2, mode='full'):
    """
    Convolve two N-dimensional arrays.

    Parameters
    ----------
    in1 : array_like
        First input.
    in2 : array_like
        Second input. Should have the same number of dimensions as `in1`;
        if the sizes differ, `in1` must be the larger array.
    mode : str {'full', 'valid', 'same'}, optional
        Output size: the full linear convolution ('full', default), only
        the part not relying on zero-padding ('valid'), or the same size
        as `in1`, centered with respect to 'full' ('same').

    Returns
    -------
    convolve : array
        An N-dimensional array containing a subset of the discrete linear
        convolution of `in1` with `in2`.

    See also
    --------
    numpy.polymul : performs polynomial multiplication (same operation, but
                    also accepts poly1d objects)
    """
    volume = asarray(in1)
    kernel = asarray(in2)
    # 0-d inputs degenerate to a plain product.
    if volume.ndim == kernel.ndim == 0:
        return volume * kernel
    # Convolution is correlation with the kernel reversed along every axis
    # (and conjugated, for complex kernels).
    reverse = (slice(None, None, -1),) * kernel.ndim
    flipped = kernel[reverse]
    if np.iscomplexobj(kernel):
        flipped = flipped.conj()
    return correlate(volume, flipped, mode)
def order_filter(a, domain, rank):
    """
    Perform an order filter on an N-dimensional array.

    The non-zero elements of `domain` select, for each input position, a
    neighborhood of values; the neighborhood is sorted and the element at
    position `rank` in the sorted list becomes the output value.

    Parameters
    ----------
    a : ndarray
        The N-dimensional input array.
    domain : array_like
        A mask array with the same number of dimensions as `a`; every
        dimension must have an odd number of elements.
    rank : int
        Non-negative index into the sorted neighborhood (0 selects the
        smallest element).

    Returns
    -------
    out : ndarray
        The filtered array, same shape as `a`.

    Examples
    --------
    >>> from scipy import signal
    >>> x = np.arange(25).reshape(5, 5)
    >>> domain = np.identity(3)
    >>> signal.order_filter(x, domain, 0)
    array([[  0.,   0.,   0.,   0.,   0.],
           [  0.,   0.,   1.,   2.,   0.],
           [  0.,   5.,   6.,   7.,   0.],
           [  0.,  10.,  11.,  12.,   0.],
           [  0.,   0.,   0.,   0.,   0.]])
    """
    domain = asarray(domain)
    # Every window dimension must be odd so it has a well-defined center.
    if any((dim % 2) == 0 for dim in domain.shape):
        raise ValueError("Each dimension of domain argument "
                         " should have an odd number of elements.")
    return sigtools._order_filterND(a, domain, rank)
def medfilt(volume, kernel_size=None):
    """
    Perform a median filter on an N-dimensional array.

    Parameters
    ----------
    volume : array_like
        An N-dimensional input array.
    kernel_size : array_like, optional
        Scalar or N-length list of odd window sizes, one per dimension.
        A scalar is used for every dimension; default is 3 everywhere.

    Returns
    -------
    out : ndarray
        Median-filtered array, same shape as the input.
    """
    volume = atleast_1d(volume)
    if kernel_size is None:
        kernel_size = [3] * volume.ndim
    kernel_size = asarray(kernel_size)
    # Broadcast a scalar window size across every dimension.
    if kernel_size.shape == ():
        kernel_size = np.repeat(kernel_size.item(), volume.ndim)
    if any(kernel_size[k] % 2 != 1 for k in range(volume.ndim)):
        raise ValueError("Each element of kernel_size should be odd.")
    # The median is the middle element of the sorted full window.
    domain = ones(kernel_size)
    order = product(kernel_size, axis=0) // 2
    return sigtools._order_filterND(volume, domain, order)
def wiener(im, mysize=None, noise=None):
    """
    Perform a Wiener filter on an N-dimensional array.

    Parameters
    ----------
    im : ndarray
        An N-dimensional array.
    mysize : int or arraylike, optional
        Scalar or N-length list of odd window sizes, one per dimension.
        A scalar is used for every dimension.
    noise : float, optional
        The noise power. If None, it is estimated as the average of the
        local variance of the input.

    Returns
    -------
    out : ndarray
        Wiener filtered result with the same shape as `im`.
    """
    im = asarray(im)
    if mysize is None:
        mysize = [3] * im.ndim
    mysize = asarray(mysize)
    if mysize.shape == ():
        mysize = np.repeat(mysize.item(), im.ndim)
    window_size = product(mysize, axis=0)
    # Local first and second moments via box correlation.
    local_mean = correlate(im, ones(mysize), 'same') / window_size
    local_var = (correlate(im ** 2, ones(mysize), 'same') / window_size
                 - local_mean ** 2)
    # Default noise power: average local variance over the whole image.
    if noise is None:
        noise = mean(ravel(local_var), axis=0)
    # Wiener estimate: shrink toward the local mean by the SNR factor.
    res = im - local_mean
    res *= (1 - noise / local_var)
    res += local_mean
    return where(local_var < noise, local_mean, res)
def convolve2d(in1, in2, mode='full', boundary='fill', fillvalue=0):
    """
    Convolve two 2-dimensional arrays.

    Parameters
    ----------
    in1, in2 : array_like
        Two-dimensional input arrays to be convolved.
    mode : str {'full', 'valid', 'same'}, optional
        Output size: the full linear convolution ('full', default), only
        the part not relying on zero-padding ('valid'), or the same size
        as `in1`, centered with respect to 'full' ('same').
    boundary : str {'fill', 'wrap', 'symm'}, optional
        Boundary handling: pad with `fillvalue` ('fill', default),
        circular ('wrap'), or symmetrical ('symm').
    fillvalue : scalar, optional
        Value used to pad the input arrays. Default is 0.

    Returns
    -------
    out : ndarray
        A 2-dimensional array containing a subset of the discrete linear
        convolution of `in1` with `in2`.
    """
    in1 = asarray(in1)
    in2 = asarray(in2)
    if mode == 'valid':
        _check_valid_mode_shapes(in1.shape, in2.shape)
    mode_flag = _valfrommode(mode)
    boundary_flag = _bvalfromboundary(boundary)
    with warnings.catch_warnings():
        # FIXME: some cast inside sigtools._convolve2d generates a
        # ComplexWarning; suppress it here.
        warnings.simplefilter('ignore', np.ComplexWarning)
        result = sigtools._convolve2d(in1, in2, 1, mode_flag, boundary_flag,
                                      fillvalue)
    return result
def correlate2d(in1, in2, mode='full', boundary='fill', fillvalue=0):
    """
    Cross-correlate two 2-dimensional arrays.

    Parameters
    ----------
    in1, in2 : array_like
        Two-dimensional input arrays to be convolved.
    mode : str {'full', 'valid', 'same'}, optional
        Output size: the full linear cross-correlation ('full', default),
        only the part not relying on zero-padding ('valid'), or the same
        size as `in1`, centered with respect to 'full' ('same').
    boundary : str {'fill', 'wrap', 'symm'}, optional
        Boundary handling: pad with `fillvalue` ('fill', default),
        circular ('wrap'), or symmetrical ('symm').
    fillvalue : scalar, optional
        Value used to pad the input arrays. Default is 0.

    Returns
    -------
    correlate2d : ndarray
        A 2-dimensional array containing a subset of the discrete linear
        cross-correlation of `in1` with `in2`.
    """
    in1 = asarray(in1)
    in2 = asarray(in2)
    if mode == 'valid':
        _check_valid_mode_shapes(in1.shape, in2.shape)
    mode_flag = _valfrommode(mode)
    boundary_flag = _bvalfromboundary(boundary)
    with warnings.catch_warnings():
        # FIXME: some cast inside sigtools._convolve2d generates a
        # ComplexWarning; suppress it here.
        warnings.simplefilter('ignore', np.ComplexWarning)
        # Third argument 0 selects correlation (no kernel flip).
        result = sigtools._convolve2d(in1, in2, 0, mode_flag, boundary_flag,
                                      fillvalue)
    return result
def medfilt2d(input, kernel_size=3):
    """
    Median filter a 2-dimensional array.

    Parameters
    ----------
    input : array_like
        A 2-dimensional input array.
    kernel_size : array_like, optional
        Scalar or length-2 list of odd window sizes. A scalar applies to
        both dimensions; the default is a (3, 3) window.

    Returns
    -------
    out : ndarray
        Median-filtered array, same shape as `input`.
    """
    image = asarray(input)
    if kernel_size is None:
        kernel_size = [3] * 2
    kernel_size = asarray(kernel_size)
    # Broadcast a scalar window size to both dimensions.
    if kernel_size.shape == ():
        kernel_size = np.repeat(kernel_size.item(), 2)
    if any((dim % 2) != 1 for dim in kernel_size):
        raise ValueError("Each element of kernel_size should be odd.")
    return sigtools._medfilt2d(image, kernel_size)
def lfilter(b, a, x, axis=-1, zi=None):
    """
    Filter data along one dimension with an IIR or FIR filter.

    Applies a digital filter in direct form II transposed; works for many
    fundamental data types (including Object type).

    Parameters
    ----------
    b : array_like
        The numerator coefficient vector in a 1-D sequence.
    a : array_like
        The denominator coefficient vector in a 1-D sequence. If ``a[0]``
        is not 1, then both `a` and `b` are normalized by ``a[0]``.
    x : array_like
        An N-dimensional input array.
    axis : int
        The axis of `x` along which to apply the filter. Default is -1.
    zi : array_like, optional
        Initial conditions for the filter delays, of length
        ``max(len(a), len(b)) - 1`` per filtered subarray. If None,
        initial rest is assumed. See `lfiltic`.

    Returns
    -------
    y : array
        The output of the digital filter.
    zf : array, optional
        Final filter delay values; only returned when `zi` is not None.
    """
    # A scalar denominator describes an FIR filter; wrap it in a sequence.
    if isscalar(a):
        a = [a]
    # sigtools returns (y, zf) only when initial conditions are supplied.
    if zi is None:
        return sigtools._linear_filter(b, a, x, axis)
    return sigtools._linear_filter(b, a, x, axis, zi)
def lfiltic(b, a, y, x=None):
    """
    Construct initial conditions for lfilter.

    Given a linear filter (b, a) and initial conditions on the output `y`
    and the input `x`, return the initial state vector `zi` that makes
    `lfilter` reproduce that history.

    Parameters
    ----------
    b : array_like
        Linear filter term (numerator).
    a : array_like
        Linear filter term (denominator).
    y : array_like
        Past outputs ``{y[-1], y[-2], ..., y[-N]}`` with ``N = len(a) - 1``;
        padded with zeros if too short.
    x : array_like, optional
        Past inputs ``{x[-1], x[-2], ..., x[-M]}`` with ``M = len(b) - 1``;
        assumed zero if not given, padded with zeros if too short.

    Returns
    -------
    zi : ndarray
        The state vector of length ``K = max(M, N)``.

    See Also
    --------
    lfilter
    """
    N = np.size(a) - 1
    M = np.size(b) - 1
    K = max(M, N)
    y = asarray(y)
    if y.dtype.kind in 'bui':
        # ensure calculations are floating point
        y = y.astype(np.float64)
    zi = zeros(K, y.dtype)
    if x is None:
        x = zeros(M, y.dtype)
    else:
        x = asarray(x)
        if np.size(x) < M:
            x = r_[x, zeros(M - np.size(x))]
    if np.size(y) < N:
        y = r_[y, zeros(N - np.size(y))]
    # Accumulate the direct-form-II-transposed state from the histories.
    for m in range(M):
        zi[m] = sum(b[m + 1:] * x[:M - m], axis=0)
    for m in range(N):
        zi[m] -= sum(a[m + 1:] * y[:N - m], axis=0)
    return zi
def deconvolve(signal, divisor):
    """Deconvolve ``divisor`` out of ``signal``.

    Returns quotient and remainder such that
    ``signal = convolve(divisor, quotient) + remainder``.

    Parameters
    ----------
    signal : array_like
        Signal data, typically a recorded signal.
    divisor : array_like
        Divisor data, typically an impulse response or filter that was
        applied to the original signal.

    Returns
    -------
    quotient : ndarray
        Quotient, typically the recovered original signal.
    remainder : ndarray
        Remainder.

    See also
    --------
    numpy.polydiv : performs polynomial division (same operation, but
                    also accepts poly1d objects)
    """
    num = atleast_1d(signal)
    den = atleast_1d(divisor)
    N = len(num)
    D = len(den)
    if D > N:
        # Divisor longer than signal: quotient is empty, remainder is all of it.
        quot = []
        rem = num
    else:
        # The quotient is the divisor's inverse-filter response to num,
        # obtained by filtering a unit impulse.
        impulse = zeros(N - D + 1, float)
        impulse[0] = 1
        quot = lfilter(num, den, impulse)
        rem = num - convolve(den, quot, mode='full')
    return quot, rem
def hilbert(x, N=None, axis=-1):
    """
    Compute the analytic signal, using the Hilbert transform.

    The negative half of the frequency spectrum is zeroed out, turning the
    real-valued signal into a complex one whose real part is the input and
    whose imaginary part is its Hilbert transform.

    Parameters
    ----------
    x : array_like
        Signal data. Must be real.
    N : int, optional
        Number of Fourier components. Default: ``x.shape[axis]``
    axis : int, optional
        Axis along which to do the transformation. Default: -1.

    Returns
    -------
    xa : ndarray
        Analytic signal of `x`, of each 1-D array along `axis`.

    References
    ----------
    .. [1] Wikipedia, "Analytic signal".
           http://en.wikipedia.org/wiki/Analytic_signal
    """
    x = asarray(x)
    if iscomplexobj(x):
        raise ValueError("x must be real.")
    if N is None:
        N = x.shape[axis]
    if N <= 0:
        raise ValueError("N must be positive.")
    Xf = fft(x, N, axis=axis)
    # Spectrum weights: DC (and Nyquist for even N) keep weight 1,
    # positive frequencies are doubled, negative frequencies are zeroed.
    h = zeros(N)
    if N % 2:
        h[0] = 1
        h[1:(N + 1) // 2] = 2
    else:
        h[0] = h[N // 2] = 1
        h[1:N // 2] = 2
    if x.ndim > 1:
        # Reshape h so it broadcasts along the transform axis only.
        ind = [newaxis] * x.ndim
        ind[axis] = slice(None)
        h = h[tuple(ind)]
    return ifft(Xf * h, axis=axis)
def hilbert2(x, N=None):
    """
    Compute the '2-D' analytic signal of `x`

    Parameters
    ----------
    x : array_like
        2-D signal data.
    N : int or tuple of two ints, optional
        Number of Fourier components. Default is ``x.shape``

    Returns
    -------
    xa : ndarray
        Analytic signal of `x` taken along axes (0,1).

    References
    ----------
    .. [1] Wikipedia, "Analytic signal",
        http://en.wikipedia.org/wiki/Analytic_signal
    """
    x = atleast_2d(x)
    if x.ndim > 2:
        raise ValueError("x must be 2-D.")
    if iscomplexobj(x):
        raise ValueError("x must be real.")
    if N is None:
        N = x.shape
    elif isinstance(N, int):
        if N <= 0:
            raise ValueError("N must be positive.")
        N = (N, N)
    elif len(N) != 2 or np.any(np.asarray(N) <= 0):
        raise ValueError("When given as a tuple, N must hold exactly "
                         "two positive integers")
    Xf = fft2(x, N, axes=(0, 1))
    # Build the per-axis spectrum weights: DC (and Nyquist, for even
    # lengths) keep weight 1, positive frequencies are doubled, negative
    # frequencies are zeroed.  This replaces the original eval()/exec()
    # construction, which was unsafe style and whose exec() was a no-op
    # under Python 3 function-scope semantics (it only worked because the
    # arrays were mutated in place through an alias).
    hs = []
    for N1 in N:
        h = zeros(N1, 'd')
        if N1 % 2 == 0:
            h[0] = h[N1 // 2] = 1
            h[1:N1 // 2] = 2
        else:
            h[0] = 1
            h[1:(N1 + 1) // 2] = 2
        hs.append(h)
    h = hs[0][:, newaxis] * hs[1][newaxis, :]
    # (The original carried a dead `while k > 2` reshape loop here; x is
    # always exactly 2-D at this point, so it never executed.)
    return ifft2(Xf * h, axes=(0, 1))
def cmplx_sort(p):
    """Sort roots: by absolute value for complex input, by value otherwise.

    Parameters
    ----------
    p : array_like
        The roots to sort, as a 1-D array.

    Returns
    -------
    p_sorted : ndarray
        Sorted roots.
    indx : ndarray
        Array of indices needed to sort the input `p`.
    """
    p = asarray(p)
    key = abs(p) if iscomplexobj(p) else p
    indx = argsort(key)
    return take(p, indx, 0), indx
def unique_roots(p, tol=1e-3, rtype='min'):
    """
    Determine unique roots and their multiplicities from a list of roots.

    Parameters
    ----------
    p : array_like
        The list of roots.
    tol : float, optional
        The tolerance for two roots to be considered equal. Default is 1e-3.
    rtype : {'max', 'min', 'avg'}, optional
        How to pick the representative when several roots fall within
        `tol` of each other: their maximum, minimum, or average.

    Returns
    -------
    pout : ndarray
        The list of unique roots, sorted from low to high.
    mult : ndarray
        The multiplicity of each root.

    Notes
    -----
    Not specific to roots: usable for any sequence of values whose
    uniqueness and multiplicity is wanted. See `numpy.unique` for a more
    general routine.
    """
    if rtype in ('max', 'maximum'):
        comproot = np.max
    elif rtype in ('min', 'minimum'):
        comproot = np.min
    elif rtype in ('avg', 'mean'):
        comproot = np.mean
    else:
        raise ValueError("`rtype` must be one of "
                         "{'max', 'maximum', 'min', 'minimum', 'avg', 'mean'}")
    p = asarray(p) * 1.0
    tol = abs(tol)
    p, indx = cmplx_sort(p)
    pout = []          # representatives of each cluster found so far
    mult = []          # corresponding multiplicities
    cluster = []       # members of the cluster currently being grown
    last = -1          # index in pout of the current cluster's entry
    curp = p[0] + 5 * tol  # sentinel guaranteed not to match p[0]
    for root in p:
        if abs(root - curp) < tol:
            # Same cluster: absorb the root and refresh the representative.
            cluster.append(root)
            curp = comproot(cluster)
            pout[last] = curp
            mult[last] += 1
        else:
            # Start a fresh cluster around this root.
            pout.append(root)
            mult.append(1)
            cluster = [root]
            curp = root
            last += 1
    return array(pout), array(mult)
def invres(r, p, k, tol=1e-3, rtype='avg'):
    """
    Compute b(s) and a(s) from partial fraction expansion.
    If ``M = len(b)`` and ``N = len(a)``::
              b(s)     b[0] x**(M-1) + b[1] x**(M-2) + ... + b[M-1]
      H(s) = ------ = ----------------------------------------------
              a(s)     a[0] x**(N-1) + a[1] x**(N-2) + ... + a[N-1]
               r[0]       r[1]             r[-1]
           = -------- + -------- + ... + --------- + k(s)
             (s-p[0])   (s-p[1])         (s-p[-1])
    If there are any repeated roots (closer than tol), then the partial
    fraction expansion has terms like::
          r[i]      r[i+1]              r[i+n-1]
        -------- + ----------- + ... + -----------
        (s-p[i])  (s-p[i])**2          (s-p[i])**n
    Parameters
    ----------
    r : ndarray
        Residues.
    p : ndarray
        Poles.
    k : ndarray
        Coefficients of the direct polynomial term.
    tol : float, optional
        The tolerance for two roots to be considered equal. Default is 1e-3.
    rtype : {'max', 'min, 'avg'}, optional
        How to determine the returned root if multiple roots are within
        `tol` of each other.
          'max': pick the maximum of those roots.
          'min': pick the minimum of those roots.
          'avg': take the average of those roots.
    See Also
    --------
    residue, unique_roots
    """
    # Keep the direct term aside; `k` is reused below as a loop index.
    extra = k
    # Sort poles so that repeated poles end up adjacent, and reorder the
    # residues to match the new pole order.
    p, indx = cmplx_sort(p)
    r = take(r, indx, 0)
    # Collapse nearly-equal poles (within `tol`) into unique poles with
    # their multiplicities.
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list from the deduplicated representatives.
    p = []
    for k in range(len(pout)):
        p.extend([pout[k]] * mult[k])
    # Denominator a(s): monic polynomial whose roots are the poles.
    a = atleast_1d(poly(p))
    if len(extra) > 0:
        # The direct term k(s) contributes k(s)*a(s) to the numerator.
        b = polymul(extra, a)
    else:
        b = [0]
    indx = 0
    for k in range(len(pout)):
        # Polynomial built from all poles except pout[k].
        temp = []
        for l in range(len(pout)):
            if l != k:
                temp.extend([pout[l]] * mult[l])
        for m in range(mult[k]):
            # For the (m+1)-th order term of a repeated pole, include
            # pout[k] itself with correspondingly reduced power.
            t2 = temp[:]
            t2.extend([pout[k]] * (mult[k] - m - 1))
            # Accumulate this residue's contribution to the numerator.
            b = polyadd(b, r[indx] * poly(t2))
            indx += 1
    b = real_if_close(b)
    # Strip (near-)zero leading coefficients so b has minimal degree.
    while allclose(b[0], 0, rtol=1e-14) and (b.shape[-1] > 1):
        b = b[1:]
    return b, a
def residue(b, a, tol=1e-3, rtype='avg'):
    """
    Compute partial-fraction expansion of b(s) / a(s).
    If ``M = len(b)`` and ``N = len(a)``, then the partial-fraction
    expansion H(s) is defined as::
              b(s)     b[0] s**(M-1) + b[1] s**(M-2) + ... + b[M-1]
      H(s) = ------ = ----------------------------------------------
              a(s)     a[0] s**(N-1) + a[1] s**(N-2) + ... + a[N-1]
               r[0]       r[1]             r[-1]
           = -------- + -------- + ... + --------- + k(s)
             (s-p[0])   (s-p[1])         (s-p[-1])
    If there are any repeated roots (closer together than `tol`), then H(s)
    has terms like::
          r[i]      r[i+1]              r[i+n-1]
        -------- + ----------- + ... + -----------
        (s-p[i])  (s-p[i])**2          (s-p[i])**n
    Returns
    -------
    r : ndarray
        Residues.
    p : ndarray
        Poles.
    k : ndarray
        Coefficients of the direct polynomial term.
    See Also
    --------
    invres, numpy.poly, unique_roots
    """
    b, a = map(asarray, (b, a))
    # Leading denominator coefficient; residues are rescaled by it at
    # the end because the derivation below assumes a monic a(s).
    rscale = a[0]
    # Direct term k(s) is the polynomial quotient; b becomes the remainder.
    k, b = polydiv(b, a)
    p = roots(a)
    # Residue array, complex-typed like p, filled in below.
    r = p * 0.0
    # Group nearly-equal poles (within `tol`) and get multiplicities.
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list from the deduplicated representatives.
    p = []
    for n in range(len(pout)):
        p.extend([pout[n]] * mult[n])
    p = asarray(p)
    # Compute the residue from the general formula
    indx = 0
    for n in range(len(pout)):
        bn = b.copy()
        # an(s): product over all poles except pout[n].
        pn = []
        for l in range(len(pout)):
            if l != n:
                pn.extend([pout[l]] * mult[l])
        an = atleast_1d(poly(pn))
        # bn(s) / an(s) is (s-po[n])**Nn * b(s) / a(s) where Nn is
        # multiplicity of pole at po[n]
        sig = mult[n]
        # Walk the derivative chain from highest-order term downward;
        # each pass past the first differentiates bn/an once (quotient rule).
        for m in range(sig, 0, -1):
            if sig > m:
                # compute next derivative of bn(s) / an(s)
                term1 = polymul(polyder(bn, 1), an)
                term2 = polymul(bn, polyder(an, 1))
                bn = polysub(term1, term2)
                an = polymul(an, an)
            r[indx + m - 1] = (polyval(bn, pout[n]) / polyval(an, pout[n])
                               / factorial(sig - m))
        indx += sig
    return r / rscale, p, k
def residuez(b, a, tol=1e-3, rtype='avg'):
    """
    Compute partial-fraction expansion of b(z) / a(z).
    If ``M = len(b)`` and ``N = len(a)``::
                b(z)     b[0] + b[1] z**(-1) + ... + b[M-1] z**(-M+1)
        H(z) = ------ = ----------------------------------------------
                a(z)     a[0] + a[1] z**(-1) + ... + a[N-1] z**(-N+1)
                 r[0]                   r[-1]
         = --------------- + ... + ---------------- + k[0] + k[1]z**(-1) ...
           (1-p[0]z**(-1))         (1-p[-1]z**(-1))
    If there are any repeated roots (closer than tol), then the partial
    fraction expansion has terms like::
             r[i]              r[i+1]                    r[i+n-1]
        -------------- + ------------------ + ... + ------------------
        (1-p[i]z**(-1))  (1-p[i]z**(-1))**2         (1-p[i]z**(-1))**n
    See also
    --------
    invresz, unique_roots
    """
    b, a = map(asarray, (b, a))
    # Leading denominator coefficient; residues are rescaled by it at
    # the end because the derivation below assumes a monic a(z).
    gain = a[0]
    # Work with coefficient order reversed so polydiv operates on the
    # polynomials as written in powers of z**(-1).
    brev, arev = b[::-1], a[::-1]
    krev, brev = polydiv(brev, arev)
    # NOTE(review): comparing an ndarray with `[]` relies on legacy NumPy
    # comparison semantics; it appears intended to detect an empty
    # quotient — confirm against the NumPy version this targets.
    if krev == []:
        k = []
    else:
        k = krev[::-1]
    b = brev[::-1]
    p = roots(a)
    # Residue array, complex-typed like p, filled in below.
    r = p * 0.0
    # Group nearly-equal poles (within `tol`) and get multiplicities.
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list from the deduplicated representatives.
    p = []
    for n in range(len(pout)):
        p.extend([pout[n]] * mult[n])
    p = asarray(p)
    # Compute the residue from the general formula (for discrete-time)
    # the polynomial is in z**(-1) and the multiplication is by terms
    # like this (1-p[i] z**(-1))**mult[i].  After differentiation,
    # we must divide by (-p[i])**(m-k) as well as (m-k)!
    indx = 0
    for n in range(len(pout)):
        bn = brev.copy()
        # an(z): product over all poles except pout[n], reversed so it is
        # a polynomial in z**(-1).
        pn = []
        for l in range(len(pout)):
            if l != n:
                pn.extend([pout[l]] * mult[l])
        an = atleast_1d(poly(pn))[::-1]
        # bn(z) / an(z) is (1-po[n] z**(-1))**Nn * b(z) / a(z) where Nn is
        # multiplicity of pole at po[n] and b(z) and a(z) are polynomials.
        sig = mult[n]
        # Walk the derivative chain from highest-order term downward;
        # each pass past the first differentiates bn/an once (quotient rule).
        for m in range(sig, 0, -1):
            if sig > m:
                # compute next derivative of bn(s) / an(s)
                term1 = polymul(polyder(bn, 1), an)
                term2 = polymul(bn, polyder(an, 1))
                bn = polysub(term1, term2)
                an = polymul(an, an)
            # Evaluate at z = 1/p[n] (i.e. z**(-1) = p[n]) and apply the
            # discrete-time correction factors described above.
            r[indx + m - 1] = (polyval(bn, 1.0 / pout[n]) /
                               polyval(an, 1.0 / pout[n]) /
                               factorial(sig - m) / (-pout[n]) ** (sig - m))
        indx += sig
    return r / gain, p, k
def invresz(r, p, k, tol=1e-3, rtype='avg'):
    """
    Compute b(z) and a(z) from partial fraction expansion.
    If ``M = len(b)`` and ``N = len(a)``::
                b(z)     b[0] + b[1] z**(-1) + ... + b[M-1] z**(-M+1)
        H(z) = ------ = ----------------------------------------------
                a(z)     a[0] + a[1] z**(-1) + ... + a[N-1] z**(-N+1)
                     r[0]                   r[-1]
             = --------------- + ... + ---------------- + k[0] + k[1]z**(-1)...
               (1-p[0]z**(-1))         (1-p[-1]z**(-1))
    If there are any repeated roots (closer than tol), then the partial
    fraction expansion has terms like::
             r[i]              r[i+1]                    r[i+n-1]
        -------------- + ------------------ + ... + ------------------
        (1-p[i]z**(-1))  (1-p[i]z**(-1))**2         (1-p[i]z**(-1))**n
    See Also
    --------
    residuez, unique_roots, invres
    """
    # Keep the direct term aside; `k` is reused below as a loop index.
    extra = asarray(k)
    # Sort poles so that repeated poles end up adjacent, and reorder the
    # residues to match the new pole order.
    p, indx = cmplx_sort(p)
    r = take(r, indx, 0)
    # Collapse nearly-equal poles (within `tol`) into unique poles with
    # their multiplicities.
    pout, mult = unique_roots(p, tol=tol, rtype=rtype)
    # Rebuild the full pole list from the deduplicated representatives.
    p = []
    for k in range(len(pout)):
        p.extend([pout[k]] * mult[k])
    # Denominator a(z): monic polynomial whose roots are the poles.
    a = atleast_1d(poly(p))
    if len(extra) > 0:
        # The direct term k(z) contributes k(z)*a(z) to the numerator.
        b = polymul(extra, a)
    else:
        b = [0]
    indx = 0
    # Accumulate in reversed (z**(-1)) coefficient order so polynomials
    # of different lengths align at the constant term.
    brev = asarray(b)[::-1]
    for k in range(len(pout)):
        temp = []
        # Construct polynomial which does not include any of this root
        for l in range(len(pout)):
            if l != k:
                temp.extend([pout[l]] * mult[l])
        for m in range(mult[k]):
            # For the (m+1)-th order term of a repeated pole, include
            # pout[k] itself with correspondingly reduced power.
            t2 = temp[:]
            t2.extend([pout[k]] * (mult[k] - m - 1))
            # Accumulate this residue's contribution (reversed to match brev).
            brev = polyadd(brev, (r[indx] * poly(t2))[::-1])
            indx += 1
    # Undo the reversal and drop negligible imaginary parts.
    b = real_if_close(brev[::-1])
    return b, a
def resample(x, num, t=None, axis=0, window=None):
    """
    Resample `x` to `num` samples using Fourier method along the given axis.
    The resampled signal starts at the same value as `x` but is sampled
    with a spacing of ``len(x) / num * (spacing of x)``.  Because a
    Fourier method is used, the signal is assumed to be periodic.
    Parameters
    ----------
    x : array_like
        The data to be resampled.
    num : int
        The number of samples in the resampled signal.
    t : array_like, optional
        If `t` is given, it is assumed to be the sample positions
        associated with the signal data in `x`.
    axis : int, optional
        The axis of `x` that is resampled.  Default is 0.
    window : array_like, callable, string, float, or tuple, optional
        Specifies the window applied to the signal in the Fourier
        domain.  See below for details.
    Returns
    -------
    resampled_x or (resampled_x, resampled_t)
        Either the resampled array, or, if `t` was given, a tuple
        containing the resampled array and the corresponding resampled
        positions.
    Notes
    -----
    The argument `window` controls a Fourier-domain window that tapers
    the Fourier spectrum before zero-padding to alleviate ringing in
    the resampled values for sampled signals you didn't intend to be
    interpreted as band-limited.
    If `window` is a function, then it is called with a vector of inputs
    indicating the frequency bins (i.e. fftfreq(x.shape[axis]) ).
    If `window` is an array of the same length as `x.shape[axis]` it is
    assumed to be the window to be applied directly in the Fourier
    domain (with dc and low-frequency first).
    For any other type of `window`, the function `scipy.signal.get_window`
    is called to generate the window.
    The first sample of the returned vector is the same as the first
    sample of the input vector.  The spacing between samples is changed
    from ``dx`` to ``dx * len(x) / num``.
    If `t` is not None, then it represents the old sample positions,
    and the new sample positions will be returned as well as the new
    samples.
    As noted, `resample` uses FFT transformations, which can be very
    slow if the number of input samples is large and prime, see
    `scipy.fftpack.fft`.
    """
    x = asarray(x)
    X = fft(x, axis=axis)
    Nx = x.shape[axis]
    if window is not None:
        if callable(window):
            # Callable windows receive the FFT frequency bins.
            W = window(fftfreq(Nx))
        elif isinstance(window, ndarray) and window.shape == (Nx,):
            # An array of matching length is used directly as the window.
            W = window
        else:
            # Anything else is forwarded to get_window; ifftshift puts
            # DC/low frequencies first to match the FFT layout.
            W = ifftshift(get_window(window, Nx))
        # Reshape W so it broadcasts along `axis`.
        # NOTE(review): when `window` is passed as an ndarray, this shape
        # assignment mutates the caller's array in place — confirm callers
        # do not rely on the original shape.
        newshape = [1] * x.ndim
        newshape[axis] = len(W)
        W.shape = newshape
        X = X * W
    sl = [slice(None)] * len(x.shape)
    newshape = list(x.shape)
    newshape[axis] = num
    # N: number of spectral coefficients that survive (down- or up-sampling).
    N = int(np.minimum(num, Nx))
    Y = zeros(newshape, 'D')
    # Copy the positive-frequency half (including DC) ...
    # BUGFIX: index with a tuple — indexing an ndarray with a *list* of
    # slices was deprecated and is an error in modern NumPy.
    sl[axis] = slice(0, (N + 1) // 2)
    Y[tuple(sl)] = X[tuple(sl)]
    # ... and the negative-frequency half.
    sl[axis] = slice(-(N - 1) // 2, None)
    Y[tuple(sl)] = X[tuple(sl)]
    # Inverse transform; rescale so amplitudes are preserved.
    y = ifft(Y, axis=axis) * (float(num) / float(Nx))
    # Real input in, real output out (drop the negligible imaginary part).
    if x.dtype.char not in ['F', 'D']:
        y = y.real
    if t is None:
        return y
    else:
        # New sample positions: same origin, spacing scaled by Nx/num.
        new_t = arange(0, num) * (t[1] - t[0]) * Nx / float(num) + t[0]
        return y, new_t
def vectorstrength(events, period):
    """
    Determine the vector strength of the events corresponding to the given
    period.

    The vector strength is a measure of phase synchrony: how well the
    timing of the events is synchronized to a single period of a periodic
    signal.  If multiple periods are given, the vector strength of each is
    calculated ("resonating vector strength").

    Parameters
    ----------
    events : 1D array_like
        An array of time points containing the timing of the events.
    period : float or array_like
        The period of the signal that the events should synchronize to,
        in the same units as `events`.  May also be an array of periods,
        in which case the outputs are arrays of the same length.

    Returns
    -------
    strength : float or 1D array
        Strength of the synchronization: 1.0 is perfect synchronization,
        0.0 is none.  An array if `period` is an array.
    phase : float or array
        The phase (in radians) that the events are most strongly
        synchronized to.  An array if `period` is an array.

    References
    ----------
    van Hemmen, JL, Longtin, A, and Vollmayr, AN. Testing resonating vector
        strength: Auditory system, electric fish, and noise.
        Chaos 21, 047508 (2011); doi: 10.1063/1.3670512
    van Hemmen, JL. Vector strength after Goldberg, Brown, and von Mises:
        biological and mathematical perspectives. Biol Cybern.
        2013 Aug;107(4):385-96. doi: 10.1007/s00422-013-0561-7.
    van Hemmen, JL and Vollmayr, AN. Resonating vector strength: what happens
        when we vary the "probing" frequency while keeping the spike times
        fixed. Biol Cybern. 2013 Aug;107(4):491-94.
        doi: 10.1007/s00422-013-0560-8
    """
    events = asarray(events)
    period = asarray(period)
    if events.ndim > 1:
        raise ValueError('events cannot have dimensions more than 1')
    if period.ndim > 1:
        raise ValueError('period cannot have dimensions more than 1')

    # Remember whether period was a scalar so the outputs can match.
    scalar_input = not period.ndim

    events = atleast_2d(events)
    period = atleast_2d(period)
    if (period <= 0).any():
        raise ValueError('periods must be positive')

    # Map each event time to a unit vector on the circle for every period,
    # then average; the mean's magnitude is the strength, its angle the
    # preferred phase.
    unit_vectors = exp(dot(2j * pi / period.T, events))
    resultant = mean(unit_vectors, axis=1)
    strength = abs(resultant)
    phase = angle(resultant)

    # Return scalars when the caller passed a scalar period.
    if scalar_input:
        return strength[0], phase[0]
    return strength, phase
def detrend(data, axis=-1, type='linear', bp=0):
    """
    Remove linear trend along axis from data.
    Parameters
    ----------
    data : array_like
        The input data.
    axis : int, optional
        The axis along which to detrend the data. By default this is the
        last axis (-1).
    type : {'linear', 'constant'}, optional
        The type of detrending. If ``type == 'linear'`` (default),
        the result of a linear least-squares fit to `data` is subtracted
        from `data`.
        If ``type == 'constant'``, only the mean of `data` is subtracted.
    bp : array_like of ints, optional
        A sequence of break points. If given, an individual linear fit is
        performed for each part of `data` between two break points.
        Break points are specified as indices into `data`.
    Returns
    -------
    ret : ndarray
        The detrended input data.
    Examples
    --------
    >>> from scipy import signal
    >>> randgen = np.random.RandomState(9)
    >>> npoints = 1000
    >>> noise = randgen.randn(npoints)
    >>> x = 3 + 2*np.linspace(0, 1, npoints) + noise
    >>> (signal.detrend(x) - noise).max() < 0.01
    True
    """
    if type not in ['linear', 'l', 'constant', 'c']:
        raise ValueError("Trend type must be 'linear' or 'constant'.")
    data = asarray(data)
    # Work in a float type; fall back to double for non-float input.
    dtype = data.dtype.char
    if dtype not in 'dfDF':
        dtype = 'd'
    if type in ['constant', 'c']:
        # Constant detrend: just subtract the mean along `axis`.
        ret = data - expand_dims(mean(data, axis), axis)
        return ret
    else:
        dshape = data.shape
        N = dshape[axis]
        # Breakpoints partition the axis; always include both ends.
        bp = sort(unique(r_[0, bp, N]))
        if np.any(bp > N):
            raise ValueError("Breakpoints must be less than length "
                             "of data along given axis.")
        Nreg = len(bp) - 1
        # Restructure data so that axis is along first dimension and
        # all other dimensions are collapsed into second dimension
        rnk = len(dshape)
        if axis < 0:
            axis = axis + rnk
        newdims = r_[axis, 0:axis, axis + 1:rnk]
        newdata = reshape(transpose(data, tuple(newdims)),
                          (N, prod(dshape, axis=0) // N))
        newdata = newdata.copy()  # make sure we have a copy
        if newdata.dtype.char not in 'dfDF':
            newdata = newdata.astype(dtype)
        # Find leastsq fit and remove it for each piece
        for m in range(Nreg):
            Npts = bp[m + 1] - bp[m]
            # Design matrix: [normalized ramp, constant] columns.
            A = ones((Npts, 2), dtype)
            # BUGFIX: np.cast (``cast[dtype](...)``) was removed in
            # NumPy 2.0; astype is the documented equivalent.
            A[:, 0] = (arange(1, Npts + 1) * 1.0 / Npts).astype(dtype)
            sl = slice(bp[m], bp[m + 1])
            coef, resids, rank, s = linalg.lstsq(A, newdata[sl])
            newdata[sl] = newdata[sl] - dot(A, coef)
        # Put data back in original shape.
        tdshape = take(dshape, newdims, 0)
        ret = reshape(newdata, tuple(tdshape))
        vals = list(range(1, rnk))
        olddims = vals[:axis] + [0] + vals[axis:]
        ret = transpose(ret, tuple(olddims))
        return ret
def lfilter_zi(b, a):
    """
    Compute an initial state `zi` for the lfilter function that corresponds
    to the steady state of the step response.

    A typical use is to set the initial state so that the output of the
    filter starts at the same value as the first element of the signal to
    be filtered.

    Parameters
    ----------
    b, a : array_like (1-D)
        The IIR filter coefficients. See `lfilter` for more information.

    Returns
    -------
    zi : 1-D ndarray
        The initial state for the filter.

    Notes
    -----
    With the state-space matrices (A, B) of the transposed direct form II
    implementation used by `scipy.signal.lfilter`::

        A = scipy.linalg.companion(a).T
        B = b[1:] - a[1:]*b[0]

    this function solves ``zi = A*zi + B``, i.e. the initial condition for
    which the response to an all-ones input is constant.  `a` and `b` are
    normalized by ``a[0]`` first if ``a[0]`` is not 1.

    Examples
    --------
    >>> from numpy import array, ones
    >>> from scipy.signal import lfilter, lfilter_zi, butter
    >>> b, a = butter(5, 0.25)
    >>> zi = lfilter_zi(b, a)
    >>> y, zo = lfilter(b, a, ones(10), zi=zi)
    >>> y
    array([1., 1., 1., 1., 1., 1., 1., 1., 1., 1.])

    The `zi` argument may be scaled by the first input sample so the
    output shows no start-up transient:

    >>> x = array([0.5, 0.5, 0.5, 0.0, 0.0, 0.0, 0.0])
    >>> y, zf = lfilter(b, a, x, zi=zi*x[0])
    >>> y
    array([ 0.5       ,  0.5       ,  0.5       ,  0.49836039,  0.48610528,
        0.44399389,  0.35505241])
    """
    b = np.atleast_1d(b)
    if b.ndim != 1:
        raise ValueError("Numerator b must be 1-D.")
    a = np.atleast_1d(a)
    if a.ndim != 1:
        raise ValueError("Denominator a must be 1-D.")

    # Drop leading zeros of the denominator (but keep at least one entry).
    while len(a) > 1 and a[0] == 0.0:
        a = a[1:]
    if a.size < 1:
        raise ValueError("There must be at least one nonzero `a` coefficient.")

    # Normalize so the leading denominator coefficient is exactly 1.
    if a[0] != 1.0:
        b = b / a[0]
        a = a / a[0]

    # Zero-pad the shorter coefficient vector so both have equal length.
    order = max(len(a), len(b))
    if len(a) < order:
        a = np.r_[a, np.zeros(order - len(a))]
    elif len(b) < order:
        b = np.r_[b, np.zeros(order - len(b))]

    # State-space matrices for lfilter's transposed direct form II:
    #   A = companion(a).T,  B = b[1:] - a[1:]*b[0]
    # Solve zi = A*zi + B, i.e. (I - A) zi = B.
    IminusA = np.eye(order - 1) - linalg.companion(a).T
    B = b[1:] - a[1:] * b[0]
    zi = np.linalg.solve(IminusA, B)
    return zi
def sosfilt_zi(sos):
    """
    Compute an initial state `zi` for the sosfilt function that corresponds
    to the steady state of the step response.

    A typical use is to set the initial state so that the output of the
    filter starts at the same value as the first element of the signal to
    be filtered.

    Parameters
    ----------
    sos : array_like
        Array of second-order filter coefficients, must have shape
        ``(n_sections, 6)``.  See `sosfilt` for the SOS filter format
        specification.

    Returns
    -------
    zi : ndarray
        Initial conditions suitable for use with ``sosfilt``, shape
        ``(n_sections, 2)``.

    See Also
    --------
    sosfilt, zpk2sos

    Notes
    -----
    .. versionadded:: 0.16.0

    Examples
    --------
    Filter a rectangular pulse that begins at time 0, with and without
    the use of the `zi` argument of `scipy.signal.sosfilt`.

    >>> from scipy import signal
    >>> import matplotlib.pyplot as plt
    >>> sos = signal.butter(9, 0.125, output='sos')
    >>> zi = signal.sosfilt_zi(sos)
    >>> x = (np.arange(250) < 100).astype(int)
    >>> f1 = signal.sosfilt(sos, x)
    >>> f2, zo = signal.sosfilt(sos, x, zi=zi)
    >>> plt.plot(x, 'k--', label='x')
    >>> plt.plot(f1, 'b', alpha=0.5, linewidth=2, label='filtered')
    >>> plt.plot(f2, 'g', alpha=0.25, linewidth=4, label='filtered with zi')
    >>> plt.legend(loc='best')
    >>> plt.show()
    """
    sos = np.asarray(sos)
    if sos.ndim != 2 or sos.shape[1] != 6:
        raise ValueError('sos must be shape (n_sections, 6)')

    n_sections = sos.shape[0]
    zi = np.empty((n_sections, 2))
    scale = 1.0
    for idx, section in enumerate(sos):
        b, a = section[:3], section[3:]
        zi[idx] = scale * lfilter_zi(b, a)
        # b.sum()/a.sum() is H(1), this section's DC gain — the steady-state
        # value of its step response, which feeds the next section.
        scale *= b.sum() / a.sum()
    return zi
def _filtfilt_gust(b, a, x, axis=-1, irlen=None):
"""Forward-backward IIR filter that uses Gustafsson's method.
Apply the IIR filter defined by `(b,a)` to `x` twice, first forward
then backward, using Gustafsson's initial conditions [1]_.
Let ``y_fb`` be the result of filtering first forward and then backward,
and let ``y_bf`` be the result of filtering first backward then forward.
Gustafsson's method is to compute initial conditions for the forward
pass and the backward pass such that ``y_fb == y_bf``.
Parameters
----------
b : scalar or 1-D ndarray
Numerator coefficients of the filter.
a : scalar or 1-D ndarray
Denominator coefficients of the filter.
x : ndarray
Data to be filtered.
axis : int, optional
Axis of `x` to be filtered. Default is -1.
irlen : int or None, optional
The length of the nonnegligible part of the impulse response.
If `irlen` is None, or if the length of the signal is less than
``2 * irlen``, then no part of the impulse response is ignored.
Returns
-------
y : ndarray
The filtered data.
x0 : ndarray
Initial condition for the forward filter.
x1 : ndarray
Initial condition for the backward filter.
Notes
-----
Typically the return values `x0` and `x1` are not needed by the
caller. The intended use of these return values is in unit tests.
References
----------
.. [1] F. Gustaffson. Determining the initial states in forward-backward
filtering. Transactions on Signal Processing, 46(4):988-992, 1996.
"""
# In the comments, "Gustafsson's paper" and [1] refer to the
# paper referenced in the docstring.
b = np.atleast_1d(b)
a = np.atleast_1d(a)
order = max(len(b), len(a)) - 1
if order == 0:
# The filter is just scalar multiplication, with no state.
scale = (b[0] / a[0])**2
y = scale * x
return y, np.array([]), np.array([])
if axis != -1 or axis != x.ndim - 1:
# Move the axis containing the data to the end.
x = np.swapaxes(x, axis, x.ndim - 1)
# n is the number of samples in the data to be filtered.
n = x.shape[-1]
if irlen is None or n <= 2*irlen:
m = n
else:
m = irlen
# Create Obs, the observability matrix (called O in the paper).
# This matrix can be interpreted as the operator that propagates
# an arbitrary initial state to the output, assuming the input is
# zero.
# In Gustafsson's paper, the forward and backward filters are not
# necessarily the same, so he has both O_f and O_b. We use the same
# filter in both directions, so we only need O. The same comment
# applies to S below.
Obs = np.zeros((m, order))
zi = np.zeros(order)
zi[0] = 1
Obs[:, 0] = lfilter(b, a, np.zeros(m), zi=zi)[0]
for k in range(1, order):
Obs[k:, k] = Obs[:-k, 0]
# Obsr is O^R (Gustafsson's notation for row-reversed O)
Obsr = Obs[::-1]
# Create S. S is the matrix that applies the filter to the reversed
# propagated initial conditions. That is,
# out = S.dot(zi)
# is the same as
# tmp, _ = lfilter(b, a, zeros(), zi=zi) # Propagate ICs.
# out = lfilter(b, a, tmp[::-1]) # Reverse and filter.
# Equations (5) & (6) of [1]
S = lfilter(b, a, Obs[::-1], axis=0)
# Sr is S^R (row-reversed S)
Sr = S[::-1]
# M is [(S^R - O), (O^R - S)]
if m == n:
M = np.hstack((Sr - Obs, Obsr - S))
else:
# Matrix described in section IV of [1].
M = np.zeros((2*m, 2*order))
M[:m, :order] = Sr - Obs
M[m:, order:] = Obsr - S
# Naive forward-backward and backward-forward filters.
# These have large transients because the filters use zero initial
# conditions.
y_f = lfilter(b, a, x)
y_fb = lfilter(b, a, y_f[..., ::-1])[..., ::-1]
y_b = lfilter(b, a, x[..., ::-1])[..., ::-1]
y_bf = lfilter(b, a, y_b)
delta_y_bf_fb = y_bf - y_fb
if m == n:
delta = delta_y_bf_fb
else:
start_m = delta_y_bf_fb[..., :m]
end_m = delta_y_bf_fb[..., -m:]
delta = np.concatenate((start_m, end_m), axis=-1)
# ic_opt holds the "optimal" initial conditions.
# The following code computes the result shown in the formula
# of the paper between equations (6) and (7).
if delta.ndim == 1:
ic_opt = linalg.lstsq(M, delta)[0]
else:
# Reshape delta so it can be used as an array of multiple
# right-hand-sides in linalg.lstsq.
delta2d = delta.reshape(-1, delta.shape[-1]).T
ic_opt0 = linalg.lstsq(M, delta2d)[0].T
ic_opt = ic_opt0.reshape(delta.shape[:-1] + (M.shape[-1],))
# Now compute the filtered signal using equation (7) of [1].
# First, form [S^R, O^R] and call it W.
if m == n:
W = np.hstack((Sr, Obsr))
else:
W = np.zeros((2*m, 2*order))
W[:m, :order] = Sr
W[m:, order:] = Obsr
# Equation (7) of [1] says
# Y_fb^opt = Y_fb^0 + W * [x_0^opt; x_{N-1}^opt]
# `wic` is (almost) the product on the right.
# W has shape (m, 2*order), and ic_opt has shape (..., 2*order),
# so we can't use W.dot(ic_opt). Instead, we dot ic_opt with W.T,
# so wic has shape (..., m).
wic = ic_opt.dot(W.T)
# `wic` is "almost" the product of W and the optimal ICs in equation
# (7)--if we're using a truncated impulse response (m < n), `wic`
# contains only the adjustments required for the ends of the signal.
# Here we form y_opt, taking this into account if necessary.
y_opt = y_fb
if m == n:
y_opt += wic
else:
y_opt[..., :m] += wic[..., :m]
y_opt[..., -m:] += wic[..., -m:]
x0 = ic_opt[..., :order]
x1 = ic_opt[..., -order:]
if axis != -1 or axis != x.ndim - 1:
# Restore the data axis to its original position.
x0 = np.swapaxes(x0, axis, x.ndim - 1)
x1 = np.swapaxes(x1, axis, x.ndim - 1)
y_opt = np.swapaxes(y_opt, axis, x.ndim - 1)
return y_opt, x0, x1
def filtfilt(b, a, x, axis=-1, padtype='odd', padlen=None, method='pad',
             irlen=None):
    """
    A forward-backward filter.

    Apply a linear filter twice, once forward and once backwards, so the
    combined filter has linear phase.

    The function provides options for handling the edges of the signal.
    With ``method="pad"``, the data is extended along `axis` by an odd,
    even, or constant extension; on each pass the filter's initial
    condition is obtained from `lfilter_zi` scaled by the end point of
    the extended data.  With ``method="gust"``, Gustafsson's method [1]_
    chooses initial conditions for the forward and backward passes so
    that filtering forward-then-backward gives the same result as
    backward-then-forward.

    Parameters
    ----------
    b : (N,) array_like
        The numerator coefficient vector of the filter.
    a : (N,) array_like
        The denominator coefficient vector of the filter.  If ``a[0]``
        is not 1, then both `a` and `b` are normalized by ``a[0]``.
    x : array_like
        The array of data to be filtered.
    axis : int, optional
        The axis of `x` to which the filter is applied.  Default is -1.
    padtype : str or None, optional
        Must be 'odd', 'even', 'constant', or None.  The type of
        extension used for the padded signal.  If None, no padding is
        used.  Default is 'odd'.
    padlen : int or None, optional
        The number of elements by which to extend `x` at both ends of
        `axis` before applying the filter.  Must be less than
        ``x.shape[axis] - 1``.  ``padlen=0`` implies no padding.
        Default is ``3 * max(len(a), len(b))``.
    method : str, optional
        Either "pad" or "gust".  With "pad", `padtype` and `padlen`
        control the edge handling and `irlen` is ignored.  With "gust",
        Gustafsson's method is used and `padtype`/`padlen` are ignored.
    irlen : int or None, optional
        With ``method="gust"``, the length of the filter's impulse
        response.  If None, no part of the impulse response is ignored.
        For long signals, specifying `irlen` can significantly improve
        performance.

    Returns
    -------
    y : ndarray
        The filtered output, an array of type numpy.float64 with the same
        shape as `x`.

    See Also
    --------
    lfilter_zi, lfilter

    Notes
    -----
    The option to use Gustaffson's method was added in scipy version 0.16.0.

    References
    ----------
    .. [1] F. Gustaffson, "Determining the initial states in forward-backward
           filtering", Transactions on Signal Processing, Vol. 46, pp. 988-992,
           1996.
    """
    b = np.atleast_1d(b)
    a = np.atleast_1d(a)
    x = np.asarray(x)

    if method not in ["pad", "gust"]:
        raise ValueError("method must be 'pad' or 'gust'.")

    if method == "gust":
        # Gustafsson's method; padtype and padlen are ignored.
        y, z1, z2 = _filtfilt_gust(b, a, x, axis=axis, irlen=irlen)
        return y

    # method == "pad" from here on.
    if padtype not in ['even', 'odd', 'constant', None]:
        raise ValueError(("Unknown value '%s' given to padtype. padtype "
                          "must be 'even', 'odd', 'constant', or None.") %
                         padtype)

    if padtype is None:
        padlen = 0
    # Default edge length (3 * filter length) preserved for backwards
    # compatibility.
    edge = 3 * max(len(a), len(b)) if padlen is None else padlen

    # x's 'axis' dimension must be bigger than edge.
    if x.shape[axis] <= edge:
        raise ValueError("The length of the input vector x must be at least "
                         "padlen, which is %d." % edge)

    if padtype is not None and edge > 0:
        # Extend the signal by `edge` samples at each end of `axis`.
        extenders = {'even': even_ext, 'odd': odd_ext, 'constant': const_ext}
        ext = extenders[padtype](x, edge, axis=axis)
    else:
        ext = x

    # Steady state of the filter's step response, reshaped so that
    # zi * (first sample) broadcasts correctly for lfilter's zi argument.
    zi = lfilter_zi(b, a)
    zi_shape = [1] * x.ndim
    zi_shape[axis] = zi.size
    zi = np.reshape(zi, zi_shape)

    # Forward pass, seeded from the first extended sample.
    x0 = axis_slice(ext, stop=1, axis=axis)
    y, _ = lfilter(b, a, ext, axis=axis, zi=zi * x0)

    # Backward pass, seeded from the last forward-filtered sample.
    y0 = axis_slice(y, start=-1, axis=axis)
    y, _ = lfilter(b, a, axis_reverse(y, axis=axis), axis=axis, zi=zi * y0)
    y = axis_reverse(y, axis=axis)

    if edge > 0:
        # Slice the actual signal out of the extended result.
        y = axis_slice(y, start=edge, stop=-edge, axis=axis)
    return y
def sosfilt(sos, x, axis=-1, zi=None):
    """
    Filter data along one dimension using cascaded second-order sections.

    Applies the digital IIR filter described by `sos` to `x` by running
    `lfilter` once per second-order section.  See `lfilter` for details.

    Parameters
    ----------
    sos : array_like
        Second-order filter coefficients with shape ``(n_sections, 6)``;
        the first three columns of each row are the numerator and the
        last three the denominator of one section.
    x : array_like
        An N-dimensional input array.
    axis : int
        The axis of `x` along which the filter is applied.  Default is -1.
    zi : array_like, optional
        Initial conditions for the cascaded filter delays, of shape
        ``(n_sections, ..., 2, ...)`` where ``..., 2, ...`` is `x`'s shape
        with ``x.shape[axis]`` replaced by 2.  If omitted or None, zero
        initial conditions (initial rest) are assumed.  Note these are
        *not* the values produced by `lfiltic` or `lfilter_zi`.

    Returns
    -------
    y : ndarray
        The output of the digital filter.
    zf : ndarray, optional
        Final filter delay values; only returned when `zi` is given.

    See Also
    --------
    zpk2sos, sos2zpk, sosfilt_zi

    Notes
    -----
    Implemented as a series of direct-form II transposed biquads, which
    keeps numerical precision errors small for high-order filters.

    .. versionadded:: 0.16.0

    Examples
    --------
    Compare a 13th-order elliptic filter run as a single transfer function
    (numerically unstable) against the same filter run as cascaded
    second-order sections:

    >>> import matplotlib.pyplot as plt
    >>> from scipy import signal
    >>> b, a = signal.ellip(13, 0.009, 80, 0.05, output='ba')
    >>> sos = signal.ellip(13, 0.009, 80, 0.05, output='sos')
    >>> x = np.zeros(700)
    >>> x[0] = 1.
    >>> y_tf = signal.lfilter(b, a, x)
    >>> y_sos = signal.sosfilt(sos, x)
    >>> plt.plot(y_tf, 'r', label='TF')
    >>> plt.plot(y_sos, 'k', label='SOS')
    >>> plt.legend(loc='best')
    >>> plt.show()
    """
    x = np.asarray(x)
    sos = atleast_2d(sos)
    if sos.ndim != 2:
        raise ValueError('sos array must be 2D')
    n_sections, n_coeffs = sos.shape
    if n_coeffs != 6:
        raise ValueError('sos array must be shape (n_sections, 6)')
    if zi is None:
        # Initial rest: simply chain the sections, no final state returned.
        for section in range(n_sections):
            x = lfilter(sos[section, :3], sos[section, 3:], x, axis)
        return x
    zi = np.asarray(zi)
    # zi must have x's shape with the filtered axis replaced by 2,
    # prefixed by the number of sections.
    inner_shape = list(x.shape)
    inner_shape[axis] = 2
    x_zi_shape = (n_sections,) + tuple(inner_shape)
    if zi.shape != x_zi_shape:
        raise ValueError('Invalid zi shape. With axis=%r, an input with '
                         'shape %r, and an sos array with %d sections, zi '
                         'must have shape %r.' %
                         (axis, x.shape, n_sections, x_zi_shape))
    zf = zeros_like(zi)
    for section in range(n_sections):
        x, zf[section] = lfilter(sos[section, :3], sos[section, 3:],
                                 x, axis, zi=zi[section])
    return x, zf
from scipy.signal.filter_design import cheby1
from scipy.signal.fir_filter_design import firwin
def decimate(x, q, n=None, ftype='iir', axis=-1):
    """
    Downsample the signal by using a filter.

    By default, an order 8 Chebyshev type I filter is used. A 30 point FIR
    filter with hamming window is used if `ftype` is 'fir'.

    Parameters
    ----------
    x : ndarray
        The signal to be downsampled, as an N-dimensional array.
    q : int
        The downsampling factor.
    n : int, optional
        The order of the filter (1 less than the length for 'fir').
    ftype : str {'iir', 'fir'}, optional
        The type of the lowpass filter.
    axis : int, optional
        The axis along which to decimate.

    Returns
    -------
    y : ndarray
        The down-sampled signal.

    Raises
    ------
    TypeError
        If `q` is not a plain Python int.

    See also
    --------
    resample
    """
    if not isinstance(q, int):
        raise TypeError("q must be an integer")
    if n is None:
        n = 30 if ftype == 'fir' else 8
    if ftype == 'fir':
        # Cutoff at the new Nyquist frequency, 1/q of the old one.
        b = firwin(n + 1, 1. / q, window='hamming')
        a = 1.
    else:
        b, a = cheby1(n, 0.05, 0.8 / q)
    y = lfilter(b, a, x, axis=axis)
    # Keep every q-th sample along `axis`.
    sl = [slice(None)] * y.ndim
    sl[axis] = slice(None, None, q)
    # BUG FIX: index with a tuple -- indexing an ndarray with a *list* of
    # slices is deprecated and an error in modern NumPy.
    return y[tuple(sl)]
# ---- boundary between concatenated source files ----
# -*- coding: utf-8 -*-
# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
from __future__ import unicode_literals
import copy
import os
import re
import shutil
import tempfile
import threading
import time
import unittest
import warnings
from django.conf import settings
from django.core import management, signals
from django.core.cache import (
DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches,
)
from django.core.cache.utils import make_template_fragment_key
from django.db import connection, connections, transaction
from django.http import HttpRequest, HttpResponse, StreamingHttpResponse
from django.middleware.cache import (
CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware,
)
from django.middleware.csrf import CsrfViewMiddleware
from django.template import engines
from django.template.context_processors import csrf
from django.template.response import TemplateResponse
from django.test import (
RequestFactory, TestCase, TransactionTestCase, override_settings,
)
from django.test.signals import setting_changed
from django.utils import six, timezone, translation
from django.utils.cache import (
get_cache_key, learn_cache_key, patch_cache_control,
patch_response_headers, patch_vary_headers,
)
from django.utils.encoding import force_text
from django.views.decorators.cache import cache_page
from .models import Poll, expensive_calculation
try: # Use the same idiom as in cache backends
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
# functions/classes for complex data type tests
def f():
    """Return a fixed value; cached by the complex-data-type tests."""
    answer = 42
    return answer
class C:
    # Minimal class fixture for the complex-data-type tests; only the
    # class object itself is put in the cache, instances are never made.
    # NOTE(review): `m` takes no `self` -- it appears never to be called,
    # so this is harmless, but confirm before reusing it elsewhere.
    def m(n):
        return 24
class Unpickable(object):
    """Object whose pickling always fails, to exercise cache error paths."""

    def __getstate__(self):
        # Make every pickle attempt raise.
        raise pickle.PickleError()
@override_settings(CACHES={
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
})
class DummyCacheTests(TestCase):
    """Tests for the dummy backend, which accepts and ignores all writes."""
    # The Dummy cache backend doesn't really behave like a test backend,
    # so it has its own test case.
    def test_simple(self):
        "Dummy cache backend ignores cache set calls"
        cache.set("key", "value")
        self.assertIsNone(cache.get("key"))
    def test_add(self):
        "Add doesn't do anything in dummy cache backend"
        cache.add("addkey1", "value")
        result = cache.add("addkey1", "newvalue")
        # add() still reports success even though nothing was stored.
        self.assertTrue(result)
        self.assertIsNone(cache.get("addkey1"))
    def test_non_existent(self):
        "Non-existent keys aren't found in the dummy cache backend"
        self.assertIsNone(cache.get("does_not_exist"))
        self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
    def test_get_many(self):
        "get_many returns nothing for the dummy cache backend"
        cache.set('a', 'a')
        cache.set('b', 'b')
        cache.set('c', 'c')
        cache.set('d', 'd')
        self.assertEqual(cache.get_many(['a', 'c', 'd']), {})
        self.assertEqual(cache.get_many(['a', 'b', 'e']), {})
    def test_delete(self):
        "Cache deletion is transparently ignored on the dummy cache backend"
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        self.assertIsNone(cache.get("key1"))
        cache.delete("key1")
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
    def test_has_key(self):
        "The has_key method doesn't ever return True for the dummy cache backend"
        cache.set("hello1", "goodbye1")
        self.assertFalse(cache.has_key("hello1"))
        self.assertFalse(cache.has_key("goodbye1"))
    def test_in(self):
        "The in operator doesn't ever return True for the dummy cache backend"
        cache.set("hello2", "goodbye2")
        self.assertNotIn("hello2", cache)
        self.assertNotIn("goodbye2", cache)
    def test_incr(self):
        "Dummy cache values can't be incremented"
        cache.set('answer', 42)
        # incr() raises because the key was never actually stored.
        self.assertRaises(ValueError, cache.incr, 'answer')
        self.assertRaises(ValueError, cache.incr, 'does_not_exist')
    def test_decr(self):
        "Dummy cache values can't be decremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.decr, 'answer')
        self.assertRaises(ValueError, cache.decr, 'does_not_exist')
    def test_data_types(self):
        "All data types are ignored equally by the dummy cache"
        stuff = {
            'string': 'this is a string',
            'int': 42,
            'list': [1, 2, 3, 4],
            'tuple': (1, 2, 3, 4),
            'dict': {'A': 1, 'B': 2},
            'function': f,
            'class': C,
        }
        cache.set("stuff", stuff)
        self.assertIsNone(cache.get("stuff"))
    def test_expiration(self):
        "Expiration has no effect on the dummy cache"
        cache.set('expire1', 'very quickly', 1)
        cache.set('expire2', 'very quickly', 1)
        cache.set('expire3', 'very quickly', 1)
        time.sleep(2)
        self.assertIsNone(cache.get("expire1"))
        cache.add("expire2", "newvalue")
        self.assertIsNone(cache.get("expire2"))
        self.assertFalse(cache.has_key("expire3"))
    def test_unicode(self):
        "Unicode values are ignored by the dummy cache"
        stuff = {
            'ascii': 'ascii_value',
            'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
            'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
            'ascii2': {'x': 1}
        }
        for (key, value) in stuff.items():
            cache.set(key, value)
            self.assertIsNone(cache.get(key))
    def test_set_many(self):
        "set_many does nothing for the dummy cache backend"
        cache.set_many({'a': 1, 'b': 2})
        cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1')
    def test_delete_many(self):
        "delete_many does nothing for the dummy cache backend"
        cache.delete_many(['a', 'b'])
    def test_clear(self):
        "clear does nothing for the dummy cache backend"
        cache.clear()
    def test_incr_version(self):
        "Dummy cache versions can't be incremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.incr_version, 'answer')
        self.assertRaises(ValueError, cache.incr_version, 'does_not_exist')
    def test_decr_version(self):
        "Dummy cache versions can't be decremented"
        cache.set('answer', 42)
        self.assertRaises(ValueError, cache.decr_version, 'answer')
        self.assertRaises(ValueError, cache.decr_version, 'does_not_exist')
def custom_key_func(key, key_prefix, version):
    """A customized cache key function."""
    return 'CUSTOM-%s-%s-%s' % (key_prefix, version, key)
# Per-alias option overrides; `caches_setting_for_tests` layers these on
# top of a shared backend config (precedence: params -> this -> base).
_caches_setting_base = {
    'default': {},
    # os.getpid() keeps the prefix unique across concurrent test runs.
    'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())},
    'v2': {'VERSION': 2},
    'custom_key': {'KEY_FUNCTION': custom_key_func},
    'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'},
    'cull': {'OPTIONS': {'MAX_ENTRIES': 30}},
    'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}},
}
def caches_setting_for_tests(base=None, **params):
    """
    Build a CACHES setting covering every alias in `_caches_setting_base`.

    `base` pulls in the backend config from the original settings (e.g.
    the memcached location); `params` are test-specific overrides.
    Precedence, highest first: params -> _caches_setting_base -> base.
    """
    shared = base or {}
    setting = {}
    for alias, overrides in _caches_setting_base.items():
        config = shared.copy()
        config.update(overrides)
        config.update(params)
        setting[alias] = config
    return setting
class BaseCacheTests(object):
# A common set of tests to apply to all cache backends
    def setUp(self):
        """Create a request factory for the middleware-based tests."""
        self.factory = RequestFactory()
    def tearDown(self):
        """Flush the default cache so each test starts clean."""
        cache.clear()
    def test_simple(self):
        """A set value is returned by a subsequent get."""
        # Simple cache set/get works
        cache.set("key", "value")
        self.assertEqual(cache.get("key"), "value")
    def test_add(self):
        """add() stores only when the key is absent, and says whether it did."""
        # A key can be added to a cache
        cache.add("addkey1", "value")
        result = cache.add("addkey1", "newvalue")
        self.assertFalse(result)
        self.assertEqual(cache.get("addkey1"), "value")
    def test_prefix(self):
        """KEY_PREFIX isolates aliases that share the same backend store."""
        # Test for same cache key conflicts between shared backend
        cache.set('somekey', 'value')
        # should not be set in the prefixed cache
        self.assertFalse(caches['prefix'].has_key('somekey'))
        caches['prefix'].set('somekey', 'value2')
        self.assertEqual(cache.get('somekey'), 'value')
        self.assertEqual(caches['prefix'].get('somekey'), 'value2')
    def test_non_existent(self):
        """get() on a missing key returns None, or the supplied default."""
        # Non-existent cache keys return as None/default
        # get with non-existent keys
        self.assertIsNone(cache.get("does_not_exist"))
        self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!")
    def test_get_many(self):
        """get_many() returns only the requested keys that exist."""
        # Multiple cache keys can be returned using get_many
        cache.set('a', 'a')
        cache.set('b', 'b')
        cache.set('c', 'c')
        cache.set('d', 'd')
        self.assertDictEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'})
        self.assertDictEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'})
    def test_delete(self):
        """delete() removes only the targeted key."""
        # Cache keys can be deleted
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        self.assertEqual(cache.get("key1"), "spam")
        cache.delete("key1")
        self.assertIsNone(cache.get("key1"))
        self.assertEqual(cache.get("key2"), "eggs")
    def test_has_key(self):
        """has_key() reflects presence, including keys cached forever."""
        # The cache can be inspected for cache keys
        cache.set("hello1", "goodbye1")
        self.assertTrue(cache.has_key("hello1"))
        self.assertFalse(cache.has_key("goodbye1"))
        cache.set("no_expiry", "here", None)
        self.assertTrue(cache.has_key("no_expiry"))
    def test_in(self):
        """The `in` operator mirrors has_key()."""
        # The in operator can be used to inspect cache contents
        cache.set("hello2", "goodbye2")
        self.assertIn("hello2", cache)
        self.assertNotIn("goodbye2", cache)
    def test_incr(self):
        """incr() adds a (possibly negative) delta; missing keys raise."""
        # Cache values can be incremented
        cache.set('answer', 41)
        self.assertEqual(cache.incr('answer'), 42)
        self.assertEqual(cache.get('answer'), 42)
        self.assertEqual(cache.incr('answer', 10), 52)
        self.assertEqual(cache.get('answer'), 52)
        self.assertEqual(cache.incr('answer', -10), 42)
        self.assertRaises(ValueError, cache.incr, 'does_not_exist')
    def test_decr(self):
        """decr() subtracts a (possibly negative) delta; missing keys raise."""
        # Cache values can be decremented
        cache.set('answer', 43)
        self.assertEqual(cache.decr('answer'), 42)
        self.assertEqual(cache.get('answer'), 42)
        self.assertEqual(cache.decr('answer', 10), 32)
        self.assertEqual(cache.get('answer'), 32)
        self.assertEqual(cache.decr('answer', -10), 42)
        self.assertRaises(ValueError, cache.decr, 'does_not_exist')
    def test_close(self):
        """Backends expose a callable close() hook (a no-op for most)."""
        self.assertTrue(hasattr(cache, 'close'))
        cache.close()
    def test_data_types(self):
        """Heterogeneous values (containers, callables, classes) round-trip."""
        # Many different data types can be cached
        stuff = {
            'string': 'this is a string',
            'int': 42,
            'list': [1, 2, 3, 4],
            'tuple': (1, 2, 3, 4),
            'dict': {'A': 1, 'B': 2},
            'function': f,
            'class': C,
        }
        cache.set("stuff", stuff)
        self.assertEqual(cache.get("stuff"), stuff)
    def test_cache_read_for_model_instance(self):
        """Reading a cached model instance must not re-run callable field defaults."""
        # Don't want fields with callable as default to be called on cache read
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        my_poll = Poll.objects.create(question="Well?")
        self.assertEqual(Poll.objects.count(), 1)
        pub_date = my_poll.pub_date
        cache.set('question', my_poll)
        cached_poll = cache.get('question')
        self.assertEqual(cached_poll.pub_date, pub_date)
        # We only want the default expensive calculation run once
        self.assertEqual(expensive_calculation.num_runs, 1)
    def test_cache_write_for_model_instance_with_deferred(self):
        """Caching a deferred queryset must not re-run callable field defaults."""
        # Don't want fields with callable as default to be called on cache write
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        Poll.objects.create(question="What?")
        self.assertEqual(expensive_calculation.num_runs, 1)
        defer_qs = Poll.objects.all().defer('question')
        self.assertEqual(defer_qs.count(), 1)
        self.assertEqual(expensive_calculation.num_runs, 1)
        cache.set('deferred_queryset', defer_qs)
        # cache set should not re-evaluate default functions
        self.assertEqual(expensive_calculation.num_runs, 1)
    def test_cache_read_for_model_instance_with_deferred(self):
        """Reading a cached deferred queryset must not re-run field defaults."""
        # Don't want fields with callable as default to be called on cache read
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        Poll.objects.create(question="What?")
        self.assertEqual(expensive_calculation.num_runs, 1)
        defer_qs = Poll.objects.all().defer('question')
        self.assertEqual(defer_qs.count(), 1)
        cache.set('deferred_queryset', defer_qs)
        self.assertEqual(expensive_calculation.num_runs, 1)
        runs_before_cache_read = expensive_calculation.num_runs
        cache.get('deferred_queryset')
        # We only want the default expensive calculation run on creation and set
        self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)
    def test_expiration(self):
        """Values set with a timeout vanish once it elapses; add() then works."""
        # Cache values can be set to expire
        cache.set('expire1', 'very quickly', 1)
        cache.set('expire2', 'very quickly', 1)
        cache.set('expire3', 'very quickly', 1)
        time.sleep(2)
        self.assertIsNone(cache.get("expire1"))
        cache.add("expire2", "newvalue")
        self.assertEqual(cache.get("expire2"), "newvalue")
        self.assertFalse(cache.has_key("expire3"))
    def test_unicode(self):
        """Non-ASCII keys and values survive set, add and set_many."""
        # Unicode values can be cached
        stuff = {
            'ascii': 'ascii_value',
            'unicode_ascii': 'Iñtërnâtiônàlizætiøn1',
            'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2',
            'ascii2': {'x': 1}
        }
        # Test `set`
        for (key, value) in stuff.items():
            cache.set(key, value)
            self.assertEqual(cache.get(key), value)
        # Test `add`
        for (key, value) in stuff.items():
            cache.delete(key)
            cache.add(key, value)
            self.assertEqual(cache.get(key), value)
        # Test `set_many`
        for (key, value) in stuff.items():
            cache.delete(key)
        cache.set_many(stuff)
        for (key, value) in stuff.items():
            self.assertEqual(cache.get(key), value)
    def test_binary_string(self):
        """zlib-compressed bytes round-trip through set, add and set_many."""
        # Binary strings should be cacheable
        from zlib import compress, decompress
        value = 'value_to_be_compressed'
        compressed_value = compress(value.encode())
        # Test set
        cache.set('binary1', compressed_value)
        compressed_result = cache.get('binary1')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result).decode())
        # Test add
        cache.add('binary1-add', compressed_value)
        compressed_result = cache.get('binary1-add')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result).decode())
        # Test set_many
        cache.set_many({'binary1-set_many': compressed_value})
        compressed_result = cache.get('binary1-set_many')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result).decode())
    def test_set_many(self):
        """set_many() stores every key of the mapping."""
        # Multiple keys can be set using set_many
        cache.set_many({"key1": "spam", "key2": "eggs"})
        self.assertEqual(cache.get("key1"), "spam")
        self.assertEqual(cache.get("key2"), "eggs")
    def test_set_many_expiration(self):
        """set_many() honours its timeout argument for all keys."""
        # set_many takes a second ``timeout`` parameter
        cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
        time.sleep(2)
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
    def test_delete_many(self):
        """delete_many() removes exactly the listed keys."""
        # Multiple keys can be deleted using delete_many
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        cache.set("key3", "ham")
        cache.delete_many(["key1", "key2"])
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
        self.assertEqual(cache.get("key3"), "ham")
    def test_clear(self):
        """clear() empties the whole cache."""
        # The cache can be emptied using clear
        cache.set("key1", "spam")
        cache.set("key2", "eggs")
        cache.clear()
        self.assertIsNone(cache.get("key1"))
        self.assertIsNone(cache.get("key2"))
    def test_long_timeout(self):
        '''
        Using a timeout greater than 30 days makes memcached think
        it is an absolute expiration timestamp instead of a relative
        offset. Test that we honour this convention. Refs #12399.
        '''
        cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1)  # 30 days + 1 second
        self.assertEqual(cache.get('key1'), 'eggs')
        # Same boundary timeout via add() and set_many().
        cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1)
        self.assertEqual(cache.get('key2'), 'ham')
        cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1)
        self.assertEqual(cache.get('key3'), 'sausage')
        self.assertEqual(cache.get('key4'), 'lobster bisque')
    def test_forever_timeout(self):
        '''
        Passing in None into timeout results in a value that is cached forever
        '''
        cache.set('key1', 'eggs', None)
        self.assertEqual(cache.get('key1'), 'eggs')
        cache.add('key2', 'ham', None)
        self.assertEqual(cache.get('key2'), 'ham')
        # add() on an existing key fails and leaves the old value intact.
        added = cache.add('key1', 'new eggs', None)
        self.assertEqual(added, False)
        self.assertEqual(cache.get('key1'), 'eggs')
        cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None)
        self.assertEqual(cache.get('key3'), 'sausage')
        self.assertEqual(cache.get('key4'), 'lobster bisque')
    def test_zero_timeout(self):
        '''
        Passing in zero into timeout results in a value that is not cached
        '''
        cache.set('key1', 'eggs', 0)
        self.assertIsNone(cache.get('key1'))
        cache.add('key2', 'ham', 0)
        self.assertIsNone(cache.get('key2'))
        # set_many with timeout 0 likewise stores nothing.
        cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0)
        self.assertIsNone(cache.get('key3'))
        self.assertIsNone(cache.get('key4'))
    def test_float_timeout(self):
        """A float timeout is accepted without error."""
        # Make sure a timeout given as a float doesn't crash anything.
        cache.set("key1", "spam", 100.2)
        self.assertEqual(cache.get("key1"), "spam")
    def _perform_cull_test(self, cull_cache, initial_count, final_count):
        """Overflow `cull_cache` with `initial_count` - 1 entries and check
        that exactly `final_count` survive the cull."""
        # Create initial cache key entries. This will overflow the cache,
        # causing a cull.
        for i in range(1, initial_count):
            cull_cache.set('cull%d' % i, 'value', 1000)
        count = 0
        # Count how many keys are left in the cache.
        for i in range(1, initial_count):
            if cull_cache.has_key('cull%d' % i):
                count = count + 1
        self.assertEqual(count, final_count)
    def test_cull(self):
        """Default CULL_FREQUENCY evicts a fraction of entries on overflow."""
        self._perform_cull_test(caches['cull'], 50, 29)
    def test_zero_cull(self):
        """CULL_FREQUENCY=0 empties the cache on overflow before refilling."""
        self._perform_cull_test(caches['zero_cull'], 50, 19)
    def test_invalid_keys(self):
        """
        All the builtin backends (except memcached, see below) should warn on
        keys that would be refused by memcached. This encourages portable
        caching code without making it too difficult to use production backends
        with more liberal key rules. Refs #6447.
        """
        # mimic custom ``make_key`` method being defined since the default will
        # never show the below warnings
        def func(key, *args):
            return key
        old_func = cache.key_func
        cache.key_func = func
        try:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                # memcached does not allow whitespace or control characters in keys
                cache.set('key with spaces', 'value')
                # one warning per offending character class in the key
                self.assertEqual(len(w), 2)
                self.assertIsInstance(w[0].message, CacheKeyWarning)
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                # memcached limits key length to 250
                cache.set('a' * 251, 'value')
                self.assertEqual(len(w), 1)
                self.assertIsInstance(w[0].message, CacheKeyWarning)
        finally:
            # always restore the original key function
            cache.key_func = old_func
    def test_cache_versioning_get_set(self):
        """set/get see a value only under the version it was stored with."""
        # set, using default version = 1
        cache.set('answer1', 42)
        self.assertEqual(cache.get('answer1'), 42)
        self.assertEqual(cache.get('answer1', version=1), 42)
        self.assertIsNone(cache.get('answer1', version=2))
        self.assertIsNone(caches['v2'].get('answer1'))
        self.assertEqual(caches['v2'].get('answer1', version=1), 42)
        self.assertIsNone(caches['v2'].get('answer1', version=2))
        # set, default version = 1, but manually override version = 2
        cache.set('answer2', 42, version=2)
        self.assertIsNone(cache.get('answer2'))
        self.assertIsNone(cache.get('answer2', version=1))
        self.assertEqual(cache.get('answer2', version=2), 42)
        self.assertEqual(caches['v2'].get('answer2'), 42)
        self.assertIsNone(caches['v2'].get('answer2', version=1))
        self.assertEqual(caches['v2'].get('answer2', version=2), 42)
        # v2 set, using default version = 2
        caches['v2'].set('answer3', 42)
        self.assertIsNone(cache.get('answer3'))
        self.assertIsNone(cache.get('answer3', version=1))
        self.assertEqual(cache.get('answer3', version=2), 42)
        self.assertEqual(caches['v2'].get('answer3'), 42)
        self.assertIsNone(caches['v2'].get('answer3', version=1))
        self.assertEqual(caches['v2'].get('answer3', version=2), 42)
        # v2 set, default version = 2, but manually override version = 1
        caches['v2'].set('answer4', 42, version=1)
        self.assertEqual(cache.get('answer4'), 42)
        self.assertEqual(cache.get('answer4', version=1), 42)
        self.assertIsNone(cache.get('answer4', version=2))
        self.assertIsNone(caches['v2'].get('answer4'))
        self.assertEqual(caches['v2'].get('answer4', version=1), 42)
        self.assertIsNone(caches['v2'].get('answer4', version=2))
    def test_cache_versioning_add(self):
        """add() respects explicit and default versions independently."""
        # add, default version = 1, but manually override version = 2
        cache.add('answer1', 42, version=2)
        self.assertIsNone(cache.get('answer1', version=1))
        self.assertEqual(cache.get('answer1', version=2), 42)
        cache.add('answer1', 37, version=2)
        self.assertIsNone(cache.get('answer1', version=1))
        self.assertEqual(cache.get('answer1', version=2), 42)
        cache.add('answer1', 37, version=1)
        self.assertEqual(cache.get('answer1', version=1), 37)
        self.assertEqual(cache.get('answer1', version=2), 42)
        # v2 add, using default version = 2
        caches['v2'].add('answer2', 42)
        self.assertIsNone(cache.get('answer2', version=1))
        self.assertEqual(cache.get('answer2', version=2), 42)
        caches['v2'].add('answer2', 37)
        self.assertIsNone(cache.get('answer2', version=1))
        self.assertEqual(cache.get('answer2', version=2), 42)
        caches['v2'].add('answer2', 37, version=1)
        self.assertEqual(cache.get('answer2', version=1), 37)
        self.assertEqual(cache.get('answer2', version=2), 42)
        # v2 add, default version = 2, but manually override version = 1
        caches['v2'].add('answer3', 42, version=1)
        self.assertEqual(cache.get('answer3', version=1), 42)
        self.assertIsNone(cache.get('answer3', version=2))
        caches['v2'].add('answer3', 37, version=1)
        self.assertEqual(cache.get('answer3', version=1), 42)
        self.assertIsNone(cache.get('answer3', version=2))
        caches['v2'].add('answer3', 37)
        self.assertEqual(cache.get('answer3', version=1), 42)
        self.assertEqual(cache.get('answer3', version=2), 37)
    def test_cache_versioning_has_key(self):
        """has_key() only matches the version the value was stored under."""
        cache.set('answer1', 42)
        # has_key
        self.assertTrue(cache.has_key('answer1'))
        self.assertTrue(cache.has_key('answer1', version=1))
        self.assertFalse(cache.has_key('answer1', version=2))
        self.assertFalse(caches['v2'].has_key('answer1'))
        self.assertTrue(caches['v2'].has_key('answer1', version=1))
        self.assertFalse(caches['v2'].has_key('answer1', version=2))
    def test_cache_versioning_delete(self):
        """delete() removes only the value stored under the resolved version."""
        cache.set('answer1', 37, version=1)
        cache.set('answer1', 42, version=2)
        cache.delete('answer1')
        self.assertIsNone(cache.get('answer1', version=1))
        self.assertEqual(cache.get('answer1', version=2), 42)
        cache.set('answer2', 37, version=1)
        cache.set('answer2', 42, version=2)
        cache.delete('answer2', version=2)
        self.assertEqual(cache.get('answer2', version=1), 37)
        self.assertIsNone(cache.get('answer2', version=2))
        cache.set('answer3', 37, version=1)
        cache.set('answer3', 42, version=2)
        caches['v2'].delete('answer3')
        self.assertEqual(cache.get('answer3', version=1), 37)
        self.assertIsNone(cache.get('answer3', version=2))
        cache.set('answer4', 37, version=1)
        cache.set('answer4', 42, version=2)
        caches['v2'].delete('answer4', version=1)
        self.assertIsNone(cache.get('answer4', version=1))
        self.assertEqual(cache.get('answer4', version=2), 42)
    def test_cache_versioning_incr_decr(self):
        """incr()/decr() only touch the value under the resolved version."""
        cache.set('answer1', 37, version=1)
        cache.set('answer1', 42, version=2)
        cache.incr('answer1')
        self.assertEqual(cache.get('answer1', version=1), 38)
        self.assertEqual(cache.get('answer1', version=2), 42)
        cache.decr('answer1')
        self.assertEqual(cache.get('answer1', version=1), 37)
        self.assertEqual(cache.get('answer1', version=2), 42)
        cache.set('answer2', 37, version=1)
        cache.set('answer2', 42, version=2)
        cache.incr('answer2', version=2)
        self.assertEqual(cache.get('answer2', version=1), 37)
        self.assertEqual(cache.get('answer2', version=2), 43)
        cache.decr('answer2', version=2)
        self.assertEqual(cache.get('answer2', version=1), 37)
        self.assertEqual(cache.get('answer2', version=2), 42)
        cache.set('answer3', 37, version=1)
        cache.set('answer3', 42, version=2)
        caches['v2'].incr('answer3')
        self.assertEqual(cache.get('answer3', version=1), 37)
        self.assertEqual(cache.get('answer3', version=2), 43)
        caches['v2'].decr('answer3')
        self.assertEqual(cache.get('answer3', version=1), 37)
        self.assertEqual(cache.get('answer3', version=2), 42)
        cache.set('answer4', 37, version=1)
        cache.set('answer4', 42, version=2)
        caches['v2'].incr('answer4', version=1)
        self.assertEqual(cache.get('answer4', version=1), 38)
        self.assertEqual(cache.get('answer4', version=2), 42)
        caches['v2'].decr('answer4', version=1)
        self.assertEqual(cache.get('answer4', version=1), 37)
        self.assertEqual(cache.get('answer4', version=2), 42)
    def test_cache_versioning_get_set_many(self):
        """set_many()/get_many() respect explicit and default versions."""
        # set, using default version = 1
        cache.set_many({'ford1': 37, 'arthur1': 42})
        self.assertDictEqual(cache.get_many(['ford1', 'arthur1']),
                             {'ford1': 37, 'arthur1': 42})
        self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=1),
                             {'ford1': 37, 'arthur1': 42})
        self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=2), {})
        self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1']), {})
        self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1),
                             {'ford1': 37, 'arthur1': 42})
        self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {})
        # set, default version = 1, but manually override version = 2
        cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
        self.assertDictEqual(cache.get_many(['ford2', 'arthur2']), {})
        self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=1), {})
        self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=2),
                             {'ford2': 37, 'arthur2': 42})
        self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2']),
                             {'ford2': 37, 'arthur2': 42})
        self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {})
        self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2),
                             {'ford2': 37, 'arthur2': 42})
        # v2 set, using default version = 2
        caches['v2'].set_many({'ford3': 37, 'arthur3': 42})
        self.assertDictEqual(cache.get_many(['ford3', 'arthur3']), {})
        self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=1), {})
        self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=2),
                             {'ford3': 37, 'arthur3': 42})
        self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3']),
                             {'ford3': 37, 'arthur3': 42})
        self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {})
        self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2),
                             {'ford3': 37, 'arthur3': 42})
        # v2 set, default version = 2, but manually override version = 1
        caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1)
        self.assertDictEqual(cache.get_many(['ford4', 'arthur4']),
                             {'ford4': 37, 'arthur4': 42})
        self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=1),
                             {'ford4': 37, 'arthur4': 42})
        self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=2), {})
        self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4']), {})
        self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1),
                             {'ford4': 37, 'arthur4': 42})
        self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {})
    def test_incr_version(self):
        """incr_version() moves a value from version N to N+1."""
        cache.set('answer', 42, version=2)
        self.assertIsNone(cache.get('answer'))
        self.assertIsNone(cache.get('answer', version=1))
        self.assertEqual(cache.get('answer', version=2), 42)
        self.assertIsNone(cache.get('answer', version=3))
        self.assertEqual(cache.incr_version('answer', version=2), 3)
        self.assertIsNone(cache.get('answer'))
        self.assertIsNone(cache.get('answer', version=1))
        self.assertIsNone(cache.get('answer', version=2))
        self.assertEqual(cache.get('answer', version=3), 42)
        caches['v2'].set('answer2', 42)
        self.assertEqual(caches['v2'].get('answer2'), 42)
        self.assertIsNone(caches['v2'].get('answer2', version=1))
        self.assertEqual(caches['v2'].get('answer2', version=2), 42)
        self.assertIsNone(caches['v2'].get('answer2', version=3))
        self.assertEqual(caches['v2'].incr_version('answer2'), 3)
        self.assertIsNone(caches['v2'].get('answer2'))
        self.assertIsNone(caches['v2'].get('answer2', version=1))
        self.assertIsNone(caches['v2'].get('answer2', version=2))
        self.assertEqual(caches['v2'].get('answer2', version=3), 42)
        self.assertRaises(ValueError, cache.incr_version, 'does_not_exist')
    def test_decr_version(self):
        """decr_version() moves a value from version N to N-1."""
        cache.set('answer', 42, version=2)
        self.assertIsNone(cache.get('answer'))
        self.assertIsNone(cache.get('answer', version=1))
        self.assertEqual(cache.get('answer', version=2), 42)
        self.assertEqual(cache.decr_version('answer', version=2), 1)
        self.assertEqual(cache.get('answer'), 42)
        self.assertEqual(cache.get('answer', version=1), 42)
        self.assertIsNone(cache.get('answer', version=2))
        caches['v2'].set('answer2', 42)
        self.assertEqual(caches['v2'].get('answer2'), 42)
        self.assertIsNone(caches['v2'].get('answer2', version=1))
        self.assertEqual(caches['v2'].get('answer2', version=2), 42)
        self.assertEqual(caches['v2'].decr_version('answer2'), 1)
        self.assertIsNone(caches['v2'].get('answer2'))
        self.assertEqual(caches['v2'].get('answer2', version=1), 42)
        self.assertIsNone(caches['v2'].get('answer2', version=2))
        self.assertRaises(ValueError, cache.decr_version, 'does_not_exist', version=2)
def test_custom_key_func(self):
    """Caches using different key functions cannot see each other's entries."""
    custom1 = caches['custom_key']
    custom2 = caches['custom_key2']
    # A key written through the default key function is invisible to the
    # custom-key caches...
    cache.set('answer1', 42)
    self.assertEqual(42, cache.get('answer1'))
    self.assertIsNone(custom1.get('answer1'))
    self.assertIsNone(custom2.get('answer1'))
    # ...and vice versa. The two custom-key caches share a key function,
    # so they do see each other.
    custom1.set('answer2', 42)
    self.assertIsNone(cache.get('answer2'))
    self.assertEqual(42, custom1.get('answer2'))
    self.assertEqual(42, custom2.get('answer2'))
def test_cache_write_unpickable_object(self):
    """
    A response carrying cookies (set via set_cookie) round-trips intact
    through the Update/FetchFromCache middleware pair: content and cookies
    survive both the initial caching and re-caching of the cached response.
    """
    update_middleware = UpdateCacheMiddleware()
    update_middleware.cache = cache
    fetch_middleware = FetchFromCacheMiddleware()
    fetch_middleware.cache = cache

    request = self.factory.get('/cache/test')
    request._cache_update_cache = True
    # Consistency fix: use the locally configured fetch_middleware for the
    # cold-cache probe too. The original instantiated a throwaway
    # FetchFromCacheMiddleware() here, which left fetch_middleware's
    # explicit cache assignment unused for this first call.
    get_cache_data = fetch_middleware.process_request(request)
    self.assertIsNone(get_cache_data)

    response = HttpResponse()
    content = 'Testing cookie serialization.'
    response.content = content
    response.set_cookie('foo', 'bar')

    # First pass: cache the response, then fetch and compare.
    update_middleware.process_response(request, response)
    get_cache_data = fetch_middleware.process_request(request)
    self.assertIsNotNone(get_cache_data)
    self.assertEqual(get_cache_data.content, content.encode('utf-8'))
    self.assertEqual(get_cache_data.cookies, response.cookies)

    # Second pass: re-cache the response we got from the cache and make
    # sure nothing was lost in the round trip.
    update_middleware.process_response(request, get_cache_data)
    get_cache_data = fetch_middleware.process_request(request)
    self.assertIsNotNone(get_cache_data)
    self.assertEqual(get_cache_data.content, content.encode('utf-8'))
    self.assertEqual(get_cache_data.cookies, response.cookies)
def test_add_fail_on_pickleerror(self):
    """Unpicklable values must raise from add(), not be cached silently.

    See https://code.djangoproject.com/ticket/21200
    """
    self.assertRaises(pickle.PickleError, cache.add, 'unpickable', Unpickable())
def test_set_fail_on_pickleerror(self):
    """Unpicklable values must raise from set(), not be cached silently.

    See https://code.djangoproject.com/ticket/21200
    """
    self.assertRaises(pickle.PickleError, cache.set, 'unpickable', Unpickable())
def test_get_or_set(self):
    """get_or_set() stores the default on a miss and returns it afterwards."""
    self.assertIsNone(cache.get('projector'))
    self.assertEqual(42, cache.get_or_set('projector', 42))
    self.assertEqual(42, cache.get('projector'))
def test_get_or_set_callable(self):
    """A callable default is invoked and its return value is cached."""
    def produce_value():
        return 'value'

    self.assertEqual('value', cache.get_or_set('mykey', produce_value))
def test_get_or_set_version(self):
    """get_or_set() honours the version argument and requires a default value."""
    missing_value_msg = 'You need to specify a value.'
    cache.get_or_set('brian', 1979, version=2)
    # Omitting the value entirely is an error, with or without a version.
    with self.assertRaisesMessage(ValueError, missing_value_msg):
        cache.get_or_set('brian')
    with self.assertRaisesMessage(ValueError, missing_value_msg):
        cache.get_or_set('brian', version=1)
    # Each version is an independent slot.
    self.assertIsNone(cache.get('brian', version=1))
    self.assertEqual(42, cache.get_or_set('brian', 42, version=1))
    self.assertEqual(1979, cache.get_or_set('brian', 1979, version=2))
    self.assertIsNone(cache.get('brian', version=3))
@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.db.DatabaseCache',
    # Spaces are used in the table name to ensure quoting/escaping is working
    LOCATION='test cache table'
))
class DBCacheTests(BaseCacheTests, TransactionTestCase):
    # Runs the shared BaseCacheTests suite against the database backend.
    # TransactionTestCase (not TestCase) is used so real commits/rollbacks
    # occur — see test_clear_commits_transaction below.

    available_apps = ['cache']

    def setUp(self):
        # The super calls needs to happen first for the settings override.
        super(DBCacheTests, self).setUp()
        self.create_table()

    def tearDown(self):
        # The super call needs to happen first because it uses the database.
        super(DBCacheTests, self).tearDown()
        self.drop_table()

    def create_table(self):
        # Create the cache table named in LOCATION via the management command.
        management.call_command('createcachetable', verbosity=0, interactive=False)

    def drop_table(self):
        with connection.cursor() as cursor:
            # quote_name() is what makes the space-containing table name valid SQL.
            table_name = connection.ops.quote_name('test cache table')
            cursor.execute('DROP TABLE %s' % table_name)

    def test_zero_cull(self):
        # Exercises the 'zero_cull' alias (presumably configured with
        # CULL_FREQUENCY=0 elsewhere in this file — verify against the
        # caches_setting_for_tests definition).
        self._perform_cull_test(caches['zero_cull'], 50, 18)

    def test_second_call_doesnt_crash(self):
        out = six.StringIO()
        management.call_command('createcachetable', stdout=out)
        # One "already exists" notice is expected per configured cache alias.
        self.assertEqual(out.getvalue(),
            "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))

    @override_settings(CACHES=caches_setting_for_tests(
        BACKEND='django.core.cache.backends.db.DatabaseCache',
        # Use another table name to avoid the 'table already exists' message.
        LOCATION='createcachetable_dry_run_mode'
    ))
    def test_createcachetable_dry_run_mode(self):
        out = six.StringIO()
        management.call_command('createcachetable', dry_run=True, stdout=out)
        output = out.getvalue()
        # In dry-run mode the SQL is printed instead of executed.
        self.assertTrue(output.startswith("CREATE TABLE"))

    def test_createcachetable_with_table_argument(self):
        """
        Delete and recreate cache table with legacy behavior (explicitly
        specifying the table name).
        """
        self.drop_table()
        out = six.StringIO()
        management.call_command(
            'createcachetable',
            'test cache table',
            verbosity=2,
            stdout=out,
        )
        self.assertEqual(out.getvalue(),
            "Cache table 'test cache table' created.\n")

    def test_clear_commits_transaction(self):
        # Ensure the database transaction is committed (#19896)
        cache.set("key1", "spam")
        cache.clear()
        transaction.rollback()
        # If clear() committed, the rollback cannot bring the key back.
        self.assertIsNone(cache.get("key1"))
@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
    """Re-run the entire DBCacheTests suite with USE_TZ=True."""
    pass
class DBCacheRouter(object):
    """A router that puts the cache table on the 'other' database."""

    def db_for_read(self, model, **hints):
        # Reads for the cache app go to 'other'; defer on everything else.
        return 'other' if model._meta.app_label == 'django_cache' else None

    def db_for_write(self, model, **hints):
        # Writes mirror the read routing.
        return 'other' if model._meta.app_label == 'django_cache' else None

    def allow_migrate(self, db, app_label, **hints):
        # Only allow the cache app's migrations on 'other'; defer otherwise.
        return db == 'other' if app_label == 'django_cache' else None
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
            'LOCATION': 'my_cache_table',
        },
    },
)
class CreateCacheTableForDBCacheTests(TestCase):
    # Verifies that createcachetable respects DATABASE_ROUTERS.
    multi_db = True

    @override_settings(DATABASE_ROUTERS=[DBCacheRouter()])
    def test_createcachetable_observes_database_router(self):
        # cache table should not be created on 'default'
        with self.assertNumQueries(0, using='default'):
            management.call_command('createcachetable',
                                    database='default',
                                    verbosity=0, interactive=False)
        # cache table should be created on 'other'
        # Queries:
        #   1: check table doesn't already exist
        #   2: create savepoint (if transactional DDL is supported)
        #   3: create the table
        #   4: create the index
        #   5: release savepoint (if transactional DDL is supported)
        num = 5 if connections['other'].features.can_rollback_ddl else 3
        with self.assertNumQueries(num, using='other'):
            management.call_command('createcachetable',
                                    database='other',
                                    verbosity=0, interactive=False)
class PicklingSideEffect(object):
    """Pickle-time probe: records whether the cache's lock was write-held
    at the moment the object was pickled (see test_locking_on_pickle)."""

    def __init__(self, cache):
        self.cache = cache
        self.locked = False

    def __getstate__(self):
        # Latch `locked` to True if any writer holds the lock right now;
        # once True it stays True. Return an empty state dict either way.
        self.locked = self.locked or bool(self.cache._lock.active_writers)
        return {}
@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.locmem.LocMemCache',
))
class LocMemCacheTests(BaseCacheTests, TestCase):
    # Runs the shared BaseCacheTests suite against the local-memory backend.

    def setUp(self):
        super(LocMemCacheTests, self).setUp()

        # LocMem requires a hack to make the other caches
        # share a data store with the 'normal' cache.
        # (Each LocMemCache instance normally gets its own private store;
        # pointing the private _cache/_expire_info dicts at the default
        # cache's makes the prefix/version/key-func aliases observable.)
        caches['prefix']._cache = cache._cache
        caches['prefix']._expire_info = cache._expire_info
        caches['v2']._cache = cache._cache
        caches['v2']._expire_info = cache._expire_info
        caches['custom_key']._cache = cache._cache
        caches['custom_key']._expire_info = cache._expire_info
        caches['custom_key2']._cache = cache._cache
        caches['custom_key2']._expire_info = cache._expire_info

    @override_settings(CACHES={
        'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'},
        'other': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'other'
        },
    })
    def test_multiple_caches(self):
        "Check that multiple locmem caches are isolated"
        cache.set('value', 42)
        self.assertEqual(caches['default'].get('value'), 42)
        self.assertIsNone(caches['other'].get('value'))

    def test_locking_on_pickle(self):
        """#20613/#18541 -- Ensures pickling is done outside of the lock."""
        bad_obj = PicklingSideEffect(cache)
        cache.set('set', bad_obj)
        self.assertFalse(bad_obj.locked, "Cache was locked during pickling")

        cache.add('add', bad_obj)
        self.assertFalse(bad_obj.locked, "Cache was locked during pickling")

    def test_incr_decr_timeout(self):
        """incr/decr does not modify expiry time (matches memcached behavior)"""
        key = 'value'
        _key = cache.make_key(key)
        cache.set(key, 1, timeout=cache.default_timeout * 10)
        expire = cache._expire_info[_key]
        cache.incr(key)
        self.assertEqual(expire, cache._expire_info[_key])
        cache.decr(key)
        self.assertEqual(expire, cache._expire_info[_key])
# memcached backend isn't guaranteed to be available.
# To check the memcached backend, the test settings file will
# need to contain at least one cache backend setting that points at
# your memcache server.
memcached_params = {}
for _cache_params in settings.CACHES.values():
    # If several memcached aliases are configured, the last one wins.
    if _cache_params['BACKEND'].startswith('django.core.cache.backends.memcached.'):
        memcached_params = _cache_params

# Variants of the discovered parameters used by individual timeout tests below.
memcached_never_expiring_params = memcached_params.copy()
memcached_never_expiring_params['TIMEOUT'] = None

memcached_far_future_params = memcached_params.copy()
memcached_far_future_params['TIMEOUT'] = 31536000  # 60*60*24*365, 1 year
@unittest.skipUnless(memcached_params, "memcached not available")
@override_settings(CACHES=caches_setting_for_tests(base=memcached_params))
class MemcachedCacheTests(BaseCacheTests, TestCase):
    # Runs the shared suite against whichever memcached backend the test
    # settings configured; skipped entirely when none is available.

    def test_invalid_keys(self):
        """
        On memcached, we don't introduce a duplicate key validation
        step (for speed reasons), we just let the memcached API
        library raise its own exception on bad keys. Refs #6447.

        In order to be memcached-API-library agnostic, we only assert
        that a generic exception of some kind is raised.
        """
        # memcached does not allow whitespace or control characters in keys
        self.assertRaises(Exception, cache.set, 'key with spaces', 'value')
        # memcached limits key length to 250
        self.assertRaises(Exception, cache.set, 'a' * 251, 'value')

    # Explicitly display a skipped test if no configured cache uses MemcachedCache
    @unittest.skipUnless(
        memcached_params.get('BACKEND') == 'django.core.cache.backends.memcached.MemcachedCache',
        "cache with python-memcached library not available")
    def test_memcached_uses_highest_pickle_version(self):
        # Regression test for #19810
        for cache_key, cache_config in settings.CACHES.items():
            if cache_config['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache':
                self.assertEqual(caches[cache_key]._cache.pickleProtocol,
                                 pickle.HIGHEST_PROTOCOL)

    @override_settings(CACHES=caches_setting_for_tests(base=memcached_never_expiring_params))
    def test_default_never_expiring_timeout(self):
        # Regression test for #22845
        cache.set('infinite_foo', 'bar')
        self.assertEqual(cache.get('infinite_foo'), 'bar')

    @override_settings(CACHES=caches_setting_for_tests(base=memcached_far_future_params))
    def test_default_far_future_timeout(self):
        # Regression test for #22845
        cache.set('future_foo', 'bar')
        self.assertEqual(cache.get('future_foo'), 'bar')

    def test_cull(self):
        # culling isn't implemented, memcached deals with it.
        pass

    def test_zero_cull(self):
        # culling isn't implemented, memcached deals with it.
        pass

    def test_memcached_deletes_key_on_failed_set(self):
        # By default memcached allows objects up to 1MB. For the cache_db session
        # backend to always use the current session, memcached needs to delete
        # the old key if it fails to set.
        # pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can
        # tell from a quick check of its source code. This is falling back to
        # the default value exposed by python-memcached on my system.
        max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576)

        cache.set('small_value', 'a')
        self.assertEqual(cache.get('small_value'), 'a')

        large_value = 'a' * (max_value_length + 1)
        cache.set('small_value', large_value)
        # small_value should be deleted, or set if configured to accept larger values
        value = cache.get('small_value')
        self.assertTrue(value is None or value == large_value)
@override_settings(CACHES=caches_setting_for_tests(
    BACKEND='django.core.cache.backends.filebased.FileBasedCache',
))
class FileBasedCacheTests(BaseCacheTests, TestCase):
    """
    Specific test cases for the file-based cache.
    """
    def setUp(self):
        super(FileBasedCacheTests, self).setUp()
        self.dirname = tempfile.mkdtemp()
        # Caches location cannot be modified through override_settings / modify_settings,
        # hence settings are manipulated directly here and the setting_changed signal
        # is triggered manually.
        for cache_params in settings.CACHES.values():
            cache_params.update({'LOCATION': self.dirname})
        setting_changed.send(self.__class__, setting='CACHES', enter=False)

    def tearDown(self):
        super(FileBasedCacheTests, self).tearDown()
        # Call parent first, as cache.clear() may recreate cache base directory
        shutil.rmtree(self.dirname)

    def test_ignores_non_cache_files(self):
        fname = os.path.join(self.dirname, 'not-a-cache-file')
        with open(fname, 'w'):
            os.utime(fname, None)
        cache.clear()
        self.assertTrue(os.path.exists(fname),
                        'Expected cache.clear to ignore non cache files')
        os.remove(fname)

    def test_clear_does_not_remove_cache_dir(self):
        cache.clear()
        self.assertTrue(os.path.exists(self.dirname),
                        'Expected cache.clear to keep the cache dir')

    def test_creates_cache_dir_if_nonexistent(self):
        os.rmdir(self.dirname)
        cache.set('foo', 'bar')
        # Bug fix: the original called os.path.exists() without asserting on
        # its result, so this test could never fail. Assert explicitly that
        # set() recreated the cache directory.
        self.assertTrue(os.path.exists(self.dirname))
@override_settings(CACHES={
    'default': {
        'BACKEND': 'cache.liberal_backend.CacheClass',
    },
})
class CustomCacheKeyValidationTests(TestCase):
    """
    A backend that mixes in a custom ``validate_key`` over a builtin backend
    can replace the default key validation entirely. Refs #6447.
    """

    def test_custom_key_validation(self):
        # This key breaks both default rules: it exceeds 250 characters
        # and it contains spaces — the liberal backend must accept it.
        long_spaced_key = 'some key with spaces' * 15
        stored_value = 'a value'
        cache.set(long_spaced_key, stored_value)
        self.assertEqual(stored_value, cache.get(long_spaced_key))
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'cache.closeable_cache.CacheClass',
        }
    }
)
class CacheClosingTests(TestCase):
    # The request_finished signal must close cache connections.

    def test_close(self):
        self.assertFalse(cache.closed)
        # Firing request_finished should trigger the cache's close() hook.
        signals.request_finished.send(self.__class__)
        self.assertTrue(cache.closed)
# Minimal locmem configuration used as the baseline by the default-timeout
# tests below (DefaultNonExpiringCacheKeyTests).
DEFAULT_MEMORY_CACHES_SETTINGS = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
    }
}
# Same configuration but with TIMEOUT=None, i.e. keys never expire (#22085).
# deepcopy so mutating the nested dict doesn't alter the baseline above.
NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS)
NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None
class DefaultNonExpiringCacheKeyTests(TestCase):
    """Tests that verify that settings having Cache arguments with a TIMEOUT
    set to `None` will create Caches that will set non-expiring keys.

    This fixes ticket #22085.
    """
    def setUp(self):
        # The 5 minute (300 seconds) default expiration time for keys is
        # defined in the implementation of the initializer method of the
        # BaseCache type.
        self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout

    def tearDown(self):
        # `del` is a statement — the original's del(...) parentheses were
        # redundant.
        del self.DEFAULT_TIMEOUT

    def test_default_expiration_time_for_keys_is_5_minutes(self):
        """The default expiration time of a cache key is 5 minutes.

        This value is defined inside the __init__() method of the
        :class:`django.core.cache.backends.base.BaseCache` type.
        """
        self.assertEqual(300, self.DEFAULT_TIMEOUT)

    def test_caches_with_unset_timeout_has_correct_default_timeout(self):
        """Caches that have the TIMEOUT parameter undefined in the default
        settings will use the default 5 minute timeout.
        """
        cache = caches[DEFAULT_CACHE_ALIAS]
        self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout)

    @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
    def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self):
        """Memory caches that have the TIMEOUT parameter set to `None` in the
        default settings with have `None` as the default timeout.

        This means "no timeout".
        """
        cache = caches[DEFAULT_CACHE_ALIAS]
        self.assertIsNone(cache.default_timeout)
        self.assertIsNone(cache.get_backend_timeout())

    @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS)
    def test_caches_with_unset_timeout_set_expiring_key(self):
        """Memory caches that have the TIMEOUT parameter unset will set cache
        keys having the default 5 minute timeout.
        """
        key = "my-key"
        value = "my-value"
        cache = caches[DEFAULT_CACHE_ALIAS]
        cache.set(key, value)
        cache_key = cache.make_key(key)
        self.assertIsNotNone(cache._expire_info[cache_key])

    @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS)
    def test_caches_set_with_timeout_as_none_set_non_expiring_key(self):
        """Memory caches that have the TIMEOUT parameter set to `None` will set
        a non expiring key by default.
        """
        # Bug fix: this method was named "text_caches_..." (typo for "test_"),
        # so the test runner never collected or executed it.
        key = "another-key"
        value = "another-value"
        cache = caches[DEFAULT_CACHE_ALIAS]
        cache.set(key, value)
        cache_key = cache.make_key(key)
        self.assertIsNone(cache._expire_info[cache_key])
@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHE_MIDDLEWARE_SECONDS=1,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    USE_I18N=False,
)
class CacheUtils(TestCase):
    """TestCase for django.utils.cache functions."""

    def setUp(self):
        self.host = 'www.example.com'
        self.path = '/cache/test/'
        self.factory = RequestFactory(HTTP_HOST=self.host)

    def tearDown(self):
        cache.clear()

    def _get_request_cache(self, method='GET', query_string=None, update_cache=None):
        # Build a request (via the base helper) flagged so the cache middleware
        # stores the response. NOTE(review): the flag expression below is
        # always truthy — True when update_cache is falsy, update_cache itself
        # otherwise.
        request = self._get_request(self.host, self.path,
                                    method, query_string=query_string)
        request._cache_update_cache = True if not update_cache else update_cache
        return request

    def _set_cache(self, request, msg):
        # Push a response carrying `msg` through UpdateCacheMiddleware so it
        # lands in the cache.
        response = HttpResponse()
        response.content = msg
        return UpdateCacheMiddleware().process_response(request, response)

    def test_patch_vary_headers(self):
        headers = (
            # Initial vary, new headers, resulting vary.
            (None, ('Accept-Encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
            ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
        )
        for initial_vary, newheaders, resulting_vary in headers:
            response = HttpResponse()
            if initial_vary is not None:
                response['Vary'] = initial_vary
            patch_vary_headers(response, newheaders)
            self.assertEqual(response['Vary'], resulting_vary)

    def test_get_cache_key(self):
        # The expected keys below embed MD5 digests of the URL/headers, so
        # they are tightly coupled to the key-building implementation.
        request = self.factory.get(self.path)
        response = HttpResponse()
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)

        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )
        # Verify that a specified key_prefix is taken into account.
        key_prefix = 'localprefix'
        learn_cache_key(request, response, key_prefix=key_prefix)
        self.assertEqual(
            get_cache_key(request, key_prefix=key_prefix),
            'views.decorators.cache.cache_page.localprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_get_cache_key_with_query(self):
        request = self.factory.get(self.path, {'test': 1})
        response = HttpResponse()
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_cache_key_varies_by_url(self):
        """
        get_cache_key keys differ by fully-qualified URL instead of path
        """
        request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
        learn_cache_key(request1, HttpResponse())
        request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com')
        learn_cache_key(request2, HttpResponse())
        self.assertNotEqual(get_cache_key(request1), get_cache_key(request2))

    def test_learn_cache_key(self):
        request = self.factory.head(self.path)
        response = HttpResponse()
        response['Vary'] = 'Pony'
        # Make sure that the Vary header is added to the key hash
        learn_cache_key(request, response)

        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_patch_cache_control(self):
        tests = (
            # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
            (None, {'private': True}, {'private'}),

            # Test whether private/public attributes are mutually exclusive
            ('private', {'private': True}, {'private'}),
            ('private', {'public': True}, {'public'}),
            ('public', {'public': True}, {'public'}),
            ('public', {'private': True}, {'private'}),
            ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
            ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}),
            ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}),
        )

        cc_delim_re = re.compile(r'\s*,\s*')

        for initial_cc, newheaders, expected_cc in tests:
            response = HttpResponse()
            if initial_cc is not None:
                response['Cache-Control'] = initial_cc
            patch_cache_control(response, **newheaders)
            # Compare as a set: directive order in the header is unspecified.
            parts = set(cc_delim_re.split(response['Cache-Control']))
            self.assertEqual(parts, expected_cc)
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix',
        },
    },
)
class PrefixedCacheUtils(CacheUtils):
    # Re-runs the whole CacheUtils suite with a KEY_PREFIX on the backend.
    pass
@override_settings(
    CACHE_MIDDLEWARE_SECONDS=60,
    CACHE_MIDDLEWARE_KEY_PREFIX='test',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
)
class CacheHEADTest(TestCase):
    # HEAD requests must hit cache entries created for GET (and for HEAD).

    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    def _set_cache(self, request, msg):
        # Push a response with body `msg` through the update middleware.
        response = HttpResponse()
        response.content = msg
        return UpdateCacheMiddleware().process_response(request, response)

    def test_head_caches_correctly(self):
        test_content = 'test content'

        request = self.factory.head(self.path)
        request._cache_update_cache = True
        self._set_cache(request, test_content)

        request = self.factory.head(self.path)
        request._cache_update_cache = True
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(test_content.encode(), get_cache_data.content)

    def test_head_with_cached_get(self):
        # A response cached for a GET request is served for a later HEAD.
        test_content = 'test content'

        request = self.factory.get(self.path)
        request._cache_update_cache = True
        self._set_cache(request, test_content)

        request = self.factory.head(self.path)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(test_content.encode(), get_cache_data.content)
@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    LANGUAGES=[
        ('en', 'English'),
        ('es', 'Spanish'),
    ],
)
class CacheI18nTest(TestCase):
    # Verifies that cache keys vary on active language / time zone when
    # USE_I18N / USE_L10N / USE_TZ are enabled, and that the cache
    # middleware serves per-language content accordingly.

    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    def check_accept_language_vary(self, accept_language, vary, reference_key):
        # Helper: with the given Accept-Language header and Vary header,
        # both learned and computed keys must equal reference_key
        # (i.e. header-order and spelling variations normalize identically).
        request = self.factory.get(self.path)
        request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language
        request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
        response = HttpResponse()
        response['Vary'] = vary
        key = learn_cache_key(request, response)
        key2 = get_cache_key(request)
        self.assertEqual(key, reference_key)
        self.assertEqual(key2, reference_key)

    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation_accept_language(self):
        lang = translation.get_language()
        self.assertEqual(lang, 'en')
        request = self.factory.get(self.path)
        request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0'
        response = HttpResponse()
        response['Vary'] = 'accept-encoding'
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        # All of the following Accept-Language / Vary permutations should
        # normalize to the same cache key as the reference above.
        self.check_accept_language_vary(
            'en-us',
            'cookie, accept-language, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'en-US',
            'cookie, accept-encoding, accept-language',
            key
        )
        self.check_accept_language_vary(
            'en-US,en;q=0.8',
            'accept-encoding, accept-language, cookie',
            key
        )
        self.check_accept_language_vary(
            'en-US,en;q=0.8,ko;q=0.6',
            'accept-language, cookie, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ',
            'accept-encoding, cookie, accept-language',
            key
        )
        self.check_accept_language_vary(
            'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4',
            'accept-language, accept-encoding, cookie',
            key
        )
        self.check_accept_language_vary(
            'ko;q=1.0,en;q=0.5',
            'cookie, accept-language, accept-encoding',
            key
        )
        self.check_accept_language_vary(
            'ko, en',
            'cookie, accept-encoding, accept-language',
            key
        )
        self.check_accept_language_vary(
            'ko-KR, en-US',
            'accept-encoding, accept-language, cookie',
            key
        )

    @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
    def test_cache_key_i18n_formatting(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_i18n_timezone(self):
        request = self.factory.get(self.path)
        # This is tightly coupled to the implementation,
        # but it's the most straightforward way to test the key.
        tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False)
    def test_cache_key_no_i18n(self):
        request = self.factory.get(self.path)
        lang = translation.get_language()
        tz = force_text(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
        self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_with_non_ascii_tzname(self):
        # Regression test for #17476
        # Non-ASCII characters in the tz name must be stripped, not crash
        # key generation.
        class CustomTzName(timezone.UTC):
            name = ''

            def tzname(self, dt):
                return self.name

        request = self.factory.get(self.path)
        response = HttpResponse()
        with timezone.override(CustomTzName()):
            CustomTzName.name = 'Hora estándar de Argentina'.encode('UTF-8')  # UTF-8 string
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                    "Cache keys should include the time zone name when time zones are active")

            CustomTzName.name = 'Hora estándar de Argentina'    # unicode
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                    "Cache keys should include the time zone name when time zones are active")

    @override_settings(
        CACHE_MIDDLEWARE_KEY_PREFIX="test",
        CACHE_MIDDLEWARE_SECONDS=60,
        USE_ETAGS=True,
        USE_I18N=True,
    )
    def test_middleware(self):
        def set_cache(request, lang, msg):
            # Activate `lang`, then cache a response with body `msg`.
            translation.activate(lang)
            response = HttpResponse()
            response.content = msg
            return UpdateCacheMiddleware().process_response(request, response)

        # cache with non empty request.GET
        request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'})
        request._cache_update_cache = True

        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # first access, cache must return None
        self.assertIsNone(get_cache_data)
        response = HttpResponse()
        content = 'Check for cache with QUERY_STRING'
        response.content = content
        UpdateCacheMiddleware().process_response(request, response)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # cache must return content
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(get_cache_data.content, content.encode())
        # different QUERY_STRING, cache must be empty
        request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'})
        request._cache_update_cache = True
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNone(get_cache_data)

        # i18n tests
        en_message = "Hello world!"
        es_message = "Hola mundo!"

        request = self.factory.get(self.path)
        request._cache_update_cache = True
        set_cache(request, 'en', en_message)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # Check that we can recover the cache
        self.assertIsNotNone(get_cache_data)
        self.assertEqual(get_cache_data.content, en_message.encode())
        # Check that we use etags
        self.assertTrue(get_cache_data.has_header('ETag'))
        # Check that we can disable etags
        with self.settings(USE_ETAGS=False):
            request._cache_update_cache = True
            set_cache(request, 'en', en_message)
            get_cache_data = FetchFromCacheMiddleware().process_request(request)
            self.assertFalse(get_cache_data.has_header('ETag'))
        # change the session language and set content
        request = self.factory.get(self.path)
        request._cache_update_cache = True
        set_cache(request, 'es', es_message)
        # change again the language
        translation.activate('en')
        # retrieve the content from cache
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data.content, en_message.encode())
        # change again the language
        translation.activate('es')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data.content, es_message.encode())
        # reset the language
        translation.deactivate()

    @override_settings(
        CACHE_MIDDLEWARE_KEY_PREFIX="test",
        CACHE_MIDDLEWARE_SECONDS=60,
        USE_ETAGS=True,
    )
    def test_middleware_doesnt_cache_streaming_response(self):
        request = self.factory.get(self.path)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNone(get_cache_data)

        # This test passes on Python < 3.3 even without the corresponding code
        # in UpdateCacheMiddleware, because pickling a StreamingHttpResponse
        # fails (http://bugs.python.org/issue14288). LocMemCache silently
        # swallows the exception and doesn't store the response in cache.
        content = ['Check for cache with streaming content.']
        response = StreamingHttpResponse(content)
        UpdateCacheMiddleware().process_response(request, response)

        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertIsNone(get_cache_data)
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix'
        },
    },
)
class PrefixedCacheI18nTest(CacheI18nTest):
    # Re-runs the whole CacheI18nTest suite with a KEY_PREFIX on the backend.
    pass
def hello_world_view(request, value):
    """Trivial view used by decorator tests: echoes *value* in a greeting."""
    body = 'Hello World %s' % value
    return HttpResponse(body)
def csrf_view(request):
    """View exposing the request's CSRF token as the response body."""
    token = csrf(request)['csrf_token']
    return HttpResponse(token)
@override_settings(
    CACHE_MIDDLEWARE_ALIAS='other',
    CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
    CACHE_MIDDLEWARE_SECONDS=30,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
        'other': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'other',
            'TIMEOUT': '1',
        },
    },
)
class CacheMiddlewareTest(TestCase):
    """Exercise CacheMiddleware both as full middleware and as a view decorator."""

    def setUp(self):
        super(CacheMiddlewareTest, self).setUp()
        self.factory = RequestFactory()
        self.default_cache = caches['default']
        self.other_cache = caches['other']

    def tearDown(self):
        # Clear both configured backends so each test starts from a cold cache.
        self.default_cache.clear()
        self.other_cache.clear()
        super(CacheMiddlewareTest, self).tearDown()

    def test_constructor(self):
        """
        Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as
        Middleware vs. usage of CacheMiddleware as view decorator and setting attributes
        appropriately.
        """
        # If no arguments are passed in construction, it's being used as middleware.
        middleware = CacheMiddleware()
        # Now test object attributes against values defined in setUp above
        self.assertEqual(middleware.cache_timeout, 30)
        self.assertEqual(middleware.key_prefix, 'middlewareprefix')
        self.assertEqual(middleware.cache_alias, 'other')
        # If arguments are being passed in construction, it's being used as a decorator.
        # First, test with "defaults":
        as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)
        self.assertEqual(as_view_decorator.cache_timeout, 30)  # Timeout value for 'default' cache, i.e. 30
        self.assertEqual(as_view_decorator.key_prefix, '')
        self.assertEqual(as_view_decorator.cache_alias, 'default')  # Value of DEFAULT_CACHE_ALIAS from django.core.cache
        # Next, test with custom values:
        as_view_decorator_with_custom = CacheMiddleware(cache_timeout=60, cache_alias='other', key_prefix='foo')
        self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
        self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
        self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')

    def test_middleware(self):
        """Cache hits and misses across differently-configured middleware instances."""
        middleware = CacheMiddleware()
        prefix_middleware = CacheMiddleware(key_prefix='prefix1')
        timeout_middleware = CacheMiddleware(cache_timeout=1)
        request = self.factory.get('/view/')
        # Put the request through the request middleware
        result = middleware.process_request(request)
        self.assertIsNone(result)
        response = hello_world_view(request, '1')
        # Now put the response through the response middleware
        response = middleware.process_response(request, response)
        # Repeating the request should result in a cache hit
        result = middleware.process_request(request)
        self.assertIsNotNone(result)
        self.assertEqual(result.content, b'Hello World 1')
        # The same request through a different middleware won't hit
        result = prefix_middleware.process_request(request)
        self.assertIsNone(result)
        # The same request with a timeout _will_ hit
        result = timeout_middleware.process_request(request)
        self.assertIsNotNone(result)
        self.assertEqual(result.content, b'Hello World 1')

    def test_view_decorator(self):
        """cache_page() with differing timeouts, aliases and prefixes keeps entries separate."""
        # decorate the same view with different cache decorators
        default_view = cache_page(3)(hello_world_view)
        default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view)
        explicit_default_view = cache_page(3, cache='default')(hello_world_view)
        explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view)
        other_view = cache_page(1, cache='other')(hello_world_view)
        other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view)
        request = self.factory.get('/view/')
        # Request the view once
        response = default_view(request, '1')
        self.assertEqual(response.content, b'Hello World 1')
        # Request again -- hit the cache
        response = default_view(request, '2')
        self.assertEqual(response.content, b'Hello World 1')
        # Requesting the same view with the explicit cache should yield the same result
        response = explicit_default_view(request, '3')
        self.assertEqual(response.content, b'Hello World 1')
        # Requesting with a prefix will hit a different cache key
        response = explicit_default_with_prefix_view(request, '4')
        self.assertEqual(response.content, b'Hello World 4')
        # Hitting the same view again gives a cache hit
        response = explicit_default_with_prefix_view(request, '5')
        self.assertEqual(response.content, b'Hello World 4')
        # And going back to the implicit cache will hit the same cache
        response = default_with_prefix_view(request, '6')
        self.assertEqual(response.content, b'Hello World 4')
        # Requesting from an alternate cache won't hit cache
        response = other_view(request, '7')
        self.assertEqual(response.content, b'Hello World 7')
        # But a repeated hit will hit cache
        response = other_view(request, '8')
        self.assertEqual(response.content, b'Hello World 7')
        # And prefixing the alternate cache yields yet another cache entry
        response = other_with_prefix_view(request, '9')
        self.assertEqual(response.content, b'Hello World 9')
        # But if we wait a couple of seconds...
        time.sleep(2)
        # ... the default cache will still hit
        # NOTE(review): the bare expression below has no effect; it looks like
        # leftover code — confirm whether it can be removed.
        caches['default']
        response = default_view(request, '11')
        self.assertEqual(response.content, b'Hello World 1')
        # ... the default cache with a prefix will still hit
        response = default_with_prefix_view(request, '12')
        self.assertEqual(response.content, b'Hello World 4')
        # ... the explicit default cache will still hit
        response = explicit_default_view(request, '13')
        self.assertEqual(response.content, b'Hello World 1')
        # ... the explicit default cache with a prefix will still hit
        response = explicit_default_with_prefix_view(request, '14')
        self.assertEqual(response.content, b'Hello World 4')
        # .. but a rapidly expiring cache won't hit
        response = other_view(request, '15')
        self.assertEqual(response.content, b'Hello World 15')
        # .. even if it has a prefix
        response = other_with_prefix_view(request, '16')
        self.assertEqual(response.content, b'Hello World 16')

    def test_sensitive_cookie_not_cached(self):
        """
        Django must prevent caching of responses that set a user-specific (and
        maybe security sensitive) cookie in response to a cookie-less request.
        """
        csrf_middleware = CsrfViewMiddleware()
        cache_middleware = CacheMiddleware()
        request = self.factory.get('/view/')
        self.assertIsNone(cache_middleware.process_request(request))
        csrf_middleware.process_view(request, csrf_view, (), {})
        # Rendering csrf_view sets the CSRF cookie on the response.
        response = csrf_view(request)
        response = csrf_middleware.process_response(request, response)
        response = cache_middleware.process_response(request, response)
        # Inserting a CSRF cookie in a cookie-less request prevented caching.
        self.assertIsNone(cache_middleware.process_request(request))
@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHE_MIDDLEWARE_SECONDS=1,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    USE_I18N=False,
)
class TestWithTemplateResponse(TestCase):
    """
    Tests various headers w/ TemplateResponse.

    Most are probably redundant since they manipulate the same object
    anyway but the Etag header is 'special' because it relies on the
    content being complete (which is not necessarily always the case
    with a TemplateResponse)
    """
    def setUp(self):
        self.path = '/cache/test/'
        self.factory = RequestFactory()

    def tearDown(self):
        cache.clear()

    def test_patch_vary_headers(self):
        """patch_vary_headers() merges new headers into Vary case-insensitively."""
        headers = (
            # Initial vary, new headers, resulting vary.
            (None, ('Accept-Encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
            ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
        )
        for initial_vary, newheaders, resulting_vary in headers:
            template = engines['django'].from_string("This is a test")
            response = TemplateResponse(HttpRequest(), template)
            if initial_vary is not None:
                response['Vary'] = initial_vary
            patch_vary_headers(response, newheaders)
            self.assertEqual(response['Vary'], resulting_vary)

    def test_get_cache_key(self):
        """learn_cache_key()/get_cache_key() round-trip with a TemplateResponse."""
        request = self.factory.get(self.path)
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        key_prefix = 'localprefix'
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
        )
        # Verify that a specified key_prefix is taken into account.
        learn_cache_key(request, response, key_prefix=key_prefix)
        self.assertEqual(
            get_cache_key(request, key_prefix=key_prefix),
            'views.decorators.cache.cache_page.localprefix.GET.'
            '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e'
        )

    def test_get_cache_key_with_query(self):
        """The query string participates in the generated cache key."""
        request = self.factory.get(self.path, {'test': 1})
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        # Expect None if no headers have been set yet.
        self.assertIsNone(get_cache_key(request))
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(
            get_cache_key(request),
            'views.decorators.cache.cache_page.settingsprefix.GET.'
            '0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e'
        )

    @override_settings(USE_ETAGS=False)
    def test_without_etag(self):
        """With USE_ETAGS off, no ETag appears even after rendering."""
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        self.assertFalse(response.has_header('ETag'))
        patch_response_headers(response)
        self.assertFalse(response.has_header('ETag'))
        response = response.render()
        self.assertFalse(response.has_header('ETag'))

    @override_settings(USE_ETAGS=True)
    def test_with_etag(self):
        """With USE_ETAGS on, the ETag is only set once content is rendered."""
        template = engines['django'].from_string("This is a test")
        response = TemplateResponse(HttpRequest(), template)
        self.assertFalse(response.has_header('ETag'))
        patch_response_headers(response)
        # Still no ETag: the TemplateResponse has not been rendered yet.
        self.assertFalse(response.has_header('ETag'))
        response = response.render()
        self.assertTrue(response.has_header('ETag'))
class TestMakeTemplateFragmentKey(TestCase):
    """Exercise make_template_fragment_key() with zero, one and many vary_on values."""

    def test_without_vary_on(self):
        self.assertEqual(
            make_template_fragment_key('a.fragment'),
            'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e',
        )

    def test_with_one_vary_on(self):
        self.assertEqual(
            make_template_fragment_key('foo', ['abc']),
            'template.cache.foo.900150983cd24fb0d6963f7d28e17f72',
        )

    def test_with_many_vary_on(self):
        self.assertEqual(
            make_template_fragment_key('bar', ['abc', 'def']),
            'template.cache.bar.4b35f12ab03cec09beec4c21b2d2fa88',
        )

    def test_proper_escaping(self):
        # Separator characters inside vary_on values must not collide keys.
        self.assertEqual(
            make_template_fragment_key('spam', ['abc:def%']),
            'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469',
        )
class CacheHandlerTest(TestCase):
    """Behavior of the `caches` handler across repeated and threaded access."""

    def test_same_instance(self):
        """
        Attempting to retrieve the same alias should yield the same instance.
        """
        first = caches['default']
        second = caches['default']
        self.assertIs(first, second)

    def test_per_thread(self):
        """
        Requesting the same alias from separate threads should yield separate
        instances.
        """
        collected = []

        def worker():
            collected.append(caches['default'])

        for _ in range(2):
            thread = threading.Thread(target=worker)
            thread.start()
            thread.join()

        self.assertIsNot(collected[0], collected[1])
# --- file boundary: codeparrot/github-code-clean dataset separator ---
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2010 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
from datetime import datetime
import doctest
from gettext import NullTranslations
import unittest
from genshi.core import Attrs
from genshi.template import MarkupTemplate, Context
from genshi.filters.i18n import Translator, extract
from genshi.input import HTML
from genshi.compat import IS_PYTHON2, StringIO
class DummyTranslations(NullTranslations):
    """Minimal in-memory translations stub for the Translator tests.

    Wraps a plain dict catalog (singular message -> translation; plural
    messages keyed by ``(msgid, plural_index)``) and supports per-domain
    sub-catalogs via :meth:`add_domain`, so the tests can run without
    compiled ``.mo`` files.
    """

    # Shared domain-name -> DummyTranslations mapping. NOTE(review): this is a
    # class attribute, so domains registered on one instance are visible on
    # all instances — presumably acceptable for these tests; confirm if reused.
    _domains = {}

    def __init__(self, catalog=()):
        NullTranslations.__init__(self)
        self._catalog = catalog or {}
        # Germanic plural rule: use the singular form only for exactly 1.
        self.plural = lambda n: n != 1

    def add_domain(self, domain, catalog):
        """Register *catalog* under *domain*, falling back to this catalog."""
        translation = DummyTranslations(catalog)
        translation.add_fallback(self)
        self._domains[domain] = translation

    def _domain_call(self, func, domain, *args, **kwargs):
        # Dispatch to the domain-specific catalog; unknown domains use self.
        return getattr(self._domains.get(domain, self), func)(*args, **kwargs)

    if IS_PYTHON2:
        def ugettext(self, message):
            missing = object()
            tmsg = self._catalog.get(message, missing)
            if tmsg is missing:
                if self._fallback:
                    return self._fallback.ugettext(message)
                return unicode(message)
            return tmsg
    else:
        def gettext(self, message):
            missing = object()
            tmsg = self._catalog.get(message, missing)
            if tmsg is missing:
                if self._fallback:
                    return self._fallback.gettext(message)
                # BUG FIX: the original returned unicode(message), but the
                # `unicode` builtin does not exist on Python 3 (this branch
                # only runs when not IS_PYTHON2), raising NameError for any
                # uncatalogued message without a fallback. str is the Python 3
                # text type, mirroring the Python 2 branch's unicode().
                return str(message)
            return tmsg

    if IS_PYTHON2:
        def dugettext(self, domain, message):
            return self._domain_call('ugettext', domain, message)
    else:
        def dgettext(self, domain, message):
            return self._domain_call('gettext', domain, message)

    def ungettext(self, msgid1, msgid2, n):
        try:
            return self._catalog[(msgid1, self.plural(n))]
        except KeyError:
            if self._fallback:
                return self._fallback.ngettext(msgid1, msgid2, n)
            if n == 1:
                return msgid1
            else:
                return msgid2

    if not IS_PYTHON2:
        # On Python 3 the u-prefixed name is dropped, matching gettext's API.
        ngettext = ungettext
        del ungettext

    if IS_PYTHON2:
        def dungettext(self, domain, singular, plural, numeral):
            return self._domain_call('ungettext', domain, singular, plural, numeral)
    else:
        def dngettext(self, domain, singular, plural, numeral):
            return self._domain_call('ngettext', domain, singular, plural, numeral)
class TranslatorTestCase(unittest.TestCase):
    """Tests for the Translator stream filter: message extraction and translation."""

    def test_translate_included_attribute_text(self):
        """
        Verify that translated attributes end up in a proper `Attrs` instance.
        """
        html = HTML(u"""<html>
<span title="Foo"></span>
</html>""")
        translator = Translator(lambda s: u"Voh")
        stream = list(html.filter(translator))
        kind, data, pos = stream[2]
        assert isinstance(data[1], Attrs)

    def test_extract_without_text(self):
        """With extract_text=False only explicit gettext calls are extracted."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<p title="Bar">Foo</p>
${ngettext("Singular", "Plural", num)}
</html>""")
        translator = Translator(extract_text=False)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((3, 'ngettext', ('Singular', 'Plural', None), []),
                         messages[0])

    def test_extract_plural_form(self):
        """An ngettext() call yields a (singular, plural, None) message tuple."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
${ngettext("Singular", "Plural", num)}
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, 'ngettext', ('Singular', 'Plural', None), []),
                         messages[0])

    def test_extract_funky_plural_form(self):
        """Star-args to ngettext() cannot be resolved; placeholders are None."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
${ngettext(len(items), *widget.display_names)}
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, 'ngettext', (None, None), []), messages[0])

    def test_extract_gettext_with_unicode_string(self):
        """Non-ASCII message arguments survive extraction intact."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
${gettext("Grüße")}
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, 'gettext', u'Gr\xfc\xdfe', []), messages[0])

    def test_extract_included_attribute_text(self):
        """Translatable attributes (e.g. title) are extracted as bare text."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<span title="Foo"></span>
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, None, 'Foo', []), messages[0])

    def test_extract_attribute_expr(self):
        """A gettext call inside an attribute expression is extracted."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<input type="submit" value="${_('Save')}" />
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, '_', 'Save', []), messages[0])

    def test_extract_non_included_attribute_interpolated(self):
        """An interpolated non-translatable attribute (href) is skipped; body text is kept."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<a href="#anchor_${num}">Foo</a>
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, None, 'Foo', []), messages[0])

    def test_extract_text_from_sub(self):
        """Text inside template directives (py:if) is still extracted."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<py:if test="foo">Foo</py:if>
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, None, 'Foo', []), messages[0])

    def test_ignore_tag_with_fixed_xml_lang(self):
        """A literal xml:lang pins the element's language: nothing is extracted."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<p xml:lang="en">(c) 2007 Edgewall Software</p>
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(0, len(messages))

    def test_extract_tag_with_variable_xml_lang(self):
        """A dynamic xml:lang does not suppress extraction."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<p xml:lang="${lang}">(c) 2007 Edgewall Software</p>
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((2, None, '(c) 2007 Edgewall Software', []),
                         messages[0])

    def test_ignore_attribute_with_expression(self):
        """Attributes containing interpolation expressions are not extracted."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/">
<input type="submit" value="Reply" title="Reply to comment $num" />
</html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(0, len(messages))

    def test_translate_with_translations_object(self):
        """A translations object (not a callable) also drives translation."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:comment="As in foo bar">Foo</p>
</html>""")
        translator = Translator(DummyTranslations({'Foo': 'Voh'}))
        translator.setup(tmpl)
        self.assertEqual("""<html>
<p>Voh</p>
</html>""", tmpl.generate().render())
class MsgDirectiveTestCase(unittest.TestCase):
def test_extract_i18n_msg(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help] for details.', messages[0][2])
def test_translate_i18n_msg(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für Details siehe bitte <a href="help.html">Hilfe</a>.</p>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Please see <a href="help.html">Help</a></p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help]', messages[0][2])
def test_translate_i18n_msg_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Please see <a href="help.html">Help</a></p>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für Details siehe bitte <a href="help.html">Hilfe</a></p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_elt_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg>Please see <a href="help.html">Help</a></i18n:msg>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help]', messages[0][2])
def test_translate_i18n_msg_elt_nonewline(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg>Please see <a href="help.html">Help</a></i18n:msg>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
Für Details siehe bitte <a href="help.html">Hilfe</a>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="A helpful paragraph">
Please see <a href="help.html" title="Click for help">Help</a>
</p>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(3, len(messages))
self.assertEqual('A helpful paragraph', messages[0][2])
self.assertEqual(3, messages[0][0])
self.assertEqual('Click for help', messages[1][2])
self.assertEqual(4, messages[1][0])
self.assertEqual('Please see [1:Help]', messages[2][2])
self.assertEqual(3, messages[2][0])
def test_translate_i18n_msg_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="A helpful paragraph">
Please see <a href="help.html" title="Click for help">Help</a>
</p>
</html>""")
translator = Translator(lambda msgid: {
'A helpful paragraph': 'Ein hilfreicher Absatz',
'Click for help': u'Klicken für Hilfe',
'Please see [1:Help]': u'Siehe bitte [1:Hilfe]'
}[msgid])
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p title="Ein hilfreicher Absatz">Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a></p>
</html>""", tmpl.generate().render(encoding=None))
def test_extract_i18n_msg_with_dynamic_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="${_('A helpful paragraph')}">
Please see <a href="help.html" title="${_('Click for help')}">Help</a>
</p>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(3, len(messages))
self.assertEqual('A helpful paragraph', messages[0][2])
self.assertEqual(3, messages[0][0])
self.assertEqual('Click for help', messages[1][2])
self.assertEqual(4, messages[1][0])
self.assertEqual('Please see [1:Help]', messages[2][2])
self.assertEqual(3, messages[2][0])
def test_translate_i18n_msg_with_dynamic_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="${_('A helpful paragraph')}">
Please see <a href="help.html" title="${_('Click for help')}">Help</a>
</p>
</html>""")
translator = Translator(lambda msgid: {
'A helpful paragraph': 'Ein hilfreicher Absatz',
'Click for help': u'Klicken für Hilfe',
'Please see [1:Help]': u'Siehe bitte [1:Hilfe]'
}[msgid])
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p title="Ein hilfreicher Absatz">Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a></p>
</html>""", tmpl.generate(_=translator.translate).render(encoding=None))
def test_extract_i18n_msg_as_element_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg params="">
Please see <a href="help.html" title="Click for help">Help</a>
</i18n:msg>
</html>""")
translator = Translator()
translator.setup(tmpl)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual('Click for help', messages[0][2])
self.assertEqual(4, messages[0][0])
self.assertEqual('Please see [1:Help]', messages[1][2])
self.assertEqual(3, messages[1][0])
def test_translate_i18n_msg_as_element_with_attributes(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:msg params="">
Please see <a href="help.html" title="Click for help">Help</a>
</i18n:msg>
</html>""")
translator = Translator(lambda msgid: {
'Click for help': u'Klicken für Hilfe',
'Please see [1:Help]': u'Siehe bitte [1:Hilfe]'
}[msgid])
translator.setup(tmpl)
self.assertEqual(u"""<html>
Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a>
</html>""", tmpl.generate().render(encoding=None))
def test_extract_i18n_msg_nested(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html"><em>Help</em> page</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:[2:Help] page] for details.',
messages[0][2])
def test_translate_i18n_msg_nested(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html"><em>Help</em> page</a> for details.
</p>
</html>""")
gettext = lambda s: u"Für Details siehe bitte [1:[2:Hilfeseite]]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für Details siehe bitte <a href="help.html"><em>Hilfeseite</em></a>.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_label_with_nested_input(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:msg="">
<label><input type="text" size="3" name="daysback" value="30" /> days back</label>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('[1:[2:] days back]',
messages[0][2])
def test_translate_i18n_msg_label_with_nested_input(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:msg="">
<label><input type="text" size="3" name="daysback" value="30" /> foo bar</label>
</div>
</html>""")
gettext = lambda s: "[1:[2:] foo bar]"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual("""<html>
<div><label><input type="text" size="3" name="daysback" value="30"/> foo bar</label></div>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Show me [1:] entries per page.', messages[0][2])
def test_translate_i18n_msg_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page.
</p>
</html>""")
gettext = lambda s: u"[1:] Einträge pro Seite anzeigen."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p><input type="text" name="num"/> Einträge pro Seite anzeigen.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_multiple(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for <em>details</em>.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Please see [1:Help] for [2:details].', messages[0][2])
def test_translate_i18n_msg_multiple(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Please see <a href="help.html">Help</a> for <em>details</em>.
</p>
</html>""")
gettext = lambda s: u"Für [2:Details] siehe bitte [1:Hilfe]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Für <em>Details</em> siehe bitte <a href="help.html">Hilfe</a>.</p>
</html>""", tmpl.generate().render())
def test_extract_i18n_msg_multiple_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page, starting at page <input type="text" name="num" />.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Show me [1:] entries per page, starting at page [2:].',
messages[0][2])
def test_translate_i18n_msg_multiple_empty(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" /> entries per page, starting at page <input type="text" name="num" />.
</p>
</html>""", encoding='utf-8')
gettext = lambda s: u"[1:] Einträge pro Seite, beginnend auf Seite [2:]."
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p><input type="text" name="num"/> Eintr\u00E4ge pro Seite, beginnend auf Seite <input type="text" name="num"/>.</p>
</html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
def test_extract_i18n_msg_with_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name">
Hello, ${user.name}!
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Hello, %(name)s!', messages[0][2])
def test_translate_i18n_msg_with_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name">
Hello, ${user.name}!
</p>
</html>""")
gettext = lambda s: u"Hallo, %(name)s!"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual("""<html>
<p>Hallo, Jim!</p>
</html>""", tmpl.generate(user=dict(name='Jim')).render())
def test_translate_i18n_msg_with_param_reordered(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name">
Hello, ${user.name}!
</p>
</html>""")
gettext = lambda s: u"%(name)s, sei gegrüßt!"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Jim, sei gegrüßt!</p>
</html>""", tmpl.generate(user=dict(name='Jim')).render())
def test_translate_i18n_msg_with_attribute_param(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Hello, <a href="#${anchor}">dude</a>!
</p>
</html>""")
gettext = lambda s: u"Sei gegrüßt, [1:Alter]!"
translator = Translator(gettext)
translator.setup(tmpl)
self.assertEqual(u"""<html>
<p>Sei gegrüßt, <a href="#42">Alter</a>!</p>
</html>""", tmpl.generate(anchor='42').render())
def test_extract_i18n_msg_with_two_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="name, time">
Posted by ${post.author} at ${entry.time.strftime('%H:%m')}
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Posted by %(name)s at %(time)s', messages[0][2])
    def test_translate_i18n_msg_with_two_params(self):
        """Two placeholders are filled from their respective expressions,
        including a method-call expression (strftime)."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="name, time">
            Written by ${entry.author} at ${entry.time.strftime('%H:%M')}
          </p>
        </html>""")
        gettext = lambda s: u"%(name)s schrieb dies um %(time)s"
        translator = Translator(gettext)
        translator.setup(tmpl)
        entry = {
            'author': 'Jim',
            'time': datetime(2008, 4, 1, 14, 30)
        }
        self.assertEqual("""<html>
          <p>Jim schrieb dies um 14:30</p>
        </html>""", tmpl.generate(entry=entry).render())
def test_extract_i18n_msg_with_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">
Show me <input type="text" name="num" py:attrs="{'value': x}" /> entries per page.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual('Show me [1:] entries per page.', messages[0][2])
    def test_translate_i18n_msg_with_directive(self):
        """The [1:] marker in the translation is replaced by the original
        nested element, with its py:attrs directive applied."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="">
            Show me <input type="text" name="num" py:attrs="{'value': 'x'}" /> entries per page.
          </p>
        </html>""")
        gettext = lambda s: u"[1:] Einträge pro Seite anzeigen."
        translator = Translator(gettext)
        translator.setup(tmpl)
        self.assertEqual(u"""<html>
          <p><input type="text" name="num" value="x"/> Einträge pro Seite anzeigen.</p>
        </html>""", tmpl.generate().render())
    def test_extract_i18n_msg_with_comment(self):
        """An i18n:comment attribute is extracted as a translator comment,
        regardless of the order of the i18n:msg / i18n:comment attributes."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:comment="As in foo bar" i18n:msg="">Foo</p>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((3, None, 'Foo', ['As in foo bar']), messages[0])

        # Same template with the attribute order swapped must extract the
        # identical message/comment pair.
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="" i18n:comment="As in foo bar">Foo</p>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((3, None, 'Foo', ['As in foo bar']), messages[0])
    def test_translate_i18n_msg_with_comment(self):
        """An i18n:comment attribute does not interfere with translation and
        does not appear in the rendered output."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="" i18n:comment="As in foo bar">Foo</p>
        </html>""")
        gettext = lambda s: u"Voh"
        translator = Translator(gettext)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <p>Voh</p>
        </html>""", tmpl.generate().render())
    def test_extract_i18n_msg_with_attr(self):
        """A translatable attribute (title) on the message element is
        extracted as its own message, before the element text."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="" title="Foo bar">Foo</p>
        </html>""")
        translator = Translator()
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(2, len(messages))
        self.assertEqual((3, None, 'Foo bar', []), messages[0])
        self.assertEqual((3, None, 'Foo', []), messages[1])
def test_translate_i18n_msg_with_attr(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" title="Foo bar">Foo</p>
</html>""")
gettext = lambda s: u"Voh"
translator = Translator(DummyTranslations({
'Foo': 'Voh',
'Foo bar': u'Voh bär'
}))
tmpl.filters.insert(0, translator)
tmpl.add_directives(Translator.NAMESPACE, translator)
self.assertEqual(u"""<html>
<p title="Voh bär">Voh</p>
</html>""", tmpl.generate().render())
    def test_translate_i18n_msg_and_py_strip_directives(self):
        """py:strip combined with i18n:msg removes the wrapping element but
        still translates its content, in either attribute order."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="" py:strip="">Foo</p>
          <p py:strip="" i18n:msg="">Foo</p>
        </html>""")
        translator = Translator(DummyTranslations({'Foo': 'Voh'}))
        translator.setup(tmpl)
        self.assertEqual("""<html>
          Voh
          Voh
        </html>""", tmpl.generate().render())
    def test_i18n_msg_ticket_300_extract(self):
        """Regression for ticket #300: the <i18n:msg> element form with a
        `params` attribute extracts expressions as named placeholders."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <i18n:msg params="date, author">
            Changed ${ '10/12/2008' } ago by ${ 'me, the author' }
          </i18n:msg>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual(
            (3, None, 'Changed %(date)s ago by %(author)s', []), messages[0]
        )
    def test_i18n_msg_ticket_300_translate(self):
        """Regression for ticket #300: the <i18n:msg> element form is
        translated with its parameters substituted from the context."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <i18n:msg params="date, author">
            Changed ${ date } ago by ${ author }
          </i18n:msg>
        </html>""")
        translations = DummyTranslations({
            'Changed %(date)s ago by %(author)s': u'Modificado à %(date)s por %(author)s'
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual(u"""<html>
          Modificado à um dia por Pedro
        </html>""".encode('utf-8'), tmpl.generate(date='um dia', author="Pedro").render(encoding='utf-8'))
    def test_i18n_msg_ticket_251_extract(self):
        """Regression for ticket #251: literal square brackets in message text
        are backslash-escaped so they don't clash with [N:...] markers; the
        whitespace around '0' is a non-breaking space (\\xa0)."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg=""><tt><b>Translation[&#160;0&#160;]</b>: <em>One coin</em></tt></p>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual(
            (3, None, u'[1:[2:Translation\\[\xa00\xa0\\]]: [3:One coin]]', []), messages[0]
        )
    def test_i18n_msg_ticket_251_translate(self):
        """Regression for ticket #251: a translation containing escaped
        brackets and nested [N:...] markers is re-expanded correctly."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg=""><tt><b>Translation[&#160;0&#160;]</b>: <em>One coin</em></tt></p>
        </html>""")
        translations = DummyTranslations({
            u'[1:[2:Translation\\[\xa00\xa0\\]]: [3:One coin]]':
                u'[1:[2:Trandução\\[\xa00\xa0\\]]: [3:Uma moeda]]'
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual(u"""<html>
          <p><tt><b>Trandução[ 0 ]</b>: <em>Uma moeda</em></tt></p>
        </html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8'))
    def test_extract_i18n_msg_with_other_directives_nested(self):
        """A py:with directive on the message element and nested markup are
        handled: the message keeps its internal newlines/indentation and the
        nested elements become numbered [N:...] markers."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="" py:with="q = quote_plus(message[:80])">Before you do that, though, please first try
            <strong><a href="${trac.homepage}search?ticket=yes&amp;noquickjump=1&amp;q=$q">searching</a>
            for similar issues</strong>, as it is quite likely that this problem
            has been reported before. For questions about installation
            and configuration of Trac, please try the
            <a href="${trac.homepage}wiki/MailingList">mailing list</a>
            instead of filing a ticket.
          </p>
        </html>""")
        translator = Translator()
        translator.setup(tmpl)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual(
            'Before you do that, though, please first try\n            '
            '[1:[2:searching]\n            for similar issues], as it is '
            'quite likely that this problem\n            has been reported '
            'before. For questions about installation\n            and '
            'configuration of Trac, please try the\n            '
            '[3:mailing list]\n            instead of filing a ticket.',
            messages[0][2]
        )
    def test_translate_i18n_msg_with_other_directives_nested(self):
        """A multi-line message with nested markup is translated as a whole:
        the [N:...] markers in the translation are expanded back to the
        original elements, with their expressions evaluated from the context."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="">Before you do that, though, please first try
            <strong><a href="${trac.homepage}search?ticket=yes&amp;noquickjump=1&amp;q=q">searching</a>
            for similar issues</strong>, as it is quite likely that this problem
            has been reported before. For questions about installation
            and configuration of Trac, please try the
            <a href="${trac.homepage}wiki/MailingList">mailing list</a>
            instead of filing a ticket.
          </p>
        </html>""")
        translations = DummyTranslations({
            'Before you do that, though, please first try\n            '
            '[1:[2:searching]\n            for similar issues], as it is '
            'quite likely that this problem\n            has been reported '
            'before. For questions about installation\n            and '
            'configuration of Trac, please try the\n            '
            '[3:mailing list]\n            instead of filing a ticket.':
                u'Antes de o fazer, porém,\n            '
                u'[1:por favor tente [2:procurar]\n            por problemas semelhantes], uma vez que '
                u'é muito provável que este problema\n            já tenha sido reportado '
                u'anteriormente. Para questões relativas à instalação\n            e '
                u'configuração do Trac, por favor tente a\n            '
                u'[3:mailing list]\n            em vez de criar um assunto.'
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        ctx = Context()
        ctx.push({'trac': {'homepage': 'http://trac.edgewall.org/'}})
        self.assertEqual(u"""<html>
          <p>Antes de o fazer, porém,
            <strong>por favor tente <a href="http://trac.edgewall.org/search?ticket=yes&amp;noquickjump=1&amp;q=q">procurar</a>
            por problemas semelhantes</strong>, uma vez que é muito provável que este problema
            já tenha sido reportado anteriormente. Para questões relativas à instalação
            e configuração do Trac, por favor tente a
            <a href="http://trac.edgewall.org/wiki/MailingList">mailing list</a>
            em vez de criar um assunto.</p>
        </html>""", tmpl.generate(ctx).render())
    def test_i18n_msg_with_other_nested_directives_with_reordered_content(self):
        """Regression, see http://genshi.edgewall.org/ticket/300#comment:10 —
        py:if on the message element is evaluated and the remaining markup is
        translated and re-expanded in the original order."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p py:if="not editable" class="hint" i18n:msg="">
            <strong>Note:</strong> This repository is defined in
            <code><a href="${ 'href.wiki(TracIni)' }">trac.ini</a></code>
            and cannot be edited on this page.
          </p>
        </html>""")
        translations = DummyTranslations({
            '[1:Note:] This repository is defined in\n            '
            '[2:[3:trac.ini]]\n            and cannot be edited on this page.':
                u'[1:Nota:] Este repositório está definido em \n           '
                u'[2:[3:trac.ini]]\n            e não pode ser editado nesta página.',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual(
            '[1:Note:] This repository is defined in\n            '
            '[2:[3:trac.ini]]\n            and cannot be edited on this page.',
            messages[0][2]
        )
        self.assertEqual(u"""<html>
          <p class="hint"><strong>Nota:</strong> Este repositório está definido em 
           <code><a href="href.wiki(TracIni)">trac.ini</a></code>
            e não pode ser editado nesta página.</p>
        </html>""".encode('utf-8'), tmpl.generate(editable=False).render(encoding='utf-8'))
def test_extract_i18n_msg_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" py:strip="">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, None, 'Please see [1:Help] for details.', []),
messages[0])
def test_extract_i18n_msg_with_py_strip_and_comment(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" py:strip="" i18n:comment="Foo">
Please see <a href="help.html">Help</a> for details.
</p>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, None, 'Please see [1:Help] for details.',
['Foo']), messages[0])
    def test_translate_i18n_msg_and_comment_with_py_strip_directives(self):
        """i18n:msg, i18n:comment and py:strip combined: the element is
        stripped and the content translated, in either attribute order."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="" i18n:comment="As in foo bar" py:strip="">Foo</p>
          <p py:strip="" i18n:msg="" i18n:comment="As in foo bar">Foo</p>
        </html>""")
        translator = Translator(DummyTranslations({'Foo': 'Voh'}))
        translator.setup(tmpl)
        self.assertEqual("""<html>
          Voh
          Voh
        </html>""", tmpl.generate().render())
    def test_translate_i18n_msg_ticket_404(self):
        """Regression for ticket #404: text following a nested element
        ("tail" text) inside an i18n:msg must be preserved in the output."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:msg="first,second">
            $first <span>$second</span> KEPT <span>Inside a tag</span> tail
          </p></html>""")
        translator = Translator(DummyTranslations())
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <p>FIRST <span>SECOND</span> KEPT <span>Inside a tag</span> tail"""
          """</p></html>""",
          tmpl.generate(first="FIRST", second="SECOND").render())
class ChooseDirectiveTestCase(unittest.TestCase):
    """Tests for the `i18n:choose` / `i18n:singular` / `i18n:plural`
    directives of the i18n filter (plural-form selection)."""

    def test_translate_i18n_choose_as_attribute(self):
        """The i18n:choose attribute selects singular for numeral 1 and
        plural for any other numeral."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <div i18n:choose="one">
            <p i18n:singular="">FooBar</p>
            <p i18n:plural="">FooBars</p>
          </div>
          <div i18n:choose="two">
            <p i18n:singular="">FooBar</p>
            <p i18n:plural="">FooBars</p>
          </div>
        </html>""")
        translations = DummyTranslations()
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <div>
            <p>FooBar</p>
          </div>
          <div>
            <p>FooBars</p>
          </div>
        </html>""", tmpl.generate(one=1, two=2).render())
    def test_translate_i18n_choose_as_directive(self):
        """The <i18n:choose> element form selects between the singular and
        plural branches based on its `numeral` expression; the directive
        element itself produces no output tag."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="two">
          <p i18n:singular="">FooBar</p>
          <p i18n:plural="">FooBars</p>
        </i18n:choose>
        <i18n:choose numeral="one">
          <p i18n:singular="">FooBar</p>
          <p i18n:plural="">FooBars</p>
        </i18n:choose>
        </html>""")
        translations = DummyTranslations()
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <p>FooBars</p>
          <p>FooBar</p>
        </html>""", tmpl.generate(one=1, two=2).render())
    def test_translate_i18n_choose_as_directive_singular_and_plural_with_strip(self):
        """py:strip on a singular or plural branch only affects the branch
        that is actually selected: chosen+stripped branches render bare text,
        chosen+unstripped branches keep their element."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="two">
          <p i18n:singular="" py:strip="">FooBar Singular with Strip</p>
          <p i18n:plural="">FooBars Plural without Strip</p>
        </i18n:choose>
        <i18n:choose numeral="two">
          <p i18n:singular="">FooBar singular without strip</p>
          <p i18n:plural="" py:strip="">FooBars plural with strip</p>
        </i18n:choose>
        <i18n:choose numeral="one">
          <p i18n:singular="">FooBar singular without strip</p>
          <p i18n:plural="" py:strip="">FooBars plural with strip</p>
        </i18n:choose>
        <i18n:choose numeral="one">
          <p i18n:singular="" py:strip="">FooBar singular with strip</p>
          <p i18n:plural="">FooBars plural without strip</p>
        </i18n:choose>
        </html>""")
        translations = DummyTranslations()
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <p>FooBars Plural without Strip</p>
          FooBars plural with strip
          <p>FooBar singular without strip</p>
          FooBar singular with strip
        </html>""", tmpl.generate(one=1, two=2).render())
    def test_translate_i18n_choose_plural_singular_as_directive(self):
        """Regression for ticket #371: <i18n:singular>/<i18n:plural> as child
        elements (not attributes) are looked up via ungettext — the catalog
        keys are (message, plural-index) tuples."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="two">
          <i18n:singular>FooBar</i18n:singular>
          <i18n:plural>FooBars</i18n:plural>
        </i18n:choose>
        <i18n:choose numeral="one">
          <i18n:singular>FooBar</i18n:singular>
          <i18n:plural>FooBars</i18n:plural>
        </i18n:choose>
        </html>""")
        translations = DummyTranslations({
            ('FooBar', 0): 'FuBar',
            ('FooBars', 1): 'FuBars',
            'FooBar': 'FuBar',
            'FooBars': 'FuBars',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          FuBars
          FuBar
        </html>""", tmpl.generate(one=1, two=2).render())
    def test_translate_i18n_choose_as_attribute_with_params(self):
        """i18n:choose with "numeral; params" syntax: the plural branch is
        selected (two=2) and its %(fname)s/%(lname)s placeholders filled."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <div i18n:choose="two; fname, lname">
            <p i18n:singular="">Foo $fname $lname</p>
            <p i18n:plural="">Foos $fname $lname</p>
          </div>
        </html>""")
        translations = DummyTranslations({
            ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
            ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
            'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
            'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <div>
            <p>Vohs John Doe</p>
          </div>
        </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
    def test_translate_i18n_choose_as_attribute_with_params_and_domain_as_param(self):
        """An i18n:domain attribute on the root element routes the choose
        lookup to that domain's catalog."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n"
            i18n:domain="foo">
          <div i18n:choose="two; fname, lname">
            <p i18n:singular="">Foo $fname $lname</p>
            <p i18n:plural="">Foos $fname $lname</p>
          </div>
        </html>""")
        translations = DummyTranslations()
        translations.add_domain('foo', {
            ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
            ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
            'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
            'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <div>
            <p>Vohs John Doe</p>
          </div>
        </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
    def test_translate_i18n_choose_as_directive_with_params(self):
        """<i18n:choose> element form with a `params` attribute: plural is
        chosen for numeral 2, singular for numeral 1, placeholders filled."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="two" params="fname, lname">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        <i18n:choose numeral="one" params="fname, lname">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        </html>""")
        translations = DummyTranslations({
            ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
            ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
            'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
            'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <p>Vohs John Doe</p>
          <p>Voh John Doe</p>
        </html>""", tmpl.generate(one=1, two=2,
                                  fname='John', lname='Doe').render())
    def test_translate_i18n_choose_as_directive_with_params_and_domain_as_directive(self):
        """<i18n:domain> element form scopes the catalog: the choose inside
        the domain is translated, the one outside falls back to the
        untranslated message ("Foo John Doe")."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:domain name="foo">
        <i18n:choose numeral="two" params="fname, lname">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        </i18n:domain>
        <i18n:choose numeral="one" params="fname, lname">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        </html>""")
        translations = DummyTranslations()
        translations.add_domain('foo', {
            ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
            ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
            'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
            'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <p>Vohs John Doe</p>
          <p>Foo John Doe</p>
        </html>""", tmpl.generate(one=1, two=2,
                                  fname='John', lname='Doe').render())
def test_extract_i18n_choose_as_attribute(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="one">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</div>
<div i18n:choose="two">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0])
self.assertEqual((7, 'ngettext', ('FooBar', 'FooBars'), []), messages[1])
def test_extract_i18n_choose_as_directive(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</i18n:choose>
<i18n:choose numeral="one">
<p i18n:singular="">FooBar</p>
<p i18n:plural="">FooBars</p>
</i18n:choose>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0])
self.assertEqual((7, 'ngettext', ('FooBar', 'FooBars'), []), messages[1])
def test_extract_i18n_choose_as_attribute_with_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[0])
    def test_extract_i18n_choose_as_attribute_with_params_and_domain_as_param(self):
        """An i18n:domain attribute does not change extraction, only the
        reported line number (the root tag spans an extra line here)."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n"
            i18n:domain="foo">
          <div i18n:choose="two; fname, lname">
            <p i18n:singular="">Foo $fname $lname</p>
            <p i18n:plural="">Foos $fname $lname</p>
          </div>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((4, 'ngettext', ('Foo %(fname)s %(lname)s',
                                          'Foos %(fname)s %(lname)s'), []),
                         messages[0])
def test_extract_i18n_choose_as_directive_with_params(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:choose numeral="two" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
<i18n:choose numeral="one" params="fname, lname">
<p i18n:singular="">Foo ${fname} ${lname}</p>
<p i18n:plural="">Foos ${fname} ${lname}</p>
</i18n:choose>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(2, len(messages))
self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[0])
self.assertEqual((7, 'ngettext', ('Foo %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s'), []),
messages[1])
    def test_extract_i18n_choose_as_directive_with_params_and_domain_as_directive(self):
        """The <i18n:domain> wrapper does not affect what is extracted, only
        the line numbers of the messages inside it."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:domain name="foo">
        <i18n:choose numeral="two" params="fname, lname">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        </i18n:domain>
        <i18n:choose numeral="one" params="fname, lname">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(2, len(messages))
        self.assertEqual((4, 'ngettext', ('Foo %(fname)s %(lname)s',
                                          'Foos %(fname)s %(lname)s'), []),
                         messages[0])
        self.assertEqual((9, 'ngettext', ('Foo %(fname)s %(lname)s',
                                          'Foos %(fname)s %(lname)s'), []),
                         messages[1])
    def test_extract_i18n_choose_as_attribute_with_params_and_comment(self):
        """i18n:comment on an i18n:choose attribute block attaches a
        translator comment to the extracted ngettext pair."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <div i18n:choose="two; fname, lname" i18n:comment="As in Foo Bar">
            <p i18n:singular="">Foo $fname $lname</p>
            <p i18n:plural="">Foos $fname $lname</p>
          </div>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
                                          'Foos %(fname)s %(lname)s'),
                          ['As in Foo Bar']),
                         messages[0])
    def test_extract_i18n_choose_as_directive_with_params_and_comment(self):
        """i18n:comment on the <i18n:choose> element form attaches a
        translator comment to the extracted ngettext pair."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="two" params="fname, lname" i18n:comment="As in Foo Bar">
          <p i18n:singular="">Foo ${fname} ${lname}</p>
          <p i18n:plural="">Foos ${fname} ${lname}</p>
        </i18n:choose>
        </html>""")
        translator = Translator()
        tmpl.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(1, len(messages))
        self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s',
                                          'Foos %(fname)s %(lname)s'),
                          ['As in Foo Bar']),
                         messages[0])
    def test_extract_i18n_choose_with_attributes(self):
        """Translatable attributes inside a choose block (title on the
        element, title on nested links) are extracted as separate messages in
        addition to the ngettext pair."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:choose="num; num" title="Things">
            <i18n:singular>
              There is <a href="$link" title="View thing">${num} thing</a>.
            </i18n:singular>
            <i18n:plural>
              There are <a href="$link" title="View things">${num} things</a>.
            </i18n:plural>
          </p>
        </html>""")
        translator = Translator()
        translator.setup(tmpl)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(4, len(messages))
        self.assertEqual((3, None, 'Things', []), messages[0])
        self.assertEqual((5, None, 'View thing', []), messages[1])
        self.assertEqual((8, None, 'View things', []), messages[2])
        self.assertEqual(
            (3, 'ngettext', ('There is [1:%(num)s thing].',
                             'There are [1:%(num)s things].'), []),
            messages[3])
    def test_translate_i18n_choose_with_attributes(self):
        """Attributes and the choose pair are translated together: singular
        for num=1, plural for num=3 (the [1:...] marker re-expands the link)."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <p i18n:choose="num; num" title="Things">
            <i18n:singular>
              There is <a href="$link" title="View thing">${num} thing</a>.
            </i18n:singular>
            <i18n:plural>
              There are <a href="$link" title="View things">${num} things</a>.
            </i18n:plural>
          </p>
        </html>""")
        translations = DummyTranslations({
            'Things': 'Sachen',
            'View thing': 'Sache betrachten',
            'View things': 'Sachen betrachten',
            ('There is [1:%(num)s thing].', 0): 'Da ist [1:%(num)s Sache].',
            ('There is [1:%(num)s thing].', 1): 'Da sind [1:%(num)s Sachen].'
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual(u"""<html>
          <p title="Sachen">
            Da ist <a href="/things" title="Sache betrachten">1 Sache</a>.
          </p>
        </html>""", tmpl.generate(link="/things", num=1).render(encoding=None))
        self.assertEqual(u"""<html>
          <p title="Sachen">
            Da sind <a href="/things" title="Sachen betrachten">3 Sachen</a>.
          </p>
        </html>""", tmpl.generate(link="/things", num=3).render(encoding=None))
    def test_extract_i18n_choose_as_element_with_attributes(self):
        """With i18n:singular/i18n:plural as attributes on separate <p>
        elements, the title of each branch is extracted separately (hence two
        'Things' messages) plus the ngettext pair."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="num" params="num">
          <p i18n:singular="" title="Things">
            There is <a href="$link" title="View thing">${num} thing</a>.
          </p>
          <p i18n:plural="" title="Things">
            There are <a href="$link" title="View things">${num} things</a>.
          </p>
        </i18n:choose>
        </html>""")
        translator = Translator()
        translator.setup(tmpl)
        messages = list(translator.extract(tmpl.stream))
        self.assertEqual(5, len(messages))
        self.assertEqual((4, None, 'Things', []), messages[0])
        self.assertEqual((5, None, 'View thing', []), messages[1])
        self.assertEqual((7, None, 'Things', []), messages[2])
        self.assertEqual((8, None, 'View things', []), messages[3])
        self.assertEqual(
            (3, 'ngettext', ('There is [1:%(num)s thing].',
                             'There are [1:%(num)s things].'), []),
            messages[4])
    def test_translate_i18n_choose_as_element_with_attributes(self):
        """Per-branch <p> elements keep their (translated) attributes; the
        selected branch is rendered with the translated choose message."""
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
        <i18n:choose numeral="num" params="num">
          <p i18n:singular="" title="Things">
            There is <a href="$link" title="View thing">${num} thing</a>.
          </p>
          <p i18n:plural="" title="Things">
            There are <a href="$link" title="View things">${num} things</a>.
          </p>
        </i18n:choose>
        </html>""")
        translations = DummyTranslations({
            'Things': 'Sachen',
            'View thing': 'Sache betrachten',
            'View things': 'Sachen betrachten',
            ('There is [1:%(num)s thing].', 0): 'Da ist [1:%(num)s Sache].',
            ('There is [1:%(num)s thing].', 1): 'Da sind [1:%(num)s Sachen].'
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual(u"""<html>
          <p title="Sachen">Da ist <a href="/things" title="Sache betrachten">1 Sache</a>.</p>
        </html>""", tmpl.generate(link="/things", num=1).render(encoding=None))
        self.assertEqual(u"""<html>
          <p title="Sachen">Da sind <a href="/things" title="Sachen betrachten">3 Sachen</a>.</p>
        </html>""", tmpl.generate(link="/things", num=3).render(encoding=None))
    def test_translate_i18n_choose_and_py_strip(self):
        """Baseline choose translation with params (plural branch chosen).

        NOTE(review): despite the method name, this template contains no
        py:strip directive — looks like the name was carried over from the
        neighbouring tests; behavior tested is plain choose translation.
        """
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n">
          <div i18n:choose="two; fname, lname">
            <p i18n:singular="">Foo $fname $lname</p>
            <p i18n:plural="">Foos $fname $lname</p>
          </div>
        </html>""")
        translations = DummyTranslations({
            ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
            ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
            'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
            'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <div>
            <p>Vohs John Doe</p>
          </div>
        </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
    def test_translate_i18n_choose_and_domain_and_py_strip(self):
        """Choose translation routed through an i18n:domain attribute.

        NOTE(review): as with the previous test, the template contains no
        py:strip despite the method name.
        """
        tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
            xmlns:i18n="http://genshi.edgewall.org/i18n"
            i18n:domain="foo">
          <div i18n:choose="two; fname, lname">
            <p i18n:singular="">Foo $fname $lname</p>
            <p i18n:plural="">Foos $fname $lname</p>
          </div>
        </html>""")
        translations = DummyTranslations()
        translations.add_domain('foo', {
            ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
            ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
            'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
            'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
        })
        translator = Translator(translations)
        translator.setup(tmpl)
        self.assertEqual("""<html>
          <div>
            <p>Vohs John Doe</p>
          </div>
        </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render())
def test_translate_i18n_choose_and_singular_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="" py:strip="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
<div i18n:choose="one; fname, lname">
<p i18n:singular="" py:strip="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
<p>Vohs John Doe</p>
</div>
<div>
Voh John Doe
</div>
</html>""", tmpl.generate(
one=1, two=2, fname='John',lname='Doe').render())
def test_translate_i18n_choose_and_plural_with_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="two; fname, lname">
<p i18n:singular="" py:strip="">Foo $fname $lname</p>
<p i18n:plural="">Foos $fname $lname</p>
</div>
</html>""")
translations = DummyTranslations({
('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s',
('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s',
'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s',
'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s',
})
translator = Translator(translations)
translator.setup(tmpl)
self.assertEqual("""<html>
<div>
Voh John Doe
</div>
</html>""", tmpl.generate(two=1, fname='John', lname='Doe').render())
def test_extract_i18n_choose_as_attribute_and_py_strip(self):
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:choose="one" py:strip="">
<p i18n:singular="" py:strip="">FooBar</p>
<p i18n:plural="" py:strip="">FooBars</p>
</div>
</html>""")
translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)
messages = list(translator.extract(tmpl.stream))
self.assertEqual(1, len(messages))
self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0])
class DomainDirectiveTestCase(unittest.TestCase):
    """Tests for the ``i18n:domain`` directive: nesting, interaction with
    ``i18n:msg``/``py:strip``, and propagation through XInclude chains."""

    def test_translate_i18n_domain_with_msg_directives(self):
        # Messages nested in an i18n:domain element resolve through that
        # domain's catalog, not the default one.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:msg="">Bar</p>
</div>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<div>
<p>BarFoo</p>
<p>PT_Foo</p>
</div>
</html>""", template.generate().render())

    def test_translate_i18n_domain_with_inline_directives(self):
        # i18n:domain and i18n:msg may be combined on one element.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="" i18n:domain="foo">FooBar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<p>BarFoo</p>
</html>""", template.generate().render())

    def test_translate_i18n_domain_without_msg_directives(self):
        # Text inside the domain element uses the domain catalog even
        # without an explicit i18n:msg; text outside falls back to the
        # default catalog.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Bar</p>
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:msg="">Bar</p>
<p>Bar</p>
</div>
<p>Bar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<p>Voh</p>
<div>
<p>BarFoo</p>
<p>PT_Foo</p>
<p>PT_Foo</p>
</div>
<p>Voh</p>
</html>""", template.generate().render())

    def test_translate_i18n_domain_as_directive_not_attribute(self):
        # The domain can also be given as an <i18n:domain name="..."> element
        # instead of an attribute; the element itself leaves no trace in the
        # output.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<i18n:domain name="foo">
<p i18n:msg="">FooBar</p>
<p i18n:msg="">Bar</p>
<p>Bar</p>
</i18n:domain>
<p>Bar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<p>BarFoo</p>
<p>PT_Foo</p>
<p>PT_Foo</p>
<p>Voh</p>
</html>""", template.generate().render())

    def test_translate_i18n_domain_nested_directives(self):
        # An inner i18n:domain overrides the outer one for its element only;
        # siblings keep the outer domain.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Bar</p>
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:domain="bar" i18n:msg="">Bar</p>
<p>Bar</p>
</div>
<p>Bar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'foo_Bar'})
        catalog.add_domain('bar', {'Bar': 'bar_Bar'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<p>Voh</p>
<div>
<p>BarFoo</p>
<p>bar_Bar</p>
<p>foo_Bar</p>
</div>
<p>Voh</p>
</html>""", template.generate().render())

    def test_translate_i18n_domain_with_empty_nested_domain_directive(self):
        # An empty i18n:domain="" resets lookups to the default catalog
        # within its element.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n">
<p i18n:msg="">Bar</p>
<div i18n:domain="foo">
<p i18n:msg="">FooBar</p>
<p i18n:domain="" i18n:msg="">Bar</p>
<p>Bar</p>
</div>
<p>Bar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'foo_Bar'})
        catalog.add_domain('bar', {'Bar': 'bar_Bar'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<p>Voh</p>
<div>
<p>BarFoo</p>
<p>Voh</p>
<p>foo_Bar</p>
</div>
<p>Voh</p>
</html>""", template.generate().render())

    def test_translate_i18n_domain_with_inline_directive_on_START_NS(self):
        # i18n:domain declared on the root element covers the whole document.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo">
<p i18n:msg="">FooBar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo'})
        Translator(catalog).setup(template)
        self.assertEqual("""<html>
<p>BarFoo</p>
</html>""", template.generate().render())

    def test_translate_i18n_domain_with_inline_directive_on_START_NS_with_py_strip(self):
        # Root-level i18n:domain combined with py:strip: the <html> tag is
        # dropped, but the domain lookup still applies to the contents.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo" py:strip="">
<p i18n:msg="">FooBar</p>
</html>""")
        catalog = DummyTranslations({'Bar': 'Voh'})
        catalog.add_domain('foo', {'FooBar': 'BarFoo'})
        Translator(catalog).setup(template)
        self.assertEqual("""
<p>BarFoo</p>
""", template.generate().render())

    def test_translate_i18n_domain_with_nested_includes(self):
        # A chain of XIncluded templates: the including file's i18n:domain
        # ("foo") must hold inside every included template, while nested
        # overrides ("bar" and the empty domain) still win locally.
        import os, shutil, tempfile
        from genshi.template.loader import TemplateLoader
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            for idx in range(7):
                with open(os.path.join(dirname, 'tmpl%d.html' % idx), 'w') as fileobj:
                    fileobj.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n" py:strip="">
<div>Included tmpl$idx</div>
<p i18n:msg="idx">Bar $idx</p>
<p i18n:domain="bar">Bar</p>
<p i18n:msg="idx" i18n:domain="">Bar $idx</p>
<p i18n:domain="" i18n:msg="idx">Bar $idx</p>
<py:if test="idx < 6">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</py:if>
</html>""")
            with open(os.path.join(dirname, 'tmpl10.html'), 'w') as fileobj:
                fileobj.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</html>""")

            def callback(template):
                # Fresh catalog per loaded template; wires the translator in.
                catalog = DummyTranslations({'Bar %(idx)s': 'Voh %(idx)s'})
                catalog.add_domain('foo', {'Bar %(idx)s': 'foo_Bar %(idx)s'})
                catalog.add_domain('bar', {'Bar': 'bar_Bar'})
                Translator(catalog).setup(template)

            loader = TemplateLoader([dirname], callback=callback)
            template = loader.load('tmpl10.html')
            self.assertEqual("""<html>
<div>Included tmpl0</div>
<p>foo_Bar 0</p>
<p>bar_Bar</p>
<p>Voh 0</p>
<p>Voh 0</p>
<div>Included tmpl1</div>
<p>foo_Bar 1</p>
<p>bar_Bar</p>
<p>Voh 1</p>
<p>Voh 1</p>
<div>Included tmpl2</div>
<p>foo_Bar 2</p>
<p>bar_Bar</p>
<p>Voh 2</p>
<p>Voh 2</p>
<div>Included tmpl3</div>
<p>foo_Bar 3</p>
<p>bar_Bar</p>
<p>Voh 3</p>
<p>Voh 3</p>
<div>Included tmpl4</div>
<p>foo_Bar 4</p>
<p>bar_Bar</p>
<p>Voh 4</p>
<p>Voh 4</p>
<div>Included tmpl5</div>
<p>foo_Bar 5</p>
<p>bar_Bar</p>
<p>Voh 5</p>
<p>Voh 5</p>
<div>Included tmpl6</div>
<p>foo_Bar 6</p>
<p>bar_Bar</p>
<p>Voh 6</p>
<p>Voh 6</p>
</html>""", template.generate(idx=-1).render())
        finally:
            shutil.rmtree(dirname)

    def test_translate_i18n_domain_with_nested_includes_with_translatable_attrs(self):
        # Same include chain, but with translatable title attributes; the
        # dg() callable passed to generate() performs explicit domain
        # lookups from inside the template.
        import os, shutil, tempfile
        from genshi.template.loader import TemplateLoader
        dirname = tempfile.mkdtemp(suffix='genshi_test')
        try:
            for idx in range(4):
                with open(os.path.join(dirname, 'tmpl%d.html' % idx), 'w') as fileobj:
                    fileobj.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n" py:strip="">
<div>Included tmpl$idx</div>
<p title="${dg('foo', 'Bar %(idx)s') % dict(idx=idx)}" i18n:msg="idx">Bar $idx</p>
<p title="Bar" i18n:domain="bar">Bar</p>
<p title="Bar" i18n:msg="idx" i18n:domain="">Bar $idx</p>
<p i18n:msg="idx" i18n:domain="" title="Bar">Bar $idx</p>
<p i18n:domain="" i18n:msg="idx" title="Bar">Bar $idx</p>
<py:if test="idx < 3">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</py:if>
</html>""")
            with open(os.path.join(dirname, 'tmpl10.html'), 'w') as fileobj:
                fileobj.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude"
xmlns:py="http://genshi.edgewall.org/"
xmlns:i18n="http://genshi.edgewall.org/i18n"
i18n:domain="foo">
<xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/>
</html>""")
            catalog = DummyTranslations({'Bar %(idx)s': 'Voh %(idx)s',
                                         'Bar': 'Voh'})
            catalog.add_domain('foo', {'Bar %(idx)s': 'foo_Bar %(idx)s'})
            catalog.add_domain('bar', {'Bar': 'bar_Bar'})
            translator = Translator(catalog)

            def callback(template):
                translator.setup(template)

            loader = TemplateLoader([dirname], callback=callback)
            template = loader.load('tmpl10.html')
            # Pick the unicode-aware variant on Python 2.
            if IS_PYTHON2:
                dgettext = catalog.dugettext
            else:
                dgettext = catalog.dgettext
            self.assertEqual("""<html>
<div>Included tmpl0</div>
<p title="foo_Bar 0">foo_Bar 0</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 0</p>
<p title="Voh">Voh 0</p>
<p title="Voh">Voh 0</p>
<div>Included tmpl1</div>
<p title="foo_Bar 1">foo_Bar 1</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 1</p>
<p title="Voh">Voh 1</p>
<p title="Voh">Voh 1</p>
<div>Included tmpl2</div>
<p title="foo_Bar 2">foo_Bar 2</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 2</p>
<p title="Voh">Voh 2</p>
<p title="Voh">Voh 2</p>
<div>Included tmpl3</div>
<p title="foo_Bar 3">foo_Bar 3</p>
<p title="bar_Bar">bar_Bar</p>
<p title="Voh">Voh 3</p>
<p title="Voh">Voh 3</p>
<p title="Voh">Voh 3</p>
</html>""", template.generate(idx=-1, dg=dgettext).render())
        finally:
            shutil.rmtree(dirname)
class ExtractTestCase(unittest.TestCase):
    """Tests for the Babel-style ``extract`` message-extraction entry point
    and for ``Translator.extract`` on a template stream."""

    def test_markup_template_extraction(self):
        # Both plain markup text and explicit gettext/ngettext calls are
        # reported, with line numbers and (for ngettext) a tuple message.
        source = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<head>
<title>Example</title>
</head>
<body>
<h1>Example</h1>
<p>${_("Hello, %(name)s") % dict(name=username)}</p>
<p>${ngettext("You have %d item", "You have %d items", num)}</p>
</body>
</html>""")
        messages = list(extract(source, ['_', 'ngettext'], [], {}))
        self.assertEqual([
            (3, None, 'Example', []),
            (6, None, 'Example', []),
            (7, '_', 'Hello, %(name)s', []),
            (8, 'ngettext', ('You have %d item', 'You have %d items', None),
             []),
        ], messages)

    def test_extraction_without_text(self):
        # With extract_text disabled, only explicit gettext-style calls are
        # collected; markup text and attributes are skipped.
        source = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<p title="Bar">Foo</p>
${ngettext("Singular", "Plural", num)}
</html>""")
        messages = list(extract(source, ['_', 'ngettext'], [],
                                {'extract_text': 'no'}))
        self.assertEqual([
            (3, 'ngettext', ('Singular', 'Plural', None), []),
        ], messages)

    def test_text_template_extraction(self):
        # Extraction also works for text templates via template_class.
        source = StringIO("""${_("Dear %(name)s") % {'name': name}},
${ngettext("Your item:", "Your items", len(items))}
#for item in items
* $item
#end
All the best,
Foobar""")
        messages = list(extract(source, ['_', 'ngettext'], [],
                                {'template_class': 'genshi.template:TextTemplate'}))
        self.assertEqual([
            (1, '_', 'Dear %(name)s', []),
            (3, 'ngettext', ('Your item:', 'Your items', None), []),
            (7, None, 'All the best,\n Foobar', []),
        ], messages)

    def test_extraction_with_keyword_arg(self):
        # Keyword arguments to the gettext call are ignored; only the
        # positional string literal is extracted.
        source = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
${gettext('Foobar', foo='bar')}
</html>""")
        messages = list(extract(source, ['gettext'], [], {}))
        self.assertEqual([
            (2, 'gettext', 'Foobar', []),
        ], messages)

    def test_extraction_with_nonstring_arg(self):
        # Non-literal arguments (here a variable domain) are reported as
        # None placeholders in the message tuple.
        source = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
${dgettext(curdomain, 'Foobar')}
</html>""")
        messages = list(extract(source, ['dgettext'], [], {}))
        self.assertEqual([
            (2, 'dgettext', (None, 'Foobar'), []),
        ], messages)

    def test_extraction_inside_ignored_tags(self):
        # Literal text inside <script> is not extracted, but explicit _()
        # calls embedded in it still are.
        source = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<script type="text/javascript">
$('#llist').tabs({
remote: true,
spinner: "${_('Please wait...')}"
});
</script>
</html>""")
        messages = list(extract(source, ['_'], [], {}))
        self.assertEqual([
            (5, '_', 'Please wait...', []),
        ], messages)

    def test_extraction_inside_ignored_tags_with_directives(self):
        # Directives inside an ignored tag must not expose their text.
        source = StringIO("""<html xmlns:py="http://genshi.edgewall.org/">
<script type="text/javascript">
<py:if test="foobar">
alert("This shouldn't be extracted");
</py:if>
</script>
</html>""")
        self.assertEqual([], list(extract(source, ['_'], [], {})))

    def test_extract_py_def_directive_with_py_strip(self):
        # Regression test from Trac: extraction inside a py:def under a
        # stripped root element.
        template = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" py:strip="">
<py:def function="diff_options_fields(diff)">
<label for="style">View differences</label>
<select id="style" name="style">
<option selected="${diff.style == 'inline' or None}"
value="inline">inline</option>
<option selected="${diff.style == 'sidebyside' or None}"
value="sidebyside">side by side</option>
</select>
<div class="field">
Show <input type="text" name="contextlines" id="contextlines" size="2"
maxlength="3" value="${diff.options.contextlines < 0 and 'all' or diff.options.contextlines}" />
<label for="contextlines">lines around each change</label>
</div>
<fieldset id="ignore" py:with="options = diff.options">
<legend>Ignore:</legend>
<div class="field">
<input type="checkbox" id="ignoreblanklines" name="ignoreblanklines"
checked="${options.ignoreblanklines or None}" />
<label for="ignoreblanklines">Blank lines</label>
</div>
<div class="field">
<input type="checkbox" id="ignorecase" name="ignorecase"
checked="${options.ignorecase or None}" />
<label for="ignorecase">Case changes</label>
</div>
<div class="field">
<input type="checkbox" id="ignorewhitespace" name="ignorewhitespace"
checked="${options.ignorewhitespace or None}" />
<label for="ignorewhitespace">White space changes</label>
</div>
</fieldset>
<div class="buttons">
<input type="submit" name="update" value="${_('Update')}" />
</div>
</py:def></html>""")
        translator = Translator()
        template.add_directives(Translator.NAMESPACE, translator)
        messages = list(translator.extract(template.stream))
        self.assertEqual(10, len(messages))
        self.assertEqual([
            (3, None, 'View differences', []),
            (6, None, 'inline', []),
            (8, None, 'side by side', []),
            (10, None, 'Show', []),
            (13, None, 'lines around each change', []),
            (16, None, 'Ignore:', []),
            (20, None, 'Blank lines', []),
            (25, None, 'Case changes', []),
            (30, None, 'White space changes', []),
            (34, '_', 'Update', []),
        ], messages)
def suite():
    """Build the full test suite for the i18n translator module.

    Returns a :class:`unittest.TestSuite` combining the module doctests
    with every TestCase defined in this file.

    ``unittest.makeSuite`` was deprecated in Python 3.11 and removed in
    3.13; ``TestLoader.loadTestsFromTestCase`` is the long-standing
    equivalent (same default ``test`` method prefix and sorting), so the
    suite is built with it instead.
    """
    loader = unittest.defaultTestLoader
    suite = unittest.TestSuite()
    suite.addTest(doctest.DocTestSuite(Translator.__module__))
    for case in (TranslatorTestCase, MsgDirectiveTestCase,
                 ChooseDirectiveTestCase, DomainDirectiveTestCase,
                 ExtractTestCase):
        suite.addTest(loader.loadTestsFromTestCase(case))
    return suite
if __name__ == '__main__':
    # Run the aggregate suite defined above when executed as a script.
    unittest.main(defaultTest='suite')
# NOTE(review): the following non-code text was appended by the dataset /
# hosting platform ("codeparrot/github-code-clean", "Subsets and Splits",
# "No community queries yet", ...) and is not part of the original test
# module; it is preserved here only as a comment so the file stays parseable.