repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
mKeRix/home-assistant | homeassistant/components/bbox/sensor.py | 16 | 6275 | """Support for Bbox Bouygues Modem Router."""
from datetime import timedelta
import logging
import pybbox
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_MONITORED_VARIABLES,
CONF_NAME,
DATA_RATE_MEGABITS_PER_SECOND,
DEVICE_CLASS_TIMESTAMP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
from homeassistant.util.dt import utcnow
_LOGGER = logging.getLogger(__name__)

ATTRIBUTION = "Powered by Bouygues Telecom"
DEFAULT_NAME = "Bbox"
# Minimum interval between two modem queries (shared by all sensors via BboxData)
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)

# Sensor types are defined like so: Name, unit, icon
SENSOR_TYPES = {
    "down_max_bandwidth": [
        "Maximum Download Bandwidth",
        DATA_RATE_MEGABITS_PER_SECOND,
        "mdi:download",
    ],
    "up_max_bandwidth": [
        "Maximum Upload Bandwidth",
        DATA_RATE_MEGABITS_PER_SECOND,
        "mdi:upload",
    ],
    "current_down_bandwidth": [
        "Currently Used Download Bandwidth",
        DATA_RATE_MEGABITS_PER_SECOND,
        "mdi:download",
    ],
    "current_up_bandwidth": [
        "Currently Used Upload Bandwidth",
        DATA_RATE_MEGABITS_PER_SECOND,
        "mdi:upload",
    ],
    "uptime": ["Uptime", None, "mdi:clock"],
    "number_of_reboots": ["Number of reboot", None, "mdi:restart"],
}

# YAML schema: at least one monitored variable is required; the entity name
# prefix is optional and defaults to "Bbox"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_MONITORED_VARIABLES): vol.All(
            cv.ensure_list, [vol.In(SENSOR_TYPES)]
        ),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Bbox sensor platform from the YAML configuration."""
    # Build the shared data fetcher and prime it with a first call; abort the
    # platform setup if the modem cannot be reached.
    try:
        bbox_data = BboxData()
        bbox_data.update()
    except requests.exceptions.HTTPError as error:
        _LOGGER.error(error)
        return False

    name = config[CONF_NAME]
    # "uptime" is a timestamp sensor; every other monitored variable is a
    # plain numeric sensor.
    entities = [
        BboxUptimeSensor(bbox_data, variable, name)
        if variable == "uptime"
        else BboxSensor(bbox_data, variable, name)
        for variable in config[CONF_MONITORED_VARIABLES]
    ]
    add_entities(entities, True)
class BboxUptimeSensor(Entity):
    """Sensor exposing the Bbox router boot time (uptime as a timestamp)."""

    def __init__(self, bbox_data, sensor_type, name):
        """Initialize the sensor from the SENSOR_TYPES metadata."""
        sensor_meta = SENSOR_TYPES[sensor_type]
        self.client_name = name
        self.type = sensor_type
        self._name = sensor_meta[0]
        self._unit_of_measurement = sensor_meta[1]
        self._icon = sensor_meta[2]
        self.bbox_data = bbox_data
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self.client_name} {self._name}"

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}

    @property
    def device_class(self):
        """Return the class of this sensor."""
        return DEVICE_CLASS_TIMESTAMP

    def update(self):
        """Refresh the shared data and derive the boot timestamp."""
        self.bbox_data.update()
        uptime_seconds = self.bbox_data.router_infos["device"]["uptime"]
        boot_time = utcnow() - timedelta(seconds=uptime_seconds)
        self._state = boot_time.replace(microsecond=0).isoformat()
class BboxSensor(Entity):
    """Generic Bbox bandwidth/counter sensor."""

    # sensor type -> (ip-stats section, key); raw values are in kbit/s
    _BANDWIDTH_KEYS = {
        "down_max_bandwidth": ("rx", "maxBandwidth"),
        "up_max_bandwidth": ("tx", "maxBandwidth"),
        "current_down_bandwidth": ("rx", "bandwidth"),
        "current_up_bandwidth": ("tx", "bandwidth"),
    }

    def __init__(self, bbox_data, sensor_type, name):
        """Initialize the sensor from the SENSOR_TYPES metadata."""
        sensor_meta = SENSOR_TYPES[sensor_type]
        self.client_name = name
        self.type = sensor_type
        self._name = sensor_meta[0]
        self._unit_of_measurement = sensor_meta[1]
        self._icon = sensor_meta[2]
        self.bbox_data = bbox_data
        self._state = None

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self.client_name} {self._name}"

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def device_state_attributes(self):
        """Return the state attributes."""
        return {ATTR_ATTRIBUTION: ATTRIBUTION}

    def update(self):
        """Get the latest data from Bbox and update the state."""
        self.bbox_data.update()
        if self.type in self._BANDWIDTH_KEYS:
            section, key = self._BANDWIDTH_KEYS[self.type]
            # convert kbit/s -> Mbit/s, rounded to 2 decimals
            self._state = round(self.bbox_data.data[section][key] / 1000, 2)
        elif self.type == "number_of_reboots":
            self._state = self.bbox_data.router_infos["device"]["numberofboots"]
class BboxData:
    """Fetch and cache the ip-stats and router info from the Bbox modem."""

    def __init__(self):
        """Initialize the data object with empty caches."""
        self.data = None
        self.router_infos = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Query the modem; on HTTP failure log, clear the caches and bail out."""
        try:
            box = pybbox.Bbox()
            self.data = box.get_ip_stats()
            self.router_infos = box.get_bbox_info()
        except requests.exceptions.HTTPError as error:
            _LOGGER.error(error)
            self.data = self.router_infos = None
            return False
| mit |
talha81/TACTIC-DEV | src/bin/fixes/mod_widget_config_code.py | 6 | 1161 | import sys
import tacticenv
from pyasm.biz import Project
from pyasm.security import Batch
from pyasm.command import Command
from pyasm.search import Search
class ModCodeCmd(Command):
    """Batch command rewriting every widget_config code as WIDGET_CONFIG<id>,
    zero-padded to 5 digits."""

    def execute(my):
        """Assign an id-based code to each config/widget_config sobject and commit
        it without firing triggers."""
        search = Search('config/widget_config')
        sobjects = search.get_sobjects()

        padding = 5
        prefix = 'WIDGET_CONFIG'
        # Hoisted out of the loop: the format string only depends on `padding`.
        code_expr = "%%s%%0.%dd" % padding
        for sobject in sobjects:
            id = sobject.get_id()
            new_code = code_expr % (prefix, id)
            # Parenthesized print works on both Python 2 and Python 3
            # (the original bare print statement is a SyntaxError on Python 3).
            print("Updating widget_config [%s] with new code [%s]" % (id, new_code))
            sobject.set_value("code", new_code)
            sobject.commit(triggers=False)

        my.add_description('Batch-update widget config code')
if __name__ == '__main__':
    # Usage: script.py <project_code>
    args = sys.argv[1:]
    if len(args) != 1:
        # Parenthesized print works on both Python 2 and Python 3
        # (the original bare print statement is a SyntaxError on Python 3).
        print("Please provide a valid project code!")
        sys.exit(2)
    my_login = 'admin'
    batch = Batch(login_code=my_login)
    Project.set_project(args[0])
    command = ModCodeCmd()
    Command.execute_cmd(command)
| epl-1.0 |
rizac/gfz-reportgen | gfzreport/sphinxbuild/map/__init__.py | 2 | 43603 | '''
This module implements the function `plotmap` which plots scattered points on a map
retrieved using ArgGIS Server REST API. The function is highly customizable and is basically a
wrapper around the `Basemap` library (for the map background)
plus matplotlib utilities (for plotting points, shapes, labels and legend)
Created on Mar 10, 2016
@author: riccardo
'''
import numpy as np
import re
from itertools import izip, chain
from urllib2 import URLError, HTTPError
import socket
import matplotlib.pyplot as plt
import matplotlib.patheffects as PathEffects
from mpl_toolkits.basemap import Basemap
from matplotlib.font_manager import FontProperties
from matplotlib import rcParams
def parse_margins(obj, parsefunc=lambda margins: [float(val) for val in margins]):
    """Convert `obj` into a 4-element numpy array [top, right, bottom, left].

    `obj` may be None (all zeros), a scalar (applied to every side), an
    iterable of 1 to 4 values, or a string of 1 to 4 chunks separated by
    commas, semicolons or whitespace (e.g. "4deg 0.0", "1, 1.2", "1 ; 2").
    Fewer than four values are expanded with the css margin shorthand rules:

        [x]          -> [x, x, x, x]
        [x, y]       -> [x, y, x, y]
        [x, y, z]    -> [x, y, z, y]

    `parsefunc` (by default, float conversion of every element) is applied to
    the expanded 4-element list before it is wrapped into a numpy array.

    :raise ValueError: if `obj` cannot be expanded to exactly four elements
    """
    if obj is None:
        values = [0] * 4
    elif hasattr(obj, "__iter__") and not isinstance(obj, str):
        # iterable, not a string (check is py2/py3 compatible)
        values = list(obj)
    else:
        try:
            values = [float(obj)] * 4
        except (TypeError, ValueError):
            # a string: split on commas, semicolons or runs of whitespace
            values = re.compile(r"(?:\s*,\s*|\s*;\s*|\s+)").split(obj)

    # css-like shorthand expansion to exactly 4 elements:
    if len(values) == 1:
        values = values * 4
    elif len(values) == 2:
        values = values * 2
    elif len(values) == 3:
        values = values + [values[1]]
    elif len(values) != 4:
        raise ValueError("unable to parse margins on invalid value '%s'" % obj)

    return np.asarray(parsefunc(values) if hasattr(parsefunc, "__call__") else values)
def parse_distance(dist, lat_0=None):
    """Return the given distance expressed in degrees.

    :param dist: a number (already degrees), None (returns 0), or a string
        with an optional unit suffix 'deg', 'km' or 'm'. Distances in
        meters/kilometers are converted via `w2lon` (along the `lat_0`
        latitude circle) when `lat_0` is given, else via `h2lat` (along a
        meridian)
    :param lat_0: optional latitude (degrees) of the circle along which a
        width in meters should be converted to degrees
    :raise ValueError: if `dist` is a string that cannot be parsed
    """
    try:
        return 0 if dist is None else float(dist)
    except ValueError:
        if dist[-3:].lower() == 'deg':
            return float(dist[:-3])
        elif dist[-2:] == 'km':
            dst = 1000 * float(dist[:-2])
        elif dist[-1:] == 'm':
            # BUGFIX: strip the trailing unit character. The original
            # `float(dist[:1])` kept only the FIRST character, so e.g.
            # "500m" was parsed as 5.0 instead of 500 meters.
            dst = float(dist[:-1])
        else:
            raise
        return w2lon(dst, lat_0) if lat_0 is not None else h2lat(dst)
def get_lon0_lat0(min_lons, min_lats, max_lons, max_lats):
    """Return the 2-element tuple (lon_0, lat_0): the mid point of the
    bounding box denoted by the arguments.

    :param min_lons: the minimum of longitudes
    :param min_lats: the maximum of latitudes
    :param max_lons: the minimum of longitudes
    :param max_lats: the maximum of latitudes
    """
    mid_lat = max_lats / 2. + min_lats / 2.
    mid_lon = max_lons / 2. + min_lons / 2.
    # wrap longitudes given in the [0, 360] convention back to [-180, 180]
    if mid_lon > 180:
        mid_lon -= 360
    return mid_lon, mid_lat
def getbounds(min_lon, min_lat, max_lon, max_lat, margins):
    """Calculates the bounds given the bounding box identified by the arguments and
    given optional margins

    :param min_lon: the minimum longitude (numeric, scalar)
    :param min_lat: the maximum latitude (numeric, scalar)
    :param max_lon: the minimum longitude (numeric, scalar)
    :param max_lat: the maximum latitude (numeric, scalar)
    :param margins: the margins as a css-like string (with units 'deg', 'km' or 'm'), or as
        a 1 to 4 element array of numeric values (in that case denoting degrees).
        As in css, a 4 element array denotes the [top, right, bottom, left] values.
        None defaults to [0, 0, 0, 0].
    :return: the 6-element tuple denoting lon_0, lat_0, min_lon, min_lat, max_lon, max_lat.
        where min_lon, min_lat, max_lon, max_lat are the new bounds and lon_0 and lat_0 are
        their midpoints (x and y, respectively)
    """
    def parsefunc(mrgns):
        """parses mrgns as array of strings into array of floats
        """
        # right/left margins are widths: convert them relative to the latitude
        # of the closest horizontal edge (max_lat for right, min_lat for left);
        # top/bottom margins are heights and need no reference latitude
        return parse_distance(mrgns[0]), parse_distance(mrgns[1], max_lat), \
            parse_distance(mrgns[2]), parse_distance(mrgns[3], min_lat)

    top, right, btm, left = parse_margins(margins, parsefunc)
    # expand (or shrink, for negative margins) the bounding box:
    min_lon, min_lat, max_lon, max_lat = min_lon-left, min_lat-btm, max_lon+right, max_lat+top

    # degenerate (zero-width/height) boxes are inflated by 10 degrees per side:
    if min_lon == max_lon:
        min_lon -= 10  # in degrees
        max_lon += 10  # in degrees
    if min_lat == max_lat:
        min_lat -= 10  # in degrees
        max_lat += 10  # in degrees

    # minima must be within bounds:
    min_lat = max(-90, min_lat)
    max_lat = min(90, max_lat)
    min_lon = max(-180, min_lon)
    max_lon = min(180, max_lon)

    lon_0, lat_0 = get_lon0_lat0(min_lon, min_lat, max_lon, max_lat)
    return lon_0, lat_0, min_lon, min_lat, max_lon, max_lat
# static constant converter (degree to meters and viceversa) for latitudes:
# length in meters of one degree of latitude along a meridian (Earth
# circumference with radius 6371 km, divided by 360)
DEG2M_LAT = 2 * np.pi * 6371 * 1000 / 360


def lat2h(distance_in_degrees):
    """Convert a latitude distance from degrees to a height in meters.

    :param distance_in_degrees: a distance (python scalar or numpy array)
        along the great circle, expressed in degrees
    """
    return distance_in_degrees * DEG2M_LAT
def h2lat(distance_in_meters):
    """Convert a height in meters to a latitude distance in degrees.

    :param distance_in_meters: a distance (python scalar or numpy array)
        along the great circle, expressed in meters
    """
    return distance_in_meters / DEG2M_LAT
def lon2w(distance_in_degrees, lat_0):
    """Convert a longitude distance from degrees to a width in meters.

    :param distance_in_degrees: a distance (python scalar or numpy array)
        along the `lat_0` latitude circle, expressed in degrees
    :param lat_0: the latitude (in degrees) of the circle along which the
        conversion is performed. Circles shrink towards the poles, hence the
        cosine factor
    """
    meters_per_degree = DEG2M_LAT * np.cos(lat_0 / 180 * np.pi)
    return distance_in_degrees * meters_per_degree
def w2lon(distance_in_meters, lat_0):
    """Convert a width in meters to a longitude distance in degrees.

    :param distance_in_meters: a distance (python scalar or numpy array)
        along the `lat_0` latitude circle, expressed in meters
    :param lat_0: the latitude (in degrees) of the circle along which the
        conversion is performed. Circles shrink towards the poles, hence the
        cosine factor
    """
    meters_per_degree = DEG2M_LAT * np.cos(lat_0 / 180 * np.pi)
    return distance_in_meters / meters_per_degree
class MapHandler(object):
    """
    Class handling bounds of a map given points (lons and lats)
    """

    def __init__(self, lons, lats, map_margins):
        """Initializes a new MapHandler. If figure here is None, you **MUST**
        call self.set_fig(fig) to calculate bounds and other stuff
        when you have a ready figure"""
        # empty inputs default to a single point at (0, 0)
        self.lons = lons if len(lons) else [0]  # FIXME: use numpy arrays!!
        self.lats = lats if len(lats) else [0]
        self.max_lons, self.min_lons = max(self.lons), min(self.lons)
        self.max_lats, self.min_lats = max(self.lats), min(self.lats)
        # corner names follow the Basemap convention: ll = lower-left,
        # ur = upper-right
        self.lon_0, self.lat_0, self.llcrnrlon, self.llcrnrlat, self.urcrnrlon, self.urcrnrlat = \
            getbounds(self.min_lons, self.min_lats, self.max_lons, self.max_lats, map_margins)

    def _get_map_dims(self):  # , fig_size_in_inches, colorbar=False):
        """Returns the map dimension width, height, in meters"""
        max_lons, min_lons = self.urcrnrlon, self.llcrnrlon
        max_lats, min_lats = self.urcrnrlat, self.llcrnrlat
        height = lat2h(max_lats - min_lats)
        # width is measured along the mid-latitude circle of the map
        width = lon2w(max_lons - min_lons, self.lat_0)
        return width, height

    def get_parallels(self, max_labels_count=8):
        """Returns 'nice' latitude values for the map parallels; their count is
        proportional to the map's height/width ratio (at most max_labels_count)."""
        width, height = self._get_map_dims()
        lat_0 = self.lat_0
        N1 = int(np.ceil(height / max(width, height) * max_labels_count))
        parallels = MapHandler._linspace(lat_0 - h2lat(height / 2),
                                         lat_0 + h2lat(height / 2), N1)
        return parallels

    def get_meridians(self, max_labels_count=8):
        """Returns 'nice' longitude values for the map meridians; their count is
        proportional to the map's width/height ratio (at most max_labels_count)."""
        width, height = self._get_map_dims()
        lon_0 = self.lon_0
        lat_0 = self.lat_0
        N2 = int(np.ceil(width / max(width, height) * max_labels_count))
        meridians = MapHandler._linspace(lon_0 - w2lon(width / 2, lat_0),
                                         lon_0 + w2lon(width / 2, lat_0), N2)
        # normalize back to the [-180, 180] convention:
        meridians[meridians > 180] -= 360
        return meridians

    @staticmethod
    def _linspace(val1, val2, N):
        """
        returns around N 'nice' values between val1 and val2. Copied from obspy.plot_map
        """
        dval = val2 - val1
        # rounding position giving "nice" (round) tick steps:
        round_pos = int(round(-np.log10(1. * dval / N)))
        # Fake negative rounding as not supported by future as of now.
        if round_pos < 0:
            factor = 10 ** (abs(round_pos))
            delta = round(2. * dval / N / factor) * factor / 2
        else:
            delta = round(2. * dval / N, round_pos) / 2
        # snap the endpoints to multiples of delta:
        new_val1 = np.ceil(val1 / delta) * delta
        new_val2 = np.floor(val2 / delta) * delta
        # NOTE(review): N is a float here; recent numpy versions require an
        # integer `num` in linspace — confirm against the targeted numpy
        N = (new_val2 - new_val1) / delta + 1
        return np.linspace(new_val1, new_val2, N)
def _normalize(obj, size=None, dtype=None):
""""Casts" obj to a numpy array of the given optional size and optional dtype, and returns it.
If size is not None, the array must have length size. If not, and has length 1, it will be
resized to the specified size. Otherwise a ValueError is raised
If size is None, no resize will be in place and the array is returend as it is
Note: obj=None will be converted to the array [None], apparently in the current version of numpy
this wouldn't be the default (see argument ndmin=1)
:return an numpy array resulting to the coinversion of obj into array
:Examples:
"""
x = np.array(obj, ndmin=1) if dtype is None else np.array(obj, ndmin=1, dtype=dtype)
if size is None:
return np.array([]) if obj is None else x # if obj is None x is [None], return [] instead
try:
if len(x) == 1:
x = np.resize(x, size)
elif len(x) != size:
raise ValueError("invalid array length: %d. Expected %d" % (len(x), size))
except (ValueError, TypeError) as _err:
raise ValueError(str(_err))
return x
def torgba(html_str):
    """Converts html_str into a tuple of rgba colors all in [0, 1]
    Curiously, matplotlib color functions do not provide this functionality for
    '#RGBA' color formats

    :param html_str: a valid html string in hexadecimal format.
        Can have length 4, 7 or 9 such as #F1a, #fa98e3, #fc456a09
    :return: a rgba vector, i.e. a 4-element numpy array of values in [0,1] denoting `html_str`
    :raise: ValueError if html_str is invalid
    """
    if len(html_str) not in (4, 7, 9) or not html_str[0] == '#':
        raise ValueError("'%s' invalid html string" % html_str)
    elif len(html_str) == 4:
        # short form '#RGB': duplicate each hex digit ('#F1a' -> 'FF','11','aa').
        # NOTE: range() instead of the Python-2-only xrange() — works on both
        # Python 2 and 3
        rgb = [html_str[i:i + 1] * 2 for i in range(1, len(html_str))]
    else:
        rgb = [html_str[i:i + 2] for i in range(1, len(html_str), 2)]
    if len(rgb) == 3:
        rgb += ['FF']  # no alpha channel given: default to fully opaque
    return np.true_divide(np.array([int(r, 16) for r in rgb]), 255)
def _shapeargs(lons, lats, labels, sizes, colors, markers, legend_labels):
    # Normalizes all scatter-plot arguments to numpy arrays of one common
    # length (scalars are broadcast), then filters out invalid points:
    # those with nan coordinates or non-positive sizes.
    # Raises ValueError if lons and lats have mismatching lengths.
    lons = _normalize(lons, dtype=float)  # basically: convert to float array if scalar (size=0)
    lats = _normalize(lats, dtype=float)  # basically: convert to float array if scalar (size=0)
    if len(lons) != len(lats):
        raise ValueError('mismatch in lengths: lons (%d) and lats (%d)' % (len(lons), len(lats)))
    leng = len(lons)
    labels = _normalize(labels, size=leng)
    colors = _normalize(colors, size=leng)
    markers = _normalize(markers, size=leng)
    legend_labels = _normalize(legend_labels, size=leng)
    # colors[np.isnan(colors) | (colors <= 0)] = 1.0  # nan colors default to 1 (black?)
    sizes = _normalize(sizes, size=leng, dtype=float)
    # keep only points that are actually drawable:
    valid_points = np.logical_not(np.isnan(lons) | np.isnan(lats) | (sizes <= 0))
    # return all points whose corresponding numeric values are not nan:
    return (lons[valid_points],
            lats[valid_points],
            labels[valid_points],
            sizes[valid_points],
            colors[valid_points],
            markers[valid_points],
            legend_labels[valid_points])
# def get_ax_size(ax, fig):
# bbox = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
# return bbox.width, bbox.height
def pix2inch(pix, fig):
    """Convert a length in pixels to inches for the given matplotlib figure."""
    dots_per_inch = fig.dpi
    return pix / dots_per_inch
def inch2pix(inch, fig):
    """Convert a length in inches to pixels for the given matplotlib figure."""
    dots_per_inch = fig.dpi
    return inch * dots_per_inch
def _joinargs(key_prefix, kwargs, **already_supplied_args):
'''updates already_supplied_args with kwargs using a given prefix in kwargs to identify
common keys. Used in plotmap for kwargs'''
key_prefix += "_"
len_prefix = len(key_prefix)
already_supplied_args.update({k[len_prefix:]: v
for k, v in kwargs.iteritems() if k.startswith(key_prefix)})
return already_supplied_args
def _mp_set_custom_props(drawfunc_retval, lines_props, labels_props):
    """Sets custom properties on a drawparallels or drawmeridians return value.
    `drawfunc_retval` is a dict of numbers mapped to tuples where the first
    element is a list of matplotlib lines, and the second element is a list of
    matplotlib texts."""
    # .values() instead of the Python-2-only .itervalues(): works on both
    # Python 2 and 3
    _setprop(chain.from_iterable(lin for lin, lab in drawfunc_retval.values()), lines_props)
    _setprop(chain.from_iterable(lab for lin, lab in drawfunc_retval.values()), labels_props)
def _setprop(iterator_of_mp_objects, props):
'''sets the given properties of an iterator of same type matplotlib objects'''
if not props:
return
prp = {}
for obj in iterator_of_mp_objects:
if not prp:
prp = {"set_%s" % name: val for name, val in props.iteritems()
if hasattr(obj, "set_%s" % name)}
for name, val in prp.iteritems():
getattr(obj, name)(val)
# values below CAN be None but CANNOT be arrays containing None's
def plotmap(lons,
lats,
labels=None,
legendlabels=None,
markers="o",
colors="#FF4400",
sizes=20,
cmap=None,
fontsize=None,
fontweight='regular',
fontcolor='k',
labels_h_offset=0,
labels_v_offset=0,
mapmargins='0.5deg',
figmargins=2,
arcgis_service='World_Street_Map',
arcgis_xpixels=1500,
arcgis_dpi=96,
urlfail='ignore',
maxmeridians=5,
maxparallels=5,
legend_pos='bottom',
legend_borderaxespad=1.5,
legend_ncol=1,
title=None,
show=False,
**kwargs): # @UnusedVariable
"""
Makes a scatter plot of points on a map background using ArcGIS REST API.
:param lons: (array-like of length N or scalar) Longitudes of the data points, in degreee
:param lats: (array-like of length N or scalar) Latitudes of the data points, in degree
:param labels: (array-like of length N or string. Default: None, no labels) Annotations
(labels) for the individual data points on the map. If non-array (e.g. string), the same value
will be applied to all points
:param legendlabels: (array-like of length N or string. Default: None, no legend)
Annotations (labels) for the legend. You can supply a sparse array where only some points
will be displayed on the legend. All points with no legend label will not show up in the
legend
:param sizes: (array-like of length N or number. Default: 20) Sizes (in points^2) of the
individual points in the scatter plot.
:param markers: (array-like of length N,
`MarkerStyle<http://matplotlib.org/api/markers_api.html#matplotlib.markers.MarkerStyle>`_ or
string. Default: 'o' - circle) The markers (shapes) to be drawn for each point on the map.
See `markers <http://matplotlib.org/api/markers_api.html#module-matplotlib.markers>`_ for
more information on the different styles of markers scatter supports. Marker can be either
an instance of the class or the text shorthand for a particular marker.
:param colors: (array-like of length N,
`matplotlib color <http://matplotlib.org/api/colors_api.html>`_, e.g. string.
Default: "#FF4400")
Colors for the markers (fill color). You can type color transparency by supplying string of 9
elements where the last two characters denote the transparency ('00' fully transparent,
'ff' fully opaque). Note that this is a feature not implemented in `matplotlib` colors, where
transparency is given as the last element of the numeric tuple (r, g, b, a)
:param fontsize: (numeric or None. Default: None) The fontsize for all texts drawn on the
map (labels, axis tick labels, legend). None uses the default figure font size for all. Custom
values for the individual text types (e.g. legend texts vs labels texts) can be supplied
via the `kwargs` argument and a given prefix (see below)
:param fontweight: (string or number. Default: 'regular') The font weight for all texts drawn
on the map (labels, axis tick labels, legend). Accepts the values (see
http://matplotlib.org/api/text_api.html#matplotlib.text.Text.set_weight):
```
[a numeric value in range 0-1000 | 'ultralight' | 'light' |
'normal' | 'regular' | 'book' | 'medium' | 'roman' | 'semibold' | 'demibold' | 'demi' |
'bold' | 'heavy' | 'extra bold' | 'black' ]
```
Custom
values for the individual text types (e.g. legend texts vs labels texts) can be supplied
via the `kwargs` argument and a given prefix (see below)
:param fontcolor: (`matplotlib color <http://matplotlib.org/api/colors_api.html>`_ or
string. Default: 'k', black) The font color for all texts drawn on the
map (labels, axis tick labels, legend). Custom
values for the individual text types (e.g. legend texts vs labels texts) can be supplied
via the `kwargs` argument and a given prefix (see below)
:param labels_h_offset: (string, number. Defaults None=0) The horizontal offset to be applied
to each label on the map relative to its point coordinates. Negative values will shift the
labels westward, positive values eastward. Useful for not overlapping
markers and labels.
If numeric, it is assumed to be the expressed in degrees. Otherwise, you can supply a string
with a number followed by one of the units 'm', 'km' or 'deg' (e.g., '5km', '0.5deg').
Note that this value affects the
`horizontalalignment` and `multialignment` properties of the labels
(for info see http://matplotlib.org/api/text_api.html#matplotlib.text.Text). Supplying
`labels_horizontalalignment` or `labels_ha` as optional argument will override
this behaviour (see `kwargs` below)
:param labels_v_offset: (string, number. Defaults None=0) The vertical offset to be applied
to each label on the map relative to its point coordinates. Negative values will shift the
labels southhward, positive values northward. See notes on `labels_h_offset` for details
Note that this value affects the
`verticalalignment` property of the labels
(for info see http://matplotlib.org/api/text_api.html#matplotlib.text.Text). Supplying
`labels_verticalalignment` or `labels_va` as optional argument will override
this behaviour (see `kwargs` below)
:param mapmargins: (array-like of 1,2,3,4 elements, numeric or string, or None=0.
Default: '0.5deg').
The map margins, i.e. how much the map has to 'expand/shrink' in any direction, relative
to the bounding box calculated to include all points.
If array-like, it behaves like the css 'margin' property of html: 4 elements will denote
[top, right, bottom, left], two elements will denote [top/bottom, left/right], three
elements [top, right/left, bottom], a single element array (or a single number or a string)
applies the value to all directions.
Finally, elements of the array must be expressed as the arguments `labels_h_offset` or
`labels_v_offset`: numbers denoting degrees or strings with units 'm', 'km', 'deg'. Negative
values will shrink the map.
If string, the argument will be first splitted using commas, semicolon or spaces as delimiters
(if no delimiter is found, the string is taken as a single chunk) and converted to an array-like
object.
:param figmargins: (array-like of 1,2,3,4 elements, number or None=0. Default:2) The
figure margins *in font height units* (e.g., 2 means: twice the font height). This argument
behaves exactly as `mapmargins` but expands/shrinks the distances between map and figure
(image) bounds. Useful to include axis tick labels or legend, if they overflow.
Note also that strings
are allowed only if they are parsable to float (e.g. "5,6; -12 1")
:param arcgis_service: (string, default: 'World_Street_Map'). The map image type, or
more technically the service for the map
hosted on ArcGIS server. Other values are 'ESRI_Imagery_World_2D'
(default in
`Basemap.arcgisimage <http://matplotlib.org/basemap/api/basemap_api.html#mpl_toolkits.basemap.Basemap.arcgisimage>`_),
'World_Topo_Map', 'World_Terrain_Base'. For details, see:
http://server.arcgisonline.com/arcgis/rest/services.
:param arcgis_xpixels: (numeric, default: 3000). Requested number of image pixels
in x-direction (default is 400 in
`Basemap.arcgisimage <http://matplotlib.org/basemap/api/basemap_api.html#mpl_toolkits.basemap.Basemap.arcgisimage>`_).
The documentation is quite unclear but this parameter seems to set the zoom of the image. From
this `link <http://basemaptutorial.readthedocs.io/en/latest/backgrounds.html#arcgisimage>`_:
A bigger number will ask a bigger image, so the image will have more detail.
So when the zoom is bigger, `xsize` must be bigger to maintain the resolution
:param urlfail: (string, 'raise' or 'ignore'. Default: 'ignore'). Tells what to do if the
ArcGIS requet fails (URLError, no internet connection etcetera). By default, on failure a raw
map with continents contour, and oceans will be plotted (good for
debug). Otherwise, the exception resulting from the web request is raised
:param maxmeridians: (numeric default: 5). The number of maximum meridians to be drawn. Set to
<=0 to hide meridians. Note that also x-axis labels are drawn.
To further manipulate meridians display, use any argument starting with
'mlabels_', 'mlines_' or 'meridians' (see `kwargs` below). E.g., to show only the labels and not
the lines, supply as argument `meridians_linewidth=0` or 'mlines_linewidth=0'.
:param maxparallels: (numeric default: 5). The number of maximum parallels to be drawn. Set to
<=0 to hide parallels. Note that also y-axis labels are drawn.
To further manipulate parallels display, use any argument starting with
'plabels_', 'plines_' or 'parallels' (see `kwargs` below). E.g., to show only the labels and not
the lines, supply as argument `parallels_linewidth=0` or 'plines_linewidth=0'.
:param legend_pos: (string in ['top'. 'bottom', 'right', 'left'], default='bottom'). The legend
location with respect to the map. It also adjusts the bounding box that the legend will be
anchored to.
For
customizing entirely the legend placement overriding this parameter, provide `legend_loc`
(and optionally `legend_bbox_to_anchor`) in `kwargs` (see below)
:param legend_borderaxespad: (numeric, default 1.5) The pad between the axes and legend border,
in font units
:param legend_ncol: (integer, default=1) The legend number of columns
:param title (string or None. Default: None): Title above plot (Note: not tested)
:param show (boolean, default: False): Whether to show the figure after plotting or not
(Note: not tested). Can be used to do further customization of the plot before showing it.
:param fig: (matplotlib figure or None, default: None). Note: deprecated, pass None as
supplying an already existing figure with other axes might break the figure layout
:param kwargs: any kind of additional argument passed to `matplotlib` and `Basemap` functions
or objects.
The name of the argument must be of the form
```
prefix_propertyname=propertyvalue
```
where prefix indicates the function/object to be called with keyword argument:
```
propertyname=propertyvalue
```
Current supported prefixes are (for available property names see links):
Prefix Passes `propertyname` to
============ ==================================================================================
arcgis `Basemap.arcgisimage <http://matplotlib.org/basemap/api/basemap_api.html#mpl_toolkits.basemap.Basemap.arcgisimage>_
used to retrieve the background map using ArgGIS Server REST API. See also
http://basemaptutorial.readthedocs.io/en/latest/backgrounds.html#arcgisimage
basemap `Basemap <http://matplotlib.org/basemap/api/basemap_api.html#mpl_toolkits.basemap.Basemap>`_
the object responsible of drawing and managing the map. Note that
`basemap_resolution=h` and `basemap_epsg=4326` by default.
labels All `texts <http://matplotlib.org/api/text_api.html#matplotlib.text.Text>`_
used to display the point labels on the map
legend The `legend <http://matplotlib.org/api/legend_api.html#matplotlib.legend.Legend>`_.
See the already implemented arguments `legend_borderaxespad`,
`legend_ncol`
legendlabels All `texts <http://matplotlib.org/api/text_api.html#matplotlib.text.Text>`_
used to display the text labels of the legend
meridians `Basemap.drawmeridians`. For more detailed settings on meridians, see
`mlines` and `mlabels`
parallels `Basemap.drawparallels`. For more detailed settings on parallels, see
`plines` and `plabels`
plines All `lines <http://matplotlib.org/api/lines_api.html#matplotlib.lines.Line2D>`_
used to display the parallels
plabels All `texts <http://matplotlib.org/api/text_api.html#matplotlib.text.Text>`_
used to display the parallels labels on the y axis
mlines All `lines <http://matplotlib.org/api/lines_api.html#matplotlib.lines.Line2D>`_
used to display the meridians
mlabels All `texts <http://matplotlib.org/api/text_api.html#matplotlib.text.Text>`_
used to display the meridians labels on the x axis
============ ==================================================================================
Examples
--------
- `legend_title='abc'` will call `legend(..., title='abc', ...)`
- `labels_path_effects=[PathEffects.withStroke(linewidth=2, foreground='white')]` will set the
a white contour around each label text
- `meridians_labelstyle="+/-"` will call `Basemap.drawmeridians(..., labelstyle="+/-", ...)`
Notes:
------
The objects referenced by `plines`, `plabels`, `mlines`, `mlabels` and `legendlabels`
cannot be initialized directly with the given properties, which will be set after they are
created assuming that for any property `foo` passed as keyword argument in their constructor
there exist a method `set_foo(...)` (which will be called with the given propertyvalue).
This is most likely always true according to matplotlib api, but we cannot assure it works
100% of the times
"""
lons, lats, labels, sizes, colors, markers, legendlabels =\
_shapeargs(lons, lats, labels, sizes, colors, markers, legendlabels)
    # convert html strings to tuples of rgba values in [0, 1] if the former are in string format,
    # because (maybe too old matplotlib version?) colors in the format '#RGBA' are not supported
    # Also, if cmap is provided, basemap.scatter calls matplotlib.scatter which
    # wants float sequences in case of color map
if colors.dtype.char in ('U', 'S'): # pylint: disable=no-member
colors = np.array([torgba(c) for c in colors])
fig = plt.figure()
map_ax = fig.add_axes([0, 0, 1, 1]) # set axes size the same as figure
# setup handler for managing basemap coordinates and meridians / parallels calculation:
handler = MapHandler(lons, lats, mapmargins)
kwa = _joinargs('basemap', kwargs,
llcrnrlon=handler.llcrnrlon,
llcrnrlat=handler.llcrnrlat,
urcrnrlon=handler.urcrnrlon,
urcrnrlat=handler.urcrnrlat,
epsg='4326', # 4326, # 3395, # 3857,
resolution='i', # 'h',
ax=map_ax)
bmap = Basemap(**kwa)
try:
kwa = _joinargs("arcgis", kwargs, service=arcgis_service, xpixels=arcgis_xpixels,
dpi=arcgis_dpi)
# set the map image via a map service. In case you need the returned values, note that
# This function returns an ImageAxis (or AxisImage, check matplotlib doc)
bmap.arcgisimage(**kwa)
except (URLError, HTTPError, socket.error) as exc:
# failed, maybe there is not internet connection
if urlfail == 'ignore':
# Print a simple map offline
bmap.drawcoastlines()
watercolor = '#4444bb'
bmap.fillcontinents(color='#eebb66', lake_color=watercolor)
bmap.drawmapboundary(fill_color=watercolor)
else:
raise
# draw meridians and parallels. From basemap.drawmeridians / drawparallels doc:
# returns a dictionary whose keys are the meridian values, and
# whose values are tuples containing lists of the
# matplotlib.lines.Line2D and matplotlib.text.Text instances
# associated with each meridian. Deleting an item from the
    # dictionary removes the corresponding meridian from the plot.
if maxparallels > 0:
kwa = _joinargs("parallels", kwargs, linewidth=1, fontsize=fontsize,
labels=[0, 1, 1, 0], fontweight=fontweight)
parallels = handler.get_parallels(maxparallels)
# Old basemap versions have problems with non-integer parallels.
try:
            # Note: the method below returns a list of text objects
            # representing the tick labels
_dict = bmap.drawparallels(parallels, **kwa)
except KeyError:
parallels = sorted(list(set(map(int, parallels))))
_dict = bmap.drawparallels(parallels, **kwa)
# set custom properties:
kwa_lines = _joinargs("plines", kwargs)
kwa_labels = _joinargs("plabels", kwargs, color=fontcolor)
_mp_set_custom_props(_dict, kwa_lines, kwa_labels)
if maxmeridians > 0:
kwa = _joinargs("meridians", kwargs, linewidth=1, fontsize=fontsize,
labels=[1, 0, 0, 1], fontweight=fontweight)
meridians = handler.get_meridians(maxmeridians)
_dict = bmap.drawmeridians(meridians, **kwa)
# set custom properties:
kwa_lines = _joinargs("mlines", kwargs)
kwa_labels = _joinargs("mlabels", kwargs, color=fontcolor)
_mp_set_custom_props(_dict, kwa_lines, kwa_labels)
# fig.get_axes()[0].tick_params(direction='out', length=15) # does not work, check basemap
fig.bmap = bmap
# compute the native bmap projection coordinates for events.
# from the docs (this is kind of outdated, however leave here for the moment):
# Calling a Basemap class instance with the arguments lon, lat will
# convert lon/lat (in degrees) to x/y map projection
# coordinates (in meters). If optional keyword ``inverse`` is
# True (default is False), the inverse transformation from x/y
# to lon/lat is performed.
# For cylindrical equidistant projection (``cyl``), this
# does nothing (i.e. x,y == lon,lat).
# For non-cylindrical projections, the inverse transformation
# always returns longitudes between -180 and 180 degrees. For
# cylindrical projections (self.projection == ``cyl``,
# ``cea``, ``mill``, ``gall`` or ``merc``)
# the inverse transformation will return longitudes between
# self.llcrnrlon and self.llcrnrlat.
# Input arguments lon, lat can be either scalar floats,
# sequences, or numpy arrays.
# parse hoffset and voffset and assure they are at least arrays of 1 elements
# (for aligning text labels, see below)
hoffset = np.array(parse_distance(labels_h_offset, lats), copy=False, ndmin=1)
voffset = np.array(parse_distance(labels_v_offset), copy=False, ndmin=1)
lbl_lons = lons + hoffset
lbl_lats = lats + voffset
# convert labels coordinates:
xlbl, ylbl = bmap(lbl_lons, lbl_lats)
# plot point labels
max_points = -1 # negative means: plot all
if max_points < 0 or len(lons) < max_points:
# Set alignments which control also the corner point reference when placing labels
# from (FIXME: add ref?)
# horizontalalignment controls whether the x positional argument for the text indicates
# the left, center or right side of the text bounding box.
# verticalalignment controls whether the y positional argument for the text indicates
# the bottom, center or top side of the text bounding box.
# multialignment, for newline separated strings only, controls whether the different lines
# are left, center or right justified
ha = 'left' if hoffset[0] > 0 else 'right' if hoffset[0] < 0 else 'center'
va = 'bottom' if voffset[0] > 0 else 'top' if voffset[0] < 0 else 'center'
ma = ha
kwa = _joinargs("labels", kwargs, fontweight=fontweight, color=fontcolor,
zorder=100, fontsize=fontsize, horizontalalignment=ha,
verticalalignment=va, multialignment=ma)
for name, xpt, ypt in zip(labels, xlbl, ylbl):
# Check if the point can actually be seen with the current bmap
# projection. The bmap object will set the coordinates to very
# large values if it cannot project a point.
if xpt > 1e25:
continue
map_ax.text(xpt, ypt, name, **kwa)
# plot points
x, y = bmap(lons, lats)
# store handles to points, and relative labels, if any
leg_handles, leg_labels = [], []
# bmap.scatter accepts all array-like args except markers. Avoid several useless loops
# and do only those for distinct markers:
# unique markers (sorted according to their index in markers, not their value):
mrks = markers[np.sort(np.unique(markers, return_index=True)[1])]
for mrk in mrks:
# Note using masks with '==' (numpy==1.11.3):
#
# >>> a = np.array([1,2,3])
# >>> a == 3
# array([False, False, True], dtype=bool) # OK
# >>> a == None
# False # NOT AS EXPECTED!
# >>> np.equal(a, None)
# array([False, False, False], dtype=bool) # OK
#
# (Note also that a == None issues:
# FutureWarning: comparison to `None` will result in an elementwise object
# comparison in the future.)
#
# So the correct way is to write
# mask = np.equal(array, val) if val is None else (a == val)
m_mask = np.equal(markers, mrk) if mrk is None else markers == mrk # see above
__x = x[m_mask]
__y = y[m_mask]
__m = mrk
__s = sizes[m_mask]
__c = colors[m_mask]
__l = legendlabels[m_mask]
# unique legends (sorted according to their index in __l, not their value):
for leg in __l[np.sort(np.unique(__l, return_index=True)[1])]:
l_mask = np.equal(__l, leg) if leg is None else __l == leg # see above
_scatter = bmap.scatter(__x[l_mask],
__y[l_mask],
marker=mrk,
s=__s[l_mask],
c=__c[l_mask],
cmap=cmap,
zorder=10)
if leg:
leg_handles.append(_scatter)
leg_labels.append(leg)
if leg_handles:
# if we provided `legend_loc`, use that:
loc = kwargs.get('legend_loc', None)
bbox_to_anchor = None # defaults in matplotlib legend
# we do have legend to show. Adjust legend reference corner:
if loc is None:
if legend_pos == 'bottom':
loc = 'upper center'
bbox_to_anchor = (0.5, -0.05)
elif legend_pos == 'top':
loc = 'lower center'
bbox_to_anchor = (0.5, 1.05)
elif legend_pos == 'left':
loc = 'center right'
bbox_to_anchor = (-0.05, 0.5)
elif legend_pos == 'right':
loc = 'center left'
bbox_to_anchor = (1, 0.5)
else:
raise ValueError('invalid legend_pos value:"%s"' % legend_pos)
# The plt.legend has the prop argument which sets the font properties:
# family, style, variant, weight, stretch, size, fname. See
# http://matplotlib.org/api/font_manager_api.html#matplotlib.font_manager.FontProperties
# However, that property does not allow to set font color. So we
# use the get_text method of Legend. Note that we pass font size *now* even if
# setting it later works as well (the legend frame is resized accordingly)
kwa = _joinargs("legend", kwargs, scatterpoints=1, ncol=legend_ncol, loc=loc,
bbox_to_anchor=bbox_to_anchor, borderaxespad=legend_borderaxespad,
fontsize=fontsize)
# http://stackoverflow.com/questions/17411940/matplotlib-scatter-plot-legend
leg = map_ax.legend(leg_handles, leg_labels, **kwa)
# set properties supplied via 'legend_'
_setprop(leg.get_texts(), _joinargs("legendlabels", kwargs, color=fontcolor))
# re-position the axes. The REAL map aspect ratio seems to be this:
realratio_h_w = bmap.aspect
fig_w, fig_h = fig.get_size_inches()
figratio_h_w = np.true_divide(fig_h, fig_w)
if figratio_h_w >= realratio_h_w:
# we have margins (blank space) above and below
# thus, we assume:
map_w = fig_w
# and we calculate map_h
map_h = map_w * realratio_h_w
# assume there is the same amount of space above and below:
vpad = (fig_h - map_h) / 2.0
# hpad is zero:
hpad = 0
else:
# we have margins (blank space) left and right
# thus, we assume:
map_h = fig_h
# and consequently:
map_w = map_h / realratio_h_w
# assume there is the same amount of space above and below:
hpad = (fig_w - map_w) / 2.0
# wpad is zero:
vpad = 0
# calculate new fig dimensions EXACTLY as contour of the map
new_fig_w = fig_w - 2 * hpad
new_fig_h = fig_h - 2 * vpad
# now margins:
marginz = parse_margins(figmargins) # margins are in fontheight units. Get font height:
fontsize_inch = 0
if len(np.nonzero(marginz)[0]):
# Calculate the font size in pixels.
# We want to be consistent with matplotlib way of getting fontsize.
# inspecting matplotlib.legend.Legend.draw we end up with:
# 1. Get the renderer
rend = fig.canvas.get_renderer()
# 2. get the fontsize in points. We might use `fontsize` but it might be None and we want
# the default in case. There are several 'defaults' (rcParams['font.size'],
# rcParams["legend.fontsize"])... we don't care for now, use the first. How to get
# rcParams['font.size'] ? Either this: (see at matplotlib.Legend.__init__):
# fontsize_pt = FontProperties(size=fontsize, weight=fontweight).get_size_in_points()
# or simply do:
fontsize_pt = fontsize or rcParams['font.size']
# Now use renderer to convert to pixels:
# For info see matplotlib.text.Text.get_window_extent
fontsize_px = rend.points_to_pixels(fontsize_pt)
# finally inches:
fontsize_inch = pix2inch(rend.points_to_pixels(fontsize_px), fig)
# calculate insets in inches (top right bottom left)
insets_inch = marginz * fontsize_inch
# set to fig dimensions
new_fig_w += insets_inch[1] + insets_inch[3]
new_fig_h += insets_inch[0] + insets_inch[2]
fig.set_size_inches(new_fig_w, new_fig_h, forward=True)
# (forward necessary if fig is in GUI, let's set for safety)
# now the axes which are relative to the figure. Thus first normalize inches:
insets_inch /= [fig_h, fig_w, fig_h, fig_w]
# pos1 = map_ax.get_position() # get the original position
# NOTE: it seems that pos[0], pos[1] indicate the x and y of the LOWER LEFT corner, not
# upper left!
pos2 = [insets_inch[3], insets_inch[2],
1 - (insets_inch[1] + insets_inch[3]),
1 - (insets_inch[0] + insets_inch[2])]
map_ax.set_position(pos2)
if title:
plt.suptitle(title)
if show:
plt.show()
return fig
| gpl-3.0 |
horstjens/internationalopenmagazine | plugins/tag_cloud/tag_cloud.py | 39 | 2377 | '''
tag_cloud
===================================
This plugin generates a tag cloud from available tags
'''
from __future__ import unicode_literals
from collections import defaultdict
from operator import attrgetter, itemgetter
import logging
import math
import random
from pelican import signals, contents
logger = logging.getLogger(__name__)
def set_default_settings(settings):
    """Fill *settings* with the plugin's defaults for any missing keys."""
    defaults = (
        ('TAG_CLOUD_STEPS', 4),
        ('TAG_CLOUD_MAX_ITEMS', 100),
        ('TAG_CLOUD_SORTING', 'random'),
    )
    for option, value in defaults:
        settings.setdefault(option, value)
def init_default_config(pelican):
    """Apply tag-cloud defaults to Pelican's global and instance settings."""
    from pelican.settings import DEFAULT_CONFIG
    set_default_settings(DEFAULT_CONFIG)
    if pelican:
        set_default_settings(pelican.settings)
def generate_tag_cloud(generator):
    """Compute ``generator.tag_cloud`` from article tags and publish it.

    Produces a list of ``(tag, size)`` pairs where ``size`` is a bucket
    in ``1..TAG_CLOUD_STEPS`` derived from the tag's relative frequency,
    ordered according to ``TAG_CLOUD_SORTING``.
    """
    counts = defaultdict(int)
    for article in generator.articles:
        for tag in getattr(article, 'tags', []):
            counts[tag] += 1
    # Most frequent tags first, truncated to the configured maximum.
    ranked = sorted(counts.items(), key=itemgetter(1), reverse=True)
    ranked = ranked[:generator.settings.get('TAG_CLOUD_MAX_ITEMS')]
    occurrences = [count for _, count in ranked]
    if occurrences:
        max_count = max(occurrences)
        steps = generator.settings.get('TAG_CLOUD_STEPS')
        # Logarithmic bucketing; the `or 1` guards against log(1) == 0.
        log_max = math.log(max_count) or 1
        ranked = [
            (tag,
             int(math.floor(steps - (steps - 1) * math.log(count) / log_max)))
            for tag, count in ranked
        ]
    sorting = generator.settings.get('TAG_CLOUD_SORTING')
    if sorting == 'alphabetically':
        ranked.sort(key=lambda item: item[0].name)
    elif sorting == 'alphabetically-rev':
        ranked.sort(key=lambda item: item[0].name, reverse=True)
    elif sorting == 'size':
        ranked.sort(key=lambda item: item[1])
    elif sorting == 'size-rev':
        ranked.sort(key=lambda item: item[1], reverse=True)
    elif sorting == 'random':
        random.shuffle(ranked)
    else:
        logger.warning("setting for TAG_CLOUD_SORTING not recognized: %s, "
                       "falling back to 'random'", sorting)
        random.shuffle(ranked)
    # make available in context
    generator.tag_cloud = ranked
    generator._update_context(['tag_cloud'])
def register():
    """Hook the plugin's handlers into Pelican's signal pipeline."""
    for signal, handler in (
        (signals.initialized, init_default_config),
        (signals.article_generator_finalized, generate_tag_cloud),
    ):
        signal.connect(handler)
| gpl-3.0 |
theatlantic/django-categories | categories/editor/templatetags/admin_tree_list_tags.py | 8 | 6665 | import django
from django.db import models
from django.template import Library
from django.contrib.admin.templatetags.admin_list import result_headers, _boolean_icon
try:
from django.contrib.admin.util import lookup_field, display_for_field, label_for_field
except ImportError:
from categories.editor.utils import lookup_field, display_for_field, label_for_field
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
from django.core.exceptions import ObjectDoesNotExist
from django.utils.encoding import smart_unicode, force_unicode
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from categories.editor import settings
register = Library()
TREE_LIST_RESULTS_TEMPLATE = 'admin/editor/tree_list_results.html'
if settings.IS_GRAPPELLI_INSTALLED:
TREE_LIST_RESULTS_TEMPLATE = 'admin/editor/grappelli_tree_list_results.html'
def items_for_tree_result(cl, result, form):
    """
    Generates the actual list of data.

    Yields one HTML table cell (as a safe string) per entry in
    ``cl.list_display`` for a single change-list row ``result``.  The code
    branches on ``django.VERSION[1]`` because Django 1.4 changed both the
    action-checkbox rendering and the th/td choice for the first column.
    """
    first = True
    pk = cl.lookup_opts.pk.attname
    for field_name in cl.list_display:
        row_class = ''
        try:
            f, attr, value = lookup_field(field_name, result, cl.model_admin)
        except (AttributeError, ObjectDoesNotExist):
            result_repr = EMPTY_CHANGELIST_VALUE
        else:
            if f is None:
                # Non-model field: a callable / admin method supplied the value.
                if django.VERSION[1] == 4:
                    if field_name == 'action_checkbox':
                        row_class = ' class="action-checkbox disclosure"'
                allow_tags = getattr(attr, 'allow_tags', False)
                boolean = getattr(attr, 'boolean', False)
                if boolean:
                    allow_tags = True
                    result_repr = _boolean_icon(value)
                else:
                    result_repr = smart_unicode(value)
                # Strip HTML tags in the resulting text, except if the
                # function has an "allow_tags" attribute set to True.
                if not allow_tags:
                    result_repr = escape(result_repr)
                else:
                    result_repr = mark_safe(result_repr)
            else:
                # Real model field: format via Django's field display helpers.
                if value is None:
                    result_repr = EMPTY_CHANGELIST_VALUE
                if isinstance(f.rel, models.ManyToOneRel):
                    result_repr = escape(getattr(result, f.name))
                else:
                    result_repr = display_for_field(value, f)
                if isinstance(f, models.DateField) or isinstance(f, models.TimeField):
                    row_class = ' class="nowrap"'
        if first:
            # Pre-1.4 the checkbox is rendered inline in the first cell, so
            # fetch it here and tag the cell with the 'disclosure' class.
            if django.VERSION[1] < 4:
                try:
                    f, attr, checkbox_value = lookup_field('action_checkbox', result, cl.model_admin)
                    if row_class:
                        row_class = "%s%s" % (row_class[:-1], ' disclosure"')
                    else:
                        row_class = ' class="disclosure"'
                except (AttributeError, ObjectDoesNotExist):
                    pass
        if force_unicode(result_repr) == '':
            result_repr = mark_safe(' ')
        # If list_display_links not defined, add the link tag to the first field
        if (first and not cl.list_display_links) or field_name in cl.list_display_links:
            if django.VERSION[1] < 4:
                table_tag = 'td'  # {True:'th', False:'td'}[first]
            else:
                table_tag = {True: 'th', False: 'td'}[first]
            url = cl.url_for_result(result)
            # Convert the pk to something that can be used in Javascript.
            # Problem cases are long ints (23L) and non-ASCII strings.
            if cl.to_field:
                attr = str(cl.to_field)
            else:
                attr = pk
            value = result.serializable_value(attr)
            result_id = repr(force_unicode(value))[1:]
            first = False
            if django.VERSION[1] < 4:
                yield mark_safe(u'<%s%s>%s<a href="%s"%s>%s</a></%s>' % \
                    (table_tag, row_class, checkbox_value, url, (cl.is_popup and ' onclick="opener.dismissRelatedLookupPopup(window, %s); return false;"' % result_id or ''), conditional_escape(result_repr), table_tag))
            else:
                yield mark_safe(u'<%s%s><a href="%s"%s>%s</a></%s>' % \
                    (table_tag, row_class, url, (cl.is_popup and ' onclick="opener.dismissRelatedLookupPopup(window, %s); return false;"' % result_id or ''), conditional_escape(result_repr), table_tag))
        else:
            # By default the fields come from ModelAdmin.list_editable, but if we pull
            # the fields out of the form instead of list_editable custom admins
            # can provide fields on a per request basis
            if form and field_name in form.fields:
                bf = form[field_name]
                result_repr = mark_safe(force_unicode(bf.errors) + force_unicode(bf))
            else:
                result_repr = conditional_escape(result_repr)
            yield mark_safe(u'<td%s>%s</td>' % (row_class, result_repr))
    # Trailing hidden pk field cell for editable change lists.
    if form and not form[cl.model._meta.pk.name].is_hidden:
        yield mark_safe(u'<td>%s</td>' % force_unicode(form[cl.model._meta.pk.name]))
class TreeList(list):
    """List subclass so per-row metadata (``pk``, ``parent_pk``) can be
    attached as attributes; builtin ``list`` instances reject attributes."""
    pass
def tree_results(cl):
    """Yield one ``TreeList`` row per change-list result.

    Each row carries the rendered cells from ``items_for_tree_result`` plus
    ``pk``/``parent_pk`` attributes so the tree template can nest rows.
    When the change list has a formset (editable list), each result is
    paired with its form; otherwise the form is ``None``.
    """
    if cl.formset:
        pairs = zip(cl.result_list, cl.formset.forms)
    else:
        pairs = ((res, None) for res in cl.result_list)
    for res, form in pairs:
        result = TreeList(items_for_tree_result(cl, res, form))
        if hasattr(res, 'pk'):
            result.pk = res.pk
            # Bugfix: the original assigned the None fallback to
            # ``res.parent_pk`` (the model instance) instead of
            # ``result.parent_pk``, leaving root rows without the
            # attribute the template reads.
            result.parent_pk = res.parent.pk if res.parent else None
        yield result
def result_tree_list(cl):
    """
    Displays the headers and data list together

    Builds the template context for the tree change-list: headers, rows,
    and (on Django >= 1.3) the hidden form fields required by editable
    change lists.
    """
    import django
    result = {'cl': cl,
              'result_headers': list(result_headers(cl)),
              'results': list(tree_results(cl))}
    if django.VERSION[1] > 2:
        # result_hidden_fields only exists from Django 1.3 onwards.
        from django.contrib.admin.templatetags.admin_list import result_hidden_fields
        result['result_hidden_fields'] = list(result_hidden_fields(cl))
    return result
# Register as an inclusion tag rendering the (Grappelli-aware) template.
result_tree_list = register.inclusion_tag(TREE_LIST_RESULTS_TEMPLATE)(result_tree_list)
| apache-2.0 |
poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/boto-2.19.0-py2.7.egg/boto/mws/connection.py | 2 | 34595 | # Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import xml.sax
import hashlib
import base64
import string
from boto.connection import AWSQueryConnection
from boto.mws.exception import ResponseErrorFactory
from boto.mws.response import ResponseFactory, ResponseElement
from boto.handler import XmlHandler
import boto.mws.response
__all__ = ['MWSConnection']
# Maps each MWS API section to (API version, seller-id parameter name,
# request path); consumed by the api_action decorator below.
api_version_path = {
    'Feeds': ('2009-01-01', 'Merchant', '/'),
    'Reports': ('2009-01-01', 'Merchant', '/'),
    'Orders': ('2011-01-01', 'SellerId', '/Orders/2011-01-01'),
    'Products': ('2011-10-01', 'SellerId', '/Products/2011-10-01'),
    'Sellers': ('2011-07-01', 'SellerId', '/Sellers/2011-07-01'),
    'Inbound': ('2010-10-01', 'SellerId',
                '/FulfillmentInboundShipment/2010-10-01'),
    'Outbound': ('2010-10-01', 'SellerId',
                 '/FulfillmentOutboundShipment/2010-10-01'),
    'Inventory': ('2010-10-01', 'SellerId',
                  '/FulfillmentInventory/2010-10-01'),
}
# Base64-encoded MD5 digest, used for the Content-MD5 header on uploads.
content_md5 = lambda c: base64.encodestring(hashlib.md5(c).digest()).strip()
# Metadata attributes the decorators propagate onto wrapped API methods.
decorated_attrs = ('action', 'response', 'section',
                   'quota', 'restore', 'version')
# Filled in by api_action: CamelCase action name -> Python method name.
api_call_map = {}
def add_attrs_from(func, to):
    """Copy the MWS decorator metadata attributes from *func* onto *to*.

    Attributes missing on *func* are copied as ``None``.  Returns *to*
    so the call composes inline when building wrapper chains.
    """
    metadata = dict((name, getattr(func, name, None)) for name in decorated_attrs)
    for name, value in metadata.items():
        setattr(to, name, value)
    return to
def structured_lists(*fields):
    """Decorator factory: expand list-valued kwargs into indexed MWS params.

    Each spec looks like ``'FeedTypeList.Type'``: a kwarg ``FeedTypeList``
    holding ``[a, b]`` is replaced by ``FeedTypeList.Type.1=a`` and
    ``FeedTypeList.Type.2=b`` before the wrapped call runs.
    """
    def decorator(func):
        def wrapper(self, *args, **kw):
            for spec in fields:
                key, accessor = spec.split('.')
                if key not in kw:
                    continue
                prefix = key + '.' + accessor + (accessor and '.' or '')
                for index, item in enumerate(kw.pop(key), 1):
                    kw[prefix + str(index)] = item
            return func(self, *args, **kw)
        wrapper.__doc__ = "{0}\nLists: {1}".format(func.__doc__,
                                                   ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
def http_body(field):
    """Decorator factory: move kwarg *field* into an HTTP body.

    Requires both *field* and ``content_type`` kwargs; installs matching
    Content-Type and Content-MD5 headers for the request.
    """
    def decorator(func):
        def wrapper(*args, **kw):
            missing = [name for name in (field, 'content_type')
                       if name not in kw]
            if missing:
                message = "{0} requires {1} and content_type arguments for " \
                          "building HTTP body".format(func.action, field)
                raise KeyError(message)
            kw['body'] = kw.pop(field)
            kw['headers'] = {
                'Content-Type': kw.pop('content_type'),
                'Content-MD5': content_md5(kw['body']),
            }
            return func(*args, **kw)
        wrapper.__doc__ = "{0}\nRequired HTTP Body: " \
                          "{1}".format(func.__doc__, field)
        return add_attrs_from(func, to=wrapper)
    return decorator
def destructure_object(value, into=None, prefix=''):
    """Flatten *value* into dotted MWS query parameters inside *into*.

    ResponseElement instances contribute their public attributes,
    sequences become 1-based indexed keys, booleans are lowercased, and
    scalars are stored as-is under *prefix*.
    """
    # Bugfix: the original used the mutable default ``into={}``, which is
    # shared across calls that omit the argument.
    if into is None:
        into = {}
    if isinstance(value, ResponseElement):
        for name, attr in value.__dict__.items():
            if name.startswith('_'):
                continue
            destructure_object(attr, into=into, prefix=prefix + '.' + name)
    elif isinstance(value, (list, set, tuple)):
        # Bugfix: iterate with enumerate instead of indexing value[i];
        # sets were accepted by the type check but are not indexable.
        for offset, element in enumerate(value, 1):
            destructure_object(element, into=into,
                               prefix=prefix + '.' + str(offset))
    elif isinstance(value, bool):
        into[prefix] = str(value).lower()
    else:
        into[prefix] = value
def structured_objects(*fields):
    """Decorator factory: flatten object-valued kwargs into dotted params."""
    def decorator(func):
        def wrapper(*args, **kw):
            # Snapshot the present fields first; destructuring mutates kw.
            present = [name for name in fields if name in kw]
            for name in present:
                destructure_object(kw.pop(name), into=kw, prefix=name)
            return func(*args, **kw)
        wrapper.__doc__ = "{0}\nObjects: {1}".format(func.__doc__,
                                                     ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
def requires(*groups):
    """Decorator factory: exactly one argument group must be fully present."""
    def decorator(func):
        def wrapper(*args, **kw):
            satisfied = [group for group in groups
                         if all(name in kw for name in group)]
            if len(satisfied) != 1:
                message = ' OR '.join(['+'.join(g) for g in groups])
                message = "{0} requires {1} argument(s)" \
                          "".format(func.action, message)
                raise KeyError(message)
            return func(*args, **kw)
        message = ' OR '.join(['+'.join(g) for g in groups])
        wrapper.__doc__ = "{0}\nRequired: {1}".format(func.__doc__,
                                                      message)
        return add_attrs_from(func, to=wrapper)
    return decorator
def exclusive(*groups):
    """Decorator factory: at most one argument group may be fully present."""
    def decorator(func):
        def wrapper(*args, **kw):
            satisfied = [group for group in groups
                         if all(name in kw for name in group)]
            if len(satisfied) > 1:
                message = ' OR '.join(['+'.join(g) for g in groups])
                message = "{0} requires either {1}" \
                          "".format(func.action, message)
                raise KeyError(message)
            return func(*args, **kw)
        message = ' OR '.join(['+'.join(g) for g in groups])
        wrapper.__doc__ = "{0}\nEither: {1}".format(func.__doc__,
                                                    message)
        return add_attrs_from(func, to=wrapper)
    return decorator
def dependent(field, *groups):
    """Decorator factory: if *field* is passed, one group must be complete."""
    def decorator(func):
        def wrapper(*args, **kw):
            group_ok = any(all(name in kw for name in group)
                           for group in groups)
            if field in kw and not group_ok:
                message = ' OR '.join(['+'.join(g) for g in groups])
                message = "{0} argument {1} requires {2}" \
                          "".format(func.action, field, message)
                raise KeyError(message)
            return func(*args, **kw)
        message = ' OR '.join(['+'.join(g) for g in groups])
        wrapper.__doc__ = "{0}\n{1} requires: {2}".format(func.__doc__,
                                                          field,
                                                          message)
        return add_attrs_from(func, to=wrapper)
    return decorator
def requires_some_of(*fields):
    """Decorator factory: at least one of *fields* must be passed."""
    def decorator(func):
        def wrapper(*args, **kw):
            if not any(name in kw for name in fields):
                message = "{0} requires at least one of {1} argument(s)" \
                          "".format(func.action, ', '.join(fields))
                raise KeyError(message)
            return func(*args, **kw)
        wrapper.__doc__ = "{0}\nSome Required: {1}".format(func.__doc__,
                                                           ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
def boolean_arguments(*fields):
    """Decorator factory: lowercase boolean kwargs to 'true'/'false' strings."""
    def decorator(func):
        def wrapper(*args, **kw):
            for name in fields:
                if isinstance(kw.get(name), bool):
                    kw[name] = str(kw[name]).lower()
            return func(*args, **kw)
        wrapper.__doc__ = "{0}\nBooleans: {1}".format(func.__doc__,
                                                      ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
def api_action(section, quota, restore, *api):
    """Innermost decorator binding a method to an MWS API action.

    Derives the CamelCase action name from the function name (or from an
    explicit *api override), resolves the API version/path for *section*,
    picks a response class, and injects SellerId/Merchant, Action and
    Version into the call.
    """
    def decorator(func, quota=int(quota), restore=float(restore)):
        version, accesskey, path = api_version_path[section]
        # e.g. 'get_feed_submission_list' -> 'GetFeedSubmissionList'
        action = ''.join(api or map(str.capitalize, func.func_name.split('_')))
        if hasattr(boto.mws.response, action + 'Response'):
            # Prefer a hand-written response class when one exists.
            response = getattr(boto.mws.response, action + 'Response')
        else:
            response = ResponseFactory(action)
        response._action = action

        def wrapper(self, *args, **kw):
            # Fall back to the connection-level seller credential.
            kw.setdefault(accesskey, getattr(self, accesskey, None))
            if kw[accesskey] is None:
                message = "{0} requires {1} argument. Set the " \
                          "MWSConnection.{2} attribute?" \
                          "".format(action, accesskey, accesskey)
                raise KeyError(message)
            kw['Action'] = action
            kw['Version'] = version
            return func(self, path, response, *args, **kw)
        # Propagate metadata (action, quota, ...) from the enclosing scope.
        for attr in decorated_attrs:
            setattr(wrapper, attr, locals().get(attr))
        wrapper.__doc__ = "MWS {0}/{1} API call; quota={2} restore={3:.2f}\n" \
                          "{4}".format(action, version, quota, restore,
                                       func.__doc__)
        api_call_map[action] = func.func_name
        return wrapper
    return decorator
class MWSConnection(AWSQueryConnection):
ResponseError = ResponseErrorFactory
    def __init__(self, *args, **kw):
        # Default to the US production MWS endpoint unless overridden.
        kw.setdefault('host', 'mws.amazonservices.com')
        # 'Merchant' and 'SellerId' are interchangeable across MWS API
        # sections; accept either keyword and mirror it onto both attributes.
        self.Merchant = kw.pop('Merchant', None) or kw.get('SellerId')
        self.SellerId = kw.pop('SellerId', None) or self.Merchant
        AWSQueryConnection.__init__(self, *args, **kw)
    def _required_auth_capability(self):
        # Selects the MWS request-signing scheme in boto's auth machinery.
        return ['mws']
    def post_request(self, path, params, cls, body='', headers={}, isXML=True):
        """Make a POST request, optionally with a content body,
        and return the response, optionally as raw text.
        Modelled off of the inherited get_object/make_request flow.

        Raises ``self.ResponseError`` on an empty body or any non-200
        status; with ``isXML=False`` the raw body is verified against
        the Content-MD5 header and returned unparsed.
        """
        request = self.build_base_http_request('POST', path, None, data=body,
                                               params=params, headers=headers,
                                               host=self.host)
        response = self._mexe(request, override_num_retries=None)
        body = response.read()
        boto.log.debug(body)
        if not body:
            boto.log.error('Null body %s' % body)
            raise self.ResponseError(response.status, response.reason, body)
        if response.status != 200:
            boto.log.error('%s %s' % (response.status, response.reason))
            boto.log.error('%s' % body)
            raise self.ResponseError(response.status, response.reason, body)
        if not isXML:
            # Non-XML payloads (e.g. flat-file reports): verify integrity.
            digest = response.getheader('Content-MD5')
            assert content_md5(body) == digest
            return body
        return self._parse_response(cls, body)
    def _parse_response(self, cls, body):
        # Parse an XML response body into an instance of ``cls`` using
        # boto's SAX handler machinery.
        obj = cls(self)
        h = XmlHandler(obj, self)
        xml.sax.parseString(body, h)
        return obj
def method_for(self, name):
"""Return the MWS API method referred to in the argument.
The named method can be in CamelCase or underlined_lower_case.
This is the complement to MWSConnection.any_call.action
"""
action = '_' in name and string.capwords(name, '_') or name
if action in api_call_map:
return getattr(self, api_call_map[action])
return None
    def iter_call(self, call, *args, **kw):
        """Pass a call name as the first argument and a generator
        is returned for the initial response and any continuation
        call responses made using the NextToken.
        """
        # Resolve the CamelCase or underscored name to the bound method.
        method = self.method_for(call)
        assert method, 'No call named "{0}"'.format(call)
        return self.iter_response(method(*args, **kw))
    def iter_response(self, response):
        """Pass a call's response as the initial argument and a
        generator is returned for the initial response and any
        continuation call responses made using the NextToken.
        """
        yield response
        # MWS pagination: <Action>ByNextToken continues the listing while
        # the result advertises HasNext == 'true'.
        more = self.method_for(response._action + 'ByNextToken')
        while more and response._result.HasNext == 'true':
            response = more(NextToken=response._result.NextToken)
            yield response
@boolean_arguments('PurgeAndReplace')
@http_body('FeedContent')
@structured_lists('MarketplaceIdList.Id')
@requires(['FeedType'])
@api_action('Feeds', 15, 120)
def submit_feed(self, path, response, headers={}, body='', **kw):
"""Uploads a feed for processing by Amazon MWS.
"""
return self.post_request(path, kw, response, body=body,
headers=headers)
    @structured_lists('FeedSubmissionIdList.Id', 'FeedTypeList.Type',
                      'FeedProcessingStatusList.Status')
    @api_action('Feeds', 10, 45)
    def get_feed_submission_list(self, path, response, **kw):
        """Returns a list of all feed submissions submitted in the
        previous 90 days.

        Optional list filters are expanded by ``structured_lists``.
        """
        return self.post_request(path, kw, response)
    @requires(['NextToken'])
    @api_action('Feeds', 0, 0)
    def get_feed_submission_list_by_next_token(self, path, response, **kw):
        """Returns a list of feed submissions using the NextToken parameter.

        Continuation call for ``get_feed_submission_list`` pagination.
        """
        return self.post_request(path, kw, response)
    @structured_lists('FeedTypeList.Type', 'FeedProcessingStatusList.Status')
    @api_action('Feeds', 10, 45)
    def get_feed_submission_count(self, path, response, **kw):
        """Returns a count of the feeds submitted in the previous 90 days.
        """
        return self.post_request(path, kw, response)
    @structured_lists('FeedSubmissionIdList.Id', 'FeedTypeList.Type')
    @api_action('Feeds', 10, 45)
    def cancel_feed_submissions(self, path, response, **kw):
        """Cancels one or more feed submissions and returns a
        count of the feed submissions that were canceled.
        """
        return self.post_request(path, kw, response)
    @requires(['FeedSubmissionId'])
    @api_action('Feeds', 15, 60)
    def get_feed_submission_result(self, path, response, **kw):
        """Returns the feed processing report.

        The report is returned raw (MD5-verified), not parsed as XML.
        """
        return self.post_request(path, kw, response, isXML=False)
def get_service_status(self, **kw):
"""Instruct the user on how to get service status.
"""
sections = ', '.join(map(str.lower, api_version_path.keys()))
message = "Use {0}.get_(section)_service_status(), " \
"where (section) is one of the following: " \
"{1}".format(self.__class__.__name__, sections)
raise AttributeError(message)
# -- Reports API -------------------------------------------------------------

@structured_lists('MarketplaceIdList.Id')
@boolean_arguments('ReportOptions=ShowSalesChannel')
@requires(['ReportType'])
@api_action('Reports', 15, 60)
def request_report(self, path, response, **kw):
    """Creates a report request and submits the request to Amazon MWS.
    """
    return self.post_request(path, kw, response)

@structured_lists('ReportRequestIdList.Id', 'ReportTypeList.Type',
                  'ReportProcessingStatusList.Status')
@api_action('Reports', 10, 45)
def get_report_request_list(self, path, response, **kw):
    """Returns a list of report requests that you can use to get the
    ReportRequestId for a report.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Reports', 0, 0)
def get_report_request_list_by_next_token(self, path, response, **kw):
    """Returns a list of report requests using the NextToken,
    which was supplied by a previous request to either
    GetReportRequestListByNextToken or GetReportRequestList, where
    the value of HasNext was true in that previous request.
    """
    return self.post_request(path, kw, response)

@structured_lists('ReportTypeList.Type',
                  'ReportProcessingStatusList.Status')
@api_action('Reports', 10, 45)
def get_report_request_count(self, path, response, **kw):
    """Returns a count of report requests that have been submitted
    to Amazon MWS for processing.
    """
    return self.post_request(path, kw, response)

# NOTE(review): unlike cancel_feed_submissions, this method has no
# @structured_lists('ReportRequestIdList.Id', ...) decorator, so list
# arguments must already be in flattened form -- confirm intentional.
@api_action('Reports', 10, 45)
def cancel_report_requests(self, path, response, **kw):
    """Cancel one or more report requests, returning the count of the
    canceled report requests and the report request information.
    """
    return self.post_request(path, kw, response)

@boolean_arguments('Acknowledged')
@structured_lists('ReportRequestIdList.Id', 'ReportTypeList.Type')
@api_action('Reports', 10, 60)
def get_report_list(self, path, response, **kw):
    """Returns a list of reports that were created in the previous
    90 days that match the query parameters.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Reports', 0, 0)
def get_report_list_by_next_token(self, path, response, **kw):
    """Returns a list of reports using the NextToken, which
    was supplied by a previous request to either
    GetReportListByNextToken or GetReportList, where the
    value of HasNext was true in the previous call.
    """
    return self.post_request(path, kw, response)

@boolean_arguments('Acknowledged')
@structured_lists('ReportTypeList.Type')
@api_action('Reports', 10, 45)
def get_report_count(self, path, response, **kw):
    """Returns a count of the reports, created in the previous 90 days,
    with a status of _DONE_ and that are available for download.
    """
    return self.post_request(path, kw, response)

@requires(['ReportId'])
@api_action('Reports', 15, 60)
def get_report(self, path, response, **kw):
    """Returns the contents of a report.
    """
    # isXML=False: report contents are delivered raw (e.g. tab-delimited),
    # not as a parsed XML response.
    return self.post_request(path, kw, response, isXML=False)

@requires(['ReportType', 'Schedule'])
@api_action('Reports', 10, 45)
def manage_report_schedule(self, path, response, **kw):
    """Creates, updates, or deletes a report request schedule for
    a specified report type.
    """
    return self.post_request(path, kw, response)

@structured_lists('ReportTypeList.Type')
@api_action('Reports', 10, 45)
def get_report_schedule_list(self, path, response, **kw):
    """Returns a list of order report requests that are scheduled
    to be submitted to Amazon MWS for processing.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Reports', 0, 0)
def get_report_schedule_list_by_next_token(self, path, response, **kw):
    """Returns a list of report requests using the NextToken,
    which was supplied by a previous request to either
    GetReportScheduleListByNextToken or GetReportScheduleList,
    where the value of HasNext was true in that previous request.
    """
    return self.post_request(path, kw, response)

@structured_lists('ReportTypeList.Type')
@api_action('Reports', 10, 45)
def get_report_schedule_count(self, path, response, **kw):
    """Returns a count of order report requests that are scheduled
    to be submitted to Amazon MWS.
    """
    return self.post_request(path, kw, response)

@boolean_arguments('Acknowledged')
@requires(['ReportIdList'])
@structured_lists('ReportIdList.Id')
@api_action('Reports', 10, 45)
def update_report_acknowledgements(self, path, response, **kw):
    """Updates the acknowledged status of one or more reports.
    """
    return self.post_request(path, kw, response)
# -- Fulfillment Inbound Shipment & Inventory APIs ---------------------------

@requires(['ShipFromAddress', 'InboundShipmentPlanRequestItems'])
@structured_objects('ShipFromAddress', 'InboundShipmentPlanRequestItems')
@api_action('Inbound', 30, 0.5)
def create_inbound_shipment_plan(self, path, response, **kw):
    """Returns the information required to create an inbound shipment.
    """
    return self.post_request(path, kw, response)

@requires(['ShipmentId', 'InboundShipmentHeader', 'InboundShipmentItems'])
@structured_objects('InboundShipmentHeader', 'InboundShipmentItems')
@api_action('Inbound', 30, 0.5)
def create_inbound_shipment(self, path, response, **kw):
    """Creates an inbound shipment.
    """
    return self.post_request(path, kw, response)

@requires(['ShipmentId'])
@structured_objects('InboundShipmentHeader', 'InboundShipmentItems')
@api_action('Inbound', 30, 0.5)
def update_inbound_shipment(self, path, response, **kw):
    """Updates an existing inbound shipment. Amazon documentation
    is ambiguous as to whether the InboundShipmentHeader and
    InboundShipmentItems arguments are required.
    """
    return self.post_request(path, kw, response)

@requires_some_of('ShipmentIdList', 'ShipmentStatusList')
@structured_lists('ShipmentIdList.Id', 'ShipmentStatusList.Status')
@api_action('Inbound', 30, 0.5)
def list_inbound_shipments(self, path, response, **kw):
    """Returns a list of inbound shipments based on criteria that
    you specify.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Inbound', 30, 0.5)
def list_inbound_shipments_by_next_token(self, path, response, **kw):
    """Returns the next page of inbound shipments using the NextToken
    parameter.
    """
    return self.post_request(path, kw, response)

@requires(['ShipmentId'], ['LastUpdatedAfter', 'LastUpdatedBefore'])
@api_action('Inbound', 30, 0.5)
def list_inbound_shipment_items(self, path, response, **kw):
    """Returns a list of items in a specified inbound shipment, or a
    list of items that were updated within a specified time frame.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Inbound', 30, 0.5)
def list_inbound_shipment_items_by_next_token(self, path, response, **kw):
    """Returns the next page of inbound shipment items using the
    NextToken parameter.
    """
    return self.post_request(path, kw, response)

@api_action('Inbound', 2, 300, 'GetServiceStatus')
def get_inbound_service_status(self, path, response, **kw):
    """Returns the operational status of the Fulfillment Inbound
    Shipment API section.
    """
    return self.post_request(path, kw, response)

@requires(['SellerSkus'], ['QueryStartDateTime'])
@structured_lists('SellerSkus.member')
@api_action('Inventory', 30, 0.5)
def list_inventory_supply(self, path, response, **kw):
    """Returns information about the availability of a seller's
    inventory.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Inventory', 30, 0.5)
def list_inventory_supply_by_next_token(self, path, response, **kw):
    """Returns the next page of information about the availability
    of a seller's inventory using the NextToken parameter.
    """
    return self.post_request(path, kw, response)

@api_action('Inventory', 2, 300, 'GetServiceStatus')
def get_inventory_service_status(self, path, response, **kw):
    """Returns the operational status of the Fulfillment Inventory
    API section.
    """
    return self.post_request(path, kw, response)
# -- Fulfillment Outbound Shipment API ---------------------------------------

@requires(['PackageNumber'])
@api_action('Outbound', 30, 0.5)
def get_package_tracking_details(self, path, response, **kw):
    """Returns delivery tracking information for a package in
    an outbound shipment for a Multi-Channel Fulfillment order.
    """
    return self.post_request(path, kw, response)

@structured_objects('Address', 'Items')
@requires(['Address', 'Items'])
@api_action('Outbound', 30, 0.5)
def get_fulfillment_preview(self, path, response, **kw):
    """Returns a list of fulfillment order previews based on items
    and shipping speed categories that you specify.
    """
    return self.post_request(path, kw, response)

@structured_objects('DestinationAddress', 'Items')
@requires(['SellerFulfillmentOrderId', 'DisplayableOrderId',
           'ShippingSpeedCategory', 'DisplayableOrderDateTime',
           'DestinationAddress', 'DisplayableOrderComment',
           'Items'])
@api_action('Outbound', 30, 0.5)
def create_fulfillment_order(self, path, response, **kw):
    """Requests that Amazon ship items from the seller's inventory
    to a destination address.
    """
    return self.post_request(path, kw, response)

@requires(['SellerFulfillmentOrderId'])
@api_action('Outbound', 30, 0.5)
def get_fulfillment_order(self, path, response, **kw):
    """Returns a fulfillment order based on a specified
    SellerFulfillmentOrderId.
    """
    return self.post_request(path, kw, response)

@api_action('Outbound', 30, 0.5)
def list_all_fulfillment_orders(self, path, response, **kw):
    """Returns a list of fulfillment orders fulfilled after (or
    at) a specified date or by fulfillment method.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Outbound', 30, 0.5)
def list_all_fulfillment_orders_by_next_token(self, path, response, **kw):
    """Returns the next page of fulfillment orders using the
    NextToken parameter.
    """
    return self.post_request(path, kw, response)

@requires(['SellerFulfillmentOrderId'])
@api_action('Outbound', 30, 0.5)
def cancel_fulfillment_order(self, path, response, **kw):
    """Requests that Amazon stop attempting to fulfill an existing
    fulfillment order.
    """
    return self.post_request(path, kw, response)

@api_action('Outbound', 2, 300, 'GetServiceStatus')
def get_outbound_service_status(self, path, response, **kw):
    """Returns the operational status of the Fulfillment Outbound
    API section.
    """
    return self.post_request(path, kw, response)
@requires(['CreatedAfter'], ['LastUpdatedAfter'])
@exclusive(['CreatedAfter'], ['LastUpdatedAfter'])
@dependent('CreatedBefore', ['CreatedAfter'])
@exclusive(['LastUpdatedAfter'], ['BuyerEmail'], ['SellerOrderId'])
@dependent('LastUpdatedBefore', ['LastUpdatedAfter'])
@exclusive(['CreatedAfter'], ['LastUpdatedBefore'])
@requires(['MarketplaceId'])
@structured_objects('OrderTotal', 'ShippingAddress',
                    'PaymentExecutionDetail')
@structured_lists('MarketplaceId.Id', 'OrderStatus.Status',
                  'FulfillmentChannel.Channel', 'PaymentMethod.')
@api_action('Orders', 6, 60)
def list_orders(self, path, response, **kw):
    """Returns a list of orders created or updated during a time
    frame that you specify.

    BuyerEmail and SellerOrderId are mutually exclusive with each
    other and with the filter/date parameters in `toggle` below;
    a violation raises AssertionError before any request is made.
    """
    # Parameters that may not be combined with BuyerEmail/SellerOrderId.
    toggle = set(('FulfillmentChannel.Channel.1',
                  'OrderStatus.Status.1', 'PaymentMethod.1',
                  'LastUpdatedAfter', 'LastUpdatedBefore'))
    for do, dont in {
        'BuyerEmail': toggle.union(['SellerOrderId']),
        'SellerOrderId': toggle.union(['BuyerEmail']),
    }.items():
        # Bug fix: this guard used ``filter(kw.has_key, dont)``.
        # ``dict.has_key`` no longer exists on Python 3 and, even if it
        # did, ``filter`` there returns a lazy iterator that is *always*
        # truthy, so the assertion would have fired whenever `do` was
        # present regardless of any actual conflict.  ``any`` expresses
        # the intended "at least one conflicting key" test on both
        # Python 2 and 3.
        if do in kw and any(k in kw for k in dont):
            message = "Don't include {0} when specifying " \
                      "{1}".format(' or '.join(dont), do)
            raise AssertionError(message)
    return self.post_request(path, kw, response)
# -- Orders API: paging and item queries -------------------------------------

@requires(['NextToken'])
@api_action('Orders', 6, 60)
def list_orders_by_next_token(self, path, response, **kw):
    """Returns the next page of orders using the NextToken value
    that was returned by your previous request to either
    ListOrders or ListOrdersByNextToken.
    """
    return self.post_request(path, kw, response)

@requires(['AmazonOrderId'])
@structured_lists('AmazonOrderId.Id')
@api_action('Orders', 6, 60)
def get_order(self, path, response, **kw):
    """Returns an order for each AmazonOrderId that you specify.
    """
    return self.post_request(path, kw, response)

@requires(['AmazonOrderId'])
@api_action('Orders', 30, 2)
def list_order_items(self, path, response, **kw):
    """Returns order item information for an AmazonOrderId that
    you specify.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Orders', 30, 2)
def list_order_items_by_next_token(self, path, response, **kw):
    """Returns the next page of order items using the NextToken
    value that was returned by your previous request to either
    ListOrderItems or ListOrderItemsByNextToken.
    """
    return self.post_request(path, kw, response)

@api_action('Orders', 2, 300, 'GetServiceStatus')
def get_orders_service_status(self, path, response, **kw):
    """Returns the operational status of the Orders API section.
    """
    return self.post_request(path, kw, response)
# -- Products API -------------------------------------------------------------

@requires(['MarketplaceId', 'Query'])
@api_action('Products', 20, 20)
def list_matching_products(self, path, response, **kw):
    """Returns a list of products and their attributes, ordered
    by relevancy, based on a search query that you specify.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'ASINList'])
@structured_lists('ASINList.ASIN')
@api_action('Products', 20, 20)
def get_matching_product(self, path, response, **kw):
    """Returns a list of products and their attributes, based on
    a list of ASIN values that you specify.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'IdType', 'IdList'])
@structured_lists('IdList.Id')
@api_action('Products', 20, 20)
def get_matching_product_for_id(self, path, response, **kw):
    """Returns a list of products and their attributes, based on
    a list of Product IDs that you specify.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'SellerSKUList'])
@structured_lists('SellerSKUList.SellerSKU')
@api_action('Products', 20, 10, 'GetCompetitivePricingForSKU')
def get_competitive_pricing_for_sku(self, path, response, **kw):
    """Returns the current competitive pricing of a product,
    based on the SellerSKUs and MarketplaceId that you specify.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'ASINList'])
@structured_lists('ASINList.ASIN')
@api_action('Products', 20, 10, 'GetCompetitivePricingForASIN')
def get_competitive_pricing_for_asin(self, path, response, **kw):
    """Returns the current competitive pricing of a product,
    based on the ASINs and MarketplaceId that you specify.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'SellerSKUList'])
@structured_lists('SellerSKUList.SellerSKU')
@api_action('Products', 20, 5, 'GetLowestOfferListingsForSKU')
def get_lowest_offer_listings_for_sku(self, path, response, **kw):
    """Returns the lowest price offer listings for a specific
    product by item condition and SellerSKUs.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'ASINList'])
@structured_lists('ASINList.ASIN')
@api_action('Products', 20, 5, 'GetLowestOfferListingsForASIN')
def get_lowest_offer_listings_for_asin(self, path, response, **kw):
    """Returns the lowest price offer listings for a specific
    product by item condition and ASINs.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'SellerSKU'])
@api_action('Products', 20, 20, 'GetProductCategoriesForSKU')
def get_product_categories_for_sku(self, path, response, **kw):
    """Returns the product categories that a SellerSKU belongs to.
    """
    return self.post_request(path, kw, response)

@requires(['MarketplaceId', 'ASIN'])
@api_action('Products', 20, 20, 'GetProductCategoriesForASIN')
def get_product_categories_for_asin(self, path, response, **kw):
    """Returns the product categories that an ASIN belongs to.
    """
    return self.post_request(path, kw, response)

@api_action('Products', 2, 300, 'GetServiceStatus')
def get_products_service_status(self, path, response, **kw):
    """Returns the operational status of the Products API section.
    """
    return self.post_request(path, kw, response)
# -- Sellers API --------------------------------------------------------------

@api_action('Sellers', 15, 60)
def list_marketplace_participations(self, path, response, **kw):
    """Returns a list of marketplaces that the seller submitting
    the request can sell in, and a list of participations that
    include seller-specific information in that marketplace.
    """
    return self.post_request(path, kw, response)

@requires(['NextToken'])
@api_action('Sellers', 15, 60)
def list_marketplace_participations_by_next_token(self, path, response,
                                                  **kw):
    """Returns the next page of marketplaces and participations
    using the NextToken value that was returned by your
    previous request to either ListMarketplaceParticipations
    or ListMarketplaceParticipationsByNextToken.
    """
    return self.post_request(path, kw, response)
| apache-2.0 |
Syrcon/servo | components/script/dom/bindings/codegen/parser/tests/test_const.py | 134 | 3000 | import WebIDL
def WebIDLTest(parser, harness):
    """Parse an interface full of const members and verify that every
    const comes out with the expected QName, name, type and value."""
    parser.parse("""
        interface TestConsts {
          const byte zero = 0;
          const byte b = -1;
          const octet o = 2;
          const short s = -3;
          const unsigned short us = 0x4;
          const long l = -0X5;
          const unsigned long ul = 6;
          const unsigned long long ull = 7;
          const long long ll = -010;
          const boolean t = true;
          const boolean f = false;
          const boolean? n = null;
          const boolean? nt = true;
          const boolean? nf = false;
        };
    """)
    results = parser.finish()

    harness.ok(True, "TestConsts interface parsed without error.")
    harness.check(len(results), 1, "Should be one production.")
    iface = results[0]
    harness.ok(isinstance(iface, WebIDL.IDLInterface),
               "Should be an IDLInterface")
    harness.check(iface.identifier.QName(), "::TestConsts", "Interface has the right QName")
    harness.check(iface.identifier.name, "TestConsts", "Interface has the right name")
    harness.check(len(iface.members), 14, "Expect 14 members")

    # (name, expected type string, expected value), in declaration order.
    expectations = [
        ("zero", "Byte", 0),
        ("b", "Byte", -1),
        ("o", "Octet", 2),
        ("s", "Short", -3),
        ("us", "UnsignedShort", 4),
        ("l", "Long", -5),
        ("ul", "UnsignedLong", 6),
        ("ull", "UnsignedLongLong", 7),
        ("ll", "LongLong", -8),
        ("t", "Boolean", True),
        ("f", "Boolean", False),
        ("n", "BooleanOrNull", None),
        ("nt", "BooleanOrNull", True),
        ("nf", "BooleanOrNull", False),
    ]
    for const, (name, typeName, value) in zip(iface.members, expectations):
        harness.ok(isinstance(const, WebIDL.IDLConst),
                   "Should be an IDLConst")
        harness.ok(const.isConst(), "Const is a const")
        harness.ok(not const.isAttr(), "Const is not an attr")
        harness.ok(not const.isMethod(), "Const is not a method")
        harness.check(const.identifier.QName(), "::TestConsts::" + name, "Const has the right QName")
        harness.check(const.identifier.name, name, "Const has the right name")
        harness.check(str(const.type), typeName, "Const has the right type")
        harness.ok(const.type.isPrimitive(), "All consts should be primitive")
        harness.check(str(const.value.type), str(const.type),
                      "Const's value has the same type as the type")
        harness.check(const.value.value, value, "Const value has the right value.")
| mpl-2.0 |
leandro86/epubcreator | epubcreator/epubbase/ebook.py | 1 | 14784 | import os
from lxml import etree
from epubcreator.pyepub.pyepubwriter import epub
from epubcreator.epubbase import ebook_metadata, ebook_data, files, images
from epubcreator.misc import utils
from epubcreator.misc.options import Options, Option
class Ebook(Options):
    """Assembles an epub file following the ePubLibre "epubbase" template
    from an EbookData (sections and images) plus a Metadata object."""

    # Option descriptions are user-facing runtime strings (kept in Spanish).
    OPTIONS = [Option(name="includeOptionalFiles",
                      value=True,
                      description="Indica si los archivos opcionales (dedicatoria.xhtml y autor.xhtml) deben incluirse en el epub "
                                  "incluso si los respectivos campos no fueron ingresados.")]

    def __init__(self, ebookData, metadata=None, **options):
        super().__init__(**options)

        # Fall back to empty containers so every later access is safe.
        self._ebookData = ebookData or ebook_data.EbookData()
        self._metadata = metadata or ebook_metadata.Metadata()

    def save(self, file):
        """
        Generates and saves the epub.

        @param file: a string with the directory in which to save the epub (not the
                     file name, which is generated from the metadata), or a file-like object.

        @return: the path of the generated file, if "file" is a string. If "file" is a
                 file-like object, the epub's file name is returned.
        """
        outputEpub = epub.EpubWriter()

        self._addEpubBaseFiles(outputEpub)
        self._addSectionsAndToc(outputEpub)
        self._addImages(outputEpub)
        self._addMetadata(outputEpub)

        epubName = self._getOutputFileName()

        # Check whether we were given a string (i.e. a directory) or a file-like object.
        if isinstance(file, str):
            fileName = os.path.join(file, epubName)
            outputEpub.generate(fileName)
            return fileName
        else:
            outputEpub.generate(file)
            return epubName

    def _addEpubBaseFiles(self, outputEpub):
        # Resolve every field to either the supplied value or the epubbase default.
        synopsis = self._metadata.synopsis or ebook_metadata.Metadata.DEFAULT_SYNOPSIS
        title = self._metadata.title or ebook_metadata.Metadata.DEFAULT_TITLE
        editor = self._metadata.editor or ebook_metadata.Metadata.DEFAULT_EDITOR
        coverModification = self._metadata.coverModification or ebook_metadata.Metadata.DEFAULT_COVER_MODIFICATION
        coverImage = self._metadata.coverImage or images.CoverImage(files.EpubBaseFiles.getFile(files.EpubBaseFiles.COVER_IMAGE_FILENAME))
        publicationYear = self._metadata.publicationDate.year if self._metadata.publicationDate else ""

        authors = self._metadata.authors or [ebook_metadata.Person(ebook_metadata.Metadata.DEFAULT_AUTHOR, ebook_metadata.Metadata.DEFAULT_AUTHOR)]
        author = self._getPersonsListAsText(authors)[0]
        translator = self._getPersonsListAsText(self._metadata.translators)[0]
        ilustrator = self._getPersonsListAsText(self._metadata.ilustrators)[0]

        # Add the required xhtml files, except autor.xhtml, which must go after the sections.
        outputEpub.addHtmlData(files.EpubBaseFiles.COVER_FILENAME, files.EpubBaseFiles.getFile(files.EpubBaseFiles.COVER_FILENAME))
        outputEpub.addHtmlData(files.EpubBaseFiles.SYNOPSIS_FILENAME, files.EpubBaseFiles.getSynopsis(synopsis))
        outputEpub.addHtmlData(files.EpubBaseFiles.TITLE_FILENAME, files.EpubBaseFiles.getTitle(author,
                                                                                               title,
                                                                                               self._metadata.subtitle,
                                                                                               editor,
                                                                                               self._metadata.collectionName,
                                                                                               self._metadata.subCollectionName,
                                                                                               self._metadata.collectionVolume))
        outputEpub.addHtmlData(files.EpubBaseFiles.INFO_FILENAME, files.EpubBaseFiles.getInfo(self._metadata.originalTitle,
                                                                                             author,
                                                                                             publicationYear,
                                                                                             translator,
                                                                                             ilustrator,
                                                                                             self._metadata.coverDesigner,
                                                                                             coverModification,
                                                                                             editor))

        if self._metadata.dedication or self._options.includeOptionalFiles:
            dedication = self._metadata.dedication or ebook_metadata.Metadata.DEFAULT_DEDICATION
            outputEpub.addHtmlData(files.EpubBaseFiles.DEDICATION_FILENAME, files.EpubBaseFiles.getDedication(dedication))

        outputEpub.addImageData(files.EpubBaseFiles.COVER_IMAGE_FILENAME, coverImage.toBytes())

        # Add the rest of the epubbase support files (logo, ex libris, stylesheet, iBooks display options).
        outputEpub.addImageData(files.EpubBaseFiles.EPL_LOGO_FILENAME, files.EpubBaseFiles.getFile(files.EpubBaseFiles.EPL_LOGO_FILENAME))
        outputEpub.addImageData(files.EpubBaseFiles.EX_LIBRIS_FILENAME, files.EpubBaseFiles.getFile(files.EpubBaseFiles.EX_LIBRIS_FILENAME))
        outputEpub.addStyleData(files.EpubBaseFiles.STYLE_FILENAME, files.EpubBaseFiles.getFile(files.EpubBaseFiles.STYLE_FILENAME))
        outputEpub.addMetaFile(files.EpubBaseFiles.APPLE_XML, files.EpubBaseFiles.getFile(files.EpubBaseFiles.APPLE_XML))

    def _addSectionsAndToc(self, outputEpub):
        def processSections(sections):
            # Writes each section into the epub and builds nested toc entries
            # from the h1..h6 headings it contains.
            navPoints = []
            previousLevel = "1"

            for section in sections:
                outputEpub.addHtmlData(section.name, section.toHtml())
                hs = section.xpath("//h1 | //h2 | //h3 | //h4 | //h5 | //h6")
                for h in hs:
                    currentLevel = h.tag[-1]

                    titleText = self._getTitleText(h)
                    titleId = h.get("id")
                    titleSrc = "{0}{1}".format(section.name, "#" + titleId if titleId else "")

                    if currentLevel == "1":
                        navPoints.append(outputEpub.addNavPoint(titleSrc, titleText))
                    else:
                        # Pop back to the parent nav point for this heading level.
                        if currentLevel < previousLevel:
                            for i in range(int(previousLevel) - int(currentLevel) + 1):
                                navPoints.pop()
                        elif currentLevel == previousLevel:
                            navPoints.pop()

                        childNavPoint = navPoints[-1].addNavPoint(titleSrc, titleText)
                        navPoints.append(childNavPoint)

                    previousLevel = currentLevel

        # The cover must be the first entry in the toc.
        outputEpub.addNavPoint(files.EpubBaseFiles.COVER_FILENAME, "Cubierta")
        # The book title must be the second entry in the toc.
        outputEpub.addNavPoint(files.EpubBaseFiles.TITLE_FILENAME, self._metadata.title or ebook_metadata.Metadata.DEFAULT_TITLE)

        processSections(self._ebookData.iterTextSections())

        authors = self._metadata.authors or [ebook_metadata.Person(ebook_metadata.Metadata.DEFAULT_AUTHOR, ebook_metadata.Metadata.DEFAULT_AUTHOR)]
        authorsWithBiographyOrImage = [a for a in authors if a.biography or a.image or self._options.includeOptionalFiles]

        # One autor.xhtml (plus image) per author that has a biography or image.
        for i, author in enumerate(authorsWithBiographyOrImage):
            biography = author.biography or ebook_metadata.Metadata.DEFAULT_AUTHOR_BIOGRAPHY
            image = author.image or images.AuthorImage(files.EpubBaseFiles.getFile(files.EpubBaseFiles.AUTHOR_IMAGE_FILENAME), allowProcessing=False)

            # Only the first author file gets a heading title.
            title = self._getTocTitleForAuthorFile(authors) if i == 0 else None
            imageName = files.EpubBaseFiles.generateAuthorImageFileName(i)

            authorContent = files.EpubBaseFiles.getAuthor(biography, title, imageName)

            outputEpub.addHtmlData(files.EpubBaseFiles.generateAuthorFileName(i), authorContent)
            outputEpub.addImageData(imageName, image.toBytes())

        if len(authorsWithBiographyOrImage) > 0:
            outputEpub.addNavPoint(files.EpubBaseFiles.AUTHOR_FILENAME, self._getTocTitleForAuthorFile(authors))

        processSections(self._ebookData.iterNotesSections())

    def _addImages(self, outputEpub):
        for image in self._ebookData.iterImages():
            outputEpub.addImageData(image.name, image.content)

    def _addMetadata(self, outputEpub):
        authors = self._metadata.authors or [ebook_metadata.Person(ebook_metadata.Metadata.DEFAULT_AUTHOR, ebook_metadata.Metadata.DEFAULT_AUTHOR)]
        author = self._getPersonsListAsText(authors)

        # Add cover semantics to cubierta.xhtml.
        outputEpub.addReference(files.EpubBaseFiles.COVER_FILENAME, "Cover", "cover")

        # cover.jpg also needs cover semantics, otherwise some ereaders do not
        # recognize it as the cover image.
        outputEpub.addCustomMetadata("cover", files.EpubBaseFiles.COVER_IMAGE_FILENAME)

        outputEpub.addTitle(self._metadata.title or ebook_metadata.Metadata.DEFAULT_TITLE)
        outputEpub.addAuthor(author[0], author[1])
        outputEpub.addLanguage(self._metadata.language or ebook_metadata.Metadata.DEFAULT_LANGUAGE)

        if self._metadata.synopsis:
            # The synopsis (the "description" metadata field) may not contain line
            # breaks. Simply removing them would glue paragraph B to paragraph A,
            # so each line break is replaced with a space instead.
            outputEpub.addDescription(utils.removeTags(self._metadata.synopsis.replace("\n", " ")))
        else:
            outputEpub.addDescription("Sinopsis")

        outputEpub.addPublisher("ePubLibre")

        # The genre type does not matter when a default is needed, since it does
        # not appear in the epub metadata.
        genres = self._metadata.genres or [ebook_metadata.Genre("bla", "Género", "Subgéneros")]

        # Sort genres alphabetically.
        genres.sort(key=lambda x: (x.genreType, x.genre, x.subGenre))

        genresText = []
        previousGenre = ""
        for genre in genres:
            if genre.genre != previousGenre:
                genresText.append(genre.genre)
                previousGenre = genre.genre
            genresText.append(genre.subGenre)

        outputEpub.addSubject(", ".join(genresText))

        if self._metadata.translators:
            translator = self._getPersonsListAsText(self._metadata.translators)
            outputEpub.addTranslator(translator[0], translator[1])

        if self._metadata.ilustrators:
            ilustrator = self._getPersonsListAsText(self._metadata.ilustrators)
            outputEpub.addIlustrator(ilustrator[0], ilustrator[1])

        if self._metadata.publicationDate is not None:
            outputEpub.addPublicationDate(self._metadata.publicationDate)

        if self._metadata.subCollectionName:
            calibreSeries = ""
            if self._metadata.collectionName:
                calibreSeries += "{0}: ".format(self._metadata.collectionName)
            calibreSeries += self._metadata.subCollectionName

            try:
                # Strip leading zeros when the volume is a number.
                series_index = str(int(self._metadata.collectionVolume))
            except ValueError:
                series_index = self._metadata.collectionVolume

            outputEpub.addCustomMetadata("calibre:series", calibreSeries)
            outputEpub.addCustomMetadata("calibre:series_index", series_index)

    def _getOutputFileName(self):
        # Builds the conventional ePubLibre file name:
        # "<authors> - [collection] <title> [<id>] (r1.0 <editor>).epub"
        authors = self._metadata.authors or [ebook_metadata.Person(ebook_metadata.Metadata.DEFAULT_AUTHOR, ebook_metadata.Metadata.DEFAULT_AUTHOR)]

        fileName = []
        authorsFileAs = [author.fileAs for author in authors]
        if len(authorsFileAs) < 3:
            fileName.append(" & ".join(authorsFileAs))
        else:
            # Three or more authors are abbreviated "AA. VV." (varios autores).
            fileName.append("AA. VV.")

        fileName.append(" - ")

        if self._metadata.subCollectionName:
            collection = ""
            if self._metadata.collectionName:
                collection += "[{0}] ".format(self._metadata.collectionName)
            collection += "[{0} {1}] ".format(self._metadata.subCollectionName, self._metadata.collectionVolume)

            # A main collection goes before the author; a serie alone goes before the title.
            if self._metadata.collectionName:
                fileName.insert(0, collection)
            else:
                fileName.append(collection)

        fileName.append(self._metadata.title or ebook_metadata.Metadata.DEFAULT_TITLE)

        bookId = self._metadata.bookId or ebook_metadata.Metadata.DEFAULT_BOOK_ID
        editor = self._metadata.editor or ebook_metadata.Metadata.DEFAULT_EDITOR
        fileName.append(" [{0}] (r1.0 {1})".format(bookId, editor))

        return utils.toFileName("{0}.epub".format("".join(fileName)))

    def _getPersonsListAsText(self, persons):
        """
        Converts a list of Person to text. The persons are joined with an & (ampersand).

        @param persons: a list of Person.

        @return: a tuple whose first element is a string with all the names joined, and
                 whose second element is a string with all the file-as values joined.
        """
        return " & ".join((p.name for p in persons)), " & ".join((p.fileAs for p in persons))

    def _getTocTitleForAuthorFile(self, authors):
        # Heading strings are user-facing runtime text (kept in Spanish).
        if not authors or (len(authors) == 1 and authors[0].gender == ebook_metadata.Person.MALE_GENDER):
            return "Autor"
        else:
            return "Autores" if len(authors) > 1 else "Autora"

    def _getTitleText(self, h):
        """
        Returns the text of a heading, replacing "br" tags with a space.
        """
        if h.xpath("descendant::br"):
            # The element "h" cannot be modified directly; work on a copy
            # instead. A deep copy is another option, but it would end up
            # copying the whole tree...
            h = etree.fromstring(etree.tostring(h))
            for br in h.xpath("descendant::br"):
                br.text = " "
            etree.strip_tags(h, "br")
        return "".join(h.xpath("descendant::text()"))
daishaowei/dpark | dpark/serialize.py | 6 | 9855 | import sys, types
from cStringIO import StringIO
import marshal, new, cPickle
import itertools
from pickle import Pickler, whichmodule
import logging
logger = logging.getLogger(__name__)
class MyPickler(Pickler):
    """Pickler with a per-class dispatch table that custom reducers can be
    plugged into via :meth:`register`."""

    # Copy so the stock Pickler's shared dispatch table is left untouched.
    dispatch = Pickler.dispatch.copy()

    @classmethod
    def register(cls, type, reduce):
        # Install `reduce` as the serializer for instances of `type`.
        def dispatcher(self, obj):
            rv = reduce(obj)
            if isinstance(rv, str):
                # A bare string means "pickle by global reference name".
                self.save_global(obj, rv)
            else:
                # Otherwise `rv` is a (callable, args, ...) reduce tuple.
                self.save_reduce(obj=obj, *rv)
        cls.dispatch[type] = dispatcher
def dumps(o):
    """Serialize *o* to a byte string using MyPickler (highest protocol)."""
    buf = StringIO()
    MyPickler(buf, -1).dump(o)
    return buf.getvalue()
def loads(s):
    """Deserialize a byte string produced by :func:`dumps`."""
    # Deserialization needs no custom dispatch; plain cPickle handles it.
    return cPickle.loads(s)

# Aliases -- presumably the names other dpark modules import; verify callers
# before renaming.
dump_func = dumps
load_func = loads
def reduce_module(mod):
    # Pickle a module as just its dotted name; it is re-imported on load.
    return load_module, (mod.__name__, )

def load_module(name):
    """Import (if needed) and return the module called *name*."""
    __import__(name)
    return sys.modules[name]

# Make every module instance picklable through MyPickler.
MyPickler.register(types.ModuleType, reduce_module)
class RecursiveFunctionPlaceholder(object):
    """
    Placeholder for a recursive reference to the current function,
    to avoid infinite recursion when serializing recursive functions.
    """
    def __eq__(self, other):
        # All placeholders compare equal; load_closure uses == to find them.
        return isinstance(other, RecursiveFunctionPlaceholder)
    # NOTE(review): __eq__ without __hash__ keeps the default id-based hash
    # on Python 2 but would make the class unhashable on Python 3 -- confirm
    # placeholders are never used as dict/set keys.

# Single shared sentinel stored in place of the self-reference.
RECURSIVE_FUNCTION_PLACEHOLDER = RecursiveFunctionPlaceholder()
def marshalable(o):
    """Best-effort check that *o* can be serialized with ``marshal``.

    Exact-type checks are deliberate: subclasses of the primitive types are
    not marshalable.  Containers are only sampled (first 100 items), so a
    True result is a heuristic, not a guarantee.
    """
    if o is None:
        return True
    kind = type(o)
    if kind in (str, unicode, bool, int, long, float, complex):
        return True
    if kind in (tuple, list, set):
        return all(marshalable(item)
                   for item in itertools.islice(o, 100))
    if kind == dict:
        return all(marshalable(k) and marshalable(v)
                   for k, v in itertools.islice(o.iteritems(), 100))
    return False
OBJECT_SIZE_LIMIT = 100 << 10
def create_broadcast(name, obj, func_name):
    """Wrap a large captured object in a dpark broadcast variable.

    dpark is imported lazily to avoid a circular import between this
    module and the dpark package.
    """
    import dpark
    logger.info("use broadcast for object %s %s (used in function %s)",
            name, type(obj), func_name)
    return dpark._ctx.broadcast(obj)
def dump_obj(f, name, obj):
    """Pickle one object captured by function *f* (a global or a cell value).

    Objects larger than OBJECT_SIZE_LIMIT are converted to broadcast
    variables so the serialized closure stays small enough to ship.
    """
    if obj is f:
        # Prevent infinite recursion when dumping a recursive function
        return dumps(RECURSIVE_FUNCTION_PLACEHOLDER)
    try:
        if sys.getsizeof(obj) > OBJECT_SIZE_LIMIT:
            obj = create_broadcast(name, obj, f.__name__)
    except TypeError:
        # Some objects do not support sys.getsizeof(); fall through and
        # rely on the post-pickling size check below instead.
        pass
    b = dumps(obj)
    if len(b) > OBJECT_SIZE_LIMIT:
        # Small sys.getsizeof() but a big pickle (e.g. nested containers).
        b = dumps(create_broadcast(name, obj, f.__name__))
    if len(b) > OBJECT_SIZE_LIMIT:
        logger.warning("broadcast of %s obj too large", type(obj))
    return b
def get_co_names(code):
    """Return all names referenced by *code*, including names used by
    nested code objects (inner functions, comprehensions, classes).

    The result is a tuple, matching code.co_names itself. Names may
    appear more than once, as in the original implementation.
    """
    names = list(code.co_names)
    for const in code.co_consts:
        if isinstance(const, types.CodeType):
            # Accumulate in a list instead of repeatedly concatenating
            # tuples, which is quadratic for deeply nested code.
            names.extend(get_co_names(const))
    return tuple(names)
def dump_closure(f):
    """Serialize function *f*: code, referenced globals, defaults, closure.

    Only globals whose names appear in the (recursively collected) code
    object are captured, and each captured value goes through dump_obj()
    so large values become broadcast variables.
    """
    def _do_dump(f):
        for i, c in enumerate(f.func_closure):
            if hasattr(c, 'cell_contents'):
                yield dump_obj(f, 'cell%d' % i, c.cell_contents)
            else:
                # Empty (unset) cell; marked with None so load_closure
                # recreates an empty cell on the other side.
                yield None
    code = f.func_code
    glob = {}
    for n in get_co_names(code):
        r = f.func_globals.get(n)
        if r is not None:
            glob[n] = dump_obj(f, n, r)
    closure = None
    if f.func_closure:
        closure = tuple(_do_dump(f))
    return marshal.dumps((code, glob, f.func_name, f.func_defaults, closure, f.__module__))
def load_closure(bytes):
    """Rebuild a function serialized by dump_closure()."""
    code, glob, name, defaults, closure, mod = marshal.loads(bytes)
    glob = dict((k, loads(v)) for k,v in glob.items())
    glob['__builtins__'] = __builtins__
    closure = closure and reconstruct_closure(closure) or None
    f = new.function(code, glob, name, defaults, closure)
    f.__module__ = mod
    # Replace the recursive function placeholders with this simulated function pointer
    for key, value in glob.items():
        if RECURSIVE_FUNCTION_PLACEHOLDER == value:
            f.func_globals[key] = f
    return f
def make_cell(value):
    # Use a throwaway closure to wrap *value* in a fresh cell object.
    return (lambda: value).func_closure[0]
def make_empty_cell():
    if False:
        # Never executed: merely makes `unreachable` a local of this
        # scope, so the lambda below closes over an *empty* (unset) cell.
        unreachable = None
    return (lambda: unreachable).func_closure[0]
def reconstruct_closure(closure):
    """Turn dump_closure()'s cell payloads back into a tuple of cells.

    A None entry stands for an originally empty (unset) cell.
    """
    cells = []
    for payload in closure:
        if payload is None:
            cells.append(make_empty_cell())
        else:
            cells.append(make_cell(loads(payload)))
    return tuple(cells)
def get_global_function(module, name):
    """Import *module* and return its attribute *name*."""
    __import__(module)
    return getattr(sys.modules[module], name)
def reduce_function(obj):
    """MyPickler reducer for functions.

    Functions that can be re-imported by name are pickled as a global
    reference; lambdas, __main__ functions and rebound names fall back
    to a full closure dump.
    """
    name = obj.__name__
    if not name or name == '<lambda>':
        return load_closure, (dump_closure(obj),)
    module = getattr(obj, "__module__", None)
    if module is None:
        module = whichmodule(obj, name)
    if module == '__main__' and name not in ('load_closure','load_module',
            'load_method', 'load_local_class'): # fix for test
        return load_closure, (dump_closure(obj),)
    try:
        f = get_global_function(module, name)
    except (ImportError, KeyError, AttributeError):
        return load_closure, (dump_closure(obj),)
    else:
        if f is not obj:
            # The module-level name no longer refers to this object
            # (e.g. it was rebound); dump the actual function instead.
            return load_closure, (dump_closure(obj),)
        return name
classes_dumping = set()
internal_fields = {
'__weakref__': False,
'__dict__': False,
'__doc__': True
}
def dump_local_class(cls):
    """Serialize a class defined in __main__ (or otherwise unimportable).

    The class dict is split into `internal` attributes (passed straight
    to type() on load) and `external` ones pickled separately; the
    module-level `classes_dumping` set guards against infinite recursion
    for self-referential classes.
    """
    name = cls.__name__
    if cls in classes_dumping:
        # Already being dumped higher up the stack: emit a name reference.
        return dumps(name)
    classes_dumping.add(cls)
    internal = {}
    external = {}
    for k in cls.__dict__:
        if k not in internal_fields:
            v = getattr(cls, k)
            if isinstance(v, property):
                # Properties are decomposed into their accessors and
                # rebuilt by load_local_class().
                k = ('property', k)
                v = (v.fget, v.fset, v.fdel, v.__doc__)
            if isinstance(v, types.FunctionType):
                # getattr() on a Python 2 class returns methods as
                # MethodType; a plain FunctionType here means the
                # attribute was a staticmethod.
                k = ('staticmethod', k)
            external[k] = v
        elif internal_fields[k]:
            internal[k] = getattr(cls, k)
    result = dumps((cls.__name__, cls.__bases__, internal, dumps(external)))
    if cls in classes_dumping:
        classes_dumping.remove(cls)
    return result
classes_loaded = {}
def load_local_class(bytes):
    """Rebuild a class serialized by dump_local_class().

    The module-level `classes_loaded` dict caches classes by name so
    recursive references and repeated loads resolve to the same object.
    """
    t = loads(bytes)
    if not isinstance(t, tuple):
        # A bare name: reference to a class already (being) loaded.
        return classes_loaded[t]
    name, bases, internal, external = t
    if name in classes_loaded:
        return classes_loaded[name]
    cls = type(name, bases, internal)
    # Register before filling in attributes so recursive members resolve.
    classes_loaded[name] = cls
    for k, v in loads(external).items():
        if isinstance(k, tuple):
            # Tagged keys carry the attribute kind; see dump_local_class().
            t, k = k
            if t == 'property':
                fget, fset, fdel, doc = v
                v = property(fget, fset, fdel, doc)
            if t == 'staticmethod':
                v = staticmethod(v)
        setattr(cls, k, v)
    return cls
def reduce_class(obj):
    """MyPickler reducer for classes: __main__ classes are dumped by
    value, everything else by global name."""
    cls_name = obj.__name__
    module = getattr(obj, "__module__", None)
    if module != '__main__' or cls_name in ('MyPickler', 'RecursiveFunctionPlaceholder'):
        return cls_name
    return load_local_class, (dump_local_class(obj),)
CLS_TYPES = [types.TypeType, types.ClassType]
def dump_method(method):
    """Serialize a bound/unbound method as (self, class ref, function).

    Classes that are themselves in CLS_TYPES (type / classobj) are
    encoded by their index; other classes by name, to be resolved on
    load through the `classes_loaded` cache.
    """
    obj = method.im_self
    cls = method.im_class
    func = method.im_func
    if cls in CLS_TYPES:
        cls_name = CLS_TYPES.index(cls)
    else:
        cls_name = cls.__name__
    return dumps((obj, cls_name, func))
def load_method(bytes):
    """Rebuild a method serialized by dump_method()."""
    obj, cls_name, func = loads(bytes) # cls referred in func.func_globals
    if isinstance(cls_name, int):
        # Index into CLS_TYPES (type / classobj themselves).
        cls = CLS_TYPES[cls_name]
    else:
        cls = classes_loaded[cls_name]
    return types.MethodType(func, obj, cls)
def reduce_method(method):
    """MyPickler reducer for bound/unbound methods."""
    # The previously computed `module` local was never used; dropped.
    return load_method, (dump_method(method), )
MyPickler.register(types.LambdaType, reduce_function)
MyPickler.register(types.ClassType, reduce_class)
MyPickler.register(types.TypeType, reduce_class)
MyPickler.register(types.MethodType, reduce_method)
if __name__ == "__main__":
    # Self-test: marshalable() on all supported scalar/container types.
    assert marshalable(None)
    assert marshalable("")
    assert marshalable(u"")
    assert not marshalable(buffer(""))
    assert marshalable(0)
    assert marshalable(0L)
    assert marshalable(0.0)
    assert marshalable(True)
    assert marshalable(complex(1,1))
    assert marshalable((1,1))
    assert marshalable([1,1])
    assert marshalable(set([1,1]))
    assert marshalable({1:None})
    # Closure round-trip: a function capturing a global, locals and a
    # nested function must survive dumps()/loads().
    some_global = 'some global'
    def glob_func(s):
        return "glob:" + s
    def get_closure(x):
        glob_func(some_global)
        last = " last"
        def foo(y): return "foo: " + y
        def the_closure(a, b=1):
            marshal.dumps(a)
            return (a * x + int(b), glob_func(foo(some_global)+last))
        return the_closure
    f = get_closure(10)
    ff = loads(dumps(f))
    #print globals()
    print f(2)
    print ff(2)
    glob_func = loads(dumps(glob_func))
    get_closure = loads(dumps(get_closure))
    # Test recursive functions
    def fib(n): return n if n <= 1 else fib(n-1) + fib(n-2)
    assert fib(8) == loads(dumps(fib))(8)
    # Local-class round-trip: old-style, new-style, inheritance with
    # super(), and classmethod/property/staticmethod attributes.
    class Foo1:
        def foo(self):
            return 1234
    class Foo2(object):
        def foo(self):
            return 5678
    class Foo3(Foo2):
        x = 1111
        def foo(self):
            return super(Foo3, self).foo() + Foo3.x
    class Foo4(object):
        @classmethod
        def x(cls):
            return 1
        @property
        def y(self):
            return 2
        @staticmethod
        def z():
            return 3
    df1 = dumps(Foo1)
    df2 = dumps(Foo2)
    df3 = dumps(Foo3)
    df4 = dumps(Foo4)
    # Delete the originals so loads() must rebuild them from scratch.
    del Foo1
    del Foo2
    del Foo3
    del Foo4
    Foo1 = loads(df1)
    Foo2 = loads(df2)
    Foo3 = loads(df3)
    Foo4 = loads(df4)
    f1 = Foo1()
    f2 = Foo2()
    f3 = Foo3()
    f4 = Foo4()
    assert f1.foo() == 1234
    assert f2.foo() == 5678
    assert f3.foo() == 5678 + 1111
    assert Foo4.x() == 1
    # Generator (lambda) round-trip.
    f = loads(dumps(lambda:(some_global for i in xrange(1))))
    print list(f())
    assert list(f()) == [some_global]
| bsd-3-clause |
yglazko/bedrock | bedrock/mozorg/tests/test_credits.py | 29 | 3418 | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from collections import OrderedDict
from mock import Mock
from bedrock.mozorg import credits
from bedrock.mozorg.tests import TestCase
class TestCredits(TestCase):
    """Tests for bedrock.mozorg.credits.CreditsFile parsing and ordering."""
    def setUp(self):
        # Fresh CreditsFile per test; clear its cache so the mocked
        # readlines() output is actually re-read each time.
        self.credits_file = credits.CreditsFile('credits')
        self.credits_file.clear_cache()
    def test_credits_list(self):
        """rows should sort by sortkey and upper-case the sortkey column."""
        self.credits_file.readlines = Mock(return_value=[
            'The Dude,Dude',
            'Walter Sobchak,Sobchak',
            'Theodore Donald Kerabatsos,Kerabatsos',
        ])
        self.assertListEqual(self.credits_file.rows, [
            ['The Dude', 'DUDE'],
            ['Theodore Donald Kerabatsos', 'KERABATSOS'],
            ['Walter Sobchak', 'SOBCHAK'],
        ])
    def test_credits_ordered_no_sortkey(self):
        """Should give an ordered dict or ordered lists keyed on first letter of name."""
        self.credits_file.readlines = Mock(return_value=[
            'Bunny Lebowski',
            'Maude Lebowski',
            'Jeffrey Lebowski',
            'Uli Kunkel',
            'The Dude',
            'Walter Sobchak',
            'Theodore Donald Kerabatsos',
        ])
        good_names = OrderedDict()
        good_names['B'] = ['Bunny Lebowski']
        good_names['J'] = ['Jeffrey Lebowski']
        good_names['M'] = ['Maude Lebowski']
        good_names['T'] = ['The Dude', 'Theodore Donald Kerabatsos']
        good_names['U'] = ['Uli Kunkel']
        good_names['W'] = ['Walter Sobchak']
        self.assertEqual(self.credits_file.ordered, good_names)
    def test_credits_ordered(self):
        """Should give an ordered dict or ordered lists keyed on first letter of sortkey."""
        self.credits_file.readlines = Mock(return_value=[
            'Bunny Lebowski,Lebowski Bunny',
            'Maude Lebowski,Lebowski Maude',
            'Jeffrey Lebowski,Lebowski Jeffrey',
            'Uli Kunkel,Kunkel',
            'The Dude,Dude',
            'Walter Sobchak,Sobchak',
            'Theodore Donald Kerabatsos,Kerabatsos',
        ])
        good_names = OrderedDict()
        good_names['D'] = ['The Dude']
        good_names['K'] = ['Theodore Donald Kerabatsos', 'Uli Kunkel']
        good_names['L'] = ['Bunny Lebowski', 'Jeffrey Lebowski', 'Maude Lebowski']
        good_names['S'] = ['Walter Sobchak']
        self.assertEqual(self.credits_file.ordered, good_names)
    def test_credits_ordered_skips(self):
        """Should skip lines with more than 2 items."""
        self.credits_file.readlines = Mock(return_value=[
            'Bunny Lebowski,Lebowski Bunny',
            'Maude Lebowski,Lebowski Maude',
            'Jeffrey Lebowski,Lebowski Jeffrey',
            # Malformed 3-column line below must be ignored entirely.
            'Karl Hungus,Karl,Inappropriate',
            'Uli Kunkel,Kunkel',
            'The Dude,Dude',
            'Walter Sobchak,Sobchak',
            'Theodore Donald Kerabatsos,Kerabatsos',
        ])
        good_names = OrderedDict()
        good_names['D'] = ['The Dude']
        good_names['K'] = ['Theodore Donald Kerabatsos', 'Uli Kunkel']
        good_names['L'] = ['Bunny Lebowski', 'Jeffrey Lebowski', 'Maude Lebowski']
        good_names['S'] = ['Walter Sobchak']
        self.assertEqual(self.credits_file.ordered, good_names)
| mpl-2.0 |
spencerrecneps/tdg-heroku-test | src/project/wsgi.py | 8 | 1436 | """
WSGI config for project project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
# Make the project directory importable before Django settings load.
projectdir = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.insert(0, projectdir)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Serve static files directly from the WSGI process (dj-static).
from dj_static import Cling
application = Cling(application)
# Gzip-compress responses.
from .gzip_middleware import GzipMiddleware
application = GzipMiddleware(application)
# One-year Expires headers for immutable static asset content types.
from .twinkie import ExpiresMiddleware
application = ExpiresMiddleware(application, {
    'application/javascript': 365*24*60*60,
    'text/css': 365*24*60*60,
    'image/png': 365*24*60*60,
})
# HTTP basic auth in front of everything except the API endpoints.
from .basic_auth import BasicAuthMiddleware
application = BasicAuthMiddleware(application, exempt=(
    r'^/api/',
))
| gpl-3.0 |
rjwil1086/android_kernel_hp_pine | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py | 12527 | 1935 | # Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000

def avg(total, n):
    """Arithmetic mean of *total* over *n* samples."""
    return total / n

def nsecs(secs, nsecs):
    """Combine a (secs, nsecs) pair into a single nanosecond count."""
    return secs * NSECS_PER_SEC + nsecs

def nsecs_secs(nsecs):
    """Whole-seconds part of a nanosecond count."""
    # Floor division keeps integer semantics under Python 3 as well;
    # the result feeds integer (%u) formatting in nsecs_str().
    return nsecs // NSECS_PER_SEC

def nsecs_nsecs(nsecs):
    """Sub-second remainder (in nanoseconds) of a nanosecond count."""
    return nsecs % NSECS_PER_SEC

def nsecs_str(nsecs):
    """Format a nanosecond count as '  sec.nanoseconds'."""
    # The previous version ended with a stray trailing comma, so it
    # returned a 1-tuple instead of the string; return the plain string.
    return "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs))
def add_stats(dict, key, value):
    """Fold *value* into dict[key] = (min, max, running avg, count)."""
    # `key not in dict` replaces the Python-2-only dict.has_key().
    if key not in dict:
        # First sample for this key.
        dict[key] = (value, value, value, 1)
    else:
        lo, hi, mean, count = dict[key]
        if value < lo:
            lo = value
        if value > hi:
            hi = value
        # Original smoothing kept intact: average of the previous average
        # and the new value (an exponential-style estimate, not a true
        # arithmetic mean over all samples).
        mean = (mean + value) / 2
        dict[key] = (lo, hi, mean, count + 1)
def clear_term():
    # ANSI escapes: cursor home (ESC[H) + erase entire display (ESC[2J).
    print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
    import audit
    # Map uname machine strings to audit's machine ids so syscall
    # numbers can be translated for the running architecture.
    machine_to_id = {
        'x86_64': audit.MACH_86_64,
        'alpha' : audit.MACH_ALPHA,
        'ia64' : audit.MACH_IA64,
        'ppc' : audit.MACH_PPC,
        'ppc64' : audit.MACH_PPC64,
        's390' : audit.MACH_S390,
        's390x' : audit.MACH_S390X,
        'i386' : audit.MACH_X86,
        'i586' : audit.MACH_X86,
        'i686' : audit.MACH_X86,
    }
    try:
        # MACH_ARMEB is missing from older audit bindings.
        machine_to_id['armeb'] = audit.MACH_ARMEB
    except:
        pass
    machine_id = machine_to_id[os.uname()[4]]
except:
    # audit bindings unavailable (or unknown machine): warn once;
    # syscall_name() falls back to numeric syscall ids.
    if not audit_package_warned:
        audit_package_warned = True
        print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
    """Translate syscall number *id* to its name, or str(id) when the
    audit bindings (or this machine's mapping) are unavailable."""
    try:
        return audit.audit_syscall_to_name(id, machine_id)
    except Exception:
        # NameError when the audit import above failed, or audit errors
        # for unknown ids; fall back to the raw number. Narrowed from a
        # bare `except:` so KeyboardInterrupt/SystemExit pass through.
        return str(id)
def strerror(nr):
    """Symbolic errno name for *nr* (sign-insensitive), e.g. 'ENOENT'."""
    try:
        return errno.errorcode[abs(nr)]
    # Narrowed from a bare `except:`: only an unknown errno is expected.
    except KeyError:
        return "Unknown %d errno" % nr
| gpl-2.0 |
sdh11/gnuradio | gr-uhd/apps/uhd_app.py | 7 | 19018 | #!/usr/bin/env python
#
# Copyright 2015-2016 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
USRP Helper Module: Common tasks for uhd-based apps.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import sys
import time
import argparse
from gnuradio import eng_arg
from gnuradio import uhd
from gnuradio import gr
from gnuradio import gru
COMMAND_DELAY = .2 # Seconds
COMPACT_TPL = "{mb_id} ({mb_serial}), {db_subdev} ({subdev}, {ant}{db_serial})"
LONG_TPL = """{prefix} Motherboard: {mb_id} ({mb_serial})
{prefix} Daughterboard: {db_subdev}{db_serial}
{prefix} Subdev: {subdev}
{prefix} Antenna: {ant}
"""
class UHDApp(object):
" Base class for simple UHD-based applications "
    def __init__(self, prefix=None, args=None):
        """
        prefix: tag prepended to all console output (see vprint()).
        args: argparse namespace as produced by setup_argparser().
        """
        self.prefix = prefix
        self.args = args
        self.verbose = args.verbose or 0
        # Multi-channel operation defaults to PPS sync so all channels
        # tune together.
        if self.args.sync == 'auto' and len(self.args.channels) > 1:
            self.args.sync = 'pps'
        # Device-derived state below is populated later by setup_usrp().
        self.antenna = None
        self.gain_range = None
        self.samp_rate = None
        self.has_lo_sensor = None
        self.async_msgq = None
        self.async_src = None
        self.async_rcv = None
        self.tr = None
        self.gain = None
        self.freq = None
        self.channels = None
        self.cpu_format = None
        self.spec = None
        self.clock_source = None
        self.time_source = None
        self.lo_source = None
        self.lo_export = None
def vprint(self, *args):
"""
Print 'string' with 'prefix' prepended if self.verbose is True
"""
if self.verbose:
print("[{prefix}]".format(prefix=self.prefix), *args)
def get_usrp_info_string(self,
compact=False,
tx_or_rx='rx',
chan=0,
mboard=0,
):
"""
Return a nice textual description of the USRP we're using.
"""
if tx_or_rx not in ['rx', 'tx']:
raise ValueError("tx_or_rx argument must be one of ['rx', 'tx']")
try:
info_pp = {}
if self.prefix is None:
info_pp['prefix'] = ""
else:
info_pp['prefix'] = "[{prefix}] ".format(prefix=self.prefix)
usrp_info = self.usrp.get_usrp_info(chan)
info_pp['mb_id'] = usrp_info['mboard_id']
info_pp['mb_serial'] = usrp_info['mboard_serial']
if info_pp['mb_serial'] == "":
info_pp['mb_serial'] = "no serial"
info_pp['db_subdev'] = usrp_info["{xx}_subdev_name".format(xx=tx_or_rx)]
info_pp['db_serial'] = ", " + usrp_info["{xx}_serial".format(xx=tx_or_rx)]
if info_pp['db_serial'] == "":
info_pp['db_serial'] = "no serial"
info_pp['subdev'] = self.usrp.get_subdev_spec(mboard)
info_pp['ant'] = self.usrp.get_antenna(chan)
if info_pp['mb_id'] in ("B200", "B210", "E310"):
# In this case, this is meaningless
info_pp['db_serial'] = ""
tpl = LONG_TPL
if compact:
tpl = COMPACT_TPL
return tpl.format(**info_pp)
except Exception:
return "Can't establish USRP info."
def normalize_sel(self, num_name, arg_name, num, arg):
"""
num_name: meaningful name why we need num arguments
arg_name: name of current argument
num: required number of arguments
arg: actual argument
"""
if arg is None:
return None
args = [x.strip() for x in arg.split(",")]
if len(args) == 1:
args = args * num
if len(args) != num:
raise ValueError("Invalid {m} setting for {n} {b}: {a}".format(
m=arg_name, n=num, a=arg, b=num_name
))
return args
    def async_callback(self, msg):
        """
        Call this when USRP async metadata needs printing.

        msg: raw async message; decoded via the amsg_source helper that
        setup_usrp() stored on self.async_src.
        """
        metadata = self.async_src.msg_to_async_metadata_t(msg)
        print("[{prefix}] Channel: {chan} Time: {t} Event: {e}".format(
            prefix=self.prefix,
            chan=metadata.channel,
            t=metadata.time_spec.get_real_secs(),
            e=metadata.event_code,
        ))
def setup_usrp(self, ctor, args, cpu_format='fc32'):
"""
Instantiate a USRP object; takes care of all kinds of corner cases and settings.
Pop it and some args onto the class that calls this.
"""
self.channels = args.channels
self.cpu_format = cpu_format
# Create a UHD device object:
self.usrp = ctor(
device_addr=args.args,
stream_args=uhd.stream_args(
cpu_format,
args.otw_format,
args=args.stream_args,
channels=self.channels,
)
)
# Set the subdevice spec:
self.spec = self.normalize_sel("mboards", "subdev",
self.usrp.get_num_mboards(), args.spec)
if self.spec:
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.set_subdev_spec(self.spec[mb_idx], mb_idx)
# Set the clock and/or time source:
if args.clock_source is not None:
self.clock_source = self.normalize_sel("mboards", "clock-source",
self.usrp.get_num_mboards(), args.clock_source)
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.set_clock_source(self.clock_source[mb_idx], mb_idx)
if args.time_source is not None:
self.time_source = self.normalize_sel("mboards", "time-source",
self.usrp.get_num_mboards(), args.time_source)
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.set_time_source(self.time_source[mb_idx], mb_idx)
# Sampling rate:
self.usrp.set_samp_rate(args.samp_rate)
self.samp_rate = self.usrp.get_samp_rate()
self.vprint("Using sampling rate: {rate}".format(rate=self.samp_rate))
# Set the antenna:
self.antenna = self.normalize_sel("channels", "antenna", len(args.channels), args.antenna)
if self.antenna is not None:
for i, chan in enumerate(self.channels):
if not self.antenna[i] in self.usrp.get_antennas(i):
print("[ERROR] {} is not a valid antenna name for this USRP device!".format(self.antenna[i]))
exit(1)
self.usrp.set_antenna(self.antenna[i], i)
self.vprint("[{prefix}] Channel {chan}: Using antenna {ant}.".format(
prefix=self.prefix, chan=chan, ant=self.usrp.get_antenna(i)
))
self.antenna = self.usrp.get_antenna(0)
# Set receive daughterboard gain:
self.set_gain(args.gain)
self.gain_range = self.usrp.get_gain_range(0)
# Set frequency (tune request takes lo_offset):
if hasattr(args, 'lo_offset') and args.lo_offset is not None:
treq = uhd.tune_request(args.freq, args.lo_offset)
else:
treq = uhd.tune_request(args.freq)
self.has_lo_sensor = 'lo_locked' in self.usrp.get_sensor_names()
# Set LO export and LO source operation
if (args.lo_export is not None) and (args.lo_source is not None):
self.lo_source = self.normalize_sel("channels", "lo-source", len(self.channels), args.lo_source)
self.lo_export = self.normalize_sel("channels", "lo-export", len(self.channels), args.lo_export)
self.lo_source_channel = None
for chan, lo_source, lo_export in zip(self.channels, self.lo_source, self.lo_export):
if (lo_source == "None") or (lo_export == "None"):
continue
if lo_export == "True":
#If channel is LO source set frequency and store response
self.usrp.set_lo_export_enabled(True, uhd.ALL_LOS, chan)
if lo_source == "internal":
self.lo_source_channel = chan
tune_resp = self.usrp.set_center_freq(treq,chan)
self.usrp.set_lo_source(lo_source, uhd.ALL_LOS,chan)
# Use lo source tune response to tune dsp_freq on remaining channels
if self.lo_source_channel is not None:
if getattr(args, 'lo_offset', None) is not None:
treq = uhd.tune_request(target_freq=args.freq, rf_freq=args.freq+args.lo_offset, rf_freq_policy=uhd.tune_request.POLICY_MANUAL,
dsp_freq=tune_resp.actual_dsp_freq,
dsp_freq_policy=uhd.tune_request.POLICY_MANUAL)
else:
treq = uhd.tune_request(target_freq=args.freq, rf_freq=args.freg, rf_freq_policy=uhd.tune_request.POLICY_MANUAL,
dsp_freq=tune_resp.actual_dsp_freq,
dsp_freq_policy=uhd.tune_request.POLICY_MANUAL)
for chan in args.channels:
if chan == self.lo_source_channel:
continue
self.usrp.set_center_freq(treq,chan)
# Make sure tuning is synched:
command_time_set = False
if len(self.channels) > 1:
if args.sync == 'pps':
self.usrp.set_time_unknown_pps(uhd.time_spec())
cmd_time = self.usrp.get_time_now() + uhd.time_spec(COMMAND_DELAY)
try:
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.set_command_time(cmd_time, mb_idx)
command_time_set = True
except RuntimeError:
sys.stderr.write('[{prefix}] [WARNING] Failed to set command times.\n'.format(prefix=self.prefix))
for i, chan in enumerate(self.channels):
self.tr = self.usrp.set_center_freq(treq, i)
if self.tr == None:
sys.stderr.write('[{prefix}] [ERROR] Failed to set center frequency on channel {chan}\n'.format(
prefix=self.prefix, chan=chan
))
exit(1)
if command_time_set:
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.clear_command_time(mb_idx)
self.vprint("Syncing channels...".format(prefix=self.prefix))
time.sleep(COMMAND_DELAY)
self.freq = self.usrp.get_center_freq(0)
if args.show_async_msg:
self.async_msgq = gr.msg_queue(0)
self.async_src = uhd.amsg_source("", self.async_msgq)
self.async_rcv = gru.msgq_runner(self.async_msgq, self.async_callback)
    def set_gain(self, gain):
        """
        Safe gain-setter. Catches some special cases:
        - If gain is None, set to mid-point in dB.
        - If the USRP is multi-channel, set it on all channels.

        NOTE(review): args.gain_type ('normalized') is parsed by
        setup_argparser() but never consulted here -- an explicit gain
        is always applied in dB. Confirm whether that is intended.
        """
        if gain is None:
            if self.args.verbose:
                self.vprint("Defaulting to mid-point gains:".format(prefix=self.prefix))
            for i, chan in enumerate(self.channels):
                # Normalized gain 0.5 == mid-point of the device's range.
                self.usrp.set_normalized_gain(.5, i)
                if self.args.verbose:
                    self.vprint("Channel {chan} gain: {g} dB".format(
                        prefix=self.prefix, chan=chan, g=self.usrp.get_gain(i)
                    ))
        else:
            self.vprint("Setting gain to {g} dB.".format(g=gain))
            for chan in range( len( self.channels ) ):
                self.usrp.set_gain(gain, chan)
        self.gain = self.usrp.get_gain(0)
def set_freq(self, freq, skip_sync=False):
"""
Safely tune all channels to freq.
"""
self.vprint("Tuning all channels to {freq} MHz.".format(freq=freq / 1e6))
# Set frequency (tune request takes lo_offset):
if hasattr(self.args, 'lo_offset') and self.args.lo_offset is not None:
treq = uhd.tune_request(freq, self.args.lo_offset)
else:
treq = uhd.tune_request(freq)
# Special TwinRX tuning due to LO sharing
if getattr(self, 'lo_source_channel', None) is not None:
tune_resp = self.usrp.set_center_freq(treq, self.lo_source_channel)
if getattr(self.args, 'lo_offset', None) is not None:
treq = uhd.tune_request(target_freq=freq, rf_freq=freq+self.args.lo_offset, rf_freq_policy=uhd.tune_request.POLICY_MANUAL,
dsp_freq=tune_resp.actual_dsp_freq,
dsp_freq_policy=uhd.tune_request.POLICY_MANUAL)
else:
treq = uhd.tune_request(target_freq=freq, rf_freq=freq, rf_freq_policy=uhd.tune_reqest.POLICY_MANUAL,
dsp_freq=tune_resp.actual_dsp_freq,
dsp_freq_policy=uhd.tune_request.POLICY_MANUAL)
for chan in self.channels:
if chan == self.lo_source_channel:
continue
self.usrp.set_center_freq(treq,chan)
# Make sure tuning is synched:
command_time_set = False
if len(self.channels) > 1 and not skip_sync:
cmd_time = self.usrp.get_time_now() + uhd.time_spec(COMMAND_DELAY)
try:
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.set_command_time(cmd_time, mb_idx)
command_time_set = True
except RuntimeError:
sys.stderr.write('[{prefix}] [WARNING] Failed to set command times.\n'.format(prefix=self.prefix))
for i, chan in enumerate(self.channels ):
self.tr = self.usrp.set_center_freq(treq, i)
if self.tr == None:
sys.stderr.write('[{prefix}] [ERROR] Failed to set center frequency on channel {chan}\n'.format(
prefix=self.prefix, chan=chan
))
exit(1)
if command_time_set:
for mb_idx in range(self.usrp.get_num_mboards()):
self.usrp.clear_command_time(mb_idx)
self.vprint("Syncing channels...".format(prefix=self.prefix))
time.sleep(COMMAND_DELAY)
self.freq = self.usrp.get_center_freq(0)
self.vprint("First channel has freq: {freq} MHz.".format(freq=self.freq / 1e6))
    @staticmethod
    def setup_argparser(
            parser=None,
            description='USRP App',
            allow_mimo=True,
            tx_or_rx="",
            skip_freq=False,
        ):
        """
        Create or amend an argument parser with typical USRP options.

        parser: existing ArgumentParser to extend, or None to create one.
        allow_mimo: add multi-channel / LO-sharing options.
        tx_or_rx: 'tx' or 'rx'; only used in help strings.
        skip_freq: omit the --freq / --lo-offset options.
        """
        def cslist(string):
            """
            For ArgParser: Turn a comma separated list into an actual list.
            """
            try:
                return [int(x.strip()) for x in string.split(",")]
            except ValueError:
                raise argparse.ArgumentTypeError("Not a comma-separated list: {string}".format(string=string))
        if parser is None:
            parser = argparse.ArgumentParser(
                description=description,
            )
        tx_or_rx = tx_or_rx.strip() + " "
        group = parser.add_argument_group('USRP Arguments')
        group.add_argument("-a", "--args", default="", help="UHD device address args")
        group.add_argument("--spec", help="Subdevice(s) of UHD device where appropriate. Use a comma-separated list to set different boards to different specs.")
        group.add_argument("-A", "--antenna", help="Select {xx}antenna(s) where appropriate".format(xx=tx_or_rx))
        group.add_argument("-s", "--samp-rate", type=eng_arg.eng_float, default=1e6,
                           help="Sample rate")
        group.add_argument("-g", "--gain", type=eng_arg.eng_float, default=None,
                           help="Gain (default is midpoint)")
        group.add_argument("--gain-type", choices=('db', 'normalized'), default='db',
                           help="Gain Type (applies to -g)")
        if not skip_freq:
            group.add_argument("-f", "--freq", type=eng_arg.eng_float, default=None, required=True,
                               help="Set carrier frequency to FREQ",
                               metavar="FREQ")
            group.add_argument("--lo-offset", type=eng_arg.eng_float, default=0.0,
                               help="Set daughterboard LO offset to OFFSET [default=hw default]")
        if allow_mimo:
            group.add_argument("-c", "--channels", default=[0,], type=cslist,
                               help="Select {xx} Channels".format(xx=tx_or_rx))
            group.add_argument("--lo-export", help="Set TwinRX LO export {None, True, False} for each channel with a comma-separated list. None skips a channel.")
            group.add_argument("--lo-source", help="Set TwinRX LO source {None, internal, companion, external} for each channel with a comma-separated list. None skips this channel. ")
        group.add_argument("--otw-format", choices=['sc16', 'sc12', 'sc8'], default='sc16',
                           help="Choose over-the-wire data format")
        group.add_argument("--stream-args", default="", help="Set additional stream arguments")
        group.add_argument("-m", "--amplitude", type=eng_arg.eng_float, default=0.15,
                           help="Set output amplitude to AMPL (0.0-1.0)", metavar="AMPL")
        group.add_argument("-v", "--verbose", action="count", help="Use verbose console output")
        group.add_argument("--show-async-msg", action="store_true",
                           help="Show asynchronous message notifications from UHD")
        group.add_argument("--sync", choices=('default', 'pps', 'auto'),
                           default='auto', help="Set to 'pps' to sync devices to PPS")
        group.add_argument("--clock-source",
                           help="Set the clock source; typically 'internal', 'external' or 'gpsdo'")
        group.add_argument("--time-source",
                           help="Set the time source")
        return parser
| gpl-3.0 |
srimai/odoo | addons/project/wizard/project_task_delegate.py | 142 | 6479 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
from openerp import tools
from openerp.tools.translate import _
from openerp.osv import fields, osv
class project_task_delegate(osv.osv_memory):
_name = 'project.task.delegate'
_description = 'Task Delegate'
_columns = {
'name': fields.char('Delegated Title', required=True, help="New title of the task delegated to the user"),
'prefix': fields.char('Your Task Title', help="Title for your validation task"),
'project_id': fields.many2one('project.project', 'Project', help="User you want to delegate this task to"),
'user_id': fields.many2one('res.users', 'Assign To', required=True, help="User you want to delegate this task to"),
'new_task_description': fields.text('New Task Description', help="Reinclude the description of the task in the task of the user"),
'planned_hours': fields.float('Planned Hours', help="Estimated time to close this task by the delegated user"),
'planned_hours_me': fields.float('Hours to Validate', help="Estimated time for you to validate the work done by the user to whom you delegate this task"),
'state': fields.selection([('pending','Pending'), ('done','Done'), ], 'Validation State', help="New state of your own task. Pending will be reopened automatically when the delegated task is closed")
}
def onchange_project_id(self, cr, uid, ids, project_id=False, context=None):
project_project = self.pool.get('project.project')
if not project_id:
return {'value':{'user_id': False}}
project = project_project.browse(cr, uid, project_id, context=context)
return {'value': {'user_id': project.user_id and project.user_id.id or False}}
def default_get(self, cr, uid, fields, context=None):
"""
This function gets default values
"""
res = super(project_task_delegate, self).default_get(cr, uid, fields, context=context)
if context is None:
context = {}
record_id = context and context.get('active_id', False) or False
if not record_id:
return res
task_pool = self.pool.get('project.task')
task = task_pool.browse(cr, uid, record_id, context=context)
task_name =tools.ustr(task.name)
if 'project_id' in fields:
res['project_id'] = int(task.project_id.id) if task.project_id else False
if 'name' in fields:
if task_name.startswith(_('CHECK: ')):
newname = tools.ustr(task_name).replace(_('CHECK: '), '')
else:
newname = tools.ustr(task_name or '')
res['name'] = newname
if 'planned_hours' in fields:
res['planned_hours'] = task.remaining_hours or 0.0
if 'prefix' in fields:
if task_name.startswith(_('CHECK: ')):
newname = tools.ustr(task_name).replace(_('CHECK: '), '')
else:
newname = tools.ustr(task_name or '')
prefix = _('CHECK: %s') % newname
res['prefix'] = prefix
if 'new_task_description' in fields:
res['new_task_description'] = task.description
return res
_defaults = {
'planned_hours_me': 1.0,
'state': 'pending',
}
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
    """Adapt the wizard view when the company time unit is not hours.

    Replaces every 'float_time' widget in the view architecture with a
    plain 'float' widget and substitutes the configured unit name for the
    word 'Hours' in field labels.
    """
    res = super(project_task_delegate, self).fields_view_get(cr, uid, view_id, view_type, context=context, toolbar=toolbar, submenu=submenu)
    users_pool = self.pool.get('res.users')
    # Time unit configured on the current user's company.
    obj_tm = users_pool.browse(cr, uid, uid, context=context).company_id.project_time_mode_id
    tm = obj_tm and obj_tm.name or 'Hours'
    if tm in ['Hours','Hour']:
        # Default unit: the view can be used unchanged.
        return res
    eview = etree.fromstring(res['arch'])
    def _check_rec(eview):
        # Recursively swap the float_time widget for a plain float widget.
        if eview.attrib.get('widget','') == 'float_time':
            eview.set('widget','float')
        for child in eview:
            _check_rec(child)
        return True
    _check_rec(eview)
    res['arch'] = etree.tostring(eview)
    # Rename 'Hours' to the configured unit in all field labels.
    for field in res['fields']:
        if 'Hours' in res['fields'][field]['string']:
            res['fields'][field]['string'] = res['fields'][field]['string'].replace('Hours',tm)
    return res
def delegate(self, cr, uid, ids, context=None):
    """Perform the delegation and open the newly created task.

    Delegates the task referenced by ``context['active_id']`` using the
    wizard values, then returns the standard task action pointed at the
    task created by the delegation.
    """
    if context is None:
        context = {}
    task_id = context.get('active_id', False)
    task_pool = self.pool.get('project.task')
    delegate_data = self.read(cr, uid, ids, context=context)[0]
    delegated_tasks = task_pool.do_delegate(cr, uid, [task_id], delegate_data, context=context)
    models_data = self.pool.get('ir.model.data')
    # Resolve the standard task action and its form/tree views by XML id.
    action_model, action_id = models_data.get_object_reference(cr, uid, 'project', 'action_view_task')
    view_model, task_view_form_id = models_data.get_object_reference(cr, uid, 'project', 'view_task_form2')
    view_model, task_view_tree_id = models_data.get_object_reference(cr, uid, 'project', 'view_task_tree2')
    action = self.pool[action_model].read(cr, uid, [action_id], context=context)[0]
    # Point the action at the task created for the delegated work.
    action['res_id'] = delegated_tasks[task_id]
    action['view_id'] = False
    action['views'] = [(task_view_form_id, 'form'), (task_view_tree_id, 'tree')]
    action['help'] = False
    return action
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
lmyrefelt/CouchPotatoServer | libs/requests/packages/chardet/hebrewprober.py | 215 | 13240 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Shy Shalom
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from charsetprober import CharSetProber
import constants
# This prober doesn't actually recognize a language or a charset.
# It is a helper prober for the use of the Hebrew model probers
### General ideas of the Hebrew charset recognition ###
#
# Four main charsets exist in Hebrew:
# "ISO-8859-8" - Visual Hebrew
# "windows-1255" - Logical Hebrew
# "ISO-8859-8-I" - Logical Hebrew
# "x-mac-hebrew" - ?? Logical Hebrew ??
#
# Both "ISO" charsets use a completely identical set of code points, whereas
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
# these code points. windows-1255 defines additional characters in the range
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
# x-mac-hebrew defines similar additional code points but with a different
# mapping.
#
# As far as an average Hebrew text with no diacritics is concerned, all four
# charsets are identical with respect to code points. Meaning that for the
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
# (including final letters).
#
# The dominant difference between these charsets is their directionality.
# "Visual" directionality means that the text is ordered as if the renderer is
# not aware of a BIDI rendering algorithm. The renderer sees the text and
# draws it from left to right. The text itself when ordered naturally is read
# backwards. A buffer of Visual Hebrew generally looks like so:
# "[last word of first line spelled backwards] [whole line ordered backwards
# and spelled backwards] [first word of first line spelled backwards]
# [end of line] [last word of second line] ... etc' "
# adding punctuation marks, numbers and English text to visual text is
# naturally also "visual" and from left to right.
#
# "Logical" directionality means the text is ordered "naturally" according to
# the order it is read. It is the responsibility of the renderer to display
# the text from right to left. A BIDI algorithm is used to place general
# punctuation marks, numbers and English text in the text.
#
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
# what little evidence I could find, it seems that its general directionality
# is Logical.
#
# To sum up all of the above, the Hebrew probing mechanism knows about two
# charsets:
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
# backwards while line order is natural. For charset recognition purposes
# the line order is unimportant (In fact, for this implementation, even
# word order is unimportant).
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
#
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
# specifically identified.
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
# that contain special punctuation marks or diacritics is displayed with
# some unconverted characters showing as question marks. This problem might
# be corrected using another model prober for x-mac-hebrew. Due to the fact
# that x-mac-hebrew texts are so rare, writing another model prober isn't
# worth the effort and performance hit.
#
#### The Prober ####
#
# The prober is divided between two SBCharSetProbers and a HebrewProber,
# all of which are managed, created, fed data, inquired and deleted by the
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
# fact some kind of Hebrew, Logical or Visual. The final decision about which
# one is it is made by the HebrewProber by combining final-letter scores
# with the scores of the two SBCharSetProbers to produce a final answer.
#
# The SBCSGroupProber is responsible for stripping the original text of HTML
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
# and new lines. It reduces any sequence of such characters to a single space.
# The buffer fed to each prober in the SBCS group prober is pure text in
# high-ASCII.
# The two SBCharSetProbers (model probers) share the same language model:
# Win1255Model.
# The first SBCharSetProber uses the model normally as any other
# SBCharSetProber does, to recognize windows-1255, upon which this model was
# built. The second SBCharSetProber is told to make the pair-of-letter
# lookup in the language model backwards. This in practice exactly simulates
# a visual Hebrew model using the windows-1255 logical Hebrew model.
#
# The HebrewProber is not using any language model. All it does is look for
# final-letter evidence suggesting the text is either logical Hebrew or visual
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
# alone are meaningless. HebrewProber always returns 0.00 as confidence
# since it never identifies a charset by itself. Instead, the pointer to the
# HebrewProber is passed to the model probers as a helper "Name Prober".
# When the Group prober receives a positive identification from any prober,
# it asks for the name of the charset identified. If the prober queried is a
# Hebrew model prober, the model prober forwards the call to the
# HebrewProber to make the final decision. In the HebrewProber, the
# decision is made according to the final-letters scores maintained and Both
# model probers scores. The answer is returned in the form of the name of the
# charset identified, either "windows-1255" or "ISO-8859-8".
# windows-1255 / ISO-8859-8 code points of interest
# Final vs. normal (non-final) forms of the five Hebrew letters that have
# a distinct word-final shape, as encoded in windows-1255 / ISO-8859-8.
FINAL_KAF = '\xea'
NORMAL_KAF = '\xeb'
FINAL_MEM = '\xed'
NORMAL_MEM = '\xee'
FINAL_NUN = '\xef'
NORMAL_NUN = '\xf0'
FINAL_PE = '\xf3'
NORMAL_PE = '\xf4'
FINAL_TSADI = '\xf5'
NORMAL_TSADI = '\xf6'

# Minimum Visual vs Logical final letter score difference.
# If the difference is below this, don't rely solely on the final letter score distance.
MIN_FINAL_CHAR_DISTANCE = 5

# Minimum Visual vs Logical model score difference.
# If the difference is below this, don't rely at all on the model score distance.
MIN_MODEL_DISTANCE = 0.01

VISUAL_HEBREW_NAME = "ISO-8859-8"
LOGICAL_HEBREW_NAME = "windows-1255"
class HebrewProber(CharSetProber):
    """Arbitrates between logical Hebrew (windows-1255) and visual Hebrew
    (ISO-8859-8).

    Keeps two counters fed by word-boundary analysis of final-form letters:
    words that *end* with a final letter vote logical; words that end with a
    non-final form, or *start* with a final form, vote visual. This prober
    never reports a confidence of its own -- the group prober calls
    get_charset_name() once one of the two model probers turns positive.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        self._mLogicalProber = None
        self._mVisualProber = None
        self.reset()

    def reset(self):
        self._mFinalCharLogicalScore = 0
        self._mFinalCharVisualScore = 0
        # Seed the two-character lookback with spaces so the stream begins
        # on a simulated word boundary.
        self._mPrev = ' '
        self._mBeforePrev = ' '

    def set_model_probers(self, logicalProber, visualProber):
        # The model probers are owned (created/fed/reset) by the group prober.
        self._mLogicalProber = logicalProber
        self._mVisualProber = visualProber

    def is_final(self, c):
        return c in [FINAL_KAF, FINAL_MEM, FINAL_NUN, FINAL_PE, FINAL_TSADI]

    def is_non_final(self, c):
        # NORMAL_TSADI is deliberately excluded: an apostrophe following a
        # tsadi is turned into a space by the pre-filtering, which would make
        # a legitimate non-final tsadi look like a word ending. Pe and Kaf
        # can legally end loanwords ('Pop', 'Winamp', 'Mubarak'), but such
        # words are rare enough that keeping them is a net win.
        return c in [NORMAL_KAF, NORMAL_MEM, NORMAL_NUN, NORMAL_PE]

    def feed(self, aBuf):
        # Accumulate final-letter evidence:
        #  * word of 2+ letters ending in a final form     -> +1 logical
        #  * word of 2+ letters ending in a non-final form -> +1 visual
        #  * word of 2+ letters starting with a final form -> +1 visual
        # Low-ASCII characters are replaced with spaces below so word
        # boundaries are reliable. Mid-word final letters carry no signal
        # for either layout and are ignored.
        if self.get_state() == constants.eNotMe:
            # Neither model prober believes this is Hebrew; give up.
            return constants.eNotMe
        for cur in self.filter_high_bit_only(aBuf):
            if cur == ' ':
                # A word just ended; only score words of 2+ letters.
                if self._mBeforePrev != ' ':
                    if self.is_final(self._mPrev):
                        self._mFinalCharLogicalScore += 1
                    elif self.is_non_final(self._mPrev):
                        self._mFinalCharVisualScore += 1
            elif (self._mBeforePrev == ' ') and self.is_final(self._mPrev):
                # A final letter opened a word of 2+ letters: visual layout.
                self._mFinalCharVisualScore += 1
            self._mBeforePrev = self._mPrev
            self._mPrev = cur
        # Keep detecting until both model probers bail out (handled above).
        return constants.eDetecting

    def get_charset_name(self):
        # 1) Decisive final-letter evidence wins outright.
        letter_gap = self._mFinalCharLogicalScore - self._mFinalCharVisualScore
        if letter_gap >= MIN_FINAL_CHAR_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if letter_gap <= -MIN_FINAL_CHAR_DISTANCE:
            return VISUAL_HEBREW_NAME
        # 2) Near-tie: consult the confidence gap between the model probers.
        model_gap = self._mLogicalProber.get_confidence() \
            - self._mVisualProber.get_confidence()
        if model_gap > MIN_MODEL_DISTANCE:
            return LOGICAL_HEBREW_NAME
        if model_gap < -MIN_MODEL_DISTANCE:
            return VISUAL_HEBREW_NAME
        # 3) Still ambiguous: any visual lean in the letter score decides,
        #    otherwise default to logical Hebrew.
        return VISUAL_HEBREW_NAME if letter_gap < 0.0 else LOGICAL_HEBREW_NAME

    def get_state(self):
        # Remain active while at least one model prober is still detecting.
        both_out = ((self._mLogicalProber.get_state() == constants.eNotMe) and
                    (self._mVisualProber.get_state() == constants.eNotMe))
        return constants.eNotMe if both_out else constants.eDetecting
| gpl-3.0 |
luca76/QGIS | python/console/console_editor.py | 4 | 58453 | # -*- coding:utf-8 -*-
"""
/***************************************************************************
Python Console for QGIS
-------------------
begin : 2012-09-10
copyright : (C) 2012 by Salvatore Larosa
email : lrssvtml (at) gmail (dot) com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
Some portions of code were taken from https://code.google.com/p/pydee/
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qsci import (QsciScintilla,
QsciScintillaBase,
QsciLexerPython,
QsciAPIs,
QsciStyle)
from qgis.core import QgsApplication
from qgis.gui import QgsMessageBar
import sys
import os
import subprocess
import datetime
import pyclbr
from operator import itemgetter
import traceback
import codecs
import re
class KeyFilter(QObject):
    """Qt event filter dispatching editor-tab shortcuts (new/save/close).

    Shortcuts are only active when more than one tab is open.
    """
    SHORTCUTS = {
        ("Control", "T"): lambda w, t: w.newTabEditor(),
        ("Control", "M"): lambda w, t: t.save(),
        ("Control", "W"): lambda w, t: t.close()
    }

    def __init__(self, window, tab, *args):
        QObject.__init__(self, *args)
        self.window = window
        self.tab = tab
        # Map Qt key code -> list of (modifier mask, handler) pairs.
        self._handlers = {}
        for (mods, letter), callback in KeyFilter.SHORTCUTS.iteritems():
            if not isinstance(mods, list):
                mods = [mods]
            combined = Qt.NoModifier
            for mod_name in mods:
                combined |= getattr(Qt, mod_name + "Modifier")
            keycode = getattr(Qt, "Key_" + letter.upper())
            self._handlers.setdefault(keycode, []).append((combined, callback))

    def get_handler(self, key, modifier):
        # Shortcuts only apply when more than one tab is open.
        if self.window.count() <= 1:
            return None
        for mod_mask, callback in self._handlers.get(key, []):
            if mod_mask == modifier:
                return callback
        return None

    def eventFilter(self, obj, event):
        if event.type() == QEvent.KeyPress and event.key() < 256:
            callback = self.get_handler(event.key(), event.modifiers())
            if callback:
                callback(self.window, self.tab)
        # Always fall through to the default filter behaviour.
        return QObject.eventFilter(self, obj, event)
class Editor(QsciScintilla):
    """QScintilla-based source editor widget used by the QGIS Python console."""
    MARKER_NUM = 6  # marker id used for the syntax-error line marker
def __init__(self, parent=None):
    """Set up the editor: fonts, margins, markers, folding, edge marker,
    indentation behaviour, lexer/autocompletion and keyboard shortcuts."""
    super(Editor,self).__init__(parent)
    self.parent = parent
    ## recent modification time
    self.lastModified = 0
    self.opening = ['(', '{', '[', "'", '"']
    self.closing = [')', '}', ']', "'", '"']
    ## List of marker line to be deleted from check syntax
    self.bufferMarkerLine = []
    self.settings = QSettings()
    # Enable non-ascii chars for editor
    self.setUtf8(True)
    # Set the default font
    font = QFont()
    font.setFamily('Courier')
    font.setFixedPitch(True)
    font.setPointSize(10)
    self.setFont(font)
    self.setMarginsFont(font)
    # Margin 0 is used for line numbers
    #fm = QFontMetrics(font)
    fontmetrics = QFontMetrics(font)
    self.setMarginsFont(font)
    self.setMarginWidth(0, fontmetrics.width("0000") + 5)
    self.setMarginLineNumbers(0, True)
    self.setMarginsForegroundColor(QColor("#3E3EE3"))
    self.setMarginsBackgroundColor(QColor("#f9f9f9"))
    self.setCaretLineVisible(True)
    self.setCaretWidth(2)
    # Marker used by the syntax checker to flag offending lines.
    self.markerDefine(QgsApplication.getThemePixmap("console/iconSyntaxErrorConsole.png"),
                      self.MARKER_NUM)
    self.setMinimumHeight(120)
    #self.setMinimumWidth(300)
    self.setBraceMatching(QsciScintilla.SloppyBraceMatch)
    self.setMatchedBraceBackgroundColor(QColor("#b7f907"))
    # Folding
    self.setFolding(QsciScintilla.PlainFoldStyle)
    self.setFoldMarginColors(QColor("#f4f4f4"),QColor("#f4f4f4"))
    #self.setWrapMode(QsciScintilla.WrapWord)
    ## Edge Mode
    self.setEdgeMode(QsciScintilla.EdgeLine)
    self.setEdgeColumn(80)
    self.setEdgeColor(QColor("#FF0000"))
    #self.setWrapMode(QsciScintilla.WrapCharacter)
    self.setWhitespaceVisibility(QsciScintilla.WsVisibleAfterIndent)
    #self.SendScintilla(QsciScintilla.SCI_SETHSCROLLBAR, 0)
    self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
    # Apply the user-configurable settings (lexer, autocompletion, colours).
    self.settingsEditor()
    # Annotations
    self.setAnnotationDisplay(QsciScintilla.ANNOTATION_BOXED)
    # Indentation
    self.setAutoIndent(True)
    self.setIndentationsUseTabs(False)
    self.setIndentationWidth(4)
    self.setTabIndents(True)
    self.setBackspaceUnindents(True)
    self.setTabWidth(4)
    self.setIndentationGuides(True)
    ## Disable command key
    ctrl, shift = self.SCMOD_CTRL<<16, self.SCMOD_SHIFT<<16
    self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('L')+ ctrl)
    self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('T')+ ctrl)
    self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('D')+ ctrl)
    self.SendScintilla(QsciScintilla.SCI_CLEARCMDKEY, ord('L')+ ctrl+shift)
    ## New QShortcut = ctrl+space/ctrl+alt+space for Autocomplete
    self.newShortcutCS = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_Space), self)
    self.newShortcutCS.setContext(Qt.WidgetShortcut)
    self.redoScut = QShortcut(QKeySequence(Qt.CTRL + Qt.SHIFT + Qt.Key_Z), self)
    self.redoScut.setContext(Qt.WidgetShortcut)
    self.redoScut.activated.connect(self.redo)
    self.newShortcutCS.activated.connect(self.autoCompleteKeyBinding)
    self.runScut = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_E), self)
    self.runScut.setContext(Qt.WidgetShortcut)
    self.runScut.activated.connect(self.runSelectedCode)
    self.runScriptScut = QShortcut(QKeySequence(Qt.SHIFT + Qt.CTRL + Qt.Key_E), self)
    self.runScriptScut.setContext(Qt.WidgetShortcut)
    self.runScriptScut.activated.connect(self.runScriptCode)
    self.syntaxCheckScut = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_4), self)
    self.syntaxCheckScut.setContext(Qt.WidgetShortcut)
    self.syntaxCheckScut.activated.connect(self.syntaxCheck)
    self.commentScut = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_3), self)
    self.commentScut.setContext(Qt.WidgetShortcut)
    self.commentScut.activated.connect(self.parent.pc.commentCode)
    self.uncommentScut = QShortcut(QKeySequence(Qt.SHIFT + Qt.CTRL + Qt.Key_3), self)
    self.uncommentScut.setContext(Qt.WidgetShortcut)
    self.uncommentScut.activated.connect(self.parent.pc.uncommentCode)
    # Keep the tab widget informed of dirty/read-only state.
    self.modificationChanged.connect(self.parent.modified)
    self.modificationAttempted.connect(self.fileReadOnly)
def settingsEditor(self):
    """Apply user settings: lexer, autocompletion source/threshold and
    caret colours."""
    # Set Python lexer
    self.setLexers()
    s = self.settings
    self.setAutoCompletionThreshold(
        s.value("pythonConsole/autoCompThresholdEditor", 2, type=int))
    source = s.value("pythonConsole/autoCompleteSourceEditor", 'fromAPI')
    if s.value("pythonConsole/autoCompleteEnabledEditor", True, type=bool):
        source_map = {'fromDoc': self.AcsDocument,
                      'fromAPI': self.AcsAPIs,
                      'fromDocAPI': self.AcsAll}
        # NOTE: an unrecognised source value leaves the current
        # autocompletion source untouched, as in the original logic.
        if source in source_map:
            self.setAutoCompletionSource(source_map[source])
    else:
        self.setAutoCompletionSource(self.AcsNone)
    self.setCaretLineBackgroundColor(
        s.value("pythonConsole/caretLineColorEditor", QColor("#fcf3ed")))
    self.setCaretForegroundColor(
        s.value("pythonConsole/cursorColorEditor", QColor(Qt.black)))
def autoCompleteKeyBinding(self):
    """Trigger autocompletion from the source configured in settings."""
    if not self.settings.value("pythonConsole/autoCompleteEnabledEditor", True, type=bool):
        return
    source = self.settings.value("pythonConsole/autoCompleteSourceEditor", 'fromAPI')
    triggers = {'fromDoc': self.autoCompleteFromDocument,
                'fromAPI': self.autoCompleteFromAPIs,
                'fromDocAPI': self.autoCompleteFromAll}
    trigger = triggers.get(source)
    if trigger is not None:
        trigger()
def setLexers(self):
    """Configure the Python lexer: font, per-style colours and the
    autocompletion API source (bundled pyqgis.pap, a user-prepared
    .pap file, or individual user API files)."""
    from qgis.core import QgsApplication
    self.lexer = QsciLexerPython()
    self.lexer.setIndentationWarning(QsciLexerPython.Inconsistent)
    self.lexer.setFoldComments(True)
    self.lexer.setFoldQuotes(True)
    loadFont = self.settings.value("pythonConsole/fontfamilytextEditor", "Monospace")
    fontSize = self.settings.value("pythonConsole/fontsizeEditor", 10, type=int)
    font = QFont(loadFont)
    font.setFixedPitch(True)
    font.setPointSize(fontSize)
    font.setStyleHint(QFont.TypeWriter)
    font.setStretch(QFont.SemiCondensed)
    font.setLetterSpacing(QFont.PercentageSpacing, 87.0)
    font.setBold(False)
    self.lexer.setDefaultFont(font)
    # The numeric indexes below are QsciLexerPython style ids
    # (1=comment, 3/4=strings, 5=keyword, 8=class name, 9=function, ...).
    self.lexer.setDefaultColor(QColor(self.settings.value("pythonConsole/defaultFontColorEditor", QColor(Qt.black))))
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/commentFontColorEditor", QColor(Qt.gray))), 1)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/keywordFontColorEditor", QColor(Qt.darkGreen))), 5)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/classFontColorEditor", QColor(Qt.blue))), 8)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/methodFontColorEditor", QColor(Qt.darkGray))), 9)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/decorFontColorEditor", QColor(Qt.darkBlue))), 15)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/commentBlockFontColorEditor", QColor(Qt.gray))), 12)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/singleQuoteFontColorEditor", QColor(Qt.blue))), 4)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/doubleQuoteFontColorEditor", QColor(Qt.blue))), 3)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/tripleSingleQuoteFontColorEditor", QColor(Qt.blue))), 6)
    self.lexer.setColor(QColor(self.settings.value("pythonConsole/tripleDoubleQuoteFontColorEditor", QColor(Qt.blue))), 7)
    self.lexer.setFont(font, 1)
    self.lexer.setFont(font, 3)
    self.lexer.setFont(font, 4)
    for style in range(0, 33):
        paperColor = QColor(self.settings.value("pythonConsole/paperBackgroundColorEditor", QColor(Qt.white)))
        self.lexer.setPaper(paperColor, style)
    # Autocompletion API source selection.
    self.api = QsciAPIs(self.lexer)
    chekBoxAPI = self.settings.value("pythonConsole/preloadAPI", True, type=bool)
    chekBoxPreparedAPI = self.settings.value("pythonConsole/usePreparedAPIFile", False, type=bool)
    if chekBoxAPI:
        pap = os.path.join(QgsApplication.pkgDataPath(), "python", "qsci_apis", "pyqgis.pap")
        self.api.loadPrepared(pap)
    elif chekBoxPreparedAPI:
        self.api.loadPrepared(self.settings.value("pythonConsole/preparedAPIFile"))
    else:
        apiPath = self.settings.value("pythonConsole/userAPI", [])
        for i in range(0, len(apiPath)):
            self.api.load(unicode(apiPath[i]))
    self.api.prepare()
    self.lexer.setAPIs(self.api)
    self.setLexer(self.lexer)
def move_cursor_to_end(self):
    """Place the cursor after the last character and scroll it into view."""
    endline, endcol = self.get_end_pos()
    self.setCursorPosition(endline, endcol)
    self.ensureCursorVisible()
    self.ensureLineVisible(endline)
def get_end_pos(self):
    """Return the (line, index) position of the last character."""
    last_line = self.lines() - 1
    return last_line, len(self.text(last_line))
def contextMenuEvent(self, e):
    """Build and show the editor's right-click menu, enabling entries
    according to the current selection/clipboard/undo state."""
    menu = QMenu(self)
    iconRun = QgsApplication.getThemeIcon("console/iconRunConsole.png")
    iconRunScript = QgsApplication.getThemeIcon("console/iconRunScriptConsole.png")
    iconCodePad = QgsApplication.getThemeIcon("console/iconCodepadConsole.png")
    iconCommentEditor = QgsApplication.getThemeIcon("console/iconCommentEditorConsole.png")
    iconUncommentEditor = QgsApplication.getThemeIcon("console/iconUncommentEditorConsole.png")
    iconSettings = QgsApplication.getThemeIcon("console/iconSettingsConsole.png")
    iconFind = QgsApplication.getThemeIcon("console/iconSearchEditorConsole.png")
    iconSyntaxCk = QgsApplication.getThemeIcon("console/iconSyntaxErrorConsole.png")
    iconObjInsp = QgsApplication.getThemeIcon("console/iconClassBrowserConsole.png")
    iconCut = QgsApplication.getThemeIcon("console/iconCutEditorConsole.png")
    iconCopy = QgsApplication.getThemeIcon("console/iconCopyEditorConsole.png")
    iconPaste = QgsApplication.getThemeIcon("console/iconPasteEditorConsole.png")
    hideEditorAction = menu.addAction(
        QCoreApplication.translate("PythonConsole", "Hide Editor"),
        self.hideEditor)
    menu.addSeparator() # ------------------------------
    syntaxCheck = menu.addAction(iconSyntaxCk,
                                 QCoreApplication.translate("PythonConsole", "Check Syntax"),
                                 self.syntaxCheck, 'Ctrl+4')
    menu.addSeparator()
    runSelected = menu.addAction(iconRun,
                                 QCoreApplication.translate("PythonConsole", "Run selected"),
                                 self.runSelectedCode, 'Ctrl+E')
    runScript = menu.addAction(iconRunScript,
                               QCoreApplication.translate("PythonConsole", "Run Script"),
                               self.runScriptCode, 'Shift+Ctrl+E')
    menu.addSeparator()
    undoAction = menu.addAction(
        QCoreApplication.translate("PythonConsole", "Undo"),
        self.undo, QKeySequence.Undo)
    redoAction = menu.addAction(
        QCoreApplication.translate("PythonConsole", "Redo"),
        self.redo, 'Ctrl+Shift+Z')
    menu.addSeparator()
    findAction = menu.addAction(iconFind,
                                QCoreApplication.translate("PythonConsole", "Find Text"),
                                self.showFindWidget)
    menu.addSeparator()
    cutAction = menu.addAction(iconCut,
                               QCoreApplication.translate("PythonConsole", "Cut"),
                               self.cut, QKeySequence.Cut)
    copyAction = menu.addAction(iconCopy,
                                QCoreApplication.translate("PythonConsole", "Copy"),
                                self.copy, QKeySequence.Copy)
    pasteAction = menu.addAction(iconPaste,
                                 QCoreApplication.translate("PythonConsole", "Paste"),
                                 self.paste, QKeySequence.Paste)
    menu.addSeparator()
    commentCodeAction = menu.addAction(iconCommentEditor,
                                       QCoreApplication.translate("PythonConsole", "Comment"),
                                       self.parent.pc.commentCode, 'Ctrl+3')
    uncommentCodeAction = menu.addAction(iconUncommentEditor,
                                         QCoreApplication.translate("PythonConsole", "Uncomment"),
                                         self.parent.pc.uncommentCode, 'Shift+Ctrl+3')
    menu.addSeparator()
    codePadAction = menu.addAction(iconCodePad,
                                   QCoreApplication.translate("PythonConsole", "Share on codepad"),
                                   self.codepad)
    menu.addSeparator()
    showCodeInspection = menu.addAction(iconObjInsp,
                                        QCoreApplication.translate("PythonConsole", "Hide/Show Object Inspector"),
                                        self.objectListEditor)
    menu.addSeparator()
    selectAllAction = menu.addAction(
        QCoreApplication.translate("PythonConsole", "Select All"),
        self.selectAll, QKeySequence.SelectAll)
    menu.addSeparator()
    settingsDialog = menu.addAction(iconSettings,
                                    QCoreApplication.translate("PythonConsole", "Settings"),
                                    self.parent.pc.openSettings)
    # Start with every state-dependent entry disabled, then re-enable
    # below according to selection, buffer content and clipboard state.
    syntaxCheck.setEnabled(False)
    pasteAction.setEnabled(False)
    codePadAction.setEnabled(False)
    cutAction.setEnabled(False)
    runSelected.setEnabled(False)
    copyAction.setEnabled(False)
    selectAllAction.setEnabled(False)
    undoAction.setEnabled(False)
    redoAction.setEnabled(False)
    showCodeInspection.setEnabled(False)
    if self.hasSelectedText():
        runSelected.setEnabled(True)
        copyAction.setEnabled(True)
        cutAction.setEnabled(True)
        codePadAction.setEnabled(True)
    if not self.text() == '':
        selectAllAction.setEnabled(True)
        syntaxCheck.setEnabled(True)
    if self.isUndoAvailable():
        undoAction.setEnabled(True)
    if self.isRedoAvailable():
        redoAction.setEnabled(True)
    if QApplication.clipboard().text():
        pasteAction.setEnabled(True)
    if self.settings.value("pythonConsole/enableObjectInsp",
                           False, type=bool):
        showCodeInspection.setEnabled(True)
    action = menu.exec_(self.mapToGlobal(e.pos()))
def findText(self, forward):
    """Search the editor for the text in the console's find bar.

    :param forward: search direction; when False the search restarts from
        the start of the current selection so repeated backward searches
        do not re-match the same occurrence.

    Flags the find bar with an error style and a message-bar notice when
    nothing matches. Fix: the old local flag was named ``re``, shadowing
    the module-level ``re`` import; renamed to ``useRegex``.
    """
    lineFrom, indexFrom, lineTo, indexTo = self.getSelection()
    line, index = self.getCursorPosition()
    text = self.parent.pc.lineEditFind.text()
    useRegex = False  # plain-text search, never a regular expression
    wrap = self.parent.pc.wrapAround.isChecked()
    cs = self.parent.pc.caseSensitive.isChecked()
    wo = self.parent.pc.wholeWord.isChecked()
    notFound = False
    if text:
        if not forward:
            line = lineFrom
            index = indexFrom
        ## findFirst(text, re, cs, wo, wrap, forward, line, index)
        ## re = Regular Expression, cs = Case Sensitive, wo = Whole Word, wrap = Wrap Around
        if not self.findFirst(text, useRegex, cs, wo, wrap, forward, line, index):
            notFound = True
    if notFound:
        styleError = 'QLineEdit {background-color: #d65253; color: #ffffff;}'
        msgText = QCoreApplication.translate('PythonConsole',
                                             '<b>"{0}"</b> was not found.').format(text)
        self.parent.pc.callWidgetMessageBarEditor(msgText, 0, True)
    else:
        styleError = ''
    self.parent.pc.lineEditFind.setStyleSheet(styleError)
def objectListEditor(self):
    """Toggle visibility of the object-inspector list and sync its button."""
    inspector = self.parent.pc.listClassMethod
    currently_visible = inspector.isVisible()
    if currently_visible:
        inspector.hide()
    else:
        inspector.show()
    self.parent.pc.objectListButton.setChecked(not currently_visible)
def codepad(self):
    """Post the selected text to codepad.org and copy the share URL to
    the clipboard (best effort; connection errors go to the message bar)."""
    import urllib2, urllib
    listText = self.selectedText().split('\n')
    getCmd = []
    for strLine in listText:
        getCmd.append(unicode(strLine))
    pasteText = u"\n".join(getCmd)
    url = 'http://codepad.org'
    values = {'lang' : 'Python',
              'code' : pasteText,
              'submit':'Submit'}
    try:
        response = urllib2.urlopen(url, urllib.urlencode(values))
        url = response.read()
        # Scrape the "Link:" anchor out of the returned HTML page.
        # NOTE(review): if the page contains no such anchor, 'link' is
        # never bound and the 'if link' below raises NameError -- confirm.
        for href in url.split("</a>"):
            if "Link:" in href:
                ind = href.index('Link:')
                found = href[ind+5:]
                for i in found.split('">'):
                    if '<a href=' in i:
                        link = i.replace('<a href="',"").strip()
        if link:
            QApplication.clipboard().setText(link)
            msgText = QCoreApplication.translate('PythonConsole', 'URL copied to clipboard.')
            self.parent.pc.callWidgetMessageBarEditor(msgText, 0, True)
    except urllib2.URLError, e:
        msgText = QCoreApplication.translate('PythonConsole', 'Connection error: ')
        self.parent.pc.callWidgetMessageBarEditor(msgText + str(e.args), 0, True)
def hideEditor(self):
    """Collapse the editor pane and untoggle its toolbar button."""
    console = self.parent.pc
    console.splitterObj.hide()
    console.showEditorButton.setChecked(False)
def showFindWidget(self):
    """Toggle the find-text bar and keep its toolbar button in sync."""
    finder = self.parent.pc.widgetFind
    currently_visible = finder.isVisible()
    if currently_visible:
        finder.hide()
    else:
        finder.show()
    self.parent.pc.findTextButton.setChecked(not currently_visible)
def commentEditorCode(self, commentCheck):
    """Comment (``commentCheck=True``) or uncomment the selected lines --
    or the current line when nothing is selected -- as one undo step.

    Fix: the original no-selection uncomment path ``return``ed between
    beginUndoAction() and endUndoAction(), leaving the undo action open;
    the duplicated selection/no-selection branches are also unified.
    """
    self.beginUndoAction()
    try:
        if self.hasSelectedText():
            startLine, _, endLine, _ = self.getSelection()
        else:
            # Treat the cursor line as a one-line selection.
            startLine, _ = self.getCursorPosition()
            endLine = startLine
        for line in range(startLine, endLine + 1):
            if commentCheck:
                self.insertAt('#', line, 0)
            elif self.text(line).strip().startswith('#'):
                # Remove the single character just after the indentation.
                self.setSelection(line, self.indentation(line),
                                  line, self.indentation(line) + 1)
                self.removeSelectedText()
    finally:
        # Always close the undo action, even on early exit.
        self.endUndoAction()
def createTempFile(self):
    """Write the editor contents to a UTF-8 encoded temporary ``.py`` file.

    Returns the path of the created file; the caller is responsible for
    deleting it (see ``_runSubProcess``).

    Fix: ask ``mkstemp`` for the ``.py`` suffix directly instead of
    creating a suffix-less file, writing to it through a second handle
    while the mkstemp descriptor was still open, and renaming afterwards.
    """
    import tempfile
    fd, tmpFileName = tempfile.mkstemp(suffix='.py')
    # Close the low-level descriptor before reopening through codecs so
    # the file is written exactly once, as UTF-8 text.
    os.close(fd)
    with codecs.open(tmpFileName, "w", encoding='utf-8') as f:
        f.write(self.text())
    return tmpFileName
def _runSubProcess(self, filename, tmp=False):
    """Run *filename* with an external 'python' interpreter and forward
    its stdout/stderr to the console.

    :param filename: path of the script to execute
    :param tmp: True when *filename* is a temporary file that must be
                deleted once the run has finished
    """
    dir = QFileInfo(filename).path()
    file = QFileInfo(filename).fileName()
    name = QFileInfo(filename).baseName()
    # Make the script's directory importable and refresh any previously
    # imported module of the same name so a re-run sees the new code.
    if dir not in sys.path:
        sys.path.append(dir)
    if name in sys.modules:
        reload(sys.modules[name])
    try:
        ## set creationflags for running command without shell window
        if sys.platform.startswith('win'):
            p = subprocess.Popen(['python', unicode(filename)], shell=False, stdin=subprocess.PIPE,
                                 stderr=subprocess.PIPE, stdout=subprocess.PIPE, creationflags=0x08000000)
        else:
            p = subprocess.Popen(['python', unicode(filename)], shell=False, stdin=subprocess.PIPE,
                                 stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        out, _traceback = p.communicate()
        ## Fix interrupted system call on OSX
        if sys.platform == 'darwin':
            status = None
            while status is None:
                try:
                    status = p.wait()
                except OSError, e:
                    if e.errno == 4:
                        # EINTR: retry the wait instead of failing.
                        pass
                    else:
                        raise e
        if tmp:
            tmpFileTr = QCoreApplication.translate('PythonConsole', ' [Temporary file saved in {0}]').format(dir)
            file = file + tmpFileTr
        if _traceback:
            # The subprocess wrote to stderr: report it as a script error.
            msgTraceTr = QCoreApplication.translate('PythonConsole', '## Script error: {0}').format(file)
            print "## %s" % datetime.datetime.now()
            print unicode(msgTraceTr)
            sys.stderr.write(_traceback)
            p.stderr.close()
        else:
            msgSuccessTr = QCoreApplication.translate('PythonConsole',
                                                      '## Script executed successfully: {0}').format(file)
            print "## %s" % datetime.datetime.now()
            print unicode(msgSuccessTr)
            sys.stdout.write(out)
            p.stdout.close()
        del p
        if tmp:
            # Temporary files are one-shot: remove after a completed run.
            os.remove(filename)
    except IOError, error:
        IOErrorTr = QCoreApplication.translate('PythonConsole',
                                               'Cannot execute file {0}. Error: {1}\n').format(filename,
                                                                                               error.strerror)
        print '## Error: ' + IOErrorTr
    except:
        # Unexpected failure: dump the full traceback to the console.
        s = traceback.format_exc()
        print '## Error: '
        sys.stderr.write(s)
def runScriptCode(self):
    """Run the content of the current editor tab in the console shell.

    Unsaved buffers are either rejected with a message or, when
    auto-save is enabled, executed from a temporary file.
    """
    autoSave = self.settings.value("pythonConsole/autoSaveScript", False, type=bool)
    filename = self.parent.tw.currentWidget().path
    if filename is None:
        if not self.isModified():
            msg = QCoreApplication.translate('PythonConsole',
                                             'Hey, type something to run!')
            self.parent.pc.callWidgetMessageBarEditor(msg, 0, True)
            return
        if not autoSave:
            msg = QCoreApplication.translate('PythonConsole',
                                             'You have to save the file before running it.')
            self.parent.pc.callWidgetMessageBarEditor(msg, 0, True)
            return
    if not self.syntaxCheck(fromContextMenu=False):
        return
    if autoSave:
        if filename:
            self.parent.save(filename)
        else:
            # Unsaved buffer: execute it from a temporary file instead.
            filename = self.createTempFile()
    self.parent.pc.shell.runCommand(u"execfile(u'{0}'.encode('{1}'))"
                                    .format(filename.replace("\\", "/"),
                                            sys.getfilesystemencoding()))
def runSelectedCode(self):
    """Paste the selected text into the console shell and execute it."""
    shell = self.parent.pc.shell
    shell.insertFromDropPaste(self.selectedText())
    shell.entered()
    self.setFocus()
def getTextFromEditor(self):
    """Return the editor content as a list of lines."""
    return self.text().split("\n")
def goToLine(self, objName, linenr):
    """Jump to *linenr* and select the first occurrence of *objName*
    on or after that line (used by the object inspector).
    """
    self.SendScintilla(QsciScintilla.SCI_GOTOLINE, linenr-1)
    # Search forward from the current position to the end of the text.
    self.SendScintilla(QsciScintilla.SCI_SETTARGETSTART,
                       self.SendScintilla(QsciScintilla.SCI_GETCURRENTPOS))
    self.SendScintilla(QsciScintilla.SCI_SETTARGETEND, len(self.text()))
    pos = self.SendScintilla(QsciScintilla.SCI_SEARCHINTARGET, len(objName), objName)
    # Column of the match relative to the start of the target line.
    index = pos - self.SendScintilla(QsciScintilla.SCI_GETCURRENTPOS)
    #line, _ = self.getCursorPosition()
    self.setSelection(linenr - 1, index, linenr - 1, index + len(objName))
    self.ensureLineVisible(linenr)
    self.setFocus()
def syntaxCheck(self, filename=None, fromContextMenu=True):
    """Compile the editor content to detect syntax errors.

    When compilation fails, the offending line is marked and annotated
    with the error description.

    :return: True when the source compiles, False otherwise.
    """
    eline = None
    ecolumn = 0
    edescr = ''
    source = unicode(self.text())
    try:
        if not filename:
            filename = self.parent.tw.currentWidget().path
        #source = open(filename, 'r').read() + '\n'
        # compile() wants byte strings here; encode unicode input.
        if type(source) == type(u""):
            source = source.encode('utf-8')
        if type(filename) == type(u""):
            filename = filename.encode('utf-8')
        compile(source, str(filename), 'exec')
    except SyntaxError, detail:
        s = traceback.format_exception_only(SyntaxError, detail)
        fn = detail.filename
        # Fall back to line 1 / column 1 when the exception carries none.
        eline = detail.lineno and detail.lineno or 1
        ecolumn = detail.offset and detail.offset or 1
        edescr = detail.msg
    if eline != None:
        # Scintilla lines are 0-based.
        eline -= 1
        # Clear markers/annotations left over from a previous check.
        # NOTE(review): bufferMarkerLine is mutated while being iterated,
        # which can skip entries — verify this is intended.
        for markerLine in self.bufferMarkerLine:
            self.markerDelete(markerLine)
            self.clearAnnotations(markerLine)
            self.bufferMarkerLine.remove(markerLine)
        if (eline) not in self.bufferMarkerLine:
            self.bufferMarkerLine.append(eline)
        self.markerAdd(eline, self.MARKER_NUM)
        loadFont = self.settings.value("pythonConsole/fontfamilytextEditor",
                                       "Monospace")
        # Red italic annotation on an orange background.
        styleAnn = QsciStyle(-1,"Annotation",
                             QColor(255,0,0),
                             QColor(255,200,0),
                             QFont(loadFont, 8,-1,True),
                             True)
        self.annotate(eline, edescr, styleAnn)
        self.setCursorPosition(eline, ecolumn-1)
        #self.setSelection(eline, ecolumn, eline, self.lineLength(eline)-1)
        self.ensureLineVisible(eline)
        #self.ensureCursorVisible()
        return False
    else:
        # Source is clean: drop any stale markers and annotations.
        self.markerDeleteAll()
        self.clearAnnotations()
        return True
def keyPressEvent(self, e):
    """Intercept typing to auto-close brackets/quotes and to expand
    'from x ' into 'from x import' when the corresponding options are
    enabled; everything else falls through to QsciScintilla.
    """
    t = unicode(e.text())
    startLine, _, endLine, endPos = self.getSelection()
    line, pos = self.getCursorPosition()
    self.autoCloseBracket = self.settings.value("pythonConsole/autoCloseBracketEditor", False, type=bool)
    self.autoImport = self.settings.value("pythonConsole/autoInsertionImportEditor", True, type=bool)
    # Text of the current line up to the cursor, used for pattern checks.
    txt = self.text(line)[:pos]
    ## Close bracket automatically
    if t in self.opening and self.autoCloseBracket:
        self.beginUndoAction()
        i = self.opening.index(t)
        if self.hasSelectedText():
            # Wrap the selection in the typed pair; a multi-line string
            # selection gets triple quotes instead.
            selText = self.selectedText()
            self.removeSelectedText()
            if startLine == endLine:
                self.insert(self.opening[i] + selText + self.closing[i])
                self.setCursorPosition(endLine, endPos+2)
                self.endUndoAction()
                return
            elif startLine < endLine and self.opening[i] in ("'", '"'):
                self.insert("'''" + selText + "'''")
                self.setCursorPosition(endLine, endPos+3)
                self.endUndoAction()
                return
        elif t == '(' and (re.match(r'^[ \t]*def \w+$', txt) \
                           or re.match(r'^[ \t]*class \w+$', txt)):
            # 'def name(' / 'class name(' -> complete the signature.
            self.insert('):')
        else:
            self.insert(self.closing[i])
        self.endUndoAction()
    ## FIXES #8392 (automatically removes the redundant char
    ## when autoclosing brackets option is enabled)
    elif t in [')', ']', '}'] and self.autoCloseBracket:
        txt = self.text(line)
        try:
            # Typing a closer right before an auto-inserted closer: keep
            # only one by deleting the duplicate.
            if txt[pos-1] in self.opening and t == txt[pos]:
                self.setCursorPosition(line, pos+1)
                self.SendScintilla(QsciScintilla.SCI_DELETEBACK)
        except IndexError:
            pass
    elif t == ' ' and self.autoImport:
        # 'from module ' -> 'from module import '
        ptrn = r'^[ \t]*from [\w.]+$'
        if re.match(ptrn, txt):
            self.insert(' import')
            self.setCursorPosition(line, pos + 7)
    QsciScintilla.keyPressEvent(self, e)
def focusInEvent(self, e):
    """On focus, detect external changes to the backing file: warn when
    it disappeared, reload the buffer when it changed on disk.
    """
    pathfile = self.parent.path
    if pathfile:
        if not QFileInfo(pathfile).exists():
            msgText = QCoreApplication.translate('PythonConsole',
                                                 'The file <b>"{0}"</b> has been deleted or is not accessible').format(pathfile)
            self.parent.pc.callWidgetMessageBarEditor(msgText, 2, False)
            return
    if pathfile and self.lastModified != QFileInfo(pathfile).lastModified():
        # Replace the buffer with the on-disk version inside a single
        # undo action so the reload can be undone as one step.
        self.beginUndoAction()
        self.selectAll()
        #fileReplaced = self.selectedText()
        self.removeSelectedText()
        file = open(pathfile, "r")
        fileLines = file.readlines()
        file.close()
        QApplication.setOverrideCursor(Qt.WaitCursor)
        # Each insert happens at the buffer start, so iterate in reverse
        # to end up with the lines in their original order.
        for line in reversed(fileLines):
            self.insert(line)
        QApplication.restoreOverrideCursor()
        self.setModified(False)
        self.endUndoAction()
        self.parent.tw.listObject(self.parent.tw.currentWidget())
        self.lastModified = QFileInfo(pathfile).lastModified()
        msgText = QCoreApplication.translate('PythonConsole',
                                             'The file <b>"{0}"</b> has been changed and reloaded').format(pathfile)
        self.parent.pc.callWidgetMessageBarEditor(msgText, 1, False)
    QsciScintilla.focusInEvent(self, e)
def fileReadOnly(self):
    """Warn the user that the current file cannot be written in place."""
    current_path = self.parent.tw.currentWidget().path
    msg = QCoreApplication.translate('PythonConsole',
                                     'The file <b>"{0}"</b> is read only, please save to different file first.').format(current_path)
    self.parent.pc.callWidgetMessageBarEditor(msg, 1, False)
class EditorTab(QWidget):
    """A single editor tab: wraps an Editor widget plus its message bar
    and handles loading/saving of the associated file.
    """

    def __init__(self, parent, parentConsole, filename, readOnly):
        """
        :param parent: the owning EditorTabWidget
        :param parentConsole: the PythonConsole widget
        :param filename: path to load into the tab, or None for a blank tab
        :param readOnly: open the editor read-only
        """
        super(EditorTab, self).__init__(parent)
        self.tw = parent
        self.pc = parentConsole
        self.path = None
        self.readOnly = readOnly
        # Fix: the original initialised this attribute twice
        # ({} followed by dict()); one initialisation is enough.
        self.fileExcuteList = {}
        self.newEditor = Editor(self)
        if filename:
            self.path = filename
            if QFileInfo(filename).exists():
                self.loadFile(filename, False)
        # Creates layout for message bar
        self.layout = QGridLayout(self.newEditor)
        self.layout.setContentsMargins(0, 0, 0, 0)
        spacerItem = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.layout.addItem(spacerItem, 1, 0, 1, 1)
        # messageBar instance
        self.infoBar = QgsMessageBar()
        sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
        self.infoBar.setSizePolicy(sizePolicy)
        self.layout.addWidget(self.infoBar, 0, 0, 1, 1)
        self.tabLayout = QGridLayout(self)
        self.tabLayout.setContentsMargins(0, 0, 0, 0)
        self.tabLayout.addWidget(self.newEditor)
        self.keyFilter = KeyFilter(parent, self)
        self.setEventFilter(self.keyFilter)

    def loadFile(self, filename, modified):
        """Read *filename* (UTF-8) into the editor widget."""
        self.newEditor.lastModified = QFileInfo(filename).lastModified()
        fn = codecs.open(unicode(filename), "rb", encoding='utf-8')
        txt = fn.read()
        fn.close()
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        self.newEditor.setText(txt)
        if self.readOnly:
            self.newEditor.setReadOnly(self.readOnly)
        QApplication.restoreOverrideCursor()
        self.newEditor.setModified(modified)
        self.newEditor.recolor()

    def save(self, fileName=None):
        """Save the editor content (asking for a path when none is set),
        keeping a '~' backup of any pre-existing file during the write.
        """
        index = self.tw.indexOf(self)
        if fileName:
            self.path = fileName
        if self.path is None:
            saveTr = QCoreApplication.translate('PythonConsole',
                                                'Python Console: Save file')
            self.path = str(QFileDialog().getSaveFileName(self,
                                                          saveTr,
                                                          self.tw.tabText(index) + '.py',
                                                          "Script file (*.py)"))
            # If the user didn't select a file, abort the save operation
            if len(self.path) == 0:
                self.path = None
                return
            self.tw.setCurrentWidget(self)
            msgText = QCoreApplication.translate('PythonConsole',
                                                 'Script was correctly saved.')
            self.pc.callWidgetMessageBarEditor(msgText, 0, True)
        # Rename the original file, if it exists
        path = unicode(self.path)
        overwrite = QFileInfo(path).exists()
        if overwrite:
            try:
                # NOTE(review): chmod with the file's current mode is a
                # no-op — presumably meant to force the file writable;
                # verify the intent before changing.
                permis = os.stat(path).st_mode
                #self.newEditor.lastModified = QFileInfo(path).lastModified()
                os.chmod(path, permis)
            except:
                raise
            temp_path = path + "~"
            if QFileInfo(temp_path).exists():
                os.remove(temp_path)
            os.rename(path, temp_path)
        # Save the new contents
        with codecs.open(path, "w", encoding='utf-8') as f:
            f.write(self.newEditor.text())
        if overwrite:
            os.remove(temp_path)
        if self.newEditor.isReadOnly():
            self.newEditor.setReadOnly(False)
        fN = path.split('/')[-1]
        self.tw.setTabTitle(index, fN)
        self.tw.setTabToolTip(index, path)
        self.newEditor.setModified(False)
        self.pc.saveFileButton.setEnabled(False)
        self.newEditor.lastModified = QFileInfo(path).lastModified()
        self.pc.updateTabListScript(path, action='append')
        self.tw.listObject(self)

    def modified(self, modified):
        """Relay the editor's modified state to the tab widget."""
        self.tw.tabModified(self, modified)

    def close(self):
        """Ask the tab widget to remove this tab."""
        self.tw._removeTab(self, tab2index=True)

    def setEventFilter(self, filter):
        """Install *filter* on the wrapped editor widget."""
        self.newEditor.installEventFilter(filter)

    def newTab(self):
        """Open a new, empty editor tab."""
        self.tw.newTabEditor()
class EditorTabWidget(QTabWidget):
    """Tab widget hosting the console's editor tabs.

    Also owns the "restore previous session" banner, the corner buttons
    (tab list, new tab) and the object-inspector population logic.
    """

    def __init__(self, parent):
        # NOTE(review): parent is stored but not passed to QTabWidget
        # (parent=None) — verify whether that is intentional.
        QTabWidget.__init__(self, parent=None)
        self.parent = parent
        # Index of the tab last right-clicked (set in contextMenuEvent).
        self.idx = -1
        # Layout for top frame (restore tabs)
        self.layoutTopFrame = QGridLayout(self)
        self.layoutTopFrame.setContentsMargins(0, 0, 0, 0)
        spacerItem = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
        self.layoutTopFrame.addItem(spacerItem, 1, 0, 1, 1)
        self.topFrame = QFrame(self)
        self.topFrame.setStyleSheet('background-color: rgb(255, 255, 230);')
        self.topFrame.setFrameShape(QFrame.StyledPanel)
        self.topFrame.setMinimumHeight(24)
        self.layoutTopFrame2 = QGridLayout(self.topFrame)
        self.layoutTopFrame2.setContentsMargins(0, 0, 0, 0)
        label = QCoreApplication.translate("PythonConsole",
                                           "Click on button to restore all tabs from last session.")
        self.label = QLabel(label)
        self.restoreTabsButton = QToolButton()
        toolTipRestore = QCoreApplication.translate("PythonConsole",
                                                    "Restore tabs")
        self.restoreTabsButton.setToolTip(toolTipRestore)
        self.restoreTabsButton.setIcon(QgsApplication.getThemeIcon("console/iconRestoreTabsConsole.png"))
        self.restoreTabsButton.setIconSize(QSize(24, 24))
        self.restoreTabsButton.setAutoRaise(True)
        self.restoreTabsButton.setCursor(Qt.PointingHandCursor)
        self.restoreTabsButton.setStyleSheet('QToolButton:hover{border: none } \
                                              QToolButton:pressed{border: none}')
        self.clButton = QToolButton()
        toolTipClose = QCoreApplication.translate("PythonConsole",
                                                  "Close")
        self.clButton.setToolTip(toolTipClose)
        self.clButton.setIcon(QgsApplication.getThemeIcon("mIconClose.png"))
        self.clButton.setIconSize(QSize(18, 18))
        self.clButton.setCursor(Qt.PointingHandCursor)
        self.clButton.setStyleSheet('QToolButton:hover{border: none } \
                                     QToolButton:pressed{border: none}')
        self.clButton.setAutoRaise(True)
        sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
        self.topFrame.setSizePolicy(sizePolicy)
        self.layoutTopFrame.addWidget(self.topFrame, 0, 0, 1, 1)
        self.layoutTopFrame2.addWidget(self.label, 0, 1, 1, 1)
        self.layoutTopFrame2.addWidget(self.restoreTabsButton, 0, 0, 1, 1)
        self.layoutTopFrame2.addWidget(self.clButton, 0, 2, 1, 1)
        self.topFrame.hide()
        self.connect(self.restoreTabsButton, SIGNAL('clicked()'), self.restoreTabs)
        self.connect(self.clButton, SIGNAL('clicked()'), self.closeRestore)
        # Restore script of the previuos session
        self.settings = QSettings()
        tabScripts = self.settings.value("pythonConsole/tabScripts", [])
        self.restoreTabList = tabScripts
        # Show the restore banner only when there is something to restore,
        # otherwise start with one blank tab.
        if self.restoreTabList:
            self.topFrame.show()
        else:
            self.newTabEditor(filename=None)
        ## Fixes #7653
        if sys.platform != 'darwin':
            self.setDocumentMode(True)
        self.setMovable(True)
        self.setTabsClosable(True)
        self.setTabPosition(QTabWidget.North)
        # Menu button list tabs
        self.fileTabMenu = QMenu()
        self.connect(self.fileTabMenu, SIGNAL("aboutToShow()"),
                     self.showFileTabMenu)
        self.connect(self.fileTabMenu, SIGNAL("triggered(QAction*)"),
                     self.showFileTabMenuTriggered)
        self.fileTabButton = QToolButton()
        txtToolTipMenuFile = QCoreApplication.translate("PythonConsole",
                                                        "List all tabs")
        self.fileTabButton.setToolTip(txtToolTipMenuFile)
        self.fileTabButton.setIcon(QgsApplication.getThemeIcon("console/iconFileTabsMenuConsole.png"))
        self.fileTabButton.setIconSize(QSize(24, 24))
        self.fileTabButton.setAutoRaise(True)
        self.fileTabButton.setPopupMode(QToolButton.InstantPopup)
        self.fileTabButton.setMenu(self.fileTabMenu)
        self.setCornerWidget(self.fileTabButton, Qt.TopRightCorner)
        self.connect(self, SIGNAL("tabCloseRequested(int)"), self._removeTab)
        self.connect(self, SIGNAL('currentChanged(int)'), self._currentWidgetChanged)
        # New Editor button
        self.newTabButton = QToolButton()
        txtToolTipNewTab = QCoreApplication.translate("PythonConsole",
                                                      "New Editor")
        self.newTabButton.setToolTip(txtToolTipNewTab)
        self.newTabButton.setAutoRaise(True)
        self.newTabButton.setIcon(QgsApplication.getThemeIcon("console/iconNewTabEditorConsole.png"))
        self.newTabButton.setIconSize(QSize(24, 24))
        self.setCornerWidget(self.newTabButton, Qt.TopLeftCorner)
        self.connect(self.newTabButton, SIGNAL('clicked()'), self.newTabEditor)

    def _currentWidgetChanged(self, tab):
        """Slot for currentChanged: refresh inspector, paths and save state."""
        if self.settings.value("pythonConsole/enableObjectInsp",
                               False, type=bool):
            self.listObject(tab)
        self.changeLastDirPath(tab)
        self.enableSaveIfModified(tab)

    def contextMenuEvent(self, e):
        """Show the per-tab context menu (new/close/save actions)."""
        tabBar = self.tabBar()
        self.idx = tabBar.tabAt(e.pos())
        if self.widget(self.idx):
            cW = self.widget(self.idx)
            menu = QMenu(self)
            menu.addSeparator()
            newTabAction = menu.addAction(
                QCoreApplication.translate("PythonConsole", "New Editor"),
                self.newTabEditor)
            menu.addSeparator()
            closeTabAction = menu.addAction(
                QCoreApplication.translate("PythonConsole", "Close Tab"),
                cW.close)
            closeAllTabAction = menu.addAction(
                QCoreApplication.translate("PythonConsole", "Close All"),
                self.closeAll)
            closeOthersTabAction = menu.addAction(
                QCoreApplication.translate("PythonConsole", "Close Others"),
                self.closeOthers)
            menu.addSeparator()
            saveAction = menu.addAction(
                QCoreApplication.translate("PythonConsole", "Save"),
                cW.save)
            saveAsAction = menu.addAction(
                QCoreApplication.translate("PythonConsole", "Save As"),
                self.saveAs)
            # Close actions only make sense with more than one tab; Save
            # only when the clicked tab has unsaved changes.
            closeTabAction.setEnabled(False)
            closeAllTabAction.setEnabled(False)
            closeOthersTabAction.setEnabled(False)
            saveAction.setEnabled(False)
            if self.count() > 1:
                closeTabAction.setEnabled(True)
                closeAllTabAction.setEnabled(True)
                closeOthersTabAction.setEnabled(True)
            if self.widget(self.idx).newEditor.isModified():
                saveAction.setEnabled(True)
            action = menu.exec_(self.mapToGlobal(e.pos()))

    def closeOthers(self):
        """Close every tab except the one the context menu was opened on."""
        idx = self.idx
        countTab = self.count()
        # Remove the tabs after idx first (right to left), then the ones
        # before it, so indices stay valid while removing.
        for i in range(countTab - 1, idx, -1) + range(idx - 1, -1, -1):
            self._removeTab(i)

    def closeAll(self):
        """Close all tabs and leave a single fresh 'Untitled-0' tab."""
        countTab = self.count()
        cI = self.currentIndex()
        for i in range(countTab - 1, 0, -1):
            self._removeTab(i)
        self.newTabEditor(tabName='Untitled-0')
        self._removeTab(0)

    def saveAs(self):
        """Save the context-menu tab under a new name."""
        idx = self.idx
        self.parent.saveAsScriptFile(idx)
        self.setCurrentWidget(self.widget(idx))

    def enableSaveIfModified(self, tab):
        """Enable the Save button only when *tab* has unsaved changes."""
        tabWidget = self.widget(tab)
        if tabWidget:
            self.parent.saveFileButton.setEnabled(tabWidget.newEditor.isModified())

    def enableToolBarEditor(self, enable):
        """Enable the editor toolbar unless the restore banner is shown."""
        if self.topFrame.isVisible():
            enable = False
        self.parent.toolBarEditor.setEnabled(enable)

    def newTabEditor(self, tabName=None, filename=None):
        """Open a new editor tab, optionally loading *filename*."""
        readOnly = False
        if filename:
            readOnly = not QFileInfo(filename).isWritable()
            # Probe that the file is readable before creating the tab.
            try:
                fn = codecs.open(unicode(filename), "rb", encoding='utf-8')
                txt = fn.read()
                fn.close()
            except IOError, error:
                IOErrorTr = QCoreApplication.translate('PythonConsole',
                                                       'The file {0} could not be opened. Error: {1}\n').format(filename,
                                                                                                                error.strerror)
                print '## Error: '
                sys.stderr.write(IOErrorTr)
                return
        nr = self.count()
        if not tabName:
            tabName = QCoreApplication.translate('PythonConsole', 'Untitled-{0}').format(nr)
        self.tab = EditorTab(self, self.parent, filename, readOnly)
        self.iconTab = QgsApplication.getThemeIcon('console/iconTabEditorConsole.png')
        self.addTab(self.tab, self.iconTab, tabName + ' (ro)' if readOnly else tabName)
        self.setCurrentWidget(self.tab)
        if filename:
            self.setTabToolTip(self.currentIndex(), unicode(filename))
        else:
            self.setTabToolTip(self.currentIndex(), tabName)

    def tabModified(self, tab, modified):
        """Reflect a tab's modified state in its title color and Save button."""
        index = self.indexOf(tab)
        color = Qt.darkGray if modified else Qt.black
        self.tabBar().setTabTextColor(index, color)
        self.parent.saveFileButton.setEnabled(modified)

    def closeTab(self, tab):
        """Remove *tab*, always keeping at least one tab open."""
        if self.count() < 2:
            self.removeTab(self.indexOf(tab))
            self.newTabEditor()
        else:
            self.removeTab(self.indexOf(tab))
        self.currentWidget().setFocus(Qt.TabFocusReason)

    def setTabTitle(self, tab, title):
        """Set the text of the tab at index *tab*."""
        self.setTabText(tab, title)

    def _removeTab(self, tab, tab2index=False):
        """Remove a tab (by index, or by widget when tab2index is True),
        prompting to save unsaved changes first.
        """
        if tab2index:
            tab = self.indexOf(tab)
        tabWidget = self.widget(tab)
        if tabWidget.newEditor.isModified():
            txtSaveOnRemove = QCoreApplication.translate("PythonConsole",
                                                         "Python Console: Save File")
            txtMsgSaveOnRemove = QCoreApplication.translate("PythonConsole",
                                                            "The file <b>'{0}'</b> has been modified, save changes?").format(self.tabText(tab))
            res = QMessageBox.question( self, txtSaveOnRemove,
                                        txtMsgSaveOnRemove,
                                        QMessageBox.Save | QMessageBox.Discard | QMessageBox.Cancel )
            if res == QMessageBox.Save:
                tabWidget.save()
            elif res == QMessageBox.Cancel:
                return
            if tabWidget.path:
                self.parent.updateTabListScript(tabWidget.path, action='remove')
            self.removeTab(tab)
            if self.count() < 1:
                self.newTabEditor()
        else:
            if tabWidget.path:
                self.parent.updateTabListScript(tabWidget.path, action='remove')
            # Keep at least one tab open at all times.
            if self.count() <= 1:
                self.removeTab(tab)
                self.newTabEditor()
            else:
                self.removeTab(tab)
        self.currentWidget().newEditor.setFocus(Qt.TabFocusReason)

    def buttonClosePressed(self):
        """Toolbar close button slot."""
        self.closeCurrentWidget()

    def closeCurrentWidget(self):
        """Close the current tab and drop it from the session restore list."""
        currWidget = self.currentWidget()
        if currWidget and currWidget.close():
            self.removeTab( self.currentIndex() )
            currWidget = self.currentWidget()
            if currWidget:
                currWidget.setFocus(Qt.TabFocusReason)
        if currWidget.path in self.restoreTabList:
            self.parent.updateTabListScript(currWidget.path, action='remove')

    def restoreTabs(self):
        """Reopen every file recorded from the previous session."""
        for script in self.restoreTabList:
            pathFile = unicode(script)
            if QFileInfo(pathFile).exists():
                tabName = pathFile.split('/')[-1]
                self.newTabEditor(tabName, pathFile)
            else:
                # File vanished since last session: report it and drop it
                # from the restore list.
                errOnRestore = QCoreApplication.translate("PythonConsole",
                                                          "Unable to restore the file: \n{0}\n").format(pathFile)
                print '## Error: '
                s = errOnRestore
                sys.stderr.write(s)
                self.parent.updateTabListScript(pathFile, action='remove')
        if self.count() < 1:
            self.newTabEditor(filename=None)
        self.topFrame.close()
        self.enableToolBarEditor(True)
        self.currentWidget().newEditor.setFocus(Qt.TabFocusReason)

    def closeRestore(self):
        """Dismiss the restore banner, clearing the saved session list."""
        self.parent.updateTabListScript(None)
        self.topFrame.close()
        self.newTabEditor(filename=None)
        self.enableToolBarEditor(True)

    def showFileTabMenu(self):
        """Rebuild the 'list all tabs' menu just before it is shown."""
        self.fileTabMenu.clear()
        for index in range(self.count()):
            action = self.fileTabMenu.addAction(self.tabIcon(index), self.tabText(index))
            action.setData(index)

    def showFileTabMenuTriggered(self, action):
        """Activate the tab selected from the 'list all tabs' menu."""
        index = action.data()
        if index is not None:
            self.setCurrentIndex(index)

    def listObject(self, tab):
        """Populate the object inspector with the classes, methods and
        functions found (via pyclbr) in the tab's backing file.
        """
        self.parent.listClassMethod.clear()
        if isinstance(tab, EditorTab):
            tabWidget = self.widget(self.indexOf(tab))
        else:
            tabWidget = self.widget(tab)
        if tabWidget:
            if tabWidget.path:
                pathFile, file = os.path.split(unicode(tabWidget.path))
                module, ext = os.path.splitext(file)
                found = False
                # Temporarily extend sys.path so pyclbr can import the module.
                if pathFile not in sys.path:
                    sys.path.append(pathFile)
                    found = True
                try:
                    reload(pyclbr)
                    dictObject = {}
                    readModule = pyclbr.readmodule(module)
                    readModuleFunction = pyclbr.readmodule_ex(module)
                    for name, class_data in sorted(readModule.items(), key=lambda x:x[1].lineno):
                        # Skip entries that pyclbr picked up from other files.
                        if os.path.normpath(str(class_data.file)) == os.path.normpath(str(tabWidget.path)):
                            superClassName = []
                            for superClass in class_data.super:
                                if superClass == 'object':
                                    continue
                                if isinstance(superClass, basestring):
                                    superClassName.append(superClass)
                                else:
                                    superClassName.append(superClass.name)
                            classItem = QTreeWidgetItem()
                            if superClassName:
                                super = ', '.join([i for i in superClassName])
                                classItem.setText(0, name + ' [' + super + ']')
                                classItem.setToolTip(0, name + ' [' + super + ']')
                            else:
                                classItem.setText(0, name)
                                classItem.setToolTip(0, name)
                            if sys.platform.startswith('win'):
                                classItem.setSizeHint(0, QSize(18, 18))
                            classItem.setText(1, str(class_data.lineno))
                            iconClass = QgsApplication.getThemeIcon("console/iconClassTreeWidgetConsole.png")
                            classItem.setIcon(0, iconClass)
                            dictObject[name] = class_data.lineno
                            for meth, lineno in sorted(class_data.methods.items(), key=itemgetter(1)):
                                methodItem = QTreeWidgetItem()
                                methodItem.setText(0, meth + ' ')
                                methodItem.setText(1, str(lineno))
                                methodItem.setToolTip(0, meth)
                                iconMeth = QgsApplication.getThemeIcon("console/iconMethodTreeWidgetConsole.png")
                                methodItem.setIcon(0, iconMeth)
                                if sys.platform.startswith('win'):
                                    methodItem.setSizeHint(0, QSize(18, 18))
                                classItem.addChild(methodItem)
                                dictObject[meth] = lineno
                            self.parent.listClassMethod.addTopLevelItem(classItem)
                    for func_name, data in sorted(readModuleFunction.items(), key=lambda x:x[1].lineno):
                        if isinstance(data, pyclbr.Function) and \
                           os.path.normpath(str(data.file)) == os.path.normpath(str(tabWidget.path)):
                            funcItem = QTreeWidgetItem()
                            funcItem.setText(0, func_name + ' ')
                            funcItem.setText(1, str(data.lineno))
                            funcItem.setToolTip(0, func_name)
                            iconFunc = QgsApplication.getThemeIcon("console/iconFunctionTreeWidgetConsole.png")
                            funcItem.setIcon(0, iconFunc)
                            if sys.platform.startswith('win'):
                                funcItem.setSizeHint(0, QSize(18, 18))
                            dictObject[func_name] = data.lineno
                            self.parent.listClassMethod.addTopLevelItem(funcItem)
                    if found:
                        sys.path.remove(pathFile)
                except:
                    # Any failure (typically a syntax error in the module)
                    # is surfaced as a single "Check Syntax" item.
                    msgItem = QTreeWidgetItem()
                    msgItem.setText(0, QCoreApplication.translate("PythonConsole", "Check Syntax"))
                    msgItem.setText(1, 'syntaxError')
                    iconWarning = QgsApplication.getThemeIcon("console/iconSyntaxErrorConsole.png")
                    msgItem.setIcon(0, iconWarning)
                    self.parent.listClassMethod.addTopLevelItem(msgItem)
                    # s = traceback.format_exc()
                    # print '## Error: '
                    # sys.stderr.write(s)
                    # pass

    def refreshSettingsEditor(self):
        """Re-apply editor settings to every tab and sync the inspector."""
        countTab = self.count()
        for i in range(countTab):
            self.widget(i).newEditor.settingsEditor()
        objInspectorEnabled = self.settings.value("pythonConsole/enableObjectInsp",
                                                  False, type=bool)
        listObj = self.parent.objectListButton
        if self.parent.listClassMethod.isVisible():
            listObj.setChecked(objInspectorEnabled)
        listObj.setEnabled(objInspectorEnabled)
        if objInspectorEnabled:
            cW = self.currentWidget()
            if cW and not self.parent.listClassMethod.isVisible():
                QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
                self.listObject(cW)
                QApplication.restoreOverrideCursor()

    def changeLastDirPath(self, tab):
        """Remember the path of *tab* as the last used directory."""
        tabWidget = self.widget(tab)
        if tabWidget:
            self.settings.setValue("pythonConsole/lastDirPath", tabWidget.path)

    def widgetMessageBar(self, iface, text, level, timed=True):
        """Push *text* onto the current tab's message bar.

        *level* indexes [INFO, WARNING, CRITICAL].
        """
        messageLevel = [QgsMessageBar.INFO, QgsMessageBar.WARNING, QgsMessageBar.CRITICAL]
        if timed:
            timeout = iface.messageTimeout()
        else:
            timeout = 0
        currWidget = self.currentWidget()
        currWidget.infoBar.pushMessage(text, messageLevel[level], timeout)
| gpl-2.0 |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Pillow-2.9.0/PIL/ImagePalette.py | 26 | 6692 | #
# The Python Imaging Library.
# $Id$
#
# image palette object
#
# History:
# 1996-03-11 fl Rewritten.
# 1997-01-03 fl Up and running.
# 1997-08-23 fl Added load hack
# 2001-04-16 fl Fixed randint shadow bug in random()
#
# Copyright (c) 1997-2001 by Secret Labs AB
# Copyright (c) 1996-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
import array
import warnings
from PIL import ImageColor
class ImagePalette(object):
    """Color palette for palette mapped images."""

    def __init__(self, mode="RGB", palette=None, size=0):
        self.mode = mode
        self.rawmode = None  # if set, palette contains raw data
        self.palette = palette or list(range(256)) * len(self.mode)
        self.colors = {}
        self.dirty = None
        # The palette must hold one value per band per possible index,
        # or exactly `size` values when an explicit size is given.
        expected = size if size else len(self.mode) * 256
        if len(self.palette) != expected:
            raise ValueError("wrong palette size")

    def copy(self):
        """Return an independent copy of this palette."""
        duplicate = ImagePalette()
        duplicate.mode = self.mode
        duplicate.rawmode = self.rawmode
        if self.palette is not None:
            duplicate.palette = self.palette[:]
        duplicate.colors = self.colors.copy()
        duplicate.dirty = self.dirty
        return duplicate

    def getdata(self):
        """
        Get palette contents in format suitable for the low-level
        ``im.putpalette`` primitive.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            return self.rawmode, self.palette
        return self.mode + ";L", self.tobytes()

    def tobytes(self):
        """Convert palette to bytes.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            raise ValueError("palette contains raw palette data")
        if isinstance(self.palette, bytes):
            return self.palette
        packed = array.array("B", self.palette)
        # py3k has a tobytes, tostring is deprecated.
        if hasattr(packed, 'tobytes'):
            return packed.tobytes()
        return packed.tostring()

    # Declare tostring as an alias for tobytes
    tostring = tobytes

    def getcolor(self, color):
        """Given an rgb tuple, allocate palette entry.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            raise ValueError("palette contains raw palette data")
        if not isinstance(color, tuple):
            raise ValueError("unknown color specifier: %r" % color)
        if color in self.colors:
            return self.colors[color]
        # Allocate a new slot; the palette must be mutable for that.
        if isinstance(self.palette, bytes):
            self.palette = [int(x) for x in self.palette]
        index = len(self.colors)
        if index >= 256:
            raise ValueError("cannot allocate more than 256 colors")
        self.colors[color] = index
        self.palette[index] = color[0]
        self.palette[index + 256] = color[1]
        self.palette[index + 512] = color[2]
        self.dirty = 1
        return index

    def save(self, fp):
        """Save palette to text file.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            raise ValueError("palette contains raw palette data")
        if isinstance(fp, str):
            fp = open(fp, "w")
        fp.write("# Palette\n")
        fp.write("# Mode: %s\n" % self.mode)
        bands = len(self.mode)
        for i in range(256):
            fp.write("%d" % i)
            for j in range(i * bands, (i + 1) * bands):
                try:
                    fp.write(" %d" % self.palette[j])
                except IndexError:
                    fp.write(" 0")
            fp.write("\n")
        fp.close()
# --------------------------------------------------------------------
# Internal
def raw(rawmode, data):
    """Wrap raw palette *data* in an ImagePalette marked as dirty."""
    wrapper = ImagePalette()
    wrapper.rawmode = rawmode
    wrapper.palette = data
    wrapper.dirty = 1
    return wrapper
# --------------------------------------------------------------------
# Factories
def _make_linear_lut(black, white):
    """Deprecated alias; use :func:`make_linear_lut` instead."""
    message = ('_make_linear_lut() is deprecated. '
               'Please call make_linear_lut() instead.')
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return make_linear_lut(black, white)
def _make_gamma_lut(exp):
    """Deprecated alias; use :func:`make_gamma_lut` instead."""
    message = ('_make_gamma_lut() is deprecated. '
               'Please call make_gamma_lut() instead.')
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return make_gamma_lut(exp)
def make_linear_lut(black, white):
    """Build a 256-entry lookup table mapping 0..255 linearly from
    *black* to *white*.

    For ``black == 0`` this reproduces the historic behaviour
    (``white * i // 255``); a non-zero *black*, previously rejected
    with ``NotImplementedError`` (the old FIXME), now interpolates
    between the two endpoints.
    """
    return [black + (white - black) * i // 255 for i in range(256)]
def make_gamma_lut(exp):
    """Build a 256-entry gamma lookup table for exponent *exp*."""
    return [int(((i / 255.0) ** exp) * 255.0 + 0.5) for i in range(256)]
def negative(mode="RGB"):
    """Inverted grayscale wedge palette (255 down to 0, per band)."""
    ramp = list(range(255, -1, -1))
    return ImagePalette(mode, ramp * len(mode))
def random(mode="RGB"):
    """Palette filled with random byte values for each band."""
    from random import randint
    values = [randint(0, 255) for _ in range(256 * len(mode))]
    return ImagePalette(mode, values)
def sepia(white="#fff0c0"):
    """Sepia-toned palette ramping from black up to *white*."""
    r, g, b = ImageColor.getrgb(white)
    return ImagePalette("RGB",
                        make_linear_lut(0, r) +
                        make_linear_lut(0, g) +
                        make_linear_lut(0, b))
def wedge(mode="RGB"):
    """Grayscale wedge palette (identity ramp repeated per band)."""
    ramp = list(range(256))
    return ImagePalette(mode, ramp * len(mode))
def load(filename):
    """Load palette data from *filename*, trying each supported reader
    in turn.

    :return: a ``(data, rawmode)`` tuple as produced by the reader.
    :raises IOError: if no reader recognises the file.
    """
    # FIXME: supports GIMP gradients only
    from PIL import GimpPaletteFile, GimpGradientFile, PaletteFile
    lut = None
    # The original implementation never closed the file handle on any
    # path; 'with' guarantees it is closed even when a reader raises.
    with open(filename, "rb") as fp:
        for parser in (GimpPaletteFile.GimpPaletteFile,
                       GimpGradientFile.GimpGradientFile,
                       PaletteFile.PaletteFile):
            try:
                fp.seek(0)
                lut = parser(fp).getpalette()
                if lut:
                    break
            except (SyntaxError, ValueError):
                # Not this format -- try the next reader.
                pass
    if not lut:
        raise IOError("cannot load palette")
    return lut  # data, rawmode
| mit |
davidjrichardson/uwcs-zarya | events/migrations/0002_auto_20160917_1617.py | 1 | 4285 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-17 16:17
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.documents.blocks
import wagtail.images.blocks
class Migration(migrations.Migration):
    """Auto-generated migration creating the EventPage and EventSignup
    models (generated by Django 1.10.1 on 2016-09-17)."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('wagtailcore', '0029_unicode_slugfield_dj19'),
        ('events', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='EventPage',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                # Frozen snapshot of the page's StreamField block definitions
                # at generation time (headings, rich text, images, quotes,
                # documents and syntax-highlighted code).
                ('body', wagtail.core.fields.StreamField((('h2', wagtail.core.blocks.CharBlock(classname='title', icon='title')), ('h3', wagtail.core.blocks.CharBlock(classname='title', icon='title')), ('h4', wagtail.core.blocks.CharBlock(classname='title', icon='title')), ('paragraph', wagtail.core.blocks.RichTextBlock(icon='pilcrow')), ('image', wagtail.images.blocks.ImageChooserBlock()), ('pullquote', wagtail.core.blocks.StructBlock((('quote', wagtail.core.blocks.TextBlock('quote title')), ('attribution', wagtail.core.blocks.CharBlock())))), ('document', wagtail.documents.blocks.DocumentChooserBlock(icon='doc-full-inverse')), ('code', wagtail.core.blocks.StructBlock((('language', wagtail.core.blocks.ChoiceBlock(choices=[('bash', 'Bash/Shell'), ('c', 'C'), ('cmake', 'CMake'), ('cpp', 'C++'), ('csharp', 'C#'), ('css', 'CSS'), ('go', 'Go'), ('haskell', 'Haskell'), ('haxe', 'Haxe'), ('html', 'HTML'), ('java', 'Java'), ('js', 'JavaScript'), ('json', 'JSON'), ('kotlin', 'Kotlin'), ('lua', 'Lua'), ('make', 'Makefile'), ('perl', 'Perl'), ('perl6', 'Perl 6'), ('php', 'PHP'), ('python', 'Python'), ('python3', 'Python 3'), ('ruby', 'Ruby'), ('sql', 'SQL'), ('swift', 'Swift'), ('xml', 'XML')])), ('code', wagtail.core.blocks.TextBlock()))))))),
                ('description', models.CharField(max_length=200)),
                ('location', models.CharField(default='Department of Computer Science', max_length=50)),
                ('start', models.DateTimeField(default=datetime.datetime.now)),
                # NOTE(review): this default is a timestamp frozen at
                # migration-generation time; the model presumably intended a
                # callable like datetime.datetime.now.  Harmless in a
                # historical migration, but verify the model definition.
                ('finish', models.DateTimeField(default=datetime.datetime(2016, 9, 17, 17, 17, 44, 619999))),
                ('cancelled', models.BooleanField()),
                ('facebook_link', models.URLField(blank=True, help_text='A link to the associated Facebook event if one exists', verbose_name='Facebook event')),
                ('signup_limit', models.IntegerField(help_text='Enter 0 for unlimited signups', verbose_name='Signup limit')),
                ('signup_open', models.DateTimeField()),
                ('signup_close', models.DateTimeField()),
                ('signup_freshers_open', models.DateTimeField(blank=True, help_text='Set a date for when freshers may sign up to the event, leave blank if they are to sign up at the same time as everyone else')),
                ('category', models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, to='events.EventType')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='EventSignup',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('signup_created', models.DateTimeField(default=datetime.datetime.now)),
                ('comment', models.TextField(blank=True)),
                ('event', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='events.EventPage')),
                ('member', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['signup_created'],
            },
        ),
        # One signup per (event, member) pair.
        migrations.AlterUniqueTogether(
            name='eventsignup',
            unique_together=set([('event', 'member')]),
        ),
    ]
| mit |
redhat-openstack/nova | nova/tests/virt/test_volumeutils.py | 36 | 1781 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
# Copyright 2012 University Of Minho
# Copyright 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests fot virt volumeutils.
"""
from nova import exception
from nova import test
from nova import utils
from nova.virt import volumeutils
class VolumeUtilsTestCase(test.TestCase):
    """Tests for volumeutils.get_iscsi_initiator()."""

    def test_get_iscsi_initiator(self):
        """The initiator IQN is parsed out of /etc/iscsi/initiatorname.iscsi."""
        # mox record phase: expect a root 'cat' of the initiator file and
        # feed back output containing the IQN amid junk lines.
        self.mox.StubOutWithMock(utils, 'execute')
        initiator = 'fake.initiator.iqn'
        rval = ("junk\nInitiatorName=%s\njunk\n" % initiator, None)
        utils.execute('cat', '/etc/iscsi/initiatorname.iscsi',
                      run_as_root=True).AndReturn(rval)
        # Start test
        self.mox.ReplayAll()
        result = volumeutils.get_iscsi_initiator()
        self.assertEqual(initiator, result)

    def test_get_missing_iscsi_initiator(self):
        """A missing initiator file yields None rather than an exception."""
        self.mox.StubOutWithMock(utils, 'execute')
        file_path = '/etc/iscsi/initiatorname.iscsi'
        # Simulate the initiator file being absent on the host.
        utils.execute('cat', file_path, run_as_root=True).AndRaise(
            exception.FileNotFound(file_path=file_path)
        )
        # Start test
        self.mox.ReplayAll()
        result = volumeutils.get_iscsi_initiator()
        self.assertIsNone(result)
| apache-2.0 |
MotorolaMobilityLLC/external-chromium_org | native_client_sdk/src/tools/tests/quote_test.py | 54 | 5596 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import sys
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PARENT_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(PARENT_DIR)
import quote
verbose = False
# Wrapped versions of the functions that we're testing, so that during
# debugging we can more easily see what their inputs were.
def VerboseQuote(in_string, specials, *args, **kwargs):
    """Call quote.quote(...), logging the invocation first when *verbose*
    is set.  Used by the tests so failures show their inputs."""
    if verbose:
        sys.stdout.write('Invoking quote(%s, %s, %s)\n' %
                         (repr(in_string), repr(specials),
                          ', '.join([repr(a) for a in args] +
                                    [repr(k) + ':' + repr(v)
                                     # BUG FIX: must iterate items(); iterating
                                     # the dict itself yields bare keys and the
                                     # k, v unpacking raises ValueError.
                                     for k, v in kwargs.items()])))
    return quote.quote(in_string, specials, *args, **kwargs)
def VerboseUnquote(in_string, specials, *args, **kwargs):
    """Call quote.unquote(...), logging the invocation first when *verbose*
    is set.  Used by the tests so failures show their inputs."""
    if verbose:
        sys.stdout.write('Invoking unquote(%s, %s, %s)\n' %
                         (repr(in_string), repr(specials),
                          ', '.join([repr(a) for a in args] +
                                    [repr(k) + ':' + repr(v)
                                     # BUG FIX: must iterate items(); iterating
                                     # the dict itself yields bare keys and the
                                     # k, v unpacking raises ValueError.
                                     for k, v in kwargs.items()])))
    return quote.unquote(in_string, specials, *args, **kwargs)
class TestQuote(unittest.TestCase):
# test utilities
def generic_test(self, fn, in_args, expected_out_obj):
actual = apply(fn, in_args)
self.assertEqual(actual, expected_out_obj)
def check_invertible(self, in_string, specials, escape='\\'):
q = VerboseQuote(in_string, specials, escape)
qq = VerboseUnquote(q, specials, escape)
self.assertEqual(''.join(qq), in_string)
def run_test_tuples(self, test_tuples):
for func, in_args, expected in test_tuples:
self.generic_test(func, in_args, expected)
def testQuote(self):
test_tuples = [[VerboseQuote,
['foo, bar, baz, and quux too!', 'abc'],
'foo, \\b\\ar, \\b\\az, \\and quux too!'],
[VerboseQuote,
['when \\ appears in the input', 'a'],
'when \\\\ \\appe\\ars in the input']]
self.run_test_tuples(test_tuples)
def testUnquote(self):
test_tuples = [[VerboseUnquote,
['key\\:still_key:value\\:more_value', ':'],
['key:still_key', ':', 'value:more_value']],
[VerboseUnquote,
['about that sep\\ar\\ator in the beginning', 'ab'],
['', 'ab', 'out th', 'a', 't separator in the ',
'b', 'eginning']],
[VerboseUnquote,
['the rain in spain fall\\s ma\\i\\nly on the plains',
'ins'],
['the ra', 'in', ' ', 'in', ' ', 's', 'pa', 'in',
' falls mainly o', 'n', ' the pla', 'ins']],
]
self.run_test_tuples(test_tuples)
def testInvertible(self):
self.check_invertible('abcdefg', 'bc')
self.check_invertible('a\\bcdefg', 'bc')
self.check_invertible('ab\\cdefg', 'bc')
self.check_invertible('\\ab\\cdefg', 'abc')
self.check_invertible('abcde\\fg', 'efg')
self.check_invertible('a\\b', '')
# Invoke this file directly for simple manual testing. For running
# the unittests, use the -t flag. Any flags to be passed to the
# unittest module should be passed as after the optparse processing,
# e.g., "quote_test.py -t -- -v" to pass the -v flag to the unittest
# module.
def main(argv):
    """Manual testing driver.

    Quotes (or, with -u, unquotes) each word on the command line and prints
    the result; with -t, hands control to the unittest module instead.
    Returns the number of round-trip failures.
    """
    global verbose
    parser = optparse.OptionParser(
        usage='Usage: %prog [options] word...')
    parser.add_option('-s', '--special-chars', dest='special_chars', default=':',
                      help='Special characters to quote (default is ":")')
    parser.add_option('-q', '--quote', dest='quote', default='\\',
                      help='Quote or escape character (default is "\")')
    parser.add_option('-t', '--run-tests', dest='tests', action='store_true',
                      help='Run built-in tests\n')
    parser.add_option('-u', '--unquote-input', dest='unquote_input',
                      action='store_true', help='Unquote command line argument')
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                      help='Verbose test output')
    options, args = parser.parse_args(argv)

    if options.verbose:
        verbose = True

    num_errors = 0
    if options.tests:
        sys.argv = [sys.argv[0]] + args
        unittest.main()
    else:
        for word in args:
            # NB: there are inputs x for which quote(unquote(x) != x, but
            # there should be no input x for which unquote(quote(x)) != x.
            if options.unquote_input:
                pieces = quote.unquote(word, options.special_chars,
                                       options.quote)
                sys.stdout.write('unquote(%s) = %s\n'
                                 % (word, ''.join(pieces)))
                # Unquote is manual-testing only: with no expected output to
                # compare against, num_errors cannot be updated here.
            else:
                quoted = quote.quote(word, options.special_chars,
                                     options.quote)
                round_trip = quote.unquote(quoted, options.special_chars,
                                           options.quote)
                sys.stdout.write('quote(%s) = %s, unquote(%s) = %s\n'
                                 % (word, quoted, quoted, ''.join(round_trip)))
                if word != ''.join(round_trip):
                    num_errors += 1
    if num_errors > 0:
        sys.stderr.write('[ FAILED ] %d test failures\n' % num_errors)
    return num_errors
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |
praekelt/go-http-api | go_http/tests/test_account.py | 1 | 8152 | """
Tests for go_http.account
"""
import collections
import copy
import json
from unittest import TestCase
from requests import HTTPError, Session
from requests.adapters import HTTPAdapter
from requests_testadapter import TestSession, Resp, TestAdapter
from go_http.account import AccountApiClient
from go_http.exceptions import JsonRpcException
from go_http.tests.fixtures import account as fixtures
class FakeAccountApiAdapter(HTTPAdapter):
    """
    Adapter providing a fake account API.

    This inherits directly from HTTPAdapter instead of using TestAdapter
    because it overrides everything TestAdaptor does.
    """

    def __init__(self, account_api):
        self.account_api = account_api
        super(FakeAccountApiAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None,
             verify=True, cert=None, proxies=None):
        # Route the request to the in-memory fake instead of the network.
        raw = self.account_api.handle_request(request)
        response = self.build_response(request, raw)
        if not stream:
            # force prefetching content unless streaming in use
            response.content
        return response
class FakeAccountApi(object):
    """In-memory stand-in for the account JSON-RPC API.

    Queued responses are registered per RPC method with
    add_success_response()/add_error_response() and consumed (LIFO) by
    handle_request().
    """

    def __init__(self, api_path, auth_token):
        self.api_path = api_path
        self.auth_token = auth_token
        # method name -> list of (expected_params, result, error) tuples.
        self.responses = collections.defaultdict(list)

    def http_error_response(self, http_code, error):
        # BUG FIX: previously ignored both parameters and always returned a
        # 403 "403 Forbidden", so the 400 Invalid-Content-Type path reported
        # the wrong status and body.
        return Resp(error, http_code, headers={})

    def jsonrpc_error_response(self, fault, fault_code, fault_string):
        return Resp(json.dumps({
            "error": {
                "fault": fault, "faultCode": fault_code,
                "faultString": fault_string,
            },
        }), 200, headers={})

    def jsonrpc_success_response(self, result):
        return Resp(json.dumps({
            "error": None,
            "result": result,
        }), 200, headers={})

    def add_success_response(self, method, params, result):
        # Deep-copy so later mutation by the caller can't corrupt the queue.
        self.responses[method].append((params, copy.deepcopy(result), None))

    def add_error_response(self, method, params, **error):
        self.responses[method].append((params, None, error))

    def handle_request(self, request):
        """Validate auth/headers/method, then replay the queued response."""
        if request.headers['Authorization'] != 'Bearer %s' % (
                self.auth_token):
            return self.http_error_response(403, "403 Forbidden")
        if request.headers['Content-Type'] != (
                'application/json; charset=utf-8'):
            return self.http_error_response(400, "Invalid Content-Type.")
        if request.method != "POST":
            return self.jsonrpc_error_response(
                "Fault", 8000, "Only POST method supported")
        data = json.loads(request.body)
        params, result, error = self.responses[data['method']].pop()
        # The test must have queued a response for exactly these params.
        assert params == data['params']
        if error is not None:
            return self.jsonrpc_error_response(**error)
        return self.jsonrpc_success_response(result)
class TestAccountApiClient(TestCase):
    """Tests for AccountApiClient against the in-memory FakeAccountApi."""

    API_URL = "http://example.com/go"
    AUTH_TOKEN = "auth_token"

    def setUp(self):
        # Mount the fake API on a requests session so no real HTTP happens.
        self.account_backend = FakeAccountApi("go/", self.AUTH_TOKEN)
        self.session = TestSession()
        self.adapter = FakeAccountApiAdapter(self.account_backend)
        self.simulate_api_up()

    def simulate_api_down(self):
        """Make every request to the API URL return a 500."""
        self.session.mount(self.API_URL, TestAdapter("API is down", 500))

    def simulate_api_up(self):
        """Route requests to the API URL back to the fake backend."""
        self.session.mount(self.API_URL, self.adapter)

    def make_client(self, auth_token=AUTH_TOKEN):
        return AccountApiClient(
            auth_token, api_url=self.API_URL, session=self.session)

    def assert_http_error(self, expected_status, func, *args, **kw):
        """Assert that calling func raises HTTPError with expected_status."""
        try:
            func(*args, **kw)
        except HTTPError as err:
            self.assertEqual(err.response.status_code, expected_status)
        else:
            self.fail(
                "Expected HTTPError with status %s." % (expected_status,))

    def assert_jsonrpc_exception(self, f, *args, **kw):
        """Assert that f raises a well-formed JsonRpcException; return it."""
        try:
            f(*args, **kw)
        except Exception as err:
            self.assertTrue(isinstance(err, JsonRpcException))
            # NOTE(review): 'unicode' is Python-2 only; this module would
            # need 'str' under Python 3 — confirm the supported runtime.
            self.assertTrue(isinstance(err.fault, unicode))
            self.assertTrue(isinstance(err.fault_code, int))
            self.assertTrue(isinstance(err.fault_string, unicode))
            return err

    def test_assert_http_error(self):
        # Self-test of the assert_http_error helper above.
        self.session.mount("http://bad.example.com/", TestAdapter("", 500))

        def bad_req():
            r = self.session.get("http://bad.example.com/")
            r.raise_for_status()

        # Fails when no exception is raised.
        self.assertRaises(
            self.failureException, self.assert_http_error, 404, lambda: None)

        # Fails when an HTTPError with the wrong status code is raised.
        self.assertRaises(
            self.failureException, self.assert_http_error, 404, bad_req)

        # Passes when an HTTPError with the expected status code is raised.
        self.assert_http_error(500, bad_req)

        # Non-HTTPError exceptions aren't caught.
        def raise_error():
            raise ValueError()

        self.assertRaises(ValueError, self.assert_http_error, 404, raise_error)

    def test_default_session(self):
        client = AccountApiClient(self.AUTH_TOKEN)
        self.assertTrue(isinstance(client.session, Session))

    def test_default_api_url(self):
        client = AccountApiClient(self.AUTH_TOKEN)
        self.assertEqual(
            client.api_url, "https://go.vumi.org/api/v1/go")

    def test_auth_failure(self):
        client = self.make_client(auth_token="bogus_token")
        self.assert_http_error(403, client.campaigns)

    def test_jsonrpc_error_handling(self):
        client = self.make_client()
        self.account_backend.add_error_response(
            "campaigns", [],
            fault="Fault", fault_code=8002, fault_string="Meep")
        err = self.assert_jsonrpc_exception(client.campaigns)
        self.assertEqual(err.fault, "Fault")
        self.assertEqual(err.fault_code, 8002)
        self.assertEqual(err.fault_string, "Meep")

    def test_campaigns(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "campaigns", [], fixtures.campaigns)
        self.assertEqual(client.campaigns(), fixtures.campaigns)

    def test_conversations(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "conversations", ["campaign-1"], fixtures.conversations)
        self.assertEqual(
            client.conversations("campaign-1"),
            fixtures.conversations)

    def test_channels(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "channels", ["campaign-1"], fixtures.channels)
        self.assertEqual(
            client.channels("campaign-1"),
            fixtures.channels)

    def test_routers(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "routers", ["campaign-1"], fixtures.routers)
        self.assertEqual(
            client.routers("campaign-1"),
            fixtures.routers)

    def test_routing_entries(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "routing_entries", ["campaign-1"], fixtures.routing_entries)
        self.assertEqual(
            client.routing_entries("campaign-1"),
            fixtures.routing_entries)

    def test_routing_table(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "routing_table", ["campaign-1"], fixtures.routing_table)
        self.assertEqual(
            client.routing_table("campaign-1"),
            fixtures.routing_table)

    # NOTE(review): method name typo ('tabel' for 'table'); renaming would
    # be a harmless follow-up cleanup.
    def test_update_routing_tabel(self):
        client = self.make_client()
        self.account_backend.add_success_response(
            "update_routing_table", ["campaign-1", fixtures.routing_table],
            None)
        self.assertEqual(
            client.update_routing_table("campaign-1", fixtures.routing_table),
            None)
| bsd-3-clause |
tscohen/chainer | examples/word2vec/train_word2vec.py | 9 | 5456 | #!/usr/bin/env python
"""Sample script of word embedding model.
This code implements skip-gram model and continuous-bow model.
Use ../ptb/download.py to download 'ptb.train.txt'.
"""
import argparse
import collections
import time
import numpy as np
import six.moves.cPickle as pickle
import chainer
from chainer import cuda
import chainer.functions as F
import chainer.optimizers as O
# Command-line interface for the word2vec training script.
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', '-g', default=-1, type=int,
                    help='GPU ID (negative value indicates CPU)')
parser.add_argument('--unit', '-u', default=100, type=int,
                    help='number of units')
parser.add_argument('--window', '-w', default=5, type=int,
                    help='window size')
parser.add_argument('--batchsize', '-b', type=int, default=100,
                    help='learning minibatch size')
parser.add_argument('--epoch', '-e', default=10, type=int,
                    help='number of epochs to learn')
parser.add_argument('--model', '-m', choices=['skipgram', 'cbow'],
                    default='skipgram',
                    help='model type ("skipgram", "cbow")')
parser.add_argument('--out-type', '-o', choices=['hsm', 'ns', 'original'],
                    default='hsm',
                    help='output model type ("hsm": hierarchical softmax, '
                    '"ns": negative sampling, "original": no approximation)')
args = parser.parse_args()

if args.gpu >= 0:
    cuda.check_cuda_available()
# xp is the array backend: CuPy when a GPU is selected, NumPy otherwise.
xp = cuda.cupy if args.gpu >= 0 else np

print('GPU: {}'.format(args.gpu))
print('# unit: {}'.format(args.unit))
print('Window: {}'.format(args.window))
print('Minibatch-size: {}'.format(args.batchsize))
print('# epoch: {}'.format(args.epoch))
print('Training model: {}'.format(args.model))
print('Output type: {}'.format(args.out_type))
print('')
def continuous_bow(dataset, position):
    """Continuous bag-of-words loss: predict the centre word at *position*
    from the sum of its context-word embeddings."""
    # use random window size in the same way as the original word2vec
    # implementation.
    window = np.random.randint(args.window - 1) + 1
    context_sum = None
    for offset in range(-window, window + 1):
        if offset == 0:
            continue  # skip the centre word itself
        context_ids = chainer.Variable(xp.asarray(dataset[position + offset]))
        embedding = model.embed(context_ids)
        context_sum = embedding if context_sum is None else context_sum + embedding
    target = chainer.Variable(xp.asarray(dataset[position]))
    return loss_func(context_sum, target)
def skip_gram(dataset, position):
    """Skip-gram loss: predict the centre word at *position* from each of
    its context words, summing the per-context losses."""
    target = chainer.Variable(xp.asarray(dataset[position]))
    # use random window size in the same way as the original word2vec
    # implementation.
    window = np.random.randint(args.window - 1) + 1
    total_loss = None
    for offset in range(-window, window + 1):
        if offset == 0:
            continue  # skip the centre word itself
        context_ids = chainer.Variable(xp.asarray(dataset[position + offset]))
        embedding = model.embed(context_ids)
        partial = loss_func(embedding, target)
        total_loss = partial if total_loss is None else total_loss + partial
    return total_loss
if args.gpu >= 0:
    cuda.get_device(args.gpu).use()

# Build the vocabulary and convert the corpus into a list of word ids.
index2word = {}
word2index = {}
counts = collections.Counter()
dataset = []
with open('ptb.train.txt') as f:
    for line in f:
        for word in line.split():
            if word not in word2index:
                ind = len(word2index)
                word2index[word] = ind
                index2word[ind] = word
            counts[word2index[word]] += 1
            dataset.append(word2index[word])

n_vocab = len(word2index)

print('n_vocab: %d' % n_vocab)
print('data length: %d' % len(dataset))

# Pick the training objective.
if args.model == 'skipgram':
    train_model = skip_gram
elif args.model == 'cbow':
    train_model = continuous_bow
else:
    raise Exception('Unknown model type: {}'.format(args.model))

model = chainer.FunctionSet(
    embed=F.EmbedID(n_vocab, args.unit),
)

# Pick the output layer / loss approximation.
if args.out_type == 'hsm':
    HSM = F.BinaryHierarchicalSoftmax
    tree = HSM.create_huffman_tree(counts)
    model.l = HSM(args.unit, tree)
    loss_func = model.l
elif args.out_type == 'ns':
    cs = [counts[w] for w in range(len(counts))]
    model.l = F.NegativeSampling(args.unit, cs, 20)
    loss_func = model.l
elif args.out_type == 'original':
    model.l = F.Linear(args.unit, n_vocab)
    loss_func = lambda h, t: F.softmax_cross_entropy(model.l(h), t)
else:
    raise Exception('Unknown output type: {}'.format(args.out_type))

if args.gpu >= 0:
    model.to_gpu()

dataset = np.array(dataset, dtype=np.int32)

optimizer = O.Adam()
optimizer.setup(model)

# Training loop: each epoch visits random minibatch start offsets.
begin_time = time.time()
cur_at = begin_time
word_count = 0
skip = (len(dataset) - args.window * 2) // args.batchsize
next_count = 100000
for epoch in range(args.epoch):
    accum_loss = 0
    print('epoch: {0}'.format(epoch))
    indexes = np.random.permutation(skip)
    for i in indexes:
        # Periodic progress report every 100k words.
        if word_count >= next_count:
            now = time.time()
            duration = now - cur_at
            # NOTE(review): 'throuput' is a typo for 'throughput' (local
            # variable only, no external impact).
            throuput = 100000. / (now - cur_at)
            print('{} words, {:.2f} sec, {:.2f} words/sec'.format(
                word_count, duration, throuput))
            next_count += 100000
            cur_at = now
        # One strided minibatch of centre-word positions.
        position = np.array(
            range(0, args.batchsize)) * skip + (args.window + i)
        loss = train_model(dataset, position)
        accum_loss += loss.data
        word_count += args.batchsize
        optimizer.zero_grads()
        loss.backward()
        optimizer.update()
    print(accum_loss)

# Save the trained model together with the vocabulary mappings.
model.to_cpu()
with open('model.pickle', 'wb') as f:
    obj = (model, index2word, word2index)
    pickle.dump(obj, f)
| mit |
artminster/artminster | core/utils/fields.py | 1 | 10035 | from django.utils.translation import ugettext as _
from django.db import models, connection
from django.utils.text import capfirst
from itertools import chain
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from django.utils.encoding import force_unicode, smart_unicode
from django import forms
from itertools import chain
from django.conf import settings
from django.contrib.admin import widgets
from django.utils.html import escape
from django.forms.fields import EMPTY_VALUES, Field
from django.forms import ValidationError
from django.db.models.signals import post_delete, post_save
from south.modelsinspector import add_introspection_rules
from django.db.models import OneToOneField
from django.db.models.fields.related import SingleRelatedObjectDescriptor
qn = connection.ops.quote_name
import re
uk_landline_re = re.compile(r'^[0]{1}[1-9]{1}[0-9]{9}$')
uk_landline_no08or09_re = re.compile(r'^[0]{1}[1-7]{1}[0-9]{9}$')
uk_mobile_re = re.compile(r'^(07)[0-9]{9}')
international_number_re = re.compile(r'^[+]?([0-9]*[\.\s\-\(\)]|[0-9]+){3,24}$')
from django.db.models import OneToOneField
from django.db.models.fields.related import SingleRelatedObjectDescriptor
class AutoSingleRelatedObjectDescriptor(SingleRelatedObjectDescriptor):
    """Descriptor that creates the related object on first access if it
    does not exist yet."""

    def __get__(self, instance, instance_type=None):
        try:
            return super(AutoSingleRelatedObjectDescriptor, self).__get__(
                instance, instance_type)
        except self.related.model.DoesNotExist:
            # No related row yet -- create and persist one on the fly.
            related_obj = self.related.model(
                **{self.related.field.name: instance})
            related_obj.save()
            return related_obj
class AutoOneToOneField(OneToOneField):
    """
    A OneToOneField that creates the related object on first access if it
    does not exist yet.  Drop-in replacement for the original OneToOneField.

    Example:

        class MyProfile(models.Model):
            user = AutoOneToOneField(User, primary_key=True)
            home_page = models.URLField(max_length=255, blank=True)
            icq = models.IntegerField(max_length=255, null=True)
    """

    def contribute_to_related_class(self, cls, related):
        # Install the auto-creating descriptor in place of Django's default.
        accessor_name = related.get_accessor_name()
        setattr(cls, accessor_name,
                AutoSingleRelatedObjectDescriptor(related))

    def south_field_triple(self):
        "Returns a suitable description of this field for South."
        from south.modelsinspector import introspector
        # Report the plain OneToOneField so South migrations stay portable.
        field_class = OneToOneField.__module__ + "." + OneToOneField.__name__
        args, kwargs = introspector(self)
        return (field_class, args, kwargs)
# ISO 3166-1 country names and codes adapted from http://opencountrycodes.appspot.com/python/
COUNTRIES = [
('GB', _('United Kingdom')),
('US', _('United States')),
('AF', _('Afghanistan')),
('AX', _('Aland Islands')),
('AL', _('Albania')),
('DZ', _('Algeria')),
('AS', _('American Samoa')),
('AD', _('Andorra')),
('AO', _('Angola')),
('AI', _('Anguilla')),
('AQ', _('Antarctica')),
('AG', _('Antigua and Barbuda')),
('AR', _('Argentina')),
('AM', _('Armenia')),
('AW', _('Aruba')),
('AU', _('Australia')),
('AT', _('Austria')),
('AZ', _('Azerbaijan')),
('BS', _('Bahamas')),
('BH', _('Bahrain')),
('BD', _('Bangladesh')),
('BB', _('Barbados')),
('BY', _('Belarus')),
('BE', _('Belgium')),
('BZ', _('Belize')),
('BJ', _('Benin')),
('BM', _('Bermuda')),
('BT', _('Bhutan')),
('BO', _('Bolivia')),
('BA', _('Bosnia and Herzegovina')),
('BW', _('Botswana')),
('BV', _('Bouvet Island')),
('BR', _('Brazil')),
('BN', _('Brunei Darussalam')),
('BG', _('Bulgaria')),
('BF', _('Burkina Faso')),
('BI', _('Burundi')),
('KH', _('Cambodia')),
('CM', _('Cameroon')),
('CA', _('Canada')),
('CV', _('Cape Verde')),
('KY', _('Cayman Islands')),
('CF', _('Central African Republic')),
('TD', _('Chad')),
('CL', _('Chile')),
('CN', _('China')),
('CX', _('Christmas Island')),
('CC', _('Cocos Islands')),
('CO', _('Colombia')),
('KM', _('Comoros')),
('CG', _('Congo')),
('CD', _('Congo')),
('CK', _('Cook Islands')),
('CR', _('Costa Rica')),
('CI', _("Cote d'Ivoire")),
('HR', _('Croatia')),
('CU', _('Cuba')),
('CY', _('Cyprus')),
('CZ', _('Czech Republic')),
('DK', _('Denmark')),
('DJ', _('Djibouti')),
('DM', _('Dominica')),
('DO', _('Dominican Republic')),
('EC', _('Ecuador')),
('EG', _('Egypt')),
('SV', _('El Salvador')),
('GQ', _('Equatorial Guinea')),
('ER', _('Eritrea')),
('EE', _('Estonia')),
('ET', _('Ethiopia')),
('FK', _('Falkland Islands')),
('FO', _('Faroe Islands')),
('FJ', _('Fiji')),
('FI', _('Finland')),
('FR', _('France')),
('GF', _('French Guiana')),
('PF', _('French Polynesia')),
('GA', _('Gabon')),
('GM', _('Gambia')),
('GE', _('Georgia')),
('DE', _('Germany')),
('GH', _('Ghana')),
('GI', _('Gibraltar')),
('GR', _('Greece')),
('GL', _('Greenland')),
('GD', _('Grenada')),
('GP', _('Guadeloupe')),
('GU', _('Guam')),
('GT', _('Guatemala')),
('GG', _('Guernsey')),
('GN', _('Guinea')),
('GW', _('Guinea-Bissau')),
('GY', _('Guyana')),
('HT', _('Haiti')),
('HN', _('Honduras')),
('HK', _('Hong Kong')),
('HU', _('Hungary')),
('IS', _('Iceland')),
('IN', _('India')),
('ID', _('Indonesia')),
('IR', _('Iran')),
('IQ', _('Iraq')),
('IE', _('Ireland')),
('IM', _('Isle of Man')),
('IL', _('Israel')),
('IT', _('Italy')),
('JM', _('Jamaica')),
('JP', _('Japan')),
('JE', _('Jersey')),
('JO', _('Jordan')),
('KZ', _('Kazakhstan')),
('KE', _('Kenya')),
('KI', _('Kiribati')),
('KP', _('Korea')),
('KR', _('Korea, Republic of')),
('KW', _('Kuwait')),
('KG', _('Kyrgyzstan')),
('LA', _('Lao')),
('LV', _('Latvia')),
('LB', _('Lebanon')),
('LS', _('Lesotho')),
('LR', _('Liberia')),
('LY', _('Libyan Arab Jamahiriya')),
('LI', _('Liechtenstein')),
('LT', _('Lithuania')),
('LU', _('Luxembourg')),
('MO', _('Macao')),
('MK', _('Macedonia')),
('MG', _('Madagascar')),
('MW', _('Malawi')),
('MY', _('Malaysia')),
('MV', _('Maldives')),
('ML', _('Mali')),
('MT', _('Malta')),
('MH', _('Marshall Islands')),
('MQ', _('Martinique')),
('MR', _('Mauritania')),
('MU', _('Mauritius')),
('YT', _('Mayotte')),
('MX', _('Mexico')),
('MD', _('Moldova')),
('MC', _('Monaco')),
('MN', _('Mongolia')),
('ME', _('Montenegro')),
('MS', _('Montserrat')),
('MA', _('Morocco')),
('MZ', _('Mozambique')),
('MM', _('Myanmar')),
('NA', _('Namibia')),
('NR', _('Nauru')),
('NP', _('Nepal')),
('NL', _('Netherlands')),
('AN', _('Netherlands Antilles')),
('NC', _('New Caledonia')),
('NZ', _('New Zealand')),
('NI', _('Nicaragua')),
('NE', _('Niger')),
('NG', _('Nigeria')),
('NU', _('Niue')),
('NF', _('Norfolk Island')),
('MP', _('Northern Mariana Islands')),
('NO', _('Norway')),
('OM', _('Oman')),
('PK', _('Pakistan')),
('PW', _('Palau')),
('PA', _('Panama')),
('PG', _('Papua New Guinea')),
('PY', _('Paraguay')),
('PE', _('Peru')),
('PH', _('Philippines')),
('PN', _('Pitcairn')),
('PL', _('Poland')),
('PT', _('Portugal')),
('PR', _('Puerto Rico')),
('QA', _('Qatar')),
('RE', _('Reunion')),
('RO', _('Romania')),
('RU', _('Russian Federation')),
('RW', _('Rwanda')),
('BL', _('Saint Barthelemy')),
('SH', _('Saint Helena')),
('KN', _('Saint Kitts and Nevis')),
('LC', _('Saint Lucia')),
('MF', _('Saint Martin')),
('WS', _('Samoa')),
('SM', _('San Marino')),
('ST', _('Sao Tome and Principe')),
('SA', _('Saudi Arabia')),
('SN', _('Senegal')),
('RS', _('Serbia')),
('SC', _('Seychelles')),
('SL', _('Sierra Leone')),
('SG', _('Singapore')),
('SK', _('Slovakia')),
('SI', _('Slovenia')),
('SB', _('Solomon Islands')),
('SO', _('Somalia')),
('ZA', _('South Africa')),
('ES', _('Spain')),
('LK', _('Sri Lanka')),
('SD', _('Sudan')),
('SR', _('Suriname')),
('SJ', _('Svalbard and Jan Mayen')),
('SZ', _('Swaziland')),
('SE', _('Sweden')),
('CH', _('Switzerland')),
('SY', _('Syrian Arab Republic')),
('TW', _('Taiwan')),
('TJ', _('Tajikistan')),
('TZ', _('Tanzania')),
('TH', _('Thailand')),
('TL', _('Timor-Leste')),
('TG', _('Togo')),
('TK', _('Tokelau')),
('TO', _('Tonga')),
('TT', _('Trinidad and Tobago')),
('TN', _('Tunisia')),
('TR', _('Turkey')),
('TM', _('Turkmenistan')),
('TC', _('Turks and Caicos Islands')),
('TV', _('Tuvalu')),
('UG', _('Uganda')),
('UA', _('Ukraine')),
('AE', _('United Arab Emirates')),
('UY', _('Uruguay')),
('UZ', _('Uzbekistan')),
('VU', _('Vanuatu')),
('VE', _('Venezuela')),
('VN', _('Viet Nam')),
('VG', _('Virgin Islands, British')),
('VI', _('Virgin Islands, U.S.')),
('WF', _('Wallis and Futuna')),
('EH', _('Western Sahara')),
('YE', _('Yemen')),
('ZM', _('Zambia')),
('ZW', _('Zimbabwe')),
]
class CountryField(models.CharField):
    """CharField preloaded with ISO 3166-1 alpha-2 country choices."""

    def __init__(self, *args, **kwargs):
        # Apply our defaults without clobbering caller-supplied values.
        defaults = {'max_length': 2, 'choices': COUNTRIES}
        for key, value in defaults.items():
            kwargs.setdefault(key, value)
        super(CountryField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "CharField"
# SOUTH INTROSPECTION RULES
add_introspection_rules([], ["^filebrowser\.fields\.FileBrowseField"])
add_introspection_rules([], ["^artminster\.core\.utils\.fields\.CountryField"]) | mit |
gnieboer/gnuradio | gr-wxgui/python/wxgui/gui.py | 76 | 4565 | #
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import wx
from gnuradio import gr
#
# Top-level display panel with vertical box sizer. User does not create or
# subclass this class; rather, the user supplies his own class constructor
# that gets invoked with needed parameters.
#
class top_panel(wx.Panel):
    """Top-level display panel with a vertical box sizer.

    The user does not create or subclass this class; instead the user
    supplies a GUI class constructor that is invoked with the needed
    parameters.
    """

    def __init__(self, frame, top_block, gui, options, args):
        wx.Panel.__init__(self, frame, -1)
        sizer = wx.BoxSizer(wx.VERTICAL)
        if gui is not None:
            # Instantiate the user's GUI class.
            self.gui = gui(frame,      # Top-level window frame
                           self,       # Parent class for user created windows
                           sizer,      # Sizer for user to add windows to
                           top_block,  # GUI-unaware flowgraph to manipulate
                           options,    # Command-line options
                           args)       # Command-line arguments
        else:
            # No user GUI supplied; show a plain placeholder panel.
            # (There is no real default GUI yet.)
            placeholder = wx.Panel(self)
            placeholder.SetSize((640, 480))
            sizer.Add(placeholder, 1, wx.EXPAND)
        self.SetSizer(sizer)
        self.SetAutoLayout(True)
        sizer.Fit(self)

    def shutdown(self):
        """Give the user GUI a chance to clean up; ignore if it has none."""
        try:
            self.gui.shutdown()
        except AttributeError:
            # Either no user GUI was created or it defines no shutdown().
            pass
#
# Top-level window frame with menu and status bars.
#
class top_frame(wx.Frame):
    """Top-level window frame with menu and status bars.

    Hosts the top_panel, wires the File->Exit menu and window-close events,
    optionally enables realtime scheduling, and optionally starts the
    flowgraph.
    """

    def __init__ (self, top_block, gui, options, args,
                  title, nstatus, start, realtime):
        wx.Frame.__init__(self, None, -1, title)
        self.top_block = top_block

        self.CreateStatusBar(nstatus)
        mainmenu = wx.MenuBar()
        self.SetMenuBar(mainmenu)

        menu = wx.Menu()
        item = menu.Append(200, 'E&xit', 'Exit Application') # FIXME magic ID
        self.Bind(wx.EVT_MENU, self.OnCloseWindow, item)
        mainmenu.Append(menu, "&File")
        self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)

        # Create main panel, creates user GUI class with supplied parameters
        self.panel = top_panel(self, top_block, gui, options, args)

        vbox = wx.BoxSizer(wx.VERTICAL)
        vbox.Add(self.panel, 1, wx.EXPAND)
        self.SetSizer(vbox)
        self.SetAutoLayout(True)
        vbox.Fit(self)

        if realtime:
            if gr.enable_realtime_scheduling() != gr.RT_OK:
                self.SetStatusText("Failed to enable realtime scheduling")

        if start and self.top_block is not None:
            self.top_block.start()

    def OnCloseWindow(self, event):
        """Shut down the user GUI and the flowgraph, then destroy the frame."""
        # Give user API a chance to do something
        self.panel.shutdown()

        # Stop flowgraph as a convenience
        self.SetStatusText("Ensuring flowgraph has completed before exiting...")
        if self.top_block is not None:
            self.top_block.stop()
            self.top_block.wait()

        self.Destroy()
#
# Top-level wxPython application object. User creates or subclasses this
# in their GUI script.
#
class app(wx.App):
    """Top-level wxPython application object.

    Users create (or subclass) this in their GUI script; construction
    parameters are stashed on the instance and consumed by OnInit, which
    wx.App.__init__ triggers.
    """
    def __init__(self, top_block=None, gui=None, options=None, args=None,
                 title="GNU Radio", nstatus=1, start=False, realtime=False):
        # Record every construction parameter before wx.App.__init__ runs,
        # since that call invokes OnInit, which reads them back.
        for name, value in (('top_block', top_block), ('gui', gui),
                            ('options', options), ('args', args),
                            ('title', title), ('nstatus', nstatus),
                            ('start', start), ('realtime', realtime)):
            setattr(self, name, value)
        wx.App.__init__(self, redirect=False)

    def OnInit(self):
        """wx callback: build the main window frame and show it."""
        main_frame = top_frame(self.top_block, self.gui, self.options,
                               self.args, self.title, self.nstatus,
                               self.start, self.realtime)
        main_frame.Show(True)
        self.SetTopWindow(main_frame)
        return True
| gpl-3.0 |
crcresearch/osf.io | scripts/tests/test_embargo_registrations.py | 17 | 6441 | # -*- coding: utf-8 -*-
from datetime import timedelta
from django.utils import timezone
from nose.tools import * # noqa
from tests.base import OsfTestCase
from osf_tests.factories import RegistrationFactory, UserFactory
from scripts.embargo_registrations import main
class TestRetractRegistrations(OsfTestCase):
    """System tests for scripts.embargo_registrations.main.

    The script activates pending embargoes older than 48 hours and
    completes (makes public) embargoes whose end date has passed.
    """

    def setUp(self):
        """Create a user and a registration with a 10-day pending embargo."""
        super(TestRetractRegistrations, self).setUp()
        self.user = UserFactory()
        self.registration = RegistrationFactory(creator=self.user)
        self.registration.embargo_registration(
            self.user,
            timezone.now() + timedelta(days=10)
        )
        self.registration.save()

    def test_new_embargo_should_be_unapproved(self):
        """A freshly created embargo stays pending after the script runs."""
        assert_true(self.registration.is_pending_embargo)
        assert_false(self.registration.embargo_end_date)
        main(dry_run=False)
        assert_true(self.registration.is_pending_embargo)
        assert_false(self.registration.embargo_end_date)

    def test_should_not_activate_pending_embargo_less_than_48_hours_old(self):
        # Embargo#initiation_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['initiation_date'].__set__(
            self.registration.embargo,
            (timezone.now() - timedelta(hours=47)),
            safe=True
        )
        self.registration.embargo.save()
        assert_false(self.registration.embargo_end_date)
        main(dry_run=False)
        assert_false(self.registration.embargo_end_date)

    def test_should_activate_pending_embargo_that_is_48_hours_old(self):
        # Embargo#initiation_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['initiation_date'].__set__(
            self.registration.embargo,
            (timezone.now() - timedelta(hours=48)),
            safe=True
        )
        self.registration.embargo.save()
        assert_true(self.registration.is_pending_embargo)
        assert_false(self.registration.embargo_end_date)
        main(dry_run=False)
        assert_false(self.registration.is_pending_embargo)
        assert_true(self.registration.embargo_end_date)

    def test_should_activate_pending_embargo_more_than_48_hours_old(self):
        # Embargo#initiation_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['initiation_date'].__set__(
            self.registration.embargo,
            (timezone.now() - timedelta(days=365)),
            safe=True
        )
        self.registration.embargo.save()
        assert_true(self.registration.is_pending_embargo)
        assert_false(self.registration.embargo_end_date)
        main(dry_run=False)
        assert_false(self.registration.is_pending_embargo)
        assert_true(self.registration.embargo_end_date)

    def test_embargo_past_end_date_should_be_completed(self):
        """An approved embargo whose end date has passed becomes public."""
        approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
        self.registration.embargo.approve_embargo(self.user, approval_token)
        self.registration.save()
        assert_true(self.registration.embargo_end_date)
        assert_false(self.registration.is_pending_embargo)
        # Embargo#end_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['end_date'].__set__(
            self.registration.embargo,
            (timezone.now() - timedelta(days=1)),
            safe=True
        )
        self.registration.embargo.save()
        assert_false(self.registration.is_public)
        main(dry_run=False)
        assert_true(self.registration.is_public)
        assert_false(self.registration.embargo_end_date)
        assert_false(self.registration.is_pending_embargo)
        assert_equal(self.registration.embargo.state, 'completed')

    def test_embargo_before_end_date_should_not_be_completed(self):
        """An approved embargo whose end date is in the future stays private."""
        approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
        self.registration.embargo.approve_embargo(self.user, approval_token)
        self.registration.save()
        assert_true(self.registration.embargo_end_date)
        assert_false(self.registration.is_pending_embargo)
        # Embargo#end_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['end_date'].__set__(
            self.registration.embargo,
            (timezone.now() + timedelta(days=1)),
            safe=True
        )
        self.registration.embargo.save()
        assert_false(self.registration.is_public)
        main(dry_run=False)
        assert_false(self.registration.is_public)
        assert_true(self.registration.embargo_end_date)
        assert_false(self.registration.is_pending_embargo)

    def test_embargo_approval_adds_to_parent_projects_log(self):
        """Activating an embargo logs an approval entry on the parent project."""
        initial_project_logs = len(self.registration.registered_from.logs)
        # Embargo#initiation_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['initiation_date'].__set__(
            self.registration.embargo,
            (timezone.now() - timedelta(days=365)),
            safe=True
        )
        self.registration.embargo.save()
        main(dry_run=False)
        # Logs: Created, made public, registered, embargo initiated, embargo approved
        # NOTE(review): indexing logs[initial_project_logs + 1] while asserting
        # len == initial_project_logs + 1 looks off-by-one — confirm the logs
        # collection's ordering/indexing semantics.
        embargo_approved_log = self.registration.registered_from.logs[initial_project_logs + 1]
        assert_equal(len(self.registration.registered_from.logs), initial_project_logs + 1)
        assert_equal(embargo_approved_log.params['node'], self.registration.registered_from._id)

    def test_embargo_completion_adds_to_parent_projects_log(self):
        """Completing an expired embargo logs a completion entry on the parent."""
        initial_project_logs = len(self.registration.registered_from.logs)
        approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
        self.registration.embargo.approve_embargo(self.user, approval_token)
        self.registration.save()
        # Embargo#end_date is read only; bypass via the field descriptor.
        self.registration.embargo._fields['end_date'].__set__(
            self.registration.embargo,
            (timezone.now() - timedelta(days=1)),
            safe=True
        )
        self.registration.embargo.save()
        main(dry_run=False)
        # Logs: Created, made public, registered, embargo initiated, embargo approved, embargo completed
        embargo_completed_log = self.registration.registered_from.logs[initial_project_logs + 1]
        assert_equal(len(self.registration.registered_from.logs), initial_project_logs + 2)
        assert_equal(embargo_completed_log.params['node'], self.registration.registered_from._id)
| apache-2.0 |
girving/tensorflow | tensorflow/compiler/tests/nullary_ops_test.py | 19 | 2828 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test cases for operators with no arguments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.platform import googletest
class NullaryOpsTest(xla_test.XLATestCase):
  """Tests XLA compilation of operators that take no inputs."""

  def _testNullary(self, op, expected):
    """Builds `op()` inside the XLA test scope and checks its evaluated value.

    Args:
      op: zero-argument callable returning a tensor.
      expected: value the evaluated tensor must approximately equal.
    """
    with self.cached_session() as session:
      with self.test_scope():
        output = op()
      result = session.run(output)
    self.assertAllClose(result, expected, rtol=1e-3)

  def testNoOp(self):
    """no_op must compile and run under XLA without producing a value."""
    with self.cached_session():
      with self.test_scope():
        output = control_flow_ops.no_op()
      # This should not crash.
      output.run()

  def testConstants(self):
    """Constants of every numeric dtype and assorted shapes round-trip."""
    for dtype in self.numeric_types:
      # Shapes cover scalars, empty arrays, vectors, and nested ranks.
      constants = [
          dtype(42),
          np.array([], dtype=dtype),
          np.array([1, 2], dtype=dtype),
          np.array([7, 7, 7, 7, 7], dtype=dtype),
          np.array([[1, 2, 3], [4, 5, 6]], dtype=dtype),
          np.array([[[1, 2], [3, 4], [5, 6]], [[10, 20], [30, 40], [50, 60]]],
                   dtype=dtype),
          np.array([[[]], [[]]], dtype=dtype),
          np.array([[[[1]]]], dtype=dtype),
      ]
      for c in constants:
        # c=c binds the loop value as a default so each lambda captures it.
        self._testNullary(lambda c=c: constant_op.constant(c), expected=c)

  def testComplexConstants(self):
    """Complex-valued constants of assorted shapes round-trip through XLA."""
    for dtype in self.complex_types:
      constants = [
          dtype(42 + 3j),
          np.array([], dtype=dtype),
          np.ones([50], dtype=dtype) * (3 + 4j),
          np.array([1j, 2 + 1j], dtype=dtype),
          np.array([[1, 2j, 7j], [4, 5, 6]], dtype=dtype),
          np.array([[[1, 2], [3, 4 + 6j], [5, 6]],
                    [[10 + 7j, 20], [30, 40], [50, 60]]],
                   dtype=dtype),
          np.array([[[]], [[]]], dtype=dtype),
          np.array([[[[1 + 3j]]]], dtype=dtype),
      ]
      for c in constants:
        # c=c binds the loop value as a default so each lambda captures it.
        self._testNullary(lambda c=c: constant_op.constant(c), expected=c)
if __name__ == "__main__":
  # Run under the TensorFlow/googletest test runner.
  googletest.main()
| apache-2.0 |
stclair/wes-cms | django/conf/global_settings.py | 95 | 21089 | # Default Django settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
# This is defined here as a do-nothing function because we can't import
# django.utils.translation -- that module depends on the settings.
gettext_noop = lambda s: s
####################
# CORE #
####################
DEBUG = False
TEMPLATE_DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# Whether to use the "Etag" header. This saves bandwidth but slows down performance.
USE_ETAGS = False
# People who get code error notifications.
# In the format (('Full Name', 'email@example.com'), ('Full Name', 'anotheremail@example.com'))
ADMINS = ()
# Tuple of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = ()
# Local time zone for this installation. All choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities).
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages we provide translations for, out of the box. The language name
# should be the utf-8 encoded local name for the language.
LANGUAGES = (
('ar', gettext_noop('Arabic')),
('az', gettext_noop('Azerbaijani')),
('bg', gettext_noop('Bulgarian')),
('bn', gettext_noop('Bengali')),
('bs', gettext_noop('Bosnian')),
('ca', gettext_noop('Catalan')),
('cs', gettext_noop('Czech')),
('cy', gettext_noop('Welsh')),
('da', gettext_noop('Danish')),
('de', gettext_noop('German')),
('el', gettext_noop('Greek')),
('en', gettext_noop('English')),
('en-gb', gettext_noop('British English')),
('es', gettext_noop('Spanish')),
('es-ar', gettext_noop('Argentinian Spanish')),
('es-mx', gettext_noop('Mexican Spanish')),
('es-ni', gettext_noop('Nicaraguan Spanish')),
('et', gettext_noop('Estonian')),
('eu', gettext_noop('Basque')),
('fa', gettext_noop('Persian')),
('fi', gettext_noop('Finnish')),
('fr', gettext_noop('French')),
('fy-nl', gettext_noop('Frisian')),
('ga', gettext_noop('Irish')),
('gl', gettext_noop('Galician')),
('he', gettext_noop('Hebrew')),
('hi', gettext_noop('Hindi')),
('hr', gettext_noop('Croatian')),
('hu', gettext_noop('Hungarian')),
('id', gettext_noop('Indonesian')),
('is', gettext_noop('Icelandic')),
('it', gettext_noop('Italian')),
('ja', gettext_noop('Japanese')),
('ka', gettext_noop('Georgian')),
('km', gettext_noop('Khmer')),
('kn', gettext_noop('Kannada')),
('ko', gettext_noop('Korean')),
('lt', gettext_noop('Lithuanian')),
('lv', gettext_noop('Latvian')),
('mk', gettext_noop('Macedonian')),
('ml', gettext_noop('Malayalam')),
('mn', gettext_noop('Mongolian')),
('nl', gettext_noop('Dutch')),
('no', gettext_noop('Norwegian')),
('nb', gettext_noop('Norwegian Bokmal')),
('nn', gettext_noop('Norwegian Nynorsk')),
('pa', gettext_noop('Punjabi')),
('pl', gettext_noop('Polish')),
('pt', gettext_noop('Portuguese')),
('pt-br', gettext_noop('Brazilian Portuguese')),
('ro', gettext_noop('Romanian')),
('ru', gettext_noop('Russian')),
('sk', gettext_noop('Slovak')),
('sl', gettext_noop('Slovenian')),
('sq', gettext_noop('Albanian')),
('sr', gettext_noop('Serbian')),
('sr-latn', gettext_noop('Serbian Latin')),
('sv', gettext_noop('Swedish')),
('ta', gettext_noop('Tamil')),
('te', gettext_noop('Telugu')),
('th', gettext_noop('Thai')),
('tr', gettext_noop('Turkish')),
('uk', gettext_noop('Ukrainian')),
('ur', gettext_noop('Urdu')),
('vi', gettext_noop('Vietnamese')),
('zh-cn', gettext_noop('Simplified Chinese')),
('zh-tw', gettext_noop('Traditional Chinese')),
)
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ("he", "ar", "fa")
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = ()
LANGUAGE_COOKIE_NAME = 'django_language'
# If you set this to True, Django will format dates, numbers and calendars
# according to user current locale
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various e-mails.
MANAGERS = ADMINS
# Default content type and charset to use for all HttpResponse objects, if a
# MIME type isn't manually specified. These are used to construct the
# Content-Type header.
DEFAULT_CONTENT_TYPE = 'text/html'
DEFAULT_CHARSET = 'utf-8'
# Encoding of files read from disk (template and initial SQL files).
FILE_CHARSET = 'utf-8'
# E-mail address that error messages come from.
SERVER_EMAIL = 'root@localhost'
# Whether to send broken-link e-mails.
SEND_BROKEN_LINK_EMAILS = False
# Database connection info.
# Legacy format
DATABASE_ENGINE = '' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
DATABASE_OPTIONS = {} # Set to empty dictionary for default.
# New format
DATABASES = {
}
# Classes used to implement db routing behaviour
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending e-mail.
EMAIL_HOST = 'localhost'
# Port for sending e-mail.
EMAIL_PORT = 25
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
# List of strings representing installed apps.
INSTALLED_APPS = ()
# List of locations of the template source files, in search order.
TEMPLATE_DIRS = ()
# List of callables that know how to import templates from various sources.
# See the comments in django/core/template/loader.py for interface
# documentation.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
# List of processors used by RequestContext to populate the context.
# Each one should be a callable that takes the request object as its
# only parameter and returns a dictionary to add to the context.
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
# 'django.core.context_processors.request',
'django.contrib.messages.context_processors.messages',
)
# Output to use in template system for invalid (e.g. misspelled) variables.
TEMPLATE_STRING_IF_INVALID = ''
# Default e-mail address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
# Subject-line prefix for email messages send with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = '[Django] '
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = (
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search')
# )
DISALLOWED_USER_AGENTS = ()
ABSOLUTE_URL_OVERRIDES = {}
# Tuple of strings representing allowed prefixes for the {% ssi %} tag.
# Example: ('/home/html', '/var/www')
ALLOWED_INCLUDE_ROOTS = ()
# If this is a admin settings module, this should be a list of
# settings modules (in the format 'foo.bar.baz') for which this admin
# is an admin.
ADMIN_FOR = ()
# 404s that may be ignored.
IGNORABLE_404_STARTS = ('/cgi-bin/', '/_vti_bin', '/_vti_inf')
IGNORABLE_404_ENDS = ('mail.pl', 'mailform.pl', 'mail.cgi', 'mailform.cgi', 'favicon.ico', '.php')
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ''
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com/media/"
MEDIA_URL = ''
# Absolute path to the directory that holds static files.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Directory in which upload streamed files will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see http://docs.python.org/lib/os-file-dir.html.
FILE_UPLOAD_PERMISSIONS = None
# Python module path where user will place custom format definition.
# The directory where this setting is pointing should contain subdirectories
# named as the locales, containing a formats.py file
# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
# Default formatting for datetime objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = 'N j, Y, P'
# Default formatting for time objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = 'P'
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = 'F Y'
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = 'F j'
# Default short formatting for date objects. See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = 'm/d/Y'
# Default short formatting for datetime objects.
# See all available format strings here:
# http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = 'm/d/Y P'
# Default formats to be used when parsing dates from input boxes, in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
# Default formats to be used when parsing times from input boxes, in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
# Default formats to be used when parsing dates and times from input boxes,
# in order
# See all available format string here:
# http://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
)
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = '.'
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be grouped together, when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ','
# Do you want to manage transactions manually?
# Hint: you really don't!
TRANSACTIONS_MANAGED = False
# The User-Agent string to use when checking for URL validity through the
# isExistingURL validator.
from django import get_version
URL_VALIDATOR_USER_AGENT = "Django/%s (http://www.djangoproject.com)" % get_version()
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ''
DEFAULT_INDEX_TABLESPACE = ''
##############
# MIDDLEWARE #
##############
# List of middleware classes to use. Order is important; in the request phase,
# these middleware classes will be applied in the order given, and in the
# response phase the middleware will be applied in reverse order.
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# 'django.middleware.http.ConditionalGetMiddleware',
# 'django.middleware.gzip.GZipMiddleware',
)
############
# SESSIONS #
############
SESSION_COOKIE_NAME = 'sessionid' # Cookie name. This can be whatever you want.
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_DOMAIN = None # A string like ".lawrence.com", or None for standard domain cookie.
SESSION_COOKIE_SECURE = False # Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_PATH = '/' # The path of the session cookie.
SESSION_COOKIE_HTTPONLY = False # Whether to use the non-RFC standard httpOnly flag (IE, FF3+, others)
SESSION_SAVE_EVERY_REQUEST = False # Whether to save the session data on every request.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False # Whether a user's session cookie expires when the Web browser is closed.
SESSION_ENGINE = 'django.contrib.sessions.backends.db' # The module to store session data
SESSION_FILE_PATH = None # Directory to store session files if using the file session module. If None, the backend will use a sensible default.
#########
# CACHE #
#########
# New format
CACHES = {
}
# The cache backend to use. See the docstring in django.core.cache for the
# possible values.
CACHE_MIDDLEWARE_KEY_PREFIX = ''
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = 'default'
####################
# COMMENTS #
####################
COMMENTS_ALLOW_PROFANITIES = False
# The profanities that will trigger a validation error in the
# 'hasNoProfanities' validator. All of these should be in lowercase.
PROFANITIES_LIST = ()
# The group ID that designates which users are banned.
# Set to None if you're not using it.
COMMENTS_BANNED_USERS_GROUP = None
# The group ID that designates which users can moderate comments.
# Set to None if you're not using it.
COMMENTS_MODERATORS_GROUP = None
# The group ID that designates the users whose comments should be e-mailed to MANAGERS.
# Set to None if you're not using it.
COMMENTS_SKETCHY_USERS_GROUP = None
# The system will e-mail MANAGERS the first COMMENTS_FIRST_FEW comments by each
# user. Set this to 0 if you want to disable it.
COMMENTS_FIRST_FEW = 0
# A tuple of IP addresses that have been banned from participating in various
# Django-powered features.
BANNED_IPS = ()
##################
# AUTHENTICATION #
##################
AUTHENTICATION_BACKENDS = ('django.contrib.auth.backends.ModelBackend',)
LOGIN_URL = '/accounts/login/'
LOGOUT_URL = '/accounts/logout/'
LOGIN_REDIRECT_URL = '/accounts/profile/'
# The number of days a password reset link is valid for
PASSWORD_RESET_TIMEOUT_DAYS = 3
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
# Name and domain for CSRF cookie.
CSRF_COOKIE_NAME = 'csrftoken'
CSRF_COOKIE_DOMAIN = None
############
# MESSAGES #
############
# Class to use as the messages backend
MESSAGE_STORAGE = 'django.contrib.messages.storage.user_messages.LegacyFallbackStorage'
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = 'django.utils.log.dictConfig'
# The default logging configuration. This sends an email to
# the site admins on every HTTP 500 error. All other log
# records are sent to the bit bucket.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
# The name of the database to use for testing purposes.
# If None, a name of 'test_' + DATABASE_NAME will be assumed
TEST_DATABASE_NAME = None
# Strings used to set the character set and collation order for the test
# database. These values are passed literally to the server, so they are
# backend-dependent. If None, no special settings are sent (system defaults are
# used).
TEST_DATABASE_CHARSET = None
TEST_DATABASE_COLLATION = None
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = ()
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = ()
# The default file storage backend used during the build process
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# URL prefix for admin media -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
| bsd-3-clause |
thinkerou/grpc | tools/run_tests/performance/patch_scenario_results_schema.py | 16 | 1841 | #!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Use to patch schema of existing scenario results tables (after adding fields).
from __future__ import print_function
import argparse
import calendar
import json
import os
import sys
import time
import uuid
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils
_PROJECT_ID = 'grpc-testing'
def _patch_results_table(dataset_id, table_id):
    """Patch the schema of an existing BigQuery scenario-results table.

    Loads the scenario-result schema JSON that ships next to this script
    and applies it to ``dataset_id.table_id`` in the _PROJECT_ID project.

    Args:
        dataset_id: BigQuery dataset containing the table.
        table_id: Name of the table whose schema is patched.

    Returns:
        The result of big_query_utils.patch_table (truthy on success).
    """
    bq = big_query_utils.create_big_query()
    # Build the path portably instead of concatenating with '+'.
    schema_path = os.path.join(
        os.path.dirname(__file__), 'scenario_result_schema.json')
    with open(schema_path, 'r') as f:
        table_schema = json.loads(f.read())
    # Note: the original also built an unused 'desc' string here; removed.
    return big_query_utils.patch_table(bq, _PROJECT_ID, dataset_id, table_id,
                                       table_schema)
# Command-line entry point: patch the schema of one existing results table.
argp = argparse.ArgumentParser(
    description='Patch schema of scenario results table.')
argp.add_argument(
    '--bq_result_table',
    required=True,
    default=None,
    type=str,
    help='Bigquery "dataset.table" to patch.')
args = argp.parse_args()
# Split on the first '.' only: the previous maxsplit=2 produced three
# components for inputs like "a.b.c" and crashed the 2-tuple unpacking.
dataset_id, table_id = args.bq_result_table.split('.', 1)
_patch_results_table(dataset_id, table_id)
print('Successfully patched schema of %s.\n' % args.bq_result_table)
| apache-2.0 |
webmasterraj/FogOrNot | flask/lib/python2.7/site-packages/docutils/parsers/rst/languages/he.py | 128 | 3640 | # Author: Meir Kriheli
# Id: $Id: he.py 7119 2011-09-02 13:00:23Z milde $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Hebrew-language mappings for language-dependent features of
reStructuredText.
"""
__docformat__ = 'reStructuredText'
directives = {
# language-dependent: fixed
u'\u05ea\u05e9\u05d5\u05de\u05ea \u05dc\u05d1': 'attention',
u'\u05d6\u05d4\u05d9\u05e8\u05d5\u05ea': 'caution',
u'code (translation required)': 'code',
u'\u05e1\u05db\u05e0\u05d4': 'danger',
u'\u05e9\u05d2\u05d9\u05d0\u05d4' : 'error',
u'\u05e8\u05de\u05d6': 'hint',
u'\u05d7\u05e9\u05d5\u05d1': 'important',
u'\u05d4\u05e2\u05e8\u05d4': 'note',
u'\u05d8\u05d9\u05e4': 'tip',
u'\u05d0\u05d6\u05d4\u05e8\u05d4': 'warning',
'admonition': 'admonition',
'sidebar': 'sidebar',
'topic': 'topic',
'line-block': 'line-block',
'parsed-literal': 'parsed-literal',
'rubric': 'rubric',
'epigraph': 'epigraph',
'highlights': 'highlights',
'pull-quote': 'pull-quote',
'compound': 'compound',
'container': 'container',
#'questions': 'questions',
'table': 'table',
'csv-table': 'csv-table',
'list-table': 'list-table',
#'qa': 'questions',
#'faq': 'questions',
'meta': 'meta',
'math (translation required)': 'math',
#'imagemap': 'imagemap',
u'\u05ea\u05de\u05d5\u05e0\u05d4': 'image',
'figure': 'figure',
'include': 'include',
'raw': 'raw',
'replace': 'replace',
'unicode': 'unicode',
'date': 'date',
u'\u05e1\u05d2\u05e0\u05d5\u05df': 'class',
'role': 'role',
'default-role': 'default-role',
'title': 'title',
u'\u05ea\u05d5\u05db\u05df': 'contents',
'sectnum': 'sectnum',
'section-numbering': 'sectnum',
'header': 'header',
'footer': 'footer',
#'footnotes': 'footnotes',
#'citations': 'citations',
'target-notes': 'target-notes',
'restructuredtext-test-directive': 'restructuredtext-test-directive'}
"""English name to registered (in directives/__init__.py) directive name
mapping."""
roles = {
# language-dependent: fixed
'abbreviation': 'abbreviation',
'ab': 'abbreviation',
'acronym': 'acronym',
'ac': 'acronym',
u'code (translation required)': 'code',
'index': 'index',
'i': 'index',
u'\u05ea\u05d7\u05ea\u05d9': 'subscript',
'sub': 'subscript',
u'\u05e2\u05d9\u05dc\u05d9': 'superscript',
'sup': 'superscript',
'title-reference': 'title-reference',
'title': 'title-reference',
't': 'title-reference',
'pep-reference': 'pep-reference',
'pep': 'pep-reference',
'rfc-reference': 'rfc-reference',
'rfc': 'rfc-reference',
'emphasis': 'emphasis',
'strong': 'strong',
'literal': 'literal',
'math (translation required)': 'math',
'named-reference': 'named-reference',
'anonymous-reference': 'anonymous-reference',
'footnote-reference': 'footnote-reference',
'citation-reference': 'citation-reference',
'substitution-reference': 'substitution-reference',
'target': 'target',
'uri-reference': 'uri-reference',
'uri': 'uri-reference',
'url': 'uri-reference',
'raw': 'raw',}
"""Mapping of English role names to canonical role names for interpreted text.
"""
| gpl-2.0 |
apache/dispatch | tests/mock/dispatch.py | 6 | 1917 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
Mock implementation of the dispatch C extension module for use in unit tests.
"""
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# Log-level bit flags (this module mocks the dispatch C extension,
# so the numeric values mirror the real module's constants).
LOG_TRACE = 1
LOG_DEBUG = 2
LOG_INFO = 4
LOG_NOTICE = 8
LOG_WARNING = 16
LOG_ERROR = 32
LOG_CRITICAL = 64
# Maximum number of stack frames reported with a log entry.
LOG_STACK_LIMIT = 8
# Address treatment (message-routing) constants.
TREATMENT_MULTICAST_FLOOD = 0
TREATMENT_MULTICAST_ONCE = 1
TREATMENT_ANYCAST_CLOSEST = 2
TREATMENT_ANYCAST_BALANCED = 3
TREATMENT_LINK_BALANCED = 4
class LogAdapter:
    """Mock log adapter: prints log records to stdout instead of logging."""

    def __init__(self, mod_name):
        self.mod_name = mod_name

    def log(self, level, text):
        """Emit one record in a fixed, greppable format."""
        record = "LOG: mod=%s level=%d text=%s" % (self.mod_name, level, text)
        print(record)
class IoAdapter:
    """Mock IO adapter: remembers its construction arguments and prints
    outgoing messages instead of sending them."""

    def __init__(self, handler, address, global_address=False):
        self.handler = handler
        self.address = address
        self.global_address = global_address

    def send(self, address, properties, application_properties, body, correlation_id=None):
        """Print the would-be message (format kept identical to the mock's)."""
        line = ("IO: send(addr=%s properties=%r application_properties=%r body=%r"
                % (address, properties, application_properties, body))
        print(line)
| apache-2.0 |
rubencabrera/odoo | addons/l10n_co/wizard/__init__.py | 313 | 1165 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) David Arnold (devCO).
# Author David Arnold (devCO), dar@devco.co
# Co-Authors Juan Pablo Aries (devCO), jpa@devco.co
# Hector Ivan Valencia Muñoz (TIX SAS)
# Nhomar Hernandez (Vauxoo)
# Humberto Ochoa (Vauxoo)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
############################################################################## | agpl-3.0 |
tiwillia/openshift-tools | ansible/roles/lib_openshift_3.2/build/src/oc_serviceaccount.py | 13 | 2920 | # pylint: skip-file
# pylint: disable=too-many-instance-attributes
class OCServiceAccount(OpenShiftCLI):
    '''Wrapper around the oc command line tool for service account objects.'''
    kind = 'sa'

    # pylint allows 5
    # pylint: disable=too-many-arguments
    def __init__(self,
                 config,
                 verbose=False):
        ''' Constructor for OCServiceAccount '''
        super(OCServiceAccount, self).__init__(config.namespace, config.kubeconfig)
        self.config = config
        self.namespace = config.namespace
        self._service_account = None

    @property
    def service_account(self):
        '''Cached ServiceAccount object, fetched on first access.'''
        if not self._service_account:
            self.get()
        return self._service_account

    @service_account.setter
    def service_account(self, data):
        '''Replace the cached ServiceAccount object.'''
        self._service_account = data

    def exists(self):
        '''Return True if the service account exists on the cluster.'''
        return bool(self.service_account)

    def get(self):
        '''Fetch the service account.

        A "not found" response is normalized to returncode 0 with an
        empty result so callers can treat absence as a non-error.
        '''
        result = self._get(self.kind, self.config.name)
        if result['returncode'] == 0:
            self.service_account = ServiceAccount(content=result['results'][0])
        elif '\"%s\" not found' % self.config.name in result['stderr']:
            result['returncode'] = 0
            result['results'] = [{}]
        return result

    def delete(self):
        '''Delete the service account.'''
        return self._delete(self.kind, self.config.name)

    def create(self):
        '''Create the service account from the configured definition.'''
        return self._create_from_content(self.config.name, self.config.data)

    def update(self):
        '''Attach any configured secrets / image-pull secrets that are
        missing from the live object, then replace it on the cluster.'''
        for secret_name in self.config.secrets:
            if not self.service_account.find_secret(secret_name):
                self.service_account.add_secret(secret_name)
        for secret_name in self.config.image_pull_secrets:
            if not self.service_account.find_image_pull_secret(secret_name):
                self.service_account.add_image_pull_secret(secret_name)
        return self._replace_content(self.kind, self.config.name, self.config.data)

    def needs_update(self):
        '''Return True if any configured secret or image-pull secret is
        absent from the live object.

        Creating a service account generates secrets and imagePullSecrets,
        so a plain definition comparison (check_def_equal) would always
        differ; instead verify that every configured entry is present.
        '''
        if any(not self.service_account.find_secret(name)
               for name in self.config.secrets):
            return True
        return any(not self.service_account.find_image_pull_secret(name)
                   for name in self.config.image_pull_secrets)
| apache-2.0 |
nopjmp/SickRage | lib/pgi/codegen/cbargs.py | 19 | 3599 | # Copyright 2013 Christoph Reiter
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
from pgi.clib.gir import GITypeTag, GIDirection, GITransfer, GIInfoType
class CallbackArgument(object):
    """Base class describing one argument of a GI callback signature."""

    TAG = None          # GITypeTag value this argument class handles
    is_aux = False      # auxiliary arguments carry no user-visible value
    py_type = None      # Python type exposed to the user, if any

    def __init__(self, backend, info, type_, name):
        self.backend = backend
        self.info = info
        self.type = type_
        self.name = name

    @classmethod
    def get_class(cls, type_):
        """Return the concrete class to use for *type_* (default: cls)."""
        return cls

    def setup(self):
        """Hook for subclasses; nothing to prepare by default."""
        pass

    def process(self):
        """Return (codegen block, variable name); no code by default."""
        return None, self.name

    def is_direction_in(self):
        return self.direction in (GIDirection.IN, GIDirection.INOUT)

    def is_direction_out(self):
        return self.direction in (GIDirection.OUT, GIDirection.INOUT)

    def is_direction_inout(self):
        return self.direction == GIDirection.INOUT

    def transfer_nothing(self):
        return self.info.ownership_transfer.value == GITransfer.NOTHING

    def transfer_container(self):
        return self.info.ownership_transfer.value == GITransfer.CONTAINER

    def transfer_everything(self):
        return self.info.ownership_transfer.value == GITransfer.EVERYTHING
class BaseInterfaceArgument(CallbackArgument):
    """Common base for arguments whose type is a GI interface."""

    TAG = GITypeTag.INTERFACE
    py_type = object

    @classmethod
    def get_class(cls, type_):
        """Dispatch to the concrete argument class for the interface kind."""
        iface = type_.get_interface()
        dispatch = {
            GIInfoType.STRUCT: StructArgument,
            GIInfoType.OBJECT: ObjectArgument,
            GIInfoType.UNION: UnionArgument,
            GIInfoType.FLAGS: FlagsArgument,
            GIInfoType.ENUM: EnumArgument,
        }
        chosen = dispatch.get(iface.type.value)
        if chosen is None:
            raise NotImplementedError("Unsupported interface type %r" % iface.type)
        return chosen

    def process(self):
        type_var = self.backend.get_type(self.type)
        unpacked = type_var.unpack_return(self.name)
        return type_var.block, unpacked
# Concrete interface-argument classes; base-class unpacking is sufficient
# for all of them, they only exist for get_class() dispatch.
class FlagsArgument(BaseInterfaceArgument):
    pass
class EnumArgument(BaseInterfaceArgument):
    pass
class ObjectArgument(BaseInterfaceArgument):
    pass
class StructArgument(BaseInterfaceArgument):
    pass
class UnionArgument(BaseInterfaceArgument):
    pass
# Simple scalar arguments: each pairs a GITypeTag with the Python type
# presented to the user.
class Utf8Argument(CallbackArgument):
    TAG = GITypeTag.UTF8
    py_type = str
class BooleanArgument(CallbackArgument):
    TAG = GITypeTag.BOOLEAN
    py_type = bool
class Int64Argument(CallbackArgument):
    TAG = GITypeTag.INT64
    py_type = int
class Int32Argument(CallbackArgument):
    TAG = GITypeTag.INT32
    py_type = int
class UInt64Argument(CallbackArgument):
    TAG = GITypeTag.UINT64
    py_type = int
class VoidArgument(CallbackArgument):
    TAG = GITypeTag.VOID
    py_type = int
# Registry mapping GITypeTag values to their CallbackArgument subclass.
_classes = {}
def _find_cbargs():
    """Populate _classes from all CallbackArgument subclasses defined
    in this module (keyed by their TAG class attribute)."""
    global _classes
    for var in globals().values():
        if not isinstance(var, type):
            continue
        if issubclass(var, CallbackArgument) and var is not CallbackArgument:
            _classes[var.TAG] = var
_find_cbargs()
def get_cbarg_class(arg_type):
    """Look up the CallbackArgument subclass registered for arg_type's tag.

    Raises NotImplementedError for tags with no registered handler.
    """
    global _classes
    tag = arg_type.tag
    try:
        found = _classes[tag.value]
    except KeyError:
        raise NotImplementedError(
            "%r signal argument not implemented" % tag)
    return found.get_class(arg_type)
| gpl-3.0 |
jiangzhuo/kbengine | kbe/res/scripts/common/Lib/turtle.py | 70 | 143547 | #
# turtle.py: a Tkinter based turtle graphics module for Python
# Version 1.1b - 4. 5. 2009
#
# Copyright (C) 2006 - 2010 Gregor Lingl
# email: glingl@aon.at
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
"""
Turtle graphics is a popular way for introducing programming to
kids. It was part of the original Logo programming language developed
by Wally Feurzig and Seymour Papert in 1966.
Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it
the command turtle.forward(15), and it moves (on-screen!) 15 pixels in
the direction it is facing, drawing a line as it moves. Give it the
command turtle.right(25), and it rotates in-place 25 degrees clockwise.
By combining together these and similar commands, intricate shapes and
pictures can easily be drawn.
----- turtle.py
This module is an extended reimplementation of turtle.py from the
Python standard distribution up to Python 2.5. (See: http://www.python.org)
It tries to keep the merits of turtle.py and to be (nearly) 100%
compatible with it. This means in the first place to enable the
learning programmer to use all the commands, classes and methods
interactively when using the module from within IDLE run with
the -n switch.
Roughly it has the following features added:
- Better animation of the turtle movements, especially of turning the
turtle. So the turtles can more easily be used as a visual feedback
instrument by the (beginning) programmer.
- Different turtle shapes, gif-images as turtle shapes, user defined
and user controllable turtle shapes, among them compound
(multicolored) shapes. Turtle shapes can be stretched and tilted, which
makes turtles very versatile geometrical objects.
- Fine control over turtle movement and screen updates via delay(),
and enhanced tracer() and speed() methods.
- Aliases for the most commonly used commands, like fd for forward etc.,
following the early Logo traditions. This reduces the boring work of
typing long sequences of commands, which often occur in a natural way
when kids try to program fancy pictures on their first encounter with
turtle graphics.
- Turtles now have an undo()-method with configurable undo-buffer.
- Some simple commands/methods for creating event driven programs
(mouse-, key-, timer-events). Especially useful for programming games.
- A scrollable Canvas class. The default scrollable Canvas can be
extended interactively as needed while playing around with the turtle(s).
- A TurtleScreen class with methods controlling background color or
background image, window and canvas size and other properties of the
TurtleScreen.
- There is a method, setworldcoordinates(), to install a user defined
coordinate-system for the TurtleScreen.
- The implementation uses a 2-vector class named Vec2D, derived from tuple.
This class is public, so it can be imported by the application programmer,
which makes certain types of computations very natural and compact.
- Appearance of the TurtleScreen and the Turtles at startup/import can be
configured by means of a turtle.cfg configuration file.
The default configuration mimics the appearance of the old turtle module.
- If configured appropriately the module reads in docstrings from a docstring
dictionary in some different language, supplied separately and replaces
the English ones by those read in. There is a utility function
write_docstringdict() to write a dictionary with the original (English)
docstrings to disc, so it can serve as a template for translations.
Behind the scenes there are some features included with possible
extensions in mind. These will be commented and documented elsewhere.
"""
_ver = "turtle 1.1b- - for Python 3.1 - 4. 5. 2009"
# print(_ver)
import tkinter as TK
import types
import math
import time
import inspect
import sys
from os.path import isfile, split, join
from copy import deepcopy
from tkinter import simpledialog
# Name groups used to build __all__ below.
_tg_classes = ['ScrolledCanvas', 'TurtleScreen', 'Screen',
               'RawTurtle', 'Turtle', 'RawPen', 'Pen', 'Shape', 'Vec2D']
# Screen methods that are also available as module-level functions.
_tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye',
        'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas',
        'getshapes', 'listen', 'mainloop', 'mode', 'numinput',
        'onkey', 'onkeypress', 'onkeyrelease', 'onscreenclick', 'ontimer',
        'register_shape', 'resetscreen', 'screensize', 'setup',
        'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', 'update',
        'window_height', 'window_width']
# Turtle methods that are also available as module-level functions.
_tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk',
        'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color',
        'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd',
        'fillcolor', 'filling', 'forward', 'get_poly', 'getpen', 'getscreen', 'get_shapepoly',
        'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown',
        'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd',
        'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position',
        'pu', 'radians', 'right', 'reset', 'resizemode', 'rt',
        'seth', 'setheading', 'setpos', 'setposition', 'settiltangle',
        'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle',
        'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards',
        'turtlesize', 'undo', 'undobufferentries', 'up', 'width',
        'write', 'xcor', 'ycor']
_tg_utilities = ['write_docstringdict', 'done']
__all__ = (_tg_classes + _tg_screen_functions + _tg_turtle_functions +
           _tg_utilities + ['Terminator']) # + _math_functions)
# Names that are mere abbreviations/aliases of other methods (used when
# replacing docstrings by translated ones).
_alias_list = ['addshape', 'backward', 'bk', 'fd', 'ht', 'lt', 'pd', 'pos',
               'pu', 'rt', 'seth', 'setpos', 'setposition', 'st',
               'turtlesize', 'up', 'width']
# Built-in default configuration; entries may be overridden by a
# turtle.cfg file (see readconfig below).
_CFG = {"width" : 0.5,               # Screen
        "height" : 0.75,
        "canvwidth" : 400,
        "canvheight": 300,
        "leftright": None,
        "topbottom": None,
        "mode": "standard",          # TurtleScreen
        "colormode": 1.0,
        "delay": 10,
        "undobuffersize": 1000,      # RawTurtle
        "shape": "classic",
        "pencolor" : "black",
        "fillcolor" : "black",
        "resizemode" : "noresize",
        "visible" : True,
        "language": "english",        # docstrings
        "exampleturtle": "turtle",
        "examplescreen": "screen",
        "title": "Python Turtle Graphics",
        "using_IDLE": False
       }
def config_dict(filename):
    """Convert the content of a config file into a dictionary.

    Each non-empty, non-comment line must have the form ``key = value``.
    Values spelled as Python literals (True/False/None/''/"") are
    converted to the corresponding object; otherwise numeric-looking
    values become int or float, and everything else stays a (stripped)
    string.  Malformed lines are reported and skipped.
    """
    with open(filename, "r") as f:
        cfglines = f.readlines()
    cfgdict = {}
    # Closed set of literal spellings; previously handled with eval(),
    # which is unnecessary (and unsafe in general) for a fixed set.
    literals = {"True": True, "False": False, "None": None, "''": "", '""': ""}
    for line in cfglines:
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        try:
            key, value = line.split("=")
        except ValueError:
            # More or fewer than one '=' on the line.
            print("Bad line in config-file %s:\n%s" % (filename, line))
            continue
        key = key.strip()
        value = value.strip()
        if value in literals:
            value = literals[value]
        else:
            try:
                value = float(value) if "." in value else int(value)
            except ValueError:
                pass  # value need not be converted
        cfgdict[key] = value
    return cfgdict
def readconfig(cfgdict):
    """Read config-files and update *cfgdict* accordingly.

    If there is a turtle.cfg file in the current working directory,
    read it from there.  If it contains an importconfig-value,
    say 'myway', construct filename turtle_myway.cfg, else use
    turtle.cfg, and read that file from the import-directory where
    turtle.py is located.

    *cfgdict* is updated first from the config-file in the import
    directory, then from the config-file in the current working
    directory, so the local file takes precedence.
    If no config-file is found, *cfgdict* is left unchanged.
    """
    default_cfg = "turtle.cfg"
    cfgdict1 = {}
    cfgdict2 = {}
    if isfile(default_cfg):
        cfgdict1 = config_dict(default_cfg)
    if "importconfig" in cfgdict1:
        default_cfg = "turtle_%s.cfg" % cfgdict1["importconfig"]
    try:
        head = split(__file__)[0]
        cfg_file2 = join(head, default_cfg)
    except Exception:
        # __file__ may be unavailable (e.g. frozen interpreter); then
        # there is no import-directory config file to read.
        cfg_file2 = ""
    if isfile(cfg_file2):
        cfgdict2 = config_dict(cfg_file2)
    # Update the *parameter*: the previous version updated the module
    # global _CFG directly and ignored cfgdict.  Identical at the sole
    # call site readconfig(_CFG), but the function now honors its argument.
    cfgdict.update(cfgdict2)
    cfgdict.update(cfgdict1)
# Best-effort at import time: a broken or unreadable config file must
# never prevent importing the turtle module, hence the blanket except.
try:
    readconfig(_CFG)
except:
    print ("No configfile read, reason unknown")
class Vec2D(tuple):
    """A 2 dimensional vector class, used as a helper class
    for implementing turtle graphics.
    May be useful for turtle graphics programs also.
    Derived from tuple, so a vector is a tuple!

    Provides (for a, b vectors, k number):
       a+b vector addition
       a-b vector subtraction
       a*b inner product
       k*a and a*k multiplication with scalar
       |a| absolute value of a
       a.rotate(angle) rotation
    """
    def __new__(cls, x, y):
        return tuple.__new__(cls, (x, y))
    def __add__(self, other):
        """Componentwise vector addition."""
        return Vec2D(self[0]+other[0], self[1]+other[1])
    def __mul__(self, other):
        """Inner product if *other* is a Vec2D, else scalar multiplication."""
        if isinstance(other, Vec2D):
            return self[0]*other[0]+self[1]*other[1]
        return Vec2D(self[0]*other, self[1]*other)
    def __rmul__(self, other):
        """Scalar multiplication from the left (k*a)."""
        if isinstance(other, (int, float)):
            return Vec2D(self[0]*other, self[1]*other)
        # Bug fix: this used to fall through and implicitly return None,
        # so e.g. "x" * Vec2D(1, 2) silently evaluated to None.  Returning
        # NotImplemented lets Python raise a proper TypeError instead.
        return NotImplemented
    def __sub__(self, other):
        """Componentwise vector subtraction."""
        return Vec2D(self[0]-other[0], self[1]-other[1])
    def __neg__(self):
        """Negate both components."""
        return Vec2D(-self[0], -self[1])
    def __abs__(self):
        """Euclidean length of the vector."""
        return (self[0]**2 + self[1]**2)**0.5
    def rotate(self, angle):
        """rotate self counterclockwise by angle (in degrees)
        """
        perp = Vec2D(-self[1], self[0])
        angle = angle * math.pi / 180.0
        c, s = math.cos(angle), math.sin(angle)
        return Vec2D(self[0]*c+perp[0]*s, self[1]*c+perp[1]*s)
    def __getnewargs__(self):
        # Needed so copy/deepcopy and pickling can reconstruct this
        # immutable tuple subclass with its original constructor arguments.
        return (self[0], self[1])
    def __repr__(self):
        return "(%.2f,%.2f)" % self
##############################################################################
### From here up to line : Tkinter - Interface for turtle.py ###
### May be replaced by an interface to some different graphics toolkit ###
##############################################################################
## helper functions for Scrolled Canvas, to forward Canvas-methods
## to ScrolledCanvas class
def __methodDict(cls, _dict):
    """Collect all plain functions defined on cls and its bases into _dict.

    Base classes are visited first (in reverse declaration order), so a
    method defined on cls itself overwrites a same-named inherited one.
    Helper function for ScrolledCanvas.
    """
    for base in reversed(cls.__bases__):
        __methodDict(base, _dict)
    for name, attr in cls.__dict__.items():
        if type(attr) == types.FunctionType:
            _dict[name] = attr
def __methods(cls):
    """Return the names of all methods reachable from cls.

    Helper function for ScrolledCanvas.
    """
    collected = {}
    __methodDict(cls, collected)
    return collected.keys()
# Source template for a generated forwarding method: it calls the
# same-named method on the attribute whose name fills %(attribute)s.
__stringBody = (
    'def %(method)s(self, *args, **kw): return ' +
    'self.%(attribute)s.%(method)s(*args, **kw)')
def __forwardmethods(fromClass, toClass, toPart, exclude = ()):
    """Attach forwarding methods to fromClass for every public method of
    toClass, delegating to the attribute named by toPart.
    Names starting or ending with '_', names in exclude, and methods
    already defined on fromClass are skipped."""
    ### MANY CHANGES ###
    _dict_1 = {}
    __methodDict(toClass, _dict_1)
    _dict = {}
    mfc = __methods(fromClass)
    for ex in _dict_1.keys():
        if ex[:1] == '_' or ex[-1:] == '_' or ex in exclude or ex in mfc:
            pass
        else:
            _dict[ex] = _dict_1[ex]
    for method, func in _dict.items():
        d = {'method': method, 'func': func}
        if isinstance(toPart, str):
            execString = \
                __stringBody % {'method' : method, 'attribute' : toPart}
        # Compile the forwarding method in namespace d, then attach it.
        exec(execString, d)
        setattr(fromClass, method, d[method])   ### NEWU!
class ScrolledCanvas(TK.Frame):
    """Modeled after the scrolled canvas class from Grayson's Tkinter book.

    Used as the default canvas, which pops up automatically when
    using turtle graphics functions or the Turtle class.
    """
    def __init__(self, master, width=500, height=350,
                                          canvwidth=600, canvheight=500):
        # width/height: visible frame size; canvwidth/canvheight: size of
        # the (possibly larger) scrollable drawing area.
        TK.Frame.__init__(self, master, width=width, height=height)
        self._rootwindow = self.winfo_toplevel()
        self.width, self.height = width, height
        self.canvwidth, self.canvheight = canvwidth, canvheight
        self.bg = "white"
        self._canvas = TK.Canvas(master, width=width, height=height,
                                 bg=self.bg, relief=TK.SUNKEN, borderwidth=2)
        self.hscroll = TK.Scrollbar(master, command=self._canvas.xview,
                                    orient=TK.HORIZONTAL)
        self.vscroll = TK.Scrollbar(master, command=self._canvas.yview)
        self._canvas.configure(xscrollcommand=self.hscroll.set,
                               yscrollcommand=self.vscroll.set)
        self.rowconfigure(0, weight=1, minsize=0)
        self.columnconfigure(0, weight=1, minsize=0)
        # Grid layout: canvas top-left, vertical bar right, horizontal bottom.
        self._canvas.grid(padx=1, in_ = self, pady=1, row=0,
                column=0, rowspan=1, columnspan=1, sticky='news')
        self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
                column=1, rowspan=1, columnspan=1, sticky='news')
        self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
                column=0, rowspan=1, columnspan=1, sticky='news')
        self.reset()
        self._rootwindow.bind('<Configure>', self.onResize)

    def reset(self, canvwidth=None, canvheight=None, bg = None):
        """Adjust canvas and scrollbars according to given canvas size."""
        if canvwidth:
            self.canvwidth = canvwidth
        if canvheight:
            self.canvheight = canvheight
        if bg:
            self.bg = bg
        # Center the scrollregion on (0, 0) -- turtle's coordinate origin.
        self._canvas.config(bg=bg,
                        scrollregion=(-self.canvwidth//2, -self.canvheight//2,
                                       self.canvwidth//2, self.canvheight//2))
        self._canvas.xview_moveto(0.5*(self.canvwidth - self.width + 30) /
                                                               self.canvwidth)
        self._canvas.yview_moveto(0.5*(self.canvheight- self.height + 30) /
                                                              self.canvheight)
        self.adjustScrolls()

    def adjustScrolls(self):
        """ Adjust scrollbars according to window- and canvas-size.
        """
        cwidth = self._canvas.winfo_width()
        cheight = self._canvas.winfo_height()
        self._canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth)
        self._canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight)
        # Show the scrollbars only if the canvas is larger than the window.
        if cwidth < self.canvwidth or cheight < self.canvheight:
            self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
                              column=0, rowspan=1, columnspan=1, sticky='news')
            self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
                              column=1, rowspan=1, columnspan=1, sticky='news')
        else:
            self.hscroll.grid_forget()
            self.vscroll.grid_forget()

    def onResize(self, event):
        """self-explanatory"""
        self.adjustScrolls()

    def bbox(self, *args):
        """ 'forward' method, which canvas itself has inherited...
        """
        return self._canvas.bbox(*args)

    def cget(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        return self._canvas.cget(*args, **kwargs)

    def config(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.config(*args, **kwargs)

    def bind(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.bind(*args, **kwargs)

    def unbind(self, *args, **kwargs):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.unbind(*args, **kwargs)

    def focus_force(self):
        """ 'forward' method, which canvas itself has inherited...
        """
        self._canvas.focus_force()

# Generate forwarding methods for all remaining public Canvas methods,
# so a ScrolledCanvas can be used like a plain Canvas.
__forwardmethods(ScrolledCanvas, TK.Canvas, '_canvas')
class _Root(TK.Tk):
    """Root class for Screen based on Tkinter."""
    def __init__(self):
        TK.Tk.__init__(self)
    def setupcanvas(self, width, height, cwidth, cheight):
        # Create the scrollable drawing area and let it fill the window.
        self._canvas = ScrolledCanvas(self, width, height, cwidth, cheight)
        self._canvas.pack(expand=1, fill="both")
    def _getcanvas(self):
        return self._canvas
    def set_geometry(self, width, height, startx, starty):
        # Tk geometry string: "<width>x<height>+<x>+<y>" with signed offsets.
        self.geometry("%dx%d%+d%+d"%(width, height, startx, starty))
    def ondestroy(self, destroy):
        # Call *destroy* when the window manager closes the window.
        self.wm_protocol("WM_DELETE_WINDOW", destroy)
    def win_width(self):
        # Width of the physical screen in pixels.
        return self.winfo_screenwidth()
    def win_height(self):
        # Height of the physical screen in pixels.
        return self.winfo_screenheight()
Canvas = TK.Canvas  # re-export Tkinter's Canvas under the module namespace
class TurtleScreenBase(object):
"""Provide the basic graphics functionality.
Interface between Tkinter and turtle.py.
To port turtle.py to some different graphics toolkit
a corresponding TurtleScreenBase class has to be implemented.
"""
    @staticmethod
    def _blankimage():
        """Return a blank (transparent) 1x1 PhotoImage object.

        NOTE(review): creating a PhotoImage requires a Tk root window to
        exist -- confirm callers guarantee this.
        """
        img = TK.PhotoImage(width=1, height=1)
        img.blank()
        return img
    @staticmethod
    def _image(filename):
        """Return an image object containing the
        imagedata from a gif-file named filename.
        """
        return TK.PhotoImage(file=filename)
    def __init__(self, cv):
        """Initialize with canvas *cv*, a ScrolledCanvas or a plain TK.Canvas.

        Centers the scrollregion on (0, 0) and starts with a 1:1
        world-to-pixel scale in both directions.
        """
        self.cv = cv
        if isinstance(cv, ScrolledCanvas):
            w = self.cv.canvwidth
            h = self.cv.canvheight
        else:  # expected: ordinary TK.Canvas
            w = int(self.cv.cget("width"))
            h = int(self.cv.cget("height"))
        self.cv.config(scrollregion = (-w//2, -h//2, w//2, h//2 ))
        self.canvwidth = w
        self.canvheight = h
        self.xscale = self.yscale = 1.0
    def _createpoly(self):
        """Create an invisible polygon item on canvas self.cv
        and return its canvas item id.
        """
        return self.cv.create_polygon((0, 0, 0, 0, 0, 0), fill="", outline="")
def _drawpoly(self, polyitem, coordlist, fill=None,
outline=None, width=None, top=False):
"""Configure polygonitem polyitem according to provided
arguments:
coordlist is sequence of coordinates
fill is filling color
outline is outline color
top is a boolean value, which specifies if polyitem
will be put on top of the canvas' displaylist so it
will not be covered by other items.
"""
cl = []
for x, y in coordlist:
cl.append(x * self.xscale)
cl.append(-y * self.yscale)
self.cv.coords(polyitem, *cl)
if fill is not None:
self.cv.itemconfigure(polyitem, fill=fill)
if outline is not None:
self.cv.itemconfigure(polyitem, outline=outline)
if width is not None:
self.cv.itemconfigure(polyitem, width=width)
if top:
self.cv.tag_raise(polyitem)
    def _createline(self):
        """Create an invisible line item on canvas self.cv
        and return its canvas item id.
        """
        return self.cv.create_line(0, 0, 0, 0, fill="", width=2,
                                   capstyle = TK.ROUND)
def _drawline(self, lineitem, coordlist=None,
fill=None, width=None, top=False):
"""Configure lineitem according to provided arguments:
coordlist is sequence of coordinates
fill is drawing color
width is width of drawn line.
top is a boolean value, which specifies if polyitem
will be put on top of the canvas' displaylist so it
will not be covered by other items.
"""
if coordlist is not None:
cl = []
for x, y in coordlist:
cl.append(x * self.xscale)
cl.append(-y * self.yscale)
self.cv.coords(lineitem, *cl)
if fill is not None:
self.cv.itemconfigure(lineitem, fill=fill)
if width is not None:
self.cv.itemconfigure(lineitem, width=width)
if top:
self.cv.tag_raise(lineitem)
    def _delete(self, item):
        """Delete graphics item from canvas.
        If item is "all" delete all graphics items.
        """
        self.cv.delete(item)
    def _update(self):
        """Redraw graphics items on canvas
        """
        self.cv.update()
    def _delay(self, delay):
        """Delay subsequent canvas actions for delay ms."""
        self.cv.after(delay)
def _iscolorstring(self, color):
"""Check if the string color is a legal Tkinter color string.
"""
try:
rgb = self.cv.winfo_rgb(color)
ok = True
except TK.TclError:
ok = False
return ok
def _bgcolor(self, color=None):
"""Set canvas' backgroundcolor if color is not None,
else return backgroundcolor."""
if color is not None:
self.cv.config(bg = color)
self._update()
else:
return self.cv.cget("bg")
    def _write(self, pos, txt, align, font, pencolor):
        """Write txt at pos in canvas with specified font
        and color.
        Return text item and x-coord of right bottom corner
        of text's bounding box."""
        x, y = pos
        x = x * self.xscale
        y = y * self.yscale
        # Map turtle alignment to Tk anchor points; the canvas y-axis is
        # flipped, hence -y below.
        anchor = {"left":"sw", "center":"s", "right":"se" }
        item = self.cv.create_text(x-1, -y, text = txt, anchor = anchor[align],
                                   fill = pencolor, font = font)
        x0, y0, x1, y1 = self.cv.bbox(item)
        self.cv.update()
        return item, x1-1
## def _dot(self, pos, size, color):
## """may be implemented for some other graphics toolkit"""
def _onclick(self, item, fun, num=1, add=None):
"""Bind fun to mouse-click event on turtle.
fun must be a function with two arguments, the coordinates
of the clicked point on the canvas.
num, the number of the mouse-button defaults to 1
"""
if fun is None:
self.cv.tag_unbind(item, "<Button-%s>" % num)
else:
def eventfun(event):
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
self.cv.tag_bind(item, "<Button-%s>" % num, eventfun, add)
def _onrelease(self, item, fun, num=1, add=None):
"""Bind fun to mouse-button-release event on turtle.
fun must be a function with two arguments, the coordinates
of the point on the canvas where mouse button is released.
num, the number of the mouse-button defaults to 1
If a turtle is clicked, first _onclick-event will be performed,
then _onscreensclick-event.
"""
if fun is None:
self.cv.tag_unbind(item, "<Button%s-ButtonRelease>" % num)
else:
def eventfun(event):
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
self.cv.tag_bind(item, "<Button%s-ButtonRelease>" % num,
eventfun, add)
def _ondrag(self, item, fun, num=1, add=None):
"""Bind fun to mouse-move-event (with pressed mouse button) on turtle.
fun must be a function with two arguments, the coordinates of the
actual mouse position on the canvas.
num, the number of the mouse-button defaults to 1
Every sequence of mouse-move-events on a turtle is preceded by a
mouse-click event on that turtle.
"""
if fun is None:
self.cv.tag_unbind(item, "<Button%s-Motion>" % num)
else:
def eventfun(event):
try:
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
except:
pass
self.cv.tag_bind(item, "<Button%s-Motion>" % num, eventfun, add)
def _onscreenclick(self, fun, num=1, add=None):
"""Bind fun to mouse-click event on canvas.
fun must be a function with two arguments, the coordinates
of the clicked point on the canvas.
num, the number of the mouse-button defaults to 1
If a turtle is clicked, first _onclick-event will be performed,
then _onscreensclick-event.
"""
if fun is None:
self.cv.unbind("<Button-%s>" % num)
else:
def eventfun(event):
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
self.cv.bind("<Button-%s>" % num, eventfun, add)
def _onkeyrelease(self, fun, key):
"""Bind fun to key-release event of key.
Canvas must have focus. See method listen
"""
if fun is None:
self.cv.unbind("<KeyRelease-%s>" % key, None)
else:
def eventfun(event):
fun()
self.cv.bind("<KeyRelease-%s>" % key, eventfun)
def _onkeypress(self, fun, key=None):
"""If key is given, bind fun to key-press event of key.
Otherwise bind fun to any key-press.
Canvas must have focus. See method listen.
"""
if fun is None:
if key is None:
self.cv.unbind("<KeyPress>", None)
else:
self.cv.unbind("<KeyPress-%s>" % key, None)
else:
def eventfun(event):
fun()
if key is None:
self.cv.bind("<KeyPress>", eventfun)
else:
self.cv.bind("<KeyPress-%s>" % key, eventfun)
    def _listen(self):
        """Set focus on canvas (in order to collect key-events)
        """
        # focus_force grabs keyboard focus even from other applications.
        self.cv.focus_force()
def _ontimer(self, fun, t):
"""Install a timer, which calls fun after t milliseconds.
"""
if t == 0:
self.cv.after_idle(fun)
else:
self.cv.after(t, fun)
    def _createimage(self, image):
        """Create and return image item on canvas.
        """
        # Anchored at the canvas origin; _drawimage() positions it later.
        return self.cv.create_image(0, 0, image=image)
def _drawimage(self, item, pos, image):
"""Configure image item as to draw image object
at position (x,y) on canvas)
"""
x, y = pos
self.cv.coords(item, (x * self.xscale, -y * self.yscale))
self.cv.itemconfig(item, image=image)
def _setbgpic(self, item, image):
"""Configure image item as to draw image object
at center of canvas. Set item to the first item
in the displaylist, so it will be drawn below
any other item ."""
self.cv.itemconfig(item, image=image)
self.cv.tag_lower(item)
    def _type(self, item):
        """Return 'line' or 'polygon' or 'image' depending on
        type of item.
        """
        # Delegates to Tkinter Canvas.type().
        return self.cv.type(item)
def _pointlist(self, item):
"""returns list of coordinate-pairs of points of item
Example (for insiders):
>>> from turtle import *
>>> getscreen()._pointlist(getturtle().turtle._item)
[(0.0, 9.9999999999999982), (0.0, -9.9999999999999982),
(9.9999999999999982, 0.0)]
>>> """
cl = self.cv.coords(item)
pl = [(cl[i], -cl[i+1]) for i in range(0, len(cl), 2)]
return pl
    def _setscrollregion(self, srx1, sry1, srx2, sry2):
        """Set the canvas scrollregion to the given rectangle
        (canvas coordinates)."""
        self.cv.config(scrollregion=(srx1, sry1, srx2, sry2))
def _rescale(self, xscalefactor, yscalefactor):
items = self.cv.find_all()
for item in items:
coordinates = list(self.cv.coords(item))
newcoordlist = []
while coordinates:
x, y = coordinates[:2]
newcoordlist.append(x * xscalefactor)
newcoordlist.append(y * yscalefactor)
coordinates = coordinates[2:]
self.cv.coords(item, *newcoordlist)
    def _resize(self, canvwidth=None, canvheight=None, bg=None):
        """Resize the canvas the turtles are drawing on.  Does
        not alter the drawing window.

        With no arguments, return the current canvas size.
        """
        # needs amendment
        if not isinstance(self.cv, ScrolledCanvas):
            # A plain canvas cannot be resized here; report stored size.
            return self.canvwidth, self.canvheight
        if canvwidth is canvheight is bg is None:
            # NOTE(review): queries the ScrolledCanvas' own size here but
            # self.canvwidth/.canvheight above -- confirm this asymmetry
            # is intended.
            return self.cv.canvwidth, self.cv.canvheight
        if canvwidth is not None:
            self.canvwidth = canvwidth
        if canvheight is not None:
            self.canvheight = canvheight
        self.cv.reset(canvwidth, canvheight, bg)
def _window_size(self):
""" Return the width and height of the turtle window.
"""
width = self.cv.winfo_width()
if width <= 1: # the window isn't managed by a geometry manager
width = self.cv['width']
height = self.cv.winfo_height()
if height <= 1: # the window isn't managed by a geometry manager
height = self.cv['height']
return width, height
    def mainloop(self):
        """Starts event loop - calling Tkinter's mainloop function.

        No argument.

        Must be the last statement in a turtle graphics program.
        Must NOT be used if a script is run from within IDLE in -n mode
        (No subprocess) - for interactive use of turtle graphics.

        Example (for a TurtleScreen instance named screen):
        >>> screen.mainloop()
        """
        TK.mainloop()
    def textinput(self, title, prompt):
        """Pop up a dialog window for input of a string.

        Arguments: title is the title of the dialog window,
        prompt is a text mostly describing what information to input.

        Return the string input.
        If the dialog is canceled, return None.

        Example (for a TurtleScreen instance named screen):
        >>> screen.textinput("NIM", "Name of first player:")
        """
        return simpledialog.askstring(title, prompt)
    def numinput(self, title, prompt, default=None, minval=None, maxval=None):
        """Pop up a dialog window for input of a number.

        Arguments: title is the title of the dialog window,
        prompt is a text mostly describing what numerical information to input.
        default: default value
        minval: minimum value for input
        maxval: maximum value for input

        The number input must be in the range minval .. maxval if these are
        given. If not, a hint is issued and the dialog remains open for
        correction. Return the number input.
        If the dialog is canceled, return None.

        Example (for a TurtleScreen instance named screen):
        >>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000)
        """
        return simpledialog.askfloat(title, prompt, initialvalue=default,
                                     minvalue=minval, maxvalue=maxval)
##############################################################################
### End of Tkinter - interface ###
##############################################################################
class Terminator(Exception):
    """Will be raised in TurtleScreen.update, if _RUNNING becomes False.

    This stops execution of a turtle graphics script.
    Main purpose: use in the Demo-Viewer turtle.Demo.py.
    """
    # Redundant "pass" removed: the docstring is a sufficient class body.
class TurtleGraphicsError(Exception):
    """Raised for errors in the turtle graphics API, e.g. unknown
    shape types or modes, or malformed color arguments.
    """
class Shape(object):
    """Data structure modeling turtle shapes.

    Attribute _type is one of "polygon", "image", "compound".
    Attribute _data is - depending on _type - a polygon-tuple,
    an image, or a list built up via the addcomponent method.
    """
    def __init__(self, type_, data=None):
        self._type = type_
        if type_ == "compound":
            data = []
        elif type_ == "polygon":
            if isinstance(data, list):
                data = tuple(data)
        elif type_ == "image":
            if isinstance(data, str):
                if data.lower().endswith(".gif") and isfile(data):
                    data = TurtleScreen._image(data)
                # else data assumed to be a Photoimage already
        else:
            raise TurtleGraphicsError("There is no shape type %s" % type_)
        self._data = data

    def addcomponent(self, poly, fill, outline=None):
        """Add component to a shape of type compound.

        Arguments: poly is a polygon, i. e. a tuple of number pairs.
        fill is the fillcolor of the component,
        outline is the outline color of the component (defaults to fill).

        Example:
        >>> poly = ((0,0),(10,-5),(0,10),(-10,-5))
        >>> s = Shape("compound")
        >>> s.addcomponent(poly, "red", "blue")
        >>> # .. add more components and then use register_shape()
        """
        if self._type != "compound":
            raise TurtleGraphicsError("Cannot add component to %s Shape"
                                      % self._type)
        if outline is None:
            outline = fill
        self._data.append([poly, fill, outline])
class Tbuffer(object):
    """Ring buffer used as undobuffer for RawTurtle objects.

    Empty slots hold the placeholder list [None]; self.ptr points at
    the most recently filled slot.
    """
    def __init__(self, bufsize=10):
        self.bufsize = bufsize
        self.buffer = [[None]] * bufsize
        self.ptr = -1
        self.cumulate = False

    def reset(self, bufsize=None):
        """Empty the buffer; with bufsize, also resize it and rewind ptr."""
        if bufsize is not None:
            self.bufsize = bufsize
            self.buffer = [[None]] * bufsize
            self.ptr = -1
        else:
            for i in range(self.bufsize):
                self.buffer[i] = [None]

    def push(self, item):
        """Store item in the next slot, or append it to the current
        slot while cumulate mode is on."""
        if self.bufsize <= 0:
            return
        if self.cumulate:
            self.buffer[self.ptr].append(item)
        else:
            self.ptr = (self.ptr + 1) % self.bufsize
            self.buffer[self.ptr] = item

    def pop(self):
        """Return the most recent entry, clear its slot and step the
        pointer back; return None for an empty/zero-size buffer."""
        if self.bufsize <= 0:
            return None
        item = self.buffer[self.ptr]
        if item is None:
            return None
        self.buffer[self.ptr] = [None]
        self.ptr = (self.ptr - 1) % self.bufsize
        return item

    def nr_of_items(self):
        """Number of occupied (non-placeholder) slots."""
        return self.bufsize - self.buffer.count([None])

    def __repr__(self):
        return "{} {}".format(self.buffer, self.ptr)
class TurtleScreen(TurtleScreenBase):
    """Provides screen oriented methods like setbg etc.

    Only relies upon the methods of TurtleScreenBase and NOT
    upon components of the underlying graphics toolkit -
    which is Tkinter in this case.
    """
    _RUNNING = True

    def __init__(self, cv, mode=_CFG["mode"],
                 colormode=_CFG["colormode"], delay=_CFG["delay"]):
        # Built-in shapes; polygon vertices are in turtle coordinates.
        self._shapes = {
                   "arrow" : Shape("polygon", ((-10,0), (10,0), (0,10))),
                  "turtle" : Shape("polygon", ((0,16), (-2,14), (-1,10), (-4,7),
                              (-7,9), (-9,8), (-6,5), (-7,1), (-5,-3), (-8,-6),
                              (-6,-8), (-4,-5), (0,-7), (4,-5), (6,-8), (8,-6),
                              (5,-3), (7,1), (6,5), (9,8), (7,9), (4,7), (1,10),
                              (2,14))),
                  "circle" : Shape("polygon", ((10,0), (9.51,3.09), (8.09,5.88),
                              (5.88,8.09), (3.09,9.51), (0,10), (-3.09,9.51),
                              (-5.88,8.09), (-8.09,5.88), (-9.51,3.09), (-10,0),
                              (-9.51,-3.09), (-8.09,-5.88), (-5.88,-8.09),
                              (-3.09,-9.51), (-0.00,-10.00), (3.09,-9.51),
                              (5.88,-8.09), (8.09,-5.88), (9.51,-3.09))),
                  "square" : Shape("polygon", ((10,-10), (10,10), (-10,10),
                              (-10,-10))),
                "triangle" : Shape("polygon", ((10,-5.77), (0,11.55),
                              (-10,-5.77))),
                  "classic": Shape("polygon", ((0,0),(-5,-9),(0,-7),(5,-9))),
                   "blank" : Shape("image", self._blankimage())
                  }
        self._bgpics = {"nopic" : ""}
        TurtleScreenBase.__init__(self, cv)
        self._mode = mode
        self._delayvalue = delay
        self._colormode = _CFG["colormode"]
        self._keys = []
        self.clear()
        if sys.platform == 'darwin':
            # Force Turtle window to the front on OS X. This is needed because
            # the Turtle window will show behind the Terminal window when you
            # start the demo from the command line.
            rootwindow = cv.winfo_toplevel()
            rootwindow.call('wm', 'attributes', '.', '-topmost', '1')
            rootwindow.call('wm', 'attributes', '.', '-topmost', '0')

    def clear(self):
        """Delete all drawings and all turtles from the TurtleScreen.

        No argument.

        Reset empty TurtleScreen to its initial state: white background,
        no backgroundimage, no eventbindings and tracing on.

        Example (for a TurtleScreen instance named screen):
        >>> screen.clear()

        Note: this method is not available as function.
        """
        self._delayvalue = _CFG["delay"]
        self._colormode = _CFG["colormode"]
        self._delete("all")
        self._bgpic = self._createimage("")
        self._bgpicname = "nopic"
        self._tracing = 1
        self._updatecounter = 0
        self._turtles = []
        self.bgcolor("white")
        for btn in 1, 2, 3:
            self.onclick(None, btn)
        self.onkeypress(None)
        for key in self._keys[:]:
            self.onkey(None, key)
            self.onkeypress(None, key)
        Turtle._pen = None

    def mode(self, mode=None):
        """Set turtle-mode ('standard', 'logo' or 'world') and perform reset.

        Optional argument:
        mode -- one of the strings 'standard', 'logo' or 'world'

        Mode 'standard' is compatible with turtle.py.
        Mode 'logo' is compatible with most Logo-Turtle-Graphics.
        Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in
        this mode angles appear distorted if x/y unit-ratio doesn't equal 1.
        If mode is not given, return the current mode.

             Mode      Initial turtle heading     positive angles
         ------------|-------------------------|-------------------
          'standard'    to the right (east)       counterclockwise
            'logo'        upward    (north)         clockwise

        Examples:
        >>> mode('logo')   # resets turtle heading to north
        >>> mode()
        'logo'
        """
        if mode is None:
            return self._mode
        mode = mode.lower()
        if mode not in ["standard", "logo", "world"]:
            raise TurtleGraphicsError("No turtle-graphics-mode %s" % mode)
        self._mode = mode
        if mode in ["standard", "logo"]:
            self._setscrollregion(-self.canvwidth//2, -self.canvheight//2,
                                       self.canvwidth//2, self.canvheight//2)
            self.xscale = self.yscale = 1.0
        self.reset()

    def setworldcoordinates(self, llx, lly, urx, ury):
        """Set up a user defined coordinate-system.

        Arguments:
        llx -- a number, x-coordinate of lower left corner of canvas
        lly -- a number, y-coordinate of lower left corner of canvas
        urx -- a number, x-coordinate of upper right corner of canvas
        ury -- a number, y-coordinate of upper right corner of canvas

        Set up user coordinate-system and switch to mode 'world' if necessary.
        This performs a screen.reset. If mode 'world' is already active,
        all drawings are redrawn according to the new coordinates.

        But ATTENTION: in user-defined coordinatesystems angles may appear
        distorted. (see Screen.mode())

        Example (for a TurtleScreen instance named screen):
        >>> screen.setworldcoordinates(-10,-0.5,50,1.5)
        >>> for _ in range(36):
        ...     left(10)
        ...     forward(0.5)
        """
        if self.mode() != "world":
            self.mode("world")
        xspan = float(urx - llx)
        yspan = float(ury - lly)
        wx, wy = self._window_size()
        self.screensize(wx-20, wy-20)
        oldxscale, oldyscale = self.xscale, self.yscale
        self.xscale = self.canvwidth / xspan
        self.yscale = self.canvheight / yspan
        srx1 = llx * self.xscale
        sry1 = -ury * self.yscale
        srx2 = self.canvwidth + srx1
        sry2 = self.canvheight + sry1
        self._setscrollregion(srx1, sry1, srx2, sry2)
        # Redraw existing items so they match the new scale factors.
        self._rescale(self.xscale/oldxscale, self.yscale/oldyscale)
        self.update()

    def register_shape(self, name, shape=None):
        """Adds a turtle shape to TurtleScreen's shapelist.

        Arguments:
        (1) name is the name of a gif-file and shape is None.
            Installs the corresponding image shape.
            !! Image-shapes DO NOT rotate when turning the turtle,
            !! so they do not display the heading of the turtle!
        (2) name is an arbitrary string and shape is a tuple
            of pairs of coordinates. Installs the corresponding
            polygon shape
        (3) name is an arbitrary string and shape is a
            (compound) Shape object. Installs the corresponding
            compound shape.
        To use a shape, you have to issue the command shape(shapename).

        call: register_shape("turtle.gif")
        --or: register_shape("tri", ((0,0), (10,10), (-10,10)))

        Example (for a TurtleScreen instance named screen):
        >>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3)))
        """
        if shape is None:
            # image
            if name.lower().endswith(".gif"):
                shape = Shape("image", self._image(name))
            else:
                raise TurtleGraphicsError("Bad arguments for register_shape.\n"
                                          + "Use  help(register_shape)" )
        elif isinstance(shape, tuple):
            shape = Shape("polygon", shape)
        ## else shape assumed to be Shape-instance
        self._shapes[name] = shape

    def _colorstr(self, color):
        """Return color string corresponding to args.

        Argument may be a string or a tuple of three
        numbers corresponding to actual colormode,
        i.e. in the range 0<=n<=colormode.

        If the argument doesn't represent a color,
        an error is raised.
        """
        if len(color) == 1:
            color = color[0]
        if isinstance(color, str):
            if self._iscolorstring(color) or color == "":
                return color
            else:
                raise TurtleGraphicsError("bad color string: %s" % str(color))
        try:
            r, g, b = color
        except (TypeError, ValueError):
            # was a bare "except:"; only unpacking failures are expected here
            raise TurtleGraphicsError("bad color arguments: %s" % str(color))
        if self._colormode == 1.0:
            r, g, b = [round(255.0*x) for x in (r, g, b)]
        if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):
            raise TurtleGraphicsError("bad color sequence: %s" % str(color))
        return "#%02x%02x%02x" % (r, g, b)

    def _color(self, cstr):
        """Inverse of _colorstr: convert a "#rrggbb" or "#rgb" string
        back to an (r, g, b) tuple scaled to the current colormode."""
        if not cstr.startswith("#"):
            return cstr
        if len(cstr) == 7:
            cl = [int(cstr[i:i+2], 16) for i in (1, 3, 5)]
        elif len(cstr) == 4:
            cl = [16*int(cstr[h], 16) for h in cstr[1:]]
        else:
            raise TurtleGraphicsError("bad colorstring: %s" % cstr)
        return tuple([c * self._colormode/255 for c in cl])

    def colormode(self, cmode=None):
        """Return the colormode or set it to 1.0 or 255.

        Optional argument:
        cmode -- one of the values 1.0 or 255

        r, g, b values of colortriples have to be in range 0..cmode.

        Example (for a TurtleScreen instance named screen):
        >>> screen.colormode()
        1.0
        >>> screen.colormode(255)
        >>> pencolor(240,160,80)
        """
        if cmode is None:
            return self._colormode
        if cmode == 1.0:
            self._colormode = float(cmode)
        elif cmode == 255:
            self._colormode = int(cmode)
        # any other value is silently ignored (kept for compatibility)

    def reset(self):
        """Reset all Turtles on the Screen to their initial state.

        No argument.

        Example (for a TurtleScreen instance named screen):
        >>> screen.reset()
        """
        for turtle in self._turtles:
            turtle._setmode(self._mode)
            turtle.reset()

    def turtles(self):
        """Return the list of turtles on the screen.

        Example (for a TurtleScreen instance named screen):
        >>> screen.turtles()
        [<turtle.Turtle object at 0x00E11FB0>]
        """
        return self._turtles

    def bgcolor(self, *args):
        """Set or return backgroundcolor of the TurtleScreen.

        Arguments (if given): a color string or three numbers
        in the range 0..colormode or a 3-tuple of such numbers.

        Example (for a TurtleScreen instance named screen):
        >>> screen.bgcolor("orange")
        >>> screen.bgcolor()
        'orange'
        >>> screen.bgcolor(0.5,0,0.5)
        >>> screen.bgcolor()
        '#800080'
        """
        if args:
            color = self._colorstr(args)
        else:
            color = None
        color = self._bgcolor(color)
        if color is not None:
            color = self._color(color)
        return color

    def tracer(self, n=None, delay=None):
        """Turns turtle animation on/off and set delay for update drawings.

        Optional arguments:
        n -- nonnegative  integer
        delay -- nonnegative  integer

        If n is given, only each n-th regular screen update is really performed.
        (Can be used to accelerate the drawing of complex graphics.)
        Second arguments sets delay value (see RawTurtle.delay())

        Example (for a TurtleScreen instance named screen):
        >>> screen.tracer(8, 25)
        >>> dist = 2
        >>> for i in range(200):
        ...     fd(dist)
        ...     rt(90)
        ...     dist += 2
        """
        if n is None:
            return self._tracing
        self._tracing = int(n)
        self._updatecounter = 0
        if delay is not None:
            self._delayvalue = int(delay)
        if self._tracing:
            self.update()

    def delay(self, delay=None):
        """ Return or set the drawing delay in milliseconds.

        Optional argument:
        delay -- positive integer

        Example (for a TurtleScreen instance named screen):
        >>> screen.delay(15)
        >>> screen.delay()
        15
        """
        if delay is None:
            return self._delayvalue
        self._delayvalue = int(delay)

    def _incrementudc(self):
        """Increment update counter."""
        if not TurtleScreen._RUNNING:
            # Re-arm the class flag before aborting, so a fresh run can
            # start.  (Bug fix: this line previously assigned to a
            # misspelled "_RUNNNING" attribute, so the reset never
            # took effect.)
            TurtleScreen._RUNNING = True
            raise Terminator
        if self._tracing > 0:
            self._updatecounter += 1
            self._updatecounter %= self._tracing

    def update(self):
        """Perform a TurtleScreen update.
        """
        tracing = self._tracing
        # temporarily enable tracing so every turtle really redraws
        self._tracing = True
        for t in self.turtles():
            t._update_data()
            t._drawturtle()
        self._tracing = tracing
        self._update()

    def window_width(self):
        """ Return the width of the turtle window.

        Example (for a TurtleScreen instance named screen):
        >>> screen.window_width()
        640
        """
        return self._window_size()[0]

    def window_height(self):
        """ Return the height of the turtle window.

        Example (for a TurtleScreen instance named screen):
        >>> screen.window_height()
        480
        """
        return self._window_size()[1]

    def getcanvas(self):
        """Return the Canvas of this TurtleScreen.

        No argument.

        Example (for a Screen instance named screen):
        >>> cv = screen.getcanvas()
        >>> cv
        <turtle.ScrolledCanvas instance at 0x010742D8>
        """
        return self.cv

    def getshapes(self):
        """Return a list of names of all currently available turtle shapes.

        No argument.

        Example (for a TurtleScreen instance named screen):
        >>> screen.getshapes()
        ['arrow', 'blank', 'circle', ... , 'turtle']
        """
        return sorted(self._shapes.keys())

    def onclick(self, fun, btn=1, add=None):
        """Bind fun to mouse-click event on canvas.

        Arguments:
        fun -- a function with two arguments, the coordinates of the
               clicked point on the canvas.
        btn -- the number of the mouse-button, defaults to 1

        Example (for a TurtleScreen instance named screen)

        >>> screen.onclick(goto)
        >>> # Subsequently clicking into the TurtleScreen will
        >>> # make the turtle move to the clicked point.
        >>> screen.onclick(None)
        """
        self._onscreenclick(fun, btn, add)

    def onkey(self, fun, key):
        """Bind fun to key-release event of key.

        Arguments:
        fun -- a function with no arguments
        key -- a string: key (e.g. "a") or key-symbol (e.g. "space")

        In order to be able to register key-events, TurtleScreen
        must have focus. (See method listen.)

        Example (for a TurtleScreen instance named screen):

        >>> def f():
        ...     fd(50)
        ...     lt(60)
        ...
        >>> screen.onkey(f, "Up")
        >>> screen.listen()

        Subsequently the turtle can be moved by repeatedly pressing
        the up-arrow key, consequently drawing a hexagon
        """
        if fun is None:
            if key in self._keys:
                self._keys.remove(key)
        elif key not in self._keys:
            self._keys.append(key)
        self._onkeyrelease(fun, key)

    def onkeypress(self, fun, key=None):
        """Bind fun to key-press event of key if key is given,
        or to any key-press-event if no key is given.

        Arguments:
        fun -- a function with no arguments
        key -- a string: key (e.g. "a") or key-symbol (e.g. "space")

        In order to be able to register key-events, TurtleScreen
        must have focus. (See method listen.)

        Example (for a TurtleScreen instance named screen
        and a Turtle instance named turtle):

        >>> def f():
        ...     fd(50)
        ...     lt(60)
        ...
        >>> screen.onkeypress(f, "Up")
        >>> screen.listen()

        Subsequently the turtle can be moved by repeatedly pressing
        the up-arrow key, or by keeping pressed the up-arrow key.
        consequently drawing a hexagon.
        """
        if fun is None:
            if key in self._keys:
                self._keys.remove(key)
        elif key is not None and key not in self._keys:
            self._keys.append(key)
        self._onkeypress(fun, key)

    def listen(self, xdummy=None, ydummy=None):
        """Set focus on TurtleScreen (in order to collect key-events)

        No arguments.
        Dummy arguments are provided in order
        to be able to pass listen to the onclick method.

        Example (for a TurtleScreen instance named screen):
        >>> screen.listen()
        """
        self._listen()

    def ontimer(self, fun, t=0):
        """Install a timer, which calls fun after t milliseconds.

        Arguments:
        fun -- a function with no arguments.
        t -- a number >= 0

        Example (for a TurtleScreen instance named screen):

        >>> running = True
        >>> def f():
        ...     if running:
        ...             fd(50)
        ...             lt(60)
        ...             screen.ontimer(f, 250)
        ...
        >>> f()   # makes the turtle marching around
        >>> running = False
        """
        self._ontimer(fun, t)

    def bgpic(self, picname=None):
        """Set background image or return name of current backgroundimage.

        Optional argument:
        picname -- a string, name of a gif-file or "nopic".

        If picname is a filename, set the corresponding image as background.
        If picname is "nopic", delete backgroundimage, if present.
        If picname is None, return the filename of the current backgroundimage.

        Example (for a TurtleScreen instance named screen):
        >>> screen.bgpic()
        'nopic'
        >>> screen.bgpic("landscape.gif")
        >>> screen.bgpic()
        'landscape.gif'
        """
        if picname is None:
            return self._bgpicname
        if picname not in self._bgpics:
            # cache loaded images so each file is only read once
            self._bgpics[picname] = self._image(picname)
        self._setbgpic(self._bgpic, self._bgpics[picname])
        self._bgpicname = picname

    def screensize(self, canvwidth=None, canvheight=None, bg=None):
        """Resize the canvas the turtles are drawing on.

        Optional arguments:
        canvwidth -- positive integer, new width of canvas in pixels
        canvheight --  positive integer, new height of canvas in pixels
        bg -- colorstring or color-tuple, new backgroundcolor
        If no arguments are given, return current (canvaswidth, canvasheight)

        Do not alter the drawing window. To observe hidden parts of
        the canvas use the scrollbars. (Can make visible those parts
        of a drawing, which were outside the canvas before!)

        Example (for a Turtle instance named turtle):
        >>> turtle.screensize(2000,1500)
        >>> # e.g. to search for an erroneously escaped turtle ;-)
        """
        return self._resize(canvwidth, canvheight, bg)

    onscreenclick = onclick
    resetscreen = reset
    clearscreen = clear
    addshape = register_shape
    onkeyrelease = onkey
class TNavigator(object):
"""Navigation part of the RawTurtle.
Implements methods for turtle movement.
"""
START_ORIENTATION = {
"standard": Vec2D(1.0, 0.0),
"world" : Vec2D(1.0, 0.0),
"logo" : Vec2D(0.0, 1.0) }
DEFAULT_MODE = "standard"
DEFAULT_ANGLEOFFSET = 0
DEFAULT_ANGLEORIENT = 1
    def __init__(self, mode=DEFAULT_MODE):
        self._angleOffset = self.DEFAULT_ANGLEOFFSET
        self._angleOrient = self.DEFAULT_ANGLEORIENT
        self._mode = mode
        self.undobuffer = None    # installed later by subclasses; None = no undo
        self.degrees()            # needs self._mode; sets _fullcircle etc.
        # NOTE(review): _mode is cleared before _setmode(mode) -- appears
        # defensive, since _setmode re-derives offset/orientation anyway.
        self._mode = None
        self._setmode(mode)
        # Explicit class call: subclasses override reset() with drawing logic.
        TNavigator.reset(self)
    def reset(self):
        """reset turtle to its initial values

        Overridden by subclasses with additional state.
        """
        self._position = Vec2D(0.0, 0.0)   # home position (origin)
        # initial heading depends on the mode ('standard'/'world'/'logo')
        self._orient =  TNavigator.START_ORIENTATION[self._mode]
def _setmode(self, mode=None):
"""Set turtle-mode to 'standard', 'world' or 'logo'.
"""
if mode is None:
return self._mode
if mode not in ["standard", "logo", "world"]:
return
self._mode = mode
if mode in ["standard", "world"]:
self._angleOffset = 0
self._angleOrient = 1
else: # mode == "logo":
self._angleOffset = self._fullcircle/4.
self._angleOrient = -1
def _setDegreesPerAU(self, fullcircle):
"""Helper function for degrees() and radians()"""
self._fullcircle = fullcircle
self._degreesPerAU = 360/fullcircle
if self._mode == "standard":
self._angleOffset = 0
else:
self._angleOffset = fullcircle/4.
def degrees(self, fullcircle=360.0):
""" Set angle measurement units to degrees.
Optional argument:
fullcircle - a number
Set angle measurement units, i. e. set number
of 'degrees' for a full circle. Dafault value is
360 degrees.
Example (for a Turtle instance named turtle):
>>> turtle.left(90)
>>> turtle.heading()
90
Change angle measurement unit to grad (also known as gon,
grade, or gradian and equals 1/100-th of the right angle.)
>>> turtle.degrees(400.0)
>>> turtle.heading()
100
"""
self._setDegreesPerAU(fullcircle)
def radians(self):
""" Set the angle measurement units to radians.
No arguments.
Example (for a Turtle instance named turtle):
>>> turtle.heading()
90
>>> turtle.radians()
>>> turtle.heading()
1.5707963267948966
"""
self._setDegreesPerAU(2*math.pi)
def _go(self, distance):
"""move turtle forward by specified distance"""
ende = self._position + self._orient * distance
self._goto(ende)
def _rotate(self, angle):
"""Turn turtle counterclockwise by specified angle if angle > 0."""
angle *= self._degreesPerAU
self._orient = self._orient.rotate(angle)
    def _goto(self, end):
        """move turtle to position end."""
        # Plain position update; subclasses override this to also draw
        # (not visible in this chunk).
        self._position = end
    def forward(self, distance):
        """Move the turtle forward by the specified distance.

        Aliases: forward | fd

        Argument:
        distance -- a number (integer or float)

        Move the turtle forward by the specified distance, in the direction
        the turtle is headed.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 0.00)
        >>> turtle.forward(25)
        >>> turtle.position()
        (25.00,0.00)
        >>> turtle.forward(-75)
        >>> turtle.position()
        (-50.00,0.00)
        """
        self._go(distance)
    def back(self, distance):
        """Move the turtle backward by distance.

        Aliases: back | backward | bk

        Argument:
        distance -- a number

        Move the turtle backward by distance, opposite to the direction the
        turtle is headed. Do not change the turtle's heading.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 0.00)
        >>> turtle.backward(30)
        >>> turtle.position()
        (-30.00, 0.00)
        """
        self._go(-distance)
    def right(self, angle):
        """Turn turtle right by angle units.

        Aliases: right | rt

        Argument:
        angle -- a number (integer or float)

        Turn turtle right by angle units. (Units are by default degrees,
        but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode. (See mode().)

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        22.0
        >>> turtle.right(45)
        >>> turtle.heading()
        337.0
        """
        self._rotate(-angle)
    def left(self, angle):
        """Turn turtle left by angle units.

        Aliases: left | lt

        Argument:
        angle -- a number (integer or float)

        Turn turtle left by angle units. (Units are by default degrees,
        but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode. (See mode().)

        Example (for a Turtle instance named turtle):
        >>> turtle.heading()
        22.0
        >>> turtle.left(45)
        >>> turtle.heading()
        67.0
        """
        self._rotate(angle)
    def pos(self):
        """Return the turtle's current location (x,y), as a Vec2D-vector.

        Aliases: pos | position

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> turtle.pos()
        (0.00, 240.00)
        """
        return self._position
    def xcor(self):
        """ Return the turtle's x coordinate.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> reset()
        >>> turtle.left(60)
        >>> turtle.forward(100)
        >>> print(turtle.xcor())
        50.0
        """
        return self._position[0]
    def ycor(self):
        """ Return the turtle's y coordinate.

        No arguments.

        Example (for a Turtle instance named turtle):
        >>> reset()
        >>> turtle.left(60)
        >>> turtle.forward(100)
        >>> print(turtle.ycor())
        86.6025403784
        """
        return self._position[1]
def goto(self, x, y=None):
"""Move turtle to an absolute position.
Aliases: setpos | setposition | goto:
Arguments:
x -- a number or a pair/vector of numbers
y -- a number None
call: goto(x, y) # two coordinates
--or: goto((x, y)) # a pair (tuple) of coordinates
--or: goto(vec) # e.g. as returned by pos()
Move turtle to an absolute position. If the pen is down,
a line will be drawn. The turtle's orientation does not change.
Example (for a Turtle instance named turtle):
>>> tp = turtle.pos()
>>> tp
(0.00, 0.00)
>>> turtle.setpos(60,30)
>>> turtle.pos()
(60.00,30.00)
>>> turtle.setpos((20,80))
>>> turtle.pos()
(20.00,80.00)
>>> turtle.setpos(tp)
>>> turtle.pos()
(0.00,0.00)
"""
if y is None:
self._goto(Vec2D(*x))
else:
self._goto(Vec2D(x, y))
def home(self):
"""Move turtle to the origin - coordinates (0,0).
No arguments.
Move turtle to the origin - coordinates (0,0) and set its
heading to its start-orientation (which depends on mode).
Example (for a Turtle instance named turtle):
>>> turtle.home()
"""
self.goto(0, 0)
self.setheading(0)
    def setx(self, x):
        """Set the turtle's first coordinate to x.

        Argument:
        x -- a number (integer or float)

        Set the turtle's first coordinate to x, leave second coordinate
        unchanged.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 240.00)
        >>> turtle.setx(10)
        >>> turtle.position()
        (10.00, 240.00)
        """
        self._goto(Vec2D(x, self._position[1]))
    def sety(self, y):
        """Set the turtle's second coordinate to y.

        Argument:
        y -- a number (integer or float)

        Set the turtle's second coordinate to y, leave first coordinate
        unchanged.

        Example (for a Turtle instance named turtle):
        >>> turtle.position()
        (0.00, 40.00)
        >>> turtle.sety(-10)
        >>> turtle.position()
        (0.00, -10.00)
        """
        self._goto(Vec2D(self._position[0], y))
def distance(self, x, y=None):
    """Return the distance from the turtle to (x,y) in turtle step units.

    Arguments:
    x -- a number or a pair/vector of numbers or a turtle instance
    y -- a number or None

    call: distance(x, y)         # two coordinates
    --or: distance((x, y))       # a pair (tuple) of coordinates
    --or: distance(vec)          # e.g. as returned by pos()
    --or: distance(mypen)        # where mypen is another turtle

    Example (for a Turtle instance named turtle):
    >>> turtle.pos()
    (0.00, 0.00)
    >>> turtle.distance(30,40)
    50.0
    >>> pen = Turtle()
    >>> pen.forward(77)
    >>> turtle.distance(pen)
    77.0
    """
    if y is not None:
        target = Vec2D(x, y)
    # NOTE: deliberately not an elif chain - a vector/tuple/turtle first
    # argument takes precedence even when y was also supplied.
    if isinstance(x, Vec2D):
        target = x
    elif isinstance(x, tuple):
        target = Vec2D(*x)
    elif isinstance(x, TNavigator):
        target = x._position
    return abs(target - self._position)
def towards(self, x, y=None):
    """Return the angle of the line from the turtle's position to (x, y).

    Arguments:
    x -- a number or a pair/vector of numbers or a turtle instance
    y -- a number or None

    call: towards(x, y)         # two coordinates
    --or: towards((x, y))       # a pair (tuple) of coordinates
    --or: towards(vec)          # e.g. as returned by pos()
    --or: towards(mypen)        # where mypen is another turtle

    Return the angle, between the line from turtle-position to position
    specified by x, y and the turtle's start orientation. (Depends on
    modes - "standard" or "logo")

    Example (for a Turtle instance named turtle):
    >>> turtle.pos()
    (10.00, 10.00)
    >>> turtle.towards(0,0)
    225.0
    """
    if y is not None:
        pos = Vec2D(x, y)
    # Not an elif chain: a vector/tuple/turtle first argument wins.
    if isinstance(x, Vec2D):
        pos = x
    elif isinstance(x, tuple):
        pos = Vec2D(*x)
    elif isinstance(x, TNavigator):
        pos = x._position
    x, y = pos - self._position
    # atan2 yields degrees counterclockwise from east; round away
    # floating-point noise, then convert to this mode's angle units,
    # offset and orientation.
    result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
    result /= self._degreesPerAU
    return (self._angleOffset + self._angleOrient*result) % self._fullcircle
def heading(self):
    """Return the turtle's current heading.

    No arguments.

    Example (for a Turtle instance named turtle):
    >>> turtle.left(67)
    >>> turtle.heading()
    67.0
    """
    dx, dy = self._orient
    # atan2 gives degrees counterclockwise from east; round away
    # floating-point noise before converting to the mode's angle units.
    degrees = round(math.atan2(dy, dx) * 180.0 / math.pi, 10) % 360.0
    degrees /= self._degreesPerAU
    return (self._angleOffset + self._angleOrient * degrees) % self._fullcircle
def setheading(self, to_angle):
    """Set the orientation of the turtle to to_angle.

    Aliases: setheading | seth

    Argument:
    to_angle -- a number (integer or float)

    Set the orientation of the turtle to to_angle.
    Here are some common directions in degrees:

     standard - mode:          logo-mode:
    -------------------|--------------------
       0 - east                0 - north
      90 - north              90 - east
     180 - west              180 - south
     270 - south             270 - west

    Example (for a Turtle instance named turtle):
    >>> turtle.setheading(90)
    >>> turtle.heading()
    90
    """
    delta = (to_angle - self.heading()) * self._angleOrient
    full = self._fullcircle
    half = full / 2.
    # Normalize the turn into [-half, half) so the turtle rotates the
    # short way round.
    self._rotate((delta + half) % full - half)
def circle(self, radius, extent=None, steps=None):
    """ Draw a circle with given radius.

    Arguments:
    radius -- a number
    extent (optional) -- a number
    steps (optional) -- an integer

    Draw a circle with given radius. The center is radius units left
    of the turtle; extent - an angle - determines which part of the
    circle is drawn. If extent is not given, draw the entire circle.
    If extent is not a full circle, one endpoint of the arc is the
    current pen position. Draw the arc in counterclockwise direction
    if radius is positive, otherwise in clockwise direction. Finally
    the direction of the turtle is changed by the amount of extent.

    As the circle is approximated by an inscribed regular polygon,
    steps determines the number of steps to use. If not given,
    it will be calculated automatically. Maybe used to draw regular
    polygons.

    call: circle(radius)                  # full circle
    --or: circle(radius, extent)          # arc
    --or: circle(radius, extent, steps)
    --or: circle(radius, steps=6)         # 6-sided polygon

    Example (for a Turtle instance named turtle):
    >>> turtle.circle(50)
    >>> turtle.circle(120, 180)  # semicircle
    """
    # Record the whole circle as one cumulative undo entry.
    if self.undobuffer:
        self.undobuffer.push(["seq"])
        self.undobuffer.cumulate = True
    speed = self.speed()
    if extent is None:
        extent = self._fullcircle
    if steps is None:
        # Heuristic: more steps for larger radii, scaled by the fraction
        # of the full circle actually drawn, capped at 60 steps.
        frac = abs(extent)/self._fullcircle
        steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
    w = 1.0 * extent / steps      # turn per step (in current angle units)
    w2 = 0.5 * w                  # half-turn used to center each chord
    # Chord length of one polygon segment.
    l = 2.0 * radius * math.sin(w2*math.pi/180.0*self._degreesPerAU)
    if radius < 0:
        # Negative radius: draw clockwise instead.
        l, w, w2 = -l, -w, -w2
    tr = self._tracer()
    dl = self._delay()
    if speed == 0:
        # speed 0 means "no animation": suppress intermediate updates.
        self._tracer(0, 0)
    else:
        self.speed(0)
    # Pre-rotate by half a step so the polygon straddles the tangent.
    self._rotate(w2)
    for i in range(steps):
        # Animate the forward moves at the user's speed, but do the
        # rotations instantly (speed 0).
        self.speed(speed)
        self._go(l)
        self.speed(0)
        self._rotate(w)
    self._rotate(-w2)
    if speed == 0:
        # Restore the previous tracer/delay settings.
        self._tracer(tr, dl)
    self.speed(speed)
    if self.undobuffer:
        self.undobuffer.cumulate = False
## three dummy methods to be implemented by child class:
## (no-ops here, so this navigator class is usable stand-alone)

def speed(self, s=0):
    """dummy method - to be overwritten by child class"""
def _tracer(self, a=None, b=None):
    """dummy method - to be overwritten by child class"""
def _delay(self, n=None):
    """dummy method - to be overwritten by child class"""
# Short and long-form aliases for the navigation methods above.
fd = forward
bk = back
backward = back
rt = right
lt = left
position = pos
setpos = goto
setposition = goto
seth = setheading
class TPen(object):
    """Drawing part of the RawTurtle.

    Implements the drawing properties: pen size, pen/fill color,
    pen up/down state, speed, visibility and the shape-transformation
    attributes (stretch, shear, tilt, outline).
    """
    def __init__(self, resizemode=_CFG["resizemode"]):
        self._resizemode = resizemode  # "auto" or "user" or "noresize"
        self.undobuffer = None
        # Call the base implementation explicitly so a subclass override
        # of _reset is not invoked during base-class initialization.
        TPen._reset(self)

    def _reset(self, pencolor=_CFG["pencolor"],
               fillcolor=_CFG["fillcolor"]):
        """Restore all pen attributes to their default values."""
        self._pensize = 1
        self._shown = True
        self._pencolor = pencolor
        self._fillcolor = fillcolor
        self._drawing = True            # pen is down
        self._speed = 3
        self._stretchfactor = (1., 1.)
        self._shearfactor = 0.
        self._tilt = 0.                 # radians
        self._shapetrafo = (1., 0., 0., 1.)  # 2x2 shape transform matrix
        self._outlinewidth = 1

    def resizemode(self, rmode=None):
        """Set resizemode to one of the values: "auto", "user", "noresize".

        (Optional) Argument:
        rmode -- one of the strings "auto", "user", "noresize"

        Different resizemodes have the following effects:
          - "auto" adapts the appearance of the turtle
            corresponding to the value of pensize.
          - "user" adapts the appearance of the turtle according to the
            values of stretchfactor and outlinewidth (outline),
            which are set by shapesize()
          - "noresize" no adaption of the turtle's appearance takes place.
        If no argument is given, return current resizemode.
        resizemode("user") is called by a call of shapesize with arguments.

        Examples (for a Turtle instance named turtle):
        >>> turtle.resizemode("noresize")
        >>> turtle.resizemode()
        'noresize'
        """
        if rmode is None:
            return self._resizemode
        rmode = rmode.lower()
        # Invalid mode strings are silently ignored.
        if rmode in ["auto", "user", "noresize"]:
            self.pen(resizemode=rmode)

    def pensize(self, width=None):
        """Set or return the line thickness.

        Aliases: pensize | width

        Argument:
        width -- positive number

        Set the line thickness to width or return it. If resizemode is set
        to "auto" and turtleshape is a polygon, that polygon is drawn with
        the same line thickness. If no argument is given, current pensize
        is returned.

        Example (for a Turtle instance named turtle):
        >>> turtle.pensize()
        1
        >>> turtle.pensize(10)   # from here on lines of width 10 are drawn
        """
        if width is None:
            return self._pensize
        self.pen(pensize=width)

    def penup(self):
        """Pull the pen up -- no drawing when moving.

        Aliases: penup | pu | up

        No argument

        Example (for a Turtle instance named turtle):
        >>> turtle.penup()
        """
        if not self._drawing:
            return
        self.pen(pendown=False)

    def pendown(self):
        """Pull the pen down -- drawing when moving.

        Aliases: pendown | pd | down

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.pendown()
        """
        if self._drawing:
            return
        self.pen(pendown=True)

    def isdown(self):
        """Return True if pen is down, False if it's up.

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.penup()
        >>> turtle.isdown()
        False
        >>> turtle.pendown()
        >>> turtle.isdown()
        True
        """
        return self._drawing

    def speed(self, speed=None):
        """ Return or set the turtle's speed.

        Optional argument:
        speed -- an integer in the range 0..10 or a speedstring (see below)

        Set the turtle's speed to an integer value in the range 0 .. 10.
        If no argument is given: return current speed.

        If input is a number greater than 10 or smaller than 0.5,
        speed is set to 0.
        Speedstrings are mapped to speedvalues in the following way:
            'fastest' :  0
            'fast'    :  10
            'normal'  :  6
            'slow'    :  3
            'slowest' :  1
        speeds from 1 to 10 enforce increasingly faster animation of
        line drawing and turtle turning.

        Attention:
        speed = 0 : *no* animation takes place. forward/back makes turtle jump
        and likewise left/right make the turtle turn instantly.

        Example (for a Turtle instance named turtle):
        >>> turtle.speed(3)
        """
        speeds = {'fastest':0, 'fast':10, 'normal':6, 'slow':3, 'slowest':1 }
        if speed is None:
            return self._speed
        if speed in speeds:
            speed = speeds[speed]
        elif 0.5 < speed < 10.5:
            speed = int(round(speed))
        else:
            # Out-of-range numbers mean "fastest" (no animation).
            speed = 0
        self.pen(speed=speed)

    def color(self, *args):
        """Return or set the pencolor and fillcolor.

        Arguments:
        Several input formats are allowed.
        They use 0, 1, 2, or 3 arguments as follows:

        color()
            Return the current pencolor and the current fillcolor
            as a pair of color specification strings as are returned
            by pencolor and fillcolor.
        color(colorstring), color((r,g,b)), color(r,g,b)
            inputs as in pencolor, set both, fillcolor and pencolor,
            to the given value.
        color(colorstring1, colorstring2),
        color((r1,g1,b1), (r2,g2,b2))
            equivalent to pencolor(colorstring1) and fillcolor(colorstring2)
            and analogously, if the other input format is used.

        If turtleshape is a polygon, outline and interior of that polygon
        is drawn with the newly set colors.
        For more info see: pencolor, fillcolor

        Example (for a Turtle instance named turtle):
        >>> turtle.color('red', 'green')
        >>> turtle.color()
        ('red', 'green')
        >>> colormode(255)
        >>> color((40, 80, 120), (160, 200, 240))
        >>> color()
        ('#285078', '#a0c8f0')
        """
        if args:
            l = len(args)
            if l == 1:
                pcolor = fcolor = args[0]
            elif l == 2:
                pcolor, fcolor = args
            elif l == 3:
                # Three numbers form a single (r, g, b) triple.
                pcolor = fcolor = args
            pcolor = self._colorstr(pcolor)
            fcolor = self._colorstr(fcolor)
            self.pen(pencolor=pcolor, fillcolor=fcolor)
        else:
            return self._color(self._pencolor), self._color(self._fillcolor)

    def pencolor(self, *args):
        """ Return or set the pencolor.

        Arguments:
        Four input formats are allowed:
          - pencolor()
            Return the current pencolor as color specification string,
            possibly in hex-number format (see example).
            May be used as input to another color/pencolor/fillcolor call.
          - pencolor(colorstring)
            s is a Tk color specification string, such as "red" or "yellow"
          - pencolor((r, g, b))
            *a tuple* of r, g, and b, which represent, an RGB color,
            and each of r, g, and b are in the range 0..colormode,
            where colormode is either 1.0 or 255
          - pencolor(r, g, b)
            r, g, and b represent an RGB color, and each of r, g, and b
            are in the range 0..colormode

        If turtleshape is a polygon, the outline of that polygon is drawn
        with the newly set pencolor.

        Example (for a Turtle instance named turtle):
        >>> turtle.pencolor('brown')
        >>> tup = (0.2, 0.8, 0.55)
        >>> turtle.pencolor(tup)
        >>> turtle.pencolor()
        '#33cc8c'
        """
        if args:
            color = self._colorstr(args)
            # Avoid a redundant redraw/undo entry when nothing changes.
            if color == self._pencolor:
                return
            self.pen(pencolor=color)
        else:
            return self._color(self._pencolor)

    def fillcolor(self, *args):
        """ Return or set the fillcolor.

        Arguments:
        Four input formats are allowed:
          - fillcolor()
            Return the current fillcolor as color specification string,
            possibly in hex-number format (see example).
            May be used as input to another color/pencolor/fillcolor call.
          - fillcolor(colorstring)
            s is a Tk color specification string, such as "red" or "yellow"
          - fillcolor((r, g, b))
            *a tuple* of r, g, and b, which represent, an RGB color,
            and each of r, g, and b are in the range 0..colormode,
            where colormode is either 1.0 or 255
          - fillcolor(r, g, b)
            r, g, and b represent an RGB color, and each of r, g, and b
            are in the range 0..colormode

        If turtleshape is a polygon, the interior of that polygon is drawn
        with the newly set fillcolor.

        Example (for a Turtle instance named turtle):
        >>> turtle.fillcolor('violet')
        >>> col = turtle.pencolor()
        >>> turtle.fillcolor(col)
        >>> turtle.fillcolor(0, .5, 0)
        """
        if args:
            color = self._colorstr(args)
            # Avoid a redundant redraw/undo entry when nothing changes.
            if color == self._fillcolor:
                return
            self.pen(fillcolor=color)
        else:
            return self._color(self._fillcolor)

    def showturtle(self):
        """Makes the turtle visible.

        Aliases: showturtle | st

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.hideturtle()
        >>> turtle.showturtle()
        """
        self.pen(shown=True)

    def hideturtle(self):
        """Makes the turtle invisible.

        Aliases: hideturtle | ht

        No argument.

        It's a good idea to do this while you're in the
        middle of a complicated drawing, because hiding
        the turtle speeds up the drawing observably.

        Example (for a Turtle instance named turtle):
        >>> turtle.hideturtle()
        """
        self.pen(shown=False)

    def isvisible(self):
        """Return True if the Turtle is shown, False if it's hidden.

        No argument.

        Example (for a Turtle instance named turtle):
        >>> turtle.hideturtle()
        >>> turtle.isvisible()
        False
        """
        return self._shown

    def pen(self, pen=None, **pendict):
        """Return or set the pen's attributes.

        Arguments:
        pen -- a dictionary with some or all of the below listed keys.
        **pendict -- one or more keyword-arguments with the below
                     listed keys as keywords.

        Return or set the pen's attributes in a 'pen-dictionary'
        with the following key/value pairs:
           "shown"         :   True/False
           "pendown"       :   True/False
           "pencolor"      :   color-string or color-tuple
           "fillcolor"     :   color-string or color-tuple
           "pensize"       :   positive number
           "speed"         :   number in range 0..10
           "resizemode"    :   "auto" or "user" or "noresize"
           "stretchfactor" :   (positive number, positive number)
           "shearfactor"   :   number
           "outline"       :   positive number
           "tilt"          :   number

        This dictionary can be used as argument for a subsequent
        pen()-call to restore the former pen-state. Moreover one
        or more of these attributes can be provided as keyword-arguments.
        This can be used to set several pen attributes in one statement.

        Examples (for a Turtle instance named turtle):
        >>> turtle.pen(fillcolor="black", pencolor="red", pensize=10)
        >>> turtle.pen()
        {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
        'pencolor': 'red', 'pendown': True, 'fillcolor': 'black',
        'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
        >>> penstate=turtle.pen()
        >>> turtle.color("yellow","")
        >>> turtle.penup()
        >>> turtle.pen()
        {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
        'pencolor': 'yellow', 'pendown': False, 'fillcolor': '',
        'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
        >>> p.pen(penstate, fillcolor="green")
        >>> p.pen()
        {'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
        'pencolor': 'red', 'pendown': True, 'fillcolor': 'green',
        'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
        """
        # Snapshot of the current state (also serves as the getter result).
        _pd =  {"shown"         : self._shown,
                "pendown"       : self._drawing,
                "pencolor"      : self._pencolor,
                "fillcolor"     : self._fillcolor,
                "pensize"       : self._pensize,
                "speed"         : self._speed,
                "resizemode"    : self._resizemode,
                "stretchfactor" : self._stretchfactor,
                "shearfactor"   : self._shearfactor,
                "outline"       : self._outlinewidth,
                "tilt"          : self._tilt
               }
        if not (pen or pendict):
            return _pd
        # Merge positional dict and keyword arguments; keywords win.
        if isinstance(pen, dict):
            p = pen
        else:
            p = {}
        p.update(pendict)
        # Record the previous values of the touched keys for undo.
        _p_buf = {}
        for key in p:
            _p_buf[key] = _pd[key]
        if self.undobuffer:
            self.undobuffer.push(("pen", _p_buf))
        # Changes to pendown/pencolor/pensize end the current line segment.
        newLine = False
        if "pendown" in p:
            if self._drawing != p["pendown"]:
                newLine = True
        if "pencolor" in p:
            if isinstance(p["pencolor"], tuple):
                p["pencolor"] = self._colorstr((p["pencolor"],))
            if self._pencolor != p["pencolor"]:
                newLine = True
        if "pensize" in p:
            if self._pensize !=  p["pensize"]:
                newLine = True
        if newLine:
            self._newLine()
        # Apply the new attribute values.
        if "pendown" in p:
            self._drawing = p["pendown"]
        if "pencolor" in p:
            self._pencolor = p["pencolor"]
        if "pensize" in p:
            self._pensize = p["pensize"]
        if "fillcolor" in p:
            if isinstance(p["fillcolor"], tuple):
                p["fillcolor"] = self._colorstr((p["fillcolor"],))
            self._fillcolor = p["fillcolor"]
        if "speed" in p:
            self._speed = p["speed"]
        if "resizemode" in p:
            self._resizemode = p["resizemode"]
        if "stretchfactor" in p:
            sf = p["stretchfactor"]
            if isinstance(sf, (int, float)):
                # A single number stretches uniformly in both directions.
                sf = (sf, sf)
            self._stretchfactor = sf
        if "shearfactor" in p:
            self._shearfactor = p["shearfactor"]
        if "outline" in p:
            self._outlinewidth = p["outline"]
        if "shown" in p:
            self._shown = p["shown"]
        if "tilt" in p:
            self._tilt = p["tilt"]
        # Any change to stretch/tilt/shear invalidates the cached 2x2
        # shape transformation matrix; rebuild it.
        if "stretchfactor" in p or "tilt" in p or "shearfactor" in p:
            scx, scy = self._stretchfactor
            shf = self._shearfactor
            sa, ca = math.sin(self._tilt), math.cos(self._tilt)
            self._shapetrafo = ( scx*ca, scy*(shf*ca + sa),
                                -scx*sa, scy*(ca - shf*sa))
        self._update()

    ## three dummy methods to be implemented by child class:

    def _newLine(self, usePos = True):
        """dummy method - to be overwritten by child class"""
    def _update(self, count=True, forced=False):
        """dummy method - to be overwritten by child class"""
    def _color(self, args):
        """dummy method - to be overwritten by child class"""
    def _colorstr(self, args):
        """dummy method - to be overwritten by child class"""

    # Short and long-form aliases for the pen methods above.
    width = pensize
    up = penup
    pu = penup
    pd = pendown
    down = pendown
    st = showturtle
    ht = hideturtle
class _TurtleImage(object):
    """Helper class: Datatype to store Turtle attributes
    """

    def __init__(self, screen, shapeIndex):
        self.screen = screen
        self._type = None       # one of "polygon", "image", "compound"
        self._setshape(shapeIndex)

    def _setshape(self, shapeIndex):
        """Bind this turtle image to the screen shape named shapeIndex,
        (re)creating canvas items only when the shape type changes."""
        screen = self.screen
        self.shapeIndex = shapeIndex
        # Same item type as before: existing canvas item can be reused.
        if self._type == "polygon" == screen._shapes[shapeIndex]._type:
            return
        if self._type == "image" == screen._shapes[shapeIndex]._type:
            return
        # Type changed: delete the old canvas item(s) ...
        if self._type in ["image", "polygon"]:
            screen._delete(self._item)
        elif self._type == "compound":
            for item in self._item:
                screen._delete(item)
        # ... and create fresh item(s) of the new type. A compound shape
        # needs one polygon item per component.
        self._type = screen._shapes[shapeIndex]._type
        if self._type == "polygon":
            self._item = screen._createpoly()
        elif self._type == "image":
            self._item = screen._createimage(screen._shapes["blank"]._data)
        elif self._type == "compound":
            self._item = [screen._createpoly() for item in
                          screen._shapes[shapeIndex]._data]
class RawTurtle(TPen, TNavigator):
"""Animation part of the RawTurtle.
Puts RawTurtle upon a TurtleScreen and provides tools for
its animation.
"""
screens = []
def __init__(self, canvas=None,
             shape=_CFG["shape"],
             undobuffersize=_CFG["undobuffersize"],
             visible=_CFG["visible"]):
    # Resolve the canvas argument to a TurtleScreen, registering
    # newly seen screens in the class-level RawTurtle.screens list.
    if isinstance(canvas, _Screen):
        self.screen = canvas
    elif isinstance(canvas, TurtleScreen):
        if canvas not in RawTurtle.screens:
            RawTurtle.screens.append(canvas)
        self.screen = canvas
    elif isinstance(canvas, (ScrolledCanvas, Canvas)):
        # A raw Tk canvas: reuse the TurtleScreen already wrapping it...
        for screen in RawTurtle.screens:
            if screen.cv == canvas:
                self.screen = screen
                break
        else:
            # ...for-else: none found, so wrap it in a new TurtleScreen.
            self.screen = TurtleScreen(canvas)
            RawTurtle.screens.append(self.screen)
    else:
        raise TurtleGraphicsError("bad canvas argument %s" % canvas)
    screen = self.screen
    # Initialize both base classes and register with the screen.
    TNavigator.__init__(self, screen.mode())
    TPen.__init__(self)
    screen._turtles.append(self)
    self.drawingLineItem = screen._createline()
    self.turtle = _TurtleImage(screen, shape)
    self._poly = None              # polygon currently being recorded, if any
    self._creatingPoly = False
    self._fillitem = self._fillpath = None
    self._shown = visible
    self._hidden_from_screen = False
    self.currentLineItem = screen._createline()
    self.currentLine = [self._position]
    self.items = [self.currentLineItem]
    self.stampItems = []
    self._undobuffersize = undobuffersize
    self.undobuffer = Tbuffer(undobuffersize)
    self._update()
def reset(self):
    """Delete the turtle's drawings and restore its default values.

    No argument.

    Delete the turtle's drawings from the screen, re-center the turtle
    and set variables to the default values.

    Example (for a Turtle instance named turtle):
    >>> turtle.position()
    (0.00,-22.00)
    >>> turtle.heading()
    100.0
    >>> turtle.reset()
    >>> turtle.position()
    (0.00,0.00)
    >>> turtle.heading()
    0.0
    """
    # Reset navigation state (position/heading), then pen attributes,
    # then erase this turtle's drawings and redraw it.
    TNavigator.reset(self)
    TPen._reset(self)
    self._clear()
    self._drawturtle()
    self._update()
def setundobuffer(self, size):
    """Set or disable undobuffer.

    Argument:
    size -- an integer or None

    If size is an integer an empty undobuffer of given size is installed.
    Size gives the maximum number of turtle-actions that can be undone
    by the undo() function. If size is None (or not positive), no
    undobuffer is present.

    Example (for a Turtle instance named turtle):
    >>> turtle.setundobuffer(42)
    """
    # A missing or non-positive size disables undo entirely.
    wants_buffer = size is not None and size > 0
    self.undobuffer = Tbuffer(size) if wants_buffer else None
def undobufferentries(self):
    """Return count of entries in the undobuffer.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> while undobufferentries():
    ...     undo()
    """
    buf = self.undobuffer
    # A disabled undobuffer (None) simply has zero entries.
    return buf.nr_of_items() if buf is not None else 0
def _clear(self):
    """Delete all of pen's drawings"""
    self._fillitem = self._fillpath = None
    # Remove every canvas item this pen created ...
    for item in self.items:
        self.screen._delete(item)
    # ... then start a fresh current line at the present position
    # (only if the pen is down).
    self.currentLineItem = self.screen._createline()
    self.currentLine = []
    if self._drawing:
        self.currentLine.append(self._position)
    self.items = [self.currentLineItem]
    self.clearstamps()
    # Drawing history is gone, so reinstall a fresh undo buffer.
    self.setundobuffer(self._undobuffersize)
def clear(self):
    """Delete the turtle's drawings from the screen. Do not move turtle.

    No arguments.

    Delete the turtle's drawings from the screen. Do not move turtle.
    State and position of the turtle as well as drawings of other
    turtles are not affected.

    Examples (for a Turtle instance named turtle):
    >>> turtle.clear()
    """
    self._clear()
    self._update()
def _update_data(self):
    # Count this update; with tracer(n) only every n-th update (counter
    # wrapped back to 0) actually draws.
    self.screen._incrementudc()
    if self.screen._updatecounter != 0:
        return
    # Flush the pending line segment (needs at least two points).
    if len(self.currentLine)>1:
        self.screen._drawline(self.currentLineItem, self.currentLine,
                              self._pencolor, self._pensize)
def _update(self):
    """Perform a Turtle-data update.
    """
    screen = self.screen
    if screen._tracing == 0:
        # Animation off: nothing is drawn until tracer is re-enabled.
        return
    elif screen._tracing == 1:
        # Normal animation: draw this turtle and delay for visible motion.
        self._update_data()
        self._drawturtle()
        screen._update()                  # TurtleScreenBase
        screen._delay(screen._delayvalue) # TurtleScreenBase
    else:
        # Batched tracing (tracer(n), n > 1): redraw all turtles only on
        # every n-th update.
        self._update_data()
        if screen._updatecounter == 0:
            for t in screen.turtles():
                t._drawturtle()
            screen._update()
def _tracer(self, flag=None, delay=None):
    """Turns turtle animation on/off and set delay for update drawings.

    Optional arguments:
    n -- nonnegative  integer
    delay -- nonnegative  integer

    If n is given, only each n-th regular screen update is really performed.
    (Can be used to accelerate the drawing of complex graphics.)
    Second arguments sets delay value (see RawTurtle.delay())

    Example (for a Turtle instance named turtle):
    >>> turtle.tracer(8, 25)
    >>> dist = 2
    >>> for i in range(200):
    ...     turtle.fd(dist)
    ...     turtle.rt(90)
    ...     dist += 2
    """
    # Thin wrapper: tracing is a per-screen, not per-turtle, setting.
    return self.screen.tracer(flag, delay)
def _color(self, args):
    # Delegate: the screen knows the current colormode.
    return self.screen._color(args)

def _colorstr(self, args):
    # Delegate conversion of color arguments to a Tk color string.
    return self.screen._colorstr(args)
def _cc(self, args):
"""Convert colortriples to hexstrings.
"""
if isinstance(args, str):
return args
try:
r, g, b = args
except:
raise TurtleGraphicsError("bad color arguments: %s" % str(args))
if self.screen._colormode == 1.0:
r, g, b = [round(255.0*x) for x in (r, g, b)]
if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):
raise TurtleGraphicsError("bad color sequence: %s" % str(args))
return "#%02x%02x%02x" % (r, g, b)
def clone(self):
    """Create and return a clone of the turtle.

    No argument.

    Create and return a clone of the turtle with same position, heading
    and turtle properties.

    Example (for a Turtle instance named mick):
    mick = Turtle()
    joe = mick.clone()
    """
    screen = self.screen
    self._newLine(self._drawing)
    turtle = self.turtle
    # Temporarily detach the screen-bound attributes so deepcopy can
    # copy the rest of the turtle's state, then reattach them.
    self.screen = None
    self.turtle = None  # to make self deepcopy-able
    q = deepcopy(self)
    self.screen = screen
    self.turtle = turtle
    # The clone shares the screen but needs its own canvas items.
    q.screen = screen
    q.turtle = _TurtleImage(screen, self.turtle.shapeIndex)
    screen._turtles.append(q)
    ttype = screen._shapes[self.turtle.shapeIndex]._type
    if ttype == "polygon":
        q.turtle._item = screen._createpoly()
    elif ttype == "image":
        q.turtle._item = screen._createimage(screen._shapes["blank"]._data)
    elif ttype == "compound":
        # One polygon item per component of the compound shape.
        q.turtle._item = [screen._createpoly() for item in
                          screen._shapes[self.turtle.shapeIndex]._data]
    q.currentLineItem = screen._createline()
    q._update()
    return q
def shape(self, name=None):
    """Set turtle shape to shape with given name / return current shapename.

    Optional argument:
    name -- a string, which is a valid shapename

    Set turtle shape to shape with given name or, if name is not given,
    return name of current shape.
    Shape with name must exist in the TurtleScreen's shape dictionary.
    Initially there are the following polygon shapes:
    'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'.
    To learn about how to deal with shapes see Screen-method register_shape.

    Raises TurtleGraphicsError if name is not a registered shape.

    Example (for a Turtle instance named turtle):
    >>> turtle.shape()
    'arrow'
    >>> turtle.shape("turtle")
    >>> turtle.shape()
    'turtle'
    """
    if name is None:
        return self.turtle.shapeIndex
    # PEP 8 membership test ("name not in" rather than "not name in").
    if name not in self.screen.getshapes():
        raise TurtleGraphicsError("There is no shape named %s" % name)
    self.turtle._setshape(name)
    self._update()
def shapesize(self, stretch_wid=None, stretch_len=None, outline=None):
    """Set/return turtle's stretchfactors/outline. Set resizemode to "user".

    Optional arguments:
    stretch_wid : positive number
    stretch_len : positive number
    outline  : positive number

    Return or set the pen's attributes x/y-stretchfactors and/or outline.
    Set resizemode to "user".
    If and only if resizemode is set to "user", the turtle will be displayed
    stretched according to its stretchfactors:
    stretch_wid is stretchfactor perpendicular to orientation
    stretch_len is stretchfactor in direction of turtles orientation.
    outline determines the width of the shapes's outline.

    Examples (for a Turtle instance named turtle):
    >>> turtle.resizemode("user")
    >>> turtle.shapesize(5, 5, 12)
    >>> turtle.shapesize(outline=8)
    """
    # No arguments at all: act as a getter.
    if stretch_wid is stretch_len is outline is None:
        stretch_wid, stretch_len = self._stretchfactor
        return stretch_wid, stretch_len, self._outlinewidth
    if stretch_wid == 0 or stretch_len == 0:
        raise TurtleGraphicsError("stretch_wid/stretch_len must not be zero")
    # Merge the given values with the current stretchfactors; a single
    # stretch_wid stretches uniformly in both directions.
    if stretch_wid is not None:
        if stretch_len is None:
            stretchfactor = stretch_wid, stretch_wid
        else:
            stretchfactor = stretch_wid, stretch_len
    elif stretch_len is not None:
        stretchfactor = self._stretchfactor[0], stretch_len
    else:
        stretchfactor = self._stretchfactor
    if outline is None:
        outline = self._outlinewidth
    # Setting any of these implies resizemode "user".
    self.pen(resizemode="user",
             stretchfactor=stretchfactor, outline=outline)
def shearfactor(self, shear=None):
    """Set or return the current shearfactor.

    Optional argument: shear -- number, tangent of the shear angle

    Shear the turtleshape according to the given shearfactor shear,
    which is the tangent of the shear angle. DO NOT change the
    turtle's heading (direction of movement).
    If shear is not given: return the current shearfactor, i. e. the
    tangent of the shear angle, by which lines parallel to the
    heading of the turtle are sheared.

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.shearfactor(0.5)
    >>> turtle.shearfactor()
    >>> 0.5
    """
    if shear is not None:
        # Setting goes through pen() so it is undoable and redraws,
        # and it forces resizemode "user".
        self.pen(resizemode="user", shearfactor=shear)
        return None
    return self._shearfactor
def settiltangle(self, angle):
    """Rotate the turtleshape to point in the specified direction

    Argument: angle -- number

    Rotate the turtleshape to point in the direction specified by angle,
    regardless of its current tilt-angle. DO NOT change the turtle's
    heading (direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.settiltangle(45)
    >>> stamp()
    >>> turtle.fd(50)
    """
    # Convert from the mode's angle units/orientation into radians,
    # normalized to [0, 2*pi); stored via pen() so it is undoable.
    units = -angle * self._degreesPerAU * self._angleOrient
    radians_ = (units * math.pi / 180.0) % (2*math.pi)
    self.pen(resizemode="user", tilt=radians_)
def tiltangle(self, angle=None):
    """Set or return the current tilt-angle.

    Optional argument: angle -- number

    Rotate the turtleshape to point in the direction specified by angle,
    regardless of its current tilt-angle. DO NOT change the turtle's
    heading (direction of movement).
    If angle is not given: return the current tilt-angle, i. e. the angle
    between the orientation of the turtleshape and the heading of the
    turtle (its direction of movement).

    (Deprecated since Python 3.1)

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.tilt(45)
    >>> turtle.tiltangle()
    """
    if angle is not None:
        self.settiltangle(angle)
        return None
    # Convert the internally stored radians back into the mode's
    # angle units and orientation convention.
    degrees = -self._tilt * (180.0/math.pi) * self._angleOrient
    return (degrees / self._degreesPerAU) % self._fullcircle
def tilt(self, angle):
    """Rotate the turtleshape by angle.

    Argument:
    angle - a number

    Rotate the turtleshape by angle from its current tilt-angle,
    but do NOT change the turtle's heading (direction of movement).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("circle")
    >>> turtle.shapesize(5,2)
    >>> turtle.tilt(30)
    >>> turtle.fd(50)
    """
    # Relative rotation: add the delta to the current tilt and set it
    # as an absolute angle.
    current = self.tiltangle()
    self.settiltangle(current + angle)
def shapetransform(self, t11=None, t12=None, t21=None, t22=None):
    """Set or return the current transformation matrix of the turtle shape.

    Optional arguments: t11, t12, t21, t22 -- numbers.

    If none of the matrix elements are given, return the transformation
    matrix. Otherwise set the given elements and transform the
    turtleshape according to the matrix consisting of first row t11, t12
    and second row t21, t22. Elements that are not given keep their
    current value. Modify stretchfactor, shearfactor and tiltangle
    according to the given matrix.

    Raises TurtleGraphicsError if the resulting matrix is singular.

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("square")
    >>> turtle.shapesize(4,2)
    >>> turtle.shearfactor(-0.5)
    >>> turtle.shapetransform()
    (4.0, -1.0, -0.0, 2.0)
    """
    if t11 is t12 is t21 is t22 is None:
        return self._shapetrafo
    # Merge the given elements into the current matrix.
    m11, m12, m21, m22 = self._shapetrafo
    if t11 is not None: m11 = t11
    if t12 is not None: m12 = t12
    if t21 is not None: m21 = t21
    if t22 is not None: m22 = t22
    # Check the determinant of the MERGED matrix. Using the raw t-values
    # here would raise TypeError whenever only some elements were given,
    # because the remaining ones are still None.
    if m11 * m22 - m12 * m21 == 0:
        raise TurtleGraphicsError("Bad shape transform matrix: must not be singular")
    self._shapetrafo = (m11, m12, m21, m22)
    # Decompose the matrix into tilt (rotation), stretch and shear so the
    # pen attributes stay consistent with the matrix.
    alfa = math.atan2(-m21, m11) % (2 * math.pi)
    sa, ca = math.sin(alfa), math.cos(alfa)
    a11, a12, a21, a22 = (ca*m11 - sa*m21, ca*m12 - sa*m22,
                          sa*m11 + ca*m21, sa*m12 + ca*m22)
    self._stretchfactor = a11, a22
    self._shearfactor = a12/a22
    self._tilt = alfa
    self.pen(resizemode="user")
def _polytrafo(self, poly):
    """Computes transformed polygon shapes from a shape
    according to current position and heading.
    """
    screen = self.screen
    p0, p1 = self._position
    e0, e1 = self._orient
    # Adjust the heading unit vector for anisotropic world coordinates
    # (different x/y scales), then re-normalize it.
    e = Vec2D(e0, e1 * screen.yscale / screen.xscale)
    e0, e1 = (1.0 / abs(e)) * e
    # Rotate each shape point into the turtle's orientation, scale to
    # world coordinates, and translate to the turtle's position.
    return [(p0+(e1*x+e0*y)/screen.xscale, p1+(-e0*x+e1*y)/screen.yscale)
            for (x, y) in poly]
def get_shapepoly(self):
    """Return the current shape polygon as tuple of coordinate pairs.

    No argument. Returns None if the current shape is not of type
    "polygon" (i.e. it is an image or compound shape).

    Examples (for a Turtle instance named turtle):
    >>> turtle.shape("square")
    >>> turtle.shapetransform(4, -1, 0, 2)
    >>> turtle.get_shapepoly()
    ((50, -20), (30, 20), (-50, 20), (-30, -20))
    """
    current = self.screen._shapes[self.turtle.shapeIndex]
    if current._type != "polygon":
        return None
    return self._getshapepoly(current._data, current._type == "compound")
def _getshapepoly(self, polygon, compound=False):
"""Calculate transformed shape polygon according to resizemode
and shapetransform.
"""
if self._resizemode == "user" or compound:
t11, t12, t21, t22 = self._shapetrafo
elif self._resizemode == "auto":
l = max(1, self._pensize/5.0)
t11, t12, t21, t22 = l, 0, 0, l
elif self._resizemode == "noresize":
return polygon
return tuple([(t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon])
def _drawturtle(self):
    """Manages the correct rendering of the turtle with respect to
    its shape, resizemode, stretch and tilt etc."""
    screen = self.screen
    shape = screen._shapes[self.turtle.shapeIndex]
    ttype = shape._type
    titem = self.turtle._item
    if self._shown and screen._updatecounter == 0 and screen._tracing > 0:
        self._hidden_from_screen = False
        tshape = shape._data
        if ttype == "polygon":
            # Outline width depends on resizemode (cf. pensize/shapesize).
            if self._resizemode == "noresize": w = 1
            elif self._resizemode == "auto": w = self._pensize
            else: w = self._outlinewidth
            shape = self._polytrafo(self._getshapepoly(tshape))
            fc, oc = self._fillcolor, self._pencolor
            screen._drawpoly(titem, shape, fill=fc, outline=oc,
                             width=w, top=True)
        elif ttype == "image":
            screen._drawimage(titem, self._position, tshape)
        elif ttype == "compound":
            # One canvas item per component, each with its own colors.
            for item, (poly, fc, oc) in zip(titem, tshape):
                poly = self._polytrafo(self._getshapepoly(poly, True))
                screen._drawpoly(item, poly, fill=self._cc(fc),
                                 outline=self._cc(oc), width=self._outlinewidth, top=True)
    else:
        # Turtle must not be visible: draw degenerate/blank items once
        # and remember that via _hidden_from_screen to avoid repeating
        # the canvas operations.
        if self._hidden_from_screen:
            return
        if ttype == "polygon":
            screen._drawpoly(titem, ((0, 0), (0, 0), (0, 0)), "", "")
        elif ttype == "image":
            screen._drawimage(titem, self._position,
                              screen._shapes["blank"]._data)
        elif ttype == "compound":
            for item in titem:
                screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), "", "")
        self._hidden_from_screen = True
############################## stamp stuff ###############################
def stamp(self):
    """Stamp a copy of the turtleshape onto the canvas and return its id.

    No argument.

    Stamp a copy of the turtle shape onto the canvas at the current
    turtle position. Return a stamp_id for that stamp, which can be
    used to delete it by calling clearstamp(stamp_id).

    Example (for a Turtle instance named turtle):
    >>> turtle.color("blue")
    >>> turtle.stamp()
    13
    >>> turtle.fd(50)
    """
    screen = self.screen
    shape = screen._shapes[self.turtle.shapeIndex]
    ttype = shape._type
    tshape = shape._data
    if ttype == "polygon":
        stitem = screen._createpoly()
        # Same width logic as _drawturtle.
        if self._resizemode == "noresize": w = 1
        elif self._resizemode == "auto": w = self._pensize
        else: w = self._outlinewidth
        shape = self._polytrafo(self._getshapepoly(tshape))
        fc, oc = self._fillcolor, self._pencolor
        screen._drawpoly(stitem, shape, fill=fc, outline=oc,
                         width=w, top=True)
    elif ttype == "image":
        stitem = screen._createimage("")
        screen._drawimage(stitem, self._position, tshape)
    elif ttype == "compound":
        # A compound stamp is a tuple of canvas items, one per component.
        stitem = []
        for element in tshape:
            item = screen._createpoly()
            stitem.append(item)
        stitem = tuple(stitem)
        for item, (poly, fc, oc) in zip(stitem, tshape):
            poly = self._polytrafo(self._getshapepoly(poly, True))
            screen._drawpoly(item, poly, fill=self._cc(fc),
                             outline=self._cc(oc), width=self._outlinewidth, top=True)
    self.stampItems.append(stitem)
    self.undobuffer.push(("stamp", stitem))
    return stitem
def _clearstamp(self, stampid):
    """does the work for clearstamp() and clearstamps()
    """
    if stampid in self.stampItems:
        if isinstance(stampid, tuple):
            # Compound stamps consist of several canvas items.
            for subitem in stampid:
                self.screen._delete(subitem)
        else:
            self.screen._delete(stampid)
        self.stampItems.remove(stampid)
    # Delete stampitem from undobuffer if necessary
    # if clearstamp is called directly.
    item = ("stamp", stampid)
    buf = self.undobuffer
    if item not in buf.buffer:
        return
    index = buf.buffer.index(item)
    buf.buffer.remove(item)
    # Keep the ring-buffer pointer consistent and pad with a no-op entry
    # so the buffer length stays unchanged.
    if index <= buf.ptr:
        buf.ptr = (buf.ptr - 1) % buf.bufsize
    buf.buffer.insert((buf.ptr+1)%buf.bufsize, [None])
def clearstamp(self, stampid):
    """Delete stamp with given stampid.

    Argument:
    stampid -- an integer, must be return value of previous stamp() call.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("blue")
    >>> astamp = turtle.stamp()
    >>> turtle.fd(50)
    >>> turtle.clearstamp(astamp)
    """
    # Delegate the removal, then refresh the screen once.
    self._clearstamp(stampid)
    self._update()
def clearstamps(self, n=None):
    """Delete all or first/last n of turtle's stamps.

    Optional argument:
    n -- an integer

    If n is None, delete all of pen's stamps,
    else if n > 0 delete first n stamps
    else if n < 0 delete last n stamps.

    Example (for a Turtle instance named turtle):
    >>> for i in range(8):
    ...     turtle.stamp(); turtle.fd(30)
    ...
    >>> turtle.clearstamps(2)
    >>> turtle.clearstamps(-2)
    >>> turtle.clearstamps()
    """
    # Snapshot the victims first: _clearstamp mutates self.stampItems.
    if n is None:
        doomed = list(self.stampItems)
    elif n >= 0:
        doomed = self.stampItems[:n]
    else:
        doomed = self.stampItems[n:]
    for stampid in doomed:
        self._clearstamp(stampid)
    self._update()
def _goto(self, end):
    """Move the pen to the point end, thereby drawing a line
    if pen is down. All other methods for turtle movement depend
    on this one.
    """
    ## Version with undo-stuff
    go_modes = ( self._drawing,
                 self._pencolor,
                 self._pensize,
                 isinstance(self._fillpath, list))
    screen = self.screen
    # Record enough state to fully reverse this move in _undogoto().
    undo_entry = ("go", self._position, end, go_modes,
                  (self.currentLineItem,
                   self.currentLine[:],
                   screen._pointlist(self.currentLineItem),
                   self.items[:])
                  )
    if self.undobuffer:
        self.undobuffer.push(undo_entry)
    start = self._position
    if self._speed and screen._tracing == 1:
        # Animate: draw the move as a series of short hops.
        diff = (end-start)
        diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
        nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
        delta = diff * (1.0/nhops)
        for n in range(1, nhops):
            if n == 1:
                top = True
            else:
                top = False
            self._position = start + delta * n
            if self._drawing:
                screen._drawline(self.drawingLineItem,
                                 (start, self._position),
                                 self._pencolor, self._pensize, top)
            self._update()
        if self._drawing:
            # Collapse the temporary animation line to invisible.
            screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
                             fill="", width=self._pensize)
    # Turtle now at end,
    if self._drawing: # now update currentLine
        self.currentLine.append(end)
    if isinstance(self._fillpath, list):
        self._fillpath.append(end)
        ###### inheritance!!!!!!!!!!!!!!!!!!!!!!
    self._position = end
    if self._creatingPoly:
        self._poly.append(end)
    if len(self.currentLine) > 42: # 42! answer to the ultimate question
                                   # of life, the universe and everything
        self._newLine()
    self._update() #count=True)
def _undogoto(self, entry):
    """Reverse a _goto. Used for undo()
    """
    # Unpack the snapshot recorded by _goto().
    old, new, go_modes, coodata = entry
    drawing, pc, ps, filling = go_modes
    cLI, cL, pl, items = coodata
    screen = self.screen
    if abs(self._position - new) > 0.5:
        # Sanity check: we should currently be at the move's endpoint.
        print ("undogoto: HALLO-DA-STIMMT-WAS-NICHT!")
    # restore former situation
    self.currentLineItem = cLI
    self.currentLine = cL
    if pl == [(0, 0), (0, 0)]:
        usepc = ""
    else:
        usepc = pc
    screen._drawline(cLI, pl, fill=usepc, width=ps)
    # Remove any line items created after the snapshot was taken.
    todelete = [i for i in self.items if (i not in items) and
                (screen._type(i) == "line")]
    for i in todelete:
        screen._delete(i)
        self.items.remove(i)
    start = old
    if self._speed and screen._tracing == 1:
        # Animate the reverse move just like a forward _goto.
        diff = old - new
        diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
        nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
        delta = diff * (1.0/nhops)
        for n in range(1, nhops):
            if n == 1:
                top = True
            else:
                top = False
            self._position = new + delta * n
            if drawing:
                screen._drawline(self.drawingLineItem,
                                 (start, self._position),
                                 pc, ps, top)
            self._update()
        if drawing:
            screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
                             fill="", width=ps)
    # Turtle now at position old,
    self._position = old
    ## if undo is done during creating a polygon, the last vertex
    ## will be deleted. if the polygon is entirely deleted,
    ## creatingPoly will be set to False.
    ## Polygons created before the last one will not be affected by undo()
    if self._creatingPoly:
        if len(self._poly) > 0:
            self._poly.pop()
        if self._poly == []:
            self._creatingPoly = False
            self._poly = None
    if filling:
        if self._fillpath == []:
            self._fillpath = None
            print("Unwahrscheinlich in _undogoto!")
        elif self._fillpath is not None:
            self._fillpath.pop()
    self._update() #count=True)
def _rotate(self, angle):
    """Turns pen clockwise by angle.
    """
    if self.undobuffer:
        self.undobuffer.push(("rot", angle, self._degreesPerAU))
    # Convert from angle-units (degrees/radians/...) to degrees.
    angle *= self._degreesPerAU
    neworient = self._orient.rotate(angle)
    tracing = self.screen._tracing
    if tracing == 1 and self._speed > 0:
        # Animate the turn in small angular steps.
        anglevel = 3.0 * self._speed
        steps = 1 + int(abs(angle)/anglevel)
        delta = 1.0*angle/steps
        for _ in range(steps):
            self._orient = self._orient.rotate(delta)
            self._update()
    # Set the exact final orientation to avoid accumulated rounding error.
    self._orient = neworient
    self._update()
def _newLine(self, usePos=True):
    """Closes current line item and starts a new one.
    Remark: if current line became too long, animation
    performance (via _drawline) slowed down considerably.
    """
    if len(self.currentLine) > 1:
        # Render the finished polyline and start a fresh canvas item.
        self.screen._drawline(self.currentLineItem, self.currentLine,
                              self._pencolor, self._pensize)
        self.currentLineItem = self.screen._createline()
        self.items.append(self.currentLineItem)
    else:
        # Nothing worth keeping; just reuse the existing item.
        self.screen._drawline(self.currentLineItem, top=True)
        self.currentLine = []
    if usePos:
        # Start the new line at the current position.
        self.currentLine = [self._position]
def filling(self):
    """Return fillstate (True if filling, False else).

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_fill()
    >>> if turtle.filling():
    ...     turtle.pensize(5)
    ... else:
    ...     turtle.pensize(3)
    """
    # A fill is in progress exactly while _fillpath is a vertex list.
    return isinstance(self._fillpath, list)
def begin_fill(self):
    """Called just before drawing a shape to be filled.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("black", "red")
    >>> turtle.begin_fill()
    >>> turtle.circle(60)
    >>> turtle.end_fill()
    """
    if not self.filling():
        # Create the canvas polygon that end_fill() will draw into.
        self._fillitem = self.screen._createpoly()
        self.items.append(self._fillitem)
    # (Re)start recording vertices from the current position.
    self._fillpath = [self._position]
    self._newLine()
    if self.undobuffer:
        self.undobuffer.push(("beginfill", self._fillitem))
    self._update()
def end_fill(self):
    """Fill the shape drawn after the call begin_fill().

    No argument.

    Example (for a Turtle instance named turtle):
    >>> turtle.color("black", "red")
    >>> turtle.begin_fill()
    >>> turtle.circle(60)
    >>> turtle.end_fill()
    """
    if self.filling():
        # A polygon needs at least 3 vertices to enclose an area.
        if len(self._fillpath) > 2:
            self.screen._drawpoly(self._fillitem, self._fillpath,
                                  fill=self._fillcolor)
        if self.undobuffer:
            self.undobuffer.push(("dofill", self._fillitem))
        self._fillitem = self._fillpath = None
        self._update()
def dot(self, size=None, *color):
    """Draw a dot with diameter size, using color.

    Optional arguments:
    size -- an integer >= 1 (if given)
    color -- a colorstring or a numeric color tuple

    Draw a circular dot with diameter size, using color.
    If size is not given, the maximum of pensize+4 and 2*pensize is used.

    Example (for a Turtle instance named turtle):
    >>> turtle.dot()
    >>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50)
    """
    if not color:
        if isinstance(size, (str, tuple)):
            # dot("blue") / dot((r,g,b)): the first positional argument
            # is really a color, not a size.
            color = self._colorstr(size)
            size = self._pensize + max(self._pensize, 4)
        else:
            color = self._pencolor
            if not size:
                size = self._pensize + max(self._pensize, 4)
    else:
        if size is None:
            size = self._pensize + max(self._pensize, 4)
        color = self._colorstr(color)
    if hasattr(self.screen, "_dot"):
        # Fast path: the screen supports dots natively.
        item = self.screen._dot(self._position, size, color)
        self.items.append(item)
        if self.undobuffer:
            self.undobuffer.push(("dot", item))
    else:
        # Fallback: emulate the dot with a zero-length fat pen stroke,
        # restoring the pen state afterwards.
        pen = self.pen()
        if self.undobuffer:
            self.undobuffer.push(["seq"])
            self.undobuffer.cumulate = True
        try:
            if self.resizemode() == 'auto':
                self.ht()
            self.pendown()
            self.pensize(size)
            self.pencolor(color)
            self.forward(0)
        finally:
            self.pen(pen)
        if self.undobuffer:
            self.undobuffer.cumulate = False
def _write(self, txt, align, font):
"""Performs the writing for write()
"""
item, end = self.screen._write(self._position, txt, align, font,
self._pencolor)
self.items.append(item)
if self.undobuffer:
self.undobuffer.push(("wri", item))
return end
def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")):
    """Write text at the current turtle position.

    Arguments:
    arg -- info, which is to be written to the TurtleScreen
    move (optional) -- True/False
    align (optional) -- one of the strings "left", "center" or right"
    font (optional) -- a triple (fontname, fontsize, fonttype)

    Write text - the string representation of arg - at the current
    turtle position according to align ("left", "center" or right")
    and with the given font.
    If move is True, the pen is moved to the bottom-right corner
    of the text. By default, move is False.

    Example (for a Turtle instance named turtle):
    >>> turtle.write('Home = ', True, align="center")
    >>> turtle.write((0,0), True)
    """
    if self.undobuffer:
        # Group the write (and optional move) into one undo step.
        self.undobuffer.push(["seq"])
        self.undobuffer.cumulate = True
    end = self._write(str(arg), align.lower(), font)
    if move:
        x, y = self.pos()
        self.setpos(end, y)
    if self.undobuffer:
        self.undobuffer.cumulate = False
def begin_poly(self):
    """Start recording the vertices of a polygon.

    No argument.

    The current turtle position becomes the first vertex of the
    polygon being recorded.

    Example (for a Turtle instance named turtle):
    >>> turtle.begin_poly()
    """
    self._creatingPoly = True
    self._poly = [self._position]
def end_poly(self):
    """Stop recording the vertices of a polygon.

    No argument.

    The current turtle position is the last recorded vertex; it will
    be connected with the first one when the polygon is used.

    Example (for a Turtle instance named turtle):
    >>> turtle.end_poly()
    """
    self._creatingPoly = False
def get_poly(self):
    """Return the lastly recorded polygon.

    No argument.

    Example (for a Turtle instance named turtle):
    >>> p = turtle.get_poly()
    >>> turtle.register_shape("myFavouriteShape", p)
    """
    # No polygon has been recorded yet -> implicit None.
    if self._poly is None:
        return None
    return tuple(self._poly)
def getscreen(self):
    """Return the TurtleScreen object, the turtle is drawing on.

    No argument.

    So TurtleScreen-methods can be called for that object.

    Example (for a Turtle instance named turtle):
    >>> ts = turtle.getscreen()
    >>> ts
    <turtle.TurtleScreen object at 0x0106B770>
    >>> ts.bgcolor("pink")
    """
    return self.screen
def getturtle(self):
    """Return the Turtleobject itself.

    No argument.

    Only reasonable use: as a function to return the 'anonymous turtle':

    Example:
    >>> pet = getturtle()
    >>> pet.fd(50)
    >>> pet
    <turtle.Turtle object at 0x0187D810>
    >>> turtles()
    [<turtle.Turtle object at 0x0187D810>]
    """
    return self

# Historical alias.
getpen = getturtle
################################################################
### screen oriented methods recurring to methods of TurtleScreen
################################################################
def _delay(self, delay=None):
"""Set delay value which determines speed of turtle animation.
"""
return self.screen.delay(delay)
def onclick(self, fun, btn=1, add=None):
    """Bind fun to mouse-click event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).
    add -- True or False. If True, new binding will be added, otherwise
           it will replace a former binding.

    Example for the anonymous turtle, i. e. the procedural way:

    >>> def turn(x, y):
    ...     left(360)
    ...
    >>> onclick(turn)  # Now clicking into the turtle will turn it.
    >>> onclick(None)  # event-binding will be removed
    """
    # Bind to the turtle's own canvas item, not the whole screen.
    self.screen._onclick(self.turtle._item, fun, btn, add)
    self._update()
def onrelease(self, fun, btn=1, add=None):
    """Bind fun to mouse-button-release event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).

    Example (for a MyTurtle instance named joe):
    >>> class MyTurtle(Turtle):
    ...     def glow(self,x,y):
    ...         self.fillcolor("red")
    ...     def unglow(self,x,y):
    ...         self.fillcolor("")
    ...
    >>> joe = MyTurtle()
    >>> joe.onclick(joe.glow)
    >>> joe.onrelease(joe.unglow)

    Clicking on joe turns fillcolor red, unclicking turns it to
    transparent.
    """
    # Bind to the turtle's own canvas item, not the whole screen.
    self.screen._onrelease(self.turtle._item, fun, btn, add)
    self._update()
def ondrag(self, fun, btn=1, add=None):
    """Bind fun to mouse-move event on this turtle on canvas.

    Arguments:
    fun -- a function with two arguments, to which will be assigned
           the coordinates of the clicked point on the canvas.
    num -- number of the mouse-button defaults to 1 (left mouse button).

    Every sequence of mouse-move-events on a turtle is preceded by a
    mouse-click event on that turtle.

    Example (for a Turtle instance named turtle):
    >>> turtle.ondrag(turtle.goto)

    Subsequently clicking and dragging a Turtle will move it
    across the screen thereby producing handdrawings (if pen is
    down).
    """
    # No _update() here: dragging triggers its own redraws.
    self.screen._ondrag(self.turtle._item, fun, btn, add)
def _undo(self, action, data):
    """Does the main part of the work for undo()
    """
    if self.undobuffer is None:
        return
    if action == "rot":
        # Rotate back; compensate for a possibly changed degreesPerAU.
        angle, degPAU = data
        self._rotate(-angle*degPAU/self._degreesPerAU)
        # _rotate() pushed a new entry; discard it so undo isn't undoable.
        dummy = self.undobuffer.pop()
    elif action == "stamp":
        stitem = data[0]
        self.clearstamp(stitem)
    elif action == "go":
        self._undogoto(data)
    elif action in ["wri", "dot"]:
        item = data[0]
        self.screen._delete(item)
        self.items.remove(item)
    elif action == "dofill":
        # Make the filled polygon invisible again.
        item = data[0]
        self.screen._drawpoly(item, ((0, 0),(0, 0),(0, 0)),
                              fill="", outline="")
    elif action == "beginfill":
        item = data[0]
        self._fillitem = self._fillpath = None
        if item in self.items:
            self.screen._delete(item)
            self.items.remove(item)
    elif action == "pen":
        TPen.pen(self, data[0])
        self.undobuffer.pop()
def undo(self):
    """undo (repeatedly) the last turtle action.

    No argument.

    undo (repeatedly) the last turtle action.
    Number of available undo actions is determined by the size of
    the undobuffer.

    Example (for a Turtle instance named turtle):
    >>> for i in range(4):
    ...     turtle.fd(50); turtle.lt(80)
    ...
    >>> for i in range(8):
    ...     turtle.undo()
    ...
    """
    if self.undobuffer is None:
        return
    item = self.undobuffer.pop()
    action = item[0]
    data = item[1:]
    if action == "seq":
        # Compound entry: undo each sub-action, last one first.
        while data:
            item = data.pop()
            self._undo(item[0], item[1:])
    else:
        self._undo(action, data)

# Backwards-compatible aliases.
turtlesize = shapesize

RawPen = RawTurtle
### Screen - Singleton ########################
def Screen():
    """Return the singleton screen object.

    If none exists at the moment, create a new one and return it,
    else return the existing one."""
    screen = Turtle._screen
    if screen is None:
        screen = Turtle._screen = _Screen()
    return screen
class _Screen(TurtleScreen):
    """Singleton screen wrapping the Tk root window and its canvas."""

    # Class-level singletons shared by every (i.e. the single) instance.
    _root = None
    _canvas = None
    _title = _CFG["title"]

    def __init__(self):
        # XXX there is no need for this code to be conditional,
        # as there will be only a single _Screen instance, anyway
        # XXX actually, the turtle demo is injecting root window,
        # so perhaps the conditional creation of a root should be
        # preserved (perhaps by passing it as an optional parameter)
        if _Screen._root is None:
            _Screen._root = self._root = _Root()
            self._root.title(_Screen._title)
            self._root.ondestroy(self._destroy)
        if _Screen._canvas is None:
            width = _CFG["width"]
            height = _CFG["height"]
            canvwidth = _CFG["canvwidth"]
            canvheight = _CFG["canvheight"]
            leftright = _CFG["leftright"]
            topbottom = _CFG["topbottom"]
            self._root.setupcanvas(width, height, canvwidth, canvheight)
            _Screen._canvas = self._root._getcanvas()
            TurtleScreen.__init__(self, _Screen._canvas)
            self.setup(width, height, leftright, topbottom)

    def setup(self, width=_CFG["width"], height=_CFG["height"],
              startx=_CFG["leftright"], starty=_CFG["topbottom"]):
        """ Set the size and position of the main window.

        Arguments:
        width: as integer a size in pixels, as float a fraction of the screen.
          Default is 50% of screen.
        height: as integer the height in pixels, as float a fraction of the
          screen. Default is 75% of screen.
        startx: if positive, starting position in pixels from the left
          edge of the screen, if negative from the right edge
          Default, startx=None is to center window horizontally.
        starty: if positive, starting position in pixels from the top
          edge of the screen, if negative from the bottom edge
          Default, starty=None is to center window vertically.

        Examples (for a Screen instance named screen):
        >>> screen.setup (width=200, height=200, startx=0, starty=0)

        sets window to 200x200 pixels, in upper left of screen

        >>> screen.setup(width=.75, height=0.5, startx=None, starty=None)

        sets window to 75% of screen by 50% of screen and centers
        """
        if not hasattr(self._root, "set_geometry"):
            return
        sw = self._root.win_width()
        sh = self._root.win_height()
        # Floats in [0, 1] are interpreted as fractions of the screen size.
        if isinstance(width, float) and 0 <= width <= 1:
            width = sw*width
        if startx is None:
            startx = (sw - width) / 2
        if isinstance(height, float) and 0 <= height <= 1:
            height = sh*height
        if starty is None:
            starty = (sh - height) / 2
        self._root.set_geometry(width, height, startx, starty)
        self.update()

    def title(self, titlestring):
        """Set title of turtle-window

        Argument:
        titlestring -- a string, to appear in the titlebar of the
                       turtle graphics window.

        This is a method of Screen-class. Not available for TurtleScreen-
        objects.

        Example (for a Screen instance named screen):
        >>> screen.title("Welcome to the turtle-zoo!")
        """
        if _Screen._root is not None:
            _Screen._root.title(titlestring)
        # Remember the title for a possibly re-created root window.
        _Screen._title = titlestring

    def _destroy(self):
        root = self._root
        if root is _Screen._root:
            # Reset all singletons so the module can be used again.
            Turtle._pen = None
            Turtle._screen = None
            _Screen._root = None
            _Screen._canvas = None
        TurtleScreen._RUNNING = True
        root.destroy()

    def bye(self):
        """Shut the turtlegraphics window.

        Example (for a TurtleScreen instance named screen):
        >>> screen.bye()
        """
        self._destroy()

    def exitonclick(self):
        """Go into mainloop until the mouse is clicked.

        No arguments.

        Bind bye() method to mouseclick on TurtleScreen.
        If "using_IDLE" - value in configuration dictionary is False
        (default value), enter mainloop.
        If IDLE with -n switch (no subprocess) is used, this value should be
        set to True in turtle.cfg. In this case IDLE's mainloop
        is active also for the client script.

        This is a method of the Screen-class and not available for
        TurtleScreen instances.

        Example (for a Screen instance named screen):
        >>> screen.exitonclick()
        """
        def exitGracefully(x, y):
            """Screen.bye() with two dummy-parameters"""
            self.bye()
        self.onclick(exitGracefully)
        if _CFG["using_IDLE"]:
            # IDLE's own mainloop already serves the events.
            return
        try:
            mainloop()
        except AttributeError:
            # mainloop may already be gone during interpreter shutdown.
            exit(0)
class Turtle(RawTurtle):
    """RawTurtle auto-creating (scrolled) canvas.

    When a Turtle object is created or a function derived from some
    Turtle method is called a TurtleScreen object is automatically created.
    """
    # Lazily-created singletons used by the function-based interface.
    _pen = None
    _screen = None

    def __init__(self,
                 shape=_CFG["shape"],
                 undobuffersize=_CFG["undobuffersize"],
                 visible=_CFG["visible"]):
        if Turtle._screen is None:
            Turtle._screen = Screen()
        RawTurtle.__init__(self, Turtle._screen,
                           shape=shape,
                           undobuffersize=undobuffersize,
                           visible=visible)

# Backwards-compatible alias.
Pen = Turtle
def _getpen():
    """Create the 'anonymous' turtle if not already present."""
    pen = Turtle._pen
    if pen is None:
        pen = Turtle._pen = Turtle()
    return pen
def _getscreen():
    """Create a TurtleScreen if not already present."""
    screen = Turtle._screen
    if screen is None:
        screen = Turtle._screen = Screen()
    return screen
def write_docstringdict(filename="turtle_docstringdict"):
    """Create and write docstring-dictionary to file.

    Optional argument:
    filename -- a string, used as filename
                default value is turtle_docstringdict

    Has to be called explicitly, (not used by the turtle-graphics classes)
    The docstring dictionary will be written to the Python script
    <filename>.py.  It is intended to serve as a template for translation
    of the docstrings into different languages.
    """
    docsdict = {}
    for methodname in _tg_screen_functions:
        key = "_Screen." + methodname
        docsdict[key] = eval(key).__doc__
    for methodname in _tg_turtle_functions:
        key = "Turtle." + methodname
        docsdict[key] = eval(key).__doc__
    # Aliases would only produce duplicate entries, so filter them out.
    keys = sorted(x for x in docsdict
                  if x.split('.')[1] not in _alias_list)
    # The context manager closes the file; the explicit f.close() of the
    # former version was redundant.
    with open("%s.py" % filename, "w") as f:
        f.write('docsdict = {\n\n')
        # All entries but the last are followed by a comma.
        for key in keys[:-1]:
            f.write('%s :\n' % repr(key))
            f.write(' """%s\n""",\n\n' % docsdict[key])
        if keys:  # guard against an (unlikely) empty dictionary
            key = keys[-1]
            f.write('%s :\n' % repr(key))
            f.write(' """%s\n"""\n\n' % docsdict[key])
        f.write("}\n")
def read_docstrings(lang):
    """Read in docstrings from lang-specific docstring dictionary.

    Transfer docstrings, translated to lang, from a dictionary-file
    to the methods of classes Screen and Turtle and - in revised form -
    to the corresponding functions.
    """
    modname = "turtle_docstringdict_%(language)s" % {'language': lang.lower()}
    module = __import__(modname)
    docsdict = module.docsdict
    for key in docsdict:
        try:
            # eval(key).im_func.__doc__ = docsdict[key]
            eval(key).__doc__ = docsdict[key]
        # Narrowed from a bare except: a malformed key must not abort the
        # remaining entries, but SystemExit/KeyboardInterrupt propagate.
        except Exception:
            print("Bad docstring-entry: %s" % key)
# Load translated docstrings if a non-English language is configured.
_LANGUAGE = _CFG["language"]

try:
    if _LANGUAGE != "english":
        read_docstrings(_LANGUAGE)
except ImportError:
    print("Cannot find docsdict for", _LANGUAGE)
# Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
except Exception:
    print("Unknown Error when trying to import %s-docstring-dictionary" %
          _LANGUAGE)
def getmethparlist(ob):
    """Get strings describing the arguments for the given object.

    Returns a pair of strings representing function parameter lists
    including parenthesis.  The first string is suitable for use in
    function definition and the second is suitable for use in function
    call.  The "self" parameter is not included.
    """
    # Works only for Python-defined callables; the leading "self"
    # parameter is dropped.  (Dead initializations of defText/callText
    # from the former version were removed.)
    args, varargs, varkw = inspect.getargs(ob.__code__)
    realArgs = args[1:]
    defaults = ob.__defaults__ or []
    defaults = ["=%r" % (value,) for value in defaults]
    # Left-pad so defaults line up with the trailing parameters.
    defaults = [""] * (len(realArgs) - len(defaults)) + defaults
    items1 = [arg + dflt for arg, dflt in zip(realArgs, defaults)]
    items2 = list(realArgs)
    if varargs is not None:
        items1.append("*" + varargs)
        items2.append("*" + varargs)
    if varkw is not None:
        items1.append("**" + varkw)
        items2.append("**" + varkw)
    defText = "(%s)" % ", ".join(items1)
    callText = "(%s)" % ", ".join(items2)
    return defText, callText
def _turtle_docrevise(docstr):
    """To reduce docstrings from RawTurtle class for functions
    """
    import re
    if docstr is None:
        return None
    # Strip the example turtle's name so examples read as plain functions.
    name = _CFG["exampleturtle"]
    revised = docstr.replace("%s." % name, "")
    parexp = re.compile(r' \(.+ %s\):' % name)
    return parexp.sub(":", revised)
def _screen_docrevise(docstr):
    """To reduce docstrings from TurtleScreen class for functions
    """
    import re
    if docstr is None:
        return None
    # Strip the example screen's name so examples read as plain functions.
    name = _CFG["examplescreen"]
    revised = docstr.replace("%s." % name, "")
    parexp = re.compile(r' \(.+ %s\):' % name)
    return parexp.sub(":", revised)
## The following mechanism makes all methods of RawTurtle and Turtle available
## as functions. So we can enhance, change, add, delete methods to these
## classes and do not need to change anything here.

for methodname in _tg_screen_functions:
    pl1, pl2 = getmethparlist(eval('_Screen.' + methodname))
    if pl1 == "":
        # Could not derive a parameter list - skip (should not happen).
        print(">>>>>>", pl1, pl2)
        continue
    # Generate e.g.: def bgcolor(*args): return _getscreen().bgcolor(*args)
    defstr = ("def %(key)s%(pl1)s: return _getscreen().%(key)s%(pl2)s" %
              {'key':methodname, 'pl1':pl1, 'pl2':pl2})
    exec(defstr)
    eval(methodname).__doc__ = _screen_docrevise(eval('_Screen.'+methodname).__doc__)

for methodname in _tg_turtle_functions:
    pl1, pl2 = getmethparlist(eval('Turtle.' + methodname))
    if pl1 == "":
        print(">>>>>>", pl1, pl2)
        continue
    # Generate e.g.: def forward(distance): return _getpen().forward(distance)
    defstr = ("def %(key)s%(pl1)s: return _getpen().%(key)s%(pl2)s" %
              {'key':methodname, 'pl1':pl1, 'pl2':pl2})
    exec(defstr)
    eval(methodname).__doc__ = _turtle_docrevise(eval('Turtle.'+methodname).__doc__)

# Alias kept for backwards compatibility.
done = mainloop
if __name__ == "__main__":
    # Interactive demo exercising both the legacy and the newer API.
    def switchpen():
        # Toggle the anonymous turtle's pen up/down.
        if isdown():
            pu()
        else:
            pd()

    def demo1():
        """Demo of old turtle.py - module"""
        reset()
        tracer(True)
        up()
        backward(100)
        down()
        # draw 3 squares; the last filled
        width(3)
        for i in range(3):
            if i == 2:
                begin_fill()
            for _ in range(4):
                forward(20)
                left(90)
            if i == 2:
                color("maroon")
                end_fill()
            up()
            forward(30)
            down()
        width(1)
        color("black")
        # move out of the way
        tracer(False)
        up()
        right(90)
        forward(100)
        right(90)
        forward(100)
        right(180)
        down()
        # some text
        write("startstart", 1)
        write("start", 1)
        color("red")
        # staircase
        for i in range(5):
            forward(20)
            left(90)
            forward(20)
            right(90)
        # filled staircase
        tracer(True)
        begin_fill()
        for i in range(5):
            forward(20)
            left(90)
            forward(20)
            right(90)
        end_fill()
        # more text

    def demo2():
        """Demo of some new features."""
        speed(1)
        st()
        pensize(3)
        setheading(towards(0, 0))
        radius = distance(0, 0)/2.0
        rt(90)
        for _ in range(18):
            switchpen()
            circle(radius, 10)
        write("wait a moment...")
        while undobufferentries():
            undo()
        reset()
        lt(90)
        colormode(255)
        laenge = 10
        pencolor("green")
        pensize(3)
        lt(180)
        # spiral of growing filled triangles with shifting colors
        for i in range(-2, 16):
            if i > 0:
                begin_fill()
                fillcolor(255-15*i, 0, 15*i)
            for _ in range(3):
                fd(laenge)
                lt(120)
            end_fill()
            laenge += 10
            lt(15)
            speed((speed()+1)%12)
        #end_fill()
        lt(120)
        pu()
        fd(70)
        rt(30)
        pd()
        color("red","yellow")
        speed(0)
        begin_fill()
        for _ in range(4):
            circle(50, 90)
            rt(90)
            fd(30)
            rt(90)
        end_fill()
        lt(90)
        pu()
        fd(30)
        pd()
        shape("turtle")
        tri = getturtle()
        tri.resizemode("auto")
        turtle = Turtle()
        turtle.resizemode("auto")
        turtle.shape("turtle")
        turtle.reset()
        turtle.left(90)
        turtle.speed(0)
        turtle.up()
        turtle.goto(280, 40)
        turtle.lt(30)
        turtle.down()
        turtle.speed(6)
        turtle.color("blue","orange")
        turtle.pensize(2)
        tri.speed(6)
        setheading(towards(turtle))
        # pursuit: tri chases the second turtle, stamping every 20 steps
        count = 1
        while tri.distance(turtle) > 4:
            turtle.fd(3.5)
            turtle.lt(0.6)
            tri.setheading(tri.towards(turtle))
            tri.fd(4)
            if count % 20 == 0:
                turtle.stamp()
                tri.stamp()
                switchpen()
            count += 1
        tri.write("CAUGHT! ", font=("Arial", 16, "bold"), align="right")
        tri.pencolor("black")
        tri.pencolor("red")

        def baba(xdummy, ydummy):
            # click handler: clear everything and close the window
            clearscreen()
            bye()

        time.sleep(2)
        # replay the whole chase backwards via the undo buffers
        while undobufferentries():
            tri.undo()
            turtle.undo()
        tri.fd(50)
        tri.write(" Click me!", font = ("Courier", 12, "bold") )
        tri.onclick(baba, 1)

    demo1()
    demo2()
    exitonclick()
| lgpl-3.0 |
meteorcloudy/tensorflow | tensorflow/python/debug/lib/debug_graph_reconstruction_test.py | 13 | 7413 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the reconstruction of non-debugger-decorated GraphDefs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.lib import debug_graphs
from tensorflow.python.debug.lib import debug_utils
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class ReconstructNonDebugGraphTest(test_util.TensorFlowTestCase):
_OP_TYPE_BLACKLIST = (
"_Send", "_Recv", "_HostSend", "_HostRecv", "_Retval")
def _no_rewrite_session_config(self):
rewriter_config = rewriter_config_pb2.RewriterConfig(
dependency_optimization=rewriter_config_pb2.RewriterConfig.OFF,
min_graph_nodes=-1)
graph_options = config_pb2.GraphOptions(rewrite_options=rewriter_config)
return config_pb2.ConfigProto(graph_options=graph_options)
def setUp(self):
super(ReconstructNonDebugGraphTest, self).setUp()
self._dump_dir = tempfile.mkdtemp()
self._debug_url = "file://" + self._dump_dir
ops.reset_default_graph()
def tearDown(self):
shutil.rmtree(self._dump_dir)
super(ReconstructNonDebugGraphTest, self).tearDown()
def _graphDefWithoutBlacklistedNodes(self, graph_def):
output_graph_def = graph_pb2.GraphDef()
for node in graph_def.node:
if node.op not in self._OP_TYPE_BLACKLIST:
new_node = output_graph_def.node.add()
new_node.CopyFrom(node)
if new_node.op == "Enter":
# The debugger sets parallel_iterations attribute of while-loop Enter
# nodes to 1 for debugging.
for attr_key in new_node.attr:
if attr_key == "parallel_iterations":
new_node.attr[attr_key].i = 1
elif new_node.op == "Switch":
# We don't check the inputs to Switch ops as their inputs may be
# Send/Recv nodes.
del new_node.input[:]
return output_graph_def
def _compareOriginalAndReconstructedGraphDefs(self,
sess,
fetches,
feed_dict=None,
expected_output=None):
run_options = config_pb2.RunOptions(output_partition_graphs=True)
run_metadata = config_pb2.RunMetadata()
output = sess.run(fetches, feed_dict=feed_dict, options=run_options,
run_metadata=run_metadata)
if expected_output is not None:
self.assertAllClose(expected_output, output)
non_debug_graph_defs = run_metadata.partition_graphs
debug_utils.watch_graph(
run_options, sess.graph, debug_urls=self._debug_url)
run_metadata = config_pb2.RunMetadata()
output = sess.run(fetches, feed_dict=feed_dict, options=run_options,
run_metadata=run_metadata)
if expected_output is not None:
self.assertAllClose(expected_output, output)
dump = debug_data.DebugDumpDir(
self._dump_dir, partition_graphs=run_metadata.partition_graphs,
validate=True)
reconstructed = dump.reconstructed_non_debug_partition_graphs()
self.assertEqual(len(non_debug_graph_defs), len(reconstructed))
for i, non_debug_graph_def in enumerate(non_debug_graph_defs):
device_name = debug_graphs._infer_device_name(non_debug_graph_def)
test_util.assert_equal_graph_def(
self._graphDefWithoutBlacklistedNodes(reconstructed[device_name]),
self._graphDefWithoutBlacklistedNodes(non_debug_graph_def))
# Test debug_graphs.reconstruct_non_debug_graph_def.
reconstructed_again = (
debug_graphs.reconstruct_non_debug_graph_def(
run_metadata.partition_graphs[i]))
test_util.assert_equal_graph_def(
self._graphDefWithoutBlacklistedNodes(reconstructed_again),
self._graphDefWithoutBlacklistedNodes(non_debug_graph_def))
def testReconstructSimpleGraph(self):
    """Round-trip reconstruction of a trivial two-variable add graph."""
    with session.Session() as sess:
      first = variables.Variable([12.0], name="u")
      second = variables.Variable([30.0], name="v")
      total = math_ops.add(first, second, name="w")
      sess.run(first.initializer)
      sess.run(second.initializer)
      self._compareOriginalAndReconstructedGraphDefs(
          sess, total, expected_output=[42.0])
def testReconstructGraphWithControlEdge(self):
    """Round-trip reconstruction of a graph with control dependencies."""
    with session.Session() as sess:
      base = variables.Variable(10.0, name="a")
      with ops.control_dependencies([base]):
        doubled = math_ops.add(base, base, name="b")
      with ops.control_dependencies([base, doubled]):
        squared = math_ops.multiply(doubled, doubled, name="c")
      sess.run(base.initializer)
      self._compareOriginalAndReconstructedGraphDefs(
          sess, squared, expected_output=400.0)
def testReonstructGraphWithCond(self):
    """Round-trip reconstruction of a graph holding a cond construct."""
    with session.Session(config=self._no_rewrite_session_config()) as sess:
      lhs = variables.Variable(10.0, name="x")
      rhs = variables.Variable(20.0, name="y")
      # Takes the false branch at runtime: 10 > 20 is False, so y + 1.
      cond = control_flow_ops.cond(
          lhs > rhs,
          lambda: math_ops.add(lhs, 1),
          lambda: math_ops.add(rhs, 1))
      sess.run(lhs.initializer)
      sess.run(rhs.initializer)
      self._compareOriginalAndReconstructedGraphDefs(
          sess, cond, expected_output=21.0)
def testReconstructGraphWithWhileLoop(self):
    """Round-trip reconstruction of a graph containing a while loop."""
    with session.Session() as sess:
      step = lambda counter: math_ops.add(counter, 2)
      keep_going = lambda counter: math_ops.less(counter, 16)
      start = constant_op.constant(10, name="i")
      loop = control_flow_ops.while_loop(keep_going, step, [start])
      self._compareOriginalAndReconstructedGraphDefs(sess, loop)
def testReconstructGraphWithGradients(self):
    """Round-trip reconstruction of a graph containing gradient ops."""
    with session.Session(config=self._no_rewrite_session_config()) as sess:
      weight = variables.Variable(12.0, name="u")
      offset = variables.Variable(30.0, name="v")
      factor = constant_op.constant(1.1, name="x")
      toy_loss = factor * (weight - offset)
      train_op = gradient_descent.GradientDescentOptimizer(
          learning_rate=0.1).minimize(toy_loss, name="train_op")
      sess.run(weight.initializer)
      sess.run(offset.initializer)
      self._compareOriginalAndReconstructedGraphDefs(sess, train_op)
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
warner83/micropython | tests/basics/unpack1.py | 57 | 1749 | # unpack sequences
# Test fixture: the printed output is compared against an expected-output
# file, so the executable statements must not change.
a, = 1, ; print(a)
a, b = 2, 3 ; print(a, b)
a, b, c = 1, 2, 3; print(a, b, c)
a, = range(1); print(a)
a, b = range(2); print(a, b)
a, b, c = range(3); print(a, b, c)
(a) = range(1); print(a)
(a,) = range(1); print(a)
(a, b) = range(2); print(a, b)
(a, b, c) = range(3); print(a, b, c)
# lists
# Unpacking an empty iterable into an empty target list is legal.
[] = []
[a] = range(1); print(a)
[a, b] = range(2); print(a, b)
[a, b, c] = range(3); print(a, b, c)
# with star
*a, = () ; print(a)
*a, = 4, ; print(a)
*a, = 5, 6 ; print(a)
*a, b = 7, ; print(a, b)
*a, b = 8, 9 ; print(a, b)
*a, b = 10, 11, 12 ; print(a, b)
a, *b = 13, ; print(a, b)
a, *b = 14, 15 ; print(a, b)
a, *b = 16, 17, 18 ; print(a, b)
a, *b, c = 19, 20 ; print(a, b)
a, *b, c = 21, 22, 23 ; print(a, b)
a, *b, c = 24, 25, 26, 27 ; print(a, b)
a = [28, 29]
*b, = a
# Star-unpacking a list must produce an equal but distinct list.
print(a, b, a == b)
[*a] = [1, 2, 3]
print(a)
# Too few items for the mandatory targets raises ValueError.
try:
    a, *b, c = (30,)
except ValueError:
    print("ValueError")
# with star and generic iterator
*a, = range(5) ; print(a)
*a, b = range(5) ; print(a, b)
*a, b, c = range(5) ; print(a, b, c)
a, *b = range(5) ; print(a, b)
a, *b, c = range(5) ; print(a, b, c)
a, *b, c, d = range(5) ; print(a, b, c, d)
a, b, *c = range(5) ; print(a, b, c)
a, b, *c, d = range(5) ; print(a, b, c, d)
a, b, *c, d, e = range(5) ; print(a, b, c, d, e)
*a, = [x * 2 for x in [1, 2, 3, 4]] ; print(a)
*a, b = [x * 2 for x in [1, 2, 3, 4]] ; print(a, b)
a, *b = [x * 2 for x in [1, 2, 3, 4]] ; print(a, b)
a, *b, c = [x * 2 for x in [1, 2, 3, 4]]; print(a, b, c)
try:
    a, *b, c = range(0)
except ValueError:
    print("ValueError")
try:
    a, *b, c = range(1)
except ValueError:
    print("ValueError")
| mit |
JioCloud/oslo-incubator | openstack/common/db/sqlalchemy/migration.py | 1 | 10048 | # coding: utf-8
#
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Base on code in migrate/changeset/databases/sqlite.py which is under
# the following license:
#
# The MIT License
#
# Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
import distutils.version as dist_version
import os
import re
import migrate
from migrate.changeset import ansisql
from migrate.changeset.databases import sqlite
from migrate.versioning import util as migrate_util
import sqlalchemy
from sqlalchemy.schema import UniqueConstraint
from openstack.common.db import exception
from openstack.common.db.sqlalchemy import session as db_session
from openstack.common.gettextutils import _ # noqa
@migrate_util.decorator
def patched_with_engine(f, *a, **kw):
    """Run *f* with a constructed engine, disposing of it afterwards.

    Backport of the engine-disposal behavior added in
    sqlalchemy-migrate 0.7.3; without it each call can leak the
    engine's connection pool.
    """
    url = a[0]
    engine = migrate_util.construct_engine(url, **kw)
    try:
        kw['engine'] = engine
        return f(*a, **kw)
    finally:
        # Only dispose engines created here; the caller may have passed
        # an existing Engine object instead of a URL string.
        if isinstance(engine, migrate_util.Engine) and engine is not url:
            migrate_util.log.debug('Disposing SQLAlchemy engine %s', engine)
            engine.dispose()
# TODO(jkoelker) When migrate 0.7.3 is released and nova depends
# on that version or higher, this can be removed
MIN_PKG_VERSION = dist_version.StrictVersion('0.7.3')
if (not hasattr(migrate, '__version__') or
    dist_version.StrictVersion(migrate.__version__) < MIN_PKG_VERSION):
    migrate_util.with_engine = patched_with_engine
# NOTE(jkoelker) Delay importing migrate until we are patched
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
# Module-level cache for the migrate Repository (see _find_migrate_repo).
_REPOSITORY = None
get_engine = db_session.get_engine
def _get_unique_constraints(self, table):
    """Retrieve information about existing unique constraints of the table

    This feature is needed for _recreate_table() to work properly.
    Unfortunately, it's not available in sqlalchemy 0.7.x/0.8.x.

    :param table: a sqlalchemy Table bound to an SQLite engine
    :returns: list of UniqueConstraint objects reflected from the
        table's original CREATE TABLE statement
    """
    # SQLAlchemy 0.7/0.8 reflection does not expose SQLite unique
    # constraints, so read the original DDL out of sqlite_master.
    data = table.metadata.bind.execute(
        """SELECT sql
        FROM sqlite_master
        WHERE
            type='table' AND
            name=:table_name""",
        table_name=table.name
    ).fetchone()[0]
    # Raw string: '\w' and '\(' are regex escapes, not string escapes.
    # The original non-raw literal relied on Python passing unknown
    # escapes through, which is deprecated in Python 3.
    UNIQUE_PATTERN = r"CONSTRAINT (\w+) UNIQUE \(([^\)]+)\)"
    return [
        UniqueConstraint(
            *[getattr(table.columns, c.strip(' "')) for c in cols.split(",")],
            name=name
        )
        for name, cols in re.findall(UNIQUE_PATTERN, data)
    ]
def _recreate_table(self, table, column=None, delta=None, omit_uniques=None):
    """Recreate the table properly

    Unlike the corresponding original method of sqlalchemy-migrate this one
    doesn't drop existing unique constraints when creating a new one.

    :param table: sqlalchemy Table to recreate
    :param column: column being added/dropped/altered, if any
    :param delta: migrate delta describing the column change, if any
    :param omit_uniques: names of unique constraints to leave out of
        the recreated table (used to "drop" a constraint in SQLite)
    """
    table_name = self.preparer.format_table(table)
    # we remove all indexes so as not to have
    # problems during copy and re-create
    for index in table.indexes:
        index.drop()
    # reflect existing unique constraints
    for uc in self._get_unique_constraints(table):
        table.append_constraint(uc)
    # omit given unique constraints when creating a new table if required
    table.constraints = set([
        cons for cons in table.constraints
        if omit_uniques is None or cons.name not in omit_uniques
    ])
    # SQLite cannot ALTER in place: rename the old table aside, create
    # the new one, copy the rows over, then drop the renamed original.
    self.append('ALTER TABLE %s RENAME TO migration_tmp' % table_name)
    self.execute()
    insertion_string = self._modify_table(table, column, delta)
    table.create(bind=self.connection)
    self.append(insertion_string % {'table_name': table_name})
    self.execute()
    self.append('DROP TABLE migration_tmp')
    self.execute()
def _visit_migrate_unique_constraint(self, *p, **k):
    """Drop the given unique constraint by recreating the table without it.

    The stock sqlalchemy-migrate implementation simply raises
    NotImplementedError for SQLite.
    """
    constraint = p[0]
    self.recreate_table(constraint.table, omit_uniques=[constraint.name])
def patch_migrate():
    """A workaround for SQLite's inability to alter things

    SQLite abilities to alter tables are very limited (please read
    http://www.sqlite.org/lang_altertable.html for more details).
    E. g. one can't drop a column or a constraint in SQLite. The
    workaround for this is to recreate the original table omitting
    the corresponding constraint (or column).

    sqlalchemy-migrate library has recreate_table() method that
    implements this workaround, but it does it wrong:

    - information about unique constraints of a table
      is not retrieved. So if you have a table with one
      unique constraint and a migration adding another one
      you will end up with a table that has only the
      latter unique constraint, and the former will be lost

    - dropping of unique constraints is not supported at all

    The proper way to fix this is to provide a pull-request to
    sqlalchemy-migrate, but the project seems to be dead. So we
    can go on with monkey-patching of the lib at least for now.
    """
    # this patch is needed to ensure that recreate_table() doesn't drop
    # existing unique constraints of the table when creating a new one
    helper_cls = sqlite.SQLiteHelper
    helper_cls.recreate_table = _recreate_table
    helper_cls._get_unique_constraints = _get_unique_constraints
    # this patch is needed to be able to drop existing unique constraints
    constraint_cls = sqlite.SQLiteConstraintDropper
    constraint_cls.visit_migrate_unique_constraint = \
        _visit_migrate_unique_constraint
    constraint_cls.__bases__ = (ansisql.ANSIColumnDropper,
                                sqlite.SQLiteConstraintGenerator)
def db_sync(abs_path, version=None, init_version=0):
    """Upgrade or downgrade a database.

    Runs the upgrade() or downgrade() functions in the change scripts.

    :param abs_path: Absolute path to migrate repository.
    :param version: Database will upgrade/downgrade until this version.
        If None the database is brought up to the latest version.
    :param init_version: Initial database version
    :raises DbMigrationError: if *version* is not an integer
    """
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.DbMigrationError(
                message=_("version should be an integer"))
    current = db_version(abs_path, init_version)
    repo = _find_migrate_repo(abs_path)
    # Downgrade only when an explicit target below the current version
    # was requested; otherwise upgrade (possibly to the latest).
    if version is not None and version <= current:
        return versioning_api.downgrade(get_engine(), repo, version)
    return versioning_api.upgrade(get_engine(), repo, version)
def db_version(abs_path, init_version):
    """Show the current version of the repository.

    :param abs_path: Absolute path to migrate repository
    :param init_version: Initial database version to assign when the
        database is empty and not yet version controlled
    :raises DbMigrationError: if the database has tables but is not
        under version control (pre-Essex schema)
    """
    repository = _find_migrate_repo(abs_path)
    try:
        return versioning_api.db_version(get_engine(), repository)
    except versioning_exceptions.DatabaseNotControlledError:
        # Not version controlled yet: an empty database can be placed
        # under control at init_version; a non-empty one cannot.
        meta = sqlalchemy.MetaData()
        engine = get_engine()
        meta.reflect(bind=engine)
        tables = meta.tables
        if len(tables) == 0:
            db_version_control(abs_path, init_version)
            return versioning_api.db_version(get_engine(), repository)
        else:
            # Some pre-Essex DB's may not be version controlled.
            # Require them to upgrade using Essex first.
            raise exception.DbMigrationError(
                message=_("Upgrade DB using Essex release first."))
def db_version_control(abs_path, version=None):
    """Mark a database as under this repository's version control.

    Once a database is under version control, schema changes should
    only be done via change scripts in this repository.

    :param abs_path: Absolute path to migrate repository
    :param version: Initial database version
    :returns: the version the database was placed under
    """
    repo = _find_migrate_repo(abs_path)
    versioning_api.version_control(get_engine(), repo, version)
    return version
def _find_migrate_repo(abs_path):
    """Return the project's change script Repository, caching it.

    :param abs_path: Absolute path to migrate repository
    :raises DbMigrationError: if *abs_path* does not exist
    """
    global _REPOSITORY
    if not os.path.exists(abs_path):
        raise exception.DbMigrationError("Path %s not found" % abs_path)
    # Lazily construct the Repository once and reuse it thereafter.
    _REPOSITORY = _REPOSITORY or Repository(abs_path)
    return _REPOSITORY
| apache-2.0 |
aron-bordin/kivy-designer | designer/components/start_page.py | 4 | 2797 | import webbrowser
from designer.utils.utils import get_designer, get_fs_encoding
from kivy.properties import ObjectProperty, StringProperty
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.scrollview import ScrollView
class DesignerLinkLabel(Button):
    '''Button rendering an http link; releasing it opens the link in
    the system web browser.
    '''

    link = StringProperty(None)
    '''The http(s) URL to open on release.
    :data:`link` is a :class:`~kivy.properties.StringProperty`
    '''

    def on_release(self, *args):
        '''Open :data:`link` in the default browser, if one is set.
        '''
        if not self.link:
            return
        webbrowser.open(self.link)
class RecentItem(BoxLayout):
    '''A single row in the recent-files list, dispatching 'on_press'
    when activated.
    '''

    path = StringProperty('')
    '''Filesystem path of the project this item represents.
    :data:`path` is a :class:`~kivy.properties.StringProperty`
    '''

    __events__ = ('on_press', )

    def on_press(self, *args):
        '''Default handler for the 'on_press' event; does nothing.
        '''
class RecentFilesBox(ScrollView):
    '''Scrollable container listing recently opened files, one
    :class:`RecentItem` per file.
    '''

    grid = ObjectProperty(None)
    '''Grid layout holding the recent-file items.
    This property is an instance of :class:`~kivy.uix.gridlayout`
    :data:`grid` is a :class:`~kivy.properties.ObjectProperty`
    '''

    def __init__(self, **kwargs):
        super(RecentFilesBox, self).__init__(**kwargs)

    def add_recent(self, list_files):
        '''Create one RecentItem per path in *list_files* and size the
        grid to fit them.

        :param list_files: iterable of paths (str or bytes)
        '''
        for path in list_files:
            if isinstance(path, bytes):
                path = path.decode(get_fs_encoding())
            item = RecentItem(path=path)
            self.grid.add_widget(item)
            item.bind(on_press=self.btn_release)
            self.grid.height += item.height
        # Keep the grid at least as tall as the scroll view.
        self.grid.height = max(self.grid.height, self.height)

    def btn_release(self, instance):
        '''Open the project associated with the pressed item.
        '''
        get_designer()._perform_open(instance.path)
class DesignerStartPage(BoxLayout):
    '''Landing page of the designer, exposing open/new/help actions and
    the recent-files list.
    '''

    recent_files_box = ObjectProperty(None)
    '''Instance of
    :class:`~designer.components.start_page.RecentFilesBox`.
    :data:`recent_files_box` is a :class:`~kivy.properties.ObjectProperty`
    '''

    __events__ = ('on_open_down', 'on_new_down', 'on_help')

    def on_open_down(self, *args):
        '''Default handler for 'on_open_down'; does nothing.
        '''
        pass

    def on_new_down(self, *args):
        '''Default handler for 'on_new_down'; does nothing.
        '''
        pass

    def on_help(self, *args):
        '''Default handler for 'on_help'; does nothing.
        '''
        pass
| mit |
heiths/allura | Allura/allura/tests/test_decorators.py | 3 | 1951 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest import TestCase
from mock import patch
from allura.lib.decorators import task
class TestTask(TestCase):
    """Tests for the @task decorator."""

    def test_no_params(self):
        """Bare @task exposes a .post attribute."""
        @task
        def func():
            pass
        self.assertTrue(hasattr(func, 'post'))

    def test_with_params(self):
        """@task(...) with keyword options also exposes .post."""
        @task(disable_notifications=True)
        def func():
            pass
        self.assertTrue(hasattr(func, 'post'))

    @patch('allura.lib.decorators.c')
    @patch('allura.model.MonQTask')
    def test_post(self, MonQTask, c):
        """func.post() queues via MonQTask.post with notifications disabled.

        Bug fix: @patch decorators are applied bottom-up, so the mock for
        'allura.model.MonQTask' (the bottom decorator) is injected first.
        The original signature listed (c, MonQTask), attaching the
        side_effect to the wrong mock, so mock_post (and its assertions)
        never ran.
        """
        @task(disable_notifications=True)
        def func(s, foo=None, **kw):
            pass

        def mock_post(f, args, kw, delay=None):
            # The decorator must disable notifications for the duration
            # of the post and strip 'delay' out of the task kwargs.
            self.assertTrue(c.project.notifications_disabled)
            self.assertFalse('delay' in kw)
            self.assertEqual(delay, 1)
            self.assertEqual(kw, dict(foo=2))
            self.assertEqual(args, ('test',))
            self.assertEqual(f, func)

        c.project.notifications_disabled = False
        MonQTask.post.side_effect = mock_post
        func.post('test', foo=2, delay=1)
| apache-2.0 |
enikesha/pacioli | pacioli/views.py | 1 | 31775 | # Copyright (c) 2014, Satoshi Nakamoto Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import io
import uuid
import ast
import csv
import calendar
from collections import OrderedDict
from datetime import datetime,date
from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file
from pacioli import app, db, forms, models
import sqlalchemy
from sqlalchemy.sql import func
from sqlalchemy.orm import aliased
from pacioli.accounting.memoranda import process_filestorage
import pacioli.accounting.ledgers as ledgers
import pacioli.accounting.rates as rates
import pacioli.accounting.valuations as valuations
@app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/Configure')
def configure():
    """Configuration entry point; defaults to the chart of accounts."""
    return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts')
def chart_of_accounts():
    """Show the chart of accounts with forms for adding new entries."""
    subaccounts = models.Subaccounts.query.all()
    return render_template(
        "configure/chart_of_accounts.html",
        subaccounts=subaccounts,
        classificationform=forms.NewClassification(),
        accountform=forms.NewAccount(),
        subaccountform=forms.NewSubAccount())
@app.route('/Configure/ChartOfAccounts/AddClassification', methods=['POST', 'GET'])
def add_classification():
    """Create a new classification under the selected element.

    Bug fix: the original returned None on GET requests, which makes
    Flask raise a 500; GET now simply redirects back to the chart.
    """
    if request.method == 'POST':
        form = request.form.copy().to_dict()
        name = form['classification']
        # The form posts the parent element's id; store its name.
        parent = models.Elements.query.filter_by(
            id=form['classificationparent']).one().name
        db.session.add(models.Classifications(name=name, parent=parent))
        db.session.commit()
    return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/DeleteClassification/<classification>')
def delete_classification(classification):
    """Delete the named classification; unknown names are ignored.

    Robustness fix: .first() returns None when no row matches, and the
    original then crashed inside db.session.delete().
    """
    record = models.Classifications.query.filter_by(
        name=classification).first()
    if record is not None:
        db.session.delete(record)
        db.session.commit()
    return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/AddAccount', methods=['POST', 'GET'])
def add_account():
    """Create a new account under the selected classification.

    Bug fix: the original returned None on GET requests, which makes
    Flask raise a 500; GET now simply redirects back to the chart.
    """
    if request.method == 'POST':
        form = request.form.copy().to_dict()
        name = form['account']
        # The form posts the parent classification's id; store its name.
        parent = models.Classifications.query.filter_by(
            id=form['accountparent']).one().name
        db.session.add(models.Accounts(name=name, parent=parent))
        db.session.commit()
    return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/DeleteAccount/<account>')
def delete_account(account):
    """Delete the named account; unknown names are ignored.

    Robustness fix: .first() returns None when no row matches, and the
    original then crashed inside db.session.delete().
    """
    record = models.Accounts.query.filter_by(name=account).first()
    if record is not None:
        db.session.delete(record)
        db.session.commit()
    return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/AddSubAccount', methods=['POST', 'GET'])
def add_subaccount():
    """Create a new subaccount under the selected account.

    Bug fix: the original returned None on GET requests, which makes
    Flask raise a 500; GET now simply redirects back to the chart.
    """
    if request.method == 'POST':
        form = request.form.copy().to_dict()
        name = form['subaccount']
        # The form posts the parent account's id; store its name.
        parent = models.Accounts.query.filter_by(
            id=form['subaccountparent']).one().name
        db.session.add(models.Subaccounts(name=name, parent=parent))
        db.session.commit()
    return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/DeleteSubAccount/<subaccount>')
def delete_subaccount(subaccount):
    """Delete the named subaccount; unknown names are ignored.

    Bug fix: the original queried models.Accounts, so it looked up (and
    could delete) an *account* with the subaccount's name instead of
    the subaccount itself. Also guards against .first() returning None.
    """
    record = models.Subaccounts.query.filter_by(name=subaccount).first()
    if record is not None:
        db.session.delete(record)
        db.session.commit()
    return redirect(url_for('chart_of_accounts'))
@app.route('/Bookkeeping')
def bookkeeping():
    """Bookkeeping entry point; defaults to the CSV upload page."""
    return redirect(url_for('upload_csv'))
@app.route('/Bookkeeping/Memoranda/Upload', methods=['POST', 'GET'])
def upload_csv():
    """Accept memoranda CSV uploads and list previously uploaded memos."""
    if request.method == 'POST':
        for uploaded in request.files.getlist("file[]"):
            process_filestorage(uploaded)
        # Redirect-after-post so a refresh does not re-submit the files.
        return redirect(url_for('upload_csv'))
    memos = models.Memoranda.query.order_by(
        models.Memoranda.date.desc()).all()
    return render_template('bookkeeping/upload.html',
                           title='Upload',
                           memos=memos)
@app.route('/Bookkeeping/Memoranda/ExchangeRates')
def exchange_rates():
    """Render the exchange-rates management page."""
    return render_template('bookkeeping/exchange_rates.html')
@app.route('/Bookkeeping/Memoranda/DownloadRates')
def download_rates():
    """Fetch the latest exchange rates, then return to the rates page."""
    rates.download_rates()
    return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/ExchangeRates/Summarize')
def summarize_rates():
    """Summarize the downloaded rates, then return to the rates page."""
    rates.summarize_rates("pacioli")
    return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/ExchangeRates/Import')
def import_rates():
    """Import summarized rates into the books, then return to the page."""
    rates.import_rates("pacioli")
    return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/ExchangeRates/CalculateGains/<method>')
def calc_gains(method):
    """Calculate bitcoin gains with the given costing method (e.g. FIFO)."""
    valuations.calculate_bitcoin_gains(method)
    return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/Memos', methods=['POST', 'GET'])
def memoranda():
    """List memos (newest first), annotated with transaction counts."""
    memos = models.Memoranda.query.order_by(
        models.Memoranda.date.desc()).all()
    for memo in memos:
        related = models.MemorandaTransactions.query.filter_by(
            memoranda_id=memo.id).all()
        memo.count = len(related)
    return render_template('bookkeeping/memos.html',
                           title='Memoranda',
                           memos=memos)
@app.route('/Bookkeeping/Memoranda/Memos/Delete/<fileName>')
def delete_memoranda(fileName):
    """Delete a memo file and cascade to everything derived from it.

    Removes, in dependency order: ledger entries, then journal entries,
    then memo transactions, and finally the memo itself.

    NOTE(review): assumes every transaction has exactly one journal
    entry; a missing journal entry would crash here — confirm against
    the import pipeline. Each delete is committed individually, so an
    error mid-way leaves a partial cascade.
    """
    memo = models.Memoranda \
        .query \
        .filter_by(fileName=fileName) \
        .first()
    transactions = models.MemorandaTransactions \
        .query \
        .filter_by(memoranda_id=memo.id) \
        .all()
    for transaction in transactions:
        journal_entry = models.JournalEntries \
            .query \
            .filter_by(memoranda_transactions_id=transaction.id) \
            .first()
        ledger_entries = models.LedgerEntries \
            .query \
            .filter_by(journal_entry_id = journal_entry.id) \
            .all()
        for entry in ledger_entries:
            db.session.delete(entry)
            db.session.commit()
        db.session.delete(journal_entry)
        db.session.commit()
        db.session.delete(transaction)
        db.session.commit()
    db.session.delete(memo)
    db.session.commit()
    return redirect(url_for('upload_csv'))
@app.route('/Bookkeeping/Memoranda/Memos/<fileName>')
def memo_file(fileName):
    """Render the raw CSV contents of a memo file as a table."""
    memo = models.Memoranda.query.filter_by(fileName=fileName).first()
    # Parse the stored CSV text into a list of rows for the template.
    rows = list(csv.reader(io.StringIO(memo.fileText)))
    return render_template('bookkeeping/memo_file.html',
                           title='Memo',
                           rows=rows,
                           fileName=fileName)
@app.route('/Bookkeeping/Memoranda/Memos/Transactions')
def transactions():
    """List every memo transaction with its parsed details."""
    txns = models.MemorandaTransactions.query.all()
    for txn in txns:
        # details is stored as a Python-literal string; parse it safely.
        txn.details = ast.literal_eval(txn.details)
        entry = models.JournalEntries.query.filter_by(
            memoranda_transactions_id=txn.id).first()
        txn.journal_entry_id = entry.id
    return render_template('bookkeeping/memo_transactions.html',
                           title='Memo',
                           transactions=txns)
@app.route('/Bookkeeping/Memoranda/Memos/<fileName>/Transactions')
def memo_transactions(fileName):
    """List the transactions parsed from a single memo file."""
    memo = models.Memoranda.query.filter_by(fileName=fileName).first()
    txns = models.MemorandaTransactions.query.filter_by(
        memoranda_id=memo.id).all()
    for txn in txns:
        # details is stored as a Python-literal string; parse it safely.
        txn.details = ast.literal_eval(txn.details)
        entry = models.JournalEntries.query.filter_by(
            memoranda_transactions_id=txn.id).first()
        txn.journal_entry_id = entry.id
    return render_template('bookkeeping/memo_transactions.html',
                           title='Memo',
                           transactions=txns,
                           fileName=fileName)
@app.route('/Bookkeeping/GeneralJournal/<currency>')
def general_journal(currency):
    """Show the general journal restricted to one currency.

    Selects journal entries that have at least one ledger entry in
    *currency*, then trims each entry's ledgerentries collection (in
    memory only) to that currency.
    """
    journal_entries = db.session \
        .query(models.JournalEntries) \
        .filter(models.JournalEntries.ledgerentries \
            .any(currency=currency)) \
        .join(models.LedgerEntries) \
        .order_by(models.LedgerEntries.date.desc()) \
        .all()
    for journal_entry in journal_entries:
        # In-memory filter; does not persist unless the session flushes.
        journal_entry.ledgerentries = [c for c in journal_entry.ledgerentries if c.currency == currency]
    return render_template('bookkeeping/general_journal.html',
                           title = 'General Journal',
                           journal_entries=journal_entries,
                           currency=currency)
@app.route('/Bookkeeping/GeneralJournal/Entry/<id>')
def journal_entry(id):
    """Show one journal entry with its ledger lines and source memo.

    Cleanup: removed a leftover debug print() of the ledger entries.
    """
    journal_entry = models.JournalEntries.query.filter_by(id=id).first()
    ledger_entries = models.LedgerEntries.query.filter_by(
        journal_entry_id=id).order_by(
        models.LedgerEntries.date.desc()).order_by(
        models.LedgerEntries.tside.desc()).all()
    transaction = models.MemorandaTransactions.query.filter_by(
        id=journal_entry.memoranda_transactions_id).first()
    memo = models.Memoranda.query.filter_by(id=transaction.memoranda_id).first()
    # details is stored as a Python-literal string; parse it safely.
    transaction.details = ast.literal_eval(transaction.details)
    return render_template('bookkeeping/journal_entry.html',
                           title='Journal Entry',
                           journal_entry=journal_entry,
                           ledger_entries=ledger_entries,
                           transaction=transaction,
                           memo=memo)
@app.route('/Bookkeeping/GeneralJournal/<id>/Edit', methods=['POST','GET'])
def edit_journal_entry(id):
    """Render the edit form for a journal entry and its ledger lines."""
    entry = models.JournalEntries.query.filter_by(id=id).first()
    lines = models.LedgerEntries.query.filter_by(
        journal_entry_id=id).order_by(
        models.LedgerEntries.date.desc()).order_by(
        models.LedgerEntries.tside.desc()).all()
    txn = models.MemorandaTransactions.query.filter_by(
        id=entry.memoranda_transactions_id).first()
    memo = models.Memoranda.query.filter_by(id=txn.memoranda_id).first()
    # details is stored as a Python-literal string; parse it safely.
    txn.details = ast.literal_eval(txn.details)
    return render_template('bookkeeping/journal_entry_edit.html',
                           title='Journal Entry',
                           journal_entry=entry,
                           ledger_entries=lines,
                           transaction=txn,
                           memo=memo)
@app.route('/Bookkeeping/GeneralLedger/<currency>')
def general_ledger(currency):
    """Show a monthly-grouped ledger for every account in *currency*."""
    account_names = db.session\
        .query(models.LedgerEntries.ledger)\
        .group_by(models.LedgerEntries.ledger).all()
    accounts = [ledgers.query_entries(row[0], 'Monthly', currency)
                for row in account_names]
    return render_template('bookkeeping/general_ledger.html',
                           title='General Ledger',
                           accounts=accounts,
                           currency=currency)
@app.route('/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>')
def ledger(accountName, currency, groupby):
    """Render one ledger account grouped by *groupby* (e.g. Monthly)."""
    result = ledgers.query_entries(accountName, groupby, currency)
    return render_template('bookkeeping/ledger.html',
                           title='Ledger',
                           currency=currency,
                           account=result[0],
                           ledger_entries=result[1],
                           groupby=groupby,
                           accountName=accountName)
@app.route('/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>/<interval>')
def ledger_page(accountName, currency, groupby, interval):
    """Show one ledger account for a single day or month.

    *interval* is "%m-%d-%Y" for Daily and "%m-%Y" for Monthly.

    NOTE(review): if *groupby* is neither "Daily" nor "Monthly",
    `account` and `ledger_entries` are never bound and the final
    render raises UnboundLocalError — confirm whether other groupby
    values can reach this route.
    NOTE(review): the Daily branch orders by tside ascending while the
    Monthly branch orders descending — confirm this asymmetry is
    intentional.
    """
    if groupby == "Daily":
        interval = datetime.strptime(interval, "%m-%d-%Y")
        year = interval.year
        month = interval.month
        day = interval.day
        ledger_entries = models.LedgerEntries \
            .query \
            .filter_by(ledger=accountName) \
            .filter_by(currency=currency) \
            .filter( \
                func.date_part('year', models.LedgerEntries.date)==year, \
                func.date_part('month', models.LedgerEntries.date)==month, \
                func.date_part('day', models.LedgerEntries.date)==day) \
            .order_by(models.LedgerEntries.date) \
            .order_by(models.LedgerEntries.tside.asc()) \
            .all()
        account = ledgers.foot_account(accountName, ledger_entries, 'All')
    if groupby == "Monthly":
        interval = datetime.strptime(interval, "%m-%Y")
        year = interval.year
        month = interval.month
        ledger_entries = models.LedgerEntries\
            .query\
            .filter_by(ledger=accountName) \
            .filter_by(currency=currency) \
            .filter( \
                func.date_part('year', models.LedgerEntries.date)==year, \
                func.date_part('month', models.LedgerEntries.date)==month)\
            .order_by(models.LedgerEntries.date) \
            .order_by(models.LedgerEntries.tside.desc()) \
            .all()
        account = ledgers.foot_account(accountName, ledger_entries, 'All')
    return render_template('bookkeeping/ledger.html',
                           title = 'Ledger',
                           account=account,
                           ledger_entries=ledger_entries,
                           groupby2 = groupby,
                           groupby = 'All',
                           accountName=accountName,
                           interval=interval,
                           currency=currency)
@app.route('/Bookkeeping/TrialBalance/<currency>')
def trial_balance(currency):
    """Show the trial balance for the current month in *currency*.

    Foots every account over the current calendar month and totals the
    debit and credit balances across accounts.
    """
    # Distinct account names that have entries in this currency.
    accountsQuery = db.session \
        .query(models.LedgerEntries.ledger) \
        .group_by(models.LedgerEntries.ledger) \
        .filter(models.LedgerEntries.currency==currency) \
        .all()
    # All year-month periods available, for the period selector.
    periods = db.session \
        .query(\
            func.date_part('year', models.LedgerEntries.date) + '-'+
            func.date_part('month', models.LedgerEntries.date)) \
        .filter(models.LedgerEntries.currency==currency) \
        .group_by(\
            func.date_part('year', models.LedgerEntries.date), \
            func.date_part('month', models.LedgerEntries.date)) \
        .all()
    period = datetime.now()
    year = period.year
    month = period.month
    accounts = []
    totalDebits = 0
    totalCredits = 0
    for accountResult in accountsQuery:
        accountName = accountResult[0]
        ledger_entries = models.LedgerEntries \
            .query \
            .filter_by(ledger=accountName)\
            .filter_by(currency=currency) \
            .filter( \
                func.date_part('year', models.LedgerEntries.date)==year,
                func.date_part('month', models.LedgerEntries.date)==month) \
            .order_by(models.LedgerEntries.date) \
            .order_by(models.LedgerEntries.tside.desc()) \
            .all()
        query = ledgers.foot_account(accountName, ledger_entries, 'All')
        totalDebits += query['debitBalance']
        totalCredits += query['creditBalance']
        accounts.append(query)
    return render_template('bookkeeping/trial_balance.html',
                           currency=currency,
                           periods=periods,
                           period=period,
                           accounts=accounts,
                           totalDebits=totalDebits,
                           totalCredits=totalCredits)
@app.route('/Bookkeeping/TrialBalance/<currency>/<groupby>/<period>')
def trial_balance_historical(currency, groupby, period):
    """Show the trial balance for a past month ("%Y-%m") in *currency*.

    Same footing as trial_balance(), but for the requested period; the
    period is normalized to the last second of its month for display.
    NOTE(review): *groupby* is accepted but never used — confirm it is
    only present for URL symmetry with the other ledger routes.
    """
    # Distinct account names that have entries in this currency.
    accountsQuery = db.session \
        .query(models.LedgerEntries.ledger) \
        .group_by(models.LedgerEntries.ledger) \
        .filter(models.LedgerEntries.currency==currency) \
        .all()
    # All year-month periods available, for the period selector.
    periods = db.session \
        .query(\
            func.date_part('year', models.LedgerEntries.date) + '-'+
            func.date_part('month', models.LedgerEntries.date)) \
        .group_by(\
            func.date_part('year', models.LedgerEntries.date),\
            func.date_part('month', models.LedgerEntries.date)) \
        .filter(models.LedgerEntries.currency==currency) \
        .all()
    period = datetime.strptime(period, "%Y-%m")
    year = period.year
    month = period.month
    # Last day of the month, so `period` marks the period's very end.
    day = calendar.monthrange(year, month)[1]
    period = datetime(year, month, day, 23, 59, 59)
    accounts = []
    totalDebits = 0
    totalCredits = 0
    for accountResult in accountsQuery:
        accountName = accountResult[0]
        ledger_entries = models.LedgerEntries \
            .query \
            .filter_by(ledger=accountName) \
            .filter_by(currency=currency) \
            .filter( \
                func.date_part('year', models.LedgerEntries.date)==year, \
                func.date_part('month', models.LedgerEntries.date)==month) \
            .order_by(models.LedgerEntries.date) \
            .order_by(models.LedgerEntries.tside.desc()) \
            .all()
        query = ledgers.foot_account(accountName, ledger_entries, 'All')
        totalDebits += query['debitBalance']
        totalCredits += query['creditBalance']
        accounts.append(query)
    return render_template('bookkeeping/trial_balance.html',
                           currency=currency,
                           periods=periods,
                           period=period,
                           accounts=accounts,
                           totalDebits=totalDebits,
                           totalCredits=totalCredits)
@app.route('/FinancialStatements')
def financial_statements():
    """Landing page for financial statements: forward to the satoshi-denominated income statement."""
    default_view = url_for('income_statement', currency='satoshis')
    return redirect(default_view)
@app.route('/FinancialStatements/IncomeStatement/<currency>')
def income_statement(currency):
    """Render the income statement for *currency*, month-to-date."""
    # All (year, month) pairs with ledger activity, for the period picker.
    periods = db.session \
        .query(\
               func.date_part('year', models.LedgerEntries.date),\
               func.date_part('month', models.LedgerEntries.date)) \
        .group_by( \
                  func.date_part('year', models.LedgerEntries.date),\
                  func.date_part('month', models.LedgerEntries.date)) \
        .all()
    periods = sorted([date(int(period[0]), int(period[1]), 1) for period in periods])
    # Current month so far: from the 1st through the end of today.
    period = datetime.now()
    period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
    period_end = datetime(period.year, period.month, period.day, 23, 59, 59, 999999)
    # Income-statement elements only (revenues/expenses/gains/losses).
    elements = db.session \
        .query(models.Elements) \
        .join(models.Classifications) \
        .filter(models.Classifications.name.in_(['Revenues', 'Expenses', 'Gains', 'Losses']))\
        .join(models.Accounts) \
        .join(models.Subaccounts) \
        .all()
    net_income = 0
    for element in elements:
        # The join filter above does not narrow the ORM relationship
        # collections, so re-filter them in Python before footing.
        element.classifications = [c for c in element.classifications if c.name in ['Revenues', 'Expenses', 'Gains', 'Losses']]
        for classification in element.classifications:
            for account in classification.accounts:
                for subaccount in account.subaccounts:
                    subaccount.total = 0
                    subaccount.ledgerentries = [c for c in subaccount.ledgerentries if period_beg <= c.date <= period_end ]
                    for ledger_entry in subaccount.ledgerentries:
                        if ledger_entry.currency == currency:
                            # Credits increase net income; debits decrease it.
                            if ledger_entry.tside == 'credit':
                                subaccount.total += ledger_entry.amount
                                net_income += ledger_entry.amount
                            elif ledger_entry.tside == 'debit':
                                net_income -= ledger_entry.amount
                                subaccount.total -= ledger_entry.amount
    return render_template('financial_statements/income_statement.html',
                           title = 'Income Statement',
                           periods = periods,
                           currency = currency,
                           elements = elements,
                           net_income = net_income)
@app.route('/FinancialStatements/IncomeStatement/<currency>/<period>')
def income_statement_historical(currency, period):
    """Render the income statement for *currency* for a past month.

    ``period`` arrives as 'YYYY-MM' and is expanded to that whole calendar
    month.
    """
    # All (year, month) pairs with ledger activity, for the period picker.
    periods = db.session \
        .query(\
               func.date_part('year', models.LedgerEntries.date), \
               func.date_part('month', models.LedgerEntries.date)) \
        .group_by( \
                  func.date_part('year', models.LedgerEntries.date), \
                  func.date_part('month', models.LedgerEntries.date)) \
        .all()
    # Loop name is 'p', not 'period': under Python 2 the comprehension
    # variable leaks into function scope and would clobber the 'period'
    # route argument parsed just below.
    periods = sorted([date(int(p[0]), int(p[1]), 1) for p in periods])
    period = datetime.strptime(period, "%Y-%m")
    lastday = calendar.monthrange(period.year, period.month)[1]
    period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
    period_end = datetime(period.year, period.month, lastday, 23, 59, 59, 999999)
    # Income-statement elements only (revenues/expenses/gains/losses).
    elements = db.session \
        .query(models.Elements) \
        .join(models.Classifications) \
        .filter(models.Classifications.name.in_(['Revenues', 'Expenses', 'Gains', 'Losses']))\
        .join(models.Accounts) \
        .join(models.Subaccounts) \
        .all()
    net_income = 0
    for element in elements:
        # The join filter above does not narrow the ORM relationship
        # collections, so re-filter them in Python before footing.
        element.classifications = [c for c in element.classifications if c.name in ['Revenues', 'Expenses', 'Gains', 'Losses']]
        for classification in element.classifications:
            for account in classification.accounts:
                for subaccount in account.subaccounts:
                    subaccount.total = 0
                    subaccount.ledgerentries = [c for c in subaccount.ledgerentries if period_beg <= c.date <= period_end ]
                    for ledger_entry in subaccount.ledgerentries:
                        if ledger_entry.currency == currency:
                            # Credits increase net income; debits decrease it.
                            if ledger_entry.tside == 'credit':
                                net_income += ledger_entry.amount
                                subaccount.total += ledger_entry.amount
                            elif ledger_entry.tside == 'debit':
                                net_income -= ledger_entry.amount
                                subaccount.total -= ledger_entry.amount
    return render_template('financial_statements/income_statement.html',
                           title = 'Income Statement',
                           periods = periods,
                           currency = currency,
                           elements = elements,
                           net_income = net_income)
@app.route('/FinancialStatements/BalanceSheet/<currency>')
def balance_sheet(currency):
    """Render the balance sheet for *currency* as of the end of today.

    Assets and Liabilities are shown as elements; the Equity element's
    balance is folded into ``retained_earnings``.
    """
    # All (year, month) pairs with ledger activity, for the period picker.
    periods = db.session \
        .query(\
               func.date_part('year', models.LedgerEntries.date), \
               func.date_part('month', models.LedgerEntries.date)) \
        .group_by( \
                  func.date_part('year', models.LedgerEntries.date), \
                  func.date_part('month', models.LedgerEntries.date)) \
        .all()
    # Loop name is 'p', not 'period', so the variable assigned below is
    # never shadowed (comprehension variables leak under Python 2).
    periods = sorted([date(int(p[0]), int(p[1]), 1) for p in periods])
    period = datetime.now()
    period_end = datetime(period.year, period.month, period.day, 23, 59, 59, 999999)
    elements = db.session \
        .query(models.Elements) \
        .join(models.Classifications) \
        .join(models.Accounts) \
        .join(models.Subaccounts) \
        .all()
    retained_earnings = 0
    for element in elements:
        element.balance = 0
        for classification in element.classifications:
            classification.balance = 0
            for account in classification.accounts:
                account.balance = 0
                for subaccount in account.subaccounts:
                    subaccount.balance = 0
                    # The balance sheet is cumulative: include every entry
                    # up to period_end (no lower bound).
                    subaccount.ledgerentries = [c for c in subaccount.ledgerentries if c.date <= period_end ]
                    for ledger_entry in subaccount.ledgerentries:
                        if ledger_entry.currency == currency:
                            # Debit-normal sign convention at every rollup level.
                            if ledger_entry.tside == 'credit':
                                element.balance -= ledger_entry.amount
                                classification.balance -= ledger_entry.amount
                                account.balance -= ledger_entry.amount
                                subaccount.balance -= ledger_entry.amount
                            elif ledger_entry.tside == 'debit':
                                element.balance += ledger_entry.amount
                                classification.balance += ledger_entry.amount
                                account.balance += ledger_entry.amount
                                subaccount.balance += ledger_entry.amount
        if element.name == 'Equity':
            # Equity is credit-normal, so negate its debit-normal balance.
            # (A leftover debug print of this value was removed.)
            retained_earnings = -element.balance
    # Only Assets and Liabilities are rendered as elements.
    elements = [c for c in elements if c.name in ['Assets', 'Liabilities']]
    return render_template('financial_statements/balance_sheet.html',
                           periods=periods,
                           currency=currency,
                           elements=elements,
                           retained_earnings=retained_earnings,
                           period=period_end)
@app.route('/FinancialStatements/BalanceSheet/<currency>/<period>')
def balance_sheet_historical(currency, period):
    """Render the balance sheet for *currency* as of the end of a past month.

    ``period`` arrives as 'YYYY-MM'.
    """
    # All (year, month) pairs with ledger activity, for the period picker.
    periods = db.session \
        .query(\
               func.date_part('year', models.LedgerEntries.date), \
               func.date_part('month', models.LedgerEntries.date)) \
        .group_by( \
                  func.date_part('year', models.LedgerEntries.date), \
                  func.date_part('month', models.LedgerEntries.date)) \
        .all()
    # Loop name is 'p', not 'period': under Python 2 the comprehension
    # variable leaks and would clobber the 'period' route argument parsed
    # just below.
    periods = sorted([date(int(p[0]), int(p[1]), 1) for p in periods])
    period = datetime.strptime(period, "%Y-%m")
    lastday = calendar.monthrange(period.year, period.month)[1]
    period_end = datetime(period.year, period.month, lastday, 23, 59, 59, 999999)
    elements = db.session \
        .query(models.Elements) \
        .join(models.Classifications) \
        .join(models.Accounts) \
        .join(models.Subaccounts) \
        .all()
    retained_earnings = 0
    for element in elements:
        element.balance = 0
        for classification in element.classifications:
            classification.balance = 0
            for account in classification.accounts:
                account.balance = 0
                for subaccount in account.subaccounts:
                    subaccount.balance = 0
                    # The balance sheet is cumulative: include every entry
                    # up to period_end (no lower bound).
                    subaccount.ledgerentries = [c for c in subaccount.ledgerentries if c.date <= period_end ]
                    for ledger_entry in subaccount.ledgerentries:
                        if ledger_entry.currency == currency:
                            # Debit-normal sign convention at every rollup level.
                            if ledger_entry.tside == 'credit':
                                element.balance -= ledger_entry.amount
                                classification.balance -= ledger_entry.amount
                                account.balance -= ledger_entry.amount
                                subaccount.balance -= ledger_entry.amount
                            elif ledger_entry.tside == 'debit':
                                element.balance += ledger_entry.amount
                                classification.balance += ledger_entry.amount
                                account.balance += ledger_entry.amount
                                subaccount.balance += ledger_entry.amount
        if element.name == 'Equity':
            # Equity is credit-normal, so negate its debit-normal balance.
            # (A leftover debug print of this value was removed.)
            retained_earnings = -element.balance
    # Only Assets and Liabilities are rendered as elements.
    elements = [c for c in elements if c.name in ['Assets', 'Liabilities']]
    return render_template('financial_statements/balance_sheet.html',
                           periods=periods,
                           currency=currency,
                           elements=elements,
                           retained_earnings=retained_earnings,
                           period=period_end)
@app.route('/FinancialStatements/StatementOfCashFlows/<currency>/<period>')
def statement_of_cash_flows(currency, period):
    """Render the statement of cash flows for *currency*.

    ``period`` is either the literal 'Current' (month to date) or a past
    month given as 'YYYY-MM'.
    """
    # All (year, month) pairs with ledger activity, for the period picker.
    periods = db.session \
        .query(\
               func.date_part('year', models.LedgerEntries.date), \
               func.date_part('month', models.LedgerEntries.date)) \
        .group_by( \
                  func.date_part('year', models.LedgerEntries.date), \
                  func.date_part('month', models.LedgerEntries.date)) \
        .all()
    # Loop name is 'p', not 'period': under Python 2 the comprehension
    # variable leaks and would clobber the 'period' route argument that is
    # compared against 'Current' immediately below.
    periods = sorted([date(int(p[0]), int(p[1]), 1) for p in periods])
    if period == 'Current':
        period = datetime.now()
        lastday = period.day
    else:
        period = datetime.strptime(period, "%Y-%m")
        lastday = calendar.monthrange(period.year, period.month)[1]
    period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
    period_end = datetime(period.year, period.month, lastday, 23, 59, 59, 999999)
    # Income-statement elements only (revenues/expenses/gains/losses).
    elements = db.session \
        .query(models.Elements) \
        .join(models.Classifications) \
        .filter(models.Classifications.name.in_(['Revenues', 'Expenses', 'Gains', 'Losses']))\
        .join(models.Accounts) \
        .join(models.Subaccounts) \
        .all()
    # NOTE(review): net_income is initialized but never accumulated in this
    # view, so the template always receives 0 -- confirm whether the template
    # derives it elsewhere or this is an unfinished feature.
    net_income = 0
    for element in elements:
        # Re-filter the ORM relationship collections (the join filter above
        # does not narrow them).
        element.classifications = [c for c in element.classifications if c.name in ['Revenues', 'Expenses', 'Gains', 'Losses']]
        for classification in element.classifications:
            classification.balance = 0
            for account in classification.accounts:
                account.balance = 0
                for subaccount in account.subaccounts:
                    subaccount.balance = 0
                    subaccount.ledgerentries = [c for c in subaccount.ledgerentries if period_beg <= c.date <= period_end ]
                    for ledger_entry in subaccount.ledgerentries:
                        if ledger_entry.currency == currency:
                            # Debit-normal rollup: debits add, credits subtract.
                            if ledger_entry.tside == 'credit':
                                classification.balance -= ledger_entry.amount
                                account.balance -= ledger_entry.amount
                                subaccount.balance -= ledger_entry.amount
                            elif ledger_entry.tside == 'debit':
                                classification.balance += ledger_entry.amount
                                account.balance += ledger_entry.amount
                                subaccount.balance += ledger_entry.amount
    return render_template('financial_statements/statement_of_cash_flows.html',
                           period = period,
                           periods = periods,
                           currency = currency,
                           elements = elements,
                           net_income = net_income)
| bsd-3-clause |
GdZ/scriptfile | software/googleAppEngine/lib/django_1_4/django/contrib/formtools/tests/wizard/namedwizardtests/tests.py | 88 | 14807 | from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.test import TestCase
from django.contrib.auth.models import User
from django.contrib.formtools.wizard.views import (NamedUrlSessionWizardView,
NamedUrlCookieWizardView)
from django.contrib.formtools.tests.wizard.forms import get_request, Step1, Step2
class NamedWizardTests(object):
    """Mixin exercising the named-URL wizard views.

    Subclasses supply ``wizard_urlname`` plus the per-storage POST payloads
    (``wizard_step_1_data``, ``wizard_step_data``).
    """
    urls = 'django.contrib.formtools.tests.wizard.namedwizardtests.urls'
    def setUp(self):
        """Create the user referenced by the form1 step data."""
        self.testuser, created = User.objects.get_or_create(username='testuser1')
        self.wizard_step_data[0]['form1-user'] = self.testuser.pk
    def test_initial_call(self):
        """The start URL redirects to the first step with step metadata set."""
        response = self.client.get(reverse('%s_start' % self.wizard_urlname))
        self.assertEqual(response.status_code, 302)
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        wizard = response.context['wizard']
        self.assertEqual(wizard['steps'].current, 'form1')
        self.assertEqual(wizard['steps'].step0, 0)
        self.assertEqual(wizard['steps'].step1, 1)
        self.assertEqual(wizard['steps'].last, 'form4')
        self.assertEqual(wizard['steps'].prev, None)
        self.assertEqual(wizard['steps'].next, 'form2')
        self.assertEqual(wizard['steps'].count, 4)
        self.assertEqual(wizard['url_name'], self.wizard_urlname)
    def test_initial_call_with_params(self):
        """GET parameters on the start URL survive the redirect."""
        get_params = {'getvar1': 'getval1', 'getvar2': 'getval2'}
        response = self.client.get(reverse('%s_start' % self.wizard_urlname),
                                   get_params)
        self.assertEqual(response.status_code, 302)
        # Test for proper redirect GET parameters
        location = response['Location']
        self.assertNotEqual(location.find('?'), -1)
        querydict = QueryDict(location[location.find('?') + 1:])
        self.assertEqual(dict(querydict.items()), get_params)
    def test_form_post_error(self):
        """Posting incomplete data re-renders the step with field errors."""
        response = self.client.post(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}),
            self.wizard_step_1_data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        self.assertEqual(response.context['wizard']['form'].errors,
                         {'name': [u'This field is required.'],
                          'user': [u'This field is required.']})
    def test_form_post_success(self):
        """Posting valid data advances (via redirect) to the next step."""
        response = self.client.post(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}),
            self.wizard_step_data[0])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        wizard = response.context['wizard']
        self.assertEqual(wizard['steps'].current, 'form2')
        self.assertEqual(wizard['steps'].step0, 1)
        self.assertEqual(wizard['steps'].prev, 'form1')
        self.assertEqual(wizard['steps'].next, 'form3')
    def test_form_stepback(self):
        """Posting 'wizard_goto_step' navigates back to the previous step."""
        response = self.client.get(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        response = self.client.post(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}),
            self.wizard_step_data[0])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        response = self.client.post(
            reverse(self.wizard_urlname, kwargs={
                'step': response.context['wizard']['steps'].current
            }), {'wizard_goto_step': response.context['wizard']['steps'].prev})
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
    def test_form_jump(self):
        """A step can be addressed directly through its named URL."""
        response = self.client.get(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        response = self.client.get(
            reverse(self.wizard_urlname, kwargs={'step': 'form3'}))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form3')
    def test_form_finish(self):
        """Walking through all four steps yields the full collected data."""
        response = self.client.get(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[0])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        post_data = self.wizard_step_data[1]
        post_data['form2-file1'] = open(__file__)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            post_data)
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form3')
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[2])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form4')
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[3])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        all_data = response.context['form_list']
        self.assertEqual(all_data[1]['file1'].read(), open(__file__).read())
        del all_data[1]['file1']
        self.assertEqual(all_data, [
            {'name': u'Pony', 'thirsty': True, 'user': self.testuser},
            {'address1': u'123 Main St', 'address2': u'Djangoland'},
            {'random_crap': u'blah blah'},
            [{'random_crap': u'blah blah'}, {'random_crap': u'blah blah'}]])
    def test_cleaned_data(self):
        """all_cleaned_data merges every step, including uploaded files."""
        response = self.client.get(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}))
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[0])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        post_data = self.wizard_step_data[1]
        post_data['form2-file1'] = open(__file__)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            post_data)
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        step2_url = reverse(self.wizard_urlname, kwargs={'step': 'form2'})
        response = self.client.get(step2_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        self.assertEqual(response.context['wizard']['form'].files['form2-file1'].read(), open(__file__).read())
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[2])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[3])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        all_data = response.context['all_cleaned_data']
        self.assertEqual(all_data['file1'].read(), open(__file__).read())
        del all_data['file1']
        self.assertEqual(
            all_data,
            {'name': u'Pony', 'thirsty': True, 'user': self.testuser,
             'address1': u'123 Main St', 'address2': u'Djangoland',
             'random_crap': u'blah blah', 'formset-form4': [
                {'random_crap': u'blah blah'},
                {'random_crap': u'blah blah'}
            ]})
    def test_manipulated_data(self):
        """Clearing the storage mid-run drops the data and restarts at form1."""
        response = self.client.get(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}))
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[0])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        post_data = self.wizard_step_data[1]
        post_data['form2-file1'] = open(__file__)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            post_data)
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[2])
        loc = response['Location']
        response = self.client.get(loc)
        self.assertEqual(response.status_code, 200, loc)
        # Drop both possible storages (session and cookie) to simulate tampering.
        self.client.cookies.pop('sessionid', None)
        self.client.cookies.pop('wizard_cookie_contact_wizard', None)
        response = self.client.post(
            reverse(self.wizard_urlname,
                    kwargs={'step': response.context['wizard']['steps'].current}),
            self.wizard_step_data[3])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
    def test_form_reset(self):
        """'?reset=1' on the start URL clears progress back to form1."""
        response = self.client.post(
            reverse(self.wizard_urlname, kwargs={'step': 'form1'}),
            self.wizard_step_data[0])
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        response = self.client.get(
            '%s?reset=1' % reverse('%s_start' % self.wizard_urlname))
        self.assertEqual(response.status_code, 302)
        response = self.client.get(response['Location'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
class NamedSessionWizardTests(NamedWizardTests, TestCase):
    """Run the named-URL wizard tests against session-backed storage."""
    wizard_urlname = 'nwiz_session'
    # Deliberately incomplete POST for form1 (management data only).
    wizard_step_1_data = {
        'session_contact_wizard-current_step': 'form1',
    }
    # Valid POST payloads for each of the four wizard steps.
    wizard_step_data = (
        {
            'form1-name': 'Pony',
            'form1-thirsty': '2',
            'session_contact_wizard-current_step': 'form1',
        },
        {
            'form2-address1': '123 Main St',
            'form2-address2': 'Djangoland',
            'session_contact_wizard-current_step': 'form2',
        },
        {
            'form3-random_crap': 'blah blah',
            'session_contact_wizard-current_step': 'form3',
        },
        {
            'form4-INITIAL_FORMS': '0',
            'form4-TOTAL_FORMS': '2',
            'form4-MAX_NUM_FORMS': '0',
            'form4-0-random_crap': 'blah blah',
            'form4-1-random_crap': 'blah blah',
            'session_contact_wizard-current_step': 'form4',
        }
    )
class NamedCookieWizardTests(NamedWizardTests, TestCase):
    """Run the named-URL wizard tests against cookie-backed storage."""
    wizard_urlname = 'nwiz_cookie'
    # Deliberately incomplete POST for form1 (management data only).
    wizard_step_1_data = {
        'cookie_contact_wizard-current_step': 'form1',
    }
    # Valid POST payloads for each of the four wizard steps.
    wizard_step_data = (
        {
            'form1-name': 'Pony',
            'form1-thirsty': '2',
            'cookie_contact_wizard-current_step': 'form1',
        },
        {
            'form2-address1': '123 Main St',
            'form2-address2': 'Djangoland',
            'cookie_contact_wizard-current_step': 'form2',
        },
        {
            'form3-random_crap': 'blah blah',
            'cookie_contact_wizard-current_step': 'form3',
        },
        {
            'form4-INITIAL_FORMS': '0',
            'form4-TOTAL_FORMS': '2',
            'form4-MAX_NUM_FORMS': '0',
            'form4-0-random_crap': 'blah blah',
            'form4-1-random_crap': 'blah blah',
            'cookie_contact_wizard-current_step': 'form4',
        }
    )
class NamedFormTests(object):
    """Mixin for revalidation checks; subclasses supply formwizard_class."""
    urls = 'django.contrib.formtools.tests.wizard.namedwizardtests.urls'
    def test_revalidation(self):
        """After rendering 'done' with no validated data, the wizard's
        storage resets to the first step."""
        request = get_request()
        testform = self.formwizard_class.as_view(
            [('start', Step1), ('step2', Step2)],
            url_name=self.wizard_urlname)
        response, instance = testform(request, step='done')
        instance.render_done(None)
        self.assertEqual(instance.storage.current_step, 'start')
class TestNamedUrlSessionWizardView(NamedUrlSessionWizardView):
    """Session wizard whose dispatch also hands back the view instance so
    tests can inspect its state."""
    def dispatch(self, request, *args, **kwargs):
        parent_dispatch = super(TestNamedUrlSessionWizardView, self).dispatch
        return parent_dispatch(request, *args, **kwargs), self
class TestNamedUrlCookieWizardView(NamedUrlCookieWizardView):
    """Cookie wizard whose dispatch also hands back the view instance so
    tests can inspect its state."""
    def dispatch(self, request, *args, **kwargs):
        parent_dispatch = super(TestNamedUrlCookieWizardView, self).dispatch
        return parent_dispatch(request, *args, **kwargs), self
class NamedSessionFormTests(NamedFormTests, TestCase):
    """Revalidation checks against the session-backed named-URL wizard."""
    formwizard_class = TestNamedUrlSessionWizardView
    wizard_urlname = 'nwiz_session'
class NamedCookieFormTests(NamedFormTests, TestCase):
    """Revalidation checks against the cookie-backed named-URL wizard."""
    formwizard_class = TestNamedUrlCookieWizardView
    wizard_urlname = 'nwiz_cookie'
| mit |
n0trax/ansible | lib/ansible/modules/packaging/language/cpanm.py | 14 | 6523 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Franck Cuny <franck@lumberjaph.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
description:
- Manage Perl library dependencies.
version_added: "1.6"
options:
name:
description:
- The name of the Perl library to install. You may use the "full distribution path", e.g. MIYAGAWA/Plack-0.99_05.tar.gz
required: false
default: null
aliases: ["pkg"]
from_path:
description:
- The local directory from where to install
required: false
default: null
notest:
description:
- Do not run unit tests
required: false
default: false
locallib:
description:
- Specify the install base to install modules
required: false
default: false
mirror:
description:
- Specifies the base URL for the CPAN mirror to use
required: false
default: false
mirror_only:
description:
- Use the mirror's index file instead of the CPAN Meta DB
required: false
default: false
installdeps:
description:
- Only install dependencies
required: false
default: false
version_added: "2.0"
version:
description:
- minimum version of perl module to consider acceptable
required: false
default: false
version_added: "2.1"
system_lib:
description:
- Use this if you want to install modules to the system perl include path. You must be root or have "passwordless" sudo for this to work.
- This uses the cpanm commandline option '--sudo', which has nothing to do with ansible privilege escalation.
required: false
default: false
version_added: "2.0"
aliases: ['use_sudo']
executable:
description:
- Override the path to the cpanm executable
required: false
default: null
version_added: "2.1"
notes:
- Please note that U(http://search.cpan.org/dist/App-cpanminus/bin/cpanm, cpanm) must be installed on the remote host.
author: "Franck Cuny (@franckcuny)"
'''
EXAMPLES = '''
# install Dancer perl package
- cpanm:
name: Dancer
# install version 0.99_05 of the Plack perl package
- cpanm:
name: MIYAGAWA/Plack-0.99_05.tar.gz
# install Dancer into the specified locallib
- cpanm:
name: Dancer
locallib: /srv/webapps/my_app/extlib
# install perl dependencies from local directory
- cpanm:
from_path: /srv/webapps/my_app/src/
# install Dancer perl package without running the unit tests in indicated locallib
- cpanm:
name: Dancer
notest: True
locallib: /srv/webapps/my_app/extlib
# install Dancer perl package from a specific mirror
- cpanm:
name: Dancer
mirror: 'http://cpan.cpantesters.org/'
# install Dancer perl package into the system root path
- cpanm:
name: Dancer
system_lib: yes
# install Dancer if it's not already installed
# OR the installed version is older than version 1.0
- cpanm:
name: Dancer
version: '1.0'
'''
import os
from ansible.module_utils.basic import AnsibleModule
def _is_package_installed(module, name, locallib, cpanm, version):
    """Probe with ``perl -e 'use NAME [VERSION];'`` and report whether the
    module is already importable (optionally at a minimum *version*)."""
    if locallib:
        # Expose the locallib to the probe (and to later commands run by
        # this process).
        os.environ["PERL5LIB"] = "%s/lib/perl5" % locallib
    if version:
        probe = " perl -e ' use %s %s;'" % (name, version)
    else:
        probe = " perl -e ' use %s;'" % name
    rc, out, err = module.run_command(probe, check_rc=False)
    return rc == 0
def _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo):
    """Assemble the cpanm invocation string.

    The target is *from_path* when given, otherwise the package *name*;
    each enabled option appends its corresponding cpanm flag.
    """
    target = from_path if from_path else name
    cmd = cpanm + " " + target
    if notest is True:
        cmd += " -n"
    if locallib is not None:
        cmd += " -l " + locallib
    if mirror is not None:
        cmd += " --mirror " + mirror
    if mirror_only is True:
        cmd += " --mirror-only"
    if installdeps is True:
        cmd += " --installdeps"
    if use_sudo is True:
        # cpanm's own '--sudo' flag, unrelated to Ansible privilege escalation.
        cmd += " --sudo"
    return cmd
def _get_cpanm_path(module):
    """Resolve the cpanm executable: the explicit 'executable' option when
    set, otherwise the first cpanm found on the PATH (required)."""
    configured = module.params['executable']
    if configured:
        return configured
    return module.get_bin_path('cpanm', True)
def main():
    """Module entry point: install the requested Perl library via cpanm."""
    arg_spec = dict(
        name=dict(default=None, required=False, aliases=['pkg']),
        from_path=dict(default=None, required=False, type='path'),
        notest=dict(default=False, type='bool'),
        locallib=dict(default=None, required=False, type='path'),
        mirror=dict(default=None, required=False),
        mirror_only=dict(default=False, type='bool'),
        installdeps=dict(default=False, type='bool'),
        system_lib=dict(default=False, type='bool', aliases=['use_sudo']),
        version=dict(default=None, required=False),
        executable=dict(required=False, type='path'),
    )
    module = AnsibleModule(
        argument_spec=arg_spec,
        required_one_of=[['name', 'from_path']],
    )
    cpanm = _get_cpanm_path(module)
    name = module.params['name']
    from_path = module.params['from_path']
    notest = module.boolean(module.params.get('notest', False))
    locallib = module.params['locallib']
    mirror = module.params['mirror']
    mirror_only = module.params['mirror_only']
    installdeps = module.params['installdeps']
    use_sudo = module.params['system_lib']
    version = module.params['version']
    changed = False
    # Only run cpanm when the module is missing (or older than 'version').
    installed = _is_package_installed(module, name, locallib, cpanm, version)
    if not installed:
        cmd = _build_cmd_line(name, from_path, notest, locallib, mirror, mirror_only, installdeps, cpanm, use_sudo)
        rc_cpanm, out_cpanm, err_cpanm = module.run_command(cmd, check_rc=False)
        if rc_cpanm != 0:
            module.fail_json(msg=err_cpanm, cmd=cmd)
        # cpanm exits 0 but reports 'is up to date' when nothing was
        # installed; only report a change when neither stream says so.
        if (err_cpanm.find('is up to date') == -1 and out_cpanm.find('is up to date') == -1):
            changed = True
    module.exit_json(changed=changed, binary=cpanm, name=name)
if __name__ == '__main__':
    main()
| gpl-3.0 |
jpasosa/asturiana | plugins/apostrophePlugin/web/js/fckeditor/fckeditor.py | 86 | 4371 | """
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2009 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=None):
    """Convert the special characters '&', '<', '>', '"' and "'" to HTML
    character entities ('&amp;', '&lt;', '&gt;', '&quot;', '&#39;').

    The *replace* parameter is kept for backward compatibility; it defaults
    to plain ``str.replace`` substitution.  (The original default of
    ``string.replace`` no longer exists on Python 3, and the entity strings
    here had been mangled into identity replacements.)
    """
    if replace is None:
        def replace(s, old, new):
            return s.replace(old, new)
    text = replace(text, '&', '&amp;')  # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    text = replace(text, "'", '&#39;')
    return text
# The FCKeditor class
class FCKeditor(object):
    """Server-side wrapper that renders an FCKeditor instance, or a plain
    <textarea> fallback when the requesting browser is not supported."""

    def __init__(self, instanceName):
        self.InstanceName = instanceName
        self.BasePath = '/fckeditor/'
        self.Width = '100%'
        self.Height = '200'
        self.ToolbarSet = 'Default'
        self.Value = ''
        self.Config = {}

    def Create(self):
        """Return the HTML for this editor instance."""
        return self.CreateHtml()

    def CreateHtml(self):
        """Build the editor markup: hidden value/config fields plus an
        <iframe> for compatible browsers, a <textarea> otherwise."""
        HtmlValue = escape(self.Value)
        Html = ""
        if (self.IsCompatible()):
            File = "fckeditor.html"
            Link = "%seditor/%s?InstanceName=%s" % (
                self.BasePath,
                File,
                self.InstanceName
            )
            if (self.ToolbarSet is not None):
                # '&amp;' because Link is emitted inside an HTML attribute.
                Link += "&amp;Toolbar=%s" % self.ToolbarSet
            # Render the linked hidden field
            Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName,
                self.InstanceName,
                HtmlValue
            )
            # Render the configurations hidden field
            Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
                self.InstanceName,
                self.GetConfigFieldString()
            )
            # Render the editor iframe.  Triple underscore matches the
            # '___Config' id above (was '\__Frame', which injected a literal
            # backslash into the element id).
            Html += "<iframe id=\"%s___Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
                self.InstanceName,
                Link,
                self.Width,
                self.Height
            )
        else:
            # Percentage dimensions pass through unchanged; bare numbers get
            # a 'px' suffix.  (Was find("%%"), which never matches the single
            # '%' character in values like '100%'.)
            if (self.Width.find("%") < 0):
                WidthCSS = "%spx" % self.Width
            else:
                WidthCSS = self.Width
            if (self.Height.find("%") < 0):
                HeightCSS = "%spx" % self.Height
            else:
                HeightCSS = self.Height
            Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
                self.InstanceName,
                WidthCSS,
                HeightCSS,
                HtmlValue
            )
        return Html

    def IsCompatible(self):
        """Sniff HTTP_USER_AGENT and return True for browsers the editor
        supports (IE >= 5.5, Gecko >= 2003-02-10, Opera >= 9.5,
        WebKit >= 522)."""
        # 'in' instead of the removed-in-Python-3 dict.has_key().
        sAgent = os.environ.get("HTTP_USER_AGENT", "")
        if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
            i = sAgent.find("MSIE")
            iVersion = float(sAgent[i+5:i+5+3])
            if (iVersion >= 5.5):
                return True
            return False
        elif (sAgent.find("Gecko/") >= 0):
            i = sAgent.find("Gecko/")
            iVersion = int(sAgent[i+6:i+6+8])
            if (iVersion >= 20030210):
                return True
            return False
        elif (sAgent.find("Opera/") >= 0):
            i = sAgent.find("Opera/")
            iVersion = float(sAgent[i+6:i+6+4])
            if (iVersion >= 9.5):
                return True
            return False
        elif (sAgent.find("AppleWebKit/") >= 0):
            p = re.compile(r'AppleWebKit\/(\d+)', re.IGNORECASE)
            m = p.search(sAgent)
            # int() is required: comparing the matched *string* to 522 was
            # always True on Python 2 and a TypeError on Python 3.
            if (int(m.group(1)) >= 522):
                return True
            return False
        else:
            return False

    def GetConfigFieldString(self):
        """Serialize self.Config as an HTML-escaped query string
        ('k=v' pairs joined by '&amp;'); falsy values are skipped and
        'true'/'false' are emitted unescaped."""
        sParams = ""
        bFirst = True
        for sKey in self.Config.keys():
            sValue = self.Config[sKey]
            if (not bFirst):
                # '&amp;' because the result is placed in an HTML attribute.
                sParams += "&amp;"
            else:
                bFirst = False
            if (sValue):
                k = escape(sKey)
                v = escape(sValue)
                if (sValue == "true"):
                    sParams += "%s=true" % k
                elif (sValue == "false"):
                    sParams += "%s=false" % k
                else:
                    sParams += "%s=%s" % (k, v)
        return sParams
| mit |
yanlend/scikit-learn | doc/datasets/mldata_fixture.py | 367 | 1183 | """Fixture module to skip the datasets loading when offline
Mock urllib2 access to mldata.org and create a temporary data folder.
"""
from os import makedirs
from os.path import join
import numpy as np
import tempfile
import shutil
from sklearn import datasets
from sklearn.utils.testing import install_mldata_mock
from sklearn.utils.testing import uninstall_mldata_mock
def globs(globs):
    """Register a fresh temporary data home in the doctest namespace."""
    global custom_data_home
    # Scratch directory with the empty 'mldata' sub-folder the data
    # fetcher expects.
    custom_data_home = tempfile.mkdtemp()
    mldata_dir = join(custom_data_home, 'mldata')
    makedirs(mldata_dir)
    globs['custom_data_home'] = custom_data_home
    return globs
def setup_module():
    """Install the mldata.org mock so tests never touch the network."""
    mock_datasets = {
        'mnist-original': {
            'data': np.empty((70000, 784)),
            'label': np.repeat(np.arange(10, dtype='d'), 7000),
        },
        'iris': {
            'data': np.empty((150, 4)),
        },
        'datasets-uci-iris': {
            'double0': np.empty((150, 4)),
            'class': np.empty((150,)),
        },
    }
    install_mldata_mock(mock_datasets)
def teardown_module():
    """Undo setup_module(): restore the real fetcher and clean up."""
    # Remove the urllib mock installed by setup_module().
    uninstall_mldata_mock()
    # Delete the temporary data home created by globs().
    shutil.rmtree(custom_data_home)
| bsd-3-clause |
Russell-IO/ansible | test/runner/lib/sanity/pep8.py | 28 | 6213 | """Sanity test for PEP 8 style guidelines using pycodestyle."""
from __future__ import absolute_import, print_function
import os
import re
from lib.sanity import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
)
from lib.util import (
SubprocessError,
display,
run_command,
)
from lib.config import (
SanityConfig,
)
from lib.test import (
calculate_best_confidence,
)
PEP8_SKIP_PATH = 'test/sanity/pep8/skip.txt'
PEP8_LEGACY_PATH = 'test/sanity/pep8/legacy-files.txt'
class Pep8Test(SanitySingleVersion):
    """Sanity test for PEP 8 style guidelines using pycodestyle."""
    def test(self, args, targets):
        """Run pycodestyle over the target files and collate results.

        :type args: SanityConfig
        :type targets: SanityTargets
        :rtype: TestResult
        """
        # Paths excluded from checking entirely, and paths still checked
        # under the relaxed legacy rule set.
        with open(PEP8_SKIP_PATH, 'r') as skip_fd:
            skip_paths = skip_fd.read().splitlines()
        with open(PEP8_LEGACY_PATH, 'r') as legacy_fd:
            legacy_paths = legacy_fd.read().splitlines()
        # Error codes tolerated on legacy files vs. ignored everywhere.
        with open('test/sanity/pep8/legacy-ignore.txt', 'r') as ignore_fd:
            legacy_ignore = set(ignore_fd.read().splitlines())
        with open('test/sanity/pep8/current-ignore.txt', 'r') as ignore_fd:
            current_ignore = sorted(ignore_fd.read().splitlines())
        skip_paths_set = set(skip_paths)
        legacy_paths_set = set(legacy_paths)
        # Check python sources and scripts under bin/, minus the skip list.
        paths = sorted(i.path for i in targets.include if (os.path.splitext(i.path)[1] == '.py' or i.path.startswith('bin/')) and i.path not in skip_paths_set)
        cmd = [
            args.python_executable,
            '-m', 'pycodestyle',
            '--max-line-length', '160',
            '--config', '/dev/null',
            '--ignore', ','.join(sorted(current_ignore)),
        ] + paths
        if paths:
            try:
                stdout, stderr = run_command(args, cmd, capture=True)
                status = 0
            except SubprocessError as ex:
                # pycodestyle exits non-zero when it finds issues; the
                # findings are still on stdout.
                stdout = ex.stdout
                stderr = ex.stderr
                status = ex.status
            # Anything on stderr means the tool itself failed.
            if stderr:
                raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
        else:
            stdout = None
        if args.explain:
            return SanitySuccess(self.name)
        if stdout:
            # Example line: "path/file.py:12:34: E501 message text"
            pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[WE][0-9]{3}) (?P<message>.*)$'
            results = [re.search(pattern, line).groupdict() for line in stdout.splitlines()]
        else:
            results = []
        results = [SanityMessage(
            message=r['message'],
            path=r['path'],
            line=int(r['line']),
            column=int(r['column']),
            level='warning' if r['code'].startswith('W') else 'error',
            code=r['code'],
        ) for r in results]
        failed_result_paths = set([result.path for result in results])
        used_paths = set(paths)
        errors = []
        summary = {}
        # Audit the legacy list: flag entries that no longer exist or no
        # longer need the relaxed rules (line numbers are 1-based).
        line = 0
        for path in legacy_paths:
            line += 1
            if not os.path.exists(path):
                # Keep files out of the list which no longer exist in the repo.
                errors.append(SanityMessage(
                    code='A101',
                    message='Remove "%s" since it does not exist' % path,
                    path=PEP8_LEGACY_PATH,
                    line=line,
                    column=1,
                    confidence=calculate_best_confidence(((PEP8_LEGACY_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
                ))
            if path in used_paths and path not in failed_result_paths:
                # Keep files out of the list which no longer require the relaxed rule set.
                errors.append(SanityMessage(
                    code='A201',
                    message='Remove "%s" since it passes the current rule set' % path,
                    path=PEP8_LEGACY_PATH,
                    line=line,
                    column=1,
                    confidence=calculate_best_confidence(((PEP8_LEGACY_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
                ))
        # Same existence audit for the skip list.
        line = 0
        for path in skip_paths:
            line += 1
            if not os.path.exists(path):
                # Keep files out of the list which no longer exist in the repo.
                errors.append(SanityMessage(
                    code='A101',
                    message='Remove "%s" since it does not exist' % path,
                    path=PEP8_SKIP_PATH,
                    line=line,
                    column=1,
                    confidence=calculate_best_confidence(((PEP8_SKIP_PATH, line), (path, 0)), args.metadata) if args.metadata.changes else None,
                ))
        for result in results:
            if result.path in legacy_paths_set and result.code in legacy_ignore:
                # Files on the legacy list are permitted to have errors on the legacy ignore list.
                # However, we want to report on their existence to track progress towards eliminating these exceptions.
                display.info('PEP 8: %s (legacy)' % result, verbosity=3)
                # Aggregate by code + message shape (digits squashed).
                key = '%s %s' % (result.code, re.sub('[0-9]+', 'NNN', result.message))
                if key not in summary:
                    summary[key] = 0
                summary[key] += 1
            else:
                # Files not on the legacy list and errors not on the legacy ignore list are PEP 8 policy errors.
                errors.append(result)
        if summary:
            lines = []
            count = 0
            for key in sorted(summary):
                count += summary[key]
                lines.append('PEP 8: %5d %s' % (summary[key], key))
            display.info('PEP 8: There were %d different legacy issues found (%d total):' % (len(summary), count), verbosity=1)
            display.info('PEP 8: Count Code Message', verbosity=1)
            for line in lines:
                display.info(line, verbosity=1)
        if errors:
            return SanityFailure(self.name, messages=errors)
        return SanitySuccess(self.name)
| gpl-3.0 |
waseem18/bedrock | vendor-local/lib/python/south/creator/freezer.py | 129 | 7258 | """
Handles freezing of models into FakeORMs.
"""
from __future__ import print_function
import sys
from django.db import models
from django.db.models.base import ModelBase, Model
from django.contrib.contenttypes.generic import GenericRelation
from south.utils import get_attribute, auto_through
from south import modelsinspector
from south.utils.py3 import string_types
def freeze_apps(apps):
    """
    Takes a list of app labels, and returns a string of their frozen form.

    NOTE(review): despite the wording above, this actually returns the
    dict of frozen model definitions; freeze_apps_to_string() produces
    the pretty-printed string form.
    """
    if isinstance(apps, string_types):
        apps = [apps]
    frozen_models = set()
    # For each app, add in all its models
    for app in apps:
        for model in models.get_models(models.get_app(app)):
            # Only add if it's not abstract or proxy
            if not model._meta.abstract and not getattr(model._meta, "proxy", False):
                frozen_models.add(model)
    # Now, add all the dependencies (FKs, bases, ...) of those models.
    for model in list(frozen_models):
        frozen_models.update(model_dependencies(model))
    # Serialise!
    model_defs = {}
    model_classes = {}
    for model in frozen_models:
        model_defs[model_key(model)] = prep_for_freeze(model)
        model_classes[model_key(model)] = model
    # Check for any custom fields that failed to freeze (introspection
    # returned None for them) and report each one.
    missing_fields = False
    for key, fields in model_defs.items():
        for field_name, value in fields.items():
            if value is None:
                missing_fields = True
                model_class = model_classes[key]
                field_class = model_class._meta.get_field_by_name(field_name)[0]
                print(" ! Cannot freeze field '%s.%s'" % (key, field_name))
                print(" ! (this field has class %s.%s)" % (field_class.__class__.__module__, field_class.__class__.__name__))
    if missing_fields:
        # Fatal: a frozen ORM with missing fields would be useless.
        print("")
        print(" ! South cannot introspect some fields; this is probably because they are custom")
        print(" ! fields. If they worked in 0.6 or below, this is because we have removed the")
        print(" ! models parser (it often broke things).")
        print(" ! To fix this, read http://south.aeracode.org/wiki/MyFieldsDontWork")
        sys.exit(1)
    return model_defs
def freeze_apps_to_string(apps):
    """Freeze the given app labels and return the pretty-printed form."""
    frozen = freeze_apps(apps)
    return pprint_frozen_models(frozen)
###
def model_key(model):
    """Return the 'appname.modelname' (lower-cased) key for *model*."""
    meta = model._meta
    return "{0}.{1}".format(meta.app_label, meta.object_name.lower())
def prep_for_freeze(model):
    """
    Take a model and return the ready-to-serialise dict (all you need
    to do is just pretty-print it).
    """
    fields = modelsinspector.get_model_fields(model, m2m=True)
    # Drop attributes (like 'choices') that have no database impact.
    for name in list(fields.keys()):
        fields[name] = remove_useless_attributes(fields[name])
    # Attach the (pruned) Meta options under the reserved 'Meta' key.
    meta = remove_useless_meta(modelsinspector.get_model_meta(model))
    fields['Meta'] = meta
    # Special entries of our own: the object name (not eval'able) and,
    # for unmanaged models only, the managed flag.
    meta['object_name'] = model._meta.object_name
    if not getattr(model._meta, "managed", True):
        meta['managed'] = repr(model._meta.managed)
    return fields
### Dependency resolvers
def model_dependencies(model, checked_models=None):
    """
    Returns a set of models this one depends on to be defined; things like
    OneToOneFields as ID, ForeignKeys everywhere, etc.

    *checked_models* is shared across the recursion to avoid revisiting
    models (and to terminate on circular references).
    """
    depends = set()
    checked_models = checked_models or set()
    # Get deps for each field
    for field in model._meta.fields + model._meta.many_to_many:
        depends.update(field_dependencies(field, checked_models))
    # Add in any non-abstract bases
    for base in model.__bases__:
        if issubclass(base, models.Model) and hasattr(base, '_meta') and not base._meta.abstract:
            depends.add(base)
    # Now recurse: worklist of newly discovered, not-yet-checked models.
    new_to_check = depends - checked_models
    while new_to_check:
        checked_model = new_to_check.pop()
        if checked_model == model or checked_model in checked_models:
            continue
        checked_models.add(checked_model)
        deps = model_dependencies(checked_model, checked_models)
        # Loop through dependencies...
        for dep in deps:
            # If the new dep is not already checked, add to the queue
            if (dep not in depends) and (dep not in new_to_check) and (dep not in checked_models):
                new_to_check.add(dep)
            depends.add(dep)
    return depends
def field_dependencies(field, checked_models=None):
    """Return the set of models *field* requires to be defined.

    Inspects the field's introspection details (args/kwargs that South
    knows about) and collects every model class referenced by them.
    """
    checked_models = checked_models or set()
    depends = set()
    arg_defs, kwarg_defs = modelsinspector.matching_details(field)
    for attrname, options in arg_defs + list(kwarg_defs.values()):
        # Auto-created through models are handled implicitly.
        if options.get("ignore_if_auto_through", False) and auto_through(field):
            continue
        if options.get("is_value", False):
            value = attrname
        elif attrname == 'rel.through' and hasattr(getattr(field, 'rel', None), 'through_model'):
            # Hack for django 1.1 and below, where the through model is stored
            # in rel.through_model while rel.through stores only the model name.
            value = field.rel.through_model
        else:
            try:
                value = get_attribute(field, attrname)
            except AttributeError:
                if options.get("ignore_missing", False):
                    continue
                raise
        # Normalise instances to their class; skip non-model values.
        if isinstance(value, Model):
            value = value.__class__
        if not isinstance(value, ModelBase):
            continue
        # Proxy models depend on the concrete model they proxy.
        if getattr(value._meta, "proxy", False):
            value = value._meta.proxy_for_model
        if value in checked_models:
            continue
        checked_models.add(value)
        depends.add(value)
        # Transitively pull in that model's own dependencies.
        depends.update(model_dependencies(value, checked_models))
    return depends
### Prettyprinters
def pprint_frozen_models(models):
    """Pretty-print a frozen-model dict as python source, keys sorted."""
    entries = []
    for name, fields in sorted(models.items()):
        entries.append("%r: %s" % (name, pprint_fields(fields)))
    return "{\n %s\n }" % ",\n ".join(entries)
def pprint_fields(fields):
    """Pretty-print one model's field dict as python source, keys sorted."""
    body = ",\n ".join(
        "%r: %r" % (name, defn) for name, defn in sorted(fields.items())
    )
    return "{\n %s\n }" % body
### Output sanitisers
# Field keyword arguments that never affect the database schema.
USELESS_KEYWORDS = ["choices", "help_text", "verbose_name"]
USELESS_DB_KEYWORDS = ["related_name", "default", "blank"]  # Important for ORM, not for DB.
INDEX_KEYWORDS = ["db_index"]


def remove_useless_attributes(field, db=False, indexes=False):
    "Removes useless (for database) attributes from the field's defn."
    # Assemble the removal list; extend it with the ORM-only and index
    # keywords when the caller asks for a DB-level view.
    keywords = list(USELESS_KEYWORDS)
    if db:
        keywords.extend(USELESS_DB_KEYWORDS)
    if indexes:
        keywords.extend(INDEX_KEYWORDS)
    if field:
        kwargs = field[2]
        for name in keywords:
            kwargs.pop(name, None)
    return field
# Meta options that have no effect on the database schema.
USELESS_META = ["verbose_name", "verbose_name_plural"]


def remove_useless_meta(meta):
    "Removes useless (for database) attributes from the table's meta."
    if meta:
        for name in USELESS_META:
            meta.pop(name, None)
    return meta
| mpl-2.0 |
dermoth/gramps | gramps/gen/utils/place.py | 4 | 31276 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007-2009 B. Malengier
# Copyright (C) 2009 Swoon on bug tracker
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
import math
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
#-------------------------------------------------------------------------
#
# begin localisation part
#
#-------------------------------------------------------------------------
# translation of N/S/E/W, make sure translator understands
# The "1" placeholder stands for a numeric degree value; it is only
# used to extract the translated direction letter below.
degrees = "1"
North = _("%(north_latitude)s N") % {'north_latitude' : degrees}
South = _("%(south_latitude)s S") % {'south_latitude' : degrees}
East = _("%(east_longitude)s E") % {'east_longitude' : degrees}
West = _("%(west_longitude)s W") % {'west_longitude' : degrees}
# extract letters we really need (drop the "1" placeholder and spaces)
North = North.replace("1", " ").strip()
South = South.replace("1", " ").strip()
East = East.replace("1", " ").strip()
West = West.replace("1", " ").strip()
# build dictionary with translation en to local language
translate_en_loc = {}
translate_en_loc['N'] = North
translate_en_loc['S'] = South
translate_en_loc['E'] = East
translate_en_loc['W'] = West
# keep translation only if it does not conflict with english:
# e.g. a locale whose "south" letter is 'N' would make parsing
# ambiguous, so fall back to the english letters entirely.
if 'N' == South or 'S' == North or 'E' == West or 'W' == East:
    translate_en_loc['N'] = 'N'
    translate_en_loc['S'] = 'S'
    translate_en_loc['E'] = 'E'
    translate_en_loc['W'] = 'W'
# end localisation part
#------------------
#
# helper functions
#
#------------------
def __convert_structure_to_float(sign, degs, mins=0, secs=0.0):
    """Combine sign, degrees, minutes and seconds into one float
    degree value.
    """
    value = float(degs)
    if mins is not None:
        value += float(mins) / 60.
    if secs is not None:
        value += secs / 3600.
    if sign == "-":
        value = -value
    return value
def __convert_using_float_repr(stringValue):
    """Try to parse *stringValue* as a plain decimal float.

    Returns the float value, or None when the string is not a float.
    """
    try:
        return float(stringValue)
    except ValueError:
        return None
def __convert_using_colon_repr(stringValue):
    """Try to convert the string using the colon representation
    ([+|-]DD:MM[:SS.SS]).  Returns the float value or None on any
    malformed input.
    """
    if stringValue.find(r':') == -1:
        return None
    l = stringValue.split(':')
    # Only DD:MM and DD:MM:SS forms are accepted.
    if len(l) < 2 or len(l) > 3:
        return None
    l[0] = l[0].strip()
    # if no characters before ':' nothing useful is input!
    if len(l[0]) == 0:
        return None
    if l[0][0] in ['+', '-']:
        sign = l[0][0]
        l[0] = l[0][1:].strip()
        # regard a second sign as an error
        # NOTE(review): input like "+ :30" leaves l[0] empty here and
        # raises IndexError instead of returning None -- confirm intended.
        if l[0][0] in ['+', '-']:
            return None
    else:
        sign = '+'
    try:
        # degrees must be a non-negative integer (sign handled above)
        degs = int(l[0])
        if degs < 0:
            return None
    except:
        return None
    try:
        # minutes must be an integer in [0, 60)
        mins = int(l[1])
        if mins < 0 or mins >= 60:
            return None
    except:
        return None
    secs = 0.
    if len(l) == 3:
        try:
            # seconds must be a float in [0, 60)
            secs = float(l[2])
            if secs < 0. or secs >= 60.:
                return None
        except:
            return None
    return __convert_structure_to_float(sign, degs, mins, secs)
def __convert_using_classic_repr(stringValue, typedeg):
    """Try to convert the string using the classic degree notation,
    e.g. 50°52'21.92"N.  *typedeg* is 'lat' or 'lon' and selects the
    accepted direction letters (N/S vs E/W).  Returns the float value
    or None on malformed input.
    """
    # '_' is used internally as the degree marker, so it may not occur
    # in the input itself.
    if stringValue.find(r'_') != -1:
        return None # not a valid lat or lon
    #exchange some characters: normalise all degree markers to '_'
    stringValue = stringValue.replace('°', r'_')
    #allow to input ° as #, UTF-8 code c2b00a
    stringValue = stringValue.replace('º', r'_')
    #allow to input º as #, UTF-8 code c2b a0a
    stringValue = stringValue.replace(r'#', r'_')
    #allow to input " as ''
    stringValue = stringValue.replace(r"''", r'"')
    #allow some special unicode symbols (double/single prime)
    stringValue = stringValue.replace('\u2033', r'"')
    stringValue = stringValue.replace('\u2032', r"'")
    #ignore spaces, a regex with \s* would be better here...
    stringValue = stringValue.replace(r' ', r'')
    stringValue = stringValue.replace(r'\t', r'')
    # get the degrees, must be present
    if stringValue.find(r'_') == -1:
        return None
    l = stringValue.split(r'_')
    if len(l) != 2:
        return None
    try:
        degs = int(l[0]) #degrees must be integer value
        if degs < 0:
            return None
    except:
        return None
    # next: minutes might be present once
    l2 = l[1].split(r"'")
    l3 = l2
    mins = 0
    # See if minutes might be decimal?
    # Then no seconds is supposed to be given
    if l2[0].find(r'.') > 0:
        # Split in integer and decimal parts
        l4 = l2[0].split(r".")
        # Set integer minutes
        l2[0] = l4[0]
        # Convert the decimal part of minutes to seconds
        try:
            lsecs = float('0.' + l4[1]) * 60.0
            # Set the seconds followed by direction letter N/S/W/E
            l2[1] = str(lsecs) + '"' + l2[1]
        except:
            return None
    if len(l2) > 2:
        return None
    if len(l2) == 2:
        l3 = [l2[1],]
        try:
            mins = int(l2[0]) #minutes must be integer value
            if mins < 0 or mins >= 60:
                return None
        except:
            return None
    # next: seconds might be present once
    l3 = l3[0].split(r'"')
    last = l3[0]
    secs = 0.
    if len(l3) > 2:
        return None
    if len(l3) == 2:
        last = l3[1]
        try:
            secs = float(l3[0])
            if secs < 0. or secs >= 60.:
                return None
        except:
            return None
    # last entry should be the direction letter, which fixes the sign
    if typedeg == 'lat':
        if last == 'N':
            sign = '+'
        elif last == 'S':
            sign = '-'
        else:
            return None
    elif typedeg == 'lon':
        if last == 'E':
            sign = '+'
        elif last == 'W':
            sign = '-'
        else:
            return None
    else:
        return None
    return __convert_structure_to_float(sign, degs, mins, secs)
def __convert_using_modgedcom_repr(val, typedeg):
    """Try the modified GEDCOM representation, where the direction
    letter [NSEW] is appended instead of prepended.  This particular
    representation is the result of value normalization done on values
    passed to this function.  Returns the float value or None.
    """
    # Candidate direction letters and the sign each one implies,
    # in the same search order as before (N before S, E before W).
    if typedeg == 'lat':
        directions = (('N', ''), ('S', '-'))
    else:
        directions = (('E', ''), ('W', '-'))
    stringValue = None
    for letter, sign in directions:
        pos = val.find(letter)
        if pos >= 0:
            stringValue = sign + val[:pos]
            break
    if stringValue is None:
        return None
    try:
        return float(stringValue)
    except ValueError:
        return None
def __convert_float_val(val, typedeg="lat"):
    """Convert an (english) lat/lon string in any supported notation
    to a float, or return None when no notation matches.

    Supported notations, tried in order: decimal degrees (XX.YYYY),
    colon form (XX:YY:ZZ), classic degree/minute/second with a
    direction letter, and the modified GEDCOM form XX.YYYY[NSWE].
    There is no range check on the degrees here; the individual
    parsers validate minutes/seconds and direction letters.
    """
    # Accept a decimal comma, but only when no decimal point is
    # present -- a number like 100,000.20 cannot appear in a lat/lon,
    # so this is unambiguous.
    if val.find(r'.') == -1:
        val = val.replace(',', '.')
    parsers = (
        lambda s: __convert_using_float_repr(s),
        lambda s: __convert_using_colon_repr(s),
        lambda s: __convert_using_classic_repr(s, typedeg),
        lambda s: __convert_using_modgedcom_repr(s, typedeg),
    )
    for parse in parsers:
        result = parse(val)
        if result is not None:
            return result
    # no format succeeded
    return None
#-------------------------------------------------------------------------
#
# conversion function
#
#-------------------------------------------------------------------------
def conv_lat_lon(latitude, longitude, format="D.D4"):
    """
    Convert given string latitude and longitude to a required format.

    :param latitude: Latitude
    :type latitude: string
    :param longitude: Longitude
    :type longitude: string
    :param format: Output format
    :type format: string
    :returns: a tuple of 2 strings, or a string (for ISO formats). If
        conversion fails: returns: (None, None) or None (for ISO formats)

    Possible formats:

    ========= ============================================================
    Format    Description
    ========= ============================================================
    'D.D4'    degree notation, 4 decimals
              eg +12.0154 , -124.3647
    'D.D8'    degree notation, 8 decimals (precision like ISO-DMS)
              eg +12.01543265 , -124.36473268
    'DEG'     degree, minutes, seconds notation
              eg 50°52'21.92''N , 124°52'21.92''E ° has UTF-8 code c2b00a
              or N50º52'21.92" , E14º52'21.92" º has UTF-8 code c2ba0a
              or N50º52.3456' , E14º52.9876' ; decimal minutes, no seconds
    'DEG-:'   degree, minutes, seconds notation with :
              eg -50:52:21.92 , 124:52:21.92
    'ISO-D'   ISO 6709 degree notation i.e. ±DD.DDDD±DDD.DDDD
    'ISO-DM'  ISO 6709 degree, minutes notation
              i.e. ±DDMM.MMM±DDDMM.MMM
    'ISO-DMS' ISO 6709 degree, minutes, seconds notation
              i.e. ±DDMMSS.SS±DDDMMSS.SS
    'GEDCOM'  degree notation with leading direction letter, up to six
              fraction digits, trailing zeros trimmed
    'RT90'    Output format for the Swedish coordinate system RT90
    ========= ============================================================

    Some generalities:

    * -90 <= latitude <= +90 with +00 the equator
    * -180 <= longitude < +180 with +000 prime meridian and -180 the 180th
      meridian
    """
    # we start the function changing latitude/longitude in english
    if latitude.find('N') == -1 and latitude.find('S') == -1:
        # entry is not in english, convert to english
        latitude = latitude.replace(translate_en_loc['N'], 'N')
        latitude = latitude.replace(translate_en_loc['S'], 'S')
    if longitude.find('E') == -1 and longitude.find('W') == -1:
        # entry is not in english, convert to english
        longitude = longitude.replace(translate_en_loc['W'], 'W')
        longitude = longitude.replace(translate_en_loc['E'], 'E')
    # take away leading spaces
    latitude = latitude.lstrip()
    longitude = longitude.lstrip()
    # check if first character is alpha i.e. N or S, put it last
    if len(latitude) > 1 and latitude[0].isalpha():
        latitude = latitude[1:] + latitude[0]
    # check if first character is alpha i.e. E or W, put it last
    if len(longitude) > 1 and longitude[0].isalpha():
        longitude = longitude[1:] + longitude[0]
    # convert to float
    lat_float = __convert_float_val(latitude, 'lat')
    lon_float = __convert_float_val(longitude, 'lon')
    # give output (localized if needed)
    if lat_float is None or lon_float is None:
        if format == "ISO-D" or format == "ISO-DM" or format == "ISO-DMS":
            return None
        else:
            return (None, None)
    # range check: latitude within [-90, 90], longitude within [-180, 180)
    if lat_float > 90. or lat_float < -90. \
           or lon_float >= 180. or lon_float < -180.:
        if format == "ISO-D" or format == "ISO-DM" or format == "ISO-DMS":
            return None
        else:
            return (None, None)
    if format == "D.D4":
        # correct possible roundoff error
        str_lon = "%.4f" % (lon_float)
        if str_lon == "180.0000":
            str_lon = "-180.0000"
        return ("%.4f" % lat_float, str_lon)
    if format == "D.D8" or format == "RT90":
        # correct possible roundoff error
        str_lon = "%.8f" % (lon_float)
        if str_lon == "180.00000000":
            str_lon = "-180.00000000"
        if format == "RT90":
            tx = __conv_WGS84_SWED_RT90(lat_float, lon_float)
            return ("%i" % tx[0], "%i" % tx[1])
        else:
            return ("%.8f" % lat_float, str_lon)
    if format == "GEDCOM":
        # The 5.5.1 spec is inconsistent. Length is supposedly 5 to 8 chars,
        # but the sample values are longer, using up to 6 fraction digits.
        # As a compromise, we will produce up to 6 fraction digits, but only
        # if necessary
        # correct possible roundoff error
        if lon_float >= 0:
            str_lon = "%.6f" % (lon_float)
            if str_lon == "180.000000":
                str_lon = "W180.000000"
            else:
                str_lon = "E" + str_lon
        else:
            str_lon = "W" + "%.6f" % (-lon_float)
        # trim trailing zeros from the fraction (keeping >= 1 digit)
        str_lon = str_lon[:-5] + str_lon[-5:].rstrip("0")
        str_lat = ("%s%.6f" % (("N", lat_float) if lat_float >= 0
                               else ("S", -lat_float)))
        str_lat = str_lat[:-5] + str_lat[-5:].rstrip("0")
        return (str_lat, str_lon)
    # split the float values into degree / minute / second components
    deg_lat = int(lat_float)
    deg_lon = int(lon_float)
    min_lat = int(60. * (lat_float - float(deg_lat)))
    min_lon = int(60. * (lon_float - float(deg_lon)))
    sec_lat = 3600. * (lat_float - float(deg_lat) - float(min_lat) / 60.)
    sec_lon = 3600. * (lon_float - float(deg_lon) - float(min_lon) / 60.)
    # dump minus sign on all, store minus sign. Carefull: int(-0.8)=0 !!
    if (deg_lat) < 0:
        deg_lat = -1 * deg_lat
    if (min_lat) < 0:
        min_lat = -1 * min_lat
    if (sec_lat) < 0.:
        sec_lat = -1. * sec_lat
    if (deg_lon) < 0:
        deg_lon = -1 * deg_lon
    if (min_lon) < 0:
        min_lon = -1 * min_lon
    if (sec_lon) < 0.:
        sec_lon = -1. * sec_lon
    # keep sign as -1* 0 = +0, so 0°2'S is given correct sign in ISO
    sign_lat = "+"
    dir_lat = ""
    if lat_float >= 0.:
        dir_lat = translate_en_loc['N']
    else:
        dir_lat = translate_en_loc['S']
        sign_lat = "-"
    sign_lon = "+"
    dir_lon = ""
    if lon_float >= 0.:
        dir_lon = translate_en_loc['E']
    else:
        dir_lon = translate_en_loc['W']
        sign_lon = "-"
    if format == "DEG":
        str_lat = ("%d°%02d'%05.2f\"" % (deg_lat, min_lat, sec_lat)) + dir_lat
        str_lon = ("%d°%02d'%05.2f\"" % (deg_lon, min_lon, sec_lon)) + dir_lon
        # correct possible roundoff error in seconds (a seconds value
        # that printed as 60.00 must carry into the minutes/degrees)
        if str_lat[-6-len(dir_lat)] == '6':
            if min_lat == 59:
                str_lat = ("%d°%02d'%05.2f\"" % (deg_lat+1, 0, 0.)) + dir_lat
            else:
                str_lat = ("%d°%02d'%05.2f\"" % (deg_lat, min_lat+1, 0.)) \
                          + dir_lat
        if str_lon[-6-len(dir_lon)] == '6':
            if min_lon == 59:
                if deg_lon == 179 and sign_lon == "+":
                    str_lon = ("%d°%02d'%05.2f\"" % (180, 0, 0.)) \
                              + translate_en_loc['W']
                else:
                    str_lon = ("%d°%02d'%05.2f\"" % (deg_lon+1, 0, 0.)) \
                              + dir_lon
            else:
                str_lon = ("%d°%02d'%05.2f\"" % (deg_lon, min_lon+1, 0.)) \
                          + dir_lon
        return (str_lat, str_lon)
    if format == "DEG-:":
        if sign_lat == "+":
            sign_lat = ""
        sign_lon_h = sign_lon
        if sign_lon == "+":
            sign_lon_h = ""
        str_lat = sign_lat + ("%d:%02d:%05.2f" % (deg_lat, min_lat, sec_lat))
        str_lon = sign_lon_h + ("%d:%02d:%05.2f" % (deg_lon, min_lon, sec_lon))
        # correct possible roundoff error in seconds
        if str_lat[-5] == '6':
            if min_lat == 59:
                str_lat = sign_lat + ("%d:%02d:%05.2f" % (deg_lat+1, 0, 0.))
            else:
                str_lat = sign_lat + \
                          ("%d:%02d:%05.2f" % (deg_lat, min_lat+1, 0.))
        if str_lon[-5] == '6':
            if min_lon == 59:
                if deg_lon == 179 and sign_lon == "+":
                    str_lon = '-' + ("%d:%02d:%05.2f" % (180, 0, 0.))
                else:
                    str_lon = sign_lon_h + \
                              ("%d:%02d:%05.2f" % (deg_lon+1, 0, 0.))
            else:
                str_lon = sign_lon_h + \
                          ("%d:%02d:%05.2f" % (deg_lon, min_lon+1, 0.))
        return (str_lat, str_lon)
    if format == "ISO-D": # ±DD.DDDD±DDD.DDDD
        str_lon = "%+09.4f" % (lon_float)
        # correct possible roundoff error
        if str_lon == "+180.0000":
            str_lon = "-180.0000"
        return ("%+08.4f" % lat_float) + str_lon
    if format == "ISO-DM": # ±DDMM.MMM±DDDMM.MMM
        min_fl_lat = float(min_lat)+ sec_lat/60.
        min_fl_lon = float(min_lon)+ sec_lon/60.
        str_lat = sign_lat + ("%02d%06.3f" % (deg_lat, min_fl_lat))
        str_lon = sign_lon + ("%03d%06.3f" % (deg_lon, min_fl_lon))
        # correct possible roundoff error
        if str_lat[3:] == "60.000":
            str_lat = sign_lat + ("%02d%06.3f" % (deg_lat+1, 0.))
        if str_lon[4:] == "60.000":
            if deg_lon == 179 and sign_lon == "+":
                str_lon = "-" + ("%03d%06.3f" % (180, 0.))
            else:
                str_lon = sign_lon + ("%03d%06.3f" % (deg_lon+1, 0.))
        return str_lat + str_lon
    if format == "ISO-DMS": # ±DDMMSS.SS±DDDMMSS.SS
        str_lat = sign_lat + ("%02d%02d%06.3f" % (deg_lat, min_lat, sec_lat))
        str_lon = sign_lon + ("%03d%02d%06.3f" % (deg_lon, min_lon, sec_lon))
        # correct possible roundoff error
        if str_lat[5:] == "60.000":
            if min_lat == 59:
                str_lat = sign_lat + ("%02d%02d%06.3f" % (deg_lat+1, 0, 0.))
            else:
                str_lat = sign_lat + \
                          ("%02d%02d%06.3f" % (deg_lat, min_lat +1, 0.))
        if str_lon[6:] == "60.000":
            if min_lon == 59:
                if deg_lon == 179 and sign_lon == "+":
                    str_lon = "-" + ("%03d%02d%06.3f" % (180, 0, 0))
                else:
                    str_lon = sign_lon + \
                              ("%03d%02d%06.3f" % (deg_lon+1, 0, 0.))
            else:
                str_lon = sign_lon + \
                          ("%03d%02d%06.3f" % (deg_lon, min_lon+1, 0.))
        return str_lat + str_lon
def atanh(x):
    """arctangent hyperbolicus

    Delegates to the standard library, which is more accurate near
    |x| -> 1 than the hand-rolled 0.5*log((1+x)/(1-x)) formula used
    previously, and raises the conventional ValueError on |x| >= 1.
    """
    return math.atanh(x)
def __conv_WGS84_SWED_RT90(lat, lon):
    """
    Input is lat and lon as two float numbers
    Output is X and Y coordinates in RT90
    as a tuple of float numbers

    The code below converts to/from the Swedish RT90 koordinate
    system. The converion functions use "Gauss Conformal Projection
    (Transverse Marcator)" Krüger Formulas.
    The constanst are for the Swedish RT90-system.
    With other constants the conversion should be useful for
    other geographical areas.
    """
    # Some constants used for conversion to/from Swedish RT90
    f = 1.0/298.257222101   # flattening
    e2 = f*(2.0-f)          # first eccentricity squared
    n = f/(2.0-f)           # third flattening
    L0 = math.radians(15.8062845294) # central meridian: 15 deg 48 min 22.624306 sec
    k0 = 1.00000561024      # scale factor on the central meridian
    a = 6378137.0 # meter   # semi-major axis
    at = a/(1.0+n)*(1.0+ 1.0/4.0* pow(n, 2)+1.0/64.0*pow(n, 4))
    FN = -667.711 # m       # false northing
    FE = 1500064.274 # m    # false easting
    #the conversion
    lat_rad = math.radians(lat)
    lon_rad = math.radians(lon)
    A = e2
    B = 1.0/6.0*(5.0*pow(e2, 2) - pow(e2, 3))
    C = 1.0/120.0*(104.0*pow(e2, 3) - 45.0*pow(e2, 4))
    D = 1.0/1260.0*(1237.0*pow(e2, 4))
    DL = lon_rad - L0
    E = A + B*pow(math.sin(lat_rad), 2) + \
        C*pow(math.sin(lat_rad), 4) + \
        D*pow(math.sin(lat_rad), 6)
    # conformal latitude and the (xi, eta) plane coordinates
    psi = lat_rad - math.sin(lat_rad)*math.cos(lat_rad)*E
    xi = math.atan2(math.tan(psi), math.cos(DL))
    eta = atanh(math.cos(psi)*math.sin(DL))
    # Krüger series coefficients
    B1 = 1.0/2.0*n - 2.0/3.0*pow(n, 2) + 5.0/16.0*pow(n, 3) + \
         41.0/180.0*pow(n, 4)
    B2 = 13.0/48.0*pow(n, 2) - 3.0/5.0*pow(n, 3) + 557.0/1440.0*pow(n, 4)
    B3 = 61.0/240.0*pow(n, 3) - 103.0/140.0*pow(n, 4)
    B4 = 49561.0/161280.0*pow(n, 4)
    X = xi + B1*math.sin(2.0*xi)*math.cosh(2.0*eta) + \
        B2*math.sin(4.0*xi)*math.cosh(4.0*eta) + \
        B3*math.sin(6.0*xi)*math.cosh(6.0*eta) + \
        B4*math.sin(8.0*xi)*math.cosh(8.0*eta)
    Y = eta + B1*math.cos(2.0*xi)*math.sinh(2.0*eta) + \
        B2*math.cos(4.0*xi)*math.sinh(4.0*eta) + \
        B3*math.cos(6.0*xi)*math.sinh(6.0*eta) + \
        B4*math.cos(8.0*xi)*math.sinh(8.0*eta)
    # scale to metres and apply the false northing/easting
    X = X*k0*at + FN
    Y = Y*k0*at + FE
    return (X, Y)
def __conv_SWED_RT90_WGS84(X, Y):
    """
    Input is X and Y coordinates in RT90 as float
    Output is lat and long in degrees, float as tuple

    Inverse of __conv_WGS84_SWED_RT90, using the same Krüger-series
    constants.
    """
    # Some constants used for conversion to/from Swedish RT90
    f = 1.0/298.257222101
    e2 = f*(2.0-f)
    n = f/(2.0-f)
    L0 = math.radians(15.8062845294) # 15 deg 48 min 22.624306 sec
    k0 = 1.00000561024
    a = 6378137.0 # meter
    at = a/(1.0+n)*(1.0+ 1.0/4.0* pow(n, 2)+1.0/64.0*pow(n, 4))
    FN = -667.711 # m
    FE = 1500064.274 # m
    # remove the false northing/easting and rescale to the (xi, eta) plane
    xi = (X - FN)/(k0*at)
    eta = (Y - FE)/(k0*at)
    # inverse Krüger series coefficients
    D1 = 1.0/2.0*n - 2.0/3.0*pow(n, 2) + 37.0/96.0*pow(n, 3) - \
         1.0/360.0*pow(n, 4)
    D2 = 1.0/48.0*pow(n, 2) + 1.0/15.0*pow(n, 3) - 437.0/1440.0*pow(n, 4)
    D3 = 17.0/480.0*pow(n, 3) - 37.0/840.0*pow(n, 4)
    D4 = 4397.0/161280.0*pow(n, 4)
    xip = xi - D1*math.sin(2.0*xi)*math.cosh(2.0*eta) - \
          D2*math.sin(4.0*xi)*math.cosh(4.0*eta) - \
          D3*math.sin(6.0*xi)*math.cosh(6.0*eta) - \
          D4*math.sin(8.0*xi)*math.cosh(8.0*eta)
    etap = eta - D1*math.cos(2.0*xi)*math.sinh(2.0*eta) - \
           D2*math.cos(4.0*xi)*math.sinh(4.0*eta) - \
           D3*math.cos(6.0*xi)*math.sinh(6.0*eta) - \
           D4*math.cos(8.0*xi)*math.sinh(8.0*eta)
    # conformal latitude and longitude offset from the central meridian
    psi = math.asin(math.sin(xip)/math.cosh(etap))
    DL = math.atan2(math.sinh(etap), math.cos(xip))
    LON = L0 + DL
    A = e2 + pow(e2, 2) + pow(e2, 3) + pow(e2, 4)
    B = -1.0/6.0*(7.0*pow(e2, 2) + 17*pow(e2, 3) + 30*pow(e2, 4))
    C = 1.0/120.0*(224.0*pow(e2, 3) + 889.0*pow(e2, 4))
    D = 1.0/1260.0*(4279.0*pow(e2, 4))
    E = A + B*pow(math.sin(psi), 2) + \
        C*pow(math.sin(psi), 4) + \
        D*pow(math.sin(psi), 6)
    # convert the conformal latitude back to geodetic latitude
    LAT = psi + math.sin(psi)*math.cos(psi)*E
    LAT = math.degrees(LAT)
    LON = math.degrees(LON)
    return LAT, LON
#-------------------------------------------------------------------------
#
# For Testing the convert function in this module, apply it as a script:
# ==> in command line do "python PlaceUtils.py"
#
#-------------------------------------------------------------------------
if __name__ == '__main__':
    def test_formats_success(lat1, lon1, text=''):
        """Print lat1/lon1 converted through every supported output format.

        Expects conv_lat_lon to succeed; `text` describes the scenario.
        """
        format0 = "D.D4"
        format1 = "D.D8"
        format2 = "DEG"
        format3 = "DEG-:"
        format4 = "ISO-D"
        format5 = "ISO-DM"
        format6 = "ISO-DMS"
        format7 = "RT90"
        format8 = "GEDCOM"
        print("Testing conv_lat_lon function, "+text+':')
        res1, res2 = conv_lat_lon(lat1, lon1, format0)
        print(lat1, lon1, "in format", format0, "is ", res1, res2)
        res1, res2 = conv_lat_lon(lat1, lon1, format1)
        print(lat1, lon1, "in format", format1, "is ", res1, res2)
        res1, res2 = conv_lat_lon(lat1, lon1, format2)
        print(lat1, lon1, "in format", format2, "is ", res1, res2)
        res1, res2 = conv_lat_lon(lat1, lon1, format3)
        print(lat1, lon1, "in format", format3, "is ", res1, res2)
        # The ISO formats return a single combined string, not a pair.
        res = conv_lat_lon(lat1, lon1, format4)
        print(lat1, lon1, "in format", format4, "is ", res)
        res = conv_lat_lon(lat1, lon1, format5)
        print(lat1, lon1, "in format", format5, "is", res)
        res = conv_lat_lon(lat1, lon1, format6)
        print(lat1, lon1, "in format", format6, "is", res)
        res1, res2 = conv_lat_lon(lat1, lon1, format7)
        print(lat1, lon1, "in format", format7, "is", res1, res2, "\n")
        res1, res2 = conv_lat_lon(lat1, lon1, format8)
        print(lat1, lon1, "in format", format8, "is", res1, res2, "\n")
def test_formats_fail(lat1, lon1, text=''):
print("This test should make conv_lat_lon function fail, %s:") % text
res1, res2 = conv_lat_lon(lat1, lon1)
print(lat1, lon1, " fails to convert, result=", res1, res2, "\n")
    def test_RT90_conversion():
        """
        a given lat/lon is converted to RT90 and back as a test:
        """
        # 59 deg 40 min 9.09 sec N, 12 deg 58 min 57.74 sec E
        la = 59.0 + 40.0/60. + 9.09/3600.0
        lo = 12.0 + 58.0/60.0 + 57.74/3600.0
        x, y = __conv_WGS84_SWED_RT90(la, lo)
        lanew, lonew = __conv_SWED_RT90_WGS84(x, y)
        # Round trip must agree to within 1e-6 degree.
        assert math.fabs(lanew - la) < 1e-6, math.fabs(lanew - la)
        assert math.fabs(lonew - lo) < 1e-6, math.fabs(lonew - lo)
lat, lon = '50.849888888888', '2.885897222222'
test_formats_success(lat, lon)
lat, lon = ' 50°50\'59.60"N', ' 2°53\'9.23"E'
test_formats_success(lat, lon)
lat, lon = ' 50 : 50 : 59.60 ', ' -2:53 : 9.23 '
test_formats_success(lat, lon)
lat, lon = ' dummy', ' 2#53 \' 9.23 " E '
test_formats_fail(lat, lon)
lat, lon = ' 50:50: 59.60', ' d u m my'
test_formats_fail(lat, lon)
lat, lon = ' 50°59.60"N', ' 2°53\'E'
test_formats_success(lat, lon)
lat, lon = ' 11° 11\' 11" N, 11° 11\' 11" O', ' '
test_formats_fail(lat, lon)
# very small negative
lat, lon = '-0.00006', '-0.00006'
test_formats_success(lat, lon)
# missing direction N/S
lat, lon = ' 50°59.60"', ' 2°53\'E'
test_formats_fail(lat, lon)
# wrong direction on latitude
lat, lon = ' 50°59.60"E', ' 2°53\'N'
test_formats_fail(lat, lon)
# same as above
lat, lon = ' 50°59.99"E', ' 2°59\'59.99"N'
test_formats_fail(lat, lon)
# test precision
lat, lon = ' 50°59.99"S', ' 2°59\'59.99"E'
test_formats_success(lat, lon)
lat, lon = 'N50.849888888888', 'E2.885897222222'
test_formats_success(lat, lon)
# to large value of lat
lat, lon = '90.849888888888', '2.885897222222'
test_formats_fail(lat, lon)
# extreme values allowed
lat, lon = '90', '-180'
test_formats_success(lat, lon)
# extreme values allowed
lat, lon = '90° 00\' 00.00" S ', '179° 59\'59.99"W'
test_formats_success(lat, lon)
# extreme value not allowed
lat, lon = '90° 00\' 00.00" N', '180° 00\'00.00" E'
test_formats_fail(lat, lon)
# extreme values allowed
lat, lon = '90: 00: 00.00 ', '-179: 59:59.99'
test_formats_success(lat, lon)
# extreme value not allowed
lat, lon = '90° 00\' 00.00" N', '180:00:00.00'
test_formats_fail(lat, lon)
# extreme values not allowed
lat, lon = '90', '180'
test_formats_fail(lat, lon)
lat, lon = ' 89°59\'60"N', ' 2°53\'W'
test_formats_fail(lat, lon)
lat, lon = ' 89°60\'00"N', ' 2°53\'W'
test_formats_fail(lat, lon)
lat, lon = ' 89.1°40\'00"N', ' 2°53\'W'
test_formats_fail(lat, lon)
lat, lon = ' 89°40\'00"N', ' 2°53.1\'W'
test_formats_fail(lat, lon)
lat, lon = '0', '0'
test_formats_success(lat, lon,
"Special 0 value, crossing 0-meridian and equator")
# small values close to equator
lat, lon = ' 1°1"N', ' 1°1\'E'
test_formats_success(lat, lon)
# roundoff
lat, lon = ' 1°59.999"N', ' 1°59.999"E'
test_formats_success(lat, lon, 'Examples of round off and how it behaves')
lat, lon = ' 1°59\'59.9999"N', ' 1°59\'59.9999"E'
test_formats_success(lat, lon, 'Examples of round off and how it behaves')
lat, lon = '89°59\'59.9999"S', '179°59\'59.9999"W'
test_formats_success(lat, lon, 'Examples of round off and how it behaves')
lat, lon = '89°59\'59.9999"N', '179°59\'59.9999"E'
test_formats_success(lat, lon, 'Examples of round off and how it behaves')
#insane number of decimals:
lat, lon = '89°59\'59.99999999"N', '179°59\'59.99999999"E'
test_formats_success(lat, lon, 'Examples of round off and how it begaves')
#recognise '' as seconds "
lat, lon = '89°59\'59.99\'\' N', '179°59\'59.99\'\'E'
test_formats_success(lat, lon, "input \" as ''")
#test localisation of , and . as delimiter
lat, lon = '50.849888888888', '2,885897222222'
test_formats_success(lat, lon, 'localisation of . and , ')
lat, lon = '89°59\'59.9999"S', '179°59\'59,9999"W'
test_formats_success(lat, lon, 'localisation of . and , ')
lat, lon = '89°59\'1.599,999"S', '179°59\'59,9999"W'
test_formats_fail(lat, lon, 'localisation of . and , ')
#rest
lat, lon = '81.2', '-182.3'
test_formats_fail(lat, lon)
lat, lon = '-91.2', '-1'
test_formats_fail(lat, lon)
lat, lon = '++50:10:1', '2:1:2'
test_formats_fail(lat, lon)
lat, lon = '-50:10:1', '-+2:1:2'
test_formats_success(lat, lon)
lat, lon = '-50::1', '-2:1:2'
test_formats_fail(lat, lon)
lat, lon = '- 50 : 2 : 1 ', '-2:1:2'
test_formats_success(lat, lon)
lat, lon = '+ 50:2 : 1', '-2:1:2'
test_formats_success(lat, lon)
lat, lon = '+50:', '-2:1:2'
test_formats_fail(lat, lon)
lat, lon = '+50:1', '-2:1:2'
test_formats_success(lat, lon)
lat, lon = '+50: 0 : 1 : 1', '-2:1:2'
test_formats_fail(lat, lon)
lat, lon = '+61° 43\' 60.00"', '+17° 7\' 60.00"'
test_formats_fail(lat, lon)
lat, lon = '+61° 44\' 00.00"N', '+17° 8\' 00.00"E'
test_formats_success(lat, lon)
lat, lon = ': 0 : 1 : 1', ':1:2'
test_formats_fail(lat, lon)
lat, lon = 'N 50º52\'21.92"', 'E 124º52\'21.92"'
test_formats_success(lat, lon,
'New format with N/E first and another º - character')
lat, lon = 'S 50º52\'21.92"', 'W 124º52\'21.92"'
test_formats_success(lat, lon,
'New format with S/W first and another º - character')
test_RT90_conversion()
| gpl-2.0 |
aidanlister/django | django/contrib/gis/sitemaps/views.py | 341 | 2421 | from __future__ import unicode_literals
from django.apps import apps
from django.contrib.gis.db.models.fields import GeometryField
from django.contrib.gis.db.models.functions import AsKML, Transform
from django.contrib.gis.shortcuts import render_to_kml, render_to_kmz
from django.core.exceptions import FieldDoesNotExist
from django.db import DEFAULT_DB_ALIAS, connections
from django.http import Http404
def kml(request, label, model, field_name=None, compress=False, using=DEFAULT_DB_ALIAS):
    """
    Generate KML for the given app label, model and geographic field name.

    The model is looked up dynamically; when ``field_name`` is given it must
    refer to a GeometryField. KML generation is pushed into the database
    whenever the backend supports it.
    """
    try:
        model_class = apps.get_model(label, model)
    except LookupError:
        raise Http404('You must supply a valid app label and module name. Got "%s.%s"' % (label, model))
    if field_name:
        try:
            if not isinstance(model_class._meta.get_field(field_name), GeometryField):
                raise FieldDoesNotExist
        except FieldDoesNotExist:
            raise Http404('Invalid geometry field.')
    conn = connections[using]
    manager = model_class._default_manager.using(using)
    if conn.features.has_AsKML_function:
        # The database renders the KML for us.
        placemarks = manager.annotate(kml=AsKML(field_name))
    else:
        # No KML support in the backend: fall back to the lazy geometry's
        # `kml` attribute, transforming to SRID 4326 in SQL when possible.
        if conn.features.has_Transform_function:
            queryset = manager.annotate(
                **{'%s_4326' % field_name: Transform(field_name, 4326)})
            field_name += '_4326'
        else:
            queryset = manager.all()
        placemarks = []
        for obj in queryset:
            obj.kml = getattr(obj, field_name).kml
            placemarks.append(obj)
    # Pick the (optionally compressed) renderer.
    render = render_to_kmz if compress else render_to_kml
    return render('gis/kml/placemarks.kml', {'places': placemarks})
def kmz(request, label, model, field_name=None, using=DEFAULT_DB_ALIAS):
    """
    Return compressed KMZ for the given app label, model, and field name.

    Thin wrapper around kml() with compression forced on.
    """
    return kml(request, label, model,
               field_name=field_name, compress=True, using=using)
| bsd-3-clause |
zding5/Microblog-Flask | flask/lib/python2.7/site-packages/pbr/git.py | 25 | 9311 | # Copyright 2011 OpenStack LLC.
# Copyright 2012-2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import distutils.errors
from distutils import log
import io
import os
import re
import subprocess
import pkg_resources
from pbr import options
def _run_shell_command(cmd, throw_on_error=False, buffer=True, env=None):
    """Run *cmd* and return its stripped, UTF-8-decoded stdout.

    :param cmd: argv-style command list.
    :param throw_on_error: raise DistutilsError on a non-zero exit code.
    :param buffer: capture stdout/stderr via pipes; when False the child
        inherits the parent's streams and '' is returned.
    :param env: extra environment variables layered over os.environ.
    """
    stream = subprocess.PIPE if buffer else None
    run_env = os.environ.copy()
    if env:
        run_env.update(env)
    proc = subprocess.Popen(cmd, stdout=stream, stderr=stream, env=run_env)
    out = proc.communicate()
    if proc.returncode and throw_on_error:
        raise distutils.errors.DistutilsError(
            "%s returned %d" % (cmd, proc.returncode))
    if len(out) == 0 or not out[0] or not out[0].strip():
        return ''
    return out[0].strip().decode('utf-8')
def _run_git_command(cmd, git_dir, **kwargs):
    """Run a git subcommand against an explicit --git-dir.

    ``cmd`` may be a single argument string or a list/tuple of arguments;
    extra keyword arguments are forwarded to _run_shell_command().
    """
    if not isinstance(cmd, (list, tuple)):
        cmd = [cmd]
    # list(cmd) normalises tuples too: the isinstance() check above accepts
    # them, but bare `list + tuple` concatenation would raise TypeError.
    return _run_shell_command(
        ['git', '--git-dir=%s' % git_dir] + list(cmd), **kwargs)
def _get_git_directory():
    # Returns the path printed by `git rev-parse --git-dir`, or '' when the
    # working directory is not inside a git tree ('' is falsy; callers rely
    # on that to detect "no git context").
    return _run_shell_command(['git', 'rev-parse', '--git-dir'])
def _git_is_installed():
    """Return True when a usable ``git`` executable can be invoked."""
    # 'which git' is not available on every distribution, so probe by
    # actually running 'git --version' and watching for an exec failure.
    try:
        _run_shell_command(['git', '--version'])
        return True
    except OSError:
        return False
def _get_highest_tag(tags):
    """Find the highest tag from a list.
    Pass in a list of tag strings and this will return the highest
    (latest) as sorted by the pkg_resources version parser.
    """
    # Version-aware ordering: e.g. "1.10.0" sorts above "1.9.0", unlike a
    # plain lexicographic max().
    return max(tags, key=pkg_resources.parse_version)
def _find_git_files(dirname='', git_dir=None):
    """Behave like a file finder entrypoint plugin.
    We don't actually use the entrypoints system for this because it runs
    at absurd times. We only want to do this when we are building an sdist.
    """
    if git_dir is None:
        git_dir = _run_git_functions()
    if not git_dir:
        return []
    log.info("[pbr] In git context, generating filelist from git")
    # NUL-separated output copes with filenames containing newlines.
    raw = _run_git_command(['ls-files', '-z'], git_dir)
    return [name for name in raw.split(b'\x00'.decode('utf-8')) if name]
def _get_raw_tag_info(git_dir):
    # `git describe --always` yields "<tag>-<N>-g<sha>" when there are
    # commits on top of a tag, the bare tag on an exact match, or just a
    # sha when no tag exists.
    describe = _run_git_command(['describe', '--always'], git_dir)
    if "-" in describe:
        # N commits since the last tag -- NOTE: returned as a *string*.
        return describe.rsplit("-", 2)[-2]
    if "." in describe:
        # Exactly on a tag (presumably version tags contain dots):
        # zero commits since the tag, returned as the *int* 0.
        return 0
    # Bare sha, no tag information available.
    return None
def get_is_release(git_dir):
    # True only when HEAD sits exactly on a tag: _get_raw_tag_info() then
    # returns the int 0; a string commit count or None compares unequal.
    return _get_raw_tag_info(git_dir) == 0
def _run_git_functions():
    """Return the git directory of the current tree, or None.

    None is returned both when git is not installed and when the working
    directory is not inside a git repository.
    """
    if not _git_is_installed():
        return None
    return _get_git_directory() or None
def get_git_short_sha(git_dir=None):
    """Return the short sha for this repo, if it exists."""
    git_dir = git_dir or _run_git_functions()
    if not git_dir:
        return None
    # Abbreviated hash of HEAD.
    return _run_git_command(['log', '-n1', '--pretty=format:%h'], git_dir)
def _iter_changelog(changelog):
    """Convert a oneline log iterator to formatted strings.
    :param changelog: An iterator of one line log entries like
                      that given by _iter_log_oneline.
    :return: An iterator over (release, formatted changelog) tuples.
    """
    first_line = True
    current_release = None
    # Document header; emitted before any release section.
    yield current_release, "CHANGES\n=======\n\n"
    for hash, tags, msg in changelog:
        if tags:
            # A tagged commit starts a new release section with an
            # RST-style dashed underline.
            current_release = _get_highest_tag(tags)
            underline = len(current_release) * '-'
            if not first_line:
                yield current_release, '\n'
            yield current_release, (
                "%(tag)s\n%(underline)s\n\n" %
                dict(tag=current_release, underline=underline))
        # Merge commits are skipped; trailing periods are stripped so all
        # bullet entries are styled consistently.
        if not msg.startswith("Merge "):
            if msg.endswith("."):
                msg = msg[:-1]
            yield current_release, "* %(msg)s\n" % dict(msg=msg)
        first_line = False
def _iter_log_oneline(git_dir=None, option_dict=None):
    """Iterate over --oneline log entries if possible.
    This parses the output into a structured form but does not apply
    presentation logic to the output - making it suitable for different
    uses.
    :return: An iterator of (hash, tags_set, 1st_line) tuples, or None if
        changelog generation is disabled / not available.
    """
    if not option_dict:
        option_dict = {}
    # Changelog generation can be disabled via setup.cfg option or the
    # SKIP_WRITE_GIT_CHANGELOG environment variable.
    should_skip = options.get_boolean_option(option_dict, 'skip_changelog',
                                             'SKIP_WRITE_GIT_CHANGELOG')
    if should_skip:
        return
    if git_dir is None:
        git_dir = _get_git_directory()
    if not git_dir:
        return
    return _iter_log_inner(git_dir)
def _iter_log_inner(git_dir):
    """Iterate over --oneline log entries.
    This parses the output intro a structured form but does not apply
    presentation logic to the output - making it suitable for different
    uses.
    :return: An iterator of (hash, tags_set, 1st_line) tuples.
    """
    log.info('[pbr] Generating ChangeLog')
    log_cmd = ['log', '--oneline', '--decorate']
    changelog = _run_git_command(log_cmd, git_dir)
    for line in changelog.split('\n'):
        line_parts = line.split()
        if len(line_parts) < 2:
            # Hash with no subject (or empty line): nothing to report.
            continue
        # Tags are in a list contained in ()'s. If a commit
        # subject that is tagged happens to have ()'s in it
        # this will fail
        if line_parts[1].startswith('(') and ')' in line:
            msg = line.split(')')[1].strip()
        else:
            msg = " ".join(line_parts[1:])
        # Decorations look like "(tag: 1.2.3, tag: foo, origin/master)";
        # collect every name that follows a "tag: " marker.
        if "tag:" in line:
            tags = set([
                tag.split(",")[0]
                for tag in line.split(")")[0].split("tag: ")[1:]])
        else:
            tags = set()
        yield line_parts[0], tags, msg
def write_git_changelog(git_dir=None, dest_dir=os.path.curdir,
                        option_dict=None, changelog=None):
    """Write a ChangeLog file in *dest_dir* based on the git history.

    :param git_dir: git metadata directory; autodetected when None.
    :param dest_dir: directory the ChangeLog is written into.
    :param option_dict: setup.cfg-style options controlling skip behaviour.
    :param changelog: pre-parsed iterable of (hash, tags, msg) tuples; when
        omitted it is generated from the git log.
    """
    # NOTE(review): the previous signature used a shared mutable default
    # (option_dict=dict()); None avoids that anti-pattern while behaving
    # identically for all callers.
    if option_dict is None:
        option_dict = {}
    if not changelog:
        changelog = _iter_log_oneline(git_dir=git_dir, option_dict=option_dict)
        if changelog:
            changelog = _iter_changelog(changelog)
    if not changelog:
        return
    log.info('[pbr] Writing ChangeLog')
    new_changelog = os.path.join(dest_dir, 'ChangeLog')
    # If there's already a ChangeLog and it's not writable, just use it
    if (os.path.exists(new_changelog)
            and not os.access(new_changelog, os.W_OK)):
        return
    with io.open(new_changelog, "w", encoding="utf-8") as changelog_file:
        for release, content in changelog:
            changelog_file.write(content)
def generate_authors(git_dir=None, dest_dir='.', option_dict=None):
    """Create AUTHORS file using git commits.

    Author and Co-authored-by lines are collected from the git log,
    bot/CI addresses are filtered out, and the result is appended to any
    existing AUTHORS.in.

    :param git_dir: git metadata directory; autodetected when None.
    :param dest_dir: directory the AUTHORS file is written into.
    :param option_dict: setup.cfg-style options controlling skip behaviour.
    """
    # NOTE(review): the previous signature used a shared mutable default
    # (option_dict=dict()); None avoids that anti-pattern while behaving
    # identically for all callers.
    if option_dict is None:
        option_dict = {}
    should_skip = options.get_boolean_option(option_dict, 'skip_authors',
                                             'SKIP_GENERATE_AUTHORS')
    if should_skip:
        return
    old_authors = os.path.join(dest_dir, 'AUTHORS.in')
    new_authors = os.path.join(dest_dir, 'AUTHORS')
    # If there's already an AUTHORS file and it's not writable, just use it
    if (os.path.exists(new_authors)
            and not os.access(new_authors, os.W_OK)):
        return
    log.info('[pbr] Generating AUTHORS')
    ignore_emails = '(jenkins@review|infra@lists|jenkins@openstack)'
    if git_dir is None:
        git_dir = _get_git_directory()
    if git_dir:
        authors = []
        # don't include jenkins email address in AUTHORS file
        git_log_cmd = ['log', '--format=%aN <%aE>']
        authors += _run_git_command(git_log_cmd, git_dir).split('\n')
        authors = [a for a in authors if not re.search(ignore_emails, a)]
        # get all co-authors from commit messages
        co_authors_out = _run_git_command('log', git_dir)
        co_authors = re.findall('Co-authored-by:.+', co_authors_out,
                                re.MULTILINE)
        co_authors = [signed.split(":", 1)[1].strip()
                      for signed in co_authors if signed]
        authors += co_authors
        # Deduplicate and sort for a stable file.
        authors = sorted(set(authors))
        with open(new_authors, 'wb') as new_authors_fh:
            if os.path.exists(old_authors):
                with open(old_authors, "rb") as old_authors_fh:
                    new_authors_fh.write(old_authors_fh.read())
            new_authors_fh.write(('\n'.join(authors) + '\n')
                                 .encode('utf-8'))
| mit |
Motorhead1991/android_kernel_samsung_s5pv210 | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct
# Convert the packed list of little-endian u32 values on stdin into the
# "index=value" string expected by the sysfs adsl_config attribute.
i = 0
# Read raw bytes: on Python 3, sys.stdin is a *text* stream and
# struct.unpack() requires bytes, so use the underlying binary buffer.
# Python 2's sys.stdin has no `buffer` attribute and is already binary,
# hence the getattr() fallback keeps the script working on both.
stdin = getattr(sys.stdin, 'buffer', sys.stdin)
while True:
	buf = stdin.read(4)
	if len(buf) == 0:
		# Clean EOF on a 4-byte boundary: done.
		break
	elif len(buf) != 4:
		# Truncated trailing value: report and bail out.
		sys.stdout.write("\n")
		sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
		sys.exit(1)
	if i > 0:
		sys.stdout.write(" ")
	sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
	i += 1
sys.stdout.write("\n")
| gpl-2.0 |
rdmorganiser/rdmo | rdmo/projects/models/value.py | 1 | 6697 | import mimetypes
from pathlib import Path
import iso8601
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django_cleanup import cleanup
from rdmo.core.constants import (VALUE_TYPE_BOOLEAN, VALUE_TYPE_CHOICES,
VALUE_TYPE_DATETIME, VALUE_TYPE_TEXT)
from rdmo.core.models import Model
from rdmo.domain.models import Attribute
from rdmo.options.models import Option
from ..managers import ValueManager
from ..utils import get_value_path
def get_file_upload_to(instance, filename):
    # Upload handler for Value.file: store uploads below the project's
    # value path, namespaced by the value's primary key.
    return str(get_value_path(instance.project, instance.snapshot) / str(instance.id) / filename)
class Value(Model):
    """A single stored answer for one attribute of a project.

    A value holds free text, a selected option, or an uploaded file, and
    is positioned within its question set by set_index/collection_index.
    """
    objects = ValueManager()
    # Text representations treated as boolean false by is_true/is_false.
    FALSE_TEXT = [None, '', '0', 'f', 'F', 'false', 'False']
    project = models.ForeignKey(
        'Project', on_delete=models.CASCADE, related_name='values',
        verbose_name=_('Project'),
        help_text=_('The project this value belongs to.')
    )
    snapshot = models.ForeignKey(
        'Snapshot', blank=True, null=True,
        on_delete=models.CASCADE, related_name='values',
        verbose_name=_('Snapshot'),
        help_text=_('The snapshot this value belongs to.')
    )
    attribute = models.ForeignKey(
        Attribute, blank=True, null=True,
        on_delete=models.SET_NULL, related_name='values',
        verbose_name=_('Attribute'),
        help_text=_('The attribute this value belongs to.')
    )
    set_index = models.IntegerField(
        default=0,
        verbose_name=_('Set index'),
        help_text=_('The position of this value in an entity collection (i.e. in the question set)')
    )
    collection_index = models.IntegerField(
        default=0,
        verbose_name=_('Collection index'),
        help_text=_('The position of this value in an attribute collection.')
    )
    text = models.TextField(
        blank=True,
        verbose_name=_('Text'),
        help_text=_('The string stored for this value.')
    )
    option = models.ForeignKey(
        Option, blank=True, null=True, on_delete=models.SET_NULL, related_name='values',
        verbose_name=_('Option'),
        help_text=_('The option stored for this value.')
    )
    file = models.FileField(
        upload_to=get_file_upload_to, null=True, blank=True,
        verbose_name=_('File'),
        help_text=_('The file stored for this value.')
    )
    value_type = models.CharField(
        max_length=8, choices=VALUE_TYPE_CHOICES, default=VALUE_TYPE_TEXT,
        verbose_name=_('Value type'),
        help_text=_('Type of this value.')
    )
    unit = models.CharField(
        max_length=64, blank=True,
        verbose_name=_('Unit'),
        help_text=_('Unit for this value.')
    )
    external_id = models.CharField(
        max_length=256, blank=True,
        verbose_name=_('External id'),
        help_text=_('External id for this value.')
    )
    class Meta:
        ordering = ('attribute', 'set_index', 'collection_index')
        verbose_name = _('Value')
        verbose_name_plural = _('Values')
    @property
    def as_dict(self):
        """Serializable dict of this value; file fields only when a file is set."""
        value_dict = {
            'id': self.id,
            'created': self.created,
            'updated': self.updated,
            'set_index': self.set_index,
            'collection_index': self.collection_index,
            'value_type': self.value_type,
            'unit': self.unit,
            'external_id': self.external_id,
            'value': self.value,
            'value_and_unit': self.value_and_unit,
            'is_true': self.is_true,
            'is_false': self.is_false,
            'as_number': self.as_number
        }
        if self.file:
            value_dict.update({
                'file_name': self.file_name,
                'file_url': self.file_url,
                'file_type': self.file_type,
                'file_path': self.file_path
            })
        return value_dict
    @property
    def value(self):
        """Display value: option text, file name, or the (typed) text."""
        if self.option:
            # Option label, extended with the free-text additional input.
            value = self.option.text or ''
            if self.option.additional_input and self.text:
                value += ': ' + self.text
            return value
        elif self.file:
            return self.file_name
        elif self.text:
            if self.value_type == VALUE_TYPE_DATETIME:
                # Fall back to the raw text when it is not valid ISO 8601.
                try:
                    return iso8601.parse_date(self.text).date()
                except iso8601.ParseError:
                    return self.text
            elif self.value_type == VALUE_TYPE_BOOLEAN:
                if self.text == '1':
                    return _('Yes')
                else:
                    return _('No')
            else:
                return self.text
        else:
            return None
    @property
    def value_and_unit(self):
        """The display value suffixed with the unit, or '' when unset."""
        value = self.value
        if value is None:
            return ''
        elif self.unit:
            return '%s %s' % (value, self.unit)
        else:
            return value
    @property
    def is_true(self):
        """True unless the stored text is one of the FALSE_TEXT spellings."""
        return self.text not in self.FALSE_TEXT
    @property
    def is_false(self):
        """True when the stored text is one of the FALSE_TEXT spellings."""
        return self.text in self.FALSE_TEXT
    @property
    def as_number(self):
        """Numeric interpretation of the text (int preferred), or 0."""
        try:
            val = self.text
        except AttributeError:
            return 0
        else:
            if isinstance(val, str):
                # Accept a decimal comma as the decimal separator.
                val = val.replace(',', '.')
            if isinstance(val, float) is False:
                try:
                    return int(val)
                except (ValueError, TypeError):
                    pass
                try:
                    return float(val)
                except (ValueError, TypeError):
                    return 0
            else:
                return val
    @property
    def file_name(self):
        """Base name of the stored file, or None when no file is set."""
        if self.file:
            return Path(self.file.name).name
    @property
    def file_url(self):
        """API URL serving this value's file, or None when no file is set."""
        if self.file:
            return reverse('v1-projects:value-file', args=[self.id])
    @property
    def file_type(self):
        """Guessed MIME type of the file, or None when no file is set."""
        if self.file:
            return mimetypes.guess_type(self.file.name)[0]
    @property
    def file_path(self):
        """File path relative to the project/snapshot value directory."""
        if self.file:
            resource_path = get_value_path(self.project, self.snapshot)
            return Path(self.file.name).relative_to(resource_path).as_posix()
    def copy_file(self, file_name, file_content):
        # copies a file field from a different value over to this value
        # this is tricky, because we need to trick django_cleanup to not delete the original file
        # important for snapshots and import from projects
        self.file.save(file_name, file_content, save=False)
        cleanup.refresh(self)
        self.save()
| apache-2.0 |
sxjscience/tvm | tests/python/relay/test_ir_parser.py | 1 | 22635 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import te
from tvm import relay
import tvm.relay.testing
import pytest
from numpy import isclose
from typing import Union
from functools import wraps
SEMVER = '#[version = "0.0.5"]\n'
BINARY_OPS = {
"*": relay.multiply,
"/": relay.divide,
"+": relay.add,
"-": relay.subtract,
"<": relay.less,
">": relay.greater,
"<=": relay.less_equal,
">=": relay.greater_equal,
"==": relay.equal,
"!=": relay.not_equal,
}
TYPES = {
"int8",
"int16",
"int32",
"int64",
"uint8",
"uint16",
"uint32",
"uint64",
"float16",
"float32",
"float64",
"bool",
"int8x4",
"uint1x4",
"float16x4",
}
LIST_DEFN = """
type List[A] {
Cons(A, List[A]),
Nil,
}
"""
def assert_graph_equal(lhs, rhs):
    # Structural equality with free variables matched by position/name,
    # since parsing always creates fresh Var nodes.
    tvm.ir.assert_structural_equal(lhs, rhs, map_free_vars=True)
def graph_equal(lhs, rhs):
    # Boolean (non-asserting) variant of assert_graph_equal.
    return tvm.ir.structural_equal(lhs, rhs, map_free_vars=True)
def roundtrip_expr(expr):
    """Check that printing *expr* and re-parsing yields an equal expression."""
    text = tvm.relay.Expr.astext(expr, show_meta_data=False)
    x = tvm.parser.parse_expr(text)
    assert_graph_equal(x, expr)
# Testing Utilities for expressions.
def roundtrip(expr):
    """Check that printing a whole module and re-parsing yields an equal one."""
    x = tvm.parser.fromtext(expr.astext())
    assert_graph_equal(x, expr)
def parse_text(code):
    """Parse an expression and verify it round-trips through the printer."""
    expr = tvm.parser.parse_expr(code)
    roundtrip_expr(expr)
    return expr
def parses_as(code, expr):
    # type: (str, relay.Expr) -> bool
    """Return whether *code* parses to something structurally equal to *expr*."""
    parsed = parse_text(code)
    result = graph_equal(parsed, expr)
    return result
# Testing Utilities for full modules.
def parse_module(code):
    """Parse *code* as a full module (SEMVER header added) and round-trip it."""
    mod = tvm.parser.parse(SEMVER + code)
    roundtrip(mod)
    return mod
def assert_parses_as(code, expr):
    """Assert that *code* parses to an expression structurally equal to *expr*."""
    parsed = parse_text(code)
    assert_graph_equal(parsed, expr)
def assert_parse_module_as(code, mod):
    """Assert that *code* parses to a module equal to *mod* after InferType."""
    # Type inference is run on the expected module first because the parsed
    # side comes back with inferred types.
    mod = tvm.relay.transform.InferType()(mod)
    parsed = parse_module(code)
    assert_graph_equal(parsed, mod)
def get_scalar(x):
    # type: (relay.Constant) -> (Union[float, int, bool])
    """Extract the Python scalar held by a Relay constant."""
    return x.data.asnumpy().item()
# Shared fixtures used across the test cases below: an int32 scalar type,
# anonymous/named variables (with and without annotations) and the unit
# (empty tuple) expression.
int32 = relay.scalar_type("int32")
_ = relay.Var("_")
X = relay.Var("x")
Y = relay.Var("y")
X_ANNO = relay.Var("x", int32)
Y_ANNO = relay.Var("y", int32)
UNIT = relay.Tuple([])
def test_comments():
    """Line, block, and nested block comments parse to nothing."""
    assert_parses_as(
        """
        // This is a line comment!
        ()
        """,
        UNIT,
    )
    assert_parses_as(
        """
        /* This is a block comment!
            This is still a block comment!
        */
        ()
        """,
        UNIT,
    )
    assert_parses_as(
        """
        /* This is a block comment!
           /*Block comment is recursive!*/
        */
        ()
        """,
        UNIT,
    )
def test_int_literal():
    """Integer literals (positive, zero, negative) become scalar constants."""
    assert isinstance(parse_text("1"), relay.Constant)
    assert isinstance(parse_text("1").data, tvm.nd.NDArray)
    assert get_scalar(parse_text("1")) == 1
    assert get_scalar(parse_text("10")) == 10
    assert get_scalar(parse_text("0")) == 0
    assert get_scalar(parse_text("-100")) == -100
    assert get_scalar(parse_text("-05")) == -5
def test_float_literal():
    """Float literals, including scientific notation, parse correctly."""
    assert get_scalar(parse_text("1.0f")) == 1.0
    assert isclose(get_scalar(parse_text("1.56667f")), 1.56667)
    assert get_scalar(parse_text("0.0f")) == 0.0
    assert get_scalar(parse_text("-10.0f")) == -10.0
    # scientific notation
    assert isclose(get_scalar(parse_text("1e-1f")), 1e-1)
    assert get_scalar(parse_text("1e+1f")) == 1e1
    assert isclose(get_scalar(parse_text("1E-1f")), 1e-1)
    assert get_scalar(parse_text("1E+1f")) == 1e1
    assert isclose(get_scalar(parse_text("1.0e-1f")), 1.0e-1)
    assert get_scalar(parse_text("1.0e+1f")) == 1.0e1
    assert isclose(get_scalar(parse_text("1.0E-1f")), 1.0e-1)
    assert get_scalar(parse_text("1.0E+1f")) == 1.0e1
def test_bool_literal():
    """True/False parse to boolean constants."""
    assert get_scalar(parse_text("True")) == True
    assert get_scalar(parse_text("False")) == False
def test_negative():
    """Repeated unary minus folds on literals."""
    # need to handle parsing non-literal operations
    # assert isinstance(parse_text("let %x = 1; -%x").body, relay.Call)
    assert get_scalar(parse_text("--10")) == 10
    assert get_scalar(parse_text("---10")) == -10
def test_bin_op():
    """Every surface binary operator maps to its Relay builder."""
    for bin_op in BINARY_OPS.keys():
        assert_parses_as(
            "1 {} 1".format(bin_op), BINARY_OPS.get(bin_op)(relay.const(1), relay.const(1))
        )
def test_parens():
    """Parentheses override, and match, default precedence."""
    assert graph_equal(parse_text("1 * 1 + 1"), parse_text("(1 * 1) + 1"))
    assert not graph_equal(parse_text("1 * 1 + 1"), parse_text("1 * (1 + 1)"))
def test_op_assoc():
    """Operator precedence orders *, +, <, == as expected in both directions."""
    assert graph_equal(parse_text("1 * 1 + 1 < 1 == 1"), parse_text("(((1 * 1) + 1) < 1) == 1"))
    assert graph_equal(parse_text("1 == 1 < 1 + 1 * 1"), parse_text("1 == (1 < (1 + (1 * 1)))"))
def test_vars():
    """Local vars, global vars, and operator identifiers all parse."""
    # var
    var = parse_text("let %foo = (); %foo")
    assert isinstance(var.body, relay.Var)
    assert var.body.name_hint == "foo"
    # global var
    global_var = parse_text("@foo")
    assert isinstance(global_var, relay.GlobalVar)
    assert global_var.name_hint == "foo"
    # operator id
    op = parse_text("add")
    assert isinstance(op, tvm.ir.Op)
    assert op.name == "add"
    # operator id with prefix
    op = parse_text("nn.global_avg_pool2d")
    assert isinstance(op, tvm.ir.Op)
    assert op.name == "nn.global_avg_pool2d"
def test_meta_ref():
    """Meta references are rejected outside a module context."""
    with pytest.raises(tvm.error.DiagnosticError):
        meta_op = parse_text("meta[type_key][1337]")
        # NOTE(review): unreachable when parse_text raises as expected.
        assert meta_op.attrs.node_type_key == "type_key"
        assert meta_op.attrs.node_index == 1337
def test_let():
    """Single and nested let bindings parse to relay.Let chains."""
    assert_parses_as("let %x = 1; ()", relay.Let(X, relay.const(1), UNIT))
    assert_parses_as(
        """
        let %x = 1;
        let %y = 2;
        ()
        """,
        relay.Let(X, relay.const(1), relay.Let(Y, relay.const(2), UNIT)),
    )
def test_seq():
    """Expression sequencing desugars to a let with a throwaway binder."""
    assert_parses_as("(); ()", relay.Let(_, UNIT, UNIT))
    assert_parses_as("let %_ = 1; ()", relay.Let(X, relay.const(1), UNIT))
def test_graph():
    """Graph bindings (%0, %1) allow sharing subexpressions."""
    code = "%0 = (); %1 = 1; (%0, %0, %1)"
    assert_parses_as(code, relay.Tuple([UNIT, UNIT, relay.const(1)]))
def test_graph_single():
    """A lone graph binding resolves to its bound expression."""
    assert_parses_as("%1 = (); %1", relay.Tuple([]))
def test_let_global_var():
    """Binding a global var (@x) with let is a diagnostic error."""
    with pytest.raises(tvm.error.DiagnosticError):
        parse_text("let @x = 1; ()")
def test_let_op():
    """Binding a bare identifier (operator name) with let is an error."""
    with pytest.raises(tvm.error.DiagnosticError):
        parse_text("let x = 1; ()")
def test_tuple():
    """Tuples of arity 0 through 3, including the trailing-comma 1-tuple."""
    assert_parses_as("()", relay.Tuple([]))
    assert_parses_as("(0,)", relay.Tuple([relay.const(0)]))
    assert_parses_as("(0, 1)", relay.Tuple([relay.const(0), relay.const(1)]))
    assert_parses_as("(0, 1, 2)", relay.Tuple([relay.const(0), relay.const(1), relay.const(2)]))
def test_tuple_proj():
    """Tuple projection (.0) parses to TupleGetItem."""
    x = relay.var("x", shape=())
    assert_parses_as(
        "free_var %x: float32; %x((%x,).0, %x)",
        relay.Call(x, [relay.TupleGetItem(relay.Tuple([x]), 0), x]),
    )
def test_func():
    """Anonymous functions with 0/1/2 params and type annotations."""
    # 0 args
    assert_parses_as("fn () { 0 }", relay.Function([], relay.const(0), None, []))
    # 1 arg
    assert_parses_as("fn (%x) { %x }", relay.Function([X], X, None, []))
    # 2 args
    assert_parses_as("fn (%x, %y) { %x + %y }", relay.Function([X, Y], relay.add(X, Y), None, []))
    # annotations
    assert_parses_as("fn (%x: int32) -> int32 { %x }", relay.Function([X_ANNO], X_ANNO, int32, []))
# Refactor the attribute syntax and printing.
#
# # attributes
# assert_parses_as(
# "fn (n=5) { () }",
# relay.Function([], UNIT, None, None, tvm.ir.make_node("DictAttrs", n=relay.const(5)))
# )
# TODO(@jmp): Crashes if %x isn't annnotated.
def test_defn():
    """A top-level def produces an IRModule."""
    id_defn = parse_module(
        """
        def @id(%x: int32) -> int32 {
            %x
        }
        """
    )
    assert isinstance(id_defn, tvm.IRModule)
def test_recursive_call():
    """A definition may call itself by its global name."""
    id_defn = parse_module(
        """
        def @id(%x: int32) -> int32 {
            @id(%x)
        }
        """
    )
    assert isinstance(id_defn, tvm.IRModule)
def test_ifelse():
    """if/else parses to relay.If."""
    assert_parses_as(
        """
        if (True) {
            0
        } else {
            1
        }
        """,
        relay.If(relay.const(True), relay.const(0), relay.const(1)),
    )
def test_ifelse_scope():
    """A let bound in the then-branch is not visible in the else-branch."""
    with pytest.raises(tvm.error.DiagnosticError):
        parse_text(
            """
            if (True) {
                let %x = ();
                ()
            } else {
                %x
            }
            """
        )
def test_ref():
    """ref/ref_write/ref_read parse inside a module."""
    program = """
    #[version = "0.0.5"]
    def @main(%x: float32) {
        %0 = ref(%x);
        ref_write(%0, 1f);
        ref_read(%0)
    }
    """
    tvm.parser.parse(program)
def test_call():
    """Calls: 0/1/2 args, anonymous callees, curried calls, and ops."""
    # select right function to call: simple ident case
    id_func = relay.Var("id")
    assert_parses_as(
        """
        let %id = fn (%x) { %x };
        10 * %id(10)
        """,
        relay.Let(
            id_func,
            relay.Function([X], X, None, []),
            relay.multiply(relay.const(10), relay.Call(id_func, [relay.const(10)])),
        ),
    )
    # 0 args
    constant = relay.Var("constant")
    assert_parses_as(
        """
        let %constant = fn () { 0 };
        %constant()
        """,
        relay.Let(
            constant,
            relay.Function([], relay.const(0), None, []),
            relay.Call(constant, [], None, None),
        ),
    )
    # 1 arg
    id_var = relay.Var("id")
    assert_parses_as(
        """
        let %id = fn (%x) { %x };
        %id(1)
        """,
        relay.Let(
            id_var,
            relay.Function([X], X, None, []),
            relay.Call(id_var, [relay.const(1)], None, None),
        ),
    )
    # 2 args
    multiply = relay.Var("multiply")
    assert_parses_as(
        """
        let %multiply = fn (%x, %y) { %x * %y };
        %multiply(0, 0)
        """,
        relay.Let(
            multiply,
            relay.Function([X, Y], relay.multiply(X, Y), None, []),
            relay.Call(multiply, [relay.const(0), relay.const(0)], None, None),
        ),
    )
    # anonymous function
    assert_parses_as(
        """
        (fn (%x) { %x })(0)
        """,
        relay.Call(relay.Function([X], X, None, []), [relay.const(0)], None, None),
    )
    # curried function
    curried_mult = relay.Var("curried_mult")
    assert_parses_as(
        """
        let %curried_mult =
        fn (%x) {
        fn (%y) {
            %x * %y
        }
        };
        %curried_mult(0);
        %curried_mult(0)(0)
        """,
        relay.Let(
            curried_mult,
            relay.Function([X], relay.Function([Y], relay.multiply(X, Y), None, []), None, []),
            relay.Let(
                _,
                relay.Call(curried_mult, [relay.const(0)], None, None),
                relay.Call(
                    relay.Call(curried_mult, [relay.const(0)], None, None),
                    [relay.const(0)],
                    None,
                    None,
                ),
            ),
        ),
    )
    # op
    assert_parses_as("abs(1)", relay.Call(relay.op.get("abs"), [relay.const(1)], None, None))
# Types
def test_incomplete_type():
    """A bare `_` annotation parses as an incomplete (inferred) type."""
    assert_parses_as("let %_ : _ = (); ()", relay.Let(_, UNIT, UNIT))
def test_builtin_types():
    """Every built-in scalar type name is accepted as a let annotation."""
    for builtin_type in TYPES:
        parse_text("let %_ : {} = (); ()".format(builtin_type))
def test_tensor_type():
assert_parses_as(
"let %_ : Tensor[(), float32] = (); ()",
relay.Let(relay.Var("_", relay.TensorType((), "float32")), UNIT, UNIT),
)
assert_parses_as(
"let %_ : Tensor[(1), float32] = (); ()",
relay.Let(relay.Var("_", relay.TensorType((1,), "float32")), UNIT, UNIT),
)
assert_parses_as(
"let %_ : Tensor[(1, 1), float32] = (); ()",
relay.Let(relay.Var("_", relay.TensorType((1, 1), "float32")), UNIT, UNIT),
)
assert_parses_as(
"let %_ : Tensor[(?, 1), float32] = (); ()",
relay.Let(relay.Var("_", relay.TensorType((tvm.tir.Any(), 1), "float32")), UNIT, UNIT),
)
def test_function_type():
assert_parses_as(
"""
let %_: fn () -> int32 = fn () -> int32 { 0 }; ()
""",
relay.Let(
relay.Var("_", relay.FuncType([], int32, [], [])),
relay.Function([], relay.const(0), int32, []),
UNIT,
),
)
assert_parses_as(
"""
let %_: fn (int32) -> int32 = fn (%x: int32) -> int32 { 0 }; ()
""",
relay.Let(
relay.Var("_", relay.FuncType([int32], int32, [], [])),
relay.Function([relay.Var("x", int32)], relay.const(0), int32, []),
UNIT,
),
)
assert_parses_as(
"""
let %_: fn (int32, int32) -> int32 = fn (%x: int32, %y: int32) -> int32 { 0 }; ()
""",
relay.Let(
relay.Var("_", relay.FuncType([int32, int32], int32, [], [])),
relay.Function(
[relay.Var("x", int32), relay.Var("y", int32)], relay.const(0), int32, []
),
UNIT,
),
)
def test_tuple_type():
assert_parses_as(
"""
let %_: () = (); ()
""",
relay.Let(relay.Var("_", relay.TupleType([])), UNIT, UNIT),
)
assert_parses_as(
"""
let %_: (int32,) = (0,); ()
""",
relay.Let(relay.Var("_", relay.TupleType([int32])), relay.Tuple([relay.const(0)]), UNIT),
)
assert_parses_as(
"""
let %_: (int32, int32) = (0, 1); ()
""",
relay.Let(
relay.Var("_", relay.TupleType([int32, int32])),
relay.Tuple([relay.const(0), relay.const(1)]),
UNIT,
),
)
def test_adt_defn():
mod = tvm.IRModule()
glob_typ_var = relay.GlobalTypeVar("Ayy")
prog = relay.TypeData(glob_typ_var, [], [relay.Constructor("Nil", [], glob_typ_var)])
mod[glob_typ_var] = prog
assert_parse_module_as(
"""
type Ayy { Nil }
""",
mod,
)
def test_adt_any():
code = """
type my_dtype {
my_cons(Tensor[(?, 1), uint16]),
}
"""
mod = parse_module(code)
items = mod.type_definitions.items()
global_type_var, type_data = items[0]
assert global_type_var.name_hint == "my_dtype"
ctors = type_data.constructors
assert len(ctors) == 1
my_cons = ctors[0]
assert my_cons.name_hint == "my_cons"
ty_shape = my_cons.inputs[0].shape
assert isinstance(ty_shape[0], tvm.tir.Any)
assert ty_shape[1] == 1
def test_empty_adt_defn():
mod = tvm.IRModule()
glob_typ_var = relay.GlobalTypeVar("Ayy")
prog = relay.TypeData(glob_typ_var, [], [])
mod[glob_typ_var] = prog
assert_parse_module_as(
"""
type Ayy { }
""",
mod,
)
def test_multiple_cons_defn():
mod = tvm.IRModule()
list_var = relay.GlobalTypeVar("List")
typ_var = relay.TypeVar("A")
prog = relay.TypeData(
list_var,
[typ_var],
[
relay.Constructor("Cons", [typ_var, list_var(typ_var)], list_var),
relay.Constructor("Nil", [], list_var),
],
)
mod[list_var] = prog
assert_parse_module_as(LIST_DEFN, mod)
def test_multiple_type_param_defn():
glob_typ_var = relay.GlobalTypeVar("Either")
typ_var_a = relay.TypeVar("A")
typ_var_b = relay.TypeVar("B")
prog = relay.TypeData(
glob_typ_var,
[typ_var_a, typ_var_b],
[
relay.Constructor("Left", [typ_var_a], glob_typ_var),
relay.Constructor("Right", [typ_var_b], glob_typ_var),
],
)
mod = tvm.IRModule()
mod[glob_typ_var] = prog
assert_parse_module_as(
"""
type Either[A, B] {
Left(A),
Right(B),
}
""",
mod,
)
def test_match():
# pair each match keyword with whether it specifies a complete match or not
match_keywords = [("match", True), ("match?", False)]
for (match_keyword, is_complete) in match_keywords:
mod = tvm.IRModule()
list_var = relay.GlobalTypeVar("List")
typ_var = relay.TypeVar("A")
cons_constructor = relay.Constructor("Cons", [typ_var, list_var(typ_var)], list_var)
nil_constructor = relay.Constructor("Nil", [], list_var)
list_def = relay.TypeData(list_var, [typ_var], [cons_constructor, nil_constructor])
mod[list_var] = list_def
length_var = relay.GlobalVar("length")
typ_var = relay.TypeVar("A")
input_type = list_var(typ_var)
input_var = relay.Var("xs", input_type)
rest_var = relay.Var("rest")
cons_case = relay.Let(
relay.var("", type_annotation=None),
UNIT,
relay.add(relay.const(1), relay.Call(length_var, [rest_var])),
)
body = relay.Match(
input_var,
[
relay.Clause(
relay.PatternConstructor(
cons_constructor, [relay.PatternWildcard(), relay.PatternVar(rest_var)]
),
cons_case,
),
relay.Clause(relay.PatternConstructor(nil_constructor, []), relay.const(0)),
],
complete=is_complete,
)
length_func = relay.Function([input_var], body, int32, [typ_var])
mod[length_var] = length_func
assert_parse_module_as(
"""
%s
def @length[A](%%xs: List[A]) -> int32 {
%s (%%xs) {
Cons(_, %%rest : List[A]) => {
();
1 + @length(%%rest)
},
Nil => 0,
}
}
"""
% (LIST_DEFN, match_keyword),
mod,
)
def test_adt_cons_expr():
mod = tvm.IRModule()
list_var = relay.GlobalTypeVar("List")
typ_var = relay.TypeVar("A")
cons_constructor = relay.Constructor("Cons", [typ_var, list_var(typ_var)], list_var)
nil_constructor = relay.Constructor("Nil", [], list_var)
list_def = relay.TypeData(list_var, [typ_var], [cons_constructor, nil_constructor])
mod[list_var] = list_def
make_singleton_var = relay.GlobalVar("make_singleton")
input_var = relay.Var("x", int32)
make_singleton_func = relay.Function(
[input_var], cons_constructor(input_var, nil_constructor()), list_var(int32)
)
mod[make_singleton_var] = make_singleton_func
assert_parse_module_as(
"""
%s
def @make_singleton(%%x: int32) -> List[int32] {
Cons(%%x, Nil)
}
"""
% LIST_DEFN,
mod,
)
def test_duplicate_adt_defn():
with pytest.raises(tvm.error.DiagnosticError):
parse_module(
"""
%s
type List[A] {
Cons(A, List[A]),
Nil,
}
"""
% LIST_DEFN
)
def test_duplicate_adt_cons():
with pytest.raises(tvm.error.DiagnosticError):
parse_text(
"""
type Ayy { Lmao }
type Haha { Lmao }
"""
)
def test_duplicate_adt_cons_defn():
with pytest.raises(tvm.error.DiagnosticError):
parse_text(
"""
type Ayy { Lmao }
type Lmao { Ayy }
"""
)
def test_duplicate_global_var():
with pytest.raises(tvm.error.DiagnosticError):
parse_text(
"""
def @id[A](%x: A) -> A { x }
def @id[A](%x: A) -> A { x }
"""
)
def test_extern_adt_defn():
mod = tvm.IRModule()
extern_var = relay.GlobalTypeVar("T")
typ_var = relay.TypeVar("A")
extern_def = relay.TypeData(extern_var, [typ_var], [])
mod[extern_var] = extern_def
assert_parse_module_as(
"""
extern type T[A]
""",
mod,
)
def test_import_grad():
    """gradient.rly from the standard library imports cleanly."""
    mod = tvm.IRModule()
    mod.import_from_std("gradient.rly")
def test_resnet():
    """Round-trip: a printed ResNet module re-parses structurally equal."""
    mod, _ = relay.testing.resnet.get_workload()
    text = mod.astext()
    parsed_mod = tvm.parser.parse(text)
    tvm.ir.assert_structural_equal(mod, parsed_mod)
def inline_params(mod, params):
    """Bind the given parameter tensors into @main as constants.

    Each key of *params* is matched to the @main parameter with the same
    name hint, replaced by a relay.const of its value in the body, and
    @main is rebuilt over the remaining free vars.
    """
    main_fn = mod["main"]
    str_to_var = {}
    for param in main_fn.params:
        str_to_var[param.name_hint] = param
    bind_map = {}
    for param in params:
        bind_map[str_to_var[param]] = relay.const(params[param])
    body = relay.bind(main_fn.body, bind_map)
    main_fn = relay.Function(relay.analysis.free_vars(body), body)
    # _add with update=True overwrites the existing @main definition.
    mod._add("main", main_fn, True)
    return mod
def test_resnet_inlined_params():
    """Round-trip a ResNet whose weights were inlined as constants."""
    mod, params = relay.testing.resnet.get_workload()
    mod = inline_params(mod, params)
    mod = relay.transform.InferType()(mod)
    text = mod.astext()
    parsed_mod = tvm.parser.parse(text)
    tvm.ir.assert_structural_equal(mod, parsed_mod)
def test_tuple_return_value():
program = """
type Box[T] {
constructor(T)
}
def @example() {
%0 = ();
%1 = constructor(%0);
%2 = constructor(0f);
(%1, %2,)
}
"""
parse_module(program)
def test_op_string_attr():
    """String-valued call attributes (layout strings) survive parsing."""
    call = parse_text(
        """
        free_var %x: Tensor[(1, 32, 32, 3), float32];
        free_var %y: Tensor[(1, 1, 3, 3), float32];
        nn.conv2d(%x, %y, data_layout="NHWC", kernel_layout="HWIO")
        """
    )
    assert isinstance(call.op, tvm.ir.Op)
    assert call.op.name == "nn.conv2d"
    assert call.attrs.data_layout == "NHWC"
    assert call.attrs.kernel_layout == "HWIO"
def test_load_prelude():
    """The prelude pretty-prints to text that re-parses cleanly."""
    mod = tvm.IRModule()
    mod.import_from_std("prelude.rly")
    tvm.parser.parse(mod.astext())
if __name__ == "__main__":
    # Allow running this file directly; forwards CLI args to pytest.
    import sys

    pytest.main(sys.argv)
| apache-2.0 |
yrobla/nova | nova/api/openstack/compute/contrib/floating_ip_dns.py | 4 | 10974 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Andrew Bogott for the Wikimedia Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License
import urllib
import webob
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova import network
from nova.openstack.common import log as logging
from nova import utils
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('compute', 'floating_ip_dns')
def make_dns_entry(elem):
    """Declare the serialized attributes of a dns_entry XML element."""
    for attribute in ('id', 'ip', 'type', 'domain', 'name'):
        elem.set(attribute)
def make_domain_entry(elem):
    """Declare the serialized attributes of a domain_entry XML element."""
    for attribute in ('domain', 'scope', 'project', 'availability_zone'):
        elem.set(attribute)
class FloatingIPDNSTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a single dns_entry response."""

    def construct(self):
        root = xmlutil.TemplateElement('dns_entry',
                                       selector='dns_entry')
        make_dns_entry(root)
        return xmlutil.MasterTemplate(root, 1)
class FloatingIPDNSsTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a list of dns_entry responses."""

    def construct(self):
        root = xmlutil.TemplateElement('dns_entries')
        elem = xmlutil.SubTemplateElement(root, 'dns_entry',
                                          selector='dns_entries')
        make_dns_entry(elem)
        return xmlutil.MasterTemplate(root, 1)
class DomainTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a single domain_entry response."""

    def construct(self):
        root = xmlutil.TemplateElement('domain_entry',
                                       selector='domain_entry')
        make_domain_entry(root)
        return xmlutil.MasterTemplate(root, 1)
class DomainsTemplate(xmlutil.TemplateBuilder):
    """XML serializer template for a list of domain_entry responses."""

    def construct(self):
        root = xmlutil.TemplateElement('domain_entries')
        elem = xmlutil.SubTemplateElement(root, 'domain_entry',
                                          selector='domain_entries')
        make_domain_entry(elem)
        return xmlutil.MasterTemplate(root, 1)
def _translate_dns_entry_view(dns_entry):
result = {}
result['ip'] = dns_entry.get('ip')
result['id'] = dns_entry.get('id')
result['type'] = dns_entry.get('type')
result['domain'] = dns_entry.get('domain')
result['name'] = dns_entry.get('name')
return {'dns_entry': result}
def _translate_dns_entries_view(dns_entries):
    """Map a list of DNS entries onto the plural API response dictionary."""
    views = []
    for entry in dns_entries:
        views.append(_translate_dns_entry_view(entry)['dns_entry'])
    return {'dns_entries': views}
def _translate_domain_entry_view(domain_entry):
result = {}
result['domain'] = domain_entry.get('domain')
result['scope'] = domain_entry.get('scope')
result['project'] = domain_entry.get('project')
result['availability_zone'] = domain_entry.get('availability_zone')
return {'domain_entry': result}
def _translate_domain_entries_view(domain_entries):
    """Map a list of DNS domains onto the plural API response dictionary."""
    views = []
    for entry in domain_entries:
        views.append(_translate_domain_entry_view(entry)['domain_entry'])
    return {'domain_entries': views}
def _unquote_domain(domain):
    """Unquoting function for receiving a domain name in a URL.

    Domain names tend to have .'s in them. Urllib doesn't quote dots,
    but Routes tends to choke on them, so we need an extra level of
    by-hand quoting here.
    """
    # NOTE(review): urllib.unquote already decodes a single '%2E'; the
    # trailing replace presumably catches clients that double-quoted the
    # dot ('%252E' unquotes to the literal '%2E') -- confirm with callers.
    # urllib.unquote is Python 2 only (urllib.parse.unquote on Python 3).
    return urllib.unquote(domain).replace('%2E', '.')
def _create_dns_entry(ip, name, domain):
return {'ip': ip, 'name': name, 'domain': domain}
def _create_domain_entry(domain, scope=None, project=None, av_zone=None):
return {'domain': domain, 'scope': scope, 'project': project,
'availability_zone': av_zone}
class FloatingIPDNSDomainController(object):
    """DNS domain controller for OpenStack API."""

    def __init__(self):
        self.network_api = network.API()
        super(FloatingIPDNSDomainController, self).__init__()

    @wsgi.serializers(xml=DomainsTemplate)
    def index(self, req):
        """Return a list of available DNS domains."""
        context = req.environ['nova.context']
        authorize(context)
        domains = self.network_api.get_dns_domains(context)
        domainlist = [_create_domain_entry(domain['domain'],
                                           domain.get('scope'),
                                           domain.get('project'),
                                           domain.get('availability_zone'))
                      for domain in domains]
        return _translate_domain_entries_view(domainlist)

    @wsgi.serializers(xml=DomainTemplate)
    def update(self, req, id, body):
        """Add or modify domain entry.

        Requires body['domain_entry']['scope'] of 'private' or 'public'.
        A private domain may only carry an availability_zone and a
        public domain only a project; any other combination is a 422.
        """
        context = req.environ['nova.context']
        authorize(context)
        fqdomain = _unquote_domain(id)
        try:
            entry = body['domain_entry']
            scope = entry['scope']
        except (TypeError, KeyError):
            raise webob.exc.HTTPUnprocessableEntity()
        project = entry.get('project', None)
        av_zone = entry.get('availability_zone', None)
        # Reject unknown scopes and scope/area mismatches (see docstring).
        if (scope not in ('private', 'public') or
                project and av_zone or
                scope == 'private' and project or
                scope == 'public' and av_zone):
            raise webob.exc.HTTPUnprocessableEntity()
        if scope == 'private':
            create_dns_domain = self.network_api.create_private_dns_domain
            area_name, area = 'availability_zone', av_zone
        else:
            create_dns_domain = self.network_api.create_public_dns_domain
            area_name, area = 'project', project
        create_dns_domain(context, fqdomain, area)
        return _translate_domain_entry_view({'domain': fqdomain,
                                             'scope': scope,
                                             area_name: area})

    def delete(self, req, id):
        """Delete the domain identified by id."""
        context = req.environ['nova.context']
        authorize(context)
        domain = _unquote_domain(id)

        # Delete the whole domain
        try:
            self.network_api.delete_dns_domain(context, domain)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())

        # 202 Accepted: deletion may complete asynchronously.
        return webob.Response(status_int=202)
class FloatingIPDNSEntryController(object):
    """DNS Entry controller for OpenStack API."""

    def __init__(self):
        self.network_api = network.API()
        super(FloatingIPDNSEntryController, self).__init__()

    @wsgi.serializers(xml=FloatingIPDNSTemplate)
    def show(self, req, domain_id, id):
        """Return the DNS entry that corresponds to domain_id and id.

        `id` is either an IP address (returns every matching entry as a
        plural dns_entries response) or an entry name (returns a single
        dns_entry view).
        """
        context = req.environ['nova.context']
        authorize(context)
        domain = _unquote_domain(domain_id)

        floating_ip = None
        # Check whether id is a valid ipv4/ipv6 address.
        if utils.is_valid_ipv4(id) or utils.is_valid_ipv6(id):
            floating_ip = id

        if floating_ip:
            entries = self.network_api.get_dns_entries_by_address(context,
                                                                  floating_ip,
                                                                  domain)
        else:
            entries = self.network_api.get_dns_entries_by_name(context, id,
                                                               domain)

        if not entries:
            # NOTE(review): `_` is assumed to be the gettext builtin nova
            # installs at startup; it is not imported in this module.
            explanation = _("DNS entries not found.")
            raise webob.exc.HTTPNotFound(explanation=explanation)

        if floating_ip:
            # An address can map to several names, so serialize with the
            # plural template instead of the default one.
            entrylist = [_create_dns_entry(floating_ip, entry, domain)
                         for entry in entries]
            dns_entries = _translate_dns_entries_view(entrylist)
            return wsgi.ResponseObject(dns_entries,
                                       xml=FloatingIPDNSsTemplate)

        entry = _create_dns_entry(entries[0], id, domain)
        return _translate_dns_entry_view(entry)

    @wsgi.serializers(xml=FloatingIPDNSTemplate)
    def update(self, req, domain_id, id, body):
        """Add or modify dns entry.

        Expects body['dns_entry'] with 'ip' and 'dns_type'; creates the
        entry when the name is unused, otherwise rewrites its address.
        """
        context = req.environ['nova.context']
        authorize(context)
        domain = _unquote_domain(domain_id)
        name = id
        try:
            entry = body['dns_entry']
            address = entry['ip']
            dns_type = entry['dns_type']
        except (TypeError, KeyError):
            raise webob.exc.HTTPUnprocessableEntity()

        entries = self.network_api.get_dns_entries_by_name(context,
                                                           name, domain)
        if not entries:
            # create!
            self.network_api.add_dns_entry(context, address, name,
                                           dns_type, domain)
        else:
            # modify!
            self.network_api.modify_dns_entry(context, name, address, domain)

        return _translate_dns_entry_view({'ip': address,
                                          'name': name,
                                          'type': dns_type,
                                          'domain': domain})

    def delete(self, req, domain_id, id):
        """Delete the entry identified by req and id."""
        context = req.environ['nova.context']
        authorize(context)
        domain = _unquote_domain(domain_id)
        name = id

        try:
            self.network_api.delete_dns_entry(context, name, domain)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())

        # 202 Accepted: deletion may complete asynchronously.
        return webob.Response(status_int=202)
class Floating_ip_dns(extensions.ExtensionDescriptor):
    """Floating IP DNS support."""

    name = "FloatingIpDns"
    alias = "os-floating-ip-dns"
    namespace = "http://docs.openstack.org/ext/floating_ip_dns/api/v1.1"
    updated = "2011-12-23T00:00:00+00:00"

    def __init__(self, ext_mgr):
        self.network_api = network.API()
        super(Floating_ip_dns, self).__init__(ext_mgr)

    def get_resources(self):
        """Register /os-floating-ip-dns and its nested /entries resource."""
        resources = []

        res = extensions.ResourceExtension('os-floating-ip-dns',
                                           FloatingIPDNSDomainController())
        resources.append(res)

        # Entries are nested under a domain member:
        #   /os-floating-ip-dns/{domain}/entries/{id}
        res = extensions.ResourceExtension('entries',
                                           FloatingIPDNSEntryController(),
                                           parent={'member_name': 'domain',
                                                   'collection_name': 'os-floating-ip-dns'})
        resources.append(res)

        return resources
| apache-2.0 |
pombredanne/django-rest-framework | tests/test_validators.py | 2 | 15929 | import datetime
from django.db import models
from django.test import TestCase
from rest_framework import serializers
from rest_framework.validators import UniqueValidator
def dedent(blocktext):
    """Strip the 12-space indent used by the triple-quoted fixtures.

    Drops the first and last lines (the blank line after the opening
    quotes and the closing-quote line) and removes the leading twelve
    characters from every remaining line.
    """
    inner_lines = blocktext.splitlines()[1:-1]
    return '\n'.join(line[12:] for line in inner_lines)
# Tests for `UniqueValidator`
# ---------------------------
class UniquenessModel(models.Model):
    # Single unique column; exercises the implicit `UniqueValidator`
    # that ModelSerializer derives from `unique=True`.
    username = models.CharField(unique=True, max_length=100)
class UniquenessSerializer(serializers.ModelSerializer):
    """Plain ModelSerializer over UniquenessModel; `username` should
    pick up a UniqueValidator automatically."""

    class Meta:
        model = UniquenessModel
        fields = '__all__'
class RelatedModel(models.Model):
user = models.OneToOneField(UniquenessModel, on_delete=models.CASCADE)
email = models.CharField(unique=True, max_length=80)
class RelatedModelSerializer(serializers.ModelSerializer):
username = serializers.CharField(source='user.username',
validators=[UniqueValidator(queryset=UniquenessModel.objects.all())]) # NOQA
class Meta:
model = RelatedModel
fields = ('username', 'email')
class AnotherUniquenessModel(models.Model):
code = models.IntegerField(unique=True)
class AnotherUniquenessSerializer(serializers.ModelSerializer):
class Meta:
model = AnotherUniquenessModel
fields = '__all__'
class IntegerFieldModel(models.Model):
integer = models.IntegerField()
class UniquenessIntegerSerializer(serializers.Serializer):
# Note that this field *deliberately* does not correspond with the model field.
# This allows us to ensure that `ValueError`, `TypeError` or `DataError` etc
# raised by a uniqueness check does not trigger a deceptive "this field is not unique"
# validation failure.
integer = serializers.CharField(validators=[UniqueValidator(queryset=IntegerFieldModel.objects.all())])
class TestUniquenessValidation(TestCase):
def setUp(self):
self.instance = UniquenessModel.objects.create(username='existing')
def test_repr(self):
serializer = UniquenessSerializer()
expected = dedent("""
UniquenessSerializer():
id = IntegerField(label='ID', read_only=True)
username = CharField(max_length=100, validators=[<UniqueValidator(queryset=UniquenessModel.objects.all())>])
""")
assert repr(serializer) == expected
def test_is_not_unique(self):
data = {'username': 'existing'}
serializer = UniquenessSerializer(data=data)
assert not serializer.is_valid()
assert serializer.errors == {'username': ['UniquenessModel with this username already exists.']}
def test_is_unique(self):
data = {'username': 'other'}
serializer = UniquenessSerializer(data=data)
assert serializer.is_valid()
assert serializer.validated_data == {'username': 'other'}
def test_updated_instance_excluded(self):
data = {'username': 'existing'}
serializer = UniquenessSerializer(self.instance, data=data)
assert serializer.is_valid()
assert serializer.validated_data == {'username': 'existing'}
def test_doesnt_pollute_model(self):
instance = AnotherUniquenessModel.objects.create(code='100')
serializer = AnotherUniquenessSerializer(instance)
self.assertEqual(
AnotherUniquenessModel._meta.get_field('code').validators, [])
# Accessing data shouldn't effect validators on the model
serializer.data
self.assertEqual(
AnotherUniquenessModel._meta.get_field('code').validators, [])
def test_related_model_is_unique(self):
data = {'username': 'existing', 'email': 'new-email@example.com'}
rs = RelatedModelSerializer(data=data)
self.assertFalse(rs.is_valid())
self.assertEqual(rs.errors,
{'username': ['This field must be unique.']})
data = {'username': 'new-username', 'email': 'new-email@example.com'}
rs = RelatedModelSerializer(data=data)
self.assertTrue(rs.is_valid())
def test_value_error_treated_as_not_unique(self):
serializer = UniquenessIntegerSerializer(data={'integer': 'abc'})
assert serializer.is_valid()
# Tests for `UniqueTogetherValidator`
# -----------------------------------
class UniquenessTogetherModel(models.Model):
race_name = models.CharField(max_length=100)
position = models.IntegerField()
class Meta:
unique_together = ('race_name', 'position')
class NullUniquenessTogetherModel(models.Model):
"""
Used to ensure that null values are not included when checking
unique_together constraints.
Ignoring items which have a null in any of the validated fields is the same
behavior that database backends will use when they have the
unique_together constraint added.
Example case: a null position could indicate a non-finisher in the race,
there could be many non-finishers in a race, but all non-NULL
values *should* be unique against the given `race_name`.
"""
date_of_birth = models.DateField(null=True) # Not part of the uniqueness constraint
race_name = models.CharField(max_length=100)
position = models.IntegerField(null=True)
class Meta:
unique_together = ('race_name', 'position')
class UniquenessTogetherSerializer(serializers.ModelSerializer):
class Meta:
model = UniquenessTogetherModel
fields = '__all__'
class NullUniquenessTogetherSerializer(serializers.ModelSerializer):
class Meta:
model = NullUniquenessTogetherModel
fields = '__all__'
class TestUniquenessTogetherValidation(TestCase):
def setUp(self):
self.instance = UniquenessTogetherModel.objects.create(
race_name='example',
position=1
)
UniquenessTogetherModel.objects.create(
race_name='example',
position=2
)
UniquenessTogetherModel.objects.create(
race_name='other',
position=1
)
def test_repr(self):
serializer = UniquenessTogetherSerializer()
expected = dedent("""
UniquenessTogetherSerializer():
id = IntegerField(label='ID', read_only=True)
race_name = CharField(max_length=100, required=True)
position = IntegerField(required=True)
class Meta:
validators = [<UniqueTogetherValidator(queryset=UniquenessTogetherModel.objects.all(), fields=('race_name', 'position'))>]
""")
assert repr(serializer) == expected
def test_is_not_unique_together(self):
"""
Failing unique together validation should result in non field errors.
"""
data = {'race_name': 'example', 'position': 2}
serializer = UniquenessTogetherSerializer(data=data)
assert not serializer.is_valid()
assert serializer.errors == {
'non_field_errors': [
'The fields race_name, position must make a unique set.'
]
}
def test_is_unique_together(self):
"""
In a unique together validation, one field may be non-unique
so long as the set as a whole is unique.
"""
data = {'race_name': 'other', 'position': 2}
serializer = UniquenessTogetherSerializer(data=data)
assert serializer.is_valid()
assert serializer.validated_data == {
'race_name': 'other',
'position': 2
}
def test_updated_instance_excluded_from_unique_together(self):
"""
When performing an update, the existing instance does not count
as a match against uniqueness.
"""
data = {'race_name': 'example', 'position': 1}
serializer = UniquenessTogetherSerializer(self.instance, data=data)
assert serializer.is_valid()
assert serializer.validated_data == {
'race_name': 'example',
'position': 1
}
def test_unique_together_is_required(self):
"""
In a unique together validation, all fields are required.
"""
data = {'position': 2}
serializer = UniquenessTogetherSerializer(data=data, partial=True)
assert not serializer.is_valid()
assert serializer.errors == {
'race_name': ['This field is required.']
}
def test_ignore_excluded_fields(self):
"""
When model fields are not included in a serializer, then uniqueness
validators should not be added for that field.
"""
class ExcludedFieldSerializer(serializers.ModelSerializer):
class Meta:
model = UniquenessTogetherModel
fields = ('id', 'race_name',)
serializer = ExcludedFieldSerializer()
expected = dedent("""
ExcludedFieldSerializer():
id = IntegerField(label='ID', read_only=True)
race_name = CharField(max_length=100)
""")
assert repr(serializer) == expected
def test_ignore_read_only_fields(self):
"""
When serializer fields are read only, then uniqueness
validators should not be added for that field.
"""
class ReadOnlyFieldSerializer(serializers.ModelSerializer):
class Meta:
model = UniquenessTogetherModel
fields = ('id', 'race_name', 'position')
read_only_fields = ('race_name',)
serializer = ReadOnlyFieldSerializer()
expected = dedent("""
ReadOnlyFieldSerializer():
id = IntegerField(label='ID', read_only=True)
race_name = CharField(read_only=True)
position = IntegerField(required=True)
""")
assert repr(serializer) == expected
def test_allow_explict_override(self):
"""
Ensure validators can be explicitly removed..
"""
class NoValidatorsSerializer(serializers.ModelSerializer):
class Meta:
model = UniquenessTogetherModel
fields = ('id', 'race_name', 'position')
validators = []
serializer = NoValidatorsSerializer()
expected = dedent("""
NoValidatorsSerializer():
id = IntegerField(label='ID', read_only=True)
race_name = CharField(max_length=100)
position = IntegerField()
""")
assert repr(serializer) == expected
def test_ignore_validation_for_null_fields(self):
# None values that are on fields which are part of the uniqueness
# constraint cause the instance to ignore uniqueness validation.
NullUniquenessTogetherModel.objects.create(
date_of_birth=datetime.date(2000, 1, 1),
race_name='Paris Marathon',
position=None
)
data = {
'date': datetime.date(2000, 1, 1),
'race_name': 'Paris Marathon',
'position': None
}
serializer = NullUniquenessTogetherSerializer(data=data)
assert serializer.is_valid()
def test_do_not_ignore_validation_for_null_fields(self):
# None values that are not on fields part of the uniqueness constraint
# do not cause the instance to skip validation.
NullUniquenessTogetherModel.objects.create(
date_of_birth=datetime.date(2000, 1, 1),
race_name='Paris Marathon',
position=1
)
data = {'date': None, 'race_name': 'Paris Marathon', 'position': 1}
serializer = NullUniquenessTogetherSerializer(data=data)
assert not serializer.is_valid()
# Tests for `UniqueForDateValidator`
# ----------------------------------
class UniqueForDateModel(models.Model):
slug = models.CharField(max_length=100, unique_for_date='published')
published = models.DateField()
class UniqueForDateSerializer(serializers.ModelSerializer):
class Meta:
model = UniqueForDateModel
fields = '__all__'
class TestUniquenessForDateValidation(TestCase):
def setUp(self):
self.instance = UniqueForDateModel.objects.create(
slug='existing',
published='2000-01-01'
)
def test_repr(self):
serializer = UniqueForDateSerializer()
expected = dedent("""
UniqueForDateSerializer():
id = IntegerField(label='ID', read_only=True)
slug = CharField(max_length=100)
published = DateField(required=True)
class Meta:
validators = [<UniqueForDateValidator(queryset=UniqueForDateModel.objects.all(), field='slug', date_field='published')>]
""")
assert repr(serializer) == expected
def test_is_not_unique_for_date(self):
"""
Failing unique for date validation should result in field error.
"""
data = {'slug': 'existing', 'published': '2000-01-01'}
serializer = UniqueForDateSerializer(data=data)
assert not serializer.is_valid()
assert serializer.errors == {
'slug': ['This field must be unique for the "published" date.']
}
def test_is_unique_for_date(self):
"""
Passing unique for date validation.
"""
data = {'slug': 'existing', 'published': '2000-01-02'}
serializer = UniqueForDateSerializer(data=data)
assert serializer.is_valid()
assert serializer.validated_data == {
'slug': 'existing',
'published': datetime.date(2000, 1, 2)
}
def test_updated_instance_excluded_from_unique_for_date(self):
"""
When performing an update, the existing instance does not count
as a match against unique_for_date.
"""
data = {'slug': 'existing', 'published': '2000-01-01'}
serializer = UniqueForDateSerializer(instance=self.instance, data=data)
assert serializer.is_valid()
assert serializer.validated_data == {
'slug': 'existing',
'published': datetime.date(2000, 1, 1)
}
class HiddenFieldUniqueForDateModel(models.Model):
slug = models.CharField(max_length=100, unique_for_date='published')
published = models.DateTimeField(auto_now_add=True)
class TestHiddenFieldUniquenessForDateValidation(TestCase):
def test_repr_date_field_not_included(self):
class TestSerializer(serializers.ModelSerializer):
class Meta:
model = HiddenFieldUniqueForDateModel
fields = ('id', 'slug')
serializer = TestSerializer()
expected = dedent("""
TestSerializer():
id = IntegerField(label='ID', read_only=True)
slug = CharField(max_length=100)
published = HiddenField(default=CreateOnlyDefault(<function now>))
class Meta:
validators = [<UniqueForDateValidator(queryset=HiddenFieldUniqueForDateModel.objects.all(), field='slug', date_field='published')>]
""")
assert repr(serializer) == expected
def test_repr_date_field_included(self):
class TestSerializer(serializers.ModelSerializer):
class Meta:
model = HiddenFieldUniqueForDateModel
fields = ('id', 'slug', 'published')
serializer = TestSerializer()
expected = dedent("""
TestSerializer():
id = IntegerField(label='ID', read_only=True)
slug = CharField(max_length=100)
published = DateTimeField(default=CreateOnlyDefault(<function now>), read_only=True)
class Meta:
validators = [<UniqueForDateValidator(queryset=HiddenFieldUniqueForDateModel.objects.all(), field='slug', date_field='published')>]
""")
assert repr(serializer) == expected
| bsd-2-clause |
highweb-project/highweb-webcl-html5spec | third_party/jinja2/_compat.py | 638 | 4042 | # -*- coding: utf-8 -*-
"""
jinja2._compat
~~~~~~~~~~~~~~
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
:copyright: Copyright 2013 by the Jinja team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import sys
PY2 = sys.version_info[0] == 2
PYPY = hasattr(sys, 'pypy_translation_info')
_identity = lambda x: x
# Python 3 branch: native str/range/iterator views; 2.x-style names are
# aliased onto them so the rest of the codebase can use one spelling.
if not PY2:
    unichr = chr
    range_type = range
    text_type = str
    string_types = (str,)
    iterkeys = lambda d: iter(d.keys())
    itervalues = lambda d: iter(d.values())
    iteritems = lambda d: iter(d.items())
    import pickle
    from io import BytesIO, StringIO
    NativeStringIO = StringIO
    def reraise(tp, value, tb=None):
        # Re-raise preserving the original traceback (py3 syntax).
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
    ifilter = filter
    imap = map
    izip = zip
    intern = sys.intern
    # On py3 classes already use __next__/__str__, so the class
    # decorators are identity functions.
    implements_iterator = _identity
    implements_to_string = _identity
    encode_filename = _identity
    get_next = lambda x: x.__next__
else:
    # Python 2 branch: expose 2.x builtins and idioms under the same names.
    unichr = unichr
    text_type = unicode
    range_type = xrange
    string_types = (str, unicode)
    iterkeys = lambda d: d.iterkeys()
    itervalues = lambda d: d.itervalues()
    iteritems = lambda d: d.iteritems()
    import cPickle as pickle
    from cStringIO import StringIO as BytesIO, StringIO
    NativeStringIO = BytesIO
    # `raise tp, value, tb` is a syntax error on py3, so it is hidden
    # inside exec() to keep this module importable everywhere.
    exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
    from itertools import imap, izip, ifilter
    intern = intern
    def implements_iterator(cls):
        # Class decorator: alias a py3-style __next__ to py2's next().
        cls.next = cls.__next__
        del cls.__next__
        return cls
    def implements_to_string(cls):
        # Class decorator: __str__ returns UTF-8 bytes, __unicode__ text.
        cls.__unicode__ = cls.__str__
        cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
        return cls
    get_next = lambda x: x.next
    def encode_filename(filename):
        # py2 APIs want byte filenames; encode unicode ones as UTF-8.
        if isinstance(filename, unicode):
            return filename.encode('utf-8')
        return filename
# Ancient interpreters (< 2.6) lack the builtin next(); fall back to
# calling the iterator's .next() method directly.
try:
    next = next
except NameError:
    def next(it):
        return it.next()
def with_metaclass(meta, *bases):
    """Create a base class with metaclass *meta*, portably across py2/py3.

    A single-use bootstrap metaclass builds one throwaway base class; as
    soon as that dummy is subclassed, the bootstrap hands construction off
    to the real *meta* with the real *bases*, so (unlike six's variant) no
    dummy class ever appears in the final MRO.
    """
    class metaclass(meta):
        # During the one bootstrap instantiation the temporary class must
        # behave like a plain type, so borrow type's __call__/__init__.
        __init__ = type.__init__
        __call__ = type.__call__

        def __new__(cls, name, this_bases, namespace):
            if this_bases is not None:
                # Second call: the user subclassed the dummy — build the
                # real class with the real metaclass and bases.
                return meta(name, bases, namespace)
            # First call: construct the dummy base itself.
            return type.__new__(cls, name, (), namespace)
    return metaclass('temporary_class', None, {})
# Resolve the Mapping ABC for isinstance() checks.
# Fix: the ABCs were removed from the ``collections`` top level in
# Python 3.10, which previously sent this code into the py2-only
# ``import UserDict`` branch and crashed with an uncaught ImportError.
# Try the modern location first, then the pre-3.10 alias, then the
# Python 2 fallback types.
try:
    from collections.abc import Mapping as mapping_types
except ImportError:
    try:
        from collections import Mapping as mapping_types
    except ImportError:
        import UserDict
        mapping_types = (UserDict.UserDict, UserDict.DictMixin, dict)
# common types. These do exist in the special types module too which however
# does not exist in IronPython out of the box. Also that way we don't have
# to deal with implementation specific stuff here
class _C(object):
    def method(self): pass

def _func():
    yield None

# Derive the type objects from live samples rather than the types module.
function_type = type(_func)
generator_type = type(_func())
method_type = type(_C().method)
code_type = type(_C.method.__code__)

# Deliberately raise and catch an exception: the only portable way to get
# live traceback and frame objects to take the type of.
try:
    raise TypeError()
except TypeError:
    _tb = sys.exc_info()[2]
    traceback_type = type(_tb)
    frame_type = type(_tb.tb_frame)
# URL quoting: py3 location first, then the py2 urllib fallback.
try:
    from urllib.parse import quote_from_bytes as url_quote
except ImportError:
    from urllib import quote as url_quote

# Lock factory: py2 thread module, py3 threading.Lock, then the
# single-threaded dummy implementation as a last resort.
try:
    from thread import allocate_lock
except ImportError:
    try:
        from threading import Lock as allocate_lock
    except ImportError:
        from dummy_thread import allocate_lock
| bsd-3-clause |
lokeshh/lokeshh-stem | stem/util/lru_cache.py | 7 | 7373 | # Drop in replace for python 3.2's collections.lru_cache, from...
# http://code.activestate.com/recipes/578078-py26-and-py30-backport-of-python-33s-lru-cache/
#
# ... which is under the MIT license. Stem users should *not* rely upon this
# module. It will be removed when we drop support for python 3.2 and below.
"""
Memoization decorator that caches a function's return value. If later called
with the same arguments then the cached value is returned rather than
reevaluated.
This is a a python 2.x port of `functools.lru_cache
<http://docs.python.org/3/library/functools.html#functools.lru_cache>`_. If
using python 3.2 or later you should use that instead.
"""
from collections import namedtuple
from functools import update_wrapper
from threading import RLock
# Statistics snapshot returned by the wrapped function's cache_info() helper.
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
class _HashedSeq(list):
__slots__ = 'hashvalue'
def __init__(self, tup, hash=hash):
self[:] = tup
self.hashvalue = hash(tup)
def __hash__(self):
return self.hashvalue
def _make_key(args, kwds, typed,
kwd_mark = (object(),),
fasttypes = set([int, str, frozenset, type(None)]),
sorted=sorted, tuple=tuple, type=type, len=len):
'Make a cache key from optionally typed positional and keyword arguments'
key = args
if kwds:
sorted_items = sorted(kwds.items())
key += kwd_mark
for item in sorted_items:
key += item
if typed:
key += tuple(type(v) for v in args)
if kwds:
key += tuple(type(v) for k, v in sorted_items)
elif len(key) == 1 and type(key[0]) in fasttypes:
return key[0]
return _HashedSeq(key)
def lru_cache(maxsize=100, typed=False):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
    f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    """
    # Users should only access the lru_cache through its public API:
    # cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    def decorating_function(user_function):

        cache = dict()
        stats = [0, 0]                  # make statistics updateable non-locally
        HITS, MISSES = 0, 1             # names for the stats fields
        make_key = _make_key
        cache_get = cache.get           # bound method to lookup key or return None
        _len = len                      # localize the global len() function
        lock = RLock()                  # because linkedlist updates aren't threadsafe
        root = []                       # root of the circular doubly linked list
        root[:] = [root, root, None, None]      # initialize by pointing to self
        nonlocal_root = [root]          # make updateable non-locally
        PREV, NEXT, KEY, RESULT = 0, 1, 2, 3    # names for the link fields

        # Three wrapper variants, chosen once at decoration time so the hot
        # path carries no per-call mode checks.
        if maxsize == 0:

            def wrapper(*args, **kwds):
                # no caching, just do a statistics update after a successful call
                result = user_function(*args, **kwds)
                stats[MISSES] += 1
                return result

        elif maxsize is None:

            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed)
                result = cache_get(key, root)   # root used here as a unique not-found sentinel
                if result is not root:
                    stats[HITS] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                stats[MISSES] += 1
                return result

        else:

            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed) if kwds or typed else args
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # record recent use of the key by moving it to the front of the list
                        root, = nonlocal_root
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = root[PREV]
                        last[NEXT] = root[PREV] = link
                        link[PREV] = last
                        link[NEXT] = root
                        stats[HITS] += 1
                        return result
                # Cache miss: call the user function OUTSIDE the lock so slow
                # functions don't serialize all callers.
                result = user_function(*args, **kwds)
                with lock:
                    root, = nonlocal_root
                    if key in cache:
                        # getting here means that this same key was added to the
                        # cache while the lock was released. since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif _len(cache) >= maxsize:
                        # use the old root to store the new key and result
                        oldroot = root
                        oldroot[KEY] = key
                        oldroot[RESULT] = result
                        # empty the oldest link and make it the new root
                        root = nonlocal_root[0] = oldroot[NEXT]
                        oldkey = root[KEY]
                        root[KEY] = root[RESULT] = None
                        # now update the cache dictionary for the new links
                        del cache[oldkey]
                        cache[key] = oldroot
                    else:
                        # put result in a new link at the front of the list
                        last = root[PREV]
                        link = [last, root, key, result]
                        last[NEXT] = root[PREV] = cache[key] = link
                    stats[MISSES] += 1
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                root = nonlocal_root[0]
                root[:] = [root, root, None, None]
                stats[:] = [0, 0]

        wrapper.__wrapped__ = user_function
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return update_wrapper(wrapper, user_function)

    return decorating_function
| lgpl-3.0 |
jbtule/keyczar | cpp/src/tools/swtoolkit/test/help_test.py | 18 | 2153 | #!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test hammer displays SCons help for SCons help options (MEDIUM TEST)."""
import TestFramework
def main():
    """Verify that every help-style flag surfaces the SCons usage banner."""
    test = TestFramework.TestFramework()
    expect = "usage: scons [OPTION] [TARGET] ..."
    # All four help options must produce the same SCons usage text.
    for flag in ("-h", "--help", "-H", "--help-options"):
        test.run(arguments=flag)
        test.fail_test(test.stdout().find(expect) == -1)
    test.pass_test()
    return 0


if __name__ == "__main__":
    main()
| apache-2.0 |
ewheeler/tracpro | tracpro/msgs/tests.py | 1 | 4100 | from __future__ import absolute_import, unicode_literals
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.utils import timezone
from mock import patch
from temba.types import Broadcast
from tracpro.msgs.models import Message, COHORT_ALL, COHORT_RESPONDENTS, COHORT_NONRESPONDENTS
from tracpro.polls.models import Issue, Response, RESPONSE_COMPLETE, RESPONSE_PARTIAL, RESPONSE_EMPTY
from tracpro.test import TracProTest
class MessageTest(TracProTest):
    """Unit tests for Message.create cohort/region targeting."""

    @override_settings(CELERY_ALWAYS_EAGER=True, CELERY_EAGER_PROPAGATES_EXCEPTIONS=True, BROKER_BACKEND='memory')
    @patch('dash.orgs.models.TembaClient.create_broadcast')
    def test_create(self, mock_create_broadcast):
        # Stub out the RapidPro API call so no network traffic happens.
        mock_create_broadcast.return_value = Broadcast.create()

        now = timezone.now()

        # create non-regional issue with 3 responses (1 complete, 1 partial, 1 empty)
        issue1 = Issue.objects.create(poll=self.poll1, region=None, conducted_on=timezone.now())
        Response.objects.create(flow_run_id=123, issue=issue1, contact=self.contact1,
                                created_on=now, updated_on=now, status=RESPONSE_COMPLETE)
        Response.objects.create(flow_run_id=234, issue=issue1, contact=self.contact2,
                                created_on=now, updated_on=now, status=RESPONSE_PARTIAL)
        Response.objects.create(flow_run_id=345, issue=issue1, contact=self.contact4,
                                created_on=now, updated_on=now, status=RESPONSE_EMPTY)

        # COHORT_ALL with no region: every contact with a response is targeted.
        msg1 = Message.create(self.unicef, self.admin, "Test #1", issue1, COHORT_ALL, None)
        self.assertEqual(msg1.org, self.unicef)
        self.assertEqual(msg1.sent_by, self.admin)
        self.assertIsNotNone(msg1.sent_on)
        self.assertEqual(msg1.text, "Test #1")
        self.assertEqual(msg1.issue, issue1)
        self.assertEqual(msg1.cohort, COHORT_ALL)
        self.assertEqual(msg1.region, None)
        self.assertEqual(list(msg1.recipients.order_by('pk')), [self.contact1, self.contact2, self.contact4])
        self.assertEqual(unicode(msg1), "Test #1")

        self.assertEqual(msg1.as_json(), dict(id=msg1.pk, recipients=3))

        # COHORT_RESPONDENTS: only contacts with a complete response.
        msg2 = Message.create(self.unicef, self.admin, "Test #1", issue1, COHORT_RESPONDENTS, None)
        self.assertEqual(msg2.cohort, COHORT_RESPONDENTS)
        self.assertEqual(msg2.region, None)
        self.assertEqual(list(msg2.recipients.order_by('pk')), [self.contact1])

        # COHORT_NONRESPONDENTS: contacts with partial or empty responses.
        msg3 = Message.create(self.unicef, self.admin, "Test #1", issue1, COHORT_NONRESPONDENTS, None)
        self.assertEqual(msg3.cohort, COHORT_NONRESPONDENTS)
        self.assertEqual(msg3.region, None)
        self.assertEqual(list(msg3.recipients.order_by('pk')), [self.contact2, self.contact4])

        # Restricting to a region narrows the recipients to that region only.
        msg4 = Message.create(self.unicef, self.admin, "Test #1", issue1, COHORT_ALL, self.region1)
        self.assertEqual(msg4.cohort, COHORT_ALL)
        self.assertEqual(msg4.region, self.region1)
        self.assertEqual(list(msg4.recipients.order_by('pk')), [self.contact1, self.contact2])
class MessageCRUDLTest(TracProTest):
    """Tests for the message list view and its region filtering."""

    def test_list(self):
        url = reverse('msgs.message_list')

        # create a non-regional issue
        issue1 = Issue.objects.create(poll=self.poll1, region=None, conducted_on=timezone.now())

        # send 1 message to all regions and 2 more to specific regions
        msg1 = Message.create(self.unicef, self.admin, "Test to all", issue1, COHORT_ALL, None)
        msg2 = Message.create(self.unicef, self.admin, "Test to region #1", issue1, COHORT_ALL, self.region1)
        msg3 = Message.create(self.unicef, self.admin, "Test to region #2", issue1, COHORT_ALL, self.region2)

        self.login(self.admin)

        # With no region selected the admin sees everything, newest first.
        response = self.url_get('unicef', url)
        self.assertEqual(list(response.context['object_list']), [msg3, msg2, msg1])

        self.switch_region(self.region1)

        # should still include message sent to all regions
        response = self.url_get('unicef', url)
        self.assertEqual(list(response.context['object_list']), [msg2, msg1])
| bsd-3-clause |
masterkorp/obfsproxy | obfsproxy/transports/obfs3_dh.py | 15 | 3248 | import binascii
import obfsproxy.common.rand as rand
import obfsproxy.common.modexp as modexp
def int_to_bytes(lvalue, width):
    """Return the *width* least-significant bytes of integer *lvalue* as a
    big-endian byte string.

    The value is masked down to *width* bytes first, so oversized integers
    are truncated rather than raising.
    """
    # Fix: the original used a '1L' long literal, which is unnecessary on
    # Python 2 (ints auto-promote to long) and a SyntaxError on Python 3.
    fmt = '%%.%dx' % (2 * width)
    return binascii.unhexlify(fmt % (lvalue & ((1 << (8 * width)) - 1)))
class UniformDH:
    """
    This is a class that implements a DH handshake that uses public
    keys that are indistinguishable from 192-byte random strings.

    The idea (and even the implementation) was suggested by Ian
    Goldberg in:

    https://lists.torproject.org/pipermail/tor-dev/2012-December/004245.html
    https://lists.torproject.org/pipermail/tor-dev/2012-December/004248.html

    Attributes:
    mod, the modulus of our DH group.
    g, the generator of our DH group.
    group_len, the size of the group in bytes.

    priv_str, a byte string representing our DH private key.
    priv, our DH private key as an integer.

    pub_str, a byte string representing our DH public key.
    pub, our DH public key as an integer.

    shared_secret, our DH shared secret.
    """

    # 1536-bit MODP Group from RFC3526
    mod = int(
        """FFFFFFFF FFFFFFFF C90FDAA2 2168C234 C4C6628B 80DC1CD1
           29024E08 8A67CC74 020BBEA6 3B139B22 514A0879 8E3404DD
           EF9519B3 CD3A431B 302B0A6D F25F1437 4FE1356D 6D51C245
           E485B576 625E7EC6 F44C42E9 A637ED6B 0BFF5CB6 F406B7ED
           EE386BFB 5A899FA5 AE9F2411 7C4B1FE6 49286651 ECE45B3D
           C2007CB8 A163BF05 98DA4836 1C55D39A 69163FA8 FD24CF5F
           83655D23 DCA3AD96 1C62F356 208552BB 9ED52907 7096966D
           670C354E 4ABC9804 F1746C08 CA237327 FFFFFFFF FFFFFFFF""".replace(' ','').replace('\n','').replace('\t',''), 16)
    g = 2
    group_len = 192  # bytes (1536-bits)

    def __init__(self, private_key = None):
        """Generate (or adopt) a private key and derive the public key.

        Raises ValueError if *private_key* is not exactly group_len bytes.
        """
        # Generate private key
        if private_key is not None:
            if len(private_key) != self.group_len:
                # Fix: 'group_len' was referenced unqualified here, which
                # raised NameError instead of the intended ValueError.
                raise ValueError("private_key is a invalid length (Expected %d, got %d)" % (self.group_len, len(private_key)))
            self.priv_str = private_key
        else:
            self.priv_str = rand.random_bytes(self.group_len)
        self.priv = int(binascii.hexlify(self.priv_str), 16)

        # Make the private key even
        flip = self.priv % 2
        self.priv -= flip

        # Generate public key
        #
        # Note: Always generate both valid public keys, and then pick to avoid
        # leaking timing information about which key was chosen.
        pub = modexp.powMod(self.g, self.priv, self.mod)
        pub_p_sub_X = self.mod - pub
        if flip == 1:
            self.pub = pub_p_sub_X
        else:
            self.pub = pub
        self.pub_str = int_to_bytes(self.pub, self.group_len)

        self.shared_secret = None

    def get_public(self):
        """Return our public key as a group_len byte string."""
        return self.pub_str

    def get_secret(self, their_pub_str):
        """
        Given the public key of the other party as a string of bytes,
        calculate our shared secret.

        This might raise a ValueError since 'their_pub_str' is
        attacker controlled.
        """
        their_pub = int(binascii.hexlify(their_pub_str), 16)

        self.shared_secret = modexp.powMod(their_pub, self.priv, self.mod)
        return int_to_bytes(self.shared_secret, self.group_len)
| bsd-3-clause |
Carmezim/tensorflow | tensorflow/python/client/device_lib.py | 149 | 1308 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A Python interface for creating TensorFlow servers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.framework import device_attributes_pb2
from tensorflow.python import pywrap_tensorflow
def list_local_devices():
    """List the available devices available in the local process.

    Returns:
      A list of `DeviceAttribute` protocol buffers.
    """
    def _parse(serialized):
        # Each entry from the C++ layer is a serialized DeviceAttributes proto.
        attributes = device_attributes_pb2.DeviceAttributes()
        attributes.ParseFromString(serialized)
        return attributes

    return list(map(_parse, pywrap_tensorflow.list_devices()))
| apache-2.0 |
subutai/nupic | examples/prediction/experiments/dutyCycle/problem/description.py | 50 | 1547 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from nupic.frameworks.prediction.helpers import importBaseDescription
# Experiment overrides applied on top of the shared base description.
# Commented-out entries are deliberate toggles left for experimentation.
config = dict(
    #sensorVerbosity=3,
    iterationCount = 1000,
    numAValues = 10,
    numBValues = 10,
    #encodingFieldStyleA = 'contiguous',
    encodingFieldWidthA = 50,
    #encodingOnBitsA = 5,
    #encodingFieldStyleB = 'contiguous',
    encodingFieldWidthB = 50,
    #encodingOnBitsB = 5,
    b0Likelihood = None,
)

# Load the base experiment description with our overrides, then splice its
# entire namespace into this module so the framework sees one flat config.
mod = importBaseDescription('../base/description.py', config)
locals().update(mod.__dict__)
| agpl-3.0 |
wdaher/zulip | zerver/management/commands/process_queue.py | 120 | 1460 | from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.core.management import CommandError
from django.conf import settings
from zerver.worker.queue_processors import get_worker
import sys
import signal
import logging
class Command(BaseCommand):
    """Management command that runs a single queue-processing worker."""

    def add_arguments(self, parser):
        parser.add_argument('queue_name', metavar='<queue name>', type=str,
                            help="queue to process")
        parser.add_argument('worker_num', metavar='<worker number>', type=int, nargs='?', default=0,
                            help="worker label")

    help = "Runs a queue processing worker"

    def handle(self, *args, **options):
        logging.basicConfig()
        logger = logging.getLogger('process_queue')

        queue_name = options['queue_name']
        worker_num = options['worker_num']

        def signal_handler(signal, frame):
            # NOTE: closes over 'worker', which is assigned further down —
            # safe because signals can only fire after worker.start() runs.
            logger.info("Worker %d disconnecting from queue %s" % (worker_num, queue_name))
            worker.stop()
            sys.exit(0)

        if not settings.USING_RABBITMQ:
            logger.error("Cannot run a queue processor when USING_RABBITMQ is False!")
            sys.exit(1)

        # Shut down cleanly on Ctrl-C or a TERM from the supervisor.
        signal.signal(signal.SIGTERM, signal_handler)
        signal.signal(signal.SIGINT, signal_handler)

        logger.info("Worker %d connecting to queue %s" % (worker_num, queue_name))
        worker = get_worker(queue_name)
        worker.start()  # blocks, consuming the queue until signalled
| apache-2.0 |
mrnamingo/vix4-34-enigma2-bcm | lib/python/Components/Converter/ServiceOrbitalPosition.py | 102 | 1768 | # -*- coding: utf-8 -*-
from Components.Converter.Converter import Converter
from enigma import iServiceInformation, iPlayableService, iPlayableServicePtr, eServiceCenter
from ServiceReference import resolveAlternate
from Components.Element import cached
class ServiceOrbitalPosition(Converter, object):
    """Converter that renders the orbital position of the current service's
    satellite (e.g. "19.2° E"), or the tuner type / stream marker otherwise.
    """
    FULL = 0
    SHORT = 1

    def __init__(self, type):
        Converter.__init__(self, type)
        # "Short" omits the degree sign and spacing in the rendered text.
        if type == "Short":
            self.type = self.SHORT
        else:
            self.type = self.FULL

    @cached
    def getText(self):
        service = self.source.service
        if isinstance(service, iPlayableServicePtr):
            # A live, playing service: ask it for its own info object.
            info = service and service.info()
            ref = None
        else: # reference
            info = service and self.source.info
            ref = service
        if not info:
            return ""
        if ref:
            # Resolve alternative-service references to the real service.
            nref = resolveAlternate(ref)
            if nref:
                ref = nref
                info = eServiceCenter.getInstance().info(ref)
            transponder_info = info.getInfoObject(ref, iServiceInformation.sTransponderData)
        else:
            transponder_info = info.getInfoObject(iServiceInformation.sTransponderData)
        if transponder_info:
            tunerType = transponder_info["tuner_type"]
            if tunerType == "DVB-S":
                # orbital_position is in tenths of a degree, 0..3600 eastward;
                # values past 180.0°E are expressed as west longitude.
                pos = int(transponder_info["orbital_position"])
                direction = 'E'
                if pos > 1800:
                    pos = 3600 - pos
                    direction = 'W'
                if self.type == self.SHORT:
                    return "%d.%d%s" % (pos/10, pos%10, direction)
                else:
                    return "%d.%d° %s" % (pos/10, pos%10, direction)
            # Non-satellite tuners have no orbital position; show the type.
            return tunerType
        if ref:
            # No transponder data: distinguish streams and alternatives.
            refString = ref.toString().lower()
            if "%3a//" in refString:
                return _("Stream")
            if refString.startswith("1:134:"):
                return _("Alternative")
        return ""

    text = property(getText)

    def changed(self, what):
        # Re-render on generic changes and on service start events.
        if what[0] != self.CHANGED_SPECIFIC or what[1] in [iPlayableService.evStart]:
            Converter.changed(self, what)
| gpl-2.0 |
sparkslabs/kamaelia | Sketches/JMB/Logger/Log.py | 3 | 7081 | # -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Axon.Component import component
from Kamaelia.Util.Backplane import Backplane, SubscribeTo
from Axon.Ipc import newComponent, producerFinished, shutdownMicroprocess
from Kamaelia.Chassis.Graphline import Graphline
import datetime
def wrapMessage(message):
    """
    Default message wrapper: prefixes the message with the current
    date/time in ISO format and terminates it with a newline.
    """
    timestamp = datetime.datetime.now().isoformat()
    return '{0}: {1}\n'.format(timestamp, message)
def nullWrapper(message):
    """
    Identity wrapper: returns the message unchanged, for callers that want
    the raw text posted to the log.
    """
    return message
class Logger(component):
    """
    This component is used to write messages to file. Upon instantiation, the
    a backplane is registered with the name LOG_ + logname, so that a log named
    'foo.bar' would be registered under 'LOG_foo.bar'.

    Please note that the Logger will not be shut down automatically. It must be
    sent a shutdown message via its control box. Typically this component is to
    be used by a Chassis or some other Parent component to provide a log for its
    children.
    """
    Inboxes = { 'inbox' : 'Receive a tuple containing the filename and message to log',
                'control' : 'Receive shutdown messages',}
    Outboxes = {'outbox' : 'NOT USED',
                'signal' : 'Send shutdown messages',}

    def __init__(self, logname, wrapper = wrapMessage):
        """
        Initializes a new Logger.

        -logname - the name of the log to write to
        -wrapper - a method that takes a message as an argument and returns a
        formatted string to put in the log.
        """
        super(Logger, self).__init__()
        self.logname = logname
        self.bplane = Backplane('LOG_' + logname)
        self.subscriber = SubscribeTo('LOG_' + logname)
        self.wrapper = wrapper

        #add the components as children
        self.addChildren(self.subscriber, self.bplane)
        # Messages published to the backplane flow into our inbox; our
        # signal box propagates shutdown to the backplane.
        self.link((self.subscriber, 'outbox'), (self, 'inbox'))
        self.link((self, 'signal'), (self.bplane, 'control'))

    def main(self):
        # Generator main loop, scheduled cooperatively by Axon.
        self.bplane.activate()
        self.subscriber.activate()
        self.first_run = False  # NOTE(review): appears unused — candidate for removal
        not_done = True
        while not_done:
            if self.dataReady('inbox'):
                # Open/append/close per batch so the file is never held open
                # while the component is idle.
                # NOTE(review): 'file' shadows the Python 2 builtin of the same name.
                file = open(self.logname, 'a')
                while self.dataReady('inbox'):
                    msg = self.recv('inbox')
                    file.write(self.wrapper(msg))
                file.close()

            while self.dataReady('control'):
                msg = self.recv('control')
                if isinstance(msg, (shutdownMicroprocess)):
                    not_done = False
                    self.shutdown(msg)

            if not_done:
                # Sleep until new messages arrive, then yield to the scheduler.
                self.pause()
                yield 1

    def shutdown(self, msg):
        """
        Sends shutdown message to signal box and removes children.
        """
        self.send(msg, 'signal')
        self.removeChild(self.bplane)
        self.removeChild(self.subscriber)
def connectToLogger(component, logger_name):
    """
    Connect a component that has a 'log' outbox to the named logger's
    backplane, so everything it sends to 'log' ends up in the log file.
    """
    # Fix: PublishTo was previously only imported inside the __main__ demo
    # block at the bottom of this module, so calling this function from an
    # importing module raised NameError. Import it locally here.
    from Kamaelia.Util.Backplane import PublishTo

    component.LoggerName = logger_name
    publisher = PublishTo('LOG_' + logger_name)
    # Wire the component's 'log' traffic (and its shutdown signal) into the
    # publisher, which forwards to the logger's backplane.
    graph = Graphline( COMPONENT = component,
                       PUBLISHER = publisher,
                       linkages = {
                           ('COMPONENT', 'log') : ('PUBLISHER', 'inbox'),
                           ('COMPONENT', 'signal') : ('PUBLISHER', 'control'),
                       })
    graph.activate()
    component.addChildren(publisher, graph)
if __name__ == '__main__':
    # Self-test demo: a Producer spams messages to the log while a chassis
    # runs it for a fixed number of scheduler slices, then shuts it down.
    from Kamaelia.Util.Backplane import PublishTo

    class Producer(component):
        """
        A simple component to repeatedly output message.
        """
        Inboxes = {'inbox' : 'NOT USED',
                   'control' : 'receive shutdown messages',}
        Outboxes = {'outbox' : 'push data out',
                    'signal' : 'send shutdown messages',
                    'log' : 'post messages to the log'}
        def __init__(self, message):
            super(Producer, self).__init__()
            self.message = message
        def main(self):
            # Emit the message once per scheduler slice until told to stop.
            not_done = True
            while not_done:
                self.send(self.message, 'log')
                print 'sent %s' % (self.message)
                while self.dataReady('control'):
                    msg = self.recv('control')
                    self.send(msg, 'signal')
                    if isinstance(msg, (shutdownMicroprocess, producerFinished)):
                        not_done = False
                        print 'Producer shutting down!'
                yield 1
            self.send(producerFinished(), 'signal')

    class SomeChassis(component):
        """
        A toy example of a chassis of some kind. This will run each component 50
        times and then send each one a shutdown message.
        """
        Inboxes = {'inbox' : 'NOT USED',
                   'control' : 'NOT USED',}
        Outboxes = {'outbox' : 'NOT USED',
                    'signal-logger' : 'send shutdown signals to the logger',
                    'signal-producer' : 'send shutdown signals to the producer',}
        def __init__(self, Producer, logname):
            super(SomeChassis, self).__init__()
            self.Logger = Logger(logname)
            self.logname = logname
            self.Producer = Producer
            # Wire our shutdown outboxes to the children's control inboxes.
            self.link((self, 'signal-logger'), (self.Logger, 'control'))
            self.link((self, 'signal-producer'), (self.Producer, 'control'))
        def main(self):
            self.Logger.activate()
            connectToLogger(self.Producer, self.logname)
            # Let everything run for 50 scheduler slices, then stop both children.
            i = 0
            while i < 50:
                print 'i = ' + str(i)
                i += 1
                yield 1
            print 'SomeChassis shutting down!'
            self.send(shutdownMicroprocess(), 'signal-logger')
            self.send(shutdownMicroprocess(), 'signal-producer')
SomeChassis(Producer = Producer('blah'), logname = 'blah.log').run() | apache-2.0 |
xhchrn/gegan | train.py | 1 | 3732 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import tensorflow as tf
import argparse
from model.gegan import GEGAN
# Command-line interface. Note: parse_args() runs at import time, so this
# module is intended to be executed as a script, not imported.
parser = argparse.ArgumentParser(description='Train')
# Experiment layout
parser.add_argument('--experiment_dir', dest='experiment_dir', required=True,
                    help='experiment directory, data, samples,checkpoints,etc')
parser.add_argument('--experiment_id', dest='experiment_id', type=int, default=0,
                    help='sequence id for the experiments you prepare to run')
parser.add_argument('--image_size', dest='image_size', type=int, default=64,
                    help="size of your input and output image")
# Loss weights
parser.add_argument('--L1_penalty', dest='L1_penalty', type=int, default=100, help='weight for L1 loss')
parser.add_argument('--Lconst_penalty', dest='Lconst_penalty', type=int, default=15, help='weight for const loss')
parser.add_argument('--Ltv_penalty', dest='Ltv_penalty', type=float, default=0.0, help='weight for tv loss')
parser.add_argument('--Lcategory_penalty', dest='Lcategory_penalty', type=float, default=1.0,
                    help='weight for category loss')
# Embedding configuration
parser.add_argument('--embedding_num', dest='embedding_num', type=int, default=2,
                    help="number for distinct embeddings")
parser.add_argument('--embedding_dim', dest='embedding_dim', type=int, default=64, help="dimension for embedding")
# Training schedule
parser.add_argument('--epoch', dest='epoch', type=int, default=100, help='number of epoch')
parser.add_argument('--batch_size', dest='batch_size', type=int, default=16, help='number of examples in batch')
parser.add_argument('--lr', dest='lr', type=float, default=0.001, help='initial learning rate for adam')
parser.add_argument('--schedule', dest='schedule', type=int, default=10, help='number of epochs to half learning rate')
parser.add_argument('--resume', dest='resume', type=int, default=1, help='resume from previous training')
parser.add_argument('--freeze_encoder', dest='freeze_encoder', type=int, default=0,
                    help="freeze encoder weights during training")
parser.add_argument('--fine_tune', dest='fine_tune', type=str, default=None,
                    help='specific labels id to be fine tuned')
parser.add_argument('--inst_norm', dest='inst_norm', type=int, default=0,
                    help='use conditional instance normalization in your model')
# Checkpoint/sample cadence
parser.add_argument('--sample_steps', dest='sample_steps', type=int, default=10,
                    help='number of batches in between two samples are drawn from validation set')
parser.add_argument('--checkpoint_steps', dest='checkpoint_steps', type=int, default=500,
                    help='number of batches in between two checkpoints')
args = parser.parse_args()
def main(_):
    """Build the GEGAN model from the parsed CLI args and run training.

    The unused parameter is required by tf.app.run(), which passes argv.
    """
    # Let TF grab GPU memory on demand instead of reserving it all up front.
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    with tf.Session(config=config) as sess:
        model = GEGAN(args.experiment_dir, batch_size=args.batch_size, experiment_id=args.experiment_id,
                      input_width=args.image_size, output_width=args.image_size, embedding_num=args.embedding_num,
                      embedding_dim=args.embedding_dim, L1_penalty=args.L1_penalty, Lconst_penalty=args.Lconst_penalty,
                      Ltv_penalty=args.Ltv_penalty, Lcategory_penalty=args.Lcategory_penalty)
        model.register_session(sess)
        model.build_model(is_training=True, inst_norm=args.inst_norm)
        model.train(lr=args.lr, epoch=args.epoch, resume=args.resume,
                    schedule=args.schedule, freeze_encoder=args.freeze_encoder,
                    sample_steps=args.sample_steps, checkpoint_steps=args.checkpoint_steps)


if __name__ == '__main__':
    tf.app.run()
| apache-2.0 |
Bloodyaugust/sugarlabcppboilerplate | lib/boost/tools/build/src/build/build_request.py | 6 | 8018 | # Status: being ported by Vladimir Prus
# TODO: need to re-compare with mainline of .jam
# Base revision: 40480
#
# (C) Copyright David Abrahams 2002. Permission to copy, use, modify, sell and
# distribute this software is granted provided this copyright notice appears in
# all copies. This software is provided "as is" without express or implied
# warranty, and with no claim as to its suitability for any purpose.
import b2.build.feature
feature = b2.build.feature
from b2.util.utility import *
import b2.build.property_set as property_set
def expand_no_defaults (property_sets):
    """ Expand the given build request by combining all property_sets which don't
        specify conflicting non-free features.
    """
    # Make every feature and subfeature explicit before combining.
    explicit = []
    for ps in property_sets:
        explicit.append(ps.expand_subfeatures())
    # Cross-combine the expanded sets and wrap each surviving property list
    # back up as a PropertySet.
    return [property_set.create(combination)
            for combination in __x_product(explicit)]
def __x_product (property_sets):
    """ Return the cross-product of all elements of property_sets, less any
        that would contain conflicting values for single-valued features.
    """
    # The helper also returns the set of features it encountered; only the
    # combinations themselves are needed here.
    combinations, _ = __x_product_aux(property_sets, set())
    return combinations
def __x_product_aux (property_sets, seen_features):
    """Returns non-conflicting combinations of property sets.

    property_sets is a list of PropertySet instances. seen_features is a set of Property
    instances.

    Returns a tuple of:
    - list of lists of Property instances, such that within each list, no two Property instance
      have the same feature, and no Property is for feature in seen_features.
    - set of features we saw in property_sets
    """
    # Base case: no sets left -> one empty combination, no features seen.
    if not property_sets:
        return ([], set())
    properties = property_sets[0].all()
    # Collect the non-free features of the head set; these are the ones that
    # can conflict with features seen earlier in the recursion.
    these_features = set()
    for p in property_sets[0].non_free():
        these_features.add(p.feature())
    # Note: the algorithm as implemented here, as in original Jam code, appears to
    # detect conflicts based on features, not properties. For example, if command
    # line build request say:
    #
    # <a>1/<b>1 c<1>/<b>1
    #
    # It will decide that those two property sets conflict, because they both specify
    # a value for 'b' and will not try building "<a>1 <c1> <b1>", but rather two
    # different property sets. This is a topic for future fixing, maybe.
    if these_features & seen_features:
        # Head conflicts with something already seen: drop it entirely and
        # recurse on the tail.
        (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features)
        return (inner_result, inner_seen | these_features)
    else:
        result = []
        (inner_result, inner_seen) = __x_product_aux(property_sets[1:], seen_features | these_features)
        if inner_result:
            # Prepend the head's properties to every combination of the tail.
            for inner in inner_result:
                result.append(properties + inner)
        else:
            result.append(properties)
        if inner_seen & these_features:
            # Some of elements in property_sets[1:] conflict with elements of property_sets[0],
            # Try again, this time omitting elements of property_sets[0]
            (inner_result2, inner_seen2) = __x_product_aux(property_sets[1:], seen_features)
            result.extend(inner_result2)
        return (result, inner_seen | these_features)
def looks_like_implicit_value(v):
    """Returns true if 'v' is either implicit value, or
    the part before the first '-' symbol is implicit value."""
    candidate = v
    if not feature.is_implicit_value(candidate):
        # Allow values such as "gcc-3.0": only the part before the first
        # dash needs to name an implicit feature value.
        candidate = v.split("-")[0]
        if not feature.is_implicit_value(candidate):
            return 0
    return 1
def from_command_line(command_line):
    """Takes the command line tokens (such as taken from ARGV rule)
    and constructs build request from it. Returns a list of two
    lists. First is the set of targets specified in the command line,
    and second is the set of requested build properties."""
    targets = []
    properties = []
    for token in command_line:
        # Options such as "-d2" are not part of the build request.
        if token.startswith("-"):
            continue
        # A property spec either contains "=", or its first "/"-separated
        # part consists of implicit feature values.
        is_property = ("=" in token) or looks_like_implicit_value(token.split("/")[0])
        if is_property:
            properties.extend(convert_command_line_element(token))
        elif token:
            targets.append(token)
    return [targets, properties]
# Converts one element of command line build request specification into
# internal form.
def convert_command_line_element(e):
    """Convert one "/"-separated element, e.g. "toolset=gcc,msvc/debug",
    into a list of PropertySet instances: the cross-product of all of the
    element's parts."""
    result = None
    parts = e.split("/")
    for p in parts:
        m = p.split("=")
        if len(m) > 1:
            # Explicit "feature=value1,value2" syntax.
            # NOTE: renamed from 'feature' so the module alias 'feature'
            # (b2.build.feature) is no longer shadowed here.
            feature_name = m[0]
            values = m[1].split(",")
            lresult = [("<%s>%s" % (feature_name, v)) for v in values]
        else:
            # Bare implicit values, possibly comma-separated.
            lresult = p.split(",")
        if p.find('-') == -1:
            # FIXME: first port property.validate
            # property.validate cannot handle subfeatures,
            # so we avoid the check here.
            #for p in lresult:
            #    property.validate(p)
            pass
        if not result:
            result = lresult
        else:
            # Cross-product with the parts accumulated so far.
            result = [e1 + "/" + e2 for e1 in result for e2 in lresult]
    return [property_set.create(feature.split(r)) for r in result]
###
### rule __test__ ( )
### {
### import assert feature ;
###
### feature.prepare-test build-request-test-temp ;
###
### import build-request ;
### import build-request : expand_no_defaults : build-request.expand_no_defaults ;
### import errors : try catch ;
### import feature : feature subfeature ;
###
### feature toolset : gcc msvc borland : implicit ;
### subfeature toolset gcc : version : 2.95.2 2.95.3 2.95.4
### 3.0 3.0.1 3.0.2 : optional ;
###
### feature variant : debug release : implicit composite ;
### feature inlining : on off ;
### feature "include" : : free ;
###
### feature stdlib : native stlport : implicit ;
###
### feature runtime-link : dynamic static : symmetric ;
###
###
### local r ;
###
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
###
### try ;
### {
###
### build-request.from-command-line bjam gcc/debug runtime-link=dynamic/static ;
### }
### catch \"static\" is not a value of an implicit feature ;
###
###
### r = [ build-request.from-command-line bjam -d2 --debug debug target runtime-link=dynamic ] ;
### assert.equal [ $(r).get-at 1 ] : target ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic ;
###
### r = [ build-request.from-command-line bjam debug runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug <runtime-link>dynamic <runtime-link>static ;
###
### r = [ build-request.from-command-line bjam debug gcc/runtime-link=dynamic,static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : debug gcc/<runtime-link>dynamic
### gcc/<runtime-link>static ;
###
### r = [ build-request.from-command-line bjam msvc gcc,borland/runtime-link=static ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : msvc gcc/<runtime-link>static
### borland/<runtime-link>static ;
###
### r = [ build-request.from-command-line bjam gcc-3.0 ] ;
### assert.equal [ $(r).get-at 1 ] : ;
### assert.equal [ $(r).get-at 2 ] : gcc-3.0 ;
###
### feature.finish-test build-request-test-temp ;
### }
###
###
| gpl-2.0 |
cdgriffith/PyFoto | pyfoto/config.py | 1 | 2058 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import logging
import yaml
import reusables
# Baseline settings used when no config file exists or when keys are missing
# from it; get_config() overlays the YAML file's values on a copy of this.
default_config = dict(
    storage_directory="storage",
    # TODO add {date}, {time}, {type}, {hash}, {size}
    dir_names="{increment}",  # template for generated directory names
    file_names="{increment}.{ext}",  # template for stored file names
    remove_source=False,
    folder_limit=1000,  # presumably max files per folder — confirm in ingest code
    ignore_duplicates=False,
    dir_inc=0,  # counters substituted into the {increment} templates above
    file_inc=0,
    connect_string="sqlite:///pyfoto.sqlite"  # SQLAlchemy-style DB URL
)
def get_config(config_file: str="config.yaml") -> reusables.Namespace:
    """Load configuration from a YAML file, overlaying it on the defaults.

    :param config_file: path to the YAML configuration file
    :return: Namespace of defaults overridden by any values from the file
    """
    config = default_config.copy()
    if os.path.exists(config_file):
        with open(config_file) as f:
            # safe_load avoids arbitrary Python object construction from the
            # YAML file; an empty file parses to None, so fall back to {} to
            # keep config.update() from raising TypeError.
            config.update(yaml.safe_load(f) or {})
    else:
        logger.warning('Config file "{0}" does not exist, using '
                       'defaults which will be saved to it'.format(config_file))
    logger.debug("Loaded Config - {0}".format(config))
    return reusables.Namespace(**config)
def save_config(config: dict, config_file: str="config.yaml") -> None:
    """Persist the configuration dict to a YAML file, creating any missing
    parent directories first.

    :param config: configuration values to write
    :param config_file: destination YAML file path
    """
    to_write = config.copy()
    parent = os.path.dirname(config_file)
    if parent and not os.path.exists(parent):
        logger.warning("Attempting to create new path to config file: "
                       "{0}".format(parent))
        os.makedirs(parent, exist_ok=True)
    with open(config_file, "w") as config_handle:
        yaml.dump(to_write, config_handle, default_flow_style=False)
    logger.debug("Saved config - {0}".format(to_write))
def get_stream_logger(module, level: int=0):
    """Build a "PyFoto.<module>" logger with a formatted StreamHandler.

    :param module: suffix appended to the "PyFoto." logger name
    :param level: logging level applied to handler and logger when > 0
    :return: the configured Logger instance
    """
    new_logger = logging.getLogger("PyFoto.{}".format(module))
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - '
                                           '%(levelname)s - %(message)s'))
    if level > 0:
        # Apply the threshold to both the handler and the logger itself.
        handler.setLevel(level)
        new_logger.setLevel(level)
    new_logger.addHandler(handler)
    return new_logger


logger = get_stream_logger("config", level=0)
| mit |
endlessm/chromium-browser | tools/json_schema_compiler/h_generator.py | 4 | 15267 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from code import Code
from model import PropertyType
import cpp_util
import schema_util
class HGenerator(object):
  # Public entry point for .h generation: holds the shared C++ type generator
  # and delegates each namespace to a fresh _Generator.
  def __init__(self, type_generator):
    self._type_generator = type_generator

  def Generate(self, namespace):
    # Returns a Code object containing the .h content for |namespace|.
    return _Generator(namespace, self._type_generator).Generate()
class _Generator(object):
  """A .h generator for a namespace.
  """
  def __init__(self, namespace, cpp_type_generator):
    self._namespace = namespace
    self._type_helper = cpp_type_generator
    # When true, generated Populate/Create signatures take an extra
    # base::string16* error out-parameter (see _GenerateParams).
    self._generate_error_messages = namespace.compiler_options.get(
        'generate_error_messages', False)

  def Generate(self):
    """Generates a Code object with the .h for a single namespace.
    """
    c = Code()
    (c.Append(cpp_util.CHROMIUM_LICENSE)
      .Append()
      .Append(cpp_util.GENERATED_FILE_MESSAGE % self._namespace.source_file)
      .Append()
    )
    # Hack: for the purpose of gyp the header file will always be the source
    # file with its file extension replaced by '.h'. Assume so.
    output_file = os.path.splitext(self._namespace.source_file)[0] + '.h'
    ifndef_name = cpp_util.GenerateIfndefName(output_file)
    # Hack: tabs and windows have circular references, so only generate hard
    # references for them (i.e. anything that can't be forward declared). In
    # other cases, generate soft dependencies so that they can include
    # non-optional types from other namespaces.
    include_soft = self._namespace.name not in ('tabs', 'windows')
    (c.Append('#ifndef %s' % ifndef_name)
      .Append('#define %s' % ifndef_name)
      .Append()
      .Append('#include <stdint.h>')
      .Append()
      .Append('#include <map>')
      .Append('#include <memory>')
      .Append('#include <string>')
      .Append('#include <vector>')
      .Append()
      .Append('#include "base/logging.h"')
      .Append('#include "base/values.h"')
      .Cblock(self._type_helper.GenerateIncludes(include_soft=include_soft))
      .Append()
    )
    # Hack: we're not generating soft includes for tabs and windows, so we need
    # to generate forward declarations for them.
    if not include_soft:
      c.Cblock(self._type_helper.GenerateForwardDeclarations())
    cpp_namespace = cpp_util.GetCppNamespace(
        self._namespace.environment.namespace_pattern,
        self._namespace.unix_name)
    c.Concat(cpp_util.OpenNamespace(cpp_namespace))
    c.Append()
    # Emit the namespace's sections in order: properties, types, functions,
    # events — each section only when non-empty.
    if self._namespace.properties:
      (c.Append('//')
        .Append('// Properties')
        .Append('//')
        .Append()
      )
      for prop in self._namespace.properties.values():
        property_code = self._type_helper.GeneratePropertyValues(
            prop,
            'extern const %(type)s %(name)s;')
        if property_code:
          c.Cblock(property_code)
    if self._namespace.types:
      (c.Append('//')
        .Append('// Types')
        .Append('//')
        .Append()
        .Cblock(self._GenerateTypes(self._FieldDependencyOrder(),
                                    is_toplevel=True,
                                    generate_typedefs=True))
      )
    if self._namespace.functions:
      (c.Append('//')
        .Append('// Functions')
        .Append('//')
        .Append()
      )
      for function in self._namespace.functions.values():
        c.Cblock(self._GenerateFunction(function))
    if self._namespace.events:
      (c.Append('//')
        .Append('// Events')
        .Append('//')
        .Append()
      )
      for event in self._namespace.events.values():
        c.Cblock(self._GenerateEvent(event))
    (c.Concat(cpp_util.CloseNamespace(cpp_namespace))
      .Append('#endif // %s' % ifndef_name)
      .Append()
    )
    return c

  def _FieldDependencyOrder(self):
    """Generates the list of types in the current namespace in an order in which
    depended-upon types appear before types which depend on them.
    """
    dependency_order = []
    # Depth-first post-order walk over same-namespace REF properties; |path|
    # tracks the current chain to detect cycles.
    def ExpandType(path, type_):
      if type_ in path:
        raise ValueError("Illegal circular dependency via cycle " +
                         ", ".join(map(lambda x: x.name, path + [type_])))
      for prop in type_.properties.values():
        if (prop.type_ == PropertyType.REF and
            schema_util.GetNamespace(prop.ref_type) == self._namespace.name):
          ExpandType(path + [type_], self._namespace.types[prop.ref_type])
      if not type_ in dependency_order:
        dependency_order.append(type_)
    for type_ in self._namespace.types.values():
      ExpandType([], type_)
    return dependency_order

  def _GenerateEnumDeclaration(self, enum_name, type_):
    """Generate a code object with the declaration of a C++ enum.
    """
    c = Code()
    c.Sblock('enum %s {' % enum_name)
    # First enumerator is the NONE sentinel; the last real value is aliased
    # as the LAST marker below.
    c.Append(self._type_helper.GetEnumNoneValue(type_) + ',')
    for value in type_.enum_values:
      current_enum_string = self._type_helper.GetEnumValue(type_, value)
      c.Append(current_enum_string + ',')
    c.Append('%s = %s,' % (
        self._type_helper.GetEnumLastValue(type_), current_enum_string))
    c.Eblock('};')
    return c

  def _GenerateFields(self, props):
    """Generates the field declarations when declaring a type.
    """
    c = Code()
    needs_blank_line = False
    for prop in props:
      if needs_blank_line:
        c.Append()
      needs_blank_line = True
      if prop.description:
        c.Comment(prop.description)
      # ANY is a base::Value which is abstract and cannot be a direct member, so
      # we always need to wrap it in a scoped_ptr.
      is_ptr = prop.optional or prop.type_.property_type == PropertyType.ANY
      (c.Append('%s %s;' % (
           self._type_helper.GetCppType(prop.type_, is_ptr=is_ptr),
           prop.unix_name))
      )
    return c

  def _GenerateType(self, type_, is_toplevel=False, generate_typedefs=False):
    """Generates a struct for |type_|.

    |is_toplevel| implies that the type was declared in the "types" field
    of an API schema. This determines the correct function modifier(s).
    |generate_typedefs| controls whether primitive types should be generated
    as a typedef. This may not always be desired. If false, primitive types
    are ignored.
    """
    classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
    c = Code()
    if type_.functions:
      # Wrap functions within types in the type's namespace.
      (c.Append('namespace %s {' % classname)
        .Append()
      )
      for function in type_.functions.values():
        c.Cblock(self._GenerateFunction(function))
      c.Append('} // namespace %s' % classname)
    elif type_.property_type == PropertyType.ARRAY:
      if generate_typedefs and type_.description:
        c.Comment(type_.description)
      c.Cblock(self._GenerateType(type_.item_type, is_toplevel=is_toplevel))
      if generate_typedefs:
        (c.Append('typedef std::vector<%s > %s;' % (
             self._type_helper.GetCppType(type_.item_type),
             classname))
        )
    elif type_.property_type == PropertyType.STRING:
      if generate_typedefs:
        if type_.description:
          c.Comment(type_.description)
        c.Append('typedef std::string %(classname)s;')
    elif type_.property_type == PropertyType.ENUM:
      if type_.description:
        c.Comment(type_.description)
      c.Cblock(self._GenerateEnumDeclaration(classname, type_));
      # Top level enums are in a namespace scope so the methods shouldn't be
      # static. On the other hand, those declared inline (e.g. in an object) do.
      maybe_static = '' if is_toplevel else 'static '
      (c.Append()
        .Append('%sconst char* ToString(%s as_enum);' %
                (maybe_static, classname))
        .Append('%s%s Parse%s(const std::string& as_string);' %
                (maybe_static, classname, classname))
      )
    elif type_.property_type in (PropertyType.CHOICES,
                                 PropertyType.OBJECT):
      if type_.description:
        c.Comment(type_.description)
      # Movable, non-copyable struct with ctor/dtor declarations.
      (c.Sblock('struct %(classname)s {')
        .Append('%(classname)s();')
        .Append('~%(classname)s();')
      )
      (c.Append('%(classname)s(%(classname)s&& rhs);')
        .Append('%(classname)s& operator=(%(classname)s&& rhs);')
      )
      if type_.origin.from_json:
        (c.Append()
          .Comment('Populates a %s object from a base::Value. Returns'
                   ' whether |out| was successfully populated.' % classname)
          .Append('static bool Populate(%s);' % self._GenerateParams(
              ('const base::Value& value', '%s* out' % classname)))
        )
        if is_toplevel:
          (c.Append()
            .Comment('Creates a %s object from a base::Value, or NULL on '
                     'failure.' % classname)
            .Append('static std::unique_ptr<%s> FromValue(%s);' % (
                classname, self._GenerateParams(('const base::Value& value',))))
          )
      if type_.origin.from_client:
        value_type = ('base::Value'
                      if type_.property_type is PropertyType.CHOICES else
                      'base::DictionaryValue')
        (c.Append()
          .Comment('Returns a new %s representing the serialized form of this '
                   '%s object.' % (value_type, classname))
          .Append('std::unique_ptr<%s> ToValue() const;' % value_type)
        )
      if type_.property_type == PropertyType.CHOICES:
        # Choices are modelled with optional fields for each choice. Exactly one
        # field of the choice is guaranteed to be set by the compiler.
        c.Cblock(self._GenerateTypes(type_.choices))
        c.Append('// Choices:')
        for choice_type in type_.choices:
          c.Append('%s as_%s;' % (
              self._type_helper.GetCppType(choice_type, is_ptr=True),
              choice_type.unix_name))
      else:
        properties = type_.properties.values()
        (c.Append()
          .Cblock(self._GenerateTypes(p.type_ for p in properties))
          .Cblock(self._GenerateFields(properties)))
        if type_.additional_properties is not None:
          # Most additionalProperties actually have type "any", which is better
          # modelled as a DictionaryValue rather than a map of string -> Value.
          if type_.additional_properties.property_type == PropertyType.ANY:
            c.Append('base::DictionaryValue additional_properties;')
          else:
            (c.Cblock(self._GenerateType(type_.additional_properties))
              .Append('std::map<std::string, %s> additional_properties;' %
                  self._type_helper.GetCppType(type_.additional_properties,
                                               is_in_container=True))
            )
      (c.Eblock()
        .Append()
        .Sblock(' private:')
        .Append('DISALLOW_COPY_AND_ASSIGN(%(classname)s);')
        .Eblock('};')
      )
    return c.Substitute({'classname': classname})

  def _GenerateEvent(self, event):
    """Generates the namespaces for an event.
    """
    c = Code()
    # TODO(kalman): use event.unix_name not Classname.
    event_namespace = cpp_util.Classname(event.name)
    (c.Append('namespace %s {' % event_namespace)
      .Append()
      .Concat(self._GenerateEventNameConstant(event))
      .Concat(self._GenerateCreateCallbackArguments(event))
      .Append('} // namespace %s' % event_namespace)
    )
    return c

  def _GenerateFunction(self, function):
    """Generates the namespaces and structs for a function.
    """
    c = Code()
    # TODO(kalman): Use function.unix_name not Classname here.
    function_namespace = cpp_util.Classname(function.name)
    # Windows has a #define for SendMessage, so to avoid any issues, we need
    # to not use the name.
    if function_namespace == 'SendMessage':
      function_namespace = 'PassMessage'
    (c.Append('namespace %s {' % function_namespace)
      .Append()
      .Cblock(self._GenerateFunctionParams(function))
    )
    if function.callback:
      c.Cblock(self._GenerateFunctionResults(function.callback))
    c.Append('} // namespace %s' % function_namespace)
    return c

  def _GenerateFunctionParams(self, function):
    """Generates the struct for passing parameters from JSON to a function.
    """
    if not function.params:
      return Code()
    c = Code()
    (c.Sblock('struct Params {')
      .Append('static std::unique_ptr<Params> Create(%s);' %
              self._GenerateParams(('const base::ListValue& args',)))
      .Append('~Params();')
      .Append()
      .Cblock(self._GenerateTypes(p.type_ for p in function.params))
      .Cblock(self._GenerateFields(function.params))
      .Eblock()
      .Append()
      .Sblock(' private:')
      .Append('Params();')
      .Append()
      .Append('DISALLOW_COPY_AND_ASSIGN(Params);')
      .Eblock('};')
    )
    return c

  def _GenerateTypes(self, types, is_toplevel=False, generate_typedefs=False):
    """Generate the structures required by a property such as OBJECT classes
    and enums.
    """
    c = Code()
    for type_ in types:
      c.Cblock(self._GenerateType(type_,
                                  is_toplevel=is_toplevel,
                                  generate_typedefs=generate_typedefs))
    return c

  def _GenerateCreateCallbackArguments(self, function):
    """Generates functions for passing parameters to a callback.
    """
    c = Code()
    params = function.params
    c.Cblock(self._GenerateTypes((p.type_ for p in params), is_toplevel=True))
    declaration_list = []
    for param in params:
      if param.description:
        c.Comment(param.description)
      declaration_list.append(cpp_util.GetParameterDeclaration(
          param, self._type_helper.GetCppType(param.type_)))
    c.Append('std::unique_ptr<base::ListValue> Create(%s);' %
             ', '.join(declaration_list))
    return c

  def _GenerateEventNameConstant(self, event):
    """Generates a constant string array for the event name.
    """
    c = Code()
    c.Append('extern const char kEventName[]; // "%s.%s"' % (
        self._namespace.name, event.name))
    c.Append()
    return c

  def _GenerateFunctionResults(self, callback):
    """Generates namespace for passing a function's result back.
    """
    c = Code()
    (c.Append('namespace Results {')
      .Append()
      .Concat(self._GenerateCreateCallbackArguments(callback))
      .Append('} // namespace Results')
    )
    return c

  def _GenerateParams(self, params):
    """Builds the parameter list for a function, given an array of parameters.
    """
    # |error| is populated with warnings and/or errors found during parsing.
    # |error| being set does not necessarily imply failure and may be
    # recoverable.
    # For example, optional properties may have failed to parse, but the
    # parser was able to continue.
    if self._generate_error_messages:
      params += ('base::string16* error',)
    return ', '.join(str(p) for p in params)
| bsd-3-clause |
DanielSBrown/osf.io | api_tests/guids/views/test_guid_detail.py | 8 | 6233 | from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from website.files.models.osfstorage import OsfStorageFile
from website.settings import API_DOMAIN
from tests.base import ApiTestCase
from tests.factories import (AuthUserFactory, ProjectFactory, RegistrationFactory,
CommentFactory, NodeWikiFactory, CollectionFactory, PrivateLinkFactory)
class TestGuidDetail(ApiTestCase):
    """Tests that /guids/<id>/ redirects (302) to the canonical API endpoint
    for the GUID's referent, honoring view-only links and the resolve/embed
    query parameters."""

    def setUp(self):
        super(TestGuidDetail, self).setUp()
        self.user = AuthUserFactory()

    def _add_private_link(self, project, anonymous=False):
        # Helper: create a view-only link and attach it to |project|.
        view_only_link = PrivateLinkFactory(anonymous=anonymous)
        view_only_link.nodes.append(project)
        view_only_link.save()
        return view_only_link

    def test_redirect_to_node_view(self):
        project = ProjectFactory()
        url = '/{}guids/{}/'.format(API_BASE, project._id)
        res = self.app.get(url, auth=self.user.auth)
        redirect_url = '{}{}nodes/{}/'.format(API_DOMAIN, API_BASE, project._id)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_to_registration_view(self):
        registration = RegistrationFactory()
        url = '/{}guids/{}/'.format(API_BASE, registration._id)
        res = self.app.get(url, auth=self.user.auth)
        redirect_url = '{}{}registrations/{}/'.format(API_DOMAIN, API_BASE, registration._id)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_to_collections_view(self):
        collection = CollectionFactory()
        url = '/{}guids/{}/'.format(API_BASE, collection._id)
        res = self.app.get(url, auth=self.user.auth)
        redirect_url = '{}{}collections/{}/'.format(API_DOMAIN, API_BASE, collection._id)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_to_file_view(self):
        # Files don't get a GUID until requested, hence get_guid(create=True).
        test_file = OsfStorageFile.create(
            is_file=True,
            node=ProjectFactory(),
            path='/test',
            name='test',
            materialized_path='/test',
        )
        test_file.save()
        guid = test_file.get_guid(create=True)
        url = '/{}guids/{}/'.format(API_BASE, guid._id)
        res = self.app.get(url, auth=self.user.auth)
        redirect_url = '{}{}files/{}/'.format(API_DOMAIN, API_BASE, test_file._id)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_to_comment_view(self):
        comment = CommentFactory()
        url = '/{}guids/{}/'.format(API_BASE, comment._id)
        res = self.app.get(url, auth=self.user.auth)
        redirect_url = '{}{}comments/{}/'.format(API_DOMAIN, API_BASE, comment._id)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_throws_404_for_invalid_guids(self):
        url = '/{}guids/{}/'.format(API_BASE, 'fakeguid')
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(res.status_code, 404)

    def test_redirect_when_viewing_private_project_through_view_only_link(self):
        # The view_only key must be carried through onto the redirect URL.
        project = ProjectFactory()
        view_only_link = self._add_private_link(project)
        url = '/{}guids/{}/?view_only={}'.format(API_BASE, project._id, view_only_link.key)
        res = self.app.get(url, auth=AuthUserFactory().auth)
        redirect_url = '{}{}nodes/{}/?view_only={}'.format(API_DOMAIN, API_BASE, project._id, view_only_link.key)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_when_viewing_private_project_file_through_view_only_link(self):
        project = ProjectFactory()
        test_file = OsfStorageFile.create(
            is_file=True,
            node=project,
            path='/test',
            name='test',
            materialized_path='/test',
        )
        test_file.save()
        guid = test_file.get_guid(create=True)
        view_only_link = self._add_private_link(project)
        url = '/{}guids/{}/?view_only={}'.format(API_BASE, guid._id, view_only_link.key)
        res = self.app.get(url, auth=AuthUserFactory().auth)
        redirect_url = '{}{}files/{}/?view_only={}'.format(API_DOMAIN, API_BASE, test_file._id, view_only_link.key)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_redirect_when_viewing_private_project_comment_through_view_only_link(self):
        project = ProjectFactory()
        view_only_link = self._add_private_link(project)
        comment = CommentFactory(node=project)
        url = '/{}guids/{}/?view_only={}'.format(API_BASE, comment._id, view_only_link.key)
        res = self.app.get(url, auth=AuthUserFactory().auth)
        redirect_url = '{}{}comments/{}/?view_only={}'.format(API_DOMAIN, API_BASE, comment._id, view_only_link.key)
        assert_equal(res.status_code, 302)
        assert_equal(res.location, redirect_url)

    def test_resolve_query_param(self):
        # resolve=false returns the guid resource itself instead of a 302.
        project = ProjectFactory()
        url = '{}{}guids/{}/?resolve=false'.format(API_DOMAIN, API_BASE, project._id)
        res = self.app.get(url, auth=self.user.auth)
        related_url = '{}{}nodes/{}/'.format(API_DOMAIN, API_BASE, project._id)
        related = res.json['data']['relationships']['referent']['links']['related']
        assert_equal(related['href'], related_url)
        assert_equal(related['meta']['type'], 'nodes')

    def test_referent_is_embeddable(self):
        project = ProjectFactory(creator=self.user)
        url = '{}{}guids/{}/?resolve=false&embed=referent'.format(API_DOMAIN, API_BASE, project._id)
        res = self.app.get(url, auth=self.user.auth)
        related_url = '{}{}nodes/{}/'.format(API_DOMAIN, API_BASE, project._id)
        related = res.json['data']['relationships']['referent']['links']['related']
        assert_equal(related['href'], related_url)
        assert_equal(related['meta']['type'], 'nodes')
        referent = res.json['data']['embeds']['referent']['data']
        assert_equal(referent['id'], project._id)
        assert_equal(referent['type'], 'nodes')
| apache-2.0 |
terbolous/SickRage | lib/github/InputGitAuthor.py | 47 | 2633 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class InputGitAuthor(object):
    """
    Author/committer identity (name, e-mail, optional date) supplied to
    GitHub git-data API calls.
    """

    def __init__(self, name, email, date=github.GithubObject.NotSet):
        """
        :param name: string
        :param email: string
        :param date: string
        """
        for text in (name, email):
            assert isinstance(text, (str, unicode)), text
        assert date is github.GithubObject.NotSet or isinstance(date, (str, unicode)), date  # @todo Datetime?
        self.__name = name
        self.__email = email
        self.__date = date

    @property
    def _identity(self):
        # Only include "date" when one was explicitly supplied.
        payload = {
            "name": self.__name,
            "email": self.__email,
        }
        if self.__date is not github.GithubObject.NotSet:
            payload["date"] = self.__date
        return payload
| gpl-3.0 |
lupyuen/RaspberryPiImage | home/pi/GrovePi/Software/Python/others/temboo/Library/Google/Gmailv2/Threads/DeleteThread.py | 5 | 4936 | # -*- coding: utf-8 -*-
###############################################################################
#
# DeleteThread
# Permanently deletes a specific thread.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class DeleteThread(Choreography):
    # Generated Temboo Choreo wrapper: binds the session to the
    # Gmailv2/Threads/DeleteThread Choreo path and supplies the
    # matching input/result/execution classes via factory methods.
    def __init__(self, temboo_session):
        """
        Create a new instance of the DeleteThread Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(DeleteThread, self).__init__(temboo_session, '/Library/Google/Gmailv2/Threads/DeleteThread')

    def new_input_set(self):
        # Factory for this Choreo's typed input container.
        return DeleteThreadInputSet()

    def _make_result_set(self, result, path):
        return DeleteThreadResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return DeleteThreadChoreographyExecution(session, exec_id, path)
class DeleteThreadInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the DeleteThread
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # One setter per Choreo input; each delegates to InputSet._set_input.
    def set_AccessToken(self, value):
        """
        Set the value of the AccessToken input for this Choreo. ((optional, string) A valid Access Token retrieved during the OAuth2 process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new Access Token.)
        """
        super(DeleteThreadInputSet, self)._set_input('AccessToken', value)

    def set_ClientID(self, value):
        """
        Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
        """
        super(DeleteThreadInputSet, self)._set_input('ClientID', value)

    def set_ClientSecret(self, value):
        """
        Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
        """
        super(DeleteThreadInputSet, self)._set_input('ClientSecret', value)

    def set_RefreshToken(self, value):
        """
        Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth Refresh Token used to generate a new Access Token when the original token is expired. Required unless providing a valid AccessToken.)
        """
        super(DeleteThreadInputSet, self)._set_input('RefreshToken', value)

    def set_ThreadID(self, value):
        """
        Set the value of the ThreadID input for this Choreo. ((required, string) The ID of the thread to delete.)
        """
        super(DeleteThreadInputSet, self)._set_input('ThreadID', value)

    def set_UserID(self, value):
        """
        Set the value of the UserID input for this Choreo. ((optional, string) The ID of the acting user. Defaults to "me" indicating the user associated with the Access Token or Refresh Token provided.)
        """
        super(DeleteThreadInputSet, self)._set_input('UserID', value)
class DeleteThreadResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the DeleteThread Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # Parse a raw JSON string returned by the Choreo execution.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google. An empty response is expected for a successful delete operation.)
        """
        return self._output.get('Response', None)

    def get_NewAccessToken(self):
        """
        Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
        """
        return self._output.get('NewAccessToken', None)
class DeleteThreadChoreographyExecution(ChoreographyExecution):
    """ChoreographyExecution variant that yields DeleteThread result sets."""

    def _make_result_set(self, response, path):
        # Wrap the raw execution response in the Choreo-specific ResultSet.
        return DeleteThreadResultSet(response, path)
| apache-2.0 |
Ictp/indico | bin/utils/changeStyle.py | 1 | 1781 | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from indico.core.db import DBMgr
from MaKaC.webinterface import displayMgr
from MaKaC.conference import CategoryManager
# Records each category/conference's previous style before it is overwritten.
# NOTE(review): opened at import time and never explicitly closed or flushed;
# relies on interpreter shutdown to flush — confirm this is acceptable.
logfile=open('./oldstyles','w')
def changeCatStyle(cat):
    """Recursively force the default "meeting" style to "lhcb_meeting" for
    every subcategory and conference below `cat`, logging each old style."""
    for subcat in cat.getSubCategoryList():
        oldStyle = subcat.getDefaultStyle("meeting")
        subcat.setDefaultStyle("meeting", "lhcb_meeting")
        logfile.write("cat %s: %s" % (subcat.getId(), oldStyle))
        changeCatStyle(subcat)
    for conf in cat.getConferenceList():
        oldStyle = displayMgr.ConfDisplayMgrRegistery().getDisplayMgr(conf).getDefaultStyle()
        displayMgr.ConfDisplayMgrRegistery().getDisplayMgr(conf).setDefaultStyle("lhcb_meeting")
        logfile.write("\t\t\tconf %s: %s" % (conf.getId(), oldStyle))
# Script entry point: within a single DB request, rewrite the default
# "meeting" style of category 233 itself (logging the old value), then
# recurse into its whole subtree, and finally commit the request.
dbm = DBMgr.getInstance()
dbm.startRequest()
cat=CategoryManager().getById('233')
currentStyle=cat.getDefaultStyle("meeting")
cat.setDefaultStyle("meeting", "lhcb_meeting")
logfile.write("cat %s: %s"%(cat.getId(), currentStyle))
changeCatStyle(cat)
dbm.endRequest()
| gpl-3.0 |
40223226/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/keyword.py | 761 | 2049 | #! /usr/bin/env python3
"""Keywords (from "graminit.c")
This file is automatically generated; please don't muck it up!
To update the symbols in this file, 'cd' to the top directory of
the python source tree after building the interpreter and run:
./python Lib/keyword.py
"""
__all__ = ["iskeyword", "kwlist"]

# The region between the start/end markers is machine-maintained: main()
# below regenerates it from the grammar source.  Keep the marker comments
# at column 0 exactly as they are (main() matches them verbatim) and do
# not edit the list by hand.
kwlist = [
#--start keywords--
        'False',
        'None',
        'True',
        'and',
        'as',
        'assert',
        'break',
        'class',
        'continue',
        'def',
        'del',
        'elif',
        'else',
        'except',
        'finally',
        'for',
        'from',
        'global',
        'if',
        'import',
        'in',
        'is',
        'lambda',
        'nonlocal',
        'not',
        'or',
        'pass',
        'raise',
        'return',
        'try',
        'while',
        'with',
        'yield',
#--end keywords--
        ]

# Bound method of a frozenset: O(1) membership test for keyword-ness.
iskeyword = frozenset(kwlist).__contains__
def main():
    """Regenerate the keyword list between the start/end markers.

    Reads keyword strings out of the grammar C source (first command-line
    argument, default ``Python/graminit.c``) and splices them, sorted,
    between the ``#--start keywords--`` / ``#--end keywords--`` markers of
    the target file (second argument, default ``Lib/keyword.py``).

    Exits with status 1 if the target does not contain the markers.
    """
    import sys, re
    args = sys.argv[1:]
    iptfile = args and args[0] or "Python/graminit.c"
    if len(args) > 1: optfile = args[1]
    else: optfile = "Lib/keyword.py"

    # scan the source file for keywords
    with open(iptfile) as fp:
        strprog = re.compile('"([^"]+)"')
        lines = []
        for line in fp:
            if '{1, "' in line:
                match = strprog.search(line)
                if match:
                    lines.append("        '" + match.group(1) + "',\n")
    lines.sort()

    # load the output skeleton from the target
    with open(optfile) as fp:
        format = fp.readlines()

    # insert the lines of keywords
    try:
        start = format.index("#--start keywords--\n") + 1
        end = format.index("#--end keywords--\n")
        format[start:end] = lines
    except ValueError:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(1)

    # write the output file; a context manager guarantees the handle is
    # closed (and data flushed) even if the write itself fails, which the
    # original open()/close() sequence did not.
    with open(optfile, 'w') as fp:
        fp.write(''.join(format))

if __name__ == "__main__":
    main()
| gpl-3.0 |
msimacek/samba | python/samba/schema.py | 33 | 8356 | #
# Unix SMB/CIFS implementation.
# backend code for provisioning a Samba4 server
#
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007-2008
# Copyright (C) Andrew Bartlett <abartlet@samba.org> 2008-2009
# Copyright (C) Oliver Liebel <oliver@itc.li> 2008-2009
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Functions for setting up a Samba Schema."""
from base64 import b64encode
from samba import read_and_sub_file, substitute_var, check_all_substituted
from samba.dcerpc import security
from samba.ms_schema import read_ms_schema
from samba.ndr import ndr_pack
from samba.samdb import SamDB
from samba import dsdb
from ldb import SCOPE_SUBTREE, SCOPE_ONELEVEL
import os
def get_schema_descriptor(domain_sid, name_map=None):
    """Build the default security descriptor for the schema partition.

    :param domain_sid: domain SID used to resolve SID aliases in the SDDL
    :param name_map: optional mapping, kept for interface compatibility
        (it is not referenced in this function)
    :return: NDR-packed security descriptor blob
    """
    # The original signature used a mutable default (name_map={}); replaced
    # with None to avoid the shared-mutable-default pitfall.  Behaviour is
    # unchanged since the parameter is never read below.
    if name_map is None:
        name_map = {}
    sddl = "O:SAG:SAD:AI(OA;;CR;e12b56b6-0a95-11d1-adbb-00c04fd8d5cd;;SA)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(A;CI;RPLCLORC;;;AU)" \
           "(A;CI;RPWPCRCCLCLORCWOWDSW;;;SA)" \
           "(A;CI;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;ED)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;BA)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ER)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;ER)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;ER)" \
           "S:(AU;SA;WPCCDCWOWDSDDTSW;;;WD)" \
           "(AU;CISA;WP;;;WD)" \
           "(AU;SA;CR;;;BA)" \
           "(AU;SA;CR;;;DU)" \
           "(OU;SA;CR;e12b56b6-0a95-11d1-adbb-00c04fd8d5cd;;WD)" \
           "(OU;SA;CR;45ec5156-db7e-47bb-b53f-dbeb2d03c40f;;WD)"
    sec = security.descriptor.from_sddl(sddl, domain_sid)
    return ndr_pack(sec)
class Schema(object):

    def __init__(self, domain_sid, invocationid=None, schemadn=None,
                 files=None, override_prefixmap=None, additional_prefixmap=None):
        """Load schema for the SamDB from the AD schema files and
        samba4_schema.ldif

        :param domain_sid: domain SID used when building the schema
            security descriptor
        :param invocationid: optional invocation ID for the temporary ldb
        :param schemadn: DN of the schema
        :param files: optional extra LDIF files appended to the schema data
        :param override_prefixmap: replaces the default prefixMap.txt data
        :param additional_prefixmap: extra prefix-map lines to append

        Returns the schema data loaded, to avoid double-parsing when then
        needing to add it to the db
        """
        # NOTE: in the original code this docstring appeared *after* the
        # import statement below, which made it a no-op string expression
        # rather than the method docstring; it has been moved to the top.
        from samba.provision import setup_path

        self.schemadn = schemadn
        # We need to have the am_rodc=False just to keep some warnings quiet -
        # this isn't a real SAM, so it's meaningless.
        self.ldb = SamDB(global_schema=False, am_rodc=False)
        if invocationid is not None:
            self.ldb.set_invocation_id(invocationid)

        self.schema_data = read_ms_schema(
            setup_path('ad-schema/MS-AD_Schema_2K8_R2_Attributes.txt'),
            setup_path('ad-schema/MS-AD_Schema_2K8_R2_Classes.txt'))

        if files is not None:
            # `extra_path` (was `file`, shadowing the builtin); the context
            # manager closes each handle promptly instead of leaking it.
            for extra_path in files:
                with open(extra_path, 'r') as f:
                    self.schema_data += f.read()

        self.schema_data = substitute_var(self.schema_data,
                                          {"SCHEMADN": schemadn})
        check_all_substituted(self.schema_data)

        self.schema_dn_modify = read_and_sub_file(
            setup_path("provision_schema_basedn_modify.ldif"),
            {"SCHEMADN": schemadn})
        descr = b64encode(get_schema_descriptor(domain_sid))
        self.schema_dn_add = read_and_sub_file(
            setup_path("provision_schema_basedn.ldif"),
            {"SCHEMADN": schemadn, "DESCRIPTOR": descr})

        if override_prefixmap is not None:
            self.prefixmap_data = override_prefixmap
        else:
            # Context manager fixes the leaked handle from open(...).read().
            with open(setup_path("prefixMap.txt"), 'r') as f:
                self.prefixmap_data = f.read()

        if additional_prefixmap is not None:
            # `entry` (was `map`, shadowing the builtin).
            for entry in additional_prefixmap:
                self.prefixmap_data += "%s\n" % entry

        self.prefixmap_data = b64encode(self.prefixmap_data)

        # We don't actually add this ldif, just parse it
        prefixmap_ldif = "dn: %s\nprefixMap:: %s\n\n" % (self.schemadn, self.prefixmap_data)
        self.set_from_ldif(prefixmap_ldif, self.schema_data, self.schemadn)

    def set_from_ldif(self, pf, df, dn):
        """Install prefix-map and schema LDIF into the temporary ldb."""
        dsdb._dsdb_set_schema_from_ldif(self.ldb, pf, df, dn)

    def write_to_tmp_ldb(self, schemadb_path):
        """Write the loaded schema out to a scratch ldb at `schemadb_path`."""
        self.ldb.connect(url=schemadb_path)
        self.ldb.transaction_start()
        try:
            # Index attributes needed for schema queries.  The blank line
            # separates the two LDIF records.
            self.ldb.add_ldif("""dn: @ATTRIBUTES
linkID: INTEGER

dn: @INDEXLIST
@IDXATTR: linkID
@IDXATTR: attributeSyntax
""")
            # These bits of LDIF are supplied when the Schema object is created
            self.ldb.add_ldif(self.schema_dn_add)
            self.ldb.modify_ldif(self.schema_dn_modify)
            self.ldb.add_ldif(self.schema_data)
        except:
            # Bare except on purpose: roll back on *any* failure, then
            # re-raise the original exception.
            self.ldb.transaction_cancel()
            raise
        else:
            self.ldb.transaction_commit()

    # Return a hash with the forward attribute as a key and the back as the
    # value
    def linked_attributes(self):
        return get_linked_attributes(self.schemadn, self.ldb)

    def dnsyntax_attributes(self):
        return get_dnsyntax_attributes(self.schemadn, self.ldb)

    def convert_to_openldap(self, target, mapping):
        return dsdb._dsdb_convert_schema_to_openldap(self.ldb, target, mapping)
def get_linked_attributes(schemadn, schemaldb):
    """Return a dict mapping each forward-link attribute's lDAPDisplayName
    to the lDAPDisplayName of its matching backlink attribute."""
    attrs = ["linkID", "lDAPDisplayName"]
    # Forward links: linkID set, bit 0 clear (the :803: bitwise-AND match),
    # DN syntax 2.5.5.1.
    res = schemaldb.search(
        expression="(&(linkID=*)"
        "(!(linkID:1.2.840.113556.1.4.803:=1))"
        "(objectclass=attributeSchema)"
        "(attributeSyntax=2.5.5.1))",
        base=schemadn, scope=SCOPE_ONELEVEL, attrs=attrs)
    attributes = {}
    # Iterate the result messages directly instead of indexing by range().
    for msg in res:
        # The backlink for a forward link has linkID = forward linkID + 1.
        expression = ("(&(objectclass=attributeSchema)(linkID=%d)"
                      "(attributeSyntax=2.5.5.1))" %
                      (int(msg["linkID"][0]) + 1))
        target = schemaldb.searchone(basedn=schemadn,
                                     expression=expression,
                                     attribute="lDAPDisplayName",
                                     scope=SCOPE_SUBTREE)
        if target is not None:
            attributes[str(msg["lDAPDisplayName"])] = str(target)
    return attributes
def get_dnsyntax_attributes(schemadn, schemaldb):
    """Return the lDAPDisplayNames of all non-linked DN-syntax attributes."""
    res = schemaldb.search(
        expression="(&(!(linkID=*))(objectclass=attributeSchema)(attributeSyntax=2.5.5.1))",
        base=schemadn, scope=SCOPE_ONELEVEL,
        attrs=["linkID", "lDAPDisplayName"])
    # List comprehension replaces the index-based accumulation loop.
    return [str(msg["lDAPDisplayName"]) for msg in res]
def ldb_with_schema(schemadn="cn=schema,cn=configuration,dc=example,dc=com",
                    domainsid=None,
                    override_prefixmap=None):
    """Load schema for the SamDB from the AD schema files and samba4_schema.ldif

    :param schemadn: DN of the schema
    :param domainsid: optional domain SID string; a random SID is generated
        when omitted
    :param override_prefixmap: optional prefix-map data overriding the default

    Returns the schema data loaded as an object, with .ldb being a
    new ldb with the schema loaded. This allows certain tests to
    operate without a remote or local schema.
    """
    sid = (security.random_sid() if domainsid is None
           else security.dom_sid(domainsid))
    return Schema(sid, schemadn=schemadn,
                  override_prefixmap=override_prefixmap)
| gpl-3.0 |
mastizada/kuma | vendor/packages/nose/functional_tests/test_cases.py | 10 | 1232 | import unittest
from nose.config import Config
from nose import case
from nose.plugins import Plugin, PluginManager
class TestTestCasePluginCalls(unittest.TestCase):
    """Checks that nose.case.Test consults plugins for test naming."""

    def test_describe_test_called(self):
        # Plugin that overrides both the short description and the display
        # name of every test it sees.
        class Descrip(Plugin):
            counter = 0
            enabled = True

            def describeTest(self, test):
                # Supplies the wrapped test's shortDescription().
                return "test #%s" % id(test)

            def testName(self, test):
                # Each call bumps the counter, so names come out sequential.
                self.counter += 1
                return "(%s) test" % self.counter

        class TC(unittest.TestCase):
            def test_one(self):
                pass

            def test_two(self):
                pass

        config = Config(plugins=PluginManager(plugins=[Descrip()]))
        c1 = case.Test(TC('test_one'), config=config)
        c2 = case.Test(TC('test_two'), config=config)
        self.assertEqual(str(c1), '(1) test')
        self.assertEqual(str(c2), '(2) test')
        assert c1.shortDescription().startswith('test #'), \
            "Unexpected shortDescription: %s" % c1.shortDescription()
        assert c2.shortDescription().startswith('test #'), \
            "Unexpected shortDescription: %s" % c2.shortDescription()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| mpl-2.0 |
nhazekam/cctools | weaver/src/weaver/function.py | 13 | 10224 | # Copyright (c) 2010- The University of Notre Dame.
# This software is distributed under the GNU General Public License.
# See the file COPYING for details.
""" Weaver function module """
from weaver.compat import callable, getfuncname
from weaver.data import parse_input_list, parse_output_list
from weaver.logger import D_FUNCTION, debug
from weaver.options import Options
from weaver.stack import CurrentAbstraction, CurrentNest
from weaver.util import find_executable, parse_string_list, type_str, WeaverError
import inspect
import itertools
import os
import sys
# Base Function class
class Function(object):
    """ This is the base Function class.

    A :class:`Function` provides the :meth:`command` method that specifies how
    to generate the command for the executable associated with the
    :class:`Function` instance.

    At a minimum, the user must specify the name of the `executable`. For
    convenience, the function :func:`~weaver.util.find_executable` is used to
    locate the executable.

    **Positional Arguments:**

    - `executable` -- Path or name of executable.

    **Keyword Arguments:**

    - `cmd_format` -- String template used to generate command string.
    - `find_dirs` -- Additional directories to search for executable.

    The `cmd_format` supports the following fields:

    - `{executable}`, `{EXE}` -- The executable file.
    - `{inputs}`, `{IN}` -- The inputs files.
    - `{outputs}`, `{OUT}` -- The output files.
    - `{arguments}`, `{ARG}` -- The arguments.

    The default command string template is :data:`~weaver.Function.CMD_FORMAT`.
    """
    #: Default command string format template
    CMD_FORMAT = '{executable} {arguments} {inputs} > {outputs}'

    def __init__(self, executable, cmd_format=None, find_dirs=None,
                 environment=None):
        self.cmd_format = cmd_format or Function.CMD_FORMAT
        self.path = find_executable(executable, find_dirs)
        self.environment = environment or dict()
        # The executable itself always travels with the task as an include.
        self.includes = set([self.path])
        debug(D_FUNCTION, 'Created Function {0}({1}, {2})'.format(
            type_str(self), self.path, self.cmd_format))

    def __call__(self, inputs=None, outputs=None, arguments=None,
                 includes=None, local=False, environment=None, collect=False):
        # Schedule one invocation of this function in the currently active
        # abstraction/nest.
        abstraction = CurrentAbstraction()
        nest = CurrentNest()

        # Engine Functions define inputs and output member attributes
        try:
            inputs = inputs or self.inputs
            outputs = outputs or self.outputs
        except AttributeError:
            pass

        inputs = parse_input_list(inputs)
        outputs = parse_output_list(outputs, inputs)
        includes = parse_input_list(includes) + parse_input_list(self.includes)
        command = self.command_format(inputs, outputs, arguments)

        # collect=True marks the inputs for collection once the task is done.
        options = Options(environment=dict(self.environment), collect=inputs if collect else None)

        if local:
            options.local = True
        if environment:
            options.environment.update(environment)

        nest.schedule(abstraction, self, command,
                      list(inputs) + list(includes), outputs, options)
        return outputs

    def command_format(self, inputs=None, outputs=None, arguments=None):
        """
        Returns command string by formatting function template with `inputs`
        and `outputs` arguments.

        This method requires the user to **explicitly** specify the `inputs`
        and `outputs` to be used in the command string.
        """
        inputs = ' '.join(parse_string_list(inputs))
        outputs = ' '.join(parse_string_list(outputs))
        arguments = ' '.join(parse_string_list(arguments))
        # Both long and short placeholder names map to the same values.
        return self.cmd_format.format(
            executable = self.path,
            EXE        = self.path,
            inputs     = inputs,
            IN         = inputs,
            outputs    = outputs,
            OUT        = outputs,
            arguments  = arguments,
            ARG        = arguments)

    def __str__(self):
        # Leave the non-executable fields as literal placeholders so the
        # template itself is visible.
        return self.cmd_format.format(
            executable = self.path,
            EXE        = self.path,
            inputs     = '{inputs}',
            IN         = '{IN}',
            outputs    = '{outputs}',
            OUT        = '{OUT}',
            arguments  = '{arguments}',
            ARG        = '{ARG}')
# Scripting Function classes
class ScriptFunction(Function):
    """ This is the base scripting Function class.

    Lets users define a :class:`Function` from a script embedded directly
    in their workflow code.

    **Positional Arguments:**

    - `source` -- Source code for the script.

    **Keyword Arguments:**

    - `executable` -- Path or name to use for the script.
    - `cmd_format` -- String template used to generate command string.

    If `executable` is ``None``, then a unique script name will be generated.
    """
    def __init__(self, source, executable=None, cmd_format=None):
        # Draw a unique script path from the nest's stash when no name is
        # given, then materialize the source as an executable file.
        if executable is None:
            executable = next(CurrentNest().stash)
        with open(executable, 'w') as script:
            script.write(source)
        os.chmod(executable, 0o755)
        Function.__init__(self, executable, cmd_format)
class ShellFunction(ScriptFunction):
    """ This allows the user to embed a shell script.

    **Positional Arguments:**

    - `source` -- Source code for the script.

    **Keyword Arguments:**

    - `shell` -- Shell to be used to execute script.
    - `executable` -- Path or name to use for the script.
    - `cmd_format` -- String template used to generate command string.

    The supported values for `shell` are ``sh``, ``ksh``, ``bash``, ``csh``,
    and ``tcsh``, all assumed to live in ``/bin``.  Passing an absolute path
    instead uses that path directly, with its basename as the shell name.
    """
    SHELL_TABLE = {
        'sh'    : '/bin/sh',
        'ksh'   : '/bin/ksh',
        'bash'  : '/bin/bash',
        'csh'   : '/bin/csh',
        'tcsh'  : '/bin/tcsh',
    }

    SHELL_DEFAULT = 'sh'

    def __init__(self, source, shell=None, executable=None, cmd_format=None):
        if shell is not None and os.path.isabs(shell):
            # Absolute path: use it verbatim; the shell name is its basename.
            shell_path = shell
            shell = os.path.basename(shell)
        else:
            # Named shell (or None): resolve through the table, falling back
            # to the default for unknown names.
            if shell not in ShellFunction.SHELL_TABLE:
                shell = ShellFunction.SHELL_DEFAULT
            shell_path = ShellFunction.SHELL_TABLE[shell]
        source = '#!%s\n' % shell_path + source
        ScriptFunction.__init__(self, source, executable, cmd_format)
class PythonFunction(ScriptFunction):
    """ This allows the user to embed Python scripts as functions.

    **Positional Arguments:**

    - `function` -- Name of Python function to materialize as a script.

    **Keyword Arguments:**

    - `executable` -- Path or name to use for the script.
    - `cmd_format` -- String template used to generate command string.
    """
    # Pin the generated script to the interpreter version running Weaver.
    PYTHON_VERSION = 'python{0}.{1}'.format(sys.version_info[0], sys.version_info[1])

    # Outer braces are filled here (the interpreter); the doubled inner
    # braces survive to be filled per-function with (imports, body, name).
    PYTHON_TEMPLATE = '''#!/usr/bin/env {0}

import {{0}}

{{1}}

if __name__ == '__main__':
    {{2}}(*sys.argv[1:])
'''.format(PYTHON_VERSION)

    def __init__(self, function, executable=None, cmd_format=None):
        # TODO: this doesn't work with Python3
        body = inspect.getsource(function)
        name = getfuncname(function)
        imports = ['os', 'sys']
        # NOTE(review): `func_imports` is a custom attribute callers may set
        # on the function to request extra imports; absent attribute is fine.
        try:
            imports.extend(function.func_imports)
        except AttributeError:
            pass
        source = self.PYTHON_TEMPLATE.format(', '.join(imports), body, name)
        ScriptFunction.__init__(self, source, executable, cmd_format)
# Function argument parser
def parse_function(function, py_func_builder=PythonFunction, environment=None):
    """ Return a :class:`Function` object based on the input `function`.

    Accepted forms of `function`:

    1. An existing :class:`Function` (returned unchanged).
    2. A string template (ex. `{executable} {arguments} {inputs} {outputs}`)
       whose **first word** names the executable.
    3. A real Python function, converted via `py_func_builder`.

    Anything else raises a :class:`~weaver.util.WeaverError`.
    """
    if isinstance(function, Function):
        return function

    if isinstance(function, str):
        tokens = function.split(' ')
        if len(tokens) > 1:
            # First token is the executable; the rest become the template.
            template = ' '.join(['{executable}'] + tokens[1:])
            return Function(tokens[0],
                            cmd_format=template,
                            environment=environment)
        return Function(function, environment=environment)

    if callable(function):
        return py_func_builder(function)

    raise WeaverError(D_FUNCTION,
        'could not parse function argument: {0}'.format(function))

ParseFunction = parse_function
# Pipeline function
class Pipeline(Function):
    """A Function that chains several functions into a single shell
    command, joined by `separator` (default ``&&``)."""

    DEFAULT_SEPARATOR = '&&'

    def __init__(self, functions, separator=None):
        self.functions = [parse_function(f) for f in functions]
        # BUG FIX: the original passed `map(str, self.functions)` to format();
        # under Python 3 that renders as '<map object at 0x...>' (address is
        # non-deterministic).  Materializing the list restores the Python 2
        # output of a list of command strings.
        Function.__init__(self, self.functions[0].path,
            cmd_format='Pipeline({0})'.format([str(f) for f in self.functions]))
        # Ship every sub-function's executable with the task.
        self.includes = {f.path for f in self.functions}
        if separator is None:
            self.separator = Pipeline.DEFAULT_SEPARATOR
        else:
            self.separator = separator

    def command_format(self, inputs=None, outputs=None, arguments=None):
        """Join each sub-function's formatted command with the separator."""
        divider = ' ' + self.separator + ' '
        return divider.join([f.command_format(inputs, outputs, arguments)
                             for f in self.functions])

    def __str__(self):
        return self.cmd_format
# vim: set sts=4 sw=4 ts=8 expandtab ft=python:
| gpl-2.0 |
bgris/ODL_bgris | lib/python3.5/site-packages/scipy/optimize/tests/test_cobyla.py | 100 | 3562 | from __future__ import division, print_function, absolute_import
import math
import numpy as np
from numpy.testing import assert_allclose, TestCase, run_module_suite, \
assert_
from scipy.optimize import fmin_cobyla, minimize
class TestCobyla(TestCase):
    """Tests for fmin_cobyla and minimize(method='COBYLA')."""

    def setUp(self):
        self.x0 = [4.95, 0.66]
        # Expected optimum used by the assertions below.
        self.solution = [math.sqrt(25 - (2.0/3)**2), 2.0/3]
        self.opts = {'disp': False, 'rhobeg': 1, 'tol': 1e-5,
                     'maxiter': 100}

    def fun(self, x):
        # Objective: x0^2 + |x1|^3.
        return x[0]**2 + abs(x[1])**3

    def con1(self, x):
        # con1 >= 0 keeps x on or outside the circle x0^2 + x1^2 = 25.
        return x[0]**2 + x[1]**2 - 25

    def con2(self, x):
        # Negation of con1; imposing both >= 0 pins x to the circle itself.
        return -self.con1(x)

    def test_simple(self):
        x = fmin_cobyla(self.fun, self.x0, [self.con1, self.con2], rhobeg=1,
                        rhoend=1e-5, iprint=0, maxfun=100)
        assert_allclose(x, self.solution, atol=1e-4)

    def test_minimize_simple(self):
        # Minimize with method='COBYLA'
        cons = ({'type': 'ineq', 'fun': self.con1},
                {'type': 'ineq', 'fun': self.con2})
        sol = minimize(self.fun, self.x0, method='cobyla', constraints=cons,
                       options=self.opts)
        assert_allclose(sol.x, self.solution, atol=1e-4)
        assert_(sol.success, sol.message)
        assert_(sol.maxcv < 1e-5, sol)
        assert_(sol.nfev < 70, sol)
        assert_(sol.fun < self.fun(self.solution) + 1e-3, sol)

    def test_minimize_constraint_violation(self):
        np.random.seed(1234)
        pb = np.random.rand(10, 10)
        spread = np.random.rand(10)

        def p(w):
            return pb.dot(w)

        def f(w):
            return -(w * spread).sum()

        def c1(w):
            return 500 - abs(p(w)).sum()

        def c2(w):
            return 5 - abs(p(w).sum())

        def c3(w):
            return 5 - abs(p(w)).max()

        cons = ({'type': 'ineq', 'fun': c1},
                {'type': 'ineq', 'fun': c2},
                {'type': 'ineq', 'fun': c3})
        w0 = np.zeros((10, 1))
        sol = minimize(f, w0, method='cobyla', constraints=cons,
                       options={'catol': 1e-6})
        # With such a tight catol the result must report a constraint
        # violation above the tolerance and a failed status.
        assert_(sol.maxcv > 1e-6)
        assert_(not sol.success)
def test_vector_constraints():
    """Check that fmin_cobyla and minimize accept a mix of constraints,
    some returning a scalar and others an array."""
    def fun(x):
        return (x[0] - 1)**2 + (x[1] - 2.5)**2

    def fmin(x):
        # Scalar constraint: objective already within 1 of its minimum.
        return fun(x) - 1

    def cons1(x):
        a = np.array([[1, -2, 2], [-1, -2, 6], [-1, 2, 2]])
        # Vectorized form of the original per-row loop:
        # a[i, 0]*x[0] + a[i, 1]*x[1] + a[i, 2] for each row i.
        return a[:, 0] * x[0] + a[:, 1] * x[1] + a[:, 2]

    def cons2(x):
        return x  # identity, acts as bounds x > 0

    x0 = np.array([2, 0])
    cons_list = [fun, cons1, cons2]
    xsol = [1.4, 1.7]
    fsol = 0.8

    # testing fmin_cobyla.  `disp=0` replaces the long-deprecated (and
    # since-removed) `iprint=0`; both silence solver output.
    sol = fmin_cobyla(fun, x0, cons_list, rhoend=1e-5, disp=0)
    assert_allclose(sol, xsol, atol=1e-4)
    sol = fmin_cobyla(fun, x0, fmin, rhoend=1e-5, disp=0)
    assert_allclose(fun(sol), 1, atol=1e-4)

    # testing minimize
    constraints = [{'type': 'ineq', 'fun': cons} for cons in cons_list]
    sol = minimize(fun, x0, constraints=constraints, tol=1e-5)
    assert_allclose(sol.x, xsol, atol=1e-4)
    assert_(sol.success, sol.message)
    assert_allclose(sol.fun, fsol, atol=1e-4)

    constraints = {'type': 'ineq', 'fun': fmin}
    sol = minimize(fun, x0, constraints=constraints, tol=1e-5)
    assert_allclose(sol.fun, 1, atol=1e-4)

if __name__ == "__main__":
    run_module_suite()
| gpl-3.0 |
cancan101/tensorflow | tensorflow/python/kernel_tests/metrics_test.py | 11 | 129672 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for metrics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import math
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
import tensorflow.python.ops.data_flow_grad # pylint: disable=unused-import
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
# Shorthand NaN constant for use in expected values throughout this module.
NAN = float('nan')
def _enqueue_vector(sess, queue, values, shape=None):
  """Enqueue `values` on `queue` as a constant of `shape`.

  The shape defaults to a single row: (1, len(values)).  The constant's
  dtype is taken from the queue's first component dtype.
  """
  if not shape:
    shape = (1, len(values))
  const = constant_op.constant(values, dtype=queue.dtypes[0], shape=shape)
  sess.run(queue.enqueue(const))
def _binary_2d_label_to_2d_sparse_value(labels):
  """Convert dense 2D binary indicator to sparse ID.

  Only 1 values in `labels` are included in result.

  Args:
    labels: Dense 2D binary indicator, shape [batch_size, num_classes].

  Returns:
    `SparseTensorValue` of shape [batch_size, num_classes], where num_classes
    is the number of `1` values in each row of `labels`. Values are indices
    of `1` values along the last dimension of `labels`.
  """
  indices = []
  values = []
  # enumerate() replaces the original hand-maintained batch/label counters.
  for batch, row in enumerate(labels):
    # `xi` is the position of this `1` among the row's `1` entries;
    # `label` is its column (class id).
    xi = 0
    for label, x in enumerate(row):
      if x == 1:
        indices.append([batch, xi])
        values.append(label)
        xi += 1
      else:
        assert x == 0
  shape = [len(labels), len(labels[0])]
  return sparse_tensor.SparseTensorValue(
      np.array(indices, np.int64),
      np.array(values, np.int64), np.array(shape, np.int64))
def _binary_2d_label_to_1d_sparse_value(labels):
  """Convert dense 2D binary indicator to sparse ID.

  Only 1 values in `labels` are included in result.

  Args:
    labels: Dense 2D binary indicator, shape [batch_size, num_classes]. Each
      row must contain exactly 1 `1` value.

  Returns:
    `SparseTensorValue` of shape [batch_size]. Values are indices of `1` values
    along the last dimension of `labels`.

  Raises:
    ValueError: if there is not exactly 1 `1` value per row of `labels`.
  """
  indices = []
  values = []
  # enumerate() replaces the hand-maintained batch/label counters; the
  # original also kept a dead `xi` counter that is dropped here.
  for batch, row in enumerate(labels):
    for label, x in enumerate(row):
      if x == 1:
        indices.append([batch])
        values.append(label)
      else:
        assert x == 0
  # Exactly one `1` per row means indices must be [[0], [1], ..., [n-1]].
  if indices != [[i] for i in range(len(labels))]:
    raise ValueError('Expected 1 label/example, got %s.' % indices)
  shape = [len(labels)]
  return sparse_tensor.SparseTensorValue(
      np.array(indices, np.int64),
      np.array(values, np.int64), np.array(shape, np.int64))
def _binary_3d_label_to_sparse_value(labels):
  """Convert dense 3D binary indicator tensor to sparse tensor.

  Only 1 values in `labels` are included in result.

  Args:
    labels: Dense 2D binary indicator tensor.

  Returns:
    `SparseTensorValue` whose values are indices along the last dimension of
    `labels`.
  """
  # Collect (index_triple, class_id) pairs in scan order, then split them.
  entries = []
  for d0, plane in enumerate(labels):
    for d1, row in enumerate(plane):
      hits = 0  # position of this `1` among the row's `1` entries
      for class_id, label in enumerate(row):
        if label == 1:
          entries.append(([d0, d1, hits], class_id))
          hits += 1
        else:
          assert label == 0
  indices = [idx for idx, _ in entries]
  values = [cid for _, cid in entries]
  shape = [len(labels), len(labels[0]), len(labels[0][0])]
  return sparse_tensor.SparseTensorValue(
      np.array(indices, np.int64),
      np.array(values, np.int64), np.array(shape, np.int64))
def _assert_nan(test_case, actual):
test_case.assertTrue(math.isnan(actual), 'Expected NAN, got %s.' % actual)
def _assert_local_variables(test_case, expected):
  """Assert the graph's local variable names equal `expected` (as a set)."""
  actual = {v.name for v in variables.local_variables()}
  test_case.assertEquals(set(expected), actual)
def _test_values(shape):
return np.reshape(np.cumsum(np.ones(shape)), newshape=shape)
class MeanTest(test.TestCase):
  """Tests for metrics.mean (streaming scalar mean)."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    metrics.mean(array_ops.ones([4, 3]))
    _assert_local_variables(self, ('mean/count:0', 'mean/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.mean(
        array_ops.ones([4, 3]), metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.mean(
        array_ops.ones([4, 3]), updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testBasic(self):
    with self.test_session() as sess:
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      mean, update_op = metrics.mean(values)

      sess.run(variables.local_variables_initializer())
      for _ in range(4):
        sess.run(update_op)
      # Mean of all 8 enqueued values: 13.2 / 8 = 1.65.
      self.assertAlmostEqual(1.65, sess.run(mean), 5)

  def testUpdateOpsReturnsCurrentValue(self):
    with self.test_session() as sess:
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      mean, update_op = metrics.mean(values)

      sess.run(variables.local_variables_initializer())

      # Each update_op run returns the running mean over all values so far.
      self.assertAlmostEqual(0.5, sess.run(update_op), 5)
      self.assertAlmostEqual(1.475, sess.run(update_op), 5)
      self.assertAlmostEqual(12.4 / 6.0, sess.run(update_op), 5)
      self.assertAlmostEqual(1.65, sess.run(update_op), 5)

      self.assertAlmostEqual(1.65, sess.run(mean), 5)

  def testUnweighted(self):
    values = _test_values((3, 2, 4))
    # Every weights shape here broadcasts to all-ones over `values`, so each
    # variant must reproduce the plain unweighted mean.
    mean_results = (
        metrics.mean(values),
        metrics.mean(values, weights=1.0),
        metrics.mean(values, weights=np.ones((1, 1, 1))),
        metrics.mean(values, weights=np.ones((1, 1, 1, 1))),
        metrics.mean(values, weights=np.ones((1, 1, 4))),
        metrics.mean(values, weights=np.ones((1, 2, 1))),
        metrics.mean(values, weights=np.ones((1, 2, 4))),
        metrics.mean(values, weights=np.ones((3, 1, 1))),
        metrics.mean(values, weights=np.ones((3, 1, 4))),
        metrics.mean(values, weights=np.ones((3, 2, 1))),
        metrics.mean(values, weights=np.ones((3, 2, 4))),
        metrics.mean(values, weights=np.ones((3, 2, 4, 1))),)
    expected = np.mean(values)
    with self.test_session():
      variables.local_variables_initializer().run()
      for mean_result in mean_results:
        mean, update_op = mean_result
        self.assertAlmostEqual(expected, update_op.eval())
        self.assertAlmostEqual(expected, mean.eval())

  def _test_3d_weighted(self, values, weights):
    # Expected weighted mean with `weights` broadcast against `values`.
    expected = (
        np.sum(np.multiply(weights, values)) /
        np.sum(np.multiply(weights, np.ones_like(values)))
    )
    mean, update_op = metrics.mean(values, weights=weights)
    with self.test_session():
      variables.local_variables_initializer().run()
      self.assertAlmostEqual(expected, update_op.eval(), places=5)
      self.assertAlmostEqual(expected, mean.eval(), places=5)

  def test1x1x1Weighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((5,)).reshape((1, 1, 1)))

  def test1x1xNWeighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((5, 7, 11, 3)).reshape((1, 1, 4)))

  def test1xNx1Weighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((5, 11)).reshape((1, 2, 1)))

  def test1xNxNWeighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((5, 7, 11, 3, 2, 13, 7, 5)).reshape((1, 2, 4)))

  def testNx1x1Weighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((5, 7, 11)).reshape((3, 1, 1)))

  def testNx1xNWeighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((
            5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3)).reshape((3, 1, 4)))

  def testNxNxNWeighted(self):
    self._test_3d_weighted(
        _test_values((3, 2, 4)),
        weights=np.asarray((
            5, 7, 11, 3, 2, 12, 7, 5, 2, 17, 11, 3,
            2, 17, 11, 3, 5, 7, 11, 3, 2, 12, 7, 5)).reshape((3, 2, 4)))

  def testInvalidWeights(self):
    values_placeholder = array_ops.placeholder(dtype=dtypes_lib.float32)
    values = _test_values((3, 2, 4, 1))
    # None of these shapes broadcast to (3, 2, 4, 1).
    invalid_weights = (
        (1,),
        (1, 1),
        (1, 1, 1),
        (3, 2),
        (3, 2, 4),
        (2, 4, 1),
        (4, 2, 4, 1),
        (3, 3, 4, 1),
        (3, 2, 5, 1),
        (3, 2, 4, 2),
        (1, 1, 1, 1, 1))
    expected_error_msg = 'weights can not be broadcast to values'
    for invalid_weight in invalid_weights:
      # Static shapes.
      with self.assertRaisesRegexp(ValueError, expected_error_msg):
        metrics.mean(values, invalid_weight)

      # Dynamic shapes: the mismatch is only detectable when the op runs.
      with self.assertRaisesRegexp(errors_impl.OpError, expected_error_msg):
        with self.test_session():
          _, update_op = metrics.mean(values_placeholder, invalid_weight)
          variables.local_variables_initializer().run()
          update_op.eval(feed_dict={values_placeholder: values})
class MeanTensorTest(test.TestCase):
  """Tests for metrics.mean_tensor, the elementwise streaming mean.

  Bug fix: several assertions previously passed a bare ``5`` as the third
  positional argument of ``assertAllClose``.  That argument is ``rtol`` (not
  decimal places as in ``assertAlmostEqual``), so a 500% relative tolerance
  made those checks vacuous.  They now use explicit rtol/atol values.
  """

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    """mean_tensor registers total/count local variables."""
    metrics.mean_tensor(array_ops.ones([4, 3]))
    _assert_local_variables(self, ('mean/total_tensor:0',
                                   'mean/count_tensor:0'))

  def testMetricsCollection(self):
    """The value tensor is added to the requested metrics collection."""
    my_collection_name = '__metrics__'
    mean, _ = metrics.mean_tensor(
        array_ops.ones([4, 3]), metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    """The update op is added to the requested updates collection."""
    my_collection_name = '__updates__'
    _, update_op = metrics.mean_tensor(
        array_ops.ones([4, 3]), updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testBasic(self):
    """Elementwise mean of four streamed (1, 2) vectors."""
    with self.test_session() as sess:
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      mean, update_op = metrics.mean_tensor(values)

      sess.run(variables.local_variables_initializer())
      for _ in range(4):
        sess.run(update_op)
      # Column means: (0 - 4.2 + 6.5 - 3.2) / 4 and (1 + 9.1 + 0 + 4.0) / 4.
      self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(mean))

  def testMultiDimensional(self):
    """The running mean is kept per element of a (2, 2, 2) tensor."""
    with self.test_session() as sess:
      values_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(2, 2, 2))
      _enqueue_vector(
          sess,
          values_queue, [[[1, 2], [1, 2]], [[1, 2], [1, 2]]],
          shape=(2, 2, 2))
      _enqueue_vector(
          sess,
          values_queue, [[[1, 2], [1, 2]], [[3, 4], [9, 10]]],
          shape=(2, 2, 2))
      values = values_queue.dequeue()

      mean, update_op = metrics.mean_tensor(values)

      sess.run(variables.local_variables_initializer())
      for _ in range(2):
        sess.run(update_op)
      self.assertAllClose([[[1, 2], [1, 2]], [[2, 3], [5, 6]]],
                          sess.run(mean))

  def testUpdateOpsReturnsCurrentValue(self):
    """Each evaluation of the update op yields the running mean so far."""
    with self.test_session() as sess:
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      mean, update_op = metrics.mean_tensor(values)

      sess.run(variables.local_variables_initializer())

      # NOTE: assertAllClose's third positional argument is `rtol`; the
      # original code passed a bare 5 (intended as decimal places), which
      # disabled the comparison.  Explicit tolerances restore the check.
      self.assertAllClose([[0, 1]], sess.run(update_op),
                          rtol=1e-5, atol=1e-5)
      self.assertAllClose([[-2.1, 5.05]], sess.run(update_op),
                          rtol=1e-5, atol=1e-5)
      self.assertAllClose([[2.3 / 3., 10.1 / 3.]], sess.run(update_op),
                          rtol=1e-5, atol=1e-5)
      self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(update_op),
                          rtol=1e-5, atol=1e-5)
      self.assertAllClose([[-0.9 / 4., 3.525]], sess.run(mean),
                          rtol=1e-5, atol=1e-5)

  def testWeighted1d(self):
    """Zero row-weights drop rows 1 and 3; mean is over rows 0 and 2."""
    with self.test_session() as sess:
      # Create the queue that populates the values.
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      # Create the queue that populates the weights.
      weights_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 1))
      _enqueue_vector(sess, weights_queue, [[1]])
      _enqueue_vector(sess, weights_queue, [[0]])
      _enqueue_vector(sess, weights_queue, [[1]])
      _enqueue_vector(sess, weights_queue, [[0]])
      weights = weights_queue.dequeue()

      mean, update_op = metrics.mean_tensor(values, weights)

      sess.run(variables.local_variables_initializer())
      for _ in range(4):
        sess.run(update_op)
      # Mean of [0, 1] and [6.5, 0] only.
      self.assertAllClose([[3.25, 0.5]], sess.run(mean),
                          rtol=1e-5, atol=1e-5)

  def testWeighted2d_1(self):
    """Per-element weights select different rows per column."""
    with self.test_session() as sess:
      # Create the queue that populates the values.
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      # Create the queue that populates the weights.
      weights_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, weights_queue, [1, 1])
      _enqueue_vector(sess, weights_queue, [1, 0])
      _enqueue_vector(sess, weights_queue, [0, 1])
      _enqueue_vector(sess, weights_queue, [0, 0])
      weights = weights_queue.dequeue()

      mean, update_op = metrics.mean_tensor(values, weights)

      sess.run(variables.local_variables_initializer())
      for _ in range(4):
        sess.run(update_op)
      # Col 0: (0 - 4.2) / 2; col 1: (1 + 0) / 2.
      self.assertAllClose([[-2.1, 0.5]], sess.run(mean),
                          rtol=1e-5, atol=1e-5)

  def testWeighted2d_2(self):
    """An all-zero weight column yields a zero mean for that column."""
    with self.test_session() as sess:
      # Create the queue that populates the values.
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      # Create the queue that populates the weights.
      weights_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 2))
      _enqueue_vector(sess, weights_queue, [0, 1])
      _enqueue_vector(sess, weights_queue, [0, 0])
      _enqueue_vector(sess, weights_queue, [0, 1])
      _enqueue_vector(sess, weights_queue, [0, 0])
      weights = weights_queue.dequeue()

      mean, update_op = metrics.mean_tensor(values, weights)

      sess.run(variables.local_variables_initializer())
      for _ in range(4):
        sess.run(update_op)
      self.assertAllClose([[0, 0.5]], sess.run(mean),
                          rtol=1e-5, atol=1e-5)
class AccuracyTest(test.TestCase):
  """Tests for metrics.accuracy."""

  def setUp(self):
    ops.reset_default_graph()

  def testVars(self):
    """accuracy registers count/total local variables under its name."""
    metrics.accuracy(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        name='my_accuracy')
    _assert_local_variables(self, ('my_accuracy/count:0',
                                   'my_accuracy/total:0'))

  def testMetricsCollection(self):
    """The accuracy value tensor joins the requested collection."""
    my_collection_name = '__metrics__'
    mean, _ = metrics.accuracy(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    collected = ops.get_collection(my_collection_name)
    self.assertListEqual(collected, [mean])

  def testUpdatesCollection(self):
    """The accuracy update op joins the requested collection."""
    my_collection_name = '__updates__'
    _, update_op = metrics.accuracy(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    collected = ops.get_collection(my_collection_name)
    self.assertListEqual(collected, [update_op])

  def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
    """Incompatible prediction/label shapes fail at graph construction."""
    predictions = array_ops.ones((10, 3))
    labels = array_ops.ones((10, 4))
    with self.assertRaises(ValueError):
      metrics.accuracy(labels, predictions)

  def testPredictionsAndWeightsOfDifferentSizeRaisesValueError(self):
    """Incompatible weight shapes fail at graph construction."""
    predictions = array_ops.ones((10, 3))
    labels = array_ops.ones((10, 3))
    weights = array_ops.ones((9, 3))
    with self.assertRaises(ValueError):
      metrics.accuracy(labels, predictions, weights)

  def testValueTensorIsIdempotent(self):
    """Reading the accuracy tensor must not mutate the metric state."""
    predictions = random_ops.random_uniform(
        (10, 3), maxval=3, dtype=dtypes_lib.int64, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=3, dtype=dtypes_lib.int64, seed=1)
    accuracy, update_op = metrics.accuracy(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Accumulate some state first.
      for _ in range(10):
        sess.run(update_op)

      # Repeated evaluations must agree with the first read.
      baseline = accuracy.eval()
      for _ in range(10):
        self.assertEqual(baseline, accuracy.eval())

  def testMultipleUpdates(self):
    """Streams four scalar examples of which exactly half match."""
    with self.test_session() as sess:
      # Queue of predictions.
      predictions_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 1))
      for value in (0, 1, 2, 1):
        _enqueue_vector(sess, predictions_queue, [value])
      predictions = predictions_queue.dequeue()

      # Queue of labels.
      labels_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 1))
      for value in (0, 1, 1, 2):
        _enqueue_vector(sess, labels_queue, [value])
      labels = labels_queue.dequeue()

      accuracy, update_op = metrics.accuracy(labels, predictions)

      sess.run(variables.local_variables_initializer())
      for _ in xrange(3):
        sess.run(update_op)
      self.assertEqual(0.5, sess.run(update_op))
      self.assertEqual(0.5, accuracy.eval())

  def testEffectivelyEquivalentSizes(self):
    """(40, 1) predictions squeeze to compare against (40,) labels."""
    predictions = array_ops.ones((40, 1))
    labels = array_ops.ones((40,))
    with self.test_session() as sess:
      accuracy, update_op = metrics.accuracy(labels, predictions)

      sess.run(variables.local_variables_initializer())
      self.assertEqual(1.0, update_op.eval())
      self.assertEqual(1.0, accuracy.eval())

  def testEffectivelyEquivalentSizesWithScalarWeight(self):
    """A scalar weight does not change a perfect accuracy."""
    predictions = array_ops.ones((40, 1))
    labels = array_ops.ones((40,))
    with self.test_session() as sess:
      accuracy, update_op = metrics.accuracy(labels, predictions, weights=2.0)

      sess.run(variables.local_variables_initializer())
      self.assertEqual(1.0, update_op.eval())
      self.assertEqual(1.0, accuracy.eval())

  def testEffectivelyEquivalentSizesWithStaticShapedWeight(self):
    """Rank-mismatched weights are flattened, not broadcast."""
    predictions = ops.convert_to_tensor([1, 1, 1])  # shape 3,
    labels = array_ops.expand_dims(ops.convert_to_tensor([1, 0, 0]),
                                   1)  # shape 3, 1
    weights = array_ops.expand_dims(ops.convert_to_tensor([100, 1, 1]),
                                    1)  # shape 3, 1

    with self.test_session() as sess:
      accuracy, update_op = metrics.accuracy(labels, predictions, weights)

      sess.run(variables.local_variables_initializer())
      # If streaming_accuracy broadcast the weight instead of flattening it,
      # accuracy would be ~0.33333334.  Flattening lets the heavily weighted
      # correct example dominate, pushing the result above .95.
      self.assertGreater(update_op.eval(), .95)
      self.assertGreater(accuracy.eval(), .95)

  def testEffectivelyEquivalentSizesWithDynamicallyShapedWeight(self):
    """Same flattening behavior when the weight shape is only known at run time."""
    predictions = ops.convert_to_tensor([1, 1, 1])  # shape 3,
    labels = array_ops.expand_dims(ops.convert_to_tensor([1, 0, 0]),
                                   1)  # shape 3, 1
    weights = [[100], [1], [1]]  # shape 3, 1
    weights_placeholder = array_ops.placeholder(
        dtype=dtypes_lib.int32, name='weights')
    feed_dict = {weights_placeholder: weights}

    with self.test_session() as sess:
      accuracy, update_op = metrics.accuracy(labels, predictions,
                                             weights_placeholder)

      sess.run(variables.local_variables_initializer())
      # See testEffectivelyEquivalentSizesWithStaticShapedWeight: flattening
      # (not broadcasting) makes the result exceed .95.
      self.assertGreater(update_op.eval(feed_dict=feed_dict), .95)
      self.assertGreater(accuracy.eval(feed_dict=feed_dict), .95)

  def testMultipleUpdatesWithWeightedValues(self):
    """Zero weights exclude the two mismatched trailing examples."""
    with self.test_session() as sess:
      # Queue of predictions.
      predictions_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 1))
      for value in (0, 1, 2, 1):
        _enqueue_vector(sess, predictions_queue, [value])
      predictions = predictions_queue.dequeue()

      # Queue of labels.
      labels_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.float32, shapes=(1, 1))
      for value in (0, 1, 1, 2):
        _enqueue_vector(sess, labels_queue, [value])
      labels = labels_queue.dequeue()

      # Queue of weights; only the first two (matching) examples count.
      weights_queue = data_flow_ops.FIFOQueue(
          4, dtypes=dtypes_lib.int64, shapes=(1, 1))
      for value in (1, 1, 0, 0):
        _enqueue_vector(sess, weights_queue, [value])
      weights = weights_queue.dequeue()

      accuracy, update_op = metrics.accuracy(labels, predictions, weights)

      sess.run(variables.local_variables_initializer())
      for _ in xrange(3):
        sess.run(update_op)
      self.assertEqual(1.0, sess.run(update_op))
      self.assertEqual(1.0, accuracy.eval())
class PrecisionTest(test.TestCase):
  """Tests for metrics.precision (streaming TP / (TP + FP))."""

  def setUp(self):
    # Seed numpy so the random fixtures are deterministic; start each test
    # from a fresh default graph.
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    # precision should register TP/FP counters as local variables.
    metrics.precision(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('precision/false_positives/count:0',
                                   'precision/true_positives/count:0'))

  def testMetricsCollection(self):
    # The value tensor should land in the requested metrics collection.
    my_collection_name = '__metrics__'
    mean, _ = metrics.precision(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    # The update op should land in the requested updates collection.
    my_collection_name = '__updates__'
    _, update_op = metrics.precision(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    precision, update_op = metrics.precision(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_precision = precision.eval()
      for _ in range(10):
        self.assertEqual(initial_precision, precision.eval())

  def testAllCorrect(self):
    # Identical predictions and labels: every predicted positive is a TP.
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs)
    labels = constant_op.constant(inputs)
    precision, update_op = metrics.precision(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op))
      self.assertAlmostEqual(1, precision.eval())

  def testSomeCorrect_multipleInputDtypes(self):
    # 1 TP out of 2 predicted positives => 0.5, for every supported dtype.
    for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
      predictions = math_ops.cast(
          constant_op.constant([1, 0, 1, 0], shape=(1, 4)), dtype=dtype)
      labels = math_ops.cast(
          constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=dtype)
      precision, update_op = metrics.precision(labels, predictions)

      with self.test_session() as sess:
        sess.run(variables.local_variables_initializer())
        self.assertAlmostEqual(0.5, update_op.eval())
        self.assertAlmostEqual(0.5, precision.eval())

  def testWeighted1d(self):
    # Row weights scale both the TPs and the predicted positives:
    # row 0 contributes TP weight 2, row 1 contributes TP weight 5.
    predictions = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    precision, update_op = metrics.precision(
        labels, predictions, weights=constant_op.constant([[2], [5]]))

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 2.0 + 5.0
      weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(expected_precision, update_op.eval())
      self.assertAlmostEqual(expected_precision, precision.eval())

  def testWeightedScalar_placeholders(self):
    # A scalar weight multiplies numerator and denominator equally, so the
    # precision matches the unweighted value.
    predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
    labels = array_ops.placeholder(dtype=dtypes_lib.float32)
    feed_dict = {
        predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
        labels: ((0, 1, 1, 0), (1, 0, 0, 1))
    }
    precision, update_op = metrics.precision(labels, predictions, weights=2)

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 2.0 + 2.0
      weighted_positives = (2.0 + 2.0) + (2.0 + 2.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(
          expected_precision, update_op.eval(feed_dict=feed_dict))
      self.assertAlmostEqual(
          expected_precision, precision.eval(feed_dict=feed_dict))

  def testWeighted1d_placeholders(self):
    # Same as testWeighted1d, but with inputs fed at run time.
    predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
    labels = array_ops.placeholder(dtype=dtypes_lib.float32)
    feed_dict = {
        predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
        labels: ((0, 1, 1, 0), (1, 0, 0, 1))
    }
    precision, update_op = metrics.precision(
        labels, predictions, weights=constant_op.constant([[2], [5]]))

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 2.0 + 5.0
      weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(
          expected_precision, update_op.eval(feed_dict=feed_dict))
      self.assertAlmostEqual(
          expected_precision, precision.eval(feed_dict=feed_dict))

  def testWeighted2d(self):
    # Per-element weights: TPs at (0, 2) w=3 and (1, 0) w=4; predicted
    # positives at (0, 0) w=1, (0, 2) w=3, (1, 0) w=4, (1, 2) w=2.
    predictions = constant_op.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    precision, update_op = metrics.precision(
        labels,
        predictions,
        weights=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 3.0 + 4.0
      weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(expected_precision, update_op.eval())
      self.assertAlmostEqual(expected_precision, precision.eval())

  def testWeighted2d_placeholders(self):
    # Same as testWeighted2d, but with inputs fed at run time.
    predictions = array_ops.placeholder(dtype=dtypes_lib.float32)
    labels = array_ops.placeholder(dtype=dtypes_lib.float32)
    feed_dict = {
        predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
        labels: ((0, 1, 1, 0), (1, 0, 0, 1))
    }
    precision, update_op = metrics.precision(
        labels,
        predictions,
        weights=constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))

    with self.test_session():
      variables.local_variables_initializer().run()
      weighted_tp = 3.0 + 4.0
      weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
      expected_precision = weighted_tp / weighted_positives
      self.assertAlmostEqual(
          expected_precision, update_op.eval(feed_dict=feed_dict))
      self.assertAlmostEqual(
          expected_precision, precision.eval(feed_dict=feed_dict))

  def testAllIncorrect(self):
    # Complementary labels: every predicted positive is a false positive.
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs)
    labels = constant_op.constant(1 - inputs)
    precision, update_op = metrics.precision(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertAlmostEqual(0, precision.eval())

  def testZeroTrueAndFalsePositivesGivesZeroPrecision(self):
    # With no predicted positives at all, precision degrades to 0 rather
    # than dividing by zero.
    predictions = constant_op.constant([0, 0, 0, 0])
    labels = constant_op.constant([0, 0, 0, 0])
    precision, update_op = metrics.precision(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(0.0, precision.eval())
class RecallTest(test.TestCase):
  """Tests for metrics.recall (streaming TP / (TP + FN))."""

  def setUp(self):
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    """recall registers TP/FN counters as local variables."""
    metrics.recall(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('recall/false_negatives/count:0',
                                   'recall/true_positives/count:0'))

  def testMetricsCollection(self):
    """The recall value tensor joins the requested collection."""
    my_collection_name = '__metrics__'
    mean, _ = metrics.recall(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    collected = ops.get_collection(my_collection_name)
    self.assertListEqual(collected, [mean])

  def testUpdatesCollection(self):
    """The recall update op joins the requested collection."""
    my_collection_name = '__updates__'
    _, update_op = metrics.recall(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    collected = ops.get_collection(my_collection_name)
    self.assertListEqual(collected, [update_op])

  def testValueTensorIsIdempotent(self):
    """Reading the recall tensor must not mutate the metric state."""
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    recall, update_op = metrics.recall(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Accumulate some state first.
      for _ in range(10):
        sess.run(update_op)

      # Repeated evaluations must agree with the first read.
      baseline = recall.eval()
      for _ in range(10):
        self.assertEqual(baseline, recall.eval())

  def testAllCorrect(self):
    """Identical predictions and labels give perfect recall."""
    input_values = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(input_values)
    labels = constant_op.constant(input_values)
    recall, update_op = metrics.recall(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(1, recall.eval())

  def testSomeCorrect_multipleInputDtypes(self):
    """1 TP out of 2 actual positives => 0.5, for every supported dtype."""
    for dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
      predictions = math_ops.cast(
          constant_op.constant([1, 0, 1, 0], shape=(1, 4)), dtype=dtype)
      labels = math_ops.cast(
          constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=dtype)
      recall, update_op = metrics.recall(labels, predictions)

      with self.test_session() as sess:
        sess.run(variables.local_variables_initializer())
        self.assertAlmostEqual(0.5, update_op.eval())
        self.assertAlmostEqual(0.5, recall.eval())

  def testWeighted1d(self):
    """Row weights scale both true positives and actual positives."""
    predictions = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    weights = constant_op.constant([[2], [5]])
    recall, update_op = metrics.recall(labels, predictions, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      weighted_tp = 2.0 + 5.0
      weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
      expected_recall = weighted_tp / weighted_positives
      self.assertAlmostEqual(expected_recall, update_op.eval())
      self.assertAlmostEqual(expected_recall, recall.eval())

  def testWeighted2d(self):
    """Per-element weights scale both true positives and actual positives."""
    predictions = constant_op.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
    labels = constant_op.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
    weights = constant_op.constant([[1, 2, 3, 4], [4, 3, 2, 1]])
    recall, update_op = metrics.recall(labels, predictions, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      weighted_tp = 3.0 + 1.0
      weighted_positives = (2.0 + 3.0) + (4.0 + 1.0)
      expected_recall = weighted_tp / weighted_positives
      self.assertAlmostEqual(expected_recall, update_op.eval())
      self.assertAlmostEqual(expected_recall, recall.eval())

  def testAllIncorrect(self):
    """Complementary labels give zero recall."""
    input_values = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(input_values)
    labels = constant_op.constant(1 - input_values)
    recall, update_op = metrics.recall(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(0, recall.eval())

  def testZeroTruePositivesAndFalseNegativesGivesZeroRecall(self):
    """With no positive labels at all, recall degrades to 0."""
    predictions = array_ops.zeros((1, 4))
    labels = array_ops.zeros((1, 4))
    recall, update_op = metrics.recall(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      sess.run(update_op)
      self.assertEqual(0, recall.eval())
class AUCTest(test.TestCase):
  """Tests for metrics.auc (streaming ROC / PR area-under-curve)."""

  def setUp(self):
    # Seed numpy so the random fixtures are deterministic; start each test
    # from a fresh default graph.
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    # auc should register the four confusion-matrix local variables.
    metrics.auc(predictions=array_ops.ones((10, 1)),
                labels=array_ops.ones((10, 1)))
    _assert_local_variables(self,
                            ('auc/true_positives:0', 'auc/false_negatives:0',
                             'auc/false_positives:0', 'auc/true_negatives:0'))

  def testMetricsCollection(self):
    # The value tensor should land in the requested metrics collection.
    my_collection_name = '__metrics__'
    mean, _ = metrics.auc(predictions=array_ops.ones((10, 1)),
                          labels=array_ops.ones((10, 1)),
                          metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    # The update op should land in the requested updates collection.
    my_collection_name = '__updates__'
    _, update_op = metrics.auc(predictions=array_ops.ones((10, 1)),
                               labels=array_ops.ones((10, 1)),
                               updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    # Evaluating the value tensor must not change the accumulated state.
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    auc, update_op = metrics.auc(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_auc = auc.eval()
      for _ in range(10):
        self.assertAlmostEqual(initial_auc, auc.eval(), 5)

  def testAllCorrect(self):
    self.allCorrectAsExpected('ROC')

  def allCorrectAsExpected(self, curve):
    # Perfect predictions give an AUC of 1 for the given curve type.
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(inputs)
      auc, update_op = metrics.auc(labels, predictions, curve=curve)

      sess.run(variables.local_variables_initializer())
      self.assertEqual(1, sess.run(update_op))

      self.assertEqual(1, auc.eval())

  def testSomeCorrect_multipleLabelDtypes(self):
    # Half-right hard predictions give AUC 0.5 for all label dtypes.
    with self.test_session() as sess:
      for label_dtype in (
          dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
        predictions = constant_op.constant(
            [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
        labels = math_ops.cast(
            constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=label_dtype)
        auc, update_op = metrics.auc(labels, predictions)

        sess.run(variables.local_variables_initializer())
        self.assertAlmostEqual(0.5, sess.run(update_op))

        self.assertAlmostEqual(0.5, auc.eval())

  def testWeighted1d(self):
    # A uniform per-row weight rescales counts but leaves AUC unchanged.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
      weights = constant_op.constant([2], shape=(1, 1))
      auc, update_op = metrics.auc(labels, predictions, weights=weights)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.5, sess.run(update_op), 5)

      self.assertAlmostEqual(0.5, auc.eval(), 5)

  def testWeighted2d(self):
    # Non-uniform per-element weights shift the curve; expected value 0.7.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
      weights = constant_op.constant([1, 2, 3, 4], shape=(1, 4))
      auc, update_op = metrics.auc(labels, predictions, weights=weights)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.7, sess.run(update_op), 5)

      self.assertAlmostEqual(0.7, auc.eval(), 5)

  def testAUCPRSpecialCase(self):
    # Hand-computed PR-AUC for a small, well-separated example.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [0.1, 0.4, 0.35, 0.8], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 0, 1, 1], shape=(1, 4))
      auc, update_op = metrics.auc(labels, predictions, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.79166, sess.run(update_op), delta=1e-3)

      self.assertAlmostEqual(0.79166, auc.eval(), delta=1e-3)

  def testAnotherAUCPRSpecialCase(self):
    # Hand-computed PR-AUC with interleaved positives and negatives.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [0.1, 0.4, 0.35, 0.8, 0.1, 0.135, 0.81],
          shape=(1, 7),
          dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 0, 1, 0, 1, 0, 1], shape=(1, 7))
      auc, update_op = metrics.auc(labels, predictions, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.610317, sess.run(update_op), delta=1e-3)

      self.assertAlmostEqual(0.610317, auc.eval(), delta=1e-3)

  def testThirdAUCPRSpecialCase(self):
    # Hand-computed PR-AUC where all positives score above all negatives
    # except for one overlapping threshold region.
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [0.0, 0.1, 0.2, 0.33, 0.3, 0.4, 0.5],
          shape=(1, 7),
          dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 0, 0, 0, 1, 1, 1], shape=(1, 7))
      auc, update_op = metrics.auc(labels, predictions, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.90277, sess.run(update_op), delta=1e-3)

      self.assertAlmostEqual(0.90277, auc.eval(), delta=1e-3)

  def testAllIncorrect(self):
    # Complementary labels: the ROC curve is fully inverted, AUC 0.
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(1 - inputs, dtype=dtypes_lib.float32)
      auc, update_op = metrics.auc(labels, predictions)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0, sess.run(update_op))

      self.assertAlmostEqual(0, auc.eval())

  def testZeroTruePositivesAndFalseNegativesGivesOneAUC(self):
    # Degenerate case: no positive labels at all yields AUC 1.
    with self.test_session() as sess:
      predictions = array_ops.zeros([4], dtype=dtypes_lib.float32)
      labels = array_ops.zeros([4])
      auc, update_op = metrics.auc(labels, predictions)

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 6)

      self.assertAlmostEqual(1, auc.eval(), 6)

  def testRecallOneAndPrecisionOneGivesOnePRAUC(self):
    # All-positive labels with all-positive predictions: PR-AUC 1.
    with self.test_session() as sess:
      predictions = array_ops.ones([4], dtype=dtypes_lib.float32)
      labels = array_ops.ones([4])
      auc, update_op = metrics.auc(labels, predictions, curve='PR')

      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1, sess.run(update_op), 6)

      self.assertAlmostEqual(1, auc.eval(), 6)

  def np_auc(self, predictions, labels, weights):
    """Computes the AUC explicitly using Numpy.

    Args:
      predictions: an ndarray with shape [N].
      labels: an ndarray with shape [N].
      weights: an ndarray with shape [N], or None for uniform weights.

    Returns:
      the area under the ROC curve.
    """
    if weights is None:
      weights = np.ones(np.size(predictions))
    is_positive = labels > 0
    num_positives = np.sum(weights[is_positive])
    num_negatives = np.sum(weights[~is_positive])

    # Sort descending:
    inds = np.argsort(-predictions)

    sorted_labels = labels[inds]
    sorted_weights = weights[inds]
    is_positive = sorted_labels > 0

    # Cumulative true-positive rate at each threshold; summing it over the
    # negatives integrates the ROC curve.
    tp = np.cumsum(sorted_weights * is_positive) / num_positives
    return np.sum((sorted_weights * tp)[~is_positive]) / num_negatives

  def testWithMultipleUpdates(self):
    # Streams noisy predictions in batches and compares the streamed AUC
    # against the numpy reference, with and without random weights.
    num_samples = 1000
    batch_size = 10
    num_batches = int(num_samples / batch_size)

    # Create the labels and data.
    labels = np.random.randint(0, 2, size=num_samples)
    noise = np.random.normal(0.0, scale=0.2, size=num_samples)
    predictions = 0.4 + 0.2 * labels + noise
    predictions[predictions > 1] = 1
    predictions[predictions < 0] = 0

    def _enqueue_as_batches(x, enqueue_ops):
      # Reshapes x into batches and stages one enqueue op per batch so all
      # three streams (predictions/labels/weights) advance in lockstep.
      x_batches = x.astype(np.float32).reshape((num_batches, batch_size))
      x_queue = data_flow_ops.FIFOQueue(
          num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
      for i in range(num_batches):
        enqueue_ops[i].append(x_queue.enqueue(x_batches[i, :]))
      return x_queue.dequeue()

    for weights in (None, np.ones(num_samples), np.random.exponential(
        scale=1.0, size=num_samples)):
      expected_auc = self.np_auc(predictions, labels, weights)

      with self.test_session() as sess:
        enqueue_ops = [[] for i in range(num_batches)]
        tf_predictions = _enqueue_as_batches(predictions, enqueue_ops)
        tf_labels = _enqueue_as_batches(labels, enqueue_ops)
        tf_weights = (_enqueue_as_batches(weights, enqueue_ops) if
                      weights is not None else None)

        for i in range(num_batches):
          sess.run(enqueue_ops[i])

        auc, update_op = metrics.auc(tf_labels,
                                     tf_predictions,
                                     curve='ROC',
                                     num_thresholds=500,
                                     weights=tf_weights)

        sess.run(variables.local_variables_initializer())
        for i in range(num_batches):
          sess.run(update_op)

        # Since this is only approximate, we can't expect a 6 digits match.
        # Although with higher number of samples/thresholds we should see the
        # accuracy improving
        self.assertAlmostEqual(expected_auc, auc.eval(), 2)
class SpecificityAtSensitivityTest(test.TestCase):
  """Tests for metrics.specificity_at_sensitivity."""

  def setUp(self):
    # Deterministic RNG and a fresh default graph for every test.
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    """Creates the four expected confusion-matrix local variables."""
    metrics.specificity_at_sensitivity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        sensitivity=0.7)
    _assert_local_variables(self,
                            ('specificity_at_sensitivity/true_positives:0',
                             'specificity_at_sensitivity/false_negatives:0',
                             'specificity_at_sensitivity/false_positives:0',
                             'specificity_at_sensitivity/true_negatives:0'))

  def testMetricsCollection(self):
    """The value tensor is added to the requested metrics collection."""
    my_collection_name = '__metrics__'
    mean, _ = metrics.specificity_at_sensitivity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        sensitivity=0.7,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    """The update op is added to the requested updates collection."""
    my_collection_name = '__updates__'
    _, update_op = metrics.specificity_at_sensitivity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        sensitivity=0.7,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    """Evaluating the value tensor does not change accumulated state."""
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=2, dtype=dtypes_lib.int64, seed=1)
    specificity, update_op = metrics.specificity_at_sensitivity(
        labels, predictions, sensitivity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_specificity = specificity.eval()
      for _ in range(10):
        self.assertAlmostEqual(initial_specificity, specificity.eval(), 5)

  def testAllCorrect(self):
    """Predictions equal to labels yield specificity 1."""
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
    labels = constant_op.constant(inputs)
    specificity, update_op = metrics.specificity_at_sensitivity(
        labels, predictions, sensitivity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1, sess.run(update_op))
      self.assertEqual(1, specificity.eval())

  def testSomeCorrectHighSensitivity(self):
    """Hand-computed specificity at a high (0.8) sensitivity target."""
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.45, 0.5, 0.8, 0.9]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    specificity, update_op = metrics.specificity_at_sensitivity(
        labels, predictions, sensitivity=0.8)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(1.0, sess.run(update_op))
      self.assertAlmostEqual(1.0, specificity.eval())

  def testSomeCorrectLowSensitivity(self):
    """Hand-computed specificity at a low (0.4) sensitivity target."""
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    specificity, update_op = metrics.specificity_at_sensitivity(
        labels, predictions, sensitivity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.6, sess.run(update_op))
      self.assertAlmostEqual(0.6, specificity.eval())

  def testWeighted1d_multipleLabelDtypes(self):
    """A single uniform weight leaves the result unchanged (same 0.6 as the
    unweighted low-sensitivity case), for bool/int32/float32 labels."""
    for label_dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
      predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
      labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
      weights_values = [3]

      predictions = constant_op.constant(
          predictions_values, dtype=dtypes_lib.float32)
      labels = math_ops.cast(labels_values, dtype=label_dtype)
      weights = constant_op.constant(weights_values)
      specificity, update_op = metrics.specificity_at_sensitivity(
          labels, predictions, weights=weights, sensitivity=0.4)

      with self.test_session() as sess:
        sess.run(variables.local_variables_initializer())
        self.assertAlmostEqual(0.6, sess.run(update_op))
        self.assertAlmostEqual(0.6, specificity.eval())

  def testWeighted2d(self):
    """Per-example weights change the hand-computed specificity."""
    predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0, 0.1, 0.2, 0.2, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
    weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    weights = constant_op.constant(weights_values)
    specificity, update_op = metrics.specificity_at_sensitivity(
        labels, predictions, weights=weights, sensitivity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(8.0 / 15.0, sess.run(update_op))
      self.assertAlmostEqual(8.0 / 15.0, specificity.eval())
class SensitivityAtSpecificityTest(test.TestCase):
  """Tests for metrics.sensitivity_at_specificity.

  Several tests previously bound the metric's value tensor to a local named
  `specificity`, although sensitivity_at_specificity returns *sensitivity*
  at the requested specificity; the locals are renamed for consistency with
  testValueTensorIsIdempotent. No behavior change.
  """

  def setUp(self):
    # Deterministic RNG and a fresh default graph for every test.
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    """Creates the four expected confusion-matrix local variables."""
    metrics.sensitivity_at_specificity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        specificity=0.7)
    _assert_local_variables(self,
                            ('sensitivity_at_specificity/true_positives:0',
                             'sensitivity_at_specificity/false_negatives:0',
                             'sensitivity_at_specificity/false_positives:0',
                             'sensitivity_at_specificity/true_negatives:0'))

  def testMetricsCollection(self):
    """The value tensor is added to the requested metrics collection."""
    my_collection_name = '__metrics__'
    mean, _ = metrics.sensitivity_at_specificity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        specificity=0.7,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    """The update op is added to the requested updates collection."""
    my_collection_name = '__updates__'
    _, update_op = metrics.sensitivity_at_specificity(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        specificity=0.7,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    """Evaluating the value tensor does not change accumulated state."""
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=2, dtype=dtypes_lib.int64, seed=1)
    sensitivity, update_op = metrics.sensitivity_at_specificity(
        labels, predictions, specificity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_sensitivity = sensitivity.eval()
      for _ in range(10):
        self.assertAlmostEqual(initial_sensitivity, sensitivity.eval(), 5)

  def testAllCorrect(self):
    """Predictions equal to labels yield sensitivity 1."""
    inputs = np.random.randint(0, 2, size=(100, 1))

    predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
    labels = constant_op.constant(inputs)
    sensitivity, update_op = metrics.sensitivity_at_specificity(
        labels, predictions, specificity=0.7)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(1, sess.run(update_op))
      self.assertEqual(1, sensitivity.eval())

  def testSomeCorrectHighSpecificity(self):
    """Hand-computed sensitivity at a high (0.8) specificity target."""
    predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.1, 0.45, 0.5, 0.8, 0.9]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    sensitivity, update_op = metrics.sensitivity_at_specificity(
        labels, predictions, specificity=0.8)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.8, sess.run(update_op))
      self.assertAlmostEqual(0.8, sensitivity.eval())

  def testSomeCorrectLowSpecificity(self):
    """Hand-computed sensitivity at a low (0.4) specificity target."""
    predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
    labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]

    predictions = constant_op.constant(
        predictions_values, dtype=dtypes_lib.float32)
    labels = constant_op.constant(labels_values)
    sensitivity, update_op = metrics.sensitivity_at_specificity(
        labels, predictions, specificity=0.4)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertAlmostEqual(0.6, sess.run(update_op))
      self.assertAlmostEqual(0.6, sensitivity.eval())

  def testWeighted_multipleLabelDtypes(self):
    """Weighted sensitivity is identical for bool/int32/float32 labels."""
    for label_dtype in (dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
      predictions_values = [
          0.0, 0.1, 0.2, 0.3, 0.4, 0.01, 0.02, 0.25, 0.26, 0.26]
      labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
      weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]

      predictions = constant_op.constant(
          predictions_values, dtype=dtypes_lib.float32)
      labels = math_ops.cast(labels_values, dtype=label_dtype)
      weights = constant_op.constant(weights_values)
      sensitivity, update_op = metrics.sensitivity_at_specificity(
          labels, predictions, weights=weights, specificity=0.4)

      with self.test_session() as sess:
        sess.run(variables.local_variables_initializer())
        self.assertAlmostEqual(0.675, sess.run(update_op))
        self.assertAlmostEqual(0.675, sensitivity.eval())
# TODO(nsilberman): Break this up into two sets of tests.
class PrecisionRecallThresholdsTest(test.TestCase):
  """Tests for metrics.precision_at_thresholds and recall_at_thresholds."""

  def setUp(self):
    # Deterministic RNG and a fresh default graph for every test.
    np.random.seed(1)
    ops.reset_default_graph()

  def testVars(self):
    """precision_at_thresholds creates tp/fp local variables."""
    metrics.precision_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0])
    _assert_local_variables(self, (
        'precision_at_thresholds/true_positives:0',
        'precision_at_thresholds/false_positives:0',))

  def testMetricsCollection(self):
    """Both metrics' value tensors land in the requested collection."""
    my_collection_name = '__metrics__'
    prec, _ = metrics.precision_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        metrics_collections=[my_collection_name])
    rec, _ = metrics.recall_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [prec, rec])

  def testUpdatesCollection(self):
    """Both metrics' update ops land in the requested collection."""
    my_collection_name = '__updates__'
    _, precision_op = metrics.precision_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        updates_collections=[my_collection_name])
    _, recall_op = metrics.recall_at_thresholds(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        thresholds=[0, 0.5, 1.0],
        updates_collections=[my_collection_name])
    self.assertListEqual(
        ops.get_collection(my_collection_name), [precision_op, recall_op])

  def testValueTensorIsIdempotent(self):
    """Evaluating the value tensors does not change accumulated state."""
    predictions = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.float32, seed=1)
    labels = random_ops.random_uniform(
        (10, 3), maxval=1, dtype=dtypes_lib.int64, seed=1)
    thresholds = [0, 0.5, 1.0]
    prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
                                                    thresholds)
    rec, rec_op = metrics.recall_at_thresholds(labels, predictions, thresholds)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates, then verify idempotency.
      sess.run([prec_op, rec_op])
      initial_prec = prec.eval()
      initial_rec = rec.eval()
      for _ in range(10):
        sess.run([prec_op, rec_op])
        self.assertAllClose(initial_prec, prec.eval())
        self.assertAllClose(initial_rec, rec.eval())

  # TODO(nsilberman): fix tests (passing but incorrect).
  def testAllCorrect(self):
    """Predictions equal to labels: precision and recall are 1 at 0.5."""
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(inputs)
      thresholds = [0.5]
      prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
                                                      thresholds)
      rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
                                                 thresholds)

      sess.run(variables.local_variables_initializer())
      sess.run([prec_op, rec_op])

      self.assertEqual(1, prec.eval())
      self.assertEqual(1, rec.eval())

  def testSomeCorrect_multipleLabelDtypes(self):
    """Half-correct predictions give 0.5/0.5 for bool/int32/float32 labels."""
    with self.test_session() as sess:
      for label_dtype in (
          dtypes_lib.bool, dtypes_lib.int32, dtypes_lib.float32):
        predictions = constant_op.constant(
            [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
        labels = math_ops.cast(
            constant_op.constant([0, 1, 1, 0], shape=(1, 4)), dtype=label_dtype)
        thresholds = [0.5]
        prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
                                                        thresholds)
        rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
                                                   thresholds)

        sess.run(variables.local_variables_initializer())
        sess.run([prec_op, rec_op])

        self.assertAlmostEqual(0.5, prec.eval())
        self.assertAlmostEqual(0.5, rec.eval())

  def testAllIncorrect(self):
    """Inverted labels: precision and recall are both 0."""
    inputs = np.random.randint(0, 2, size=(100, 1))

    with self.test_session() as sess:
      predictions = constant_op.constant(inputs, dtype=dtypes_lib.float32)
      labels = constant_op.constant(1 - inputs, dtype=dtypes_lib.float32)
      thresholds = [0.5]
      prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
                                                      thresholds)
      rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
                                                 thresholds)

      sess.run(variables.local_variables_initializer())
      sess.run([prec_op, rec_op])

      self.assertAlmostEqual(0, prec.eval())
      self.assertAlmostEqual(0, rec.eval())

  def testWeights1d(self):
    """1d (per-row) weights zero out the first example; checks both the low
    (0.5) and unreachable-high (1.1) thresholds."""
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes_lib.float32)
      labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
      weights = constant_op.constant(
          [[0], [1]], shape=(2, 1), dtype=dtypes_lib.float32)
      thresholds = [0.5, 1.1]
      prec, prec_op = metrics.precision_at_thresholds(
          labels, predictions, thresholds, weights=weights)
      rec, rec_op = metrics.recall_at_thresholds(
          labels, predictions, thresholds, weights=weights)

      # Split the two-threshold result vectors into scalars for comparison.
      [prec_low, prec_high] = array_ops.split(
          value=prec, num_or_size_splits=2, axis=0)
      prec_low = array_ops.reshape(prec_low, shape=())
      prec_high = array_ops.reshape(prec_high, shape=())
      [rec_low, rec_high] = array_ops.split(
          value=rec, num_or_size_splits=2, axis=0)
      rec_low = array_ops.reshape(rec_low, shape=())
      rec_high = array_ops.reshape(rec_high, shape=())

      sess.run(variables.local_variables_initializer())
      sess.run([prec_op, rec_op])

      self.assertAlmostEqual(1.0, prec_low.eval(), places=5)
      self.assertAlmostEqual(0.0, prec_high.eval(), places=5)
      self.assertAlmostEqual(1.0, rec_low.eval(), places=5)
      self.assertAlmostEqual(0.0, rec_high.eval(), places=5)

  def testWeights2d(self):
    """2d (per-element) weights zero out the first row; same expectations as
    the equivalent 1d-weight case."""
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes_lib.float32)
      labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
      weights = constant_op.constant(
          [[0, 0], [1, 1]], shape=(2, 2), dtype=dtypes_lib.float32)
      thresholds = [0.5, 1.1]
      prec, prec_op = metrics.precision_at_thresholds(
          labels, predictions, thresholds, weights=weights)
      rec, rec_op = metrics.recall_at_thresholds(
          labels, predictions, thresholds, weights=weights)

      # Split the two-threshold result vectors into scalars for comparison.
      [prec_low, prec_high] = array_ops.split(
          value=prec, num_or_size_splits=2, axis=0)
      prec_low = array_ops.reshape(prec_low, shape=())
      prec_high = array_ops.reshape(prec_high, shape=())
      [rec_low, rec_high] = array_ops.split(
          value=rec, num_or_size_splits=2, axis=0)
      rec_low = array_ops.reshape(rec_low, shape=())
      rec_high = array_ops.reshape(rec_high, shape=())

      sess.run(variables.local_variables_initializer())
      sess.run([prec_op, rec_op])

      self.assertAlmostEqual(1.0, prec_low.eval(), places=5)
      self.assertAlmostEqual(0.0, prec_high.eval(), places=5)
      self.assertAlmostEqual(1.0, rec_low.eval(), places=5)
      self.assertAlmostEqual(0.0, rec_high.eval(), places=5)

  def testExtremeThresholds(self):
    """Thresholds below/above every prediction give all/none positives."""
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [1, 0, 1, 0], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant([0, 1, 1, 1], shape=(1, 4))
      thresholds = [-1.0, 2.0]  # lower/higher than any values
      prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
                                                      thresholds)
      rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
                                                 thresholds)

      [prec_low, prec_high] = array_ops.split(
          value=prec, num_or_size_splits=2, axis=0)
      [rec_low, rec_high] = array_ops.split(
          value=rec, num_or_size_splits=2, axis=0)

      sess.run(variables.local_variables_initializer())
      sess.run([prec_op, rec_op])

      self.assertAlmostEqual(0.75, prec_low.eval())
      self.assertAlmostEqual(0.0, prec_high.eval())
      self.assertAlmostEqual(1.0, rec_low.eval())
      self.assertAlmostEqual(0.0, rec_high.eval())

  def testZeroLabelsPredictions(self):
    """All-zero labels and predictions: both metrics are 0 (no positives)."""
    with self.test_session() as sess:
      predictions = array_ops.zeros([4], dtype=dtypes_lib.float32)
      labels = array_ops.zeros([4])
      thresholds = [0.5]
      prec, prec_op = metrics.precision_at_thresholds(labels, predictions,
                                                      thresholds)
      rec, rec_op = metrics.recall_at_thresholds(labels, predictions,
                                                 thresholds)

      sess.run(variables.local_variables_initializer())
      sess.run([prec_op, rec_op])

      self.assertAlmostEqual(0, prec.eval(), 6)
      self.assertAlmostEqual(0, rec.eval(), 6)

  def testWithMultipleUpdates(self):
    """Streams batches through queues and compares against a hand-counted
    confusion matrix at threshold 0.3."""
    num_samples = 1000
    batch_size = 10
    num_batches = int(num_samples / batch_size)

    # Create the labels and data.
    labels = np.random.randint(0, 2, size=(num_samples, 1))
    noise = np.random.normal(0.0, scale=0.2, size=(num_samples, 1))
    predictions = 0.4 + 0.2 * labels + noise
    predictions[predictions > 1] = 1
    predictions[predictions < 0] = 0
    thresholds = [0.3]

    # Count the confusion matrix by hand as the expected reference.
    tp = 0
    fp = 0
    fn = 0
    tn = 0
    for i in range(num_samples):
      if predictions[i] > thresholds[0]:
        if labels[i] == 1:
          tp += 1
        else:
          fp += 1
      else:
        if labels[i] == 1:
          fn += 1
        else:
          tn += 1
    # epsilon mirrors the metric implementation's divide-by-zero guard.
    epsilon = 1e-7
    expected_prec = tp / (epsilon + tp + fp)
    expected_rec = tp / (epsilon + tp + fn)

    labels = labels.astype(np.float32)
    predictions = predictions.astype(np.float32)

    with self.test_session() as sess:
      # Reshape the data so its easy to queue up:
      predictions_batches = predictions.reshape((batch_size, num_batches))
      labels_batches = labels.reshape((batch_size, num_batches))

      # Enqueue the data:
      predictions_queue = data_flow_ops.FIFOQueue(
          num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))
      labels_queue = data_flow_ops.FIFOQueue(
          num_batches, dtypes=dtypes_lib.float32, shapes=(batch_size,))

      for i in range(int(num_batches)):
        tf_prediction = constant_op.constant(predictions_batches[:, i])
        tf_label = constant_op.constant(labels_batches[:, i])
        sess.run([
            predictions_queue.enqueue(tf_prediction),
            labels_queue.enqueue(tf_label)
        ])

      tf_predictions = predictions_queue.dequeue()
      tf_labels = labels_queue.dequeue()

      prec, prec_op = metrics.precision_at_thresholds(tf_labels, tf_predictions,
                                                      thresholds)
      rec, rec_op = metrics.recall_at_thresholds(tf_labels, tf_predictions,
                                                 thresholds)

      sess.run(variables.local_variables_initializer())
      for _ in range(int(num_samples / batch_size)):
        sess.run([prec_op, rec_op])
      # Since this is only approximate, we can't expect a 6 digits match.
      # Although with higher number of samples/thresholds we should see the
      # accuracy improving
      self.assertAlmostEqual(expected_prec, prec.eval(), 2)
      self.assertAlmostEqual(expected_rec, rec.eval(), 2)
def _test_sparse_precision_at_k(predictions,
                                labels,
                                k,
                                expected,
                                class_id=None,
                                weights=None,
                                test_case=None):
  """Builds sparse_precision_at_k in a fresh graph and checks that both the
  update op and the value tensor equal `expected` (or are NaN)."""
  with ops.Graph().as_default() as g, test_case.test_session(g):
    weights_tensor = None
    if weights is not None:
      weights_tensor = constant_op.constant(weights, dtypes_lib.float32)
    metric, update = metrics.sparse_precision_at_k(
        predictions=constant_op.constant(predictions, dtypes_lib.float32),
        labels=labels,
        k=k,
        class_id=class_id,
        weights=weights_tensor)

    # Evaluating before variable initialization must raise.
    test_case.assertRaises(errors_impl.OpError, metric.eval)
    test_case.assertRaises(errors_impl.OpError, update.eval)
    variables.variables_initializer(variables.local_variables()).run()

    # Run per-step op and assert expected values.
    expect_nan = math.isnan(expected)
    for actual in (update.eval(), metric.eval()):
      if expect_nan:
        _assert_nan(test_case, actual)
      else:
        test_case.assertEqual(expected, actual)
def _test_sparse_average_precision_at_k(predictions,
                                        labels,
                                        k,
                                        expected,
                                        weights=None,
                                        test_case=None):
  """Builds sparse_average_precision_at_k in a fresh graph and checks both
  the update op and the value tensor against `expected` (or NaN)."""
  with ops.Graph().as_default() as g, test_case.test_session(g):
    weights_tensor = None
    if weights is not None:
      weights_tensor = constant_op.constant(weights, dtypes_lib.float32)
    predictions = constant_op.constant(predictions, dtypes_lib.float32)
    metric, update = metrics.sparse_average_precision_at_k(
        labels, predictions, k, weights=weights_tensor)

    # Evaluating before variable initialization must raise.
    test_case.assertRaises(errors_impl.OpError, metric.eval)
    test_case.assertRaises(errors_impl.OpError, update.eval)
    variables.variables_initializer(variables.local_variables()).run()

    # Run per-step op and assert expected values.
    expect_nan = math.isnan(expected)
    for actual in (update.eval(), metric.eval()):
      if expect_nan:
        _assert_nan(test_case, actual)
      else:
        test_case.assertAlmostEqual(expected, actual)
class SingleLabelSparsePrecisionTest(test.TestCase):
  """Precision@k tests where each example has exactly one label."""

  def setUp(self):
    self._predictions = ((0.1, 0.3, 0.2, 0.4), (0.1, 0.2, 0.3, 0.4))
    indicator_labels = ((0, 0, 0, 1), (0, 0, 1, 0))
    class_labels = (3, 2)
    # Sparse vs dense, and 1d vs 2d labels should all be handled the same.
    self._labels = (
        _binary_2d_label_to_1d_sparse_value(indicator_labels),
        _binary_2d_label_to_2d_sparse_value(indicator_labels), np.array(
            class_labels, dtype=np.int64), np.array(
                [[class_id] for class_id in class_labels], dtype=np.int64))
    # Bind the module-level helpers to this test case.
    self._test_sparse_precision_at_k = functools.partial(
        _test_sparse_precision_at_k, test_case=self)
    self._test_sparse_average_precision_at_k = functools.partial(
        _test_sparse_average_precision_at_k, test_case=self)

  def test_at_k1_nan(self):
    """Classes with no predictions (or out of range) yield NaN."""
    for labels in self._labels:
      # Classes 0,1,2 have 0 predictions, classes -1 and 4 are out of range.
      for class_id in (-1, 0, 1, 2, 4):
        self._test_sparse_precision_at_k(
            self._predictions, labels, k=1, expected=NAN, class_id=class_id)

  def test_at_k1(self):
    """Precision@1 per class and averaged over all classes."""
    for labels in self._labels:
      # Class 3: 1 label, 2 predictions, 1 correct.
      self._test_sparse_precision_at_k(
          self._predictions, labels, k=1, expected=1.0 / 2, class_id=3)

      # All classes: 2 labels, 2 predictions, 1 correct.
      self._test_sparse_precision_at_k(
          self._predictions, labels, k=1, expected=1.0 / 2)
class MultiLabelSparsePrecisionTest(test.TestCase):
  """Precision@k / average-precision@k tests with multiple labels per example."""

  def setUp(self):
    # Bind the module-level helpers to this test case.
    self._test_sparse_precision_at_k = functools.partial(
        _test_sparse_precision_at_k, test_case=self)
    self._test_sparse_average_precision_at_k = functools.partial(
        _test_sparse_average_precision_at_k, test_case=self)

  def test_average_precision(self):
    """Checks P@k and AP@k for two worked examples, their batch average,
    and a weighted average."""
    # Example 1.
    # Matches example here:
    # fastml.com/what-you-wanted-to-know-about-mean-average-precision
    labels_ex1 = (0, 1, 2, 3, 4)
    labels = np.array([labels_ex1], dtype=np.int64)
    predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
    predictions = (predictions_ex1,)
    precision_ex1 = (0.0 / 1, 1.0 / 2, 1.0 / 3, 2.0 / 4)
    avg_precision_ex1 = (0.0 / 1, precision_ex1[1] / 2, precision_ex1[1] / 3,
                         (precision_ex1[1] + precision_ex1[3]) / 4)
    for i in xrange(4):
      k = i + 1
      self._test_sparse_precision_at_k(
          predictions, labels, k, expected=precision_ex1[i])
      self._test_sparse_average_precision_at_k(
          predictions, labels, k, expected=avg_precision_ex1[i])

    # Example 2.
    labels_ex2 = (0, 2, 4, 5, 6)
    labels = np.array([labels_ex2], dtype=np.int64)
    predictions_ex2 = (0.3, 0.5, 0.0, 0.4, 0.0, 0.1, 0.2)
    predictions = (predictions_ex2,)
    precision_ex2 = (0.0 / 1, 0.0 / 2, 1.0 / 3, 2.0 / 4)
    avg_precision_ex2 = (0.0 / 1, 0.0 / 2, precision_ex2[2] / 3,
                         (precision_ex2[2] + precision_ex2[3]) / 4)
    for i in xrange(4):
      k = i + 1
      self._test_sparse_precision_at_k(
          predictions, labels, k, expected=precision_ex2[i])
      self._test_sparse_average_precision_at_k(
          predictions, labels, k, expected=avg_precision_ex2[i])

    # Both examples, we expect both precision and average precision to be the
    # average of the 2 examples.
    labels = np.array([labels_ex1, labels_ex2], dtype=np.int64)
    predictions = (predictions_ex1, predictions_ex2)
    streaming_precision = [(ex1 + ex2) / 2
                           for ex1, ex2 in zip(precision_ex1, precision_ex2)]
    streaming_average_precision = [
        (ex1 + ex2) / 2
        for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)
    ]
    for i in xrange(4):
      k = i + 1
      self._test_sparse_precision_at_k(
          predictions, labels, k, expected=streaming_precision[i])
      self._test_sparse_average_precision_at_k(
          predictions, labels, k, expected=streaming_average_precision[i])

    # Weighted examples, we expect streaming average precision to be the
    # weighted average of the 2 examples.
    weights = (0.3, 0.6)
    streaming_average_precision = [
        (weights[0] * ex1 + weights[1] * ex2) / (weights[0] + weights[1])
        for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)
    ]
    for i in xrange(4):
      k = i + 1
      self._test_sparse_average_precision_at_k(
          predictions,
          labels,
          k,
          expected=streaming_average_precision[i],
          weights=weights)

  def test_average_precision_some_labels_out_of_range(self):
    """Tests that labels outside the [0, n_classes) range are ignored."""
    labels_ex1 = (-1, 0, 1, 2, 3, 4, 7)
    labels = np.array([labels_ex1], dtype=np.int64)
    predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
    predictions = (predictions_ex1,)
    # Same expectations as test_average_precision example 1: out-of-range
    # labels do not change the result.
    precision_ex1 = (0.0 / 1, 1.0 / 2, 1.0 / 3, 2.0 / 4)
    avg_precision_ex1 = (0.0 / 1, precision_ex1[1] / 2, precision_ex1[1] / 3,
                         (precision_ex1[1] + precision_ex1[3]) / 4)
    for i in xrange(4):
      k = i + 1
      self._test_sparse_precision_at_k(
          predictions, labels, k, expected=precision_ex1[i])
      self._test_sparse_average_precision_at_k(
          predictions, labels, k, expected=avg_precision_ex1[i])

  def test_three_labels_at_k5_no_predictions(self):
    """Classes with no top-5 predictions (or out of range) yield NaN."""
    predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                   [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
    sparse_labels = _binary_2d_label_to_2d_sparse_value(
        [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
    dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)

    for labels in (sparse_labels, dense_labels):
      # Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range.
      for class_id in (-1, 1, 3, 8, 10):
        self._test_sparse_precision_at_k(
            predictions, labels, k=5, expected=NAN, class_id=class_id)

  def test_three_labels_at_k5_no_labels(self):
    """Classes with predictions but no labels have precision 0."""
    predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                   [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
    sparse_labels = _binary_2d_label_to_2d_sparse_value(
        [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
    dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)

    for labels in (sparse_labels, dense_labels):
      # Classes 0,4,6,9: 0 labels, >=1 prediction.
      for class_id in (0, 4, 6, 9):
        self._test_sparse_precision_at_k(
            predictions, labels, k=5, expected=0.0, class_id=class_id)

  def test_three_labels_at_k5(self):
    """Hand-computed precision@5 per class and over all classes."""
    predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                   [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
    sparse_labels = _binary_2d_label_to_2d_sparse_value(
        [[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
    dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)

    for labels in (sparse_labels, dense_labels):
      # Class 2: 2 labels, 2 correct predictions.
      self._test_sparse_precision_at_k(
          predictions, labels, k=5, expected=2.0 / 2, class_id=2)

      # Class 5: 1 label, 1 correct prediction.
      self._test_sparse_precision_at_k(
          predictions, labels, k=5, expected=1.0 / 1, class_id=5)

      # Class 7: 1 label, 1 incorrect prediction.
      self._test_sparse_precision_at_k(
          predictions, labels, k=5, expected=0.0 / 1, class_id=7)

      # All classes: 10 predictions, 3 correct.
      self._test_sparse_precision_at_k(
          predictions, labels, k=5, expected=3.0 / 10)

  def test_three_labels_at_k5_some_out_of_range(self):
    """Tests that labels outside the [0, n_classes) range are ignored."""
    predictions = [[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                   [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
    sp_labels = sparse_tensor.SparseTensorValue(
        indices=[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2],
                 [1, 3]],
        # values -1 and 10 are outside the [0, n_classes) range and are ignored.
        values=np.array([2, 7, -1, 8, 1, 2, 5, 10], np.int64),
        dense_shape=[2, 4])

    # Class 2: 2 labels, 2 correct predictions.
    self._test_sparse_precision_at_k(
        predictions, sp_labels, k=5, expected=2.0 / 2, class_id=2)

    # Class 5: 1 label, 1 correct prediction.
    self._test_sparse_precision_at_k(
        predictions, sp_labels, k=5, expected=1.0 / 1, class_id=5)

    # Class 7: 1 label, 1 incorrect prediction.
    self._test_sparse_precision_at_k(
        predictions, sp_labels, k=5, expected=0.0 / 1, class_id=7)

    # All classes: 10 predictions, 3 correct.
    self._test_sparse_precision_at_k(
        predictions, sp_labels, k=5, expected=3.0 / 10)

  def test_3d_nan(self):
    """3d inputs: classes without predictions (or out of range) yield NaN."""
    predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                    [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
                   [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
                    [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
    labels = _binary_3d_label_to_sparse_value(
        [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
         [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])

    # Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range.
    for class_id in (-1, 1, 3, 8, 10):
      self._test_sparse_precision_at_k(
          predictions, labels, k=5, expected=NAN, class_id=class_id)

  def test_3d_no_labels(self):
    """3d inputs: classes with predictions but no labels have precision 0."""
    predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                    [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
                   [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
                    [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
    labels = _binary_3d_label_to_sparse_value(
        [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
         [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])

    # Classes 0,4,6,9: 0 labels, >=1 prediction.
    for class_id in (0, 4, 6, 9):
      self._test_sparse_precision_at_k(
          predictions, labels, k=5, expected=0.0, class_id=class_id)

  def test_3d(self):
    """Hand-computed precision@5 for 3d inputs."""
    predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                    [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
                   [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
                    [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
    labels = _binary_3d_label_to_sparse_value(
        [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
         [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])

    # Class 2: 4 predictions, all correct.
    self._test_sparse_precision_at_k(
        predictions, labels, k=5, expected=4.0 / 4, class_id=2)

    # Class 5: 2 predictions, both correct.
    self._test_sparse_precision_at_k(
        predictions, labels, k=5, expected=2.0 / 2, class_id=5)

    # Class 7: 2 predictions, 1 correct.
    self._test_sparse_precision_at_k(
        predictions, labels, k=5, expected=1.0 / 2, class_id=7)

    # All classes: 20 predictions, 7 correct.
    self._test_sparse_precision_at_k(
        predictions, labels, k=5, expected=7.0 / 20)

  def test_3d_ignore_some(self):
    """Weights mask out rows/entries; precision is computed over the rest."""
    predictions = [[[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
                    [0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]],
                   [[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
                    [0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]]]
    labels = _binary_3d_label_to_sparse_value(
        [[[0, 0, 1, 0, 0, 0, 0, 1, 1, 0], [0, 1, 1, 0, 0, 1, 0, 0, 0, 0]],
         [[0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 1, 0]]])

    # Class 2: 2 predictions, both correct.
    self._test_sparse_precision_at_k(
        predictions,
        labels,
        k=5,
        expected=2.0 / 2.0,
        class_id=2,
        weights=[[1], [0]])

    # Class 2: 2 predictions, both correct.
    self._test_sparse_precision_at_k(
        predictions,
        labels,
        k=5,
        expected=2.0 / 2.0,
        class_id=2,
        weights=[[0], [1]])

    # Class 7: 1 incorrect prediction.
    self._test_sparse_precision_at_k(
        predictions,
        labels,
        k=5,
        expected=0.0 / 1.0,
        class_id=7,
        weights=[[1], [0]])

    # Class 7: 1 correct prediction.
    self._test_sparse_precision_at_k(
        predictions,
        labels,
        k=5,
        expected=1.0 / 1.0,
        class_id=7,
        weights=[[0], [1]])

    # Class 7: no predictions.
    self._test_sparse_precision_at_k(
        predictions,
        labels,
        k=5,
        expected=NAN,
        class_id=7,
        weights=[[1, 0], [0, 1]])

    # Class 7: 2 predictions, 1 correct.
    self._test_sparse_precision_at_k(
        predictions,
        labels,
        k=5,
        expected=1.0 / 2.0,
        class_id=7,
        weights=[[0, 1], [1, 0]])
def _test_recall_at_k(predictions,
                      labels,
                      k,
                      expected,
                      class_id=None,
                      weights=None,
                      test_case=None):
  """Builds recall_at_k in a fresh graph and checks that both the update op
  and the value tensor equal `expected` (or are NaN)."""
  with ops.Graph().as_default() as g, test_case.test_session(g):
    weights_tensor = None
    if weights is not None:
      weights_tensor = constant_op.constant(weights, dtypes_lib.float32)
    metric, update = metrics.recall_at_k(
        predictions=constant_op.constant(predictions, dtypes_lib.float32),
        labels=labels,
        k=k,
        class_id=class_id,
        weights=weights_tensor)

    # Evaluating before variable initialization must raise.
    test_case.assertRaises(errors_impl.OpError, metric.eval)
    test_case.assertRaises(errors_impl.OpError, update.eval)
    variables.variables_initializer(variables.local_variables()).run()

    # Run per-step op and assert expected values.
    expect_nan = math.isnan(expected)
    for actual in (update.eval(), metric.eval()):
      if expect_nan:
        _assert_nan(test_case, actual)
      else:
        test_case.assertEqual(expected, actual)
class SingleLabelRecallAtKTest(test.TestCase):
  """Tests `recall_at_k` where every example has exactly one label."""

  def setUp(self):
    self._predictions = ((0.1, 0.3, 0.2, 0.4), (0.1, 0.2, 0.3, 0.4))
    indicator_labels = ((0, 0, 0, 1), (0, 0, 1, 0))
    class_labels = (3, 2)
    # Sparse vs dense, and 1d vs 2d labels should all be handled the same.
    self._labels = (
        _binary_2d_label_to_1d_sparse_value(indicator_labels),
        _binary_2d_label_to_2d_sparse_value(indicator_labels), np.array(
            class_labels, dtype=np.int64), np.array(
                [[class_id] for class_id in class_labels], dtype=np.int64))
    # Bind this test case into the module-level helper once, so each test can
    # call it without repeating `test_case=self`.
    self._test_recall_at_k = functools.partial(
        _test_recall_at_k, test_case=self)

  def test_at_k1_nan(self):
    # Classes 0,1 have 0 labels, 0 predictions, classes -1 and 4 are out of
    # range.
    for labels in self._labels:
      for class_id in (-1, 0, 1, 4):
        self._test_recall_at_k(
            self._predictions, labels, k=1, expected=NAN, class_id=class_id)

  def test_at_k1_no_predictions(self):
    for labels in self._labels:
      # Class 2: 0 predictions.
      self._test_recall_at_k(
          self._predictions, labels, k=1, expected=0.0, class_id=2)

  def test_one_label_at_k1(self):
    for labels in self._labels:
      # Class 3: 1 label, 2 predictions, 1 correct.
      self._test_recall_at_k(
          self._predictions, labels, k=1, expected=1.0 / 1, class_id=3)

      # All classes: 2 labels, 2 predictions, 1 correct.
      self._test_recall_at_k(self._predictions, labels, k=1, expected=1.0 / 2)

  def test_one_label_at_k1_weighted(self):
    predictions = self._predictions
    for labels in self._labels:
      # Class 3: 1 label, 2 predictions, 1 correct.
      # A weight of 0 removes the only labeled example, making the metric NaN;
      # otherwise only the first example (which has the class-3 label) counts.
      self._test_recall_at_k(
          predictions, labels, k=1, expected=NAN, class_id=3, weights=(0.0,))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=1.0 / 1,
          class_id=3,
          weights=(1.0,))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=1.0 / 1,
          class_id=3,
          weights=(2.0,))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=NAN,
          class_id=3,
          weights=(0.0, 0.0))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=NAN,
          class_id=3,
          weights=(0.0, 1.0))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=1.0 / 1,
          class_id=3,
          weights=(1.0, 0.0))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=1.0 / 1,
          class_id=3,
          weights=(1.0, 1.0))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=2.0 / 2,
          class_id=3,
          weights=(2.0, 3.0))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=3.0 / 3,
          class_id=3,
          weights=(3.0, 2.0))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=0.3 / 0.3,
          class_id=3,
          weights=(0.3, 0.6))
      self._test_recall_at_k(
          predictions,
          labels,
          k=1,
          expected=0.6 / 0.6,
          class_id=3,
          weights=(0.6, 0.3))

      # All classes: 2 labels, 2 predictions, 1 correct.
      # Example 1 (label 3) is the correct one, so recall is (weight of
      # example 1) / (total weight).
      self._test_recall_at_k(
          predictions, labels, k=1, expected=NAN, weights=(0.0,))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=1.0 / 2, weights=(1.0,))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=1.0 / 2, weights=(2.0,))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=1.0 / 1, weights=(1.0, 0.0))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=0.0 / 1, weights=(0.0, 1.0))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=1.0 / 2, weights=(1.0, 1.0))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=2.0 / 5, weights=(2.0, 3.0))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=3.0 / 5, weights=(3.0, 2.0))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=0.3 / 0.9, weights=(0.3, 0.6))
      self._test_recall_at_k(
          predictions, labels, k=1, expected=0.6 / 0.9, weights=(0.6, 0.3))
class MultiLabel2dRecallAtKTest(test.TestCase):
  """Tests `recall_at_k` with multiple labels per example (2-D labels)."""

  def setUp(self):
    self._predictions = ((0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9),
                         (0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6))
    indicator_labels = ((0, 0, 1, 0, 0, 0, 0, 1, 1, 0),
                        (0, 1, 1, 0, 0, 1, 0, 0, 0, 0))
    class_labels = ((2, 7, 8), (1, 2, 5))
    # Sparse vs dense labels should be handled the same.
    self._labels = (_binary_2d_label_to_2d_sparse_value(indicator_labels),
                    np.array(
                        class_labels, dtype=np.int64))
    self._test_recall_at_k = functools.partial(
        _test_recall_at_k, test_case=self)

  def test_at_k5_nan(self):
    for labels in self._labels:
      # Classes 0,3,4,6,9 have 0 labels, class 10 is out of range.
      for class_id in (0, 3, 4, 6, 9, 10):
        self._test_recall_at_k(
            self._predictions, labels, k=5, expected=NAN, class_id=class_id)

  def test_at_k5_no_predictions(self):
    for labels in self._labels:
      # Class 8: 1 label, no predictions.
      self._test_recall_at_k(
          self._predictions, labels, k=5, expected=0.0 / 1, class_id=8)

  def test_at_k5(self):
    for labels in self._labels:
      # Class 2: 2 labels, both correct.
      self._test_recall_at_k(
          self._predictions, labels, k=5, expected=2.0 / 2, class_id=2)

      # Class 5: 1 label, correct (index 5 scores 0.9 in example 2's top 5).
      self._test_recall_at_k(
          self._predictions, labels, k=5, expected=1.0 / 1, class_id=5)

      # Class 7: 1 label, incorrect.
      self._test_recall_at_k(
          self._predictions, labels, k=5, expected=0.0 / 1, class_id=7)

      # All classes: 6 labels, 3 correct.
      self._test_recall_at_k(self._predictions, labels, k=5, expected=3.0 / 6)

  def test_at_k5_some_out_of_range(self):
    """Tests that labels outside the [0, n_classes) count in denominator."""
    labels = sparse_tensor.SparseTensorValue(
        indices=[[0, 0], [0, 1], [0, 2], [0, 3], [1, 0], [1, 1], [1, 2],
                 [1, 3]],
        # values -1 and 10 are outside the [0, n_classes) range.
        values=np.array([2, 7, -1, 8, 1, 2, 5, 10], np.int64),
        dense_shape=[2, 4])

    # Class 2: 2 labels, both correct.
    self._test_recall_at_k(
        self._predictions, labels, k=5, expected=2.0 / 2, class_id=2)

    # Class 5: 1 label, correct.
    self._test_recall_at_k(
        self._predictions, labels, k=5, expected=1.0 / 1, class_id=5)

    # Class 7: 1 label, incorrect.
    self._test_recall_at_k(
        self._predictions, labels, k=5, expected=0.0 / 1, class_id=7)

    # All classes: 8 labels, 3 correct.
    self._test_recall_at_k(self._predictions, labels, k=5, expected=3.0 / 8)
class MultiLabel3dRecallAtKTest(test.TestCase):
  """Tests `recall_at_k` with 3-D predictions (batch of sequences)."""

  def setUp(self):
    self._predictions = (((0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9),
                          (0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6)),
                         ((0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6),
                          (0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9)))
    # Note: We don't test dense labels here, since examples have different
    # numbers of labels.
    self._labels = _binary_3d_label_to_sparse_value(((
        (0, 0, 1, 0, 0, 0, 0, 1, 1, 0), (0, 1, 1, 0, 0, 1, 0, 0, 0, 0)), (
            (0, 1, 1, 0, 0, 1, 0, 1, 0, 0), (0, 0, 1, 0, 0, 0, 0, 0, 1, 0))))
    self._test_recall_at_k = functools.partial(
        _test_recall_at_k, test_case=self)

  def test_3d_nan(self):
    # Classes 0,3,4,6,9 have 0 labels, class 10 is out of range.
    for class_id in (0, 3, 4, 6, 9, 10):
      self._test_recall_at_k(
          self._predictions, self._labels, k=5, expected=NAN, class_id=class_id)

  def test_3d_no_predictions(self):
    # Classes 1,8 have 0 predictions, >=1 label.
    for class_id in (1, 8):
      self._test_recall_at_k(
          self._predictions, self._labels, k=5, expected=0.0, class_id=class_id)

  def test_3d(self):
    # Class 2: 4 labels, all correct.
    self._test_recall_at_k(
        self._predictions, self._labels, k=5, expected=4.0 / 4, class_id=2)

    # Class 5: 2 labels, both correct.
    self._test_recall_at_k(
        self._predictions, self._labels, k=5, expected=2.0 / 2, class_id=5)

    # Class 7: 2 labels, 1 incorrect.
    self._test_recall_at_k(
        self._predictions, self._labels, k=5, expected=1.0 / 2, class_id=7)

    # All classes: 12 labels, 7 correct.
    self._test_recall_at_k(
        self._predictions, self._labels, k=5, expected=7.0 / 12)

  def test_3d_ignore_all(self):
    # Zero weights everywhere leave no labels in the denominator, so the
    # metric is undefined (NaN) for every class and overall.
    for class_id in xrange(10):
      self._test_recall_at_k(
          self._predictions,
          self._labels,
          k=5,
          expected=NAN,
          class_id=class_id,
          weights=[[0], [0]])
      self._test_recall_at_k(
          self._predictions,
          self._labels,
          k=5,
          expected=NAN,
          class_id=class_id,
          weights=[[0, 0], [0, 0]])
    self._test_recall_at_k(
        self._predictions, self._labels, k=5, expected=NAN, weights=[[0], [0]])
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=NAN,
        weights=[[0, 0], [0, 0]])

  def test_3d_ignore_some(self):
    # Per-batch weights select which of the two batch entries contribute.
    # Class 2: 2 labels, both correct.
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=2.0 / 2.0,
        class_id=2,
        weights=[[1], [0]])

    # Class 2: 2 labels, both correct.
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=2.0 / 2.0,
        class_id=2,
        weights=[[0], [1]])

    # Class 7: 1 label, correct.
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=1.0 / 1.0,
        class_id=7,
        weights=[[0], [1]])

    # Class 7: 1 label, incorrect.
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=0.0 / 1.0,
        class_id=7,
        weights=[[1], [0]])

    # Class 7: 2 labels, 1 correct.
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=1.0 / 2.0,
        class_id=7,
        weights=[[1, 0], [1, 0]])

    # Class 7: No labels.
    self._test_recall_at_k(
        self._predictions,
        self._labels,
        k=5,
        expected=NAN,
        class_id=7,
        weights=[[0, 1], [0, 1]])
class MeanAbsoluteErrorTest(test.TestCase):
  """Tests for `metrics.mean_absolute_error`."""

  def setUp(self):
    # Each test constructs its own ops; start from an empty default graph.
    ops.reset_default_graph()

  def testVars(self):
    # The metric should create exactly these local (streaming) variables.
    metrics.mean_absolute_error(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('mean_absolute_error/count:0',
                                   'mean_absolute_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.mean_absolute_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.mean_absolute_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.mean_absolute_error(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency: evaluating the value tensor must not mutate
      # the accumulated state.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateWithErrorAndWeights(self):
    predictions = constant_op.constant(
        [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
    weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))

    error, update_op = metrics.mean_absolute_error(labels, predictions, weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # Weighted: (|4-3| + |8-3|) / 2 = 3.
      self.assertEqual(3, sess.run(update_op))
      self.assertEqual(3, error.eval())
class MeanRelativeErrorTest(test.TestCase):
  """Tests for `metrics.mean_relative_error`."""

  def setUp(self):
    # Each test constructs its own ops; start from an empty default graph.
    ops.reset_default_graph()

  def testVars(self):
    metrics.mean_relative_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        normalizer=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('mean_relative_error/count:0',
                                   'mean_relative_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.mean_relative_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        normalizer=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.mean_relative_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        normalizer=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    normalizer = random_ops.random_normal((10, 3), seed=3)
    error, update_op = metrics.mean_relative_error(labels, predictions,
                                                   normalizer)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateNormalizedByLabels(self):
    np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)
    np_labels = np.asarray([1, 3, 2, 3], dtype=np.float32)
    # Compute the reference value with numpy: mean(|pred - label| / label).
    expected_error = np.mean(
        np.divide(np.absolute(np_predictions - np_labels), np_labels))

    predictions = constant_op.constant(
        np_predictions, shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(np_labels, shape=(1, 4))

    error, update_op = metrics.mean_relative_error(
        labels, predictions, normalizer=labels)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(expected_error, sess.run(update_op))
      self.assertEqual(expected_error, error.eval())

  def testSingleUpdateNormalizedByZeros(self):
    np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)

    predictions = constant_op.constant(
        np_predictions, shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_relative_error(
        labels, predictions, normalizer=array_ops.zeros_like(labels))

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # A zero normalizer is defined to yield zero error, not inf/NaN.
      self.assertEqual(0.0, sess.run(update_op))
      self.assertEqual(0.0, error.eval())
class MeanSquaredErrorTest(test.TestCase):
  """Tests for `metrics.mean_squared_error`."""

  def setUp(self):
    # Each test constructs its own ops; start from an empty default graph.
    ops.reset_default_graph()

  def testVars(self):
    metrics.mean_squared_error(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('mean_squared_error/count:0',
                                   'mean_squared_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.mean_squared_error(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateZeroError(self):
    predictions = array_ops.zeros((1, 3), dtype=dtypes_lib.float32)
    labels = array_ops.zeros((1, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_squared_error(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, error.eval())

  def testSingleUpdateWithError(self):
    predictions = constant_op.constant(
        [2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_squared_error(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # (1 + 1 + 16) / 3 = 6.
      self.assertEqual(6, sess.run(update_op))
      self.assertEqual(6, error.eval())

  def testSingleUpdateWithErrorAndWeights(self):
    predictions = constant_op.constant(
        [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
    weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))

    error, update_op = metrics.mean_squared_error(labels, predictions, weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # Weighted: (1 + 25) / 2 = 13.
      self.assertEqual(13, sess.run(update_op))
      self.assertEqual(13, error.eval())

  def testMultipleBatchesOfSizeOne(self):
    with self.test_session() as sess:
      # Create the queue that populates the predictions.
      preds_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue, [10, 8, 6])
      _enqueue_vector(sess, preds_queue, [-4, 3, -1])
      predictions = preds_queue.dequeue()

      # Create the queue that populates the labels.
      labels_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue, [1, 3, 2])
      _enqueue_vector(sess, labels_queue, [2, 4, 6])
      labels = labels_queue.dequeue()

      error, update_op = metrics.mean_squared_error(labels, predictions)

      sess.run(variables.local_variables_initializer())
      # Two updates consume both queued batches; the second update's return
      # value already reflects the full running mean.
      sess.run(update_op)
      self.assertAlmostEqual(208.0 / 6, sess.run(update_op), 5)
      self.assertAlmostEqual(208.0 / 6, error.eval(), 5)

  def testMetricsComputedConcurrently(self):
    with self.test_session() as sess:
      # Create the queue that populates one set of predictions.
      preds_queue0 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue0, [10, 8, 6])
      _enqueue_vector(sess, preds_queue0, [-4, 3, -1])
      predictions0 = preds_queue0.dequeue()

      # Create the queue that populates one set of predictions.
      preds_queue1 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue1, [0, 1, 1])
      _enqueue_vector(sess, preds_queue1, [1, 1, 0])
      predictions1 = preds_queue1.dequeue()

      # Create the queue that populates one set of labels.
      labels_queue0 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue0, [1, 3, 2])
      _enqueue_vector(sess, labels_queue0, [2, 4, 6])
      labels0 = labels_queue0.dequeue()

      # Create the queue that populates another set of labels.
      labels_queue1 = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue1, [-5, -3, -1])
      _enqueue_vector(sess, labels_queue1, [5, 4, 3])
      labels1 = labels_queue1.dequeue()

      # Two independent metrics must keep separate state (distinct names).
      mse0, update_op0 = metrics.mean_squared_error(
          labels0, predictions0, name='msd0')
      mse1, update_op1 = metrics.mean_squared_error(
          labels1, predictions1, name='msd1')

      sess.run(variables.local_variables_initializer())
      sess.run([update_op0, update_op1])
      sess.run([update_op0, update_op1])

      mse0, mse1 = sess.run([mse0, mse1])
      self.assertAlmostEqual(208.0 / 6, mse0, 5)
      self.assertAlmostEqual(79.0 / 6, mse1, 5)

  def testMultipleMetricsOnMultipleBatchesOfSizeOne(self):
    with self.test_session() as sess:
      # Create the queue that populates the predictions.
      preds_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, preds_queue, [10, 8, 6])
      _enqueue_vector(sess, preds_queue, [-4, 3, -1])
      predictions = preds_queue.dequeue()

      # Create the queue that populates the labels.
      labels_queue = data_flow_ops.FIFOQueue(
          2, dtypes=dtypes_lib.float32, shapes=(1, 3))
      _enqueue_vector(sess, labels_queue, [1, 3, 2])
      _enqueue_vector(sess, labels_queue, [2, 4, 6])
      labels = labels_queue.dequeue()

      # Both metrics share the same dequeued tensors, so each update step
      # feeds one batch into both accumulators simultaneously.
      mae, ma_update_op = metrics.mean_absolute_error(labels, predictions)
      mse, ms_update_op = metrics.mean_squared_error(labels, predictions)

      sess.run(variables.local_variables_initializer())
      sess.run([ma_update_op, ms_update_op])
      sess.run([ma_update_op, ms_update_op])

      self.assertAlmostEqual(32.0 / 6, mae.eval(), 5)
      self.assertAlmostEqual(208.0 / 6, mse.eval(), 5)
class RootMeanSquaredErrorTest(test.TestCase):
  """Tests for `metrics.root_mean_squared_error`."""

  def setUp(self):
    # Each test constructs its own ops; start from an empty default graph.
    ops.reset_default_graph()

  def testVars(self):
    metrics.root_mean_squared_error(
        predictions=array_ops.ones((10, 1)), labels=array_ops.ones((10, 1)))
    _assert_local_variables(self, ('root_mean_squared_error/count:0',
                                   'root_mean_squared_error/total:0'))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.root_mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.root_mean_squared_error(
        predictions=array_ops.ones((10, 1)),
        labels=array_ops.ones((10, 1)),
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.root_mean_squared_error(labels, predictions)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateZeroError(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          0.0, shape=(1, 3), dtype=dtypes_lib.float32)
      labels = constant_op.constant(0.0, shape=(1, 3), dtype=dtypes_lib.float32)

      rmse, update_op = metrics.root_mean_squared_error(labels, predictions)

      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, rmse.eval())

  def testSingleUpdateWithError(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6], shape=(1, 3), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2], shape=(1, 3), dtype=dtypes_lib.float32)

      rmse, update_op = metrics.root_mean_squared_error(labels, predictions)

      sess.run(variables.local_variables_initializer())
      # sqrt((1 + 1 + 16) / 3) = sqrt(6).
      self.assertAlmostEqual(math.sqrt(6), update_op.eval(), 5)
      self.assertAlmostEqual(math.sqrt(6), rmse.eval(), 5)

  def testSingleUpdateWithErrorAndWeights(self):
    with self.test_session() as sess:
      predictions = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
      labels = constant_op.constant(
          [1, 3, 2, 3], shape=(1, 4), dtype=dtypes_lib.float32)
      weights = constant_op.constant([0, 1, 0, 1], shape=(1, 4))

      rmse, update_op = metrics.root_mean_squared_error(labels, predictions,
                                                        weights)

      sess.run(variables.local_variables_initializer())
      # Weighted MSE is (1 + 25) / 2 = 13, so RMSE is sqrt(13).
      self.assertAlmostEqual(math.sqrt(13), sess.run(update_op))
      self.assertAlmostEqual(math.sqrt(13), rmse.eval(), 5)
def _reweight(predictions, labels, weights):
return (np.concatenate([[p] * int(w) for p, w in zip(predictions, weights)]),
np.concatenate([[l] * int(w) for l, w in zip(labels, weights)]))
class MeanCosineDistanceTest(test.TestCase):
  """Tests for `metrics.mean_cosine_distance` (distance = 1 - cosine)."""

  def setUp(self):
    # Each test constructs its own ops; start from an empty default graph.
    ops.reset_default_graph()

  def testVars(self):
    metrics.mean_cosine_distance(
        predictions=array_ops.ones((10, 3)),
        labels=array_ops.ones((10, 3)),
        dim=1)
    _assert_local_variables(self, (
        'mean_cosine_distance/count:0',
        'mean_cosine_distance/total:0',))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.mean_cosine_distance(
        predictions=array_ops.ones((10, 3)),
        labels=array_ops.ones((10, 3)),
        dim=1,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.mean_cosine_distance(
        predictions=array_ops.ones((10, 3)),
        labels=array_ops.ones((10, 3)),
        dim=1,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testValueTensorIsIdempotent(self):
    predictions = random_ops.random_normal((10, 3), seed=1)
    labels = random_ops.random_normal((10, 3), seed=2)
    error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=1)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())

      # Run several updates.
      for _ in range(10):
        sess.run(update_op)

      # Then verify idempotency.
      initial_error = error.eval()
      for _ in range(10):
        self.assertEqual(initial_error, error.eval())

  def testSingleUpdateZeroError(self):
    # Identical unit vectors have cosine 1, hence distance 0.
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))

    predictions = constant_op.constant(
        np_labels, shape=(1, 3, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(1, 3, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, error.eval())

  def testSingleUpdateWithError1(self):
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))
    np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # Per-row distances are 0, 2 and 1; mean is 1.
      self.assertAlmostEqual(1, sess.run(update_op), 5)
      self.assertAlmostEqual(1, error.eval(), 5)

  def testSingleUpdateWithError2(self):
    np_predictions = np.matrix(
        ('0.819031913261206 0.567041924552012 0.087465312324590;'
         '-0.665139432070255 -0.739487441769973 -0.103671883216994;'
         '0.707106781186548 -0.707106781186548 0'))
    np_labels = np.matrix(
        ('0.819031913261206 0.567041924552012 0.087465312324590;'
         '0.665139432070255 0.739487441769973 0.103671883216994;'
         '0.707106781186548 0.707106781186548 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    error, update_op = metrics.mean_cosine_distance(labels, predictions, dim=2)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # Rows: identical (0), opposite (2), orthogonal (1); mean is 1.
      self.assertAlmostEqual(1.0, sess.run(update_op), 5)
      self.assertAlmostEqual(1.0, error.eval(), 5)

  def testSingleUpdateWithErrorAndWeights1(self):
    np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    weights = constant_op.constant(
        [1, 0, 0], shape=(3, 1, 1), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_cosine_distance(
        labels, predictions, dim=2, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # Only the first (matching) row is weighted, so the distance is 0.
      self.assertEqual(0, sess.run(update_op))
      self.assertEqual(0, error.eval())

  def testSingleUpdateWithErrorAndWeights2(self):
    np_predictions = np.matrix(('1 0 0;' '0 0 -1;' '1 0 0'))
    np_labels = np.matrix(('1 0 0;' '0 0 1;' '0 1 0'))

    predictions = constant_op.constant(
        np_predictions, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    labels = constant_op.constant(
        np_labels, shape=(3, 1, 3), dtype=dtypes_lib.float32)
    weights = constant_op.constant(
        [0, 1, 1], shape=(3, 1, 1), dtype=dtypes_lib.float32)

    error, update_op = metrics.mean_cosine_distance(
        labels, predictions, dim=2, weights=weights)

    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      # Weighted rows have distances 2 (opposite) and 1 (orthogonal):
      # (2 + 1) / 2 = 1.5.
      self.assertEqual(1.5, update_op.eval())
      self.assertEqual(1.5, error.eval())
class PcntBelowThreshTest(test.TestCase):
  """Tests for `metrics.percentage_below`."""

  def setUp(self):
    # Each test constructs its own ops; start from an empty default graph.
    ops.reset_default_graph()

  def testVars(self):
    metrics.percentage_below(values=array_ops.ones((10,)), threshold=2)
    _assert_local_variables(self, (
        'percentage_below_threshold/count:0',
        'percentage_below_threshold/total:0',))

  def testMetricsCollection(self):
    my_collection_name = '__metrics__'
    mean, _ = metrics.percentage_below(
        values=array_ops.ones((10,)),
        threshold=2,
        metrics_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [mean])

  def testUpdatesCollection(self):
    my_collection_name = '__updates__'
    _, update_op = metrics.percentage_below(
        values=array_ops.ones((10,)),
        threshold=2,
        updates_collections=[my_collection_name])
    self.assertListEqual(ops.get_collection(my_collection_name), [update_op])

  def testOneUpdate(self):
    with self.test_session() as sess:
      values = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)

      # Thresholds chosen so all, some, and none of the values qualify.
      pcnt0, update_op0 = metrics.percentage_below(values, 100, name='high')
      pcnt1, update_op1 = metrics.percentage_below(values, 7, name='medium')
      pcnt2, update_op2 = metrics.percentage_below(values, 1, name='low')

      sess.run(variables.local_variables_initializer())
      sess.run([update_op0, update_op1, update_op2])

      pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
      self.assertAlmostEqual(1.0, pcnt0, 5)
      self.assertAlmostEqual(0.75, pcnt1, 5)
      self.assertAlmostEqual(0.0, pcnt2, 5)

  def testSomePresentOneUpdate(self):
    with self.test_session() as sess:
      values = constant_op.constant(
          [2, 4, 6, 8], shape=(1, 4), dtype=dtypes_lib.float32)
      # Only the first and last values carry weight.
      weights = constant_op.constant(
          [1, 0, 0, 1], shape=(1, 4), dtype=dtypes_lib.float32)

      pcnt0, update_op0 = metrics.percentage_below(
          values, 100, weights=weights, name='high')
      pcnt1, update_op1 = metrics.percentage_below(
          values, 7, weights=weights, name='medium')
      pcnt2, update_op2 = metrics.percentage_below(
          values, 1, weights=weights, name='low')

      sess.run(variables.local_variables_initializer())
      self.assertListEqual([1.0, 0.5, 0.0],
                           sess.run([update_op0, update_op1, update_op2]))

      pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
      self.assertAlmostEqual(1.0, pcnt0, 5)
      self.assertAlmostEqual(0.5, pcnt1, 5)
      self.assertAlmostEqual(0.0, pcnt2, 5)
class MeanIOUTest(test.TestCase):
def setUp(self):
  # Seed numpy so any random-based fixtures are reproducible, and start
  # each test from an empty default graph.
  np.random.seed(1)
  ops.reset_default_graph()
def testVars(self):
  # mean_iou accumulates a confusion matrix as its only local variable.
  metrics.mean_iou(
      predictions=array_ops.ones([10, 1]),
      labels=array_ops.ones([10, 1]),
      num_classes=2)
  _assert_local_variables(self, ('mean_iou/total_confusion_matrix:0',))
def testMetricsCollections(self):
  # The value tensor should be registered in the requested collection.
  my_collection_name = '__metrics__'
  mean_iou, _ = metrics.mean_iou(
      predictions=array_ops.ones([10, 1]),
      labels=array_ops.ones([10, 1]),
      num_classes=2,
      metrics_collections=[my_collection_name])
  self.assertListEqual(ops.get_collection(my_collection_name), [mean_iou])
def testUpdatesCollection(self):
  # The update op should be registered in the requested collection.
  my_collection_name = '__updates__'
  _, update_op = metrics.mean_iou(
      predictions=array_ops.ones([10, 1]),
      labels=array_ops.ones([10, 1]),
      num_classes=2,
      updates_collections=[my_collection_name])
  self.assertListEqual(ops.get_collection(my_collection_name), [update_op])
def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
  # Shape mismatch between predictions and labels must fail at graph
  # construction time, not at run time.
  predictions = array_ops.ones([10, 3])
  labels = array_ops.ones([10, 4])
  with self.assertRaises(ValueError):
    metrics.mean_iou(labels, predictions, num_classes=2)
def testLabelsAndWeightsOfDifferentSizeRaisesValueError(self):
  # Weights must be broadcast-compatible with labels; a mismatched size
  # must fail at graph construction time.
  predictions = array_ops.ones([10])
  labels = array_ops.ones([10])
  weights = array_ops.zeros([9])
  with self.assertRaises(ValueError):
    metrics.mean_iou(labels, predictions, num_classes=2, weights=weights)
def testValueTensorIsIdempotent(self):
  num_classes = 3
  predictions = random_ops.random_uniform(
      [10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1)
  labels = random_ops.random_uniform(
      [10], maxval=num_classes, dtype=dtypes_lib.int64, seed=1)
  mean_iou, update_op = metrics.mean_iou(
      labels, predictions, num_classes=num_classes)

  with self.test_session() as sess:
    sess.run(variables.local_variables_initializer())

    # Run several updates.
    for _ in range(10):
      sess.run(update_op)

    # Then verify idempotency: evaluating the value tensor must not
    # mutate the accumulated confusion matrix.
    initial_mean_iou = mean_iou.eval()
    for _ in range(10):
      self.assertEqual(initial_mean_iou, mean_iou.eval())
def testMultipleUpdates(self):
  num_classes = 3
  with self.test_session() as sess:
    # Create the queue that populates the predictions.
    preds_queue = data_flow_ops.FIFOQueue(
        5, dtypes=dtypes_lib.int32, shapes=(1, 1))
    _enqueue_vector(sess, preds_queue, [0])
    _enqueue_vector(sess, preds_queue, [1])
    _enqueue_vector(sess, preds_queue, [2])
    _enqueue_vector(sess, preds_queue, [1])
    _enqueue_vector(sess, preds_queue, [0])
    predictions = preds_queue.dequeue()

    # Create the queue that populates the labels.
    labels_queue = data_flow_ops.FIFOQueue(
        5, dtypes=dtypes_lib.int32, shapes=(1, 1))
    _enqueue_vector(sess, labels_queue, [0])
    _enqueue_vector(sess, labels_queue, [1])
    _enqueue_vector(sess, labels_queue, [1])
    _enqueue_vector(sess, labels_queue, [2])
    _enqueue_vector(sess, labels_queue, [1])
    labels = labels_queue.dequeue()

    miou, update_op = metrics.mean_iou(labels, predictions, num_classes)

    sess.run(variables.local_variables_initializer())
    # One update per queued (prediction, label) pair.
    for _ in range(5):
      sess.run(update_op)
    # Per-class IOU from the accumulated confusion matrix, averaged.
    desired_output = np.mean([1.0 / 2.0, 1.0 / 4.0, 0.])
    self.assertEqual(desired_output, miou.eval())
def testMultipleUpdatesWithWeights(self):
num_classes = 2
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = data_flow_ops.FIFOQueue(
6, dtypes=dtypes_lib.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
weights = weights_queue.dequeue()
mean_iou, update_op = metrics.mean_iou(
labels, predictions, num_classes, weights=weights)
variables.local_variables_initializer().run()
for _ in range(6):
sess.run(update_op)
desired_output = np.mean([2.0 / 3.0, 1.0 / 2.0])
self.assertAlmostEqual(desired_output, mean_iou.eval())
def testMultipleUpdatesWithMissingClass(self):
# Test the case where there are no predicions and labels for
# one class, and thus there is one row and one column with
# zero entries in the confusion matrix.
num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
# There is no prediction for class 2.
preds_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
# There is label for class 2.
labels_queue = data_flow_ops.FIFOQueue(
5, dtypes=dtypes_lib.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
desired_output = np.mean([1.0 / 3.0, 2.0 / 4.0, 0.])
self.assertAlmostEqual(desired_output, miou.eval())
def testUpdateOpEvalIsAccumulatedConfusionMatrix(self):
predictions = array_ops.concat(
[
constant_op.constant(
0, shape=[5]), constant_op.constant(
1, shape=[5])
],
0)
labels = array_ops.concat(
[
constant_op.constant(
0, shape=[3]), constant_op.constant(
1, shape=[7])
],
0)
num_classes = 2
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
confusion_matrix = update_op.eval()
self.assertAllEqual([[3, 0], [2, 5]], confusion_matrix)
desired_miou = np.mean([3. / 5., 5. / 7.])
self.assertAlmostEqual(desired_miou, miou.eval())
def testAllCorrect(self):
predictions = array_ops.zeros([40])
labels = array_ops.zeros([40])
num_classes = 1
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
self.assertEqual(40, update_op.eval()[0])
self.assertEqual(1.0, miou.eval())
def testAllWrong(self):
predictions = array_ops.zeros([40])
labels = array_ops.ones([40])
num_classes = 2
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(labels, predictions, num_classes)
sess.run(variables.local_variables_initializer())
self.assertAllEqual([[0, 0], [40, 0]], update_op.eval())
self.assertEqual(0., miou.eval())
def testResultsWithSomeMissing(self):
predictions = array_ops.concat(
[
constant_op.constant(
0, shape=[5]), constant_op.constant(
1, shape=[5])
],
0)
labels = array_ops.concat(
[
constant_op.constant(
0, shape=[3]), constant_op.constant(
1, shape=[7])
],
0)
num_classes = 2
weights = array_ops.concat(
[
constant_op.constant(
0, shape=[1]), constant_op.constant(
1, shape=[8]), constant_op.constant(
0, shape=[1])
],
0)
with self.test_session() as sess:
miou, update_op = metrics.mean_iou(
labels, predictions, num_classes, weights=weights)
sess.run(variables.local_variables_initializer())
self.assertAllEqual([[2, 0], [2, 4]], update_op.eval())
desired_miou = np.mean([2. / 4., 4. / 6.])
self.assertAlmostEqual(desired_miou, miou.eval())
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
Fat-Zer/FreeCAD_sf_master | src/Mod/Fem/femexamples/meshes/mesh_rc_wall_2d_tria6.py | 23 | 82470 | def create_nodes(femmesh):
# nodes
femmesh.addNode(0.0, 0.0, 0.0, 1)
femmesh.addNode(0.0, 2000.0, 0.0, 2)
femmesh.addNode(500.0, 0.0, 0.0, 3)
femmesh.addNode(4000.0, 2000.0, 0.0, 4)
femmesh.addNode(3500.0, 0.0, 0.0, 5)
femmesh.addNode(4000.0, 0.0, 0.0, 6)
femmesh.addNode(0.0, -2000.0, 0.0, 7)
femmesh.addNode(500.0, -2000.0, 0.0, 8)
femmesh.addNode(3500.0, -2000.0, 0.0, 9)
femmesh.addNode(4000.0, -2000.0, 0.0, 10)
femmesh.addNode(0.0, 250.0, 0.0, 11)
femmesh.addNode(0.0, 500.0, 0.0, 12)
femmesh.addNode(0.0, 750.0, 0.0, 13)
femmesh.addNode(0.0, 1000.0, 0.0, 14)
femmesh.addNode(0.0, 1250.0, 0.0, 15)
femmesh.addNode(0.0, 1500.0, 0.0, 16)
femmesh.addNode(0.0, 1750.0, 0.0, 17)
femmesh.addNode(0.0, 125.0, 0.0, 18)
femmesh.addNode(0.0, 375.0, 0.0, 19)
femmesh.addNode(0.0, 625.0, 0.0, 20)
femmesh.addNode(0.0, 875.0, 0.0, 21)
femmesh.addNode(0.0, 1125.0, 0.0, 22)
femmesh.addNode(0.0, 1375.0, 0.0, 23)
femmesh.addNode(0.0, 1625.0, 0.0, 24)
femmesh.addNode(0.0, 1875.0, 0.0, 25)
femmesh.addNode(250.0, 0.0, 0.0, 26)
femmesh.addNode(125.0, 0.0, 0.0, 27)
femmesh.addNode(375.0, 0.0, 0.0, 28)
femmesh.addNode(250.0, 2000.0, 0.0, 29)
femmesh.addNode(500.0, 2000.0, 0.0, 30)
femmesh.addNode(750.0, 2000.0, 0.0, 31)
femmesh.addNode(1000.0, 2000.0, 0.0, 32)
femmesh.addNode(1250.0, 2000.0, 0.0, 33)
femmesh.addNode(1500.0, 2000.0, 0.0, 34)
femmesh.addNode(1750.0, 2000.0, 0.0, 35)
femmesh.addNode(2000.0, 2000.0, 0.0, 36)
femmesh.addNode(2250.0, 2000.0, 0.0, 37)
femmesh.addNode(2500.0, 2000.0, 0.0, 38)
femmesh.addNode(2750.0, 2000.0, 0.0, 39)
femmesh.addNode(3000.0, 2000.0, 0.0, 40)
femmesh.addNode(3250.0, 2000.0, 0.0, 41)
femmesh.addNode(3500.0, 2000.0, 0.0, 42)
femmesh.addNode(3750.0, 2000.0, 0.0, 43)
femmesh.addNode(125.0, 2000.0, 0.0, 44)
femmesh.addNode(375.0, 2000.0, 0.0, 45)
femmesh.addNode(625.0, 2000.0, 0.0, 46)
femmesh.addNode(875.0, 2000.0, 0.0, 47)
femmesh.addNode(1125.0, 2000.0, 0.0, 48)
femmesh.addNode(1375.0, 2000.0, 0.0, 49)
femmesh.addNode(1625.0, 2000.0, 0.0, 50)
femmesh.addNode(1875.0, 2000.0, 0.0, 51)
femmesh.addNode(2125.0, 2000.0, 0.0, 52)
femmesh.addNode(2375.0, 2000.0, 0.0, 53)
femmesh.addNode(2625.0, 2000.0, 0.0, 54)
femmesh.addNode(2875.0, 2000.0, 0.0, 55)
femmesh.addNode(3125.0, 2000.0, 0.0, 56)
femmesh.addNode(3375.0, 2000.0, 0.0, 57)
femmesh.addNode(3625.0, 2000.0, 0.0, 58)
femmesh.addNode(3875.0, 2000.0, 0.0, 59)
femmesh.addNode(750.0, 0.0, 0.0, 60)
femmesh.addNode(1000.0, 0.0, 0.0, 61)
femmesh.addNode(1250.0, 0.0, 0.0, 62)
femmesh.addNode(1500.0, 0.0, 0.0, 63)
femmesh.addNode(1750.0, 0.0, 0.0, 64)
femmesh.addNode(2000.0, 0.0, 0.0, 65)
femmesh.addNode(2250.0, 0.0, 0.0, 66)
femmesh.addNode(2500.0, 0.0, 0.0, 67)
femmesh.addNode(2750.0, 0.0, 0.0, 68)
femmesh.addNode(3000.0, 0.0, 0.0, 69)
femmesh.addNode(3250.0, 0.0, 0.0, 70)
femmesh.addNode(625.0, 0.0, 0.0, 71)
femmesh.addNode(875.0, 0.0, 0.0, 72)
femmesh.addNode(1125.0, 0.0, 0.0, 73)
femmesh.addNode(1375.0, 0.0, 0.0, 74)
femmesh.addNode(1625.0, 0.0, 0.0, 75)
femmesh.addNode(1875.0, 0.0, 0.0, 76)
femmesh.addNode(2125.0, 0.0, 0.0, 77)
femmesh.addNode(2375.0, 0.0, 0.0, 78)
femmesh.addNode(2625.0, 0.0, 0.0, 79)
femmesh.addNode(2875.0, 0.0, 0.0, 80)
femmesh.addNode(3125.0, 0.0, 0.0, 81)
femmesh.addNode(3375.0, 0.0, 0.0, 82)
femmesh.addNode(4000.0, 250.0, 0.0, 83)
femmesh.addNode(4000.0, 500.0, 0.0, 84)
femmesh.addNode(4000.0, 750.0, 0.0, 85)
femmesh.addNode(4000.0, 1000.0, 0.0, 86)
femmesh.addNode(4000.0, 1250.0, 0.0, 87)
femmesh.addNode(4000.0, 1500.0, 0.0, 88)
femmesh.addNode(4000.0, 1750.0, 0.0, 89)
femmesh.addNode(4000.0, 125.0, 0.0, 90)
femmesh.addNode(4000.0, 375.0, 0.0, 91)
femmesh.addNode(4000.0, 625.0, 0.0, 92)
femmesh.addNode(4000.0, 875.0, 0.0, 93)
femmesh.addNode(4000.0, 1125.0, 0.0, 94)
femmesh.addNode(4000.0, 1375.0, 0.0, 95)
femmesh.addNode(4000.0, 1625.0, 0.0, 96)
femmesh.addNode(4000.0, 1875.0, 0.0, 97)
femmesh.addNode(3750.0, 0.0, 0.0, 98)
femmesh.addNode(3625.0, 0.0, 0.0, 99)
femmesh.addNode(3875.0, 0.0, 0.0, 100)
femmesh.addNode(0.0, -1750.0, 0.0, 101)
femmesh.addNode(0.0, -1500.0, 0.0, 102)
femmesh.addNode(0.0, -1250.0, 0.0, 103)
femmesh.addNode(0.0, -1000.0, 0.0, 104)
femmesh.addNode(0.0, -750.0, 0.0, 105)
femmesh.addNode(0.0, -500.0, 0.0, 106)
femmesh.addNode(0.0, -250.0, 0.0, 107)
femmesh.addNode(0.0, -1875.0, 0.0, 108)
femmesh.addNode(0.0, -1625.0, 0.0, 109)
femmesh.addNode(0.0, -1375.0, 0.0, 110)
femmesh.addNode(0.0, -1125.0, 0.0, 111)
femmesh.addNode(0.0, -875.0, 0.0, 112)
femmesh.addNode(0.0, -625.0, 0.0, 113)
femmesh.addNode(0.0, -375.0, 0.0, 114)
femmesh.addNode(0.0, -125.0, 0.0, 115)
femmesh.addNode(250.0, -2000.0, 0.0, 116)
femmesh.addNode(125.0, -2000.0, 0.0, 117)
femmesh.addNode(375.0, -2000.0, 0.0, 118)
femmesh.addNode(500.0, -1750.0, 0.0, 119)
femmesh.addNode(500.0, -1500.0, 0.0, 120)
femmesh.addNode(500.0, -1250.0, 0.0, 121)
femmesh.addNode(500.0, -1000.0, 0.0, 122)
femmesh.addNode(500.0, -750.0, 0.0, 123)
femmesh.addNode(500.0, -500.0, 0.0, 124)
femmesh.addNode(500.0, -250.0, 0.0, 125)
femmesh.addNode(500.0, -1875.0, 0.0, 126)
femmesh.addNode(500.0, -1625.0, 0.0, 127)
femmesh.addNode(500.0, -1375.0, 0.0, 128)
femmesh.addNode(500.0, -1125.0, 0.0, 129)
femmesh.addNode(500.0, -875.0, 0.0, 130)
femmesh.addNode(500.0, -625.0, 0.0, 131)
femmesh.addNode(500.0, -375.0, 0.0, 132)
femmesh.addNode(500.0, -125.0, 0.0, 133)
femmesh.addNode(3500.0, -1750.0, 0.0, 134)
femmesh.addNode(3500.0, -1500.0, 0.0, 135)
femmesh.addNode(3500.0, -1250.0, 0.0, 136)
femmesh.addNode(3500.0, -1000.0, 0.0, 137)
femmesh.addNode(3500.0, -750.0, 0.0, 138)
femmesh.addNode(3500.0, -500.0, 0.0, 139)
femmesh.addNode(3500.0, -250.0, 0.0, 140)
femmesh.addNode(3500.0, -1875.0, 0.0, 141)
femmesh.addNode(3500.0, -1625.0, 0.0, 142)
femmesh.addNode(3500.0, -1375.0, 0.0, 143)
femmesh.addNode(3500.0, -1125.0, 0.0, 144)
femmesh.addNode(3500.0, -875.0, 0.0, 145)
femmesh.addNode(3500.0, -625.0, 0.0, 146)
femmesh.addNode(3500.0, -375.0, 0.0, 147)
femmesh.addNode(3500.0, -125.0, 0.0, 148)
femmesh.addNode(3750.0, -2000.0, 0.0, 149)
femmesh.addNode(3625.0, -2000.0, 0.0, 150)
femmesh.addNode(3875.0, -2000.0, 0.0, 151)
femmesh.addNode(4000.0, -1750.0, 0.0, 152)
femmesh.addNode(4000.0, -1500.0, 0.0, 153)
femmesh.addNode(4000.0, -1250.0, 0.0, 154)
femmesh.addNode(4000.0, -1000.0, 0.0, 155)
femmesh.addNode(4000.0, -750.0, 0.0, 156)
femmesh.addNode(4000.0, -500.0, 0.0, 157)
femmesh.addNode(4000.0, -250.0, 0.0, 158)
femmesh.addNode(4000.0, -1875.0, 0.0, 159)
femmesh.addNode(4000.0, -1625.0, 0.0, 160)
femmesh.addNode(4000.0, -1375.0, 0.0, 161)
femmesh.addNode(4000.0, -1125.0, 0.0, 162)
femmesh.addNode(4000.0, -875.0, 0.0, 163)
femmesh.addNode(4000.0, -625.0, 0.0, 164)
femmesh.addNode(4000.0, -375.0, 0.0, 165)
femmesh.addNode(4000.0, -125.0, 0.0, 166)
femmesh.addNode(1151.2155027143724, 991.1595002148733, 0.0, 167)
femmesh.addNode(2345.552182966278, 992.5521461426597, 0.0, 168)
femmesh.addNode(3153.541159295079, 1199.5398685297591, 0.0, 169)
femmesh.addNode(1750.0, 695.3125, 0.0, 170)
femmesh.addNode(1681.25, 1347.65625, 0.0, 171)
femmesh.addNode(2881.435948455562, 621.2099677847261, 0.0, 172)
femmesh.addNode(606.0449604906953, 635.4082348210767, 0.0, 173)
femmesh.addNode(657.2976568366603, 1353.3222602999324, 0.0, 174)
femmesh.addNode(3445.1692479821986, 652.3366992631719, 0.0, 175)
femmesh.addNode(2674.4322928661404, 1434.2147374924186, 0.0, 176)
femmesh.addNode(2185.539194525211, 1473.115044788804, 0.0, 177)
femmesh.addNode(1273.2425124792012, 510.71781405990015, 0.0, 178)
femmesh.addNode(2215.6598753905773, 520.7910541541594, 0.0, 179)
femmesh.addNode(1115.670904262049, 1485.8186742572047, 0.0, 180)
femmesh.addNode(3519.3278588807784, 1519.3278588807786, 0.0, 181)
femmesh.addNode(388.34501905880387, 1026.9203157654522, 0.0, 182)
femmesh.addNode(3614.4372160624785, 1020.2250489643089, 0.0, 183)
femmesh.addNode(3105.4482841646395, 1593.0462413828066, 0.0, 184)
femmesh.addNode(2818.493830446949, 990.0975624837915, 0.0, 185)
femmesh.addNode(1959.2775916812711, 1064.8265300145413, 0.0, 186)
femmesh.addNode(1546.3069778764673, 1011.232872978363, 0.0, 187)
femmesh.addNode(2591.0841961277283, 363.09805979524884, 0.0, 188)
femmesh.addNode(900.8498339227292, 372.74375656728824, 0.0, 189)
femmesh.addNode(3159.4114197567123, 364.99779587913827, 0.0, 190)
femmesh.addNode(390.87897826169245, 345.62828737063484, 0.0, 191)
femmesh.addNode(413.062332090631, 1650.5023139707077, 0.0, 192)
femmesh.addNode(1599.1990546119257, 331.5972138397307, 0.0, 193)
femmesh.addNode(1495.152113940753, 1619.2944541079414, 0.0, 194)
femmesh.addNode(1863.5454857801321, 1658.2389001513154, 0.0, 195)
femmesh.addNode(778.2580283531327, 931.2966961410348, 0.0, 196)
femmesh.addNode(3674.5511265257346, 338.9844927685506, 0.0, 197)
femmesh.addNode(3172.7789481491654, 832.1374169291745, 0.0, 198)
femmesh.addNode(784.7082624860842, 1653.7264044731728, 0.0, 199)
femmesh.addNode(2554.739497815064, 712.365487790745, 0.0, 200)
femmesh.addNode(1943.1567961046958, 330.9370445232647, 0.0, 201)
femmesh.addNode(981.3704439777061, 695.0014769598008, 0.0, 202)
femmesh.addNode(2453.2517051286445, 1672.8747213645538, 0.0, 203)
femmesh.addNode(907.6249789310948, 1264.8279056360939, 0.0, 204)
femmesh.addNode(1342.8357754800325, 1305.982903143703, 0.0, 205)
femmesh.addNode(301.68045436748366, 1278.0614848964112, 0.0, 206)
femmesh.addNode(321.4019295935336, 712.8743177284927, 0.0, 207)
femmesh.addNode(2426.6601885758864, 1313.9261459477857, 0.0, 208)
femmesh.addNode(2867.9421371538983, 263.92048664402716, 0.0, 209)
femmesh.addNode(2046.149001386518, 725.1823701492324, 0.0, 210)
femmesh.addNode(2854.1111443273658, 1741.083702840498, 0.0, 211)
femmesh.addNode(3695.4452236106526, 1279.177794976091, 0.0, 212)
femmesh.addNode(3685.7292713722686, 751.032843822758, 0.0, 213)
femmesh.addNode(624.8652096522102, 258.70949318509355, 0.0, 214)
femmesh.addNode(1466.6263015417012, 719.2247946131447, 0.0, 215)
femmesh.addNode(3719.1293832862143, 1719.129383286214, 0.0, 216)
femmesh.addNode(2945.872854070339, 1363.49471903432, 0.0, 217)
femmesh.addNode(3344.6356052877895, 1737.2354549848008, 0.0, 218)
femmesh.addNode(1942.1605314429821, 1315.7220947444227, 0.0, 219)
femmesh.addNode(2093.97656761822, 1743.5731735527538, 0.0, 220)
femmesh.addNode(2337.404696236552, 259.86552863747835, 0.0, 221)
femmesh.addNode(1136.4178616265353, 256.9280165667089, 0.0, 222)
femmesh.addNode(3436.9983781650485, 1261.4961473823978, 0.0, 223)
femmesh.addNode(3406.2893160086924, 251.07304786541548, 0.0, 224)
femmesh.addNode(2215.636485028124, 1177.419912858442, 0.0, 225)
femmesh.addNode(1126.6678439977832, 1759.6448940404007, 0.0, 226)
femmesh.addNode(2335.9075546219287, 765.8443516431239, 0.0, 227)
femmesh.addNode(623.3677071526471, 1794.4665663127212, 0.0, 228)
femmesh.addNode(1183.6674808624416, 743.4840698227251, 0.0, 229)
femmesh.addNode(676.4452885380255, 1103.2769600640684, 0.0, 230)
femmesh.addNode(1747.334612316145, 938.7081036305343, 0.0, 231)
femmesh.addNode(1403.1870558121493, 254.37888532175242, 0.0, 232)
femmesh.addNode(3314.847513203245, 987.8243030056708, 0.0, 233)
femmesh.addNode(1640.5623138995034, 1785.4652572759182, 0.0, 234)
femmesh.addNode(2694.302594697601, 1191.5326623293304, 0.0, 235)
femmesh.addNode(3249.075663998931, 1405.1235272166903, 0.0, 236)
femmesh.addNode(3194.7171333591295, 605.4257226147388, 0.0, 237)
femmesh.addNode(3787.3499723629325, 540.6602594003787, 0.0, 238)
femmesh.addNode(185.04426967129564, 1086.6700079903421, 0.0, 239)
femmesh.addNode(3046.934155784866, 1000.6169552050135, 0.0, 240)
femmesh.addNode(1969.4468096267958, 523.2169628726109, 0.0, 241)
femmesh.addNode(2129.5909553861516, 192.09252274746703, 0.0, 242)
femmesh.addNode(1136.6994499219468, 1211.2946783273387, 0.0, 243)
femmesh.addNode(2597.352264247392, 1808.9701221137507, 0.0, 244)
femmesh.addNode(3819.1744978371717, 1088.9949533284503, 0.0, 245)
femmesh.addNode(2584.2256241005716, 935.7579854262561, 0.0, 246)
femmesh.addNode(3125.0, 1815.2509614904245, 0.0, 247)
femmesh.addNode(3777.6248611905694, 1504.537741959381, 0.0, 248)
femmesh.addNode(588.8115450690148, 859.6463157782961, 0.0, 249)
femmesh.addNode(2143.2683627665697, 936.9869149561475, 0.0, 250)
femmesh.addNode(2754.7204686764762, 780.5893488071242, 0.0, 251)
femmesh.addNode(178.93890438705535, 903.482590983227, 0.0, 252)
femmesh.addNode(1747.982107629327, 1148.2128944222316, 0.0, 253)
femmesh.addNode(1666.5850159712468, 1563.4019198648652, 0.0, 254)
femmesh.addNode(851.0519754307722, 1495.0619168135465, 0.0, 255)
femmesh.addNode(3481.0728671542547, 837.1465204467868, 0.0, 256)
femmesh.addNode(2443.601401640797, 530.7979808791658, 0.0, 257)
femmesh.addNode(205.94642509722854, 499.4693567890037, 0.0, 258)
femmesh.addNode(206.07459453719548, 1499.857298977669, 0.0, 259)
femmesh.addNode(834.6180802078031, 567.491198370818, 0.0, 260)
femmesh.addNode(1766.7929131536002, 212.0366200123763, 0.0, 261)
femmesh.addNode(1372.868119293763, 1786.5908878427776, 0.0, 262)
femmesh.addNode(217.93466600459956, 214.41626585864105, 0.0, 263)
femmesh.addNode(211.16821383061668, 1789.0339304012352, 0.0, 264)
femmesh.addNode(1346.4369194052335, 1104.0442859810998, 0.0, 265)
femmesh.addNode(2625.0, 168.47319683138994, 0.0, 266)
femmesh.addNode(875.0000000000001, 168.27854442880616, 0.0, 267)
femmesh.addNode(3124.9999999999995, 166.32624951132104, 0.0, 268)
femmesh.addNode(2725.0455335697234, 531.4266954931735, 0.0, 269)
femmesh.addNode(1522.1536134860664, 504.98683031131714, 0.0, 270)
femmesh.addNode(3802.9015660741943, 197.09843392580635, 0.0, 271)
femmesh.addNode(1505.6503066232626, 1428.2716333977896, 0.0, 272)
femmesh.addNode(1051.6943688460692, 524.3943887245401, 0.0, 273)
femmesh.addNode(3359.4878194613807, 477.16337384383746, 0.0, 274)
femmesh.addNode(1528.4103478357204, 1224.233473320531, 0.0, 275)
femmesh.addNode(3535.438527288789, 1801.7330078935504, 0.0, 276)
femmesh.addNode(434.67162780804375, 542.3401347012916, 0.0, 277)
femmesh.addNode(451.9072533191967, 1434.6301952460099, 0.0, 278)
femmesh.addNode(2981.795085118534, 448.0055367511777, 0.0, 279)
femmesh.addNode(2978.1800808120247, 805.0128289707615, 0.0, 280)
femmesh.addNode(935.1388659912074, 1056.3754597693553, 0.0, 281)
femmesh.addNode(1754.4192556759315, 451.41573980441683, 0.0, 282)
femmesh.addNode(2361.398753416636, 1810.7310101916673, 0.0, 283)
femmesh.addNode(2422.5421676233855, 1516.583426074803, 0.0, 284)
femmesh.addNode(880.47047742822, 1807.7722912938007, 0.0, 285)
femmesh.addNode(630.2325312193318, 1563.618276186015, 0.0, 286)
femmesh.addNode(444.11067943331193, 184.20265254642115, 0.0, 287)
femmesh.addNode(399.0489951122346, 1843.0986785146765, 0.0, 288)
femmesh.addNode(1326.4741772662737, 1564.2672411316362, 0.0, 289)
femmesh.addNode(1873.5987279934327, 1848.0564063658549, 0.0, 290)
femmesh.addNode(2857.8120840696374, 1538.7765004950957, 0.0, 291)
femmesh.addNode(3615.420858496385, 153.92013164518346, 0.0, 292)
femmesh.addNode(1986.4322329809784, 1519.8742672628473, 0.0, 293)
femmesh.addNode(795.7903110696158, 761.2330506728068, 0.0, 294)
femmesh.addNode(3827.887082573013, 916.2471888959267, 0.0, 295)
femmesh.addNode(2931.8289188589665, 1149.0563535164429, 0.0, 296)
femmesh.addNode(3857.0724217345637, 373.58588179238143, 0.0, 297)
femmesh.addNode(1603.8358047155352, 159.6025438347719, 0.0, 298)
femmesh.addNode(2632.697761232849, 1624.134712542077, 0.0, 299)
femmesh.addNode(981.8039384323351, 874.5293213047062, 0.0, 300)
femmesh.addNode(680.2099424162964, 475.33856352911357, 0.0, 301)
femmesh.addNode(3559.7627922855345, 501.8751194940187, 0.0, 302)
femmesh.addNode(974.4233579675891, 1639.6167774462617, 0.0, 303)
femmesh.addNode(495.13513442403405, 1239.242243254375, 0.0, 304)
femmesh.addNode(1377.2739687571882, 899.7018057699061, 0.0, 305)
femmesh.addNode(3867.2823458215535, 1867.2823458215535, 0.0, 306)
femmesh.addNode(3343.8770189511474, 1557.6498333315424, 0.0, 307)
femmesh.addNode(1824.8290135155744, 1476.484523517676, 0.0, 308)
femmesh.addNode(1577.5083720983002, 852.8360153983897, 0.0, 309)
femmesh.addNode(1914.9321930628041, 875.9179316201704, 0.0, 310)
femmesh.addNode(2284.573533854269, 1640.5742507791397, 0.0, 311)
femmesh.addNode(2119.0518265489545, 365.3806225869961, 0.0, 312)
femmesh.addNode(2453.275415073692, 1122.2377705408946, 0.0, 313)
femmesh.addNode(3322.401080864958, 774.4507864829744, 0.0, 314)
femmesh.addNode(666.8714726873429, 1228.2996101820004, 0.0, 315)
femmesh.addNode(792.0351337345601, 1184.052432850081, 0.0, 316)
femmesh.addNode(782.4613178838775, 1309.0750829680132, 0.0, 317)
femmesh.addNode(108.96733300229978, 232.20813292932053, 0.0, 318)
femmesh.addNode(211.94054555091407, 356.9428113238224, 0.0, 319)
femmesh.addNode(102.97321254861427, 374.7346783945019, 0.0, 320)
femmesh.addNode(208.62140418390607, 1644.4456146894522, 0.0, 321)
femmesh.addNode(105.58410691530834, 1769.5169652006175, 0.0, 322)
femmesh.addNode(103.03729726859774, 1624.9286494888345, 0.0, 323)
femmesh.addNode(1854.974854629148, 271.4868322678205, 0.0, 324)
femmesh.addNode(1883.3964565768001, 106.01831000618814, 0.0, 325)
femmesh.addNode(1971.578398052348, 165.46852226163236, 0.0, 326)
femmesh.addNode(2802.055572163683, 1870.541851420249, 0.0, 327)
femmesh.addNode(2927.055572163683, 1870.541851420249, 0.0, 328)
femmesh.addNode(2684.367443781871, 1312.8736999108746, 0.0, 329)
femmesh.addNode(2820.08772438397, 1277.5136906818252, 0.0, 330)
femmesh.addNode(2810.15257346824, 1398.8547282633695, 0.0, 331)
femmesh.addNode(3888.8124305952847, 1377.2688709796905, 0.0, 332)
femmesh.addNode(3736.535042400611, 1391.8577684677362, 0.0, 333)
femmesh.addNode(3847.7226118053263, 1264.5888974880454, 0.0, 334)
femmesh.addNode(3086.4486070855774, 705.2192757927501, 0.0, 335)
femmesh.addNode(2929.8080146337934, 713.1113983777439, 0.0, 336)
femmesh.addNode(3038.0765409073456, 613.3178451997325, 0.0, 337)
femmesh.addNode(2989.555572163683, 1778.1673321654612, 0.0, 338)
femmesh.addNode(3062.5, 1907.6254807452124, 0.0, 339)
femmesh.addNode(2924.8686111362163, 355.96301169760244, 0.0, 340)
femmesh.addNode(2853.420309344129, 489.7161161221756, 0.0, 341)
femmesh.addNode(2796.493835361811, 397.67359106860033, 0.0, 342)
femmesh.addNode(2813.0657567782837, 1170.2945079228866, 0.0, 343)
femmesh.addNode(2938.850886464653, 1256.2755362753815, 0.0, 344)
femmesh.addNode(102.97321254861427, 499.7346783945019, 0.0, 345)
femmesh.addNode(103.03729726859774, 1499.9286494888345, 0.0, 346)
femmesh.addNode(2658.064864848726, 447.26237764421114, 0.0, 347)
femmesh.addNode(2729.5131666408133, 313.509273219638, 0.0, 348)
femmesh.addNode(529.768418730127, 400.5248139431926, 0.0, 349)
femmesh.addNode(412.7753030348681, 443.9842110359632, 0.0, 350)
femmesh.addNode(507.8720939569513, 302.1688902778642, 0.0, 351)
femmesh.addNode(652.5375760342533, 367.0240283571036, 0.0, 352)
femmesh.addNode(557.44078511217, 508.8393491152026, 0.0, 353)
femmesh.addNode(3888.8124305952847, 1502.2688709796905, 0.0, 354)
femmesh.addNode(2064.7954776930756, 96.04626137373351, 0.0, 355)
femmesh.addNode(2036.3738757454237, 261.51478363536586, 0.0, 356)
femmesh.addNode(983.3614398464106, 1490.4402955353758, 0.0, 357)
femmesh.addNode(879.3384771809335, 1379.9449112248203, 0.0, 358)
femmesh.addNode(1011.6479415965719, 1375.3232899466493, 0.0, 359)
femmesh.addNode(102.97321254861427, 624.7346783945019, 0.0, 360)
femmesh.addNode(263.67417734538105, 606.1718372587482, 0.0, 361)
femmesh.addNode(160.7009647967668, 731.4371588642464, 0.0, 362)
femmesh.addNode(253.87752445233957, 1388.95939193704, 0.0, 363)
femmesh.addNode(103.03729726859774, 1374.9286494888345, 0.0, 364)
femmesh.addNode(150.84022718374183, 1264.0307424482057, 0.0, 365)
femmesh.addNode(2569.482560957818, 824.0617366085005, 0.0, 366)
femmesh.addNode(2460.0665893612504, 850.8011685346901, 0.0, 367)
femmesh.addNode(2445.323526218496, 739.1049197169344, 0.0, 368)
femmesh.addNode(2464.888903533425, 964.155065784458, 0.0, 369)
femmesh.addNode(2340.7298687941034, 879.1982488928918, 0.0, 370)
femmesh.addNode(3748.377122238392, 1611.8335626227977, 0.0, 371)
femmesh.addNode(3888.8124305952847, 1627.2688709796905, 0.0, 372)
femmesh.addNode(3859.564691643107, 1734.564691643107, 0.0, 373)
femmesh.addNode(2200.5878397766674, 1325.2674788236232, 0.0, 374)
femmesh.addNode(2321.1483368020054, 1245.673029403114, 0.0, 375)
femmesh.addNode(2306.0996915505484, 1393.5205953682948, 0.0, 376)
femmesh.addNode(1436.4340596468815, 1893.2954439213888, 0.0, 377)
femmesh.addNode(1506.7152165966331, 1786.028072559348, 0.0, 378)
femmesh.addNode(1570.2811569497517, 1892.7326286379591, 0.0, 379)
femmesh.addNode(2933.971068576949, 131.96024332201358, 0.0, 380)
femmesh.addNode(2808.971068576949, 131.96024332201358, 0.0, 381)
femmesh.addNode(2931.6155167870484, 534.6077522679519, 0.0, 382)
femmesh.addNode(3088.256109238832, 526.7156296829583, 0.0, 383)
femmesh.addNode(2548.487230244763, 1475.3990817836107, 0.0, 384)
femmesh.addNode(2424.601178099636, 1415.2547860112943, 0.0, 385)
femmesh.addNode(2550.5462407210134, 1374.0704417201023, 0.0, 386)
femmesh.addNode(1226.5710106320284, 1661.9560675860184, 0.0, 387)
femmesh.addNode(1349.6711482800183, 1675.429064487207, 0.0, 388)
femmesh.addNode(1249.767981645773, 1773.1178909415892, 0.0, 389)
femmesh.addNode(1022.1622144265208, 1238.0612919817163, 0.0, 390)
femmesh.addNode(1126.185177091998, 1348.5566762922717, 0.0, 391)
femmesh.addNode(805.7920772646164, 1079.8262099167118, 0.0, 392)
femmesh.addNode(921.3819224611511, 1160.6016827027247, 0.0, 393)
femmesh.addNode(1239.7676127009895, 1258.6387907355208, 0.0, 394)
femmesh.addNode(1229.2533398710407, 1395.9007887004539, 0.0, 395)
femmesh.addNode(597.428252779855, 747.5272752996864, 0.0, 396)
femmesh.addNode(455.1067373312742, 786.2603167533944, 0.0, 397)
femmesh.addNode(463.72344504211446, 674.1412762747847, 0.0, 398)
femmesh.addNode(488.57828206390934, 943.2833157718742, 0.0, 399)
femmesh.addNode(354.8734743261688, 869.8973167469724, 0.0, 400)
femmesh.addNode(687.432604826105, 129.35474659254677, 0.0, 401)
femmesh.addNode(562.432604826105, 129.35474659254677, 0.0, 402)
femmesh.addNode(1334.6549763731532, 1435.1250721376696, 0.0, 403)
femmesh.addNode(1221.0725407641612, 1525.0429576944205, 0.0, 404)
femmesh.addNode(1494.389957513884, 612.1058124622309, 0.0, 405)
femmesh.addNode(1636.076806743033, 600.1496651556586, 0.0, 406)
femmesh.addNode(1608.3131507708506, 707.2686473065723, 0.0, 407)
femmesh.addNode(754.1748161337163, 1424.1920885567395, 0.0, 408)
femmesh.addNode(1462.6703346491079, 379.6828578165348, 0.0, 409)
femmesh.addNode(1397.698062982634, 507.8523221856086, 0.0, 410)
femmesh.addNode(1338.2147841456754, 382.5483496908263, 0.0, 411)
femmesh.addNode(378.03677870078866, 627.6072262148921, 0.0, 412)
femmesh.addNode(520.3582941493695, 588.8741847611841, 0.0, 413)
femmesh.addNode(1269.8024587193422, 255.65345094423066, 0.0, 414)
femmesh.addNode(1204.8301870528683, 383.8229153133045, 0.0, 415)
femmesh.addNode(3736.5396218676005, 645.8465516115683, 0.0, 416)
femmesh.addNode(3893.674986181466, 645.3301297001893, 0.0, 417)
femmesh.addNode(3842.8646356861345, 750.516421911379, 0.0, 418)
femmesh.addNode(1638.286434580999, 478.201285057867, 0.0, 419)
femmesh.addNode(1752.2096278379659, 573.3641199022084, 0.0, 420)
femmesh.addNode(3115.2241420823198, 1704.1486014366155, 0.0, 421)
femmesh.addNode(2979.7797142460026, 1667.0649721116524, 0.0, 422)
femmesh.addNode(2653.565027049495, 1529.174725017248, 0.0, 423)
femmesh.addNode(2527.6199644281173, 1570.35906930844, 0.0, 424)
femmesh.addNode(2007.797905506657, 624.1996665109216, 0.0, 425)
femmesh.addNode(2092.5533425086865, 522.0040085133851, 0.0, 426)
femmesh.addNode(2130.9044383885475, 622.9867121516959, 0.0, 427)
femmesh.addNode(1434.010116617258, 1702.9426709753595, 0.0, 428)
femmesh.addNode(1567.8572139201283, 1702.3798556919298, 0.0, 429)
femmesh.addNode(1005.7089308132677, 212.6032804977575, 0.0, 430)
femmesh.addNode(937.5, 84.13927221440308, 0.0, 431)
femmesh.addNode(1068.2089308132677, 128.46400828335445, 0.0, 432)
femmesh.addNode(1121.169374129916, 1622.7317841488027, 0.0, 433)
femmesh.addNode(3297.3178026438945, 1868.6177274924003, 0.0, 434)
femmesh.addNode(3422.3178026438945, 1868.6177274924003, 0.0, 435)
femmesh.addNode(2562.5, 84.23659841569497, 0.0, 436)
femmesh.addNode(2481.202348118276, 214.16936273443415, 0.0, 437)
femmesh.addNode(2418.702348118276, 129.93276431873917, 0.0, 438)
femmesh.addNode(283.64196172292964, 965.2014533743396, 0.0, 439)
femmesh.addNode(250.17041699029448, 808.1784543558599, 0.0, 440)
femmesh.addNode(3566.221800887851, 1270.3369711792443, 0.0, 441)
femmesh.addNode(3525.7177971137635, 1140.8605981733533, 0.0, 442)
femmesh.addNode(3654.9412198365653, 1149.7014219702, 0.0, 443)
femmesh.addNode(3478.1631185229135, 1390.4120031315883, 0.0, 444)
femmesh.addNode(3607.3865412457153, 1399.2528269284348, 0.0, 445)
femmesh.addNode(92.52213483564782, 1168.335003995171, 0.0, 446)
femmesh.addNode(243.36236201938965, 1182.3657464433768, 0.0, 447)
femmesh.addNode(1193.2089308132677, 128.46400828335445, 0.0, 448)
femmesh.addNode(2293.702348118276, 129.93276431873917, 0.0, 449)
femmesh.addNode(2078.898508235553, 1246.5710038014324, 0.0, 450)
femmesh.addNode(2063.8498629840965, 1394.4185697666135, 0.0, 451)
femmesh.addNode(2430.6993767083177, 1905.3655050958337, 0.0, 452)
femmesh.addNode(2479.375508832014, 1809.850566152709, 0.0, 453)
femmesh.addNode(2548.6761321236963, 1904.4850610568753, 0.0, 454)
femmesh.addNode(3617.1569594056346, 420.42980613128464, 0.0, 455)
femmesh.addNode(3483.0260541471134, 376.47408367971707, 0.0, 456)
femmesh.addNode(3540.4202212672135, 295.028770316983, 0.0, 457)
femmesh.addNode(3459.6253058734574, 489.5192466689281, 0.0, 458)
femmesh.addNode(3382.8885677350363, 364.11821085462645, 0.0, 459)
femmesh.addNode(2803.2407410126425, 576.3183316389498, 0.0, 460)
femmesh.addNode(2407.32522927264, 1741.8028657781106, 0.0, 461)
femmesh.addNode(2525.3019846880184, 1740.9224217391522, 0.0, 462)
femmesh.addNode(1117.6809248542554, 633.9392292736327, 0.0, 463)
femmesh.addNode(1162.4684406626352, 517.5561013922202, 0.0, 464)
femmesh.addNode(1228.4549966708214, 627.1009419413126, 0.0, 465)
femmesh.addNode(3075.479514480595, 818.575122949968, 0.0, 466)
femmesh.addNode(3183.7480407541475, 718.7815697719566, 0.0, 467)
femmesh.addNode(3100.2376575399726, 1100.0784118673864, 0.0, 468)
femmesh.addNode(3180.8908344940555, 994.2206291053421, 0.0, 469)
femmesh.addNode(3234.194336249162, 1093.6820857677149, 0.0, 470)
femmesh.addNode(3109.8565519670155, 916.377186067094, 0.0, 471)
femmesh.addNode(3243.8132306762054, 909.9808599674227, 0.0, 472)
femmesh.addNode(89.46945219352767, 826.7412954916135, 0.0, 473)
femmesh.addNode(286.6946443650497, 1056.795161877897, 0.0, 474)
femmesh.addNode(345.0127367131438, 1152.4909003309317, 0.0, 475)
femmesh.addNode(2275.7837150062533, 643.3177028986416, 0.0, 476)
femmesh.addNode(2191.0282780042235, 745.5133608961781, 0.0, 477)
femmesh.addNode(441.740076741419, 1133.0812795099137, 0.0, 478)
femmesh.addNode(398.40779439575886, 1258.651864075393, 0.0, 479)
femmesh.addNode(751.9190922904336, 1801.119428803261, 0.0, 480)
femmesh.addNode(815.23523871411, 1903.8861456469003, 0.0, 481)
femmesh.addNode(686.6838535763236, 1897.2332831563606, 0.0, 482)
femmesh.addNode(1758.3964565768001, 106.01831000618814, 0.0, 483)
femmesh.addNode(1016.5324064118877, 609.6979328421705, 0.0, 484)
femmesh.addNode(1082.5189624200739, 719.242773391263, 0.0, 485)
femmesh.addNode(2304.040681074298, 1494.8492354318034, 0.0, 486)
femmesh.addNode(3453.144658004346, 125.53652393270774, 0.0, 487)
femmesh.addNode(3328.144658004346, 125.53652393270774, 0.0, 488)
femmesh.addNode(3201.308411647005, 1302.3316978732246, 0.0, 489)
femmesh.addNode(3097.474259034635, 1384.3091231255053, 0.0, 490)
femmesh.addNode(3049.707006682709, 1281.5172937820396, 0.0, 491)
femmesh.addNode(3177.2619740817854, 1499.0848842997484, 0.0, 492)
femmesh.addNode(3025.660569117489, 1478.2704802085632, 0.0, 493)
femmesh.addNode(856.6984471721701, 993.836077955195, 0.0, 494)
femmesh.addNode(727.351658445579, 1017.2868281025516, 0.0, 495)
femmesh.addNode(3265.6446580043457, 208.69964868836826, 0.0, 496)
femmesh.addNode(3187.5, 83.16312475566052, 0.0, 497)
femmesh.addNode(534.487944542761, 221.45607286575733, 0.0, 498)
femmesh.addNode(472.05533971665596, 92.10132627321057, 0.0, 499)
femmesh.addNode(2746.471068576949, 216.19684173770855, 0.0, 500)
femmesh.addNode(2687.5, 84.23659841569497, 0.0, 501)
femmesh.addNode(3893.674986181466, 520.3301297001893, 0.0, 502)
femmesh.addNode(2608.042098063864, 265.7856283133194, 0.0, 503)
femmesh.addNode(3062.5, 83.16312475566052, 0.0, 504)
femmesh.addNode(2996.4710685769487, 215.12336807767412, 0.0, 505)
femmesh.addNode(3142.205709878356, 265.66202269522967, 0.0, 506)
femmesh.addNode(3013.6767784553053, 314.4591412615827, 0.0, 507)
femmesh.addNode(887.9249169613647, 270.5111504980472, 0.0, 508)
femmesh.addNode(1018.6338477746323, 314.83588656699857, 0.0, 509)
femmesh.addNode(3384.2017614398546, 1462.2256930487345, 0.0, 510)
femmesh.addNode(3343.0370210819897, 1333.3098372995441, 0.0, 511)
femmesh.addNode(2560.481391636744, 1252.729404138558, 0.0, 512)
femmesh.addNode(3431.602438915963, 1538.4888461061605, 0.0, 513)
femmesh.addNode(3296.476341475039, 1481.3866802741163, 0.0, 514)
femmesh.addNode(2464.24444618214, 311.4817942163636, 0.0, 515)
femmesh.addNode(2087.4570383546975, 1121.1232214364918, 0.0, 516)
femmesh.addNode(1950.7190615621266, 1190.2743123794821, 0.0, 517)
femmesh.addNode(2399.413799019985, 1057.394958341777, 0.0, 518)
femmesh.addNode(2334.4559500509076, 1149.8288416996684, 0.0, 519)
femmesh.addNode(2280.594333997201, 1084.986029500551, 0.0, 520)
femmesh.addNode(1325.1468912020714, 731.3544322179349, 0.0, 521)
femmesh.addNode(1369.9344070104512, 614.9713043365224, 0.0, 522)
femmesh.addNode(561.6838535763236, 1897.2332831563606, 0.0, 523)
femmesh.addNode(3756.8081769726405, 833.6400163593423, 0.0, 524)
femmesh.addNode(3721.1621493177454, 968.2361189301178, 0.0, 525)
femmesh.addNode(3650.0832437173735, 885.6289463935334, 0.0, 526)
femmesh.addNode(2439.967801824789, 1218.08195824434, 0.0, 527)
femmesh.addNode(3070.6032524376233, 406.501666315158, 0.0, 528)
femmesh.addNode(3440.037066288289, 1769.4842314391756, 0.0, 529)
femmesh.addNode(3517.7192636443942, 1900.866503946775, 0.0, 530)
femmesh.addNode(2046.98828380911, 1871.786586776377, 0.0, 531)
femmesh.addNode(2171.98828380911, 1871.786586776377, 0.0, 532)
femmesh.addNode(312.1152729606238, 1719.7681221859716, 0.0, 533)
femmesh.addNode(309.56846331391324, 1575.1798064741884, 0.0, 534)
femmesh.addNode(304.406822133146, 280.0222766146379, 0.0, 535)
femmesh.addNode(298.4127016794605, 422.5488220798193, 0.0, 536)
femmesh.addNode(3375.9229456841467, 1124.6602251940344, 0.0, 537)
femmesh.addNode(3464.6423646328617, 1004.0246759849899, 0.0, 538)
femmesh.addNode(3295.269768730064, 1230.5180079560785, 0.0, 539)
femmesh.addNode(1747.658359972736, 1043.4604990263829, 0.0, 540)
femmesh.addNode(1647.144542752897, 1079.7228837002972, 0.0, 541)
femmesh.addNode(1646.820795096306, 974.9704883044487, 0.0, 542)
femmesh.addNode(832.589369957152, 1730.7493478834867, 0.0, 543)
femmesh.addNode(704.0379848193656, 1724.096485392947, 0.0, 544)
femmesh.addNode(3642.7192636443942, 1900.866503946775, 0.0, 545)
femmesh.addNode(3627.2839552875016, 1760.4311955898822, 0.0, 546)
femmesh.addNode(3734.564691643107, 1859.564691643107, 0.0, 547)
femmesh.addNode(3757.309860723912, 1184.0863741522708, 0.0, 548)
femmesh.addNode(3909.587248918586, 1169.4974766642251, 0.0, 549)
femmesh.addNode(1045.047131114819, 1562.7177258517331, 0.0, 550)
femmesh.addNode(912.7376666991806, 1567.3393471299041, 0.0, 551)
femmesh.addNode(632.6284168035202, 981.4616379211823, 0.0, 552)
femmesh.addNode(532.3951537984146, 1065.0986379147603, 0.0, 553)
femmesh.addNode(683.5347867110737, 895.4715059596654, 0.0, 554)
femmesh.addNode(2517.3427988842627, 446.94802033720737, 0.0, 555)
femmesh.addNode(2390.5030489386745, 395.33175475832206, 0.0, 556)
femmesh.addNode(2329.630638515687, 525.7945175166626, 0.0, 557)
femmesh.addNode(2276.5322858135646, 390.32829139581884, 0.0, 558)
femmesh.addNode(1638.1962277325238, 1186.2231838713813, 0.0, 559)
femmesh.addNode(1537.3586628560938, 1117.7331731494469, 0.0, 560)
femmesh.addNode(1859.7234048133978, 609.2647314363055, 0.0, 561)
femmesh.addNode(1898.0745006932589, 710.2474350746162, 0.0, 562)
femmesh.addNode(3648.4763600356737, 1511.9328004200797, 0.0, 563)
femmesh.addNode(1326.5935279060745, 127.18944266087621, 0.0, 564)
femmesh.addNode(3282.8503678827024, 308.03542187227686, 0.0, 565)
femmesh.addNode(1980.540597224661, 800.5501508847015, 0.0, 566)
femmesh.addNode(1832.466096531402, 785.6152158100851, 0.0, 567)
femmesh.addNode(3502.4660201338666, 577.1059093785952, 0.0, 568)
femmesh.addNode(3622.7460318289013, 626.4539816583883, 0.0, 569)
femmesh.addNode(3565.4492596772334, 701.6847715429649, 0.0, 570)
femmesh.addNode(3527.3831930847837, 1660.5304333871645, 0.0, 571)
femmesh.addNode(3619.2286210834964, 1619.2286210834964, 0.0, 572)
femmesh.addNode(1560.676334048996, 418.29202207552396, 0.0, 573)
femmesh.addNode(1501.1930552120375, 292.9880495807416, 0.0, 574)
femmesh.addNode(749.9326048261051, 213.49401880694984, 0.0, 575)
femmesh.addNode(762.8575217874698, 315.7266248761909, 0.0, 576)
femmesh.addNode(812.5, 84.13927221440308, 0.0, 577)
femmesh.addNode(3431.981732084284, 1628.2816569327897, 0.0, 578)
femmesh.addNode(2044.249318087875, 444.2987927298035, 0.0, 579)
femmesh.addNode(2167.355850969766, 443.08583837057773, 0.0, 580)
femmesh.addNode(2673.6761321236963, 1904.4850610568753, 0.0, 581)
femmesh.addNode(2725.731704287379, 1775.0269124771244, 0.0, 582)
femmesh.addNode(108.96733300229978, 107.20813292932053, 0.0, 583)
femmesh.addNode(233.96733300229977, 107.20813292932053, 0.0, 584)
femmesh.addNode(105.58410691530834, 1894.5169652006175, 0.0, 585)
femmesh.addNode(230.58410691530833, 1894.5169652006175, 0.0, 586)
femmesh.addNode(2855.9616141985016, 1639.9301016677969, 0.0, 587)
femmesh.addNode(2981.6301841171385, 1565.9113709389512, 0.0, 588)
femmesh.addNode(521.6474316549813, 1607.0602950783614, 0.0, 589)
femmesh.addNode(626.8001191859894, 1679.042421249368, 0.0, 590)
femmesh.addNode(518.215019621639, 1722.4844401417145, 0.0, 591)
femmesh.addNode(1063.3339219988916, 1879.8224470202003, 0.0, 592)
femmesh.addNode(1188.3339219988916, 1879.8224470202003, 0.0, 593)
femmesh.addNode(1848.7880258903137, 391.1763921638408, 0.0, 594)
femmesh.addNode(1760.606084414766, 331.72617990839655, 0.0, 595)
femmesh.addNode(3547.755041608367, 928.6857847055478, 0.0, 596)
femmesh.addNode(3583.401069263262, 794.0896821347724, 0.0, 597)
femmesh.addNode(3463.1210575682267, 744.7416098549793, 0.0, 598)
femmesh.addNode(3716.8058569498253, 1054.6100011463795, 0.0, 599)
femmesh.addNode(417.49482884750216, 264.915469958528, 0.0, 600)
femmesh.addNode(1410.8131456035135, 1591.7808476197888, 0.0, 601)
femmesh.addNode(2639.264109399086, 1063.6453238777933, 0.0, 602)
femmesh.addNode(2701.35972727376, 962.9277739550239, 0.0, 603)
femmesh.addNode(2756.398212572275, 1090.815112406561, 0.0, 604)
femmesh.addNode(1924.9888593805554, 1589.0565837070812, 0.0, 605)
femmesh.addNode(2040.2044002995992, 1631.7237204078006, 0.0, 606)
femmesh.addNode(1978.761026699176, 1700.9060368520345, 0.0, 607)
femmesh.addNode(541.0698922692643, 1499.1242357160124, 0.0, 608)
femmesh.addNode(432.48479270491384, 1542.5662546083588, 0.0, 609)
femmesh.addNode(347.05533971665596, 92.10132627321057, 0.0, 610)
femmesh.addNode(331.02267271895573, 199.30945920253112, 0.0, 611)
femmesh.addNode(1451.5935279060745, 127.18944266087621, 0.0, 612)
femmesh.addNode(1663.7541860491501, 774.0742576991948, 0.0, 613)
femmesh.addNode(1522.0673368200007, 786.0304050057672, 0.0, 614)
femmesh.addNode(790.5298881695128, 424.04116004820094, 0.0, 615)
femmesh.addNode(2085.9857137530944, 1496.4946560258256, 0.0, 616)
femmesh.addNode(2139.7578810717155, 1608.344109170779, 0.0, 617)
femmesh.addNode(3913.9435412865064, 833.1235944479633, 0.0, 618)
femmesh.addNode(1695.2811569497517, 1892.7326286379591, 0.0, 619)
femmesh.addNode(2898.3369556294865, 897.5551957272764, 0.0, 620)
femmesh.addNode(3012.5571182984454, 902.8148920878875, 0.0, 621)
femmesh.addNode(2932.713993115907, 995.3572588444025, 0.0, 622)
femmesh.addNode(1983.7876478058265, 1795.8147899593043, 0.0, 623)
femmesh.addNode(1868.5721068867824, 1753.1476532585853, 0.0, 624)
femmesh.addNode(3823.530790205092, 1002.6210711121885, 0.0, 625)
femmesh.addNode(2542.9747331807466, 1648.5047169533154, 0.0, 626)
femmesh.addNode(2437.896936376015, 1594.7290737196784, 0.0, 627)
femmesh.addNode(1936.7993639967162, 1924.0282031829274, 0.0, 628)
femmesh.addNode(3042.685039077023, 1174.298111023101, 0.0, 629)
femmesh.addNode(2573.789004885646, 1156.8852164351124, 0.0, 630)
femmesh.addNode(2518.7505195871317, 1028.9978779835753, 0.0, 631)
femmesh.addNode(1845.0713195361545, 1231.9674945833272, 0.0, 632)
femmesh.addNode(1853.629849655299, 1106.5197122183863, 0.0, 633)
femmesh.addNode(1714.6160538146635, 1247.9345722111157, 0.0, 634)
femmesh.addNode(1811.705265721491, 1331.6891723722115, 0.0, 635)
femmesh.addNode(1094.0561152363023, 390.6612026456245, 0.0, 636)
femmesh.addNode(976.2721013843992, 448.56907264591416, 0.0, 637)
femmesh.addNode(92.52213483564782, 1043.335003995171, 0.0, 638)
femmesh.addNode(3933.6411729107767, 1808.6411729107767, 0.0, 639)
femmesh.addNode(3793.2058645538837, 1793.2058645538837, 0.0, 640)
femmesh.addNode(3808.6411729107767, 1933.6411729107767, 0.0, 641)
femmesh.addNode(2866.4502747442502, 792.8010888889428, 0.0, 642)
femmesh.addNode(2786.6071495617125, 885.3434556454579, 0.0, 643)
femmesh.addNode(3901.450783037097, 98.54921696290317, 0.0, 644)
femmesh.addNode(3776.450783037097, 98.54921696290317, 0.0, 645)
femmesh.addNode(3901.450783037097, 223.5492169629032, 0.0, 646)
femmesh.addNode(3187.5, 1907.6254807452124, 0.0, 647)
femmesh.addNode(3234.8178026438945, 1776.2432082376126, 0.0, 648)
femmesh.addNode(3225.0419447262148, 1665.1408481838037, 0.0, 649)
femmesh.addNode(2179.4524238973468, 1057.2034139072948, 0.0, 650)
femmesh.addNode(2244.410272866424, 964.7695305494036, 0.0, 651)
femmesh.addNode(1676.8091551439286, 391.50647682207375, 0.0, 652)
femmesh.addNode(1682.995983882763, 271.8169169260535, 0.0, 653)
femmesh.addNode(3909.587248918586, 1044.4974766642251, 0.0, 654)
femmesh.addNode(181.9915870291755, 995.0762994867846, 0.0, 655)
femmesh.addNode(3344.2563121194685, 1647.4426441581716, 0.0, 656)
femmesh.addNode(2233.4978258113515, 225.9790256924727, 0.0, 657)
femmesh.addNode(2189.7954776930756, 96.04626137373351, 0.0, 658)
femmesh.addNode(2901.8424690699885, 1451.1356097647079, 0.0, 659)
femmesh.addNode(2051.2729772239204, 1000.9067224853444, 0.0, 660)
femmesh.addNode(2389.7544781313627, 648.3211662611449, 0.0, 661)
femmesh.addNode(2499.1704497279306, 621.5817343349554, 0.0, 662)
femmesh.addNode(3177.064276557921, 485.2117592469385, 0.0, 663)
femmesh.addNode(940.23523871411, 1903.8861456469003, 0.0, 664)
femmesh.addNode(1003.5691607130016, 1783.7085926671007, 0.0, 665)
femmesh.addNode(3224.6626515578937, 1575.3480373571745, 0.0, 666)
femmesh.addNode(2766.122188467889, 1486.495618993757, 0.0, 667)
femmesh.addNode(3673.5563823242337, 521.2676894471987, 0.0, 668)
femmesh.addNode(2228.228261392753, 312.62307561223724, 0.0, 669)
femmesh.addNode(2305.6993767083177, 1905.3655050958337, 0.0, 670)
femmesh.addNode(1311.4340596468815, 1893.2954439213888, 0.0, 671)
femmesh.addNode(2875.1613746529574, 1069.5769580001172, 0.0, 672)
femmesh.addNode(2639.8925156923938, 621.8960916419592, 0.0, 673)
femmesh.addNode(2739.8830011231, 656.0080221501489, 0.0, 674)
femmesh.addNode(2654.72998324577, 746.4774182989346, 0.0, 675)
femmesh.addNode(1883.4947724792783, 1396.1033091310494, 0.0, 676)
femmesh.addNode(1753.0395067577872, 1412.0703867588381, 0.0, 677)
femmesh.addNode(1964.2963822119802, 1417.798181003635, 0.0, 678)
femmesh.addNode(2239.587958694249, 851.4156332996357, 0.0, 679)
femmesh.addNode(2094.708682076544, 831.08464255269, 0.0, 680)
femmesh.addNode(2235.0563641897397, 1556.8446477839718, 0.0, 681)
femmesh.addNode(2189.2750507362443, 1692.0737121659467, 0.0, 682)
femmesh.addNode(1477.3911704277443, 876.2689105841479, 0.0, 683)
femmesh.addNode(1421.9501351494446, 809.4633001915254, 0.0, 684)
femmesh.addNode(1561.9076749873839, 932.0344441883764, 0.0, 685)
femmesh.addNode(1461.7904733168277, 955.4673393741346, 0.0, 686)
femmesh.addNode(1035.919157956577, 1133.8350690483471, 0.0, 687)
femmesh.addNode(1043.17718435279, 1023.7674799921143, 0.0, 688)
femmesh.addNode(1143.9574763181595, 1101.227089271106, 0.0, 689)
femmesh.addNode(2227.687660517428, 1777.1520918722106, 0.0, 690)
femmesh.addNode(2818.0782085660194, 700.8996582959252, 0.0, 691)
femmesh.addNode(3510.8550872525384, 202.49658975529945, 0.0, 692)
femmesh.addNode(3644.9859925110595, 246.45231220686702, 0.0, 693)
femmesh.addNode(3557.7104292481927, 76.96006582259173, 0.0, 694)
femmesh.addNode(1066.5097205733537, 932.8444107597898, 0.0, 695)
femmesh.addNode(1082.7357096473884, 809.0066955637157, 0.0, 696)
femmesh.addNode(1167.441491788407, 867.3217850187992, 0.0, 697)
femmesh.addNode(981.5871912050206, 784.7653991322535, 0.0, 698)
femmesh.addNode(927.4469176979046, 1723.6945343700313, 0.0, 699)
femmesh.addNode(1050.5456009826862, 1699.6308357433313, 0.0, 700)
femmesh.addNode(585.7902114810297, 1171.2596016592215, 0.0, 701)
femmesh.addNode(888.7971247509754, 817.8811859887564, 0.0, 702)
femmesh.addNode(888.5803775236609, 728.1172638163039, 0.0, 703)
femmesh.addNode(3259.4496196090467, 421.08058486148786, 0.0, 704)
femmesh.addNode(2989.3815373219163, 1074.836654360728, 0.0, 705)
femmesh.addNode(1861.9330326513636, 487.31635133851387, 0.0, 706)
femmesh.addNode(1956.3018028657457, 427.0770036979378, 0.0, 707)
femmesh.addNode(643.1274514534958, 555.3733991750951, 0.0, 708)
femmesh.addNode(2031.1043113268252, 348.1588335551304, 0.0, 709)
femmesh.addNode(1280.4707248098148, 821.5929377963156, 0.0, 710)
femmesh.addNode(1264.2447357357803, 945.4306529923897, 0.0, 711)
femmesh.addNode(89.46945219352767, 951.7412954916135, 0.0, 712)
femmesh.addNode(2743.4044527801075, 1682.6092076912876, 0.0, 713)
femmesh.addNode(2745.2549226512433, 1581.4556065185864, 0.0, 714)
femmesh.addNode(880.0309833927339, 902.9130087228705, 0.0, 715)
femmesh.addNode(787.0241697113743, 846.2648734069207, 0.0, 716)
femmesh.addNode(1676.9179023577676, 79.80127191738595, 0.0, 717)
femmesh.addNode(1551.9179023577676, 79.80127191738595, 0.0, 718)
femmesh.addNode(1831.1334026894747, 907.3130176253524, 0.0, 719)
femmesh.addNode(1748.6673061580725, 817.0103018152672, 0.0, 720)
femmesh.addNode(879.5658102268367, 1646.6715909597174, 0.0, 721)
femmesh.addNode(817.8801189584283, 1574.3941606433596, 0.0, 722)
femmesh.addNode(1437.423633620477, 1164.1388796508154, 0.0, 723)
femmesh.addNode(1435.6230616578764, 1265.1081882321168, 0.0, 724)
femmesh.addNode(1344.636347442633, 1205.0135945624015, 0.0, 725)
femmesh.addNode(1446.3719486408504, 1057.6385794797316, 0.0, 726)
femmesh.addNode(1853.306101998708, 1001.7673168225379, 0.0, 727)
femmesh.addNode(2669.473046388524, 858.1736671166902, 0.0, 728)
femmesh.addNode(943.1562245269362, 545.942793547679, 0.0, 729)
femmesh.addNode(867.7339570652662, 470.11747746905314, 0.0, 730)
femmesh.addNode(907.9942620927546, 631.2463376653094, 0.0, 731)
femmesh.addNode(324.5244975561173, 1921.5493392573383, 0.0, 732)
femmesh.addNode(449.5244975561173, 1921.5493392573383, 0.0, 733)
femmesh.addNode(3247.590014507062, 803.2941017060745, 0.0, 734)
femmesh.addNode(3318.624297034102, 881.1375447443227, 0.0, 735)
femmesh.addNode(1937.1048923720377, 970.3722308173558, 0.0, 736)
femmesh.addNode(2029.100277914687, 906.452423288159, 0.0, 737)
femmesh.addNode(1653.573664935375, 1674.4335885703917, 0.0, 738)
femmesh.addNode(1765.0652508756893, 1610.8204100080902, 0.0, 739)
femmesh.addNode(1752.0538998398179, 1721.8520787136167, 0.0, 740)
femmesh.addNode(2584.3234676052602, 531.1123381861696, 0.0, 741)
femmesh.addNode(3397.96019017875, 912.4854117262288, 0.0, 742)
femmesh.addNode(3913.9435412865064, 958.1235944479633, 0.0, 743)
femmesh.addNode(1580.8685649559998, 1591.3481869864033, 0.0, 744)
femmesh.addNode(511.20835113244084, 1818.7826224136988, 0.0, 745)
femmesh.addNode(406.0556636014328, 1746.8004962426921, 0.0, 746)
femmesh.addNode(1757.080520946468, 1816.7608318208866, 0.0, 747)
femmesh.addNode(1811.7993639967162, 1924.0282031829274, 0.0, 748)
femmesh.addNode(707.4703968527081, 1608.672340329594, 0.0, 749)
femmesh.addNode(2124.3213909675533, 278.73657266723154, 0.0, 750)
femmesh.addNode(3933.6411729107767, 1933.6411729107767, 0.0, 751)
femmesh.addNode(3682.7104292481927, 76.96006582259173, 0.0, 752)
femmesh.addNode(1361.8554440812109, 1001.873045875503, 0.0, 753)
femmesh.addNode(1248.826211059803, 1047.6018930979867, 0.0, 754)
femmesh.addNode(1241.5681846635903, 1157.6694821542192, 0.0, 755)
femmesh.addNode(1604.83017391786, 1285.9448616602654, 0.0, 756)
femmesh.addNode(1517.0303272294914, 1326.2525533591602, 0.0, 757)
femmesh.addNode(1593.4501533116313, 1387.9639416988948, 0.0, 758)
femmesh.addNode(1424.2430410516477, 1367.1272682707463, 0.0, 759)
femmesh.addNode(815.2041956387095, 664.3621245218123, 0.0, 760)
femmesh.addNode(3928.5362108672816, 436.7929408961907, 0.0, 761)
femmesh.addNode(3928.5362108672816, 311.7929408961907, 0.0, 762)
femmesh.addNode(3402.3285337217894, 564.7500365535047, 0.0, 763)
femmesh.addNode(3401.7369740096065, 805.7986534648805, 0.0, 764)
femmesh.addNode(1662.4214922072226, 895.772059514462, 0.0, 765)
femmesh.addNode(3277.102476410255, 541.2945482292881, 0.0, 766)
femmesh.addNode(3319.943190670664, 628.8812109389553, 0.0, 767)
femmesh.addNode(757.4140113120498, 521.4148809499658, 0.0, 768)
femmesh.addNode(643.765094027996, 1458.4702682429738, 0.0, 769)
femmesh.addNode(554.6024550779284, 1393.976227772971, 0.0, 770)
femmesh.addNode(1685.3143589345677, 185.8195819235741, 0.0, 771)
femmesh.addNode(1503.5114302638422, 206.99071457826216, 0.0, 772)
femmesh.addNode(1601.5174296637306, 245.5998788372513, 0.0, 773)
femmesh.addNode(376.7938538433402, 1356.3458400712107, 0.0, 774)
femmesh.addNode(328.9909239281961, 1467.2437471118394, 0.0, 775)
femmesh.addNode(320.30902645263615, 520.9047457451477, 0.0, 776)
femmesh.addNode(576.2163956303472, 1296.2822517771538, 0.0, 777)
femmesh.addNode(2615.0250127401205, 1716.5524173279139, 0.0, 778)
femmesh.addNode(740.642253325052, 1529.3400964997809, 0.0, 779)
femmesh.addNode(305.1086044714256, 1816.066304457956, 0.0, 780)
femmesh.addNode(3765.8117741301494, 356.285187280466, 0.0, 781)
femmesh.addNode(3822.211197048748, 457.12307059638005, 0.0, 782)
femmesh.addNode(3730.950549444334, 439.8223760844646, 0.0, 783)
femmesh.addNode(1586.1176612972547, 1495.8367766313274, 0.0, 784)
femmesh.addNode(1673.9175079856234, 1455.5290849324326, 0.0, 785)
femmesh.addNode(1500.401210282008, 1523.7830437528655, 0.0, 786)
femmesh.addNode(1416.0622419447682, 1496.269437264713, 0.0, 787)
femmesh.addNode(692.3009280693152, 810.4396832255514, 0.0, 788)
femmesh.addNode(700.9176357801555, 698.3206427469418, 0.0, 789)
femmesh.addNode(3383.7851644235784, 713.3937428730732, 0.0, 790)
femmesh.addNode(3258.559107112044, 689.9382545488565, 0.0, 791)
femmesh.addNode(958.4714022117712, 965.4523905370307, 0.0, 792)
femmesh.addNode(1745.7070147434106, 1519.9432216912705, 0.0, 793)
femmesh.addNode(1844.1872496478532, 1567.3617118344957, 0.0, 794)
femmesh.addNode(3709.1612122852894, 175.50928278549492, 0.0, 795)
femmesh.addNode(473.52119387161537, 1336.9362192501924, 0.0, 796)
femmesh.addNode(720.3315203492492, 601.4497165959474, 0.0, 797)
femmesh.addNode(3829.9869939043792, 285.3421578590939, 0.0, 798)
femmesh.addNode(3738.7263462999645, 268.0414633471785, 0.0, 799)
femmesh.addNode(1905.6306232482764, 1498.1793953902616, 0.0, 800)
femmesh.addNode(2322.986143635452, 1725.6526304854035, 0.0, 801)
femmesh.addNode(2368.912619491457, 1656.7244860718467, 0.0, 802)
femmesh.addNode(2353.5578507388273, 1578.5788384269713, 0.0, 803)
femmesh.addNode(251.59438775510205, -1642.5382653061224, 0.0, 804)
femmesh.addNode(250.0, -1125.0, 0.0, 805)
femmesh.addNode(244.79166666666666, -621.527777777778, 0.0, 806)
femmesh.addNode(253.65823412698398, -349.26835317460336, 0.0, 807)
femmesh.addNode(250.26573129251702, -1377.9230442176872, 0.0, 808)
femmesh.addNode(249.1319444444444, -874.4212962962965, 0.0, 809)
femmesh.addNode(183.5156907411028, -1822.3611654157708, 0.0, 810)
femmesh.addNode(175.62426452602392, -169.8009014101267, 0.0, 811)
femmesh.addNode(340.96396806154064, -151.6577109724219, 0.0, 812)
femmesh.addNode(337.022015699241, -1842.9798861443787, 0.0, 813)
femmesh.addNode(375.1328656462585, -1313.9615221088436, 0.0, 814)
femmesh.addNode(375.1328656462585, -1438.9615221088436, 0.0, 815)
femmesh.addNode(374.5659722222222, -812.2106481481483, 0.0, 816)
femmesh.addNode(374.5659722222222, -937.2106481481483, 0.0, 817)
femmesh.addNode(250.1328656462585, -1251.4615221088436, 0.0, 818)
femmesh.addNode(375.0, -1187.5, 0.0, 819)
femmesh.addNode(250.93005952380952, -1510.2306547619048, 0.0, 820)
femmesh.addNode(375.797193877551, -1571.2691326530612, 0.0, 821)
femmesh.addNode(249.5659722222222, -999.7106481481483, 0.0, 822)
femmesh.addNode(375.0, -1062.5, 0.0, 823)
femmesh.addNode(246.96180555555554, -747.9745370370374, 0.0, 824)
femmesh.addNode(372.3958333333333, -685.7638888888891, 0.0, 825)
femmesh.addNode(249.2249503968253, -485.3980654761908, 0.0, 826)
femmesh.addNode(126.82911706349199, -424.6341765873017, 0.0, 827)
femmesh.addNode(122.39583333333333, -560.7638888888891, 0.0, 828)
femmesh.addNode(376.829117063492, -424.6341765873017, 0.0, 829)
femmesh.addNode(372.3958333333333, -560.7638888888891, 0.0, 830)
femmesh.addNode(126.82911706349199, -299.6341765873017, 0.0, 831)
femmesh.addNode(376.829117063492, -299.6341765873017, 0.0, 832)
femmesh.addNode(125.79719387755102, -1696.2691326530612, 0.0, 833)
femmesh.addNode(125.79719387755102, -1571.2691326530612, 0.0, 834)
femmesh.addNode(125.0, -1187.5, 0.0, 835)
femmesh.addNode(125.0, -1062.5, 0.0, 836)
femmesh.addNode(122.39583333333333, -685.7638888888891, 0.0, 837)
femmesh.addNode(375.797193877551, -1696.2691326530612, 0.0, 838)
femmesh.addNode(124.5659722222222, -937.2106481481483, 0.0, 839)
femmesh.addNode(125.13286564625851, -1313.9615221088436, 0.0, 840)
femmesh.addNode(124.5659722222222, -812.2106481481483, 0.0, 841)
femmesh.addNode(125.13286564625851, -1438.9615221088436, 0.0, 842)
femmesh.addNode(294.30820172717154, -1742.7590757252506, 0.0, 843)
femmesh.addNode(418.5110078496205, -1796.4899430721894, 0.0, 844)
femmesh.addNode(91.7578453705514, -1911.1805827078854, 0.0, 845)
femmesh.addNode(91.7578453705514, -1786.1805827078854, 0.0, 846)
femmesh.addNode(216.7578453705514, -1911.1805827078854, 0.0, 847)
femmesh.addNode(217.55503924810242, -1732.4497153609466, 0.0, 848)
femmesh.addNode(420.4819840307703, -200.82885548621107, 0.0, 849)
femmesh.addNode(297.3111010942623, -250.46303207351275, 0.0, 850)
femmesh.addNode(214.64124932650395, -259.53462729236503, 0.0, 851)
femmesh.addNode(87.81213226301196, -209.90045070506335, 0.0, 852)
femmesh.addNode(87.81213226301196, -84.90045070506335, 0.0, 853)
femmesh.addNode(212.81213226301196, -84.90045070506335, 0.0, 854)
femmesh.addNode(260.2688532201719, -1832.6705257800747, 0.0, 855)
femmesh.addNode(295.4819840307703, -75.82885548621107, 0.0, 856)
femmesh.addNode(420.4819840307703, -75.82885548621107, 0.0, 857)
femmesh.addNode(418.5110078496205, -1921.4899430721894, 0.0, 858)
femmesh.addNode(293.5110078496205, -1921.4899430721894, 0.0, 859)
femmesh.addNode(258.2941162937823, -160.7293061912742, 0.0, 860)
femmesh.addNode(3751.594387755102, -1642.5382653061224, 0.0, 861)
femmesh.addNode(3750.0, -1125.0, 0.0, 862)
femmesh.addNode(3744.7916666666665, -621.527777777778, 0.0, 863)
femmesh.addNode(3753.658234126984, -349.26835317460336, 0.0, 864)
femmesh.addNode(3750.265731292517, -1377.9230442176872, 0.0, 865)
femmesh.addNode(3749.1319444444443, -874.4212962962965, 0.0, 866)
femmesh.addNode(3683.515690741103, -1822.3611654157708, 0.0, 867)
femmesh.addNode(3675.624264526024, -169.8009014101267, 0.0, 868)
femmesh.addNode(3840.9639680615405, -151.6577109724219, 0.0, 869)
femmesh.addNode(3837.022015699241, -1842.9798861443787, 0.0, 870)
femmesh.addNode(3875.1328656462583, -1313.9615221088436, 0.0, 871)
femmesh.addNode(3875.1328656462583, -1438.9615221088436, 0.0, 872)
femmesh.addNode(3874.565972222222, -812.2106481481483, 0.0, 873)
femmesh.addNode(3874.565972222222, -937.2106481481483, 0.0, 874)
femmesh.addNode(3750.1328656462583, -1251.4615221088436, 0.0, 875)
femmesh.addNode(3875.0, -1187.5, 0.0, 876)
femmesh.addNode(3746.9618055555557, -747.9745370370374, 0.0, 877)
femmesh.addNode(3872.395833333333, -685.7638888888891, 0.0, 878)
femmesh.addNode(3750.9300595238096, -1510.2306547619048, 0.0, 879)
femmesh.addNode(3875.797193877551, -1571.2691326530612, 0.0, 880)
femmesh.addNode(3749.565972222222, -999.7106481481483, 0.0, 881)
femmesh.addNode(3875.0, -1062.5, 0.0, 882)
femmesh.addNode(3876.829117063492, -424.6341765873017, 0.0, 883)
femmesh.addNode(3749.224950396825, -485.3980654761908, 0.0, 884)
femmesh.addNode(3872.395833333333, -560.7638888888891, 0.0, 885)
femmesh.addNode(3626.829117063492, -424.6341765873017, 0.0, 886)
femmesh.addNode(3622.395833333333, -560.7638888888891, 0.0, 887)
femmesh.addNode(3626.829117063492, -299.6341765873017, 0.0, 888)
femmesh.addNode(3876.829117063492, -299.6341765873017, 0.0, 889)
femmesh.addNode(3625.797193877551, -1696.2691326530612, 0.0, 890)
femmesh.addNode(3625.797193877551, -1571.2691326530612, 0.0, 891)
femmesh.addNode(3625.0, -1187.5, 0.0, 892)
femmesh.addNode(3625.0, -1062.5, 0.0, 893)
femmesh.addNode(3622.395833333333, -685.7638888888891, 0.0, 894)
femmesh.addNode(3875.797193877551, -1696.2691326530612, 0.0, 895)
femmesh.addNode(3624.565972222222, -937.2106481481483, 0.0, 896)
femmesh.addNode(3625.1328656462583, -1438.9615221088436, 0.0, 897)
femmesh.addNode(3625.1328656462583, -1313.9615221088436, 0.0, 898)
femmesh.addNode(3624.565972222222, -812.2106481481483, 0.0, 899)
femmesh.addNode(3794.3082017271718, -1742.7590757252506, 0.0, 900)
femmesh.addNode(3918.5110078496205, -1796.4899430721894, 0.0, 901)
femmesh.addNode(3591.7578453705514, -1911.1805827078854, 0.0, 902)
femmesh.addNode(3591.7578453705514, -1786.1805827078854, 0.0, 903)
femmesh.addNode(3716.7578453705514, -1911.1805827078854, 0.0, 904)
femmesh.addNode(3717.5550392481027, -1732.4497153609466, 0.0, 905)
femmesh.addNode(3920.48198403077, -200.82885548621107, 0.0, 906)
femmesh.addNode(3797.311101094262, -250.46303207351275, 0.0, 907)
femmesh.addNode(3714.6412493265043, -259.53462729236503, 0.0, 908)
femmesh.addNode(3587.8121322630122, -209.90045070506335, 0.0, 909)
femmesh.addNode(3587.8121322630122, -84.90045070506335, 0.0, 910)
femmesh.addNode(3712.8121322630122, -84.90045070506335, 0.0, 911)
femmesh.addNode(3760.268853220172, -1832.6705257800747, 0.0, 912)
femmesh.addNode(3795.48198403077, -75.82885548621107, 0.0, 913)
femmesh.addNode(3920.48198403077, -75.82885548621107, 0.0, 914)
femmesh.addNode(3918.5110078496205, -1921.4899430721894, 0.0, 915)
femmesh.addNode(3793.5110078496205, -1921.4899430721894, 0.0, 916)
femmesh.addNode(3758.2941162937823, -160.7293061912742, 0.0, 917)
return True
def create_elements(femmesh):
    """Populate *femmesh* with quadratic (6-node) triangular faces.

    Machine-generated mesh data: each call supplies the six node ids of one
    second-order triangle (3 corners followed by 3 mid-side nodes) plus the
    element id.  The literal ids and the call order ARE the content — do not
    hand-edit.  Returns True on completion.
    """
    # elements
    # -- first mesh region (nodes up to ~803) --
    femmesh.addFace([174, 230, 204, 315, 316, 317], 21)
    femmesh.addFace([11, 263, 258, 318, 319, 320], 22)
    femmesh.addFace([259, 264, 17, 321, 322, 323], 23)
    femmesh.addFace([201, 261, 65, 324, 325, 326], 24)
    femmesh.addFace([39, 211, 40, 327, 328, 55], 25)
    femmesh.addFace([176, 235, 217, 329, 330, 331], 26)
    femmesh.addFace([87, 248, 212, 332, 333, 334], 27)
    femmesh.addFace([237, 280, 172, 335, 336, 337], 28)
    femmesh.addFace([211, 247, 40, 338, 339, 328], 29)
    femmesh.addFace([209, 279, 269, 340, 341, 342], 30)
    femmesh.addFace([235, 296, 217, 343, 344, 330], 31)
    femmesh.addFace([11, 258, 12, 320, 345, 19], 32)
    femmesh.addFace([16, 259, 17, 346, 323, 24], 33)
    femmesh.addFace([209, 269, 188, 342, 347, 348], 34)
    femmesh.addFace([214, 277, 191, 349, 350, 351], 35)
    femmesh.addFace([214, 301, 277, 352, 353, 349], 36)
    femmesh.addFace([88, 248, 87, 354, 332, 95], 37)
    femmesh.addFace([65, 242, 201, 355, 356, 326], 38)
    femmesh.addFace([180, 255, 204, 357, 358, 359], 39)
    femmesh.addFace([13, 258, 207, 360, 361, 362], 40)
    femmesh.addFace([206, 259, 15, 363, 364, 365], 41)
    femmesh.addFace([200, 246, 227, 366, 367, 368], 42)
    femmesh.addFace([227, 246, 168, 367, 369, 370], 43)
    femmesh.addFace([216, 248, 89, 371, 372, 373], 44)
    femmesh.addFace([177, 225, 208, 374, 375, 376], 45)
    femmesh.addFace([34, 262, 234, 377, 378, 379], 46)
    femmesh.addFace([69, 209, 68, 380, 381, 80], 47)
    femmesh.addFace([172, 279, 237, 382, 383, 337], 48)
    femmesh.addFace([176, 284, 208, 384, 385, 386], 49)
    femmesh.addFace([226, 289, 262, 387, 388, 389], 50)
    femmesh.addFace([204, 243, 180, 390, 391, 359], 51)
    femmesh.addFace([230, 281, 204, 392, 393, 316], 52)
    femmesh.addFace([89, 248, 88, 372, 354, 96], 53)
    femmesh.addFace([180, 243, 205, 391, 394, 395], 54)
    femmesh.addFace([173, 249, 207, 396, 397, 398], 55)
    femmesh.addFace([207, 249, 182, 397, 399, 400], 56)
    femmesh.addFace([60, 214, 3, 401, 402, 71], 57)
    femmesh.addFace([205, 289, 180, 403, 404, 395], 58)
    femmesh.addFace([215, 270, 170, 405, 406, 407], 59)
    femmesh.addFace([204, 255, 174, 358, 408, 317], 60)
    femmesh.addFace([232, 270, 178, 409, 410, 411], 61)
    femmesh.addFace([207, 277, 173, 412, 413, 398], 62)
    femmesh.addFace([222, 232, 178, 414, 411, 415], 63)
    femmesh.addFace([213, 238, 85, 416, 417, 418], 64)
    femmesh.addFace([270, 282, 170, 419, 420, 406], 65)
    femmesh.addFace([184, 247, 211, 421, 338, 422], 66)
    femmesh.addFace([176, 299, 284, 423, 424, 384], 67)
    femmesh.addFace([210, 241, 179, 425, 426, 427], 68)
    femmesh.addFace([234, 262, 194, 378, 428, 429], 69)
    femmesh.addFace([222, 267, 61, 430, 431, 432], 70)
    femmesh.addFace([180, 289, 226, 404, 387, 433], 71)
    femmesh.addFace([41, 218, 42, 434, 435, 57], 72)
    femmesh.addFace([67, 266, 221, 436, 437, 438], 73)
    femmesh.addFace([182, 252, 207, 439, 440, 400], 74)
    femmesh.addFace([212, 223, 183, 441, 442, 443], 75)
    femmesh.addFace([181, 223, 212, 444, 441, 445], 76)
    femmesh.addFace([15, 239, 206, 446, 447, 365], 77)
    femmesh.addFace([62, 222, 61, 448, 432, 73], 78)
    femmesh.addFace([67, 221, 66, 438, 449, 78], 79)
    femmesh.addFace([219, 225, 177, 450, 374, 451], 80)
    femmesh.addFace([38, 283, 244, 452, 453, 454], 81)
    femmesh.addFace([197, 302, 224, 455, 456, 457], 82)
    femmesh.addFace([224, 302, 274, 456, 458, 459], 83)
    femmesh.addFace([269, 279, 172, 341, 382, 460], 84)
    femmesh.addFace([244, 283, 203, 453, 461, 462], 85)
    femmesh.addFace([229, 273, 178, 463, 464, 465], 86)
    femmesh.addFace([198, 280, 237, 466, 335, 467], 87)
    femmesh.addFace([169, 240, 233, 468, 469, 470], 88)
    femmesh.addFace([233, 240, 198, 469, 471, 472], 89)
    femmesh.addFace([12, 258, 13, 345, 360, 20], 90)
    femmesh.addFace([15, 259, 16, 364, 346, 23], 91)
    femmesh.addFace([207, 252, 13, 440, 473, 362], 92)
    femmesh.addFace([206, 239, 182, 447, 474, 475], 93)
    femmesh.addFace([179, 227, 210, 476, 477, 427], 94)
    femmesh.addFace([182, 304, 206, 478, 479, 475], 95)
    femmesh.addFace([228, 285, 31, 480, 481, 482], 96)
    femmesh.addFace([65, 261, 64, 325, 483, 76], 97)
    femmesh.addFace([202, 273, 229, 484, 463, 485], 98)
    femmesh.addFace([208, 284, 177, 385, 486, 376], 99)
    femmesh.addFace([5, 224, 70, 487, 488, 82], 100)
    femmesh.addFace([169, 236, 217, 489, 490, 491], 101)
    femmesh.addFace([217, 236, 184, 490, 492, 493], 102)
    femmesh.addFace([196, 281, 230, 494, 392, 495], 103)
    femmesh.addFace([224, 268, 70, 496, 497, 488], 104)
    femmesh.addFace([214, 287, 3, 498, 499, 402], 105)
    femmesh.addFace([209, 266, 68, 500, 501, 381], 106)
    femmesh.addFace([85, 238, 84, 417, 502, 92], 107)
    femmesh.addFace([188, 266, 209, 503, 500, 348], 108)
    femmesh.addFace([69, 268, 209, 504, 505, 380], 109)
    femmesh.addFace([209, 268, 190, 505, 506, 507], 110)
    femmesh.addFace([189, 267, 222, 508, 430, 509], 111)
    femmesh.addFace([181, 236, 223, 510, 511, 444], 112)
    femmesh.addFace([208, 235, 176, 512, 329, 386], 113)
    femmesh.addFace([181, 307, 236, 513, 514, 510], 114)
    femmesh.addFace([221, 266, 188, 437, 503, 515], 115)
    femmesh.addFace([186, 225, 219, 516, 450, 517], 116)
    femmesh.addFace([168, 313, 225, 518, 519, 520], 117)
    femmesh.addFace([215, 229, 178, 521, 465, 522], 118)
    femmesh.addFace([30, 228, 31, 523, 482, 46], 119)
    femmesh.addFace([213, 295, 183, 524, 525, 526], 120)
    femmesh.addFace([225, 313, 208, 519, 527, 375], 121)
    femmesh.addFace([190, 279, 209, 528, 340, 507], 122)
    femmesh.addFace([218, 276, 42, 529, 530, 435], 123)
    femmesh.addFace([36, 220, 37, 531, 532, 52], 124)
    femmesh.addFace([192, 264, 259, 533, 321, 534], 125)
    femmesh.addFace([258, 263, 191, 319, 535, 536], 126)
    femmesh.addFace([223, 233, 183, 537, 538, 442], 127)
    femmesh.addFace([169, 233, 223, 470, 537, 539], 128)
    femmesh.addFace([231, 253, 187, 540, 541, 542], 129)
    femmesh.addFace([199, 285, 228, 543, 480, 544], 130)
    femmesh.addFace([43, 276, 216, 545, 546, 547], 131)
    femmesh.addFace([212, 245, 87, 548, 549, 334], 132)
    femmesh.addFace([180, 303, 255, 550, 551, 357], 133)
    femmesh.addFace([182, 249, 230, 399, 552, 553], 134)
    femmesh.addFace([230, 249, 196, 552, 554, 495], 135)
    femmesh.addFace([188, 257, 221, 555, 556, 515], 136)
    femmesh.addFace([221, 257, 179, 556, 557, 558], 137)
    femmesh.addFace([253, 275, 187, 559, 560, 541], 138)
    femmesh.addFace([170, 241, 210, 561, 425, 562], 139)
    femmesh.addFace([212, 248, 181, 333, 563, 445], 140)
    femmesh.addFace([62, 232, 222, 564, 414, 448], 141)
    femmesh.addFace([190, 268, 224, 506, 496, 565], 142)
    femmesh.addFace([210, 310, 170, 566, 567, 562], 143)
    femmesh.addFace([175, 302, 213, 568, 569, 570], 144)
    femmesh.addFace([216, 276, 181, 546, 571, 572], 145)
    femmesh.addFace([193, 270, 232, 573, 409, 574], 146)
    femmesh.addFace([214, 267, 189, 575, 508, 576], 147)
    femmesh.addFace([60, 267, 214, 577, 575, 401], 148)
    femmesh.addFace([181, 276, 218, 571, 529, 578], 149)
    femmesh.addFace([241, 312, 179, 579, 580, 426], 150)
    femmesh.addFace([39, 244, 211, 581, 582, 327], 151)
    femmesh.addFace([1, 263, 11, 583, 318, 18], 152)
    femmesh.addFace([26, 263, 1, 584, 583, 27], 153)
    femmesh.addFace([17, 264, 2, 322, 585, 25], 154)
    femmesh.addFace([2, 264, 29, 585, 586, 44], 155)
    femmesh.addFace([211, 291, 184, 587, 588, 422], 156)
    femmesh.addFace([192, 286, 228, 589, 590, 591], 157)
    femmesh.addFace([32, 226, 33, 592, 593, 48], 158)
    femmesh.addFace([181, 248, 216, 563, 371, 572], 159)
    femmesh.addFace([201, 282, 261, 594, 595, 324], 160)
    femmesh.addFace([183, 256, 213, 596, 597, 526], 161)
    femmesh.addFace([213, 256, 175, 597, 598, 570], 162)
    femmesh.addFace([183, 245, 212, 599, 548, 443], 163)
    femmesh.addFace([191, 287, 214, 600, 498, 351], 164)
    femmesh.addFace([262, 289, 194, 388, 601, 428], 165)
    femmesh.addFace([42, 276, 43, 530, 545, 58], 166)
    femmesh.addFace([235, 246, 185, 602, 603, 604], 167)
    femmesh.addFace([195, 293, 220, 605, 606, 607], 168)
    femmesh.addFace([278, 286, 192, 608, 589, 609], 169)
    femmesh.addFace([26, 287, 263, 610, 611, 584], 170)
    femmesh.addFace([63, 232, 62, 612, 564, 74], 171)
    femmesh.addFace([170, 309, 215, 613, 614, 407], 172)
    femmesh.addFace([189, 301, 214, 615, 352, 576], 173)
    femmesh.addFace([220, 293, 177, 606, 616, 617], 174)
    femmesh.addFace([85, 295, 213, 618, 524, 418], 175)
    femmesh.addFace([34, 234, 35, 379, 619, 50], 176)
    femmesh.addFace([185, 280, 240, 620, 621, 622], 177)
    femmesh.addFace([220, 290, 195, 623, 624, 607], 178)
    femmesh.addFace([183, 295, 245, 525, 625, 599], 179)
    femmesh.addFace([284, 299, 203, 424, 626, 627], 180)
    femmesh.addFace([3, 287, 26, 499, 610, 28], 181)
    femmesh.addFace([36, 290, 220, 628, 623, 531], 182)
    femmesh.addFace([217, 296, 169, 344, 629, 491], 183)
    femmesh.addFace([235, 313, 246, 630, 631, 602], 184)
    femmesh.addFace([219, 253, 186, 632, 633, 517], 185)
    femmesh.addFace([171, 253, 219, 634, 632, 635], 186)
    femmesh.addFace([222, 273, 189, 636, 637, 509], 187)
    femmesh.addFace([178, 273, 222, 464, 636, 415], 188)
    femmesh.addFace([14, 239, 15, 638, 446, 22], 189)
    femmesh.addFace([89, 306, 216, 639, 640, 373], 190)
    femmesh.addFace([216, 306, 43, 640, 641, 547], 191)
    femmesh.addFace([251, 280, 185, 642, 620, 643], 192)
    femmesh.addFace([6, 271, 98, 644, 645, 100], 193)
    femmesh.addFace([83, 271, 6, 646, 644, 90], 194)
    femmesh.addFace([41, 247, 218, 647, 648, 434], 195)
    femmesh.addFace([218, 247, 184, 648, 421, 649], 196)
    femmesh.addFace([223, 236, 169, 511, 489, 539], 197)
    femmesh.addFace([225, 250, 168, 650, 651, 520], 198)
    femmesh.addFace([261, 282, 193, 595, 652, 653], 199)
    femmesh.addFace([87, 245, 86, 549, 654, 94], 200)
    femmesh.addFace([239, 252, 182, 655, 439, 474], 201)
    femmesh.addFace([218, 307, 181, 656, 513, 578], 202)
    femmesh.addFace([221, 242, 66, 657, 658, 449], 203)
    femmesh.addFace([184, 291, 217, 588, 659, 493], 204)
    femmesh.addFace([186, 250, 225, 660, 650, 516], 205)
    femmesh.addFace([179, 257, 227, 557, 661, 476], 206)
    femmesh.addFace([178, 270, 215, 410, 405, 522], 207)
    femmesh.addFace([227, 257, 200, 661, 662, 368], 208)
    femmesh.addFace([237, 279, 190, 383, 528, 663], 209)
    femmesh.addFace([32, 285, 226, 664, 665, 592], 210)
    femmesh.addFace([184, 307, 218, 666, 656, 649], 211)
    femmesh.addFace([217, 291, 176, 659, 667, 331], 212)
    femmesh.addFace([213, 302, 238, 569, 668, 416], 213)
    femmesh.addFace([179, 312, 221, 580, 669, 558], 214)
    femmesh.addFace([37, 283, 38, 670, 452, 53], 215)
    femmesh.addFace([33, 262, 34, 671, 377, 49], 216)
    femmesh.addFace([185, 296, 235, 672, 343, 604], 217)
    femmesh.addFace([200, 269, 251, 673, 674, 675], 218)
    femmesh.addFace([219, 308, 171, 676, 677, 635], 219)
    femmesh.addFace([177, 293, 219, 616, 678, 451], 220)
    femmesh.addFace([168, 250, 227, 651, 679, 370], 221)
    femmesh.addFace([227, 250, 210, 679, 680, 477], 222)
    femmesh.addFace([177, 311, 220, 681, 682, 617], 223)
    femmesh.addFace([215, 309, 305, 614, 683, 684], 224)
    femmesh.addFace([305, 309, 187, 683, 685, 686], 225)
    femmesh.addFace([204, 281, 243, 393, 687, 390], 226)
    femmesh.addFace([243, 281, 167, 687, 688, 689], 227)
    femmesh.addFace([220, 283, 37, 690, 670, 532], 228)
    femmesh.addFace([251, 269, 172, 674, 460, 691], 229)
    femmesh.addFace([224, 292, 197, 692, 693, 457], 230)
    femmesh.addFace([5, 292, 224, 694, 692, 487], 231)
    femmesh.addFace([167, 300, 229, 695, 696, 697], 232)
    femmesh.addFace([229, 300, 202, 696, 698, 485], 233)
    femmesh.addFace([66, 242, 65, 658, 355, 77], 234)
    femmesh.addFace([285, 303, 226, 699, 700, 665], 235)
    femmesh.addFace([38, 244, 39, 454, 581, 54], 236)
    femmesh.addFace([230, 304, 182, 701, 478, 553], 237)
    femmesh.addFace([202, 300, 294, 698, 702, 703], 238)
    femmesh.addFace([224, 274, 190, 459, 704, 565], 239)
    femmesh.addFace([169, 296, 240, 629, 705, 468], 240)
    femmesh.addFace([226, 262, 33, 389, 671, 593], 241)
    femmesh.addFace([40, 247, 41, 339, 647, 56], 242)
    femmesh.addFace([241, 282, 201, 706, 594, 707], 243)
    femmesh.addFace([277, 301, 173, 353, 708, 413], 244)
    femmesh.addFace([201, 312, 241, 709, 579, 707], 245)
    femmesh.addFace([193, 282, 270, 652, 419, 573], 246)
    femmesh.addFace([229, 305, 167, 710, 711, 697], 247)
    femmesh.addFace([13, 252, 14, 473, 712, 21], 248)
    femmesh.addFace([211, 299, 291, 713, 714, 587], 249)
    femmesh.addFace([294, 300, 196, 702, 715, 716], 250)
    femmesh.addFace([64, 298, 63, 717, 718, 75], 251)
    femmesh.addFace([170, 310, 231, 567, 719, 720], 252)
    femmesh.addFace([226, 303, 180, 700, 550, 433], 253)
    femmesh.addFace([255, 303, 199, 551, 721, 722], 254)
    femmesh.addFace([265, 275, 205, 723, 724, 725], 255)
    femmesh.addFace([187, 275, 265, 560, 723, 726], 256)
    femmesh.addFace([186, 253, 231, 633, 540, 727], 257)
    femmesh.addFace([200, 251, 246, 675, 728, 366], 258)
    femmesh.addFace([189, 273, 260, 637, 729, 730], 259)
    femmesh.addFace([260, 273, 202, 729, 484, 731], 260)
    femmesh.addFace([68, 266, 67, 501, 436, 79], 261)
    femmesh.addFace([61, 267, 60, 431, 577, 72], 262)
    femmesh.addFace([29, 288, 30, 732, 733, 45], 263)
    femmesh.addFace([198, 314, 233, 734, 735, 472], 264)
    femmesh.addFace([291, 299, 176, 714, 423, 667], 265)
    femmesh.addFace([186, 310, 250, 736, 737, 660], 266)
    femmesh.addFace([250, 310, 210, 737, 566, 680], 267)
    femmesh.addFace([263, 287, 191, 611, 600, 535], 268)
    femmesh.addFace([234, 254, 195, 738, 739, 740], 269)
    femmesh.addFace([70, 268, 69, 497, 504, 81], 270)
    femmesh.addFace([257, 269, 200, 741, 673, 662], 271)
    femmesh.addFace([188, 269, 257, 347, 741, 555], 272)
    femmesh.addFace([233, 256, 183, 742, 596, 538], 273)
    femmesh.addFace([86, 295, 85, 743, 618, 93], 274)
    femmesh.addFace([194, 254, 234, 744, 738, 429], 275)
    femmesh.addFace([30, 288, 228, 733, 745, 523], 276)
    femmesh.addFace([228, 288, 192, 745, 746, 591], 277)
    femmesh.addFace([236, 307, 184, 514, 666, 492], 278)
    femmesh.addFace([234, 290, 35, 747, 748, 619], 279)
    femmesh.addFace([195, 290, 234, 624, 747, 740], 280)
    femmesh.addFace([14, 252, 239, 712, 655, 638], 281)
    femmesh.addFace([228, 286, 199, 590, 749, 544], 282)
    femmesh.addFace([221, 312, 242, 669, 750, 657], 283)
    femmesh.addFace([43, 306, 4, 641, 751, 59], 284)
    femmesh.addFace([4, 306, 89, 751, 639, 97], 285)
    femmesh.addFace([98, 292, 5, 752, 694, 99], 286)
    femmesh.addFace([167, 305, 265, 711, 753, 754], 287)
    femmesh.addFace([246, 251, 185, 728, 643, 603], 288)
    femmesh.addFace([167, 265, 243, 754, 755, 689], 289)
    femmesh.addFace([243, 265, 205, 755, 725, 394], 290)
    femmesh.addFace([31, 285, 32, 481, 664, 47], 291)
    femmesh.addFace([35, 290, 36, 748, 628, 51], 292)
    femmesh.addFace([171, 275, 253, 756, 559, 634], 293)
    femmesh.addFace([272, 275, 171, 757, 756, 758], 294)
    femmesh.addFace([205, 275, 272, 724, 757, 759], 295)
    femmesh.addFace([215, 305, 229, 684, 710, 521], 296)
    femmesh.addFace([202, 294, 260, 703, 760, 731], 297)
    femmesh.addFace([84, 297, 83, 761, 762, 91], 298)
    femmesh.addFace([274, 302, 175, 458, 568, 763], 299)
    femmesh.addFace([233, 314, 256, 735, 764, 742], 300)
    femmesh.addFace([187, 309, 231, 685, 765, 542], 301)
    femmesh.addFace([240, 280, 198, 621, 466, 471], 302)
    femmesh.addFace([237, 274, 175, 766, 763, 767], 303)
    femmesh.addFace([190, 274, 237, 704, 766, 663], 304)
    femmesh.addFace([260, 301, 189, 768, 615, 730], 305)
    femmesh.addFace([174, 286, 278, 769, 608, 770], 306)
    femmesh.addFace([261, 298, 64, 771, 717, 483], 307)
    femmesh.addFace([199, 303, 285, 721, 699, 543], 308)
    femmesh.addFace([63, 298, 232, 718, 772, 612], 309)
    femmesh.addFace([232, 298, 193, 772, 773, 574], 310)
    femmesh.addFace([206, 278, 259, 774, 775, 363], 311)
    femmesh.addFace([259, 278, 192, 775, 609, 534], 312)
    femmesh.addFace([191, 277, 258, 350, 776, 536], 313)
    femmesh.addFace([258, 277, 207, 776, 412, 361], 314)
    femmesh.addFace([174, 304, 230, 777, 701, 315], 315)
    femmesh.addFace([246, 313, 168, 631, 518, 369], 316)
    femmesh.addFace([244, 299, 211, 778, 713, 582], 317)
    femmesh.addFace([255, 286, 174, 779, 769, 408], 318)
    femmesh.addFace([199, 286, 255, 749, 779, 722], 319)
    femmesh.addFace([231, 310, 186, 719, 736, 727], 320)
    femmesh.addFace([231, 309, 170, 765, 613, 720], 321)
    femmesh.addFace([264, 288, 29, 780, 732, 586], 322)
    femmesh.addFace([197, 297, 238, 781, 782, 783], 323)
    femmesh.addFace([170, 282, 241, 420, 706, 561], 324)
    femmesh.addFace([254, 272, 171, 784, 758, 785], 325)
    femmesh.addFace([194, 272, 254, 786, 784, 744], 326)
    femmesh.addFace([272, 289, 205, 787, 403, 759], 327)
    femmesh.addFace([249, 294, 196, 788, 716, 554], 328)
    femmesh.addFace([173, 294, 249, 789, 788, 396], 329)
    femmesh.addFace([240, 296, 185, 705, 672, 622], 330)
    femmesh.addFace([208, 313, 235, 527, 630, 512], 331)
    femmesh.addFace([175, 314, 237, 790, 791, 767], 332)
    femmesh.addFace([237, 314, 198, 791, 734, 467], 333)
    femmesh.addFace([238, 297, 84, 782, 761, 502], 334)
    femmesh.addFace([172, 280, 251, 336, 642, 691], 335)
    femmesh.addFace([238, 302, 197, 668, 455, 783], 336)
    femmesh.addFace([245, 295, 86, 625, 743, 654], 337)
    femmesh.addFace([196, 300, 281, 715, 792, 494], 338)
    femmesh.addFace([203, 299, 244, 626, 778, 462], 339)
    femmesh.addFace([192, 288, 264, 746, 780, 533], 340)
    femmesh.addFace([254, 308, 195, 793, 794, 739], 341)
    femmesh.addFace([271, 292, 98, 795, 752, 645], 342)
    femmesh.addFace([242, 312, 201, 750, 709, 356], 343)
    femmesh.addFace([206, 304, 278, 479, 796, 774], 344)
    femmesh.addFace([278, 304, 174, 796, 777, 770], 345)
    femmesh.addFace([260, 294, 173, 760, 789, 797], 346)
    femmesh.addFace([271, 297, 197, 798, 781, 799], 347)
    femmesh.addFace([193, 298, 261, 773, 771, 653], 348)
    femmesh.addFace([194, 289, 272, 601, 787, 786], 349)
    femmesh.addFace([197, 292, 271, 693, 795, 799], 350)
    femmesh.addFace([173, 301, 260, 708, 768, 797], 351)
    femmesh.addFace([171, 308, 254, 677, 793, 785], 352)
    femmesh.addFace([83, 297, 271, 762, 798, 646], 353)
    femmesh.addFace([256, 314, 175, 764, 790, 598], 354)
    femmesh.addFace([265, 305, 187, 753, 686, 726], 355)
    femmesh.addFace([293, 308, 219, 800, 676, 678], 356)
    femmesh.addFace([195, 308, 293, 794, 800, 605], 357)
    femmesh.addFace([281, 300, 167, 792, 695, 688], 358)
    femmesh.addFace([220, 311, 283, 682, 801, 690], 359)
    femmesh.addFace([283, 311, 203, 801, 802, 461], 360)
    femmesh.addFace([203, 311, 284, 802, 803, 627], 361)
    femmesh.addFace([284, 311, 177, 803, 681, 486], 362)
    # -- second mesh region (node ids jump to the 804+ range) --
    femmesh.addFace([121, 808, 120, 814, 815, 128], 363)
    femmesh.addFace([123, 809, 122, 816, 817, 130], 364)
    femmesh.addFace([805, 808, 121, 818, 814, 819], 365)
    femmesh.addFace([120, 808, 804, 815, 820, 821], 366)
    femmesh.addFace([122, 809, 805, 817, 822, 823], 367)
    femmesh.addFace([806, 809, 123, 824, 816, 825], 368)
    femmesh.addFace([806, 807, 106, 826, 827, 828], 369)
    femmesh.addFace([124, 807, 806, 829, 826, 830], 370)
    femmesh.addFace([106, 807, 107, 827, 831, 114], 371)
    femmesh.addFace([125, 807, 124, 832, 829, 132], 372)
    femmesh.addFace([101, 804, 102, 833, 834, 109], 373)
    femmesh.addFace([103, 805, 104, 835, 836, 111], 374)
    femmesh.addFace([105, 806, 106, 837, 828, 113], 375)
    femmesh.addFace([120, 804, 119, 821, 838, 127], 376)
    femmesh.addFace([122, 805, 121, 823, 819, 129], 377)
    femmesh.addFace([124, 806, 123, 830, 825, 131], 378)
    femmesh.addFace([805, 809, 104, 822, 839, 836], 379)
    femmesh.addFace([103, 808, 805, 840, 818, 835], 380)
    femmesh.addFace([105, 809, 806, 841, 824, 837], 381)
    femmesh.addFace([804, 808, 102, 820, 842, 834], 382)
    femmesh.addFace([102, 808, 103, 842, 840, 110], 383)
    femmesh.addFace([104, 809, 105, 839, 841, 112], 384)
    femmesh.addFace([804, 813, 119, 843, 844, 838], 385)
    femmesh.addFace([7, 810, 101, 845, 846, 108], 386)
    femmesh.addFace([116, 810, 7, 847, 845, 117], 387)
    femmesh.addFace([101, 810, 804, 846, 848, 833], 388)
    femmesh.addFace([125, 812, 807, 849, 850, 832], 389)
    femmesh.addFace([807, 811, 107, 851, 852, 831], 390)
    femmesh.addFace([1, 811, 26, 853, 854, 27], 391)
    femmesh.addFace([107, 811, 1, 852, 853, 115], 392)
    femmesh.addFace([810, 813, 804, 855, 843, 848], 393)
    femmesh.addFace([26, 812, 3, 856, 857, 28], 394)
    femmesh.addFace([3, 812, 125, 857, 849, 133], 395)
    femmesh.addFace([8, 813, 116, 858, 859, 118], 396)
    femmesh.addFace([119, 813, 8, 844, 858, 126], 397)
    femmesh.addFace([807, 812, 811, 850, 860, 851], 398)
    femmesh.addFace([116, 813, 810, 859, 855, 847], 399)
    femmesh.addFace([811, 812, 26, 860, 856, 854], 400)
    # -- third mesh region (node ids in the 861+ range) --
    femmesh.addFace([154, 865, 153, 871, 872, 161], 401)
    femmesh.addFace([156, 866, 155, 873, 874, 163], 402)
    femmesh.addFace([862, 865, 154, 875, 871, 876], 403)
    femmesh.addFace([863, 866, 156, 877, 873, 878], 404)
    femmesh.addFace([153, 865, 861, 872, 879, 880], 405)
    femmesh.addFace([155, 866, 862, 874, 881, 882], 406)
    femmesh.addFace([157, 864, 863, 883, 884, 885], 407)
    femmesh.addFace([863, 864, 139, 884, 886, 887], 408)
    femmesh.addFace([139, 864, 140, 886, 888, 147], 409)
    femmesh.addFace([158, 864, 157, 889, 883, 165], 410)
    femmesh.addFace([134, 861, 135, 890, 891, 142], 411)
    femmesh.addFace([136, 862, 137, 892, 893, 144], 412)
    femmesh.addFace([138, 863, 139, 894, 887, 146], 413)
    femmesh.addFace([153, 861, 152, 880, 895, 160], 414)
    femmesh.addFace([155, 862, 154, 882, 876, 162], 415)
    femmesh.addFace([157, 863, 156, 885, 878, 164], 416)
    femmesh.addFace([862, 866, 137, 881, 896, 893], 417)
    femmesh.addFace([861, 865, 135, 879, 897, 891], 418)
    femmesh.addFace([136, 865, 862, 898, 875, 892], 419)
    femmesh.addFace([138, 866, 863, 899, 877, 894], 420)
    femmesh.addFace([135, 865, 136, 897, 898, 143], 421)
    femmesh.addFace([137, 866, 138, 896, 899, 145], 422)
    femmesh.addFace([861, 870, 152, 900, 901, 895], 423)
    femmesh.addFace([9, 867, 134, 902, 903, 141], 424)
    femmesh.addFace([149, 867, 9, 904, 902, 150], 425)
    femmesh.addFace([134, 867, 861, 903, 905, 890], 426)
    femmesh.addFace([158, 869, 864, 906, 907, 889], 427)
    femmesh.addFace([864, 868, 140, 908, 909, 888], 428)
    femmesh.addFace([5, 868, 98, 910, 911, 99], 429)
    femmesh.addFace([140, 868, 5, 909, 910, 148], 430)
    femmesh.addFace([867, 870, 861, 912, 900, 905], 431)
    femmesh.addFace([98, 869, 6, 913, 914, 100], 432)
    femmesh.addFace([6, 869, 158, 914, 906, 166], 433)
    femmesh.addFace([10, 870, 149, 915, 916, 151], 434)
    femmesh.addFace([152, 870, 10, 901, 915, 159], 435)
    femmesh.addFace([864, 869, 868, 907, 917, 908], 436)
    femmesh.addFace([149, 870, 867, 916, 912, 904], 437)
    femmesh.addFace([868, 869, 98, 917, 913, 911], 438)
    return True
| lgpl-2.1 |
beni55/scrapy | tests/test_dupefilter.py | 25 | 1609 | import hashlib
import unittest
from scrapy.dupefilter import RFPDupeFilter
from scrapy.http import Request
class RFPDupeFilterTest(unittest.TestCase):
    """Tests for :class:`RFPDupeFilter`, the request-fingerprint dupe filter."""

    def test_filter(self):
        """A request is "seen" once its fingerprint has been recorded."""
        dupefilter = RFPDupeFilter()
        dupefilter.open()

        r1 = Request('http://scrapytest.org/1')
        r2 = Request('http://scrapytest.org/2')
        r3 = Request('http://scrapytest.org/2')

        # First sighting of each distinct URL is new ...
        assert not dupefilter.request_seen(r1)
        # ... and a repeat (same object, or a different object with the
        # same fingerprint) is reported as seen.
        assert dupefilter.request_seen(r1)
        assert not dupefilter.request_seen(r2)
        assert dupefilter.request_seen(r3)

        dupefilter.close('finished')

    def test_request_fingerprint(self):
        """Test if customization of request_fingerprint method will change
        output of request_seen.
        """
        r1 = Request('http://scrapytest.org/index.html')
        r2 = Request('http://scrapytest.org/INDEX.html')

        # Default fingerprint is case-sensitive: the two URLs are distinct.
        dupefilter = RFPDupeFilter()
        dupefilter.open()
        assert not dupefilter.request_seen(r1)
        assert not dupefilter.request_seen(r2)
        dupefilter.close('finished')

        class CaseInsensitiveRFPDupeFilter(RFPDupeFilter):

            def request_fingerprint(self, request):
                fp = hashlib.sha1()
                # BUGFIX: hashlib.update() requires bytes on Python 3
                # (passing str raises TypeError), so encode the lowercased
                # URL.  On Python 2 the UTF-8 encoding of this ASCII URL is
                # byte-identical to the original str, so digests match.
                fp.update(request.url.lower().encode('utf-8'))
                return fp.hexdigest()

        # Lowercasing collapses both URLs to one fingerprint.
        case_insensitive_dupefilter = CaseInsensitiveRFPDupeFilter()
        case_insensitive_dupefilter.open()
        assert not case_insensitive_dupefilter.request_seen(r1)
        assert case_insensitive_dupefilter.request_seen(r2)
        case_insensitive_dupefilter.close('finished')
| bsd-3-clause |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/matplotlib/dates.py | 6 | 52305 | """
Matplotlib provides sophisticated date plotting capabilities, standing on the
shoulders of python :mod:`datetime`, the add-on modules :mod:`pytz` and
:mod:`dateutil`. :class:`datetime` objects are converted to floating point
numbers which represent time in days since 0001-01-01 UTC, plus 1. For
example, 0001-01-01, 06:00 is 1.25, not 0.25. The helper functions
:func:`date2num`, :func:`num2date` and :func:`drange` are used to facilitate
easy conversion to and from :mod:`datetime` and numeric ranges.
.. note::
Like Python's datetime, mpl uses the Gregorian calendar for all
conversions between dates and floating point numbers. This practice
is not universal, and calendar differences can cause confusing
differences between what Python and mpl give as the number of days
since 0001-01-01 and what other software and databases yield. For
example, the US Naval Observatory uses a calendar that switches
from Julian to Gregorian in October, 1582. Hence, using their
calculator, the number of days between 0001-01-01 and 2006-04-01 is
732403, whereas using the Gregorian calendar via the datetime
module we find::
In [31]:date(2006,4,1).toordinal() - date(1,1,1).toordinal()
Out[31]:732401
A wide range of specific and general purpose date tick locators and
formatters are provided in this module. See
:mod:`matplotlib.ticker` for general information on tick locators
and formatters. These are described below.
All the matplotlib date converters, tickers and formatters are
timezone aware, and the default timezone is given by the timezone
parameter in your :file:`matplotlibrc` file. If you leave out a
:class:`tz` timezone instance, the default from your rc file will be
assumed. If you want to use a custom time zone, pass a
:class:`pytz.timezone` instance with the tz keyword argument to
:func:`num2date`, :func:`plot_date`, and any custom date tickers or
locators you create. See `pytz <http://pythonhosted.org/pytz/>`_ for
information on :mod:`pytz` and timezone handling.
The `dateutil module <https://dateutil.readthedocs.io/en/stable/>`_ provides
additional code to handle date ticking, making it easy to place ticks
on any kinds of dates. See examples below.
Date tickers
------------
Most of the date tickers can locate single or multiple values. For
example::
# import constants for the days of the week
from matplotlib.dates import MO, TU, WE, TH, FR, SA, SU
# tick on mondays every week
loc = WeekdayLocator(byweekday=MO, tz=tz)
# tick on mondays and saturdays
loc = WeekdayLocator(byweekday=(MO, SA))
In addition, most of the constructors take an interval argument::
# tick on mondays every second week
loc = WeekdayLocator(byweekday=MO, interval=2)
The rrule locator allows completely general date ticking::
# tick every 5th easter
rule = rrulewrapper(YEARLY, byeaster=1, interval=5)
loc = RRuleLocator(rule)
Here are all the date tickers:
* :class:`MinuteLocator`: locate minutes
* :class:`HourLocator`: locate hours
* :class:`DayLocator`: locate specified days of the month
* :class:`WeekdayLocator`: Locate days of the week, e.g., MO, TU
* :class:`MonthLocator`: locate months, e.g., 7 for july
* :class:`YearLocator`: locate years that are multiples of base
* :class:`RRuleLocator`: locate using a
:class:`matplotlib.dates.rrulewrapper`. The
:class:`rrulewrapper` is a simple wrapper around a
:class:`dateutil.rrule` (`dateutil
<https://dateutil.readthedocs.io/en/stable/>`_) which allow almost
arbitrary date tick specifications. See `rrule example
<../examples/pylab_examples/date_demo_rrule.html>`_.
* :class:`AutoDateLocator`: On autoscale, this class picks the best
:class:`MultipleDateLocator` to set the view limits and the tick
locations.
Date formatters
---------------
Here are all the date formatters:
* :class:`AutoDateFormatter`: attempts to figure out the best format
to use. This is most useful when used with the :class:`AutoDateLocator`.
* :class:`DateFormatter`: use :func:`strftime` format strings
* :class:`IndexDateFormatter`: date plots with implicit *x*
indexing.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import zip
from matplotlib import rcParams
import re
import time
import math
import datetime
import warnings
from dateutil.rrule import (rrule, MO, TU, WE, TH, FR, SA, SU, YEARLY,
MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY,
SECONDLY)
from dateutil.relativedelta import relativedelta
import dateutil.parser
import numpy as np
import matplotlib
import matplotlib.units as units
import matplotlib.cbook as cbook
import matplotlib.ticker as ticker
__all__ = ('date2num', 'num2date', 'drange', 'epoch2num',
'num2epoch', 'mx2num', 'DateFormatter',
'IndexDateFormatter', 'AutoDateFormatter', 'DateLocator',
'RRuleLocator', 'AutoDateLocator', 'YearLocator',
'MonthLocator', 'WeekdayLocator',
'DayLocator', 'HourLocator', 'MinuteLocator',
'SecondLocator', 'MicrosecondLocator',
'rrule', 'MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU',
'YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY',
'HOURLY', 'MINUTELY', 'SECONDLY', 'MICROSECONDLY', 'relativedelta',
'seconds', 'minutes', 'hours', 'weeks')
# Make a simple UTC instance so we don't always have to import
# pytz. From the python datetime library docs:
class _UTC(datetime.tzinfo):
    """Fixed tzinfo for Coordinated Universal Time: zero offset, no DST."""

    # Single shared zero delta; tzinfo methods may return the same object.
    _ZERO = datetime.timedelta(0)

    def utcoffset(self, dt):
        # UTC is, by definition, at zero offset from UTC.
        return self._ZERO

    def tzname(self, dt):
        return str("UTC")

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return self._ZERO


# Module-level singleton used throughout this module.
UTC = _UTC()
def _get_rc_timezone():
    """
    Return the timezone configured by ``rcParams['timezone']``.

    For the default ``'UTC'`` setting the module-level :data:`UTC` singleton
    is returned; any other name is resolved through :mod:`pytz`, which is
    imported lazily so it is only required for non-UTC configurations.
    """
    name = matplotlib.rcParams['timezone']
    if name == 'UTC':
        return UTC
    import pytz
    return pytz.timezone(name)
"""
Time-related constants.
"""
EPOCH_OFFSET = float(datetime.datetime(1970, 1, 1).toordinal())
JULIAN_OFFSET = 1721424.5 # Julian date at 0001-01-01
MICROSECONDLY = SECONDLY + 1
HOURS_PER_DAY = 24.
MIN_PER_HOUR = 60.
SEC_PER_MIN = 60.
MONTHS_PER_YEAR = 12.
DAYS_PER_WEEK = 7.
DAYS_PER_MONTH = 30.
DAYS_PER_YEAR = 365.0
MINUTES_PER_DAY = MIN_PER_HOUR * HOURS_PER_DAY
SEC_PER_HOUR = SEC_PER_MIN * MIN_PER_HOUR
SEC_PER_DAY = SEC_PER_HOUR * HOURS_PER_DAY
SEC_PER_WEEK = SEC_PER_DAY * DAYS_PER_WEEK
MUSECONDS_PER_DAY = 1e6 * SEC_PER_DAY
MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY = (
MO, TU, WE, TH, FR, SA, SU)
WEEKDAYS = (MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY)
def _to_ordinalf(dt):
    """
    Convert a :mod:`datetime` or :mod:`date` to a :func:`float` count of
    Gregorian days since 0001-01-01 (UTC), with hours, minutes, seconds and
    microseconds preserved in the fractional part.
    """
    # Normalize timezone-aware datetimes to UTC before measuring.
    tz = getattr(dt, 'tzinfo', None)
    if tz is not None:
        dt = dt.astimezone(UTC)
        tz = UTC

    ordinal = float(dt.toordinal())

    # Plain ``date`` objects have no ``date()`` method and carry no time of
    # day, so only datetime-like inputs contribute a fractional component.
    day = getattr(dt, 'date', lambda: None)()
    if day is None:
        return ordinal

    # Add the fraction of the day elapsed since midnight (same timezone).
    midnight = datetime.datetime.combine(day, datetime.time(0, tzinfo=tz))
    return ordinal + (dt - midnight).total_seconds() / SEC_PER_DAY


# a version of _to_ordinalf that can operate on numpy arrays
_to_ordinalf_np_vectorized = np.vectorize(_to_ordinalf)
def _from_ordinalf(x, tz=None):
    """
    Convert Gregorian float of the date, preserving hours, minutes,
    seconds and microseconds.  Return value is a :class:`datetime`.

    The input date `x` is a float in ordinal days at UTC, and the output will
    be the specified :class:`datetime` object corresponding to that time in
    timezone `tz`, or if `tz` is `None`, in the timezone specified in
    `rcParams['timezone']`.
    """
    if tz is None:
        tz = _get_rc_timezone()
    # Split into whole days (handled exactly by fromordinal) ...
    ix = int(x)
    dt = datetime.datetime.fromordinal(ix).replace(tzinfo=UTC)
    # ... and the fractional day, added back as whole microseconds.
    remainder = float(x) - ix
    # Round down to the nearest microsecond.
    dt += datetime.timedelta(microseconds=int(remainder * MUSECONDS_PER_DAY))
    # Compensate for rounding errors: values within 10 microseconds of an
    # exact second are snapped to that second, in either direction.
    if dt.microsecond < 10:
        dt = dt.replace(microsecond=0)
    elif dt.microsecond > 999990:
        dt += datetime.timedelta(microseconds=1e6 - dt.microsecond)
    # Convert from UTC to the requested output timezone only at the end.
    return dt.astimezone(tz)


# a version of _from_ordinalf that can operate on numpy arrays
_from_ordinalf_np_vectorized = np.vectorize(_from_ordinalf)
class strpdate2num(object):
    """
    Callable that parses date strings of a known :func:`strptime` format
    into matplotlib datenums.
    """

    def __init__(self, fmt):
        """fmt: any valid strptime format is supported"""
        self.fmt = fmt

    def __call__(self, s):
        """Parse *s* with the stored format; return a date2num float."""
        parsed = time.strptime(s, self.fmt)
        # Only (year, month, day, hour, minute, second) feed the datetime.
        return date2num(datetime.datetime(*parsed[:6]))
class bytespdate2num(strpdate2num):
    """
    Callable that decodes byte strings holding dates in a known
    :func:`strptime` format and converts them to matplotlib datenums.
    See :file:`examples/pylab_examples/load_converter.py`.
    """

    def __init__(self, fmt, encoding='utf-8'):
        """
        Args:
            fmt: any valid strptime format is supported
            encoding: encoding to use on byte input (default: 'utf-8')
        """
        super(bytespdate2num, self).__init__(fmt)
        self.encoding = encoding

    def __call__(self, b):
        """
        Args:
            b: byte input to be converted
        Returns:
            A date2num float
        """
        decoded = b.decode(self.encoding)
        return super(bytespdate2num, self).__call__(decoded)
# a version of dateutil.parser.parse that can operate on numpy arrays
_dateutil_parser_parse_np_vectorized = np.vectorize(dateutil.parser.parse)
def datestr2num(d, default=None):
    """
    Convert a date string (or a sequence of them) to a datenum using
    :func:`dateutil.parser.parse`.

    Parameters
    ----------
    d : string or sequence of strings
        The dates to convert.
    default : datetime instance
        The default date to use when fields are missing in `d`.
    """
    if cbook.is_string_like(d):
        return date2num(dateutil.parser.parse(d, default=default))
    # Sequence input: parse with the default (if any) before vectorizing.
    if default is not None:
        d = [dateutil.parser.parse(s, default=default) for s in d]
    d = np.asarray(d)
    if not d.size:
        return d
    return date2num(_dateutil_parser_parse_np_vectorized(d))
def date2num(d):
    """
    *d* is either a :class:`datetime` instance or a sequence of datetimes.

    Return value is a floating point number (or sequence of floats)
    which gives the number of days (fraction part represents hours,
    minutes, seconds) since 0001-01-01 00:00:00 UTC, *plus* *one*.
    The addition of one here is a historical artifact.  Also, note
    that the Gregorian calendar is assumed; this is not universal
    practice.  For details, see the module docstring.
    """
    if not cbook.iterable(d):
        return _to_ordinalf(d)
    arr = np.asarray(d)
    # Empty input passes straight through unchanged.
    if not arr.size:
        return arr
    return _to_ordinalf_np_vectorized(arr)
def julian2num(j):
    """
    Convert a Julian date (or sequence) to a matplotlib date (or sequence).
    """
    # Scalars pass through untouched; sequences become numpy arrays so the
    # subtraction broadcasts elementwise.
    if cbook.iterable(j):
        j = np.asarray(j)
    return j - JULIAN_OFFSET
def num2julian(n):
    """
    Convert a matplotlib date (or sequence) to a Julian date (or sequence).
    """
    # Scalars pass through untouched; sequences become numpy arrays so the
    # addition broadcasts elementwise.
    if cbook.iterable(n):
        n = np.asarray(n)
    return n + JULIAN_OFFSET
def num2date(x, tz=None):
    """
    *x* is a float value which gives the number of days
    (fraction part represents hours, minutes, seconds) since
    0001-01-01 00:00:00 UTC *plus* *one*.
    The addition of one here is a historical artifact.  Also, note
    that the Gregorian calendar is assumed; this is not universal
    practice.  For details, see the module docstring.

    Return value is a :class:`datetime` instance in timezone *tz*
    (default to rcparams TZ value).  If *x* is a sequence, a sequence of
    :class:`datetime` objects will be returned.
    """
    tz = _get_rc_timezone() if tz is None else tz
    if not cbook.iterable(x):
        return _from_ordinalf(x, tz)
    arr = np.asarray(x)
    # Empty input passes straight through unchanged.
    if not arr.size:
        return arr
    return _from_ordinalf_np_vectorized(arr, tz).tolist()
def drange(dstart, dend, delta):
    """
    Return a date range as float Gregorian ordinals.  *dstart* and
    *dend* are :class:`datetime` instances.  *delta* is a
    :class:`datetime.timedelta` instance.
    """
    start_ord = _to_ordinalf(dstart)
    end_ord = _to_ordinalf(dend)
    step = delta.total_seconds() / SEC_PER_DAY
    # Number of delta-sized steps covering [dstart, dend].
    num = int(np.ceil((end_ord - start_ord) / step))
    # End of the interval that would be generated.
    interval_end = dstart + num * delta
    # Ensure a half-open interval [dstart, dend) is generated: if the
    # computed endpoint is greater than or equal to dend, drop one delta.
    if interval_end >= dend:
        interval_end -= delta
        num -= 1
        end_ord = _to_ordinalf(interval_end)  # new float endpoint
    return np.linspace(start_ord, end_ord, num + 1)
### date tickers and formatters ###
class DateFormatter(ticker.Formatter):
    """
    Format a tick (a matplotlib date ordinal) with a :func:`strftime`
    format string.
    Python only supports :mod:`datetime` :func:`strftime` formatting
    for years greater than 1900.  Thanks to Andrew Dalke, Dalke
    Scientific Software who contributed the :func:`strftime` code
    below to include dates earlier than this year.
    """
    # Matches a "%s" directive that is not escaped by a preceding "%";
    # strftime() strips it because "%s" is not portable.
    illegal_s = re.compile(r"((^|[^%])(%%)*%s)")
    def __init__(self, fmt, tz=None):
        """
        *fmt* is a :func:`strftime` format string; *tz* is the
        :class:`tzinfo` instance (defaults to the rcParams timezone).
        """
        if tz is None:
            tz = _get_rc_timezone()
        self.fmt = fmt
        self.tz = tz
    def __call__(self, x, pos=0):
        # x == 0 is one day before 0001-01-01, an invalid ordinal; it
        # usually means the axis was never told it is plotting dates.
        if x == 0:
            raise ValueError('DateFormatter found a value of x=0, which is '
                             'an illegal date.  This usually occurs because '
                             'you have not informed the axis that it is '
                             'plotting dates, e.g., with ax.xaxis_date()')
        dt = num2date(x, self.tz)
        return self.strftime(dt, self.fmt)
    def set_tzinfo(self, tz):
        """Set the :class:`tzinfo` used when converting ordinals."""
        self.tz = tz
    def _replace_common_substr(self, s1, s2, sub1, sub2, replacement):
        """Helper function for replacing substrings sub1 and sub2
        located at the same indexes in strings s1 and s2 respectively,
        with the string replacement. It is expected that sub1 and sub2
        have the same length. Returns the pair s1, s2 after the
        substitutions.
        """
        # Find common indexes of substrings sub1 in s1 and sub2 in s2
        # and make substitutions inplace. Because this is inplace,
        # it is okay if len(replacement) != len(sub1), len(sub2).
        i = 0
        while True:
            j = s1.find(sub1, i)
            if j == -1:
                break
            i = j + 1
            # Only substitute where BOTH strings carry their respective
            # substring at the same index.
            if s2[j:j + len(sub2)] != sub2:
                continue
            s1 = s1[:j] + replacement + s1[j + len(sub1):]
            s2 = s2[:j] + replacement + s2[j + len(sub2):]
        return s1, s2
    def strftime_pre_1900(self, dt, fmt=None):
        """Call time.strftime for years before 1900 by rolling
        forward a multiple of 28 years.
        *fmt* is a :func:`strftime` format string.
        Dalke: I hope I did this math right.  Every 28 years the
        calendar repeats, except through century leap years excepting
        the 400 year leap years.  But only if you're using the Gregorian
        calendar.
        """
        if fmt is None:
            fmt = self.fmt
        # Since python's time module's strftime implementation does not
        # support %f microsecond (but the datetime module does), use a
        # regular expression substitution to replace instances of %f.
        # Note that this can be useful since python's floating-point
        # precision representation for datetime causes precision to be
        # more accurate closer to year 0 (around the year 2000, precision
        # can be at 10s of microseconds).
        fmt = re.sub(r'((^|[^%])(%%)*)%f',
                     r'\g<1>{0:06d}'.format(dt.microsecond), fmt)
        year = dt.year
        # For every non-leap year century, advance by
        # 6 years to get into the 28-year repeat cycle
        delta = 2000 - year
        off = 6 * (delta // 100 + delta // 400)
        year = year + off
        # Move to between the years 1973 and 2000
        year1 = year + ((2000 - year) // 28) * 28
        year2 = year1 + 28
        timetuple = dt.timetuple()
        # Generate timestamp string for year and year+28
        s1 = time.strftime(fmt, (year1,) + timetuple[1:])
        s2 = time.strftime(fmt, (year2,) + timetuple[1:])
        # Replace instances of respective years (both 2-digit and 4-digit)
        # that are located at the same indexes of s1, s2 with dt's year.
        # Note that C++'s strftime implementation does not use padded
        # zeros or padded whitespace for %y or %Y for years before 100, but
        # uses padded zeros for %x. (For example, try the runnable examples
        # with .tm_year in the interval [-1900, -1800] on
        # http://en.cppreference.com/w/c/chrono/strftime.) For ease of
        # implementation, we always use padded zeros for %y, %Y, and %x.
        s1, s2 = self._replace_common_substr(s1, s2,
                                             "{0:04d}".format(year1),
                                             "{0:04d}".format(year2),
                                             "{0:04d}".format(dt.year))
        s1, s2 = self._replace_common_substr(s1, s2,
                                             "{0:02d}".format(year1 % 100),
                                             "{0:02d}".format(year2 % 100),
                                             "{0:02d}".format(dt.year % 100))
        return cbook.unicode_safe(s1)
    def strftime(self, dt, fmt=None):
        """Refer to documentation for datetime.strftime.
        *fmt* is a :func:`strftime` format string.
        Warning: For years before 1900, depending upon the current
        locale it is possible that the year displayed with %x might
        be incorrect. For years before 100, %y and %Y will yield
        zero-padded strings.
        """
        if fmt is None:
            fmt = self.fmt
        # Strip non-portable, un-escaped "%s" directives before formatting.
        fmt = self.illegal_s.sub(r"\1", fmt)
        fmt = fmt.replace("%s", "s")
        if dt.year >= 1900:
            # Note: in python 3.3 this is okay for years >= 1000,
            # refer to http://bugs.python.org/issue177742
            return cbook.unicode_safe(dt.strftime(fmt))
        return self.strftime_pre_1900(dt, fmt)
class IndexDateFormatter(ticker.Formatter):
    """
    Use with :class:`~matplotlib.ticker.IndexLocator` to cycle format
    strings by index.
    """
    def __init__(self, t, fmt, tz=None):
        """
        *t* is a sequence of dates (floating point days). *fmt* is a
        :func:`strftime` format string.
        """
        if tz is None:
            tz = _get_rc_timezone()
        self.t = t
        self.fmt = fmt
        self.tz = tz
    def __call__(self, x, pos=0):
        'Return the label for time *x* at position *pos*'
        # Round the tick position to the nearest index into self.t.
        ind = int(np.round(x))
        # NOTE(review): `ind <= 0` also suppresses a label for index 0;
        # if the first entry of *t* should be labelled, this probably
        # wants to be `ind < 0` -- confirm against IndexLocator usage.
        if ind >= len(self.t) or ind <= 0:
            return ''
        dt = num2date(self.t[ind], self.tz)
        return cbook.unicode_safe(dt.strftime(self.fmt))
class AutoDateFormatter(ticker.Formatter):
    """
    This class attempts to figure out the best format to use.  This is
    most useful when used with the :class:`AutoDateLocator`.
    The AutoDateFormatter has a scale dictionary that maps the scale
    of the tick (the distance in days between one major tick) and a
    format string.  The default looks like this::
        self.scaled = {
            DAYS_PER_YEAR: rcParams['date.autoformat.year'],
            DAYS_PER_MONTH: rcParams['date.autoformat.month'],
            1.0: rcParams['date.autoformat.day'],
            1. / HOURS_PER_DAY: rcParams['date.autoformat.hour'],
            1. / (MINUTES_PER_DAY): rcParams['date.autoformat.minute'],
            1. / (SEC_PER_DAY): rcParams['date.autoformat.second'],
            1. / (MUSECONDS_PER_DAY): rcParams['date.autoformat.microsecond'],
            }
    The algorithm picks the key in the dictionary that is >= the
    current scale and uses that format string.  You can customize this
    dictionary by doing::
        >>> locator = AutoDateLocator()
        >>> formatter = AutoDateFormatter(locator)
        >>> formatter.scaled[1/(24.*60.)] = '%M:%S' # only show min and sec
    A custom :class:`~matplotlib.ticker.FuncFormatter` can also be used.
    The following example shows how to use a custom format function to strip
    trailing zeros from decimal seconds and adds the date to the first
    ticklabel::
        >>> def my_format_function(x, pos=None):
        ...     x = matplotlib.dates.num2date(x)
        ...     if pos == 0:
        ...         fmt = '%D %H:%M:%S.%f'
        ...     else:
        ...         fmt = '%H:%M:%S.%f'
        ...     label = x.strftime(fmt)
        ...     label = label.rstrip("0")
        ...     label = label.rstrip(".")
        ...     return label
        >>> from matplotlib.ticker import FuncFormatter
        >>> formatter.scaled[1/(24.*60.)] = FuncFormatter(my_format_function)
    """
    # This can be improved by providing some user-level direction on
    # how to choose the best format (precedence, etc...)
    # Perhaps a 'struct' that has a field for each time-type where a
    # zero would indicate "don't show" and a number would indicate
    # "show" with some sort of priority.  Same priorities could mean
    # show all with the same priority.
    # Or more simply, perhaps just a format string for each
    # possibility...
    def __init__(self, locator, tz=None, defaultfmt='%Y-%m-%d'):
        """
        Autoformat the date labels.  The default format is the one to use
        if none of the values in ``self.scaled`` are greater than the unit
        returned by ``locator._get_unit()``.
        """
        self._locator = locator
        self._tz = tz
        self.defaultfmt = defaultfmt
        # Formatter actually used for the current scale; rebuilt on call.
        self._formatter = DateFormatter(self.defaultfmt, tz)
        self.scaled = {DAYS_PER_YEAR: rcParams['date.autoformatter.year'],
                       DAYS_PER_MONTH: rcParams['date.autoformatter.month'],
                       1.0: rcParams['date.autoformatter.day'],
                       1. / HOURS_PER_DAY: rcParams['date.autoformatter.hour'],
                       1. / (MINUTES_PER_DAY):
                           rcParams['date.autoformatter.minute'],
                       1. / (SEC_PER_DAY):
                           rcParams['date.autoformatter.second'],
                       1. / (MUSECONDS_PER_DAY):
                           rcParams['date.autoformatter.microsecond']}
    def __call__(self, x, pos=None):
        'Return the label for ordinal *x*, picking a scale-matched format.'
        locator_unit_scale = float(self._locator._get_unit())
        fmt = self.defaultfmt
        # Pick the first scale which is greater than the locator unit.
        for possible_scale in sorted(self.scaled):
            if possible_scale >= locator_unit_scale:
                fmt = self.scaled[possible_scale]
                break
        # scaled values may be strftime strings or arbitrary callables.
        if isinstance(fmt, six.string_types):
            self._formatter = DateFormatter(fmt, self._tz)
            result = self._formatter(x, pos)
        elif six.callable(fmt):
            result = fmt(x, pos)
        else:
            raise TypeError('Unexpected type passed to {0!r}.'.format(self))
        return result
class rrulewrapper(object):
    """Thin wrapper around :class:`dateutil.rrule.rrule` that remembers
    its construction parameters so the rule can be rebuilt via set()."""
    def __init__(self, freq, **kwargs):
        # Keep the constructor arguments so the rule can be rebuilt later.
        self._construct = kwargs.copy()
        self._construct["freq"] = freq
        self._rrule = rrule(**self._construct)
    def set(self, **kwargs):
        """Update construction parameters and rebuild the wrapped rrule."""
        self._construct.update(kwargs)
        self._rrule = rrule(**self._construct)
    def __getattr__(self, name):
        # NOTE(review): __getattr__ is only invoked after normal attribute
        # lookup fails, so this __dict__ branch can never be taken; all
        # unknown attribute access is effectively delegated to the rrule.
        if name in self.__dict__:
            return self.__dict__[name]
        return getattr(self._rrule, name)
class DateLocator(ticker.Locator):
    """
    Determines the tick locations when plotting dates.
    """
    # rrule keyword arguments that pin ticks to midnight.
    hms0d = {'byhour': 0, 'byminute': 0, 'bysecond': 0}

    def __init__(self, tz=None):
        """
        *tz* is a :class:`tzinfo` instance.
        """
        self.tz = _get_rc_timezone() if tz is None else tz

    def set_tzinfo(self, tz):
        """
        Set time zone info.
        """
        self.tz = tz

    def datalim_to_dt(self):
        """
        Convert axis data interval to datetime objects.
        """
        lo, hi = self.axis.get_data_interval()
        if lo > hi:
            lo, hi = hi, lo
        return num2date(lo, self.tz), num2date(hi, self.tz)

    def viewlim_to_dt(self):
        """
        Converts the view interval to datetime objects.
        """
        lo, hi = self.axis.get_view_interval()
        if lo > hi:
            lo, hi = hi, lo
        return num2date(lo, self.tz), num2date(hi, self.tz)

    def _get_unit(self):
        """
        Return how many days a unit of the locator is; used for
        intelligent autoscaling.
        """
        return 1

    def _get_interval(self):
        """
        Return the number of units for each tick.
        """
        return 1

    def nonsingular(self, vmin, vmax):
        """
        Given the proposed upper and lower extent, widen the range if
        it is too close to being singular (i.e. a range of ~0).
        """
        pad = 2 * self._get_unit() * self._get_interval()
        if abs(vmax - vmin) < 1e-6:
            vmin -= pad
            vmax += pad
        return vmin, vmax
class RRuleLocator(DateLocator):
    """Tick locator driven by a dateutil rrule (via :class:`rrulewrapper`)."""
    # use the dateutil rrule instance
    def __init__(self, o, tz=None):
        """*o* is a :class:`rrulewrapper`; *tz* a :class:`tzinfo` instance."""
        DateLocator.__init__(self, tz)
        self.rule = o
    def __call__(self):
        # if no data have been set, this will tank with a ValueError
        try:
            dmin, dmax = self.viewlim_to_dt()
        except ValueError:
            return []
        return self.tick_values(dmin, dmax)
    def tick_values(self, vmin, vmax):
        delta = relativedelta(vmax, vmin)
        # We need to cap at the endpoints of valid datetime
        try:
            start = vmin - delta
        except ValueError:
            start = _from_ordinalf(1.0)
        try:
            stop = vmax + delta
        except ValueError:
            # The magic number!
            # (ordinal just below datetime.max, i.e. the end of year 9999)
            stop = _from_ordinalf(3652059.9999999)
        self.rule.set(dtstart=start, until=stop)
        dates = self.rule.between(vmin, vmax, True)
        if len(dates) == 0:
            # No rule occurrence in range: fall back to the endpoints.
            return date2num([vmin, vmax])
        return self.raise_if_exceeds(date2num(dates))
    def _get_unit(self):
        """
        Return how many days a unit of the locator is; used for
        intelligent autoscaling.
        """
        # NOTE(review): reads dateutil's private _rrule._freq attribute.
        freq = self.rule._rrule._freq
        return self.get_unit_generic(freq)
    @staticmethod
    def get_unit_generic(freq):
        # Map a dateutil frequency constant to its size in days.
        if freq == YEARLY:
            return DAYS_PER_YEAR
        elif freq == MONTHLY:
            return DAYS_PER_MONTH
        elif freq == WEEKLY:
            return DAYS_PER_WEEK
        elif freq == DAILY:
            return 1.0
        elif freq == HOURLY:
            return 1.0 / HOURS_PER_DAY
        elif freq == MINUTELY:
            return 1.0 / MINUTES_PER_DAY
        elif freq == SECONDLY:
            return 1.0 / SEC_PER_DAY
        else:
            # error
            return -1  # or should this just return '1'?
    def _get_interval(self):
        # NOTE(review): reads dateutil's private _rrule._interval attribute.
        return self.rule._rrule._interval
    def autoscale(self):
        """
        Set the view limits to include the data range.
        """
        dmin, dmax = self.datalim_to_dt()
        delta = relativedelta(dmax, dmin)
        # We need to cap at the endpoints of valid datetime
        try:
            start = dmin - delta
        except ValueError:
            start = _from_ordinalf(1.0)
        try:
            stop = dmax + delta
        except ValueError:
            # The magic number!
            # (ordinal just below datetime.max, i.e. the end of year 9999)
            stop = _from_ordinalf(3652059.9999999)
        self.rule.set(dtstart=start, until=stop)
        dmin, dmax = self.datalim_to_dt()
        # Snap the limits outward to the nearest rule occurrences.
        vmin = self.rule.before(dmin, True)
        if not vmin:
            vmin = dmin
        vmax = self.rule.after(dmax, True)
        if not vmax:
            vmax = dmax
        vmin = date2num(vmin)
        vmax = date2num(vmax)
        return self.nonsingular(vmin, vmax)
class AutoDateLocator(DateLocator):
    """
    On autoscale, this class picks the best
    :class:`DateLocator` to set the view limits and the tick
    locations.
    """
    def __init__(self, tz=None, minticks=5, maxticks=None,
                 interval_multiples=False):
        """
        *minticks* is the minimum number of ticks desired, which is used to
        select the type of ticking (yearly, monthly, etc.).
        *maxticks* is the maximum number of ticks desired, which controls
        any interval between ticks (ticking every other, every 3, etc.).
        For really fine-grained control, this can be a dictionary mapping
        individual rrule frequency constants (YEARLY, MONTHLY, etc.)
        to their own maximum number of ticks.  This can be used to keep
        the number of ticks appropriate to the format chosen in
        :class:`AutoDateFormatter`. Any frequency not specified in this
        dictionary is given a default value.
        *tz* is a :class:`tzinfo` instance.
        *interval_multiples* is a boolean that indicates whether ticks
        should be chosen to be multiple of the interval. This will lock
        ticks to 'nicer' locations. For example, this will force the
        ticks to be at hours 0,6,12,18 when hourly ticking is done at
        6 hour intervals.
        The AutoDateLocator has an interval dictionary that maps the
        frequency of the tick (a constant from dateutil.rrule) and a
        multiple allowed for that ticking.  The default looks like this::
          self.intervald = {
            YEARLY : [1, 2, 4, 5, 10, 20, 40, 50, 100, 200, 400, 500,
                      1000, 2000, 4000, 5000, 10000],
            MONTHLY : [1, 2, 3, 4, 6],
            DAILY : [1, 2, 3, 7, 14],
            HOURLY : [1, 2, 3, 4, 6, 12],
            MINUTELY: [1, 5, 10, 15, 30],
            SECONDLY: [1, 5, 10, 15, 30],
            MICROSECONDLY: [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000,
                            5000, 10000, 20000, 50000, 100000, 200000, 500000,
                            1000000],
            }
        The interval is used to specify multiples that are appropriate for
        the frequency of ticking. For instance, every 7 days is sensible
        for daily ticks, but for minutes/seconds, 15 or 30 make sense.
        You can customize this dictionary by doing::
          locator = AutoDateLocator()
          locator.intervald[HOURLY] = [3]  # only show every 3 hours
        """
        DateLocator.__init__(self, tz)
        # Concrete locator chosen by the most recent refresh()/autoscale().
        self._locator = YearLocator()
        self._freq = YEARLY
        self._freqs = [YEARLY, MONTHLY, DAILY, HOURLY, MINUTELY,
                       SECONDLY, MICROSECONDLY]
        self.minticks = minticks
        self.maxticks = {YEARLY: 11, MONTHLY: 12, DAILY: 11, HOURLY: 12,
                         MINUTELY: 11, SECONDLY: 11, MICROSECONDLY: 8}
        if maxticks is not None:
            try:
                self.maxticks.update(maxticks)
            except TypeError:
                # Assume we were given an integer. Use this as the maximum
                # number of ticks for every frequency and create a
                # dictionary for this
                self.maxticks = dict(zip(self._freqs,
                                         [maxticks] * len(self._freqs)))
        self.interval_multiples = interval_multiples
        self.intervald = {
            YEARLY: [1, 2, 4, 5, 10, 20, 40, 50, 100, 200, 400, 500,
                     1000, 2000, 4000, 5000, 10000],
            MONTHLY: [1, 2, 3, 4, 6],
            DAILY: [1, 2, 3, 7, 14, 21],
            HOURLY: [1, 2, 3, 4, 6, 12],
            MINUTELY: [1, 5, 10, 15, 30],
            SECONDLY: [1, 5, 10, 15, 30],
            MICROSECONDLY: [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000,
                            5000, 10000, 20000, 50000, 100000, 200000, 500000,
                            1000000]}
        # Candidate by* ranges per frequency, indexed in _freqs order.
        self._byranges = [None, range(1, 13), range(1, 32),
                          range(0, 24), range(0, 60), range(0, 60), None]
    def __call__(self):
        'Return the locations of the ticks'
        self.refresh()
        return self._locator()
    def tick_values(self, vmin, vmax):
        return self.get_locator(vmin, vmax).tick_values(vmin, vmax)
    def nonsingular(self, vmin, vmax):
        # whatever is thrown at us, we can scale the unit.
        # But default nonsingular date plots at an ~4 year period.
        if vmin == vmax:
            vmin = vmin - DAYS_PER_YEAR * 2
            vmax = vmax + DAYS_PER_YEAR * 2
        return vmin, vmax
    def set_axis(self, axis):
        # Keep the delegate locator attached to the same axis.
        DateLocator.set_axis(self, axis)
        self._locator.set_axis(axis)
    def refresh(self):
        'Refresh internal information based on current limits.'
        dmin, dmax = self.viewlim_to_dt()
        self._locator = self.get_locator(dmin, dmax)
    def _get_unit(self):
        # Unit size in days of the currently selected frequency.
        if self._freq in [MICROSECONDLY]:
            return 1. / MUSECONDS_PER_DAY
        else:
            return RRuleLocator.get_unit_generic(self._freq)
    def autoscale(self):
        'Try to choose the view limits intelligently.'
        dmin, dmax = self.datalim_to_dt()
        self._locator = self.get_locator(dmin, dmax)
        return self._locator.autoscale()
    def get_locator(self, dmin, dmax):
        'Pick the best locator based on a distance.'
        delta = relativedelta(dmax, dmin)
        tdelta = dmax - dmin
        # take absolute difference
        if dmin > dmax:
            delta = -delta
            tdelta = -tdelta
        # The following uses a mix of calls to relativedelta and timedelta
        # methods because there is incomplete overlap in the functionality of
        # these similar functions, and it's best to avoid doing our own math
        # whenever possible.
        numYears = float(delta.years)
        numMonths = (numYears * MONTHS_PER_YEAR) + delta.months
        numDays = tdelta.days  # Avoids estimates of days/month, days/year
        numHours = (numDays * HOURS_PER_DAY) + delta.hours
        numMinutes = (numHours * MIN_PER_HOUR) + delta.minutes
        numSeconds = np.floor(tdelta.total_seconds())
        numMicroseconds = np.floor(tdelta.total_seconds() * 1e6)
        nums = [numYears, numMonths, numDays, numHours, numMinutes,
                numSeconds, numMicroseconds]
        # Microseconds are handled by a MultipleLocator, not an rrule.
        use_rrule_locator = [True] * 6 + [False]
        # Default setting of bymonth, etc. to pass to rrule
        # [unused (for year), bymonth, bymonthday, byhour, byminute,
        #  bysecond, unused (for microseconds)]
        byranges = [None, 1, 1, 0, 0, 0, None]
        # Loop over all the frequencies and try to find one that gives at
        # least a minticks tick positions.  Once this is found, look for
        # an interval from an list specific to that frequency that gives no
        # more than maxticks tick positions. Also, set up some ranges
        # (bymonth, etc.) as appropriate to be passed to rrulewrapper.
        for i, (freq, num) in enumerate(zip(self._freqs, nums)):
            # If this particular frequency doesn't give enough ticks, continue
            if num < self.minticks:
                # Since we're not using this particular frequency, set
                # the corresponding by_ to None so the rrule can act as
                # appropriate
                byranges[i] = None
                continue
            # Find the first available interval that doesn't give too many
            # ticks
            for interval in self.intervald[freq]:
                if num <= interval * (self.maxticks[freq] - 1):
                    break
            else:
                # We went through the whole loop without breaking, default to
                # the last interval in the list and raise a warning
                warnings.warn('AutoDateLocator was unable to pick an '
                              'appropriate interval for this date range. '
                              'It may be necessary to add an interval value '
                              "to the AutoDateLocator's intervald dictionary."
                              ' Defaulting to {0}.'.format(interval))
            # Set some parameters as appropriate
            self._freq = freq
            if self._byranges[i] and self.interval_multiples:
                byranges[i] = self._byranges[i][::interval]
                interval = 1
            else:
                byranges[i] = self._byranges[i]
            # We found what frequency to use
            break
        else:
            raise ValueError('No sensible date limit could be found in the '
                             'AutoDateLocator.')
        if use_rrule_locator[i]:
            _, bymonth, bymonthday, byhour, byminute, bysecond, _ = byranges
            rrule = rrulewrapper(self._freq, interval=interval,
                                 dtstart=dmin, until=dmax,
                                 bymonth=bymonth, bymonthday=bymonthday,
                                 byhour=byhour, byminute=byminute,
                                 bysecond=bysecond)
            locator = RRuleLocator(rrule, self.tz)
        else:
            locator = MicrosecondLocator(interval, tz=self.tz)
        # Mirror this locator's axis state onto the delegate.
        locator.set_axis(self.axis)
        locator.set_view_interval(*self.axis.get_view_interval())
        locator.set_data_interval(*self.axis.get_data_interval())
        return locator
class YearLocator(DateLocator):
    """
    Make ticks on a given day of each year that is a multiple of base.
    Examples::
      # Tick every year on Jan 1st
      locator = YearLocator()
      # Tick every 5 years on July 4th
      locator = YearLocator(5, month=7, day=4)
    """
    def __init__(self, base=1, month=1, day=1, tz=None):
        """
        Mark years that are multiple of base on a given month and day
        (default jan 1).
        """
        DateLocator.__init__(self, tz)
        self.base = ticker.Base(base)
        # Fields forced onto every tick datetime via datetime.replace();
        # pins each tick to midnight of the configured month/day.
        self.replaced = {'month':  month,
                         'day':    day,
                         'hour':   0,
                         'minute': 0,
                         'second': 0,
                         'tzinfo': tz
                         }
    def __call__(self):
        # if no data have been set, this will tank with a ValueError
        try:
            dmin, dmax = self.viewlim_to_dt()
        except ValueError:
            return []
        return self.tick_values(dmin, dmax)
    def tick_values(self, vmin, vmax):
        # Snap the year range outward to multiples of base.
        ymin = self.base.le(vmin.year)
        ymax = self.base.ge(vmax.year)
        ticks = [vmin.replace(year=ymin, **self.replaced)]
        while 1:
            dt = ticks[-1]
            if dt.year >= ymax:
                return date2num(ticks)
            year = dt.year + self.base.get_base()
            ticks.append(dt.replace(year=year, **self.replaced))
    def autoscale(self):
        """
        Set the view limits to include the data range.
        """
        dmin, dmax = self.datalim_to_dt()
        ymin = self.base.le(dmin.year)
        ymax = self.base.ge(dmax.year)
        vmin = dmin.replace(year=ymin, **self.replaced)
        vmax = dmax.replace(year=ymax, **self.replaced)
        vmin = date2num(vmin)
        vmax = date2num(vmax)
        return self.nonsingular(vmin, vmax)
class MonthLocator(RRuleLocator):
    """
    Make ticks on occurrences of each month, e.g., 1, 3, 12.
    """
    def __init__(self, bymonth=None, bymonthday=1, interval=1, tz=None):
        """
        Mark every month in *bymonth*; *bymonth* can be an int or
        sequence.  Default is ``range(1,13)``, i.e. every month.
        *interval* is the interval between each iteration.  For
        example, if ``interval=2``, mark every second occurrence.
        """
        if bymonth is None:
            bymonth = range(1, 13)
        elif isinstance(bymonth, np.ndarray):
            # dateutil <= 2.3 cannot digest numpy integers in (among other
            # things) the bymonthday, byweekday and bymonth parameters;
            # convert to a plain list of Python ints.
            bymonth = [x.item() for x in bymonth.astype(int)]
        monthly_rule = rrulewrapper(MONTHLY, bymonth=bymonth,
                                    bymonthday=bymonthday,
                                    interval=interval, **self.hms0d)
        RRuleLocator.__init__(self, monthly_rule, tz)
class WeekdayLocator(RRuleLocator):
    """
    Make ticks on occurrences of each weekday.
    """
    def __init__(self, byweekday=1, interval=1, tz=None):
        """
        Mark every weekday in *byweekday*; *byweekday* can be a number or
        sequence.
        Elements of *byweekday* must be one of MO, TU, WE, TH, FR, SA,
        SU, the constants from :mod:`dateutil.rrule`, which have been
        imported into the :mod:`matplotlib.dates` namespace.
        *interval* specifies the number of weeks to skip.  For example,
        ``interval=2`` plots every second week.
        """
        if isinstance(byweekday, np.ndarray):
            # This fixes a bug in dateutil <= 2.3 which prevents the use of
            # numpy arrays in (among other things) the bymonthday, byweekday
            # and bymonth parameters.
            # BUG FIX: the converted list was previously discarded; it must
            # be assigned back to byweekday (as MonthLocator/DayLocator do)
            # for the workaround to take effect.
            byweekday = [x.item() for x in byweekday.astype(int)]
        rule = rrulewrapper(DAILY, byweekday=byweekday,
                            interval=interval, **self.hms0d)
        RRuleLocator.__init__(self, rule, tz)
class DayLocator(RRuleLocator):
    """
    Make ticks on occurrences of each day of the month.  For example,
    1, 15, 30.
    """
    def __init__(self, bymonthday=None, interval=1, tz=None):
        """
        Mark every day in *bymonthday*; *bymonthday* can be an int or
        sequence.
        Default is to tick every day of the month: ``bymonthday=range(1,32)``
        """
        # Reject non-integral or non-positive intervals up front.
        if interval != int(interval) or interval < 1:
            raise ValueError("interval must be an integer greater than 0")
        if bymonthday is None:
            bymonthday = range(1, 32)
        elif isinstance(bymonthday, np.ndarray):
            # dateutil <= 2.3 cannot digest numpy integers in (among other
            # things) the bymonthday, byweekday and bymonth parameters;
            # convert to a plain list of Python ints.
            bymonthday = [x.item() for x in bymonthday.astype(int)]
        daily_rule = rrulewrapper(DAILY, bymonthday=bymonthday,
                                  interval=interval, **self.hms0d)
        RRuleLocator.__init__(self, daily_rule, tz)
class HourLocator(RRuleLocator):
    """
    Make ticks on occurrences of each hour.
    """
    def __init__(self, byhour=None, interval=1, tz=None):
        """
        Mark every hour in *byhour*; *byhour* can be an int or sequence.
        Default is to tick every hour: ``byhour=range(24)``
        *interval* is the interval between each iteration.  For
        example, if ``interval=2``, mark every second occurrence.
        """
        byhour = range(24) if byhour is None else byhour
        # Pin minutes and seconds to zero so ticks land on the hour.
        hourly_rule = rrulewrapper(HOURLY, byhour=byhour, interval=interval,
                                   byminute=0, bysecond=0)
        RRuleLocator.__init__(self, hourly_rule, tz)
class MinuteLocator(RRuleLocator):
    """
    Make ticks on occurrences of each minute.
    """
    def __init__(self, byminute=None, interval=1, tz=None):
        """
        Mark every minute in *byminute*; *byminute* can be an int or
        sequence.  Default is to tick every minute: ``byminute=range(60)``
        *interval* is the interval between each iteration.  For
        example, if ``interval=2``, mark every second occurrence.
        """
        byminute = range(60) if byminute is None else byminute
        # Pin seconds to zero so ticks land on the minute.
        minutely_rule = rrulewrapper(MINUTELY, byminute=byminute,
                                     interval=interval, bysecond=0)
        RRuleLocator.__init__(self, minutely_rule, tz)
class SecondLocator(RRuleLocator):
    """
    Make ticks on occurrences of each second.
    """
    def __init__(self, bysecond=None, interval=1, tz=None):
        """
        Mark every second in *bysecond*; *bysecond* can be an int or
        sequence.  Default is to tick every second: ``bysecond = range(60)``
        *interval* is the interval between each iteration.  For
        example, if ``interval=2``, mark every second occurrence.
        """
        bysecond = range(60) if bysecond is None else bysecond
        secondly_rule = rrulewrapper(SECONDLY, bysecond=bysecond,
                                     interval=interval)
        RRuleLocator.__init__(self, secondly_rule, tz)
class MicrosecondLocator(DateLocator):
    """
    Make ticks on occurrences of each microsecond.
    """
    def __init__(self, interval=1, tz=None):
        """
        *interval* is the interval between each iteration.  For
        example, if ``interval=2``, mark every second microsecond.
        """
        self._interval = interval
        # Delegate the actual tick picking to a MultipleLocator that
        # works in units of microseconds.
        self._wrapped_locator = ticker.MultipleLocator(interval)
        self.tz = tz

    def set_axis(self, axis):
        self._wrapped_locator.set_axis(axis)
        return DateLocator.set_axis(self, axis)

    def set_view_interval(self, vmin, vmax):
        self._wrapped_locator.set_view_interval(vmin, vmax)
        return DateLocator.set_view_interval(self, vmin, vmax)

    def set_data_interval(self, vmin, vmax):
        self._wrapped_locator.set_data_interval(vmin, vmax)
        return DateLocator.set_data_interval(self, vmin, vmax)

    def __call__(self):
        # If no data have been set, viewlim_to_dt raises a ValueError.
        try:
            dmin, dmax = self.viewlim_to_dt()
        except ValueError:
            return []
        return self.tick_values(dmin, dmax)

    def tick_values(self, vmin, vmax):
        # Work in microseconds, then convert the picked ticks back to days.
        lo, hi = date2num((vmin, vmax))
        lo = lo * MUSECONDS_PER_DAY
        hi = hi * MUSECONDS_PER_DAY
        micro_ticks = self._wrapped_locator.tick_values(lo, hi)
        return [t / MUSECONDS_PER_DAY for t in micro_ticks]

    def _get_unit(self):
        """
        Return how many days a unit of the locator is; used for
        intelligent autoscaling.
        """
        return 1. / MUSECONDS_PER_DAY

    def _get_interval(self):
        """
        Return the number of units for each tick.
        """
        return self._interval
def _close_to_dt(d1, d2, epsilon=5):
"""
Assert that datetimes *d1* and *d2* are within *epsilon* microseconds.
"""
delta = d2 - d1
mus = abs(delta.total_seconds() * 1e6)
assert mus < epsilon
def _close_to_num(o1, o2, epsilon=5):
    """
    Assert that float ordinals *o1* and *o2* are within *epsilon*
    microseconds.
    """
    mus = abs(o2 - o1) * MUSECONDS_PER_DAY
    assert mus < epsilon
def epoch2num(e):
    """
    Convert an epoch or sequence of epochs to the new date format,
    that is days since 0001.
    """
    return np.asarray(e) / SEC_PER_DAY + EPOCH_OFFSET
def num2epoch(d):
    """
    Convert days since 0001 to epoch.  *d* can be a number or sequence.
    """
    return SEC_PER_DAY * (np.asarray(d) - EPOCH_OFFSET)
def mx2num(mxdates):
    """
    Convert mx :class:`datetime` instance (or sequence of mx
    instances) to the new date format.
    """
    is_scalar = not cbook.iterable(mxdates)
    if is_scalar:
        mxdates = [mxdates]
    converted = epoch2num([m.ticks() for m in mxdates])
    return converted[0] if is_scalar else converted
def date_ticker_factory(span, tz=None, numticks=5):
    """
    Create a date locator with *numticks* (approx) and a date formatter
    for *span* in days.  Return value is (locator, formatter).
    """
    # A zero-length span would break the ratios below; treat it as 1 hour.
    if span == 0:
        span = 1 / HOURS_PER_DAY
    # Express the span in each candidate unit, then pick the coarsest
    # unit that still yields more than *numticks* ticks.
    mins = span * MINUTES_PER_DAY
    hrs = span * HOURS_PER_DAY
    days = span
    wks = span / DAYS_PER_WEEK
    months = span / DAYS_PER_MONTH      # Approx
    years = span / DAYS_PER_YEAR        # Approx
    if years > numticks:
        locator = YearLocator(int(years / numticks), tz=tz)  # define
        fmt = '%Y'
    elif months > numticks:
        locator = MonthLocator(tz=tz)
        fmt = '%b %Y'
    elif wks > numticks:
        locator = WeekdayLocator(tz=tz)
        fmt = '%a, %b %d'
    elif days > numticks:
        locator = DayLocator(interval=int(math.ceil(days / numticks)), tz=tz)
        fmt = '%b %d'
    elif hrs > numticks:
        locator = HourLocator(interval=int(math.ceil(hrs / numticks)), tz=tz)
        fmt = '%H:%M\n%b %d'
    elif mins > numticks:
        locator = MinuteLocator(interval=int(math.ceil(mins / numticks)),
                                tz=tz)
        fmt = '%H:%M:%S'
    else:
        locator = MinuteLocator(tz=tz)
        fmt = '%H:%M:%S'
    formatter = DateFormatter(fmt, tz=tz)
    return locator, formatter
def seconds(s):
    """
    Convert a number of seconds to the equivalent number of days.
    """
    # float() also accepts numeric strings and forces true division.
    secs = float(s)
    return secs / SEC_PER_DAY
def minutes(m):
    """
    Convert a number of minutes to the equivalent number of days.
    """
    # float() also accepts numeric strings and forces true division.
    mins = float(m)
    return mins / MINUTES_PER_DAY
def hours(h):
    """
    Return hours as days.

    *h* may be any numeric value (or numeric string, as ``float`` will
    parse it); the result is always a float fraction of a day.
    """
    # Cast to float for consistency with the sibling seconds()/minutes()
    # helpers and to guard against floor division under Python 2 when
    # both operands are integers.
    return float(h) / HOURS_PER_DAY
def weeks(w):
    """
    Convert a number of weeks to the equivalent number of days.
    """
    n_days = w * DAYS_PER_WEEK
    return n_days
class DateConverter(units.ConversionInterface):
    """
    Converter for datetime.date and datetime.datetime data,
    or for date/time data represented as it would be converted
    by :func:`date2num`.

    The 'unit' tag for such data is None or a tzinfo instance.
    """

    @staticmethod
    def axisinfo(unit, axis):
        """
        Return the :class:`~matplotlib.units.AxisInfo` for *unit*.

        *unit* is a tzinfo instance or None.
        The *axis* argument is required but not used.
        """
        majloc = AutoDateLocator(tz=unit)
        majfmt = AutoDateFormatter(majloc, tz=unit)
        # With no data on the axis, default the view to the 2000-2010
        # decade.
        return units.AxisInfo(
            majloc=majloc, majfmt=majfmt, label='',
            default_limits=(datetime.date(2000, 1, 1),
                            datetime.date(2010, 1, 1)))

    @staticmethod
    def convert(value, unit, axis):
        """
        If *value* is not already a number or sequence of numbers,
        convert it with :func:`date2num`.

        The *unit* and *axis* arguments are not used.
        """
        if units.ConversionInterface.is_numlike(value):
            return value
        return date2num(value)

    @staticmethod
    def default_units(x, axis):
        """
        Return the tzinfo instance of *x* or of its first element, or None
        """
        if isinstance(x, np.ndarray):
            x = x.ravel()
        try:
            # Peek at the first element of a sequence input; scalars
            # pass through untouched.
            x = cbook.safe_first_element(x)
        except (TypeError, StopIteration):
            pass
        return getattr(x, 'tzinfo', None)
# Register the converter for both date and datetime values so axes can
# consume them directly.
units.registry[datetime.date] = DateConverter()
units.registry[datetime.datetime] = DateConverter()
| gpl-3.0 |
tkaitchuck/nupic | external/common/lib/python2.6/site-packages/logilab/astng/test/unittest_inference.py | 1 | 41136 | # copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:thenault@gmail.com
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""tests for the astng inference capabilities
"""
from os.path import join, dirname, abspath
import sys
from StringIO import StringIO
from logilab.common.testlib import TestCase, unittest_main
from logilab.astng import InferenceError, builder, nodes
from logilab.astng.inference import infer_end as inference_infer_end
from logilab.astng.bases import YES, Instance, BoundMethod, UnboundMethod,\
path_wrapper, BUILTINS_NAME
def get_name_node(start_from, name, index=0):
    """Return the *index*-th Name node called *name* found under *start_from*."""
    matches = [candidate
               for candidate in start_from.nodes_of_class(nodes.Name)
               if candidate.name == name]
    return matches[index]
def get_node_of_class(start_from, klass):
    """Return the first node of class *klass* found under *start_from*."""
    found = start_from.nodes_of_class(klass)
    # nodes_of_class returns a generator; take its first item (py2 API).
    return found.next()
# NOTE: deliberately rebinds the imported ``builder`` module name to a
# shared ASTNGBuilder instance used by every test case below.
builder = builder.ASTNGBuilder()
class InferenceUtilsTC(TestCase):
    """Tests for the ``path_wrapper`` inference decorator."""

    def test_path_wrapper(self):
        """path_wrapper must propagate InferenceError and yield end results."""
        def infer_default(self, *args):
            # degenerate inference function that always fails
            raise InferenceError
        infer_default = path_wrapper(infer_default)
        infer_end = path_wrapper(inference_infer_end)
        # advancing the wrapped failing generator re-raises the error
        self.failUnlessRaises(InferenceError,
                              infer_default(1).next)
        # the end-of-inference wrapper yields its argument unchanged
        self.failUnlessEqual(infer_end(1).next(), 1)
# Builtin exception classes lived in the ``exceptions`` module on
# Python 2 and moved into builtins with Python 3.
if sys.version_info < (3, 0):
    EXC_MODULE = 'exceptions'
else:
    EXC_MODULE = BUILTINS_NAME
class InferenceTC(TestCase):
CODE = '''
class C(object):
"new style"
attr = 4
def meth1(self, arg1, optarg=0):
var = object()
print ("yo", arg1, optarg)
self.iattr = "hop"
return var
def meth2(self):
self.meth1(*self.meth3)
def meth3(self, d=attr):
b = self.attr
c = self.iattr
return b, c
ex = Exception("msg")
v = C().meth1(1)
m_unbound = C.meth1
m_bound = C().meth1
a, b, c = ex, 1, "bonjour"
[d, e, f] = [ex, 1.0, ("bonjour", v)]
g, h = f
i, (j, k) = "glup", f
a, b= b, a # Gasp !
'''
astng = builder.string_build(CODE, __name__, __file__)
def test_module_inference(self):
infered = self.astng.infer()
obj = infered.next()
self.failUnlessEqual(obj.name, __name__)
self.failUnlessEqual(obj.root().name, __name__)
self.failUnlessRaises(StopIteration, infered.next)
def test_class_inference(self):
infered = self.astng['C'].infer()
obj = infered.next()
self.failUnlessEqual(obj.name, 'C')
self.failUnlessEqual(obj.root().name, __name__)
self.failUnlessRaises(StopIteration, infered.next)
def test_function_inference(self):
infered = self.astng['C']['meth1'].infer()
obj = infered.next()
self.failUnlessEqual(obj.name, 'meth1')
self.failUnlessEqual(obj.root().name, __name__)
self.failUnlessRaises(StopIteration, infered.next)
def test_builtin_name_inference(self):
infered = self.astng['C']['meth1']['var'].infer()
var = infered.next()
self.failUnlessEqual(var.name, 'object')
self.failUnlessEqual(var.root().name, BUILTINS_NAME)
self.failUnlessRaises(StopIteration, infered.next)
def test_tupleassign_name_inference(self):
infered = self.astng['a'].infer()
exc = infered.next()
self.assertIsInstance(exc, Instance)
self.failUnlessEqual(exc.name, 'Exception')
self.failUnlessEqual(exc.root().name, EXC_MODULE)
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['b'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, 1)
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['c'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, "bonjour")
self.failUnlessRaises(StopIteration, infered.next)
def test_listassign_name_inference(self):
infered = self.astng['d'].infer()
exc = infered.next()
self.assertIsInstance(exc, Instance)
self.failUnlessEqual(exc.name, 'Exception')
self.failUnlessEqual(exc.root().name, EXC_MODULE)
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['e'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, 1.0)
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['f'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Tuple)
self.failUnlessRaises(StopIteration, infered.next)
def test_advanced_tupleassign_name_inference1(self):
infered = self.astng['g'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, "bonjour")
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['h'].infer()
var = infered.next()
self.failUnlessEqual(var.name, 'object')
self.failUnlessEqual(var.root().name, BUILTINS_NAME)
self.failUnlessRaises(StopIteration, infered.next)
def test_advanced_tupleassign_name_inference2(self):
infered = self.astng['i'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, u"glup")
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['j'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, "bonjour")
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng['k'].infer()
var = infered.next()
self.failUnlessEqual(var.name, 'object')
self.failUnlessEqual(var.root().name, BUILTINS_NAME)
self.failUnlessRaises(StopIteration, infered.next)
def test_swap_assign_inference(self):
infered = self.astng.locals['a'][1].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, 1)
self.failUnlessRaises(StopIteration, infered.next)
infered = self.astng.locals['b'][1].infer()
exc = infered.next()
self.assertIsInstance(exc, Instance)
self.failUnlessEqual(exc.name, 'Exception')
self.failUnlessEqual(exc.root().name, EXC_MODULE)
self.failUnlessRaises(StopIteration, infered.next)
def test_getattr_inference1(self):
infered = self.astng['ex'].infer()
exc = infered.next()
self.assertIsInstance(exc, Instance)
self.failUnlessEqual(exc.name, 'Exception')
self.failUnlessEqual(exc.root().name, EXC_MODULE)
self.failUnlessRaises(StopIteration, infered.next)
def test_getattr_inference2(self):
infered = get_node_of_class(self.astng['C']['meth2'], nodes.Getattr).infer()
meth1 = infered.next()
self.failUnlessEqual(meth1.name, 'meth1')
self.failUnlessEqual(meth1.root().name, __name__)
self.failUnlessRaises(StopIteration, infered.next)
def test_getattr_inference3(self):
infered = self.astng['C']['meth3']['b'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, 4)
self.failUnlessRaises(StopIteration, infered.next)
def test_getattr_inference4(self):
infered = self.astng['C']['meth3']['c'].infer()
const = infered.next()
self.assertIsInstance(const, nodes.Const)
self.failUnlessEqual(const.value, "hop")
self.failUnlessRaises(StopIteration, infered.next)
def test_callfunc_inference(self):
infered = self.astng['v'].infer()
meth1 = infered.next()
self.assertIsInstance(meth1, Instance)
self.failUnlessEqual(meth1.name, 'object')
self.failUnlessEqual(meth1.root().name, BUILTINS_NAME)
self.failUnlessRaises(StopIteration, infered.next)
def test_unbound_method_inference(self):
infered = self.astng['m_unbound'].infer()
meth1 = infered.next()
self.assertIsInstance(meth1, UnboundMethod)
self.failUnlessEqual(meth1.name, 'meth1')
self.failUnlessEqual(meth1.parent.frame().name, 'C')
self.failUnlessRaises(StopIteration, infered.next)
def test_bound_method_inference(self):
infered = self.astng['m_bound'].infer()
meth1 = infered.next()
self.assertIsInstance(meth1, BoundMethod)
self.failUnlessEqual(meth1.name, 'meth1')
self.failUnlessEqual(meth1.parent.frame().name, 'C')
self.failUnlessRaises(StopIteration, infered.next)
def test_args_default_inference1(self):
optarg = get_name_node(self.astng['C']['meth1'], 'optarg')
infered = optarg.infer()
obj1 = infered.next()
self.assertIsInstance(obj1, nodes.Const)
self.failUnlessEqual(obj1.value, 0)
obj1 = infered.next()
self.assertIs(obj1, YES, obj1)
self.failUnlessRaises(StopIteration, infered.next)
def test_args_default_inference2(self):
infered = self.astng['C']['meth3'].ilookup('d')
obj1 = infered.next()
self.assertIsInstance(obj1, nodes.Const)
self.failUnlessEqual(obj1.value, 4)
obj1 = infered.next()
self.assertIs(obj1, YES, obj1)
self.failUnlessRaises(StopIteration, infered.next)
def test_inference_restrictions(self):
infered = get_name_node(self.astng['C']['meth1'], 'arg1').infer()
obj1 = infered.next()
self.assertIs(obj1, YES, obj1)
self.failUnlessRaises(StopIteration, infered.next)
def test_ancestors_inference(self):
code = '''
class A:
pass
class A(A):
pass
'''
astng = builder.string_build(code, __name__, __file__)
a1 = astng.locals['A'][0]
a2 = astng.locals['A'][1]
a2_ancestors = list(a2.ancestors())
self.failUnlessEqual(len(a2_ancestors), 1)
self.failUnless(a2_ancestors[0] is a1)
def test_ancestors_inference2(self):
code = '''
class A:
pass
class B(A): pass
class A(B):
pass
'''
astng = builder.string_build(code, __name__, __file__)
a1 = astng.locals['A'][0]
a2 = astng.locals['A'][1]
a2_ancestors = list(a2.ancestors())
self.failUnlessEqual(len(a2_ancestors), 2)
self.failUnless(a2_ancestors[0] is astng.locals['B'][0])
self.failUnless(a2_ancestors[1] is a1, a2_ancestors[1])
def test_f_arg_f(self):
code = '''
def f(f=1):
return f
a = f()
'''
astng = builder.string_build(code, __name__, __file__)
a = astng['a']
a_infered = a.infered()
self.failUnlessEqual(a_infered[0].value, 1)
self.assertEqual(len(a_infered), 1)
def test_exc_ancestors(self):
code = '''
def f():
raise NotImplementedError
'''
astng = builder.string_build(code, __name__, __file__)
error = astng.nodes_of_class(nodes.Name).next()
nie = error.infered()[0]
self.assertIsInstance(nie, nodes.Class)
nie_ancestors = [c.name for c in nie.ancestors()]
if sys.version_info < (3, 0):
self.failUnlessEqual(nie_ancestors, ['RuntimeError', 'StandardError', 'Exception', 'BaseException', 'object'])
else:
self.failUnlessEqual(nie_ancestors, ['RuntimeError', 'Exception', 'BaseException', 'object'])
def test_except_inference(self):
code = '''
try:
print (hop)
except NameError, ex:
ex1 = ex
except Exception, ex:
ex2 = ex
raise
'''
if sys.version_info >= (3, 0):
code = code.replace(', ex:', ' as ex:')
astng = builder.string_build(code, __name__, __file__)
ex1 = astng['ex1']
ex1_infer = ex1.infer()
ex1 = ex1_infer.next()
self.assertIsInstance(ex1, Instance)
self.failUnlessEqual(ex1.name, 'NameError')
self.failUnlessRaises(StopIteration, ex1_infer.next)
ex2 = astng['ex2']
ex2_infer = ex2.infer()
ex2 = ex2_infer.next()
self.assertIsInstance(ex2, Instance)
self.failUnlessEqual(ex2.name, 'Exception')
self.failUnlessRaises(StopIteration, ex2_infer.next)
def test_del1(self):
code = '''
del undefined_attr
'''
delete = builder.string_build(code, __name__, __file__).body[0]
self.failUnlessRaises(InferenceError, delete.infer)
def test_del2(self):
code = '''
a = 1
b = a
del a
c = a
a = 2
d = a
'''
astng = builder.string_build(code, __name__, __file__)
n = astng['b']
n_infer = n.infer()
infered = n_infer.next()
self.assertIsInstance(infered, nodes.Const)
self.failUnlessEqual(infered.value, 1)
self.failUnlessRaises(StopIteration, n_infer.next)
n = astng['c']
n_infer = n.infer()
self.failUnlessRaises(InferenceError, n_infer.next)
n = astng['d']
n_infer = n.infer()
infered = n_infer.next()
self.assertIsInstance(infered, nodes.Const)
self.failUnlessEqual(infered.value, 2)
self.failUnlessRaises(StopIteration, n_infer.next)
def test_builtin_types(self):
code = '''
l = [1]
t = (2,)
d = {}
s = ''
s2 = '_'
'''
astng = builder.string_build(code, __name__, __file__)
n = astng['l']
infered = n.infer().next()
self.assertIsInstance(infered, nodes.List)
self.assertIsInstance(infered, Instance)
self.failUnlessEqual(infered.getitem(0).value, 1)
self.assertIsInstance(infered._proxied, nodes.Class)
self.failUnlessEqual(infered._proxied.name, 'list')
self.failUnless('append' in infered._proxied.locals)
n = astng['t']
infered = n.infer().next()
self.assertIsInstance(infered, nodes.Tuple)
self.assertIsInstance(infered, Instance)
self.failUnlessEqual(infered.getitem(0).value, 2)
self.assertIsInstance(infered._proxied, nodes.Class)
self.failUnlessEqual(infered._proxied.name, 'tuple')
n = astng['d']
infered = n.infer().next()
self.assertIsInstance(infered, nodes.Dict)
self.assertIsInstance(infered, Instance)
self.assertIsInstance(infered._proxied, nodes.Class)
self.failUnlessEqual(infered._proxied.name, 'dict')
self.failUnless('get' in infered._proxied.locals)
n = astng['s']
infered = n.infer().next()
self.assertIsInstance(infered, nodes.Const)
self.assertIsInstance(infered, Instance)
self.failUnlessEqual(infered.name, 'str')
self.failUnless('lower' in infered._proxied.locals)
n = astng['s2']
infered = n.infer().next()
self.failUnlessEqual(infered.getitem(0).value, '_')
def test_unicode_type(self):
if sys.version_info >= (3, 0):
self.skipTest('unicode removed on py >= 3.0')
code = '''u = u""'''
astng = builder.string_build(code, __name__, __file__)
n = astng['u']
infered = n.infer().next()
self.assertIsInstance(infered, nodes.Const)
self.assertIsInstance(infered, Instance)
self.failUnlessEqual(infered.name, 'unicode')
self.failUnless('lower' in infered._proxied.locals)
def test_descriptor_are_callable(self):
code = '''
class A:
statm = staticmethod(open)
clsm = classmethod('whatever')
'''
astng = builder.string_build(code, __name__, __file__)
statm = astng['A'].igetattr('statm').next()
self.failUnless(statm.callable())
clsm = astng['A'].igetattr('clsm').next()
self.failUnless(clsm.callable())
def test_bt_ancestor_crash(self):
code = '''
class Warning(Warning):
pass
'''
astng = builder.string_build(code, __name__, __file__)
w = astng['Warning']
ancestors = w.ancestors()
ancestor = ancestors.next()
self.failUnlessEqual(ancestor.name, 'Warning')
self.failUnlessEqual(ancestor.root().name, EXC_MODULE)
ancestor = ancestors.next()
self.failUnlessEqual(ancestor.name, 'Exception')
self.failUnlessEqual(ancestor.root().name, EXC_MODULE)
ancestor = ancestors.next()
self.failUnlessEqual(ancestor.name, 'BaseException')
self.failUnlessEqual(ancestor.root().name, EXC_MODULE)
ancestor = ancestors.next()
self.failUnlessEqual(ancestor.name, 'object')
self.failUnlessEqual(ancestor.root().name, BUILTINS_NAME)
self.failUnlessRaises(StopIteration, ancestors.next)
def test_qqch(self):
code = '''
from logilab.common.modutils import load_module_from_name
xxx = load_module_from_name('__pkginfo__')
'''
astng = builder.string_build(code, __name__, __file__)
xxx = astng['xxx']
self.assertSetEqual(set(n.__class__ for n in xxx.infered()),
set([nodes.Const, YES.__class__]))
def test_method_argument(self):
code = '''
class ErudiEntitySchema:
"""a entity has a type, a set of subject and or object relations"""
def __init__(self, e_type, **kwargs):
kwargs['e_type'] = e_type.capitalize().encode()
def meth(self, e_type, *args, **kwargs):
kwargs['e_type'] = e_type.capitalize().encode()
print(args)
'''
astng = builder.string_build(code, __name__, __file__)
arg = get_name_node(astng['ErudiEntitySchema']['__init__'], 'e_type')
self.failUnlessEqual([n.__class__ for n in arg.infer()],
[YES.__class__])
arg = get_name_node(astng['ErudiEntitySchema']['__init__'], 'kwargs')
self.failUnlessEqual([n.__class__ for n in arg.infer()],
[nodes.Dict])
arg = get_name_node(astng['ErudiEntitySchema']['meth'], 'e_type')
self.failUnlessEqual([n.__class__ for n in arg.infer()],
[YES.__class__])
arg = get_name_node(astng['ErudiEntitySchema']['meth'], 'args')
self.failUnlessEqual([n.__class__ for n in arg.infer()],
[nodes.Tuple])
arg = get_name_node(astng['ErudiEntitySchema']['meth'], 'kwargs')
self.failUnlessEqual([n.__class__ for n in arg.infer()],
[nodes.Dict])
def test_tuple_then_list(self):
code = '''
def test_view(rql, vid, tags=()):
tags = list(tags)
tags.append(vid)
'''
astng = builder.string_build(code, __name__, __file__)
name = get_name_node(astng['test_view'], 'tags', -1)
it = name.infer()
tags = it.next()
self.failUnlessEqual(tags.__class__, Instance)
self.failUnlessEqual(tags._proxied.name, 'list')
self.failUnlessRaises(StopIteration, it.next)
def test_mulassign_inference(self):
code = '''
def first_word(line):
"""Return the first word of a line"""
return line.split()[0]
def last_word(line):
"""Return last word of a line"""
return line.split()[-1]
def process_line(word_pos):
"""Silly function: returns (ok, callable) based on argument.
For test purpose only.
"""
if word_pos > 0:
return (True, first_word)
elif word_pos < 0:
return (True, last_word)
else:
return (False, None)
if __name__ == '__main__':
line_number = 0
for a_line in file('test_callable.py'):
tupletest = process_line(line_number)
(ok, fct) = process_line(line_number)
if ok:
fct(a_line)
'''
astng = builder.string_build(code, __name__, __file__)
self.failUnlessEqual(len(list(astng['process_line'].infer_call_result(
None))), 3)
self.failUnlessEqual(len(list(astng['tupletest'].infer())), 3)
values = ['Function(first_word)', 'Function(last_word)', 'Const(NoneType)']
self.failUnlessEqual([str(infered)
for infered in astng['fct'].infer()], values)
def test_float_complex_ambiguity(self):
code = '''
def no_conjugate_member(magic_flag):
"""should not raise E1101 on something.conjugate"""
if magic_flag:
something = 1.0
else:
something = 1.0j
if isinstance(something, float):
return something
return something.conjugate()
'''
astng = builder.string_build(code, __name__, __file__)
self.failUnlessEqual([i.value for i in
astng['no_conjugate_member'].ilookup('something')], [1.0, 1.0j])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'something', -1).infer()], [1.0, 1.0j])
def test_lookup_cond_branches(self):
code = '''
def no_conjugate_member(magic_flag):
"""should not raise E1101 on something.conjugate"""
something = 1.0
if magic_flag:
something = 1.0j
return something.conjugate()
'''
astng = builder.string_build(code, __name__, __file__)
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'something', -1).infer()], [1.0, 1.0j])
def test_simple_subscript(self):
code = '''
a = [1, 2, 3][0]
b = (1, 2, 3)[1]
c = (1, 2, 3)[-1]
d = a + b + c
print (d)
'''
astng = builder.string_build(code, __name__, __file__)
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'a', -1).infer()], [1])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'b', -1).infer()], [2])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'c', -1).infer()], [3])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'd', -1).infer()], [6])
#def test_simple_tuple(self):
#"""test case for a simple tuple value"""
## XXX tuple inference is not implemented ...
#code = """
#a = (1,)
#b = (22,)
#some = a + b
#"""
#astng = builder.string_build(code, __name__, __file__)
#self.failUnlessEqual(astng['some'].infer.next().as_string(), "(1, 22)")
def test_simple_for(self):
code = '''
for a in [1, 2, 3]:
print (a)
for b,c in [(1,2), (3,4)]:
print (b)
print (c)
print ([(d,e) for e,d in ([1,2], [3,4])])
'''
astng = builder.string_build(code, __name__, __file__)
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'a', -1).infer()], [1, 2, 3])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'b', -1).infer()], [1, 3])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'c', -1).infer()], [2, 4])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'd', -1).infer()], [2, 4])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'e', -1).infer()], [1, 3])
def test_simple_for_genexpr(self):
code = '''
print ((d,e) for e,d in ([1,2], [3,4]))
'''
astng = builder.string_build(code, __name__, __file__)
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'd', -1).infer()], [2, 4])
self.failUnlessEqual([i.value for i in
get_name_node(astng, 'e', -1).infer()], [1, 3])
def test_builtin_help(self):
code = '''
help()
'''
# XXX failing since __builtin__.help assignment has
# been moved into a function...
astng = builder.string_build(code, __name__, __file__)
node = get_name_node(astng, 'help', -1)
infered = list(node.infer())
self.failUnlessEqual(len(infered), 1, infered)
self.assertIsInstance(infered[0], Instance)
self.failUnlessEqual(str(infered[0]),
'Instance of site._Helper')
def test_builtin_open(self):
code = '''
open("toto.txt")
'''
astng = builder.string_build(code, __name__, __file__)
node = get_name_node(astng, 'open', -1)
infered = list(node.infer())
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Function)
self.failUnlessEqual(infered[0].name, 'open')
def test_callfunc_context_func(self):
code = '''
def mirror(arg=None):
return arg
un = mirror(1)
'''
astng = builder.string_build(code, __name__, __file__)
infered = list(astng.igetattr('un'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Const)
self.failUnlessEqual(infered[0].value, 1)
def test_callfunc_context_lambda(self):
code = '''
mirror = lambda x=None: x
un = mirror(1)
'''
astng = builder.string_build(code, __name__, __file__)
infered = list(astng.igetattr('mirror'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Lambda)
infered = list(astng.igetattr('un'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Const)
self.failUnlessEqual(infered[0].value, 1)
def test_factory_method(self):
code = '''
class Super(object):
@classmethod
def instance(cls):
return cls()
class Sub(Super):
def method(self):
print ('method called')
sub = Sub.instance()
'''
astng = builder.string_build(code, __name__, __file__)
infered = list(astng.igetattr('sub'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], Instance)
self.failUnlessEqual(infered[0]._proxied.name, 'Sub')
def test_import_as(self):
code = '''
import os.path as osp
print (osp.dirname(__file__))
from os.path import exists as e
assert e(__file__)
from new import code as make_code
print (make_code)
'''
astng = builder.string_build(code, __name__, __file__)
infered = list(astng.igetattr('osp'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Module)
self.failUnlessEqual(infered[0].name, 'os.path')
infered = list(astng.igetattr('e'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Function)
self.failUnlessEqual(infered[0].name, 'exists')
if sys.version_info >= (3, 0):
self.skipTest('<new> module has been removed')
infered = list(astng.igetattr('make_code'))
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], Instance)
self.failUnlessEqual(str(infered[0]),
'Instance of %s.type' % BUILTINS_NAME)
def _test_const_infered(self, node, value):
infered = list(node.infer())
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Const)
self.failUnlessEqual(infered[0].value, value)
def test_unary_not(self):
for code in ('a = not (1,); b = not ()',
'a = not {1:2}; b = not {}'):
astng = builder.string_build(code, __name__, __file__)
self._test_const_infered(astng['a'], False)
self._test_const_infered(astng['b'], True)
def test_binary_op_int_add(self):
astng = builder.string_build('a = 1 + 2', __name__, __file__)
self._test_const_infered(astng['a'], 3)
def test_binary_op_int_sub(self):
astng = builder.string_build('a = 1 - 2', __name__, __file__)
self._test_const_infered(astng['a'], -1)
def test_binary_op_float_div(self):
astng = builder.string_build('a = 1 / 2.', __name__, __file__)
self._test_const_infered(astng['a'], 1 / 2.)
def test_binary_op_str_mul(self):
astng = builder.string_build('a = "*" * 40', __name__, __file__)
self._test_const_infered(astng['a'], "*" * 40)
def test_binary_op_bitand(self):
astng = builder.string_build('a = 23&20', __name__, __file__)
self._test_const_infered(astng['a'], 23&20)
def test_binary_op_bitor(self):
astng = builder.string_build('a = 23|8', __name__, __file__)
self._test_const_infered(astng['a'], 23|8)
def test_binary_op_bitxor(self):
astng = builder.string_build('a = 23^9', __name__, __file__)
self._test_const_infered(astng['a'], 23^9)
def test_binary_op_shiftright(self):
astng = builder.string_build('a = 23 >>1', __name__, __file__)
self._test_const_infered(astng['a'], 23>>1)
def test_binary_op_shiftleft(self):
astng = builder.string_build('a = 23 <<1', __name__, __file__)
self._test_const_infered(astng['a'], 23<<1)
def test_binary_op_list_mul(self):
for code in ('a = [[]] * 2', 'a = 2 * [[]]'):
astng = builder.string_build(code, __name__, __file__)
infered = list(astng['a'].infer())
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.List)
self.failUnlessEqual(len(infered[0].elts), 2)
self.assertIsInstance(infered[0].elts[0], nodes.List)
self.assertIsInstance(infered[0].elts[1], nodes.List)
def test_binary_op_list_mul_none(self):
'test correct handling on list multiplied by None'
astng = builder.string_build( 'a = [1] * None\nb = [1] * "r"')
infered = astng['a'].infered()
self.assertEqual(len(infered), 1)
self.assertEqual(infered[0], YES)
infered = astng['b'].infered()
self.assertEqual(len(infered), 1)
self.assertEqual(infered[0], YES)
def test_binary_op_tuple_add(self):
astng = builder.string_build('a = (1,) + (2,)', __name__, __file__)
infered = list(astng['a'].infer())
self.failUnlessEqual(len(infered), 1)
self.assertIsInstance(infered[0], nodes.Tuple)
self.failUnlessEqual(len(infered[0].elts), 2)
self.failUnlessEqual(infered[0].elts[0].value, 1)
self.failUnlessEqual(infered[0].elts[1].value, 2)
def test_binary_op_custom_class(self):
code = '''
class myarray:
def __init__(self, array):
self.array = array
def __mul__(self, x):
return myarray([2,4,6])
def astype(self):
return "ASTYPE"
def randint(maximum):
if maximum is not None:
return myarray([1,2,3]) * 2
else:
return int(5)
x = randint(1)
'''
astng = builder.string_build(code, __name__, __file__)
infered = list(astng.igetattr('x'))
self.failUnlessEqual(len(infered), 2)
value = [str(v) for v in infered]
# The __name__ trick here makes it work when invoked directly
# (__name__ == '__main__') and through pytest (__name__ ==
# 'unittest_inference')
self.assertEqual(value, ['Instance of %s.myarray' % __name__,
'Instance of %s.int' % BUILTINS_NAME])
def test_nonregr_lambda_arg(self):
code = '''
def f(g = lambda: None):
g().x
'''
astng = builder.string_build(code, __name__, __file__)
callfuncnode = astng['f'].body[0].value.expr
infered = list(callfuncnode.infer())
self.failUnlessEqual(len(infered), 2, infered)
infered.remove(YES)
self.assertIsInstance(infered[0], nodes.Const)
self.failUnlessEqual(infered[0].value, None)
def test_nonregr_getitem_empty_tuple(self):
code = '''
def f(x):
a = ()[x]
'''
astng = builder.string_build(code, __name__, __file__)
infered = list(astng['f'].ilookup('a'))
self.failUnlessEqual(len(infered), 1)
self.failUnlessEqual(infered[0], YES)
def test_python25_generator_exit(self):
sys.stderr = StringIO()
data = "b = {}[str(0)+''].a"
astng = builder.string_build(data, __name__, __file__)
list(astng['b'].infer())
output = sys.stderr.getvalue()
# I have no idea how to test for this in another way...
self.failIf("RuntimeError" in output, "Exception exceptions.RuntimeError: 'generator ignored GeneratorExit' in <generator object> ignored")
sys.stderr = sys.__stderr__
def test_python25_relative_import(self):
data = "from ...common import date; print (date)"
# !! FIXME also this relative import would not work 'in real' (no __init__.py in test/)
# the test works since we pretend we have a package by passing the full modname
astng = builder.string_build(data, 'logilab.astng.test.unittest_inference', __file__)
infered = get_name_node(astng, 'date').infer().next()
self.assertIsInstance(infered, nodes.Module)
self.assertEqual(infered.name, 'logilab.common.date')
def test_python25_no_relative_import(self):
fname = join(abspath(dirname(__file__)), 'regrtest_data', 'package', 'absimport.py')
astng = builder.file_build(fname, 'absimport')
self.failUnless(astng.absolute_import_activated(), True)
infered = get_name_node(astng, 'import_package_subpackage_module').infer().next()
# failed to import since absolute_import is activated
self.failUnless(infered is YES)
def test_nonregr_absolute_import(self):
fname = join(abspath(dirname(__file__)), 'regrtest_data', 'absimp', 'string.py')
astng = builder.file_build(fname, 'absimp.string')
self.failUnless(astng.absolute_import_activated(), True)
infered = get_name_node(astng, 'string').infer().next()
self.assertIsInstance(infered, nodes.Module)
self.assertEqual(infered.name, 'string')
self.failUnless('lower' in infered.locals)
def test_mechanize_open(self):
try:
import mechanize
except ImportError:
self.skipTest('require mechanize installed')
data = '''from mechanize import Browser
print (Browser)
b = Browser()
'''
astng = builder.string_build(data, __name__, __file__)
browser = get_name_node(astng, 'Browser').infer().next()
self.assertIsInstance(browser, nodes.Class)
bopen = list(browser.igetattr('open'))
self.skipTest('the commit said: "huum, see that later"')
self.assertEqual(len(bopen), 1)
self.assertIsInstance(bopen[0], nodes.Function)
self.failUnless(bopen[0].callable())
b = get_name_node(astng, 'b').infer().next()
self.assertIsInstance(b, Instance)
bopen = list(b.igetattr('open'))
self.assertEqual(len(bopen), 1)
self.assertIsInstance(bopen[0], BoundMethod)
self.failUnless(bopen[0].callable())
def test_property(self):
code = '''
from smtplib import SMTP
class SendMailController(object):
@property
def smtp(self):
return SMTP(mailhost, port)
@property
def me(self):
return self
my_smtp = SendMailController().smtp
my_me = SendMailController().me
'''
decorators = set(['%s.property' % BUILTINS_NAME])
astng = builder.string_build(code, __name__, __file__)
self.assertEqual(astng['SendMailController']['smtp'].decoratornames(),
decorators)
propinfered = list(astng.body[2].value.infer())
self.assertEqual(len(propinfered), 1)
propinfered = propinfered[0]
self.assertIsInstance(propinfered, Instance)
self.assertEqual(propinfered.name, 'SMTP')
self.assertEqual(propinfered.root().name, 'smtplib')
self.assertEqual(astng['SendMailController']['me'].decoratornames(),
decorators)
propinfered = list(astng.body[3].value.infer())
self.assertEqual(len(propinfered), 1)
propinfered = propinfered[0]
self.assertIsInstance(propinfered, Instance)
self.assertEqual(propinfered.name, 'SendMailController')
self.assertEqual(propinfered.root().name, __name__)
def test_im_func_unwrap(self):
    """Accessing ``im_func`` on a method infers the underlying function."""
    code = '''
class EnvBasedTC:
    def pactions(self):
        pass

pactions = EnvBasedTC.pactions.im_func
print (pactions)

class EnvBasedTC2:
    pactions = EnvBasedTC.pactions.im_func
    print (pactions)
'''
    tree = builder.string_build(code, __name__, __file__)
    # Module-level assignment unwraps to the plain function.
    pactions = get_name_node(tree, 'pactions')
    inferred = list(pactions.infer())
    self.assertEqual(len(inferred), 1)
    self.assertIsInstance(inferred[0], nodes.Function)
    # The same unwrapping works inside a class body.
    pactions = get_name_node(tree['EnvBasedTC2'], 'pactions')
    inferred = list(pactions.infer())
    self.assertEqual(len(inferred), 1)
    self.assertIsInstance(inferred[0], nodes.Function)
def test_augassign(self):
    """Augmented assignment folds into a single constant value."""
    code = '''
a = 1
a += 2
print (a)
'''
    tree = builder.string_build(code, __name__, __file__)
    inferred = list(get_name_node(tree, 'a').infer())
    self.assertEqual(len(inferred), 1)
    self.assertIsInstance(inferred[0], nodes.Const)
    self.assertEqual(inferred[0].value, 3)
def test_nonregr_func_arg(self):
    """Calling an opaque function argument infers to YES, not a crash."""
    code = '''
def foo(self, bar):
    def baz():
        pass
    def qux():
        return baz
    spam = bar(None, qux)
    print (spam)
'''
    tree = builder.string_build(code, __name__, __file__)
    inferred = list(get_name_node(tree['foo'], 'spam').infer())
    self.assertEqual(len(inferred), 1)
    self.assertIs(inferred[0], YES)
def test_nonregr_func_global(self):
    """A global rebound in one scope is visible to inference in another."""
    code = '''
active_application = None

def get_active_application():
    global active_application
    return active_application

class Application(object):
    def __init__(self):
        global active_application
        active_application = self

class DataManager(object):
    def __init__(self, app=None):
        self.app = get_active_application()
    def test(self):
        p = self.app
        print (p)
'''
    tree = builder.string_build(code, __name__, __file__)
    inferred = list(Instance(tree['DataManager']).igetattr('app'))
    self.assertEqual(len(inferred), 2, inferred)  # None / Instance(Application)
    inferred = list(get_name_node(tree['DataManager']['test'], 'p').infer())
    self.assertEqual(len(inferred), 2, inferred)
    for node in inferred:
        if isinstance(node, Instance) and node.name == 'Application':
            break
    else:
        self.fail('expected to find an instance of Application in %s' % inferred)
def test_list_inference(self):
    """#20464"""
    code = '''
import optparse

A = []
B = []

def test():
    xyz = [
        "foobar=%s" % options.ca,
    ] + A + B
    if options.bind is not None:
        xyz.append("bind=%s" % options.bind)
    return xyz

def main():
    global options
    parser = optparse.OptionParser()
    (options, args) = parser.parse_args()

Z = test()
'''
    tree = builder.string_build(code, __name__, __file__)
    inferred = list(tree['Z'].infer())
    self.assertEqual(len(inferred), 1, inferred)
    # Z must infer to a list instance despite the concatenation and append.
    self.assertIsInstance(inferred[0], Instance)
    self.assertIsInstance(inferred[0]._proxied, nodes.Class)
    self.assertEqual(inferred[0]._proxied.name, 'list')
def test__new__(self):
    """Attributes set inside __new__ are only visible on an instance."""
    code = '''
class NewTest(object):
    "doc"
    def __new__(cls, arg):
        self = object.__new__(cls)
        self.arg = arg
        return self

n = NewTest()
'''
    tree = builder.string_build(code, __name__, __file__)
    # The class itself has no 'arg' attribute...
    self.assertRaises(InferenceError, list, tree['NewTest'].igetattr('arg'))
    # ...but an inferred instance does.
    instance = tree['n'].infer().next()
    inferred = list(instance.igetattr('arg'))
    self.assertEqual(len(inferred), 1, inferred)
def test_two_parents_from_same_module(self):
    """Both bases imported from one module are resolved as ancestors."""
    code = '''
from data import nonregr

class Xxx(nonregr.Aaa, nonregr.Ccc):
    "doc"
'''
    tree = builder.string_build(code, __name__, __file__)
    ancestors = list(tree['Xxx'].ancestors())
    self.assertEqual(len(ancestors), 3, ancestors)  # Aaa, Ccc, object
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest_main()
| gpl-3.0 |
MattsFleaMarket/python-for-android | python-modules/twisted/twisted/web/test/test_httpauth.py | 49 | 21718 | # Copyright (c) 2009-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web._auth}.
"""
from zope.interface import implements
from zope.interface.verify import verifyObject
from twisted.trial import unittest
from twisted.python.failure import Failure
from twisted.internet.error import ConnectionDone
from twisted.internet.address import IPv4Address
from twisted.cred import error, portal
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.cred.checkers import ANONYMOUS, AllowAnonymousAccess
from twisted.cred.credentials import IUsernamePassword
from twisted.web.iweb import ICredentialFactory
from twisted.web.resource import IResource, Resource, getChildForRequest
from twisted.web._auth import basic, digest
from twisted.web._auth.wrapper import HTTPAuthSessionWrapper, UnauthorizedResource
from twisted.web._auth.basic import BasicCredentialFactory
from twisted.web.server import NOT_DONE_YET
from twisted.web.static import Data
from twisted.web.test.test_web import DummyRequest
def b64encode(s):
    """
    Return the base64 encoding of C{s} with no newlines in the result.

    The previous implementation used the C{'base64'} codec, which inserts a
    newline after every 76 characters of output; C{str.strip} only removes
    newlines at the ends, so long credentials yielded a value with embedded
    newlines -- invalid inside an HTTP header. L{base64.b64encode} never
    inserts line breaks, so no stripping is required.
    """
    import base64
    return base64.b64encode(s)
class BasicAuthTestsMixin:
    """
    L{TestCase} mixin defining tests for L{basic.BasicCredentialFactory}.
    Because this mixin defines C{setUp}, it must be inherited before
    L{TestCase}.
    """
    def setUp(self):
        self.request = self.makeRequest()
        self.realm = 'foo'
        self.username = 'dreid'
        self.password = 'S3CuR1Ty'
        self.credentialFactory = basic.BasicCredentialFactory(self.realm)

    def makeRequest(self, method='GET', clientAddress=None):
        """
        Create a request object to be passed to
        L{basic.BasicCredentialFactory.decode} along with a response value.
        Subclasses must override this.
        """
        raise NotImplementedError("%r did not implement makeRequest" % (
            self.__class__,))

    def test_interface(self):
        """
        L{BasicCredentialFactory} provides L{ICredentialFactory}.
        """
        self.assertTrue(
            verifyObject(ICredentialFactory, self.credentialFactory))

    def test_usernamePassword(self):
        """
        Decoding a base64 response yields a L{UsernamePassword} whose
        password matches the one encoded in the response.
        """
        response = b64encode('%s:%s' % (self.username, self.password))
        creds = self.credentialFactory.decode(response, self.request)
        self.assertTrue(IUsernamePassword.providedBy(creds))
        self.assertTrue(creds.checkPassword(self.password))
        self.assertFalse(creds.checkPassword(self.password + 'wrong'))

    def test_incorrectPadding(self):
        """
        A base64 response with missing padding is still decoded.
        """
        response = b64encode('%s:%s' % (self.username, self.password))
        response = response.strip('=')
        creds = self.credentialFactory.decode(response, self.request)
        self.assertTrue(verifyObject(IUsernamePassword, creds))
        self.assertTrue(creds.checkPassword(self.password))

    def test_invalidEncoding(self):
        """
        L{LoginFailed} is raised for a response which is not valid base64.
        """
        response = 'x'  # one byte cannot be valid base64 text
        self.assertRaises(
            error.LoginFailed,
            self.credentialFactory.decode, response, self.makeRequest())

    def test_invalidCredentials(self):
        """
        L{LoginFailed} is raised when the decoded response does not contain
        a username/password pair.
        """
        response = b64encode('123abc+/')
        self.assertRaises(
            error.LoginFailed,
            self.credentialFactory.decode,
            response, self.makeRequest())
class RequestMixin:
    def makeRequest(self, method='GET', clientAddress=None):
        """
        Build a L{DummyRequest} for the root resource, configured with the
        given HTTP method and client address.
        """
        request = DummyRequest('/')
        request.method = method
        request.client = clientAddress
        return request
class BasicAuthTestCase(RequestMixin, BasicAuthTestsMixin, unittest.TestCase):
    """
    Run the basic authentication tests against L{DummyRequest}.
    """
class DigestAuthTestCase(RequestMixin, unittest.TestCase):
    """
    Digest authentication tests exercised through L{DummyRequest}.
    """
    def setUp(self):
        """
        Create a DigestCredentialFactory for testing.
        """
        self.realm = "test realm"
        self.algorithm = "md5"
        self.credentialFactory = digest.DigestCredentialFactory(
            self.algorithm, self.realm)
        self.request = self.makeRequest()

    def test_decode(self):
        """
        C{decode} delegates to the wrapped
        L{twisted.cred.digest.DigestCredentialFactory}, forwarding the HTTP
        method and host of the request.
        """
        host = '169.254.0.1'
        method = 'GET'
        done = [False]
        response = object()
        def check(_response, _method, _host):
            self.assertEqual(response, _response)
            self.assertEqual(method, _method)
            self.assertEqual(host, _host)
            done[0] = True
        self.patch(self.credentialFactory.digest, 'decode', check)
        req = self.makeRequest(method, IPv4Address('TCP', host, 81))
        self.credentialFactory.decode(response, req)
        self.assertTrue(done[0])

    def test_interface(self):
        """
        L{DigestCredentialFactory} provides L{ICredentialFactory}.
        """
        self.assertTrue(
            verifyObject(ICredentialFactory, self.credentialFactory))

    def test_getChallenge(self):
        """
        The issued challenge carries C{'qop'}, C{'realm'}, C{'algorithm'},
        C{'nonce'} and C{'opaque'} keys; realm and algorithm match the
        factory's configuration, and no value contains a newline.
        """
        challenge = self.credentialFactory.getChallenge(self.request)
        self.assertEqual(challenge['qop'], 'auth')
        self.assertEqual(challenge['realm'], 'test realm')
        self.assertEqual(challenge['algorithm'], 'md5')
        self.assertIn('nonce', challenge)
        self.assertIn('opaque', challenge)
        for value in challenge.values():
            self.assertNotIn('\n', value)

    def test_getChallengeWithoutClientIP(self):
        """
        A challenge is issued even when the request's C{getClientIP} gives
        C{None}.
        """
        request = self.makeRequest('GET', None)
        challenge = self.credentialFactory.getChallenge(request)
        self.assertEqual(challenge['qop'], 'auth')
        self.assertEqual(challenge['realm'], 'test realm')
        self.assertEqual(challenge['algorithm'], 'md5')
        self.assertIn('nonce', challenge)
        self.assertIn('opaque', challenge)
class UnauthorizedResourceTests(unittest.TestCase):
    """
    Tests for L{UnauthorizedResource}.
    """
    def test_getChildWithDefault(self):
        """
        Traversing to any child of an L{UnauthorizedResource} yields the
        resource itself.
        """
        resource = UnauthorizedResource([])
        self.assertIdentical(
            resource.getChildWithDefault("foo", None), resource)
        self.assertIdentical(
            resource.getChildWithDefault("bar", None), resource)

    def test_render(self):
        """
        Rendering produces a 401 response code, a I{WWW-Authenticate}
        header and a short unauthorized message body.
        """
        resource = UnauthorizedResource([
            BasicCredentialFactory('example.com')])
        request = DummyRequest([''])
        request.render(resource)
        self.assertEqual(request.responseCode, 401)
        self.assertEqual(
            request.responseHeaders.getRawHeaders('www-authenticate'),
            ['basic realm="example.com"'])
        self.assertEqual(request.written, ['Unauthorized'])

    def test_renderQuotesRealm(self):
        """
        Quotes and backslashes in the realm value are escaped in the
        I{WWW-Authenticate} header of the rendered response.
        """
        resource = UnauthorizedResource([
            BasicCredentialFactory('example\\"foo')])
        request = DummyRequest([''])
        request.render(resource)
        self.assertEqual(
            request.responseHeaders.getRawHeaders('www-authenticate'),
            ['basic realm="example\\\\\\"foo"'])
class Realm(object):
    """
    A minimal L{IRealm} which hands out an avatar built by C{avatarFactory}
    for any avatarId.

    @type loggedIn: C{int}
    @ivar loggedIn: Number of times C{requestAvatar} has produced an
        L{IResource} avatar.

    @type loggedOut: C{int}
    @ivar loggedOut: Number of times the logout callback has been invoked.
    """
    implements(portal.IRealm)

    def __init__(self, avatarFactory):
        self.loggedOut = 0
        self.loggedIn = 0
        self.avatarFactory = avatarFactory

    def requestAvatar(self, avatarId, mind, *interfaces):
        if IResource in interfaces:
            self.loggedIn += 1
            return IResource, self.avatarFactory(avatarId), self.logout
        raise NotImplementedError()

    def logout(self):
        self.loggedOut += 1
class HTTPAuthHeaderTests(unittest.TestCase):
    """
    Tests for L{HTTPAuthSessionWrapper}.
    """
    makeRequest = DummyRequest

    def setUp(self):
        """
        Create a realm, portal, and L{HTTPAuthSessionWrapper} for the tests.
        """
        self.username = 'foo bar'
        self.password = 'bar baz'
        self.avatarContent = "contents of the avatar resource itself"
        self.childName = "foo-child"
        self.childContent = "contents of the foo child of the avatar"
        self.checker = InMemoryUsernamePasswordDatabaseDontUse()
        self.checker.addUser(self.username, self.password)
        self.avatar = Data(self.avatarContent, 'text/plain')
        self.avatar.putChild(
            self.childName, Data(self.childContent, 'text/plain'))
        self.avatars = {self.username: self.avatar}
        self.realm = Realm(self.avatars.get)
        self.portal = portal.Portal(self.realm, [self.checker])
        self.credentialFactories = []
        self.wrapper = HTTPAuthSessionWrapper(
            self.portal, self.credentialFactories)

    def _authorizedBasicLogin(self, request):
        """
        Add a valid I{basic authorization} header to the given request,
        dispatch it starting from C{self.wrapper}, and return the resulting
        L{IResource}.
        """
        authorization = b64encode(self.username + ':' + self.password)
        request.headers['authorization'] = 'Basic ' + authorization
        return getChildForRequest(self.wrapper, request)

    def test_getChildWithDefault(self):
        """
        Traversal through an L{HTTPAuthSessionWrapper} without the required
        I{Authorization} headers produces a 401 response.
        """
        request = self.makeRequest([self.childName])
        child = getChildForRequest(self.wrapper, request)
        finished = request.notifyFinish()
        def cbFinished(result):
            self.assertEqual(request.responseCode, 401)
        finished.addCallback(cbFinished)
        request.render(child)
        return finished

    def _invalidAuthorizationTest(self, response):
        """
        Traverse from C{self.wrapper} with the given I{Authorization} header
        value and assert the result is a 401 response. Returns a L{Deferred}
        firing when the check is complete.
        """
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        request = self.makeRequest([self.childName])
        request.headers['authorization'] = response
        child = getChildForRequest(self.wrapper, request)
        finished = request.notifyFinish()
        def cbFinished(result):
            self.assertEqual(request.responseCode, 401)
        finished.addCallback(cbFinished)
        request.render(child)
        return finished

    def test_getChildWithDefaultUnauthorizedUser(self):
        """
        An I{Authorization} header naming a nonexistent user results in an
        L{UnauthorizedResource}.
        """
        return self._invalidAuthorizationTest('Basic ' + b64encode('foo:bar'))

    def test_getChildWithDefaultUnauthorizedPassword(self):
        """
        An I{Authorization} header with an existing user but the wrong
        password results in an L{UnauthorizedResource}.
        """
        return self._invalidAuthorizationTest(
            'Basic ' + b64encode(self.username + ':bar'))

    def test_getChildWithDefaultUnrecognizedScheme(self):
        """
        An I{Authorization} header with an unrecognized scheme results in an
        L{UnauthorizedResource}.
        """
        return self._invalidAuthorizationTest('Quux foo bar baz')

    def test_getChildWithDefaultAuthorized(self):
        """
        With a valid I{Authorization} header, traversal renders the
        L{IResource} avatar retrieved from the portal.
        """
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        request = self.makeRequest([self.childName])
        child = self._authorizedBasicLogin(request)
        finished = request.notifyFinish()
        def cbFinished(ignored):
            self.assertEqual(request.written, [self.childContent])
        finished.addCallback(cbFinished)
        request.render(child)
        return finished

    def test_renderAuthorized(self):
        """
        When traversal terminates at the wrapper itself with correct
        credentials, the avatar (not one of its children) is rendered.
        """
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        # Request it exactly, not any of its children.
        request = self.makeRequest([])
        child = self._authorizedBasicLogin(request)
        finished = request.notifyFinish()
        def cbFinished(ignored):
            self.assertEqual(request.written, [self.avatarContent])
        finished.addCallback(cbFinished)
        request.render(child)
        return finished

    def test_getChallengeCalledWithRequest(self):
        """
        When issuing a challenge, the wrapper calls the credential factory's
        C{getChallenge} with the request as its argument.
        """
        class DumbCredentialFactory(object):
            implements(ICredentialFactory)
            scheme = 'dumb'

            def __init__(self):
                self.requests = []

            def getChallenge(self, request):
                self.requests.append(request)
                return {}

        factory = DumbCredentialFactory()
        self.credentialFactories.append(factory)
        request = self.makeRequest([self.childName])
        child = getChildForRequest(self.wrapper, request)
        finished = request.notifyFinish()
        def cbFinished(ignored):
            self.assertEqual(factory.requests, [request])
        finished.addCallback(cbFinished)
        request.render(child)
        return finished

    def _logoutTest(self):
        """
        Issue a request for an authentication-protected resource using valid
        credentials and return the C{DummyRequest} used, without finishing
        it. Helper for the logout-callback tests.
        """
        self.credentialFactories.append(BasicCredentialFactory('example.com'))

        class SlowerResource(Resource):
            def render(self, request):
                return NOT_DONE_YET

        self.avatar.putChild(self.childName, SlowerResource())
        request = self.makeRequest([self.childName])
        child = self._authorizedBasicLogin(request)
        request.render(child)
        self.assertEqual(self.realm.loggedOut, 0)
        return request

    def test_logout(self):
        """
        The realm's logout callback runs once the resource is rendered.
        """
        request = self._logoutTest()
        request.finish()
        self.assertEqual(self.realm.loggedOut, 1)

    def test_logoutOnError(self):
        """
        The logout callback also runs when response generation fails (for
        example, an early client disconnect).
        """
        request = self._logoutTest()
        request.processingFailed(
            Failure(ConnectionDone("Simulated disconnect")))
        self.assertEqual(self.realm.loggedOut, 1)

    def test_decodeRaises(self):
        """
        A I{Basic Authorization} header which cannot be base64-decoded
        results in an L{UnauthorizedResource}.
        """
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        request = self.makeRequest([self.childName])
        request.headers['authorization'] = 'Basic decode should fail'
        child = getChildForRequest(self.wrapper, request)
        self.assertIsInstance(child, UnauthorizedResource)

    def test_selectParseResponse(self):
        """
        L{HTTPAuthSessionWrapper._selectParseHeader} returns a two-tuple of
        the matching L{ICredentialFactory} and the unparsed remainder of the
        header, or C{(None, None)} when no factory matches.
        """
        basicAuthorization = 'Basic abcdef123456'
        self.assertEqual(
            self.wrapper._selectParseHeader(basicAuthorization),
            (None, None))
        factory = BasicCredentialFactory('example.com')
        self.credentialFactories.append(factory)
        self.assertEqual(
            self.wrapper._selectParseHeader(basicAuthorization),
            (factory, 'abcdef123456'))

    def test_unexpectedDecodeError(self):
        """
        An unexpected exception from the credential factory's C{decode}
        results in a 500 response and the exception being logged.
        """
        class UnexpectedException(Exception):
            pass

        class BadFactory(object):
            scheme = 'bad'

            def getChallenge(self, client):
                return {}

            def decode(self, response, request):
                raise UnexpectedException()

        self.credentialFactories.append(BadFactory())
        request = self.makeRequest([self.childName])
        request.headers['authorization'] = 'Bad abc'
        child = getChildForRequest(self.wrapper, request)
        request.render(child)
        self.assertEqual(request.responseCode, 500)
        self.assertEqual(len(self.flushLoggedErrors(UnexpectedException)), 1)

    def test_unexpectedLoginError(self):
        """
        An unexpected failure from L{Portal.login} results in a 500 response
        and the failure being logged.
        """
        class UnexpectedException(Exception):
            pass

        class BrokenChecker(object):
            credentialInterfaces = (IUsernamePassword,)

            def requestAvatarId(self, credentials):
                raise UnexpectedException()

        self.portal.registerChecker(BrokenChecker())
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        request = self.makeRequest([self.childName])
        child = self._authorizedBasicLogin(request)
        request.render(child)
        self.assertEqual(request.responseCode, 500)
        self.assertEqual(len(self.flushLoggedErrors(UnexpectedException)), 1)

    def test_anonymousAccess(self):
        """
        Anonymous requests succeed when the L{Portal} has an anonymous
        checker registered.
        """
        unprotectedContents = "contents of the unprotected child resource"
        self.avatars[ANONYMOUS] = Resource()
        self.avatars[ANONYMOUS].putChild(
            self.childName, Data(unprotectedContents, 'text/plain'))
        self.portal.registerChecker(AllowAnonymousAccess())
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        request = self.makeRequest([self.childName])
        child = getChildForRequest(self.wrapper, request)
        finished = request.notifyFinish()
        def cbFinished(ignored):
            self.assertEqual(request.written, [unprotectedContents])
        finished.addCallback(cbFinished)
        request.render(child)
        return finished
| apache-2.0 |
Livit/Livit.Learn.EdX | common/lib/dogstats/dogstats_wrapper/wrapper.py | 211 | 1586 | """
Wrapper for dog_stats_api, ensuring tags are valid.
See: http://help.datadoghq.com/customer/portal/questions/908720-api-guidelines
"""
from dogapi import dog_stats_api
def _clean_tags(tags):
"""
Helper method that does the actual cleaning of tags for sending to statsd.
1. Handles any type of tag - a plain string, UTF-8 binary, or a unicode
string, and converts it to UTF-8 encoded bytestring needed by statsd.
2. Escape pipe character - used by statsd as a field separator.
3. Trim to 200 characters (DataDog API limitation)
"""
def clean(tagstr):
if isinstance(tagstr, str):
return tagstr.replace('|', '_')[:200]
return unicode(tagstr).replace('|', '_')[:200].encode("utf-8")
return [clean(t) for t in tags]
def increment(metric_name, *args, **kwargs):
    """
    Forward to dog_stats_api.increment, sanitizing any supplied tags first.
    """
    if "tags" in kwargs:
        kwargs["tags"] = _clean_tags(kwargs["tags"])
    dog_stats_api.increment(metric_name, *args, **kwargs)
def histogram(metric_name, *args, **kwargs):
    """
    Forward to dog_stats_api.histogram, sanitizing any supplied tags first.
    """
    if "tags" in kwargs:
        kwargs["tags"] = _clean_tags(kwargs["tags"])
    dog_stats_api.histogram(metric_name, *args, **kwargs)
def timer(metric_name, *args, **kwargs):
    """
    Forward to dog_stats_api.timer, sanitizing any supplied tags first.

    Unlike the other wrappers this returns the timer object/decorator
    produced by dog_stats_api.timer.
    """
    if "tags" in kwargs:
        kwargs["tags"] = _clean_tags(kwargs["tags"])
    return dog_stats_api.timer(metric_name, *args, **kwargs)
| agpl-3.0 |
saurabh6790/medsyn-app | stock/doctype/warehouse/warehouse.py | 18 | 5226 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint, validate_email_add
from webnotes import msgprint, _
class DocType:
    """Controller for the Warehouse doctype.

    Keeps the warehouse's GL account head in sync (creation, re-parenting,
    rename/merge) and guards deletion while stock still exists.
    """
    def __init__(self, doc, doclist=None):
        # Fix: the original signature used a mutable default (doclist=[]),
        # which is evaluated once and shared by every instance created
        # without an explicit doclist; use a fresh list per instance.
        self.doc = doc
        self.doclist = doclist if doclist is not None else []

    def autoname(self):
        """Name the warehouse as "<warehouse_name> - <company abbr>"."""
        suffix = " - " + webnotes.conn.get_value("Company", self.doc.company, "abbr")
        if not self.doc.warehouse_name.endswith(suffix):
            self.doc.name = self.doc.warehouse_name + suffix

    def validate(self):
        """Validate the email id (if any) and sync the parent account."""
        if self.doc.email_id and not validate_email_add(self.doc.email_id):
            msgprint("Please enter valid Email Id", raise_exception=1)
        self.update_parent_account()

    def update_parent_account(self):
        """Re-parent the linked account head when create_account_under changed.

        Only runs for saved documents whose stored value differs from the
        current one.
        """
        if not self.doc.__islocal and (self.doc.create_account_under !=
                webnotes.conn.get_value("Warehouse", self.doc.name, "create_account_under")):
            warehouse_account = webnotes.conn.get_value("Account",
                {"account_type": "Warehouse", "company": self.doc.company,
                 "master_name": self.doc.name}, ["name", "parent_account"])
            if warehouse_account and warehouse_account[1] != self.doc.create_account_under:
                acc_bean = webnotes.bean("Account", warehouse_account[0])
                acc_bean.doc.parent_account = self.doc.create_account_under
                acc_bean.save()

    def on_update(self):
        self.create_account_head()

    def create_account_head(self):
        """Create an account head for this warehouse when perpetual
        (auto) stock accounting is enabled and no matching account exists.

        Account creation is skipped for existing warehouses that already
        have stock ledger entries.
        """
        if cint(webnotes.defaults.get_global_default("auto_accounting_for_stock")):
            if not webnotes.conn.get_value("Account", {"account_type": "Warehouse",
                    "master_name": self.doc.name}) and not webnotes.conn.get_value("Account",
                    {"account_name": self.doc.warehouse_name}):
                if self.doc.fields.get("__islocal") or not webnotes.conn.get_value(
                        "Stock Ledger Entry", {"warehouse": self.doc.name}):
                    self.validate_parent_account()
                    ac_bean = webnotes.bean({
                        "doctype": "Account",
                        'account_name': self.doc.warehouse_name,
                        'parent_account': self.doc.create_account_under,
                        'group_or_ledger': 'Ledger',
                        'company': self.doc.company,
                        "account_type": "Warehouse",
                        "master_name": self.doc.name,
                        "freeze_account": "No"
                    })
                    ac_bean.ignore_permissions = True
                    ac_bean.insert()
                    msgprint(_("Account Head") + ": " + ac_bean.doc.name + _(" created"))

    def validate_parent_account(self):
        """Default the parent account to the company's "Stock Assets" group
        when none was chosen; raise if no candidate exists."""
        if not self.doc.create_account_under:
            parent_account = webnotes.conn.get_value("Account",
                {"account_name": "Stock Assets", "company": self.doc.company})
            if parent_account:
                self.doc.create_account_under = parent_account
            else:
                webnotes.throw(_("Please enter account group under which account \
                    for warehouse ") + self.doc.name + _(" will be created"))

    def on_trash(self):
        """Block deletion while any bin holds quantity or stock ledger
        entries exist; otherwise remove bins and the linked account head."""
        # delete bin
        bins = webnotes.conn.sql("select * from `tabBin` where warehouse = %s",
            self.doc.name, as_dict=1)
        for d in bins:
            if d['actual_qty'] or d['reserved_qty'] or d['ordered_qty'] or \
                    d['indented_qty'] or d['projected_qty'] or d['planned_qty']:
                msgprint("""Warehouse: %s can not be deleted as qty exists for item: %s"""
                    % (self.doc.name, d['item_code']), raise_exception=1)
            else:
                webnotes.conn.sql("delete from `tabBin` where name = %s", d['name'])

        warehouse_account = webnotes.conn.get_value("Account",
            {"account_type": "Warehouse", "master_name": self.doc.name})
        if warehouse_account:
            webnotes.delete_doc("Account", warehouse_account)

        if webnotes.conn.sql("""select name from `tabStock Ledger Entry`
                where warehouse = %s""", self.doc.name):
            msgprint("""Warehouse can not be deleted as stock ledger entry
                exists for this warehouse.""", raise_exception=1)

    def before_rename(self, olddn, newdn, merge=False):
        """Return the new (abbr-suffixed) name; validate merge targets."""
        # Add company abbr if not provided
        from setup.doctype.company.company import get_name_with_abbr
        new_warehouse = get_name_with_abbr(newdn, self.doc.company)

        if merge:
            if not webnotes.conn.exists("Warehouse", newdn):
                webnotes.throw(_("Warehouse ") + newdn + _(" does not exists"))
            if self.doc.company != webnotes.conn.get_value("Warehouse", new_warehouse, "company"):
                webnotes.throw(_("Both Warehouse must belong to same Company"))
            # Bins of the old warehouse are recomputed after the merge.
            webnotes.conn.sql("delete from `tabBin` where warehouse=%s", olddn)

        from accounts.utils import rename_account_for
        rename_account_for("Warehouse", olddn, new_warehouse, merge)

        return new_warehouse

    def after_rename(self, olddn, newdn, merge=False):
        if merge:
            self.recalculate_bin_qty(newdn)

    def recalculate_bin_qty(self, newdn):
        """Repost stock for every item so bins of the merged warehouse are
        rebuilt; temporarily allows negative stock during the repost."""
        from utilities.repost_stock import repost_stock
        webnotes.conn.auto_commit_on_many_writes = 1
        webnotes.conn.set_default("allow_negative_stock", 1)

        for item in webnotes.conn.sql("""select distinct item_code from (
                select name as item_code from `tabItem` where ifnull(is_stock_item, 'Yes')='Yes'
                union
                select distinct item_code from tabBin) a"""):
            repost_stock(item[0], newdn)

        webnotes.conn.set_default("allow_negative_stock",
            webnotes.conn.get_value("Stock Settings", None, "allow_negative_stock"))
        webnotes.conn.auto_commit_on_many_writes = 0
Lh4cKg/sl4a | python-build/python-libs/gdata/src/gdata/apps/emailsettings/service.py | 143 | 8840 | #!/usr/bin/python
#
# Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Allow Google Apps domain administrators to set users' email settings.
EmailSettingsService: Set various email settings.
"""
__author__ = 'google-apps-apis@googlegroups.com'
import gdata.apps
import gdata.apps.service
import gdata.service
# Version of the Email Settings API used when building request URLs.
API_VER='2.0'
# Forwarding and POP3 options
# KEEP/ARCHIVE/DELETE: what happens to a message after it is forwarded or
# retrieved over POP3. ALL_MAIL/MAIL_FROM_NOW_ON: which messages POP3 exposes.
KEEP='KEEP'
ARCHIVE='ARCHIVE'
DELETE='DELETE'
ALL_MAIL='ALL_MAIL'
MAIL_FROM_NOW_ON='MAIL_FROM_NOW_ON'
class EmailSettingsService(gdata.apps.service.PropertyService):
"""Client for the Google Apps Email Settings service."""
def _serviceUrl(self, setting_id, username, domain=None):
    """Build the emailsettings feed URI for one of a user's settings.

    Falls back to the service's default domain when none is given.
    """
    effective_domain = self.domain if domain is None else domain
    return '/a/feeds/emailsettings/%s/%s/%s/%s' % (
        API_VER, effective_domain, username, setting_id)
def _bool2str(self, b):
    """Map True -> 'true', any other non-None value -> 'false'; None passes
    through unchanged (so absent optional flags stay unset)."""
    if b is None:
        return None
    return 'true' if b is True else 'false'
def CreateLabel(self, username, label):
    """Create a mailbox label.

    Args:
      username: User to create label for.
      label: Label to create.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._serviceUrl('label', username)
    return self._PostProperties(uri, {'label': label})
def CreateFilter(self, username, from_=None, to=None, subject=None,
                 has_the_word=None, does_not_have_the_word=None,
                 has_attachment=None, label=None, should_mark_as_read=None,
                 should_archive=None):
    """Create a mail filter.

    Args:
      username: User to create filter for.
      from_: Filter from string.
      to: Filter to string.
      subject: Filter subject.
      has_the_word: Words to filter in.
      does_not_have_the_word: Words to filter out.
      has_attachment: Boolean for message having attachment.
      label: Label to apply.
      should_mark_as_read: Boolean for marking message as read.
      should_archive: Boolean for archiving message.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._serviceUrl('filter', username)
    properties = {
        'from': from_,
        'to': to,
        'subject': subject,
        'hasTheWord': has_the_word,
        'doesNotHaveTheWord': does_not_have_the_word,
        'hasAttachment': self._bool2str(has_attachment),
        'label': label,
        'shouldMarkAsRead': self._bool2str(should_mark_as_read),
        'shouldArchive': self._bool2str(should_archive),
    }
    return self._PostProperties(uri, properties)
def CreateSendAsAlias(self, username, name, address, reply_to=None,
                      make_default=None):
    """Create an alias to send mail as.

    Args:
      username: User to create alias for.
      name: Name of alias.
      address: Email address to send from.
      reply_to: Email address to reply to.
      make_default: Boolean for whether this is the new default sending alias.

    Returns:
      A dict containing the result of the create operation.
    """
    uri = self._serviceUrl('sendas', username)
    properties = {
        'name': name,
        'address': address,
        'replyTo': reply_to,
        'makeDefault': self._bool2str(make_default),
    }
    return self._PostProperties(uri, properties)
def UpdateWebClipSettings(self, username, enable):
    """Enable or disable Web Clips for a user.

    Args:
      username: User to update Web Clip settings for.
      enable: Boolean whether to enable Web Clip.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('webclip', username)
    return self._PutProperties(uri, {'enable': self._bool2str(enable)})
def UpdateForwarding(self, username, enable, forward_to=None, action=None):
    """Update mail-forwarding settings.

    Args:
      username: User to update forwarding for.
      enable: Boolean whether to enable this forwarding rule.
      forward_to: Email address to forward to.
      action: Action to take after forwarding.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('forwarding', username)
    properties = {'enable': self._bool2str(enable)}
    # Destination and post-forward action are only meaningful (and only
    # sent) when the rule is being enabled.
    if enable is True:
        properties['forwardTo'] = forward_to
        properties['action'] = action
    return self._PutProperties(uri, properties)
def UpdatePop(self, username, enable, enable_for=None, action=None):
    """Update POP3 settings.

    Args:
      username: User to update POP3 settings for.
      enable: Boolean whether to enable POP3.
      enable_for: Which messages to make available via POP3.
      action: Action to take after the user retrieves email via POP3.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('pop', username)
    properties = {'enable': self._bool2str(enable)}
    # Message scope and retrieval action only apply when POP3 is enabled.
    if enable is True:
        properties['enableFor'] = enable_for
        properties['action'] = action
    return self._PutProperties(uri, properties)
def UpdateImap(self, username, enable):
    """Enable or disable IMAP access for a user.

    Args:
      username: User to update IMAP settings for.
      enable: Boolean whether to enable IMAP.

    Returns:
      A dict containing the result of the update operation.
    """
    return self._PutProperties(self._serviceUrl('imap', username),
                               {'enable': self._bool2str(enable)})
def UpdateVacation(self, username, enable, subject=None, message=None,
                   contacts_only=None):
    """Update vacation-responder settings.

    Args:
      username: User to update vacation settings for.
      enable: Boolean whether to enable vacation responses.
      subject: Vacation message subject.
      message: Vacation message body.
      contacts_only: Boolean whether to send the message only to contacts.

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('vacation', username)
    properties = {'enable': self._bool2str(enable)}
    # Responder content is only transmitted when the responder is enabled.
    if enable is True:
        properties['subject'] = subject
        properties['message'] = message
        properties['contactsOnly'] = self._bool2str(contacts_only)
    return self._PutProperties(uri, properties)
def UpdateSignature(self, username, signature):
    """Set a user's email signature.

    Args:
      username: User to update the signature for.
      signature: Signature string.

    Returns:
      A dict containing the result of the update operation.
    """
    return self._PutProperties(self._serviceUrl('signature', username),
                               {'signature': signature})
def UpdateLanguage(self, username, language):
    """Set the user-interface language for a user.

    Args:
      username: User to update the language for.
      language: Language code.

    Returns:
      A dict containing the result of the update operation.
    """
    return self._PutProperties(self._serviceUrl('language', username),
                               {'language': language})
def UpdateGeneral(self, username, page_size=None, shortcuts=None, arrows=None,
                  snippets=None, unicode=None):
    """Update general Gmail settings.

    Only settings passed as non-None are included in the request, so
    unspecified settings are left unchanged on the server.

    Args:
      username: User to update general settings for.
      page_size: Number of messages to show per page.
      shortcuts: Boolean whether keyboard shortcuts are enabled.
      arrows: Boolean whether arrows are enabled.
      snippets: Boolean whether snippets are enabled.
      unicode: Whether unicode is enabled. (The parameter name shadows the
        Python 2 builtin, but is kept for backward compatibility with
        existing keyword-argument callers.)

    Returns:
      A dict containing the result of the update operation.
    """
    uri = self._serviceUrl('general', username)
    properties = {}
    # Use identity comparison with None (PEP 8): `!= None` invokes custom
    # __ne__ implementations and can misbehave for exotic argument types.
    if page_size is not None:
        properties['pageSize'] = str(page_size)
    if shortcuts is not None:
        properties['shortcuts'] = self._bool2str(shortcuts)
    if arrows is not None:
        properties['arrows'] = self._bool2str(arrows)
    if snippets is not None:
        properties['snippets'] = self._bool2str(snippets)
    if unicode is not None:
        properties['unicode'] = self._bool2str(unicode)
    return self._PutProperties(uri, properties)
| apache-2.0 |
ximion/appstream-dep11 | dep11/parsers.py | 2 | 14680 | #!/usr/bin/env python3
"""
Reads AppStream XML metadata and metadata from
XDG .desktop files.
"""
# Copyright (c) 2014 Abhishek Bhattacharjee <abhishek.bhattacharjee11@gmail.com>
# Copyright (c) 2014-2016 Matthias Klumpp <mak@debian.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program.
import re
from configparser import RawConfigParser
import lxml.etree as et
from xml.sax.saxutils import escape
from io import StringIO
from .component import Component, Screenshot, IconType, ProvidedItemType
from .utils import str_enc_dec
def read_desktop_data(cpt, dcontent, ignore_nodisplay=False):
    '''
    Parse a .desktop file and set the matching properties on the component.

    Args:
      cpt: The Component to fill with metadata.
      dcontent: The .desktop file content as a string.
      ignore_nodisplay: If True, process the file even if NoDisplay=true.

    Returns:
      True if the data was accepted (or a read error was recorded as a
      hint), False if the file describes something that must not appear in
      the metadata (not an application, hidden, or explicitly ignored).
    '''
    df = RawConfigParser(allow_no_value=True)

    items = None
    try:
        # read_file() replaces the readfp() alias, which has been
        # deprecated since Python 3.2 and was removed in Python 3.12.
        df.read_file(StringIO(dcontent))

        dtype = df.get("Desktop Entry", "Type")
        if dtype and dtype.lower() != "application":
            # ignore this file, it isn't an application
            cpt.add_hint("not-an-application")
            return False

        try:
            nodisplay = df.get("Desktop Entry", "NoDisplay")
            if not ignore_nodisplay and nodisplay and nodisplay.lower() == "true":
                # we ignore this .desktop file, it shouldn't be displayed
                cpt.add_hint("invisible-application")
                return False
        except Exception:
            # we don't care if the NoDisplay variable doesn't exist;
            # if it isn't there, the file should be processed
            pass

        try:
            asignore = df.get("Desktop Entry", "X-AppStream-Ignore")
            if asignore and asignore.lower() == "true":
                # this .desktop file should be excluded from AppStream metadata
                cpt.add_hint("invisible-application")
                return False
        except Exception:
            # we don't care if the X-AppStream-Ignore variable doesn't
            # exist; if it isn't there, the file should be processed
            pass

        items = df.items('Desktop Entry')
    except Exception as e:
        # this .desktop file is not parseable - record the problem as a
        # hint, but still report the component as processed
        cpt.add_hint("desktop-file-read-error", str(e))
        return True

    # if we reached this step, we are dealing with a GUI desktop app
    cpt.set_kind_from_string('desktop-app')

    for item in items:
        if len(item) != 2:
            continue
        # RawConfigParser lowercases option names, so keys here are
        # lowercase ("name", "name[de]", "comment", ...).
        key = item[0]
        value = str_enc_dec(item[1])
        if not value:
            continue
        value = value.strip()

        if key.startswith("name"):
            if key == 'name':
                cpt.name['C'] = value
            else:
                # localized key has the form "name[<locale>]"
                cpt.name[key[5:-1]] = value
        elif key == 'categories':
            value = value.split(';')
            value.pop()  # the trailing ';' yields an empty last element
            cpt.categories = value
        elif key.startswith('comment'):
            if key == 'comment':
                cpt.summary['C'] = value
            else:
                cpt.summary[key[8:-1]] = value
        elif key.startswith('keywords'):
            value = re.split(';|,', value)
            if not value[-1]:
                value.pop()
            # "keywords" -> untranslated ("C"); "keywords[<locale>]" -> locale
            locale = 'C' if key == 'keywords' else key[9:-1]
            values = list(map(str_enc_dec, value))
            if cpt.keywords:
                # skip keyword sets identical to one already stored for
                # another locale (avoids redundant translations)
                if set(value) not in [set(val) for val in cpt.keywords.values()]:
                    cpt.keywords[locale] = values
            else:
                cpt.keywords = {locale: values}
        elif key == 'mimetype':
            value = value.split(';')
            if len(value) > 1:
                value.pop()
            for val in value:
                cpt.add_provided_item(ProvidedItemType.MIMETYPE, val)
        elif key == 'icon':
            cpt.set_icon(IconType.CACHED, value)

    return True
def _get_tag_locale(subs):
attr_dic = subs.attrib
if attr_dic:
locale = attr_dic.get('{http://www.w3.org/XML/1998/namespace}lang')
if locale:
return locale
return "C"
def _parse_description_tag(subs):
    '''
    Handle the <description> markup tag.

    Returns a dict mapping a locale name ("C" for untranslated) to the
    assembled HTML description string for that locale.
    '''

    def prepare_desc_string(s):
        '''
        Clears linebreaks and XML-escapes the resulting string
        '''

        if not s:
            return ""
        # collapse any run of whitespace (including newlines) to one space
        s = s.strip()
        s = " ".join(s.split())
        return escape(s)

    ddict = dict()

    # The description tag translation is combined per language,
    # for faster parsing on the client side.
    # In case no translation is found, the untranslated version is used instead.
    # the DEP-11 YAML stores the description as HTML
    for usubs in subs:
        locale = _get_tag_locale(usubs)

        if usubs.tag == 'p':
            if not locale in ddict:
                ddict[locale] = ""
            ddict[locale] += "<p>%s</p>" % str_enc_dec(prepare_desc_string(usubs.text))
        elif usubs.tag == 'ul' or usubs.tag == 'ol':
            tmp_dict = dict()
            # find the right locale, or fallback to untranslated
            # NOTE(review): <li> items are first grouped per locale, then the
            # whole <ul>/<ol> wrapper is appended to each locale's string.
            for u_usubs in usubs:
                locale = _get_tag_locale(u_usubs)
                if not locale in tmp_dict:
                    tmp_dict[locale] = ""

                if u_usubs.tag == 'li':
                    tmp_dict[locale] += "<li>%s</li>" % str_enc_dec(prepare_desc_string(u_usubs.text))

            for locale, value in tmp_dict.items():
                if not locale in ddict:
                    # This should not happen (but better be prepared)
                    ddict[locale] = ""
                ddict[locale] += "<%s>%s</%s>" % (usubs.tag, value, usubs.tag)

    return ddict
def _parse_screenshots_tag(subs):
    '''
    Handle the <screenshots> tag: captions, source images, default flag.

    Returns a list of Screenshot objects; entries without a usable source
    image are dropped.
    '''
    shots = []
    for usubs in subs:
        # for one screenshot tag
        if usubs.tag == 'screenshot':
            shot = Screenshot()
            attr_dic = usubs.attrib
            if attr_dic.get('type'):
                if attr_dic['type'] == 'default':
                    shot.default = True

            # handle pre-0.6 spec screenshot notations
            # (image URL as direct text content of <screenshot>)
            url = usubs.text.strip() if usubs.text else None
            if url:
                # we do not know width or height yet, that information will be added later
                shot.set_source_image(url, 0, 0)
                shots.append(shot)
                continue

            # else look for captions and image tag
            for tags in usubs:
                if tags.tag == 'caption':
                    # for localisation
                    # NOTE(review): this takes the *last* attribute value of
                    # <caption> as the locale key - in practice the only
                    # attribute is xml:lang, but confirm before relying on it.
                    attr_dic = tags.attrib
                    if attr_dic:
                        for v in attr_dic.values():
                            key = v
                    else:
                        key = 'C'
                    caption_text = str_enc_dec(tags.text)
                    if caption_text:
                        shot.caption[key] = caption_text
                if tags.tag == 'image':
                    # width/height are unknown at parse time (added later)
                    shot.set_source_image(tags.text, 0, 0)

            # only add the screenshot if we have a source image
            if shot.has_source_image():
                shots.append(shot)

    return shots
def _parse_releases_tag(relstag):
'''
Parses a releases tag and returns the last three releases
'''
rels = list()
for subs in relstag:
# for one screeshot tag
if subs.tag != 'release':
continue
release = dict()
attr_dic = subs.attrib
if attr_dic.get('version'):
release['version'] = attr_dic['version']
if attr_dic.get('timestamp'):
try:
release['unix-timestamp'] = int(attr_dic['timestamp'])
except:
# the timestamp was wrong - we silently ignore the error
# TODO: Emit warning hint
continue
else:
# we can't use releases which don't have a timestamp
# TODO: Emit a warning hint here
continue
# else look for captions and image tag
for usubs in subs:
if usubs.tag == 'description':
release['description'] = _parse_description_tag(usubs)
rels.append(release)
# sort releases, newest first
rels = sorted(rels, key=lambda k: k['unix-timestamp'], reverse=True)
if len(rels) > 3:
return rels[:3]
return rels
def read_appstream_upstream_xml(cpt, xml_content):
    '''
    Read the AppStream upstream XML (usr/share/metainfo) and set the
    matching properties on the given component.

    Parse errors are not raised: they are recorded on the component as a
    "metainfo-parse-error" hint and the function returns early.
    '''
    root = None
    try:
        # Drop default namespace - some add a bogus namespace to their metainfo files which breaks the parser.
        # When we actually start using namespaces in future, we need to handle them explicitly.
        xml_content = re.sub(r'\sxmlns="[^"]+"', '', xml_content, count=1)
        root = et.fromstring(bytes(xml_content, 'utf-8'))
    except Exception as e:
        cpt.add_hint("metainfo-parse-error", str(e))
        return
    if root is None:
        cpt.add_hint("metainfo-parse-error", "Error is unknown, the root node was null.")
        # FIX: bail out - previously execution fell through and would have
        # crashed on 'root.tag' below.
        return

    if root.tag == 'application':
        # we parse ancient AppStream XML, but it is a good idea to update it to make use of newer features, remove some ancient
        # oddities and to simplify the parser in future. So we add a hint for that.
        cpt.add_hint("ancient-metadata")

    # set the type of our component
    cpt.set_kind_from_string(root.attrib.get('type'))

    for subs in root:
        locale = _get_tag_locale(subs)
        value = None
        if subs.text:
            value = subs.text.strip()

        if subs.tag == 'id':
            cpt.cid = value
            # INFO: legacy support, remove later
            tps = subs.attrib.get('type')
            if tps:
                cpt.set_kind_from_string(tps)
        elif subs.tag == "name":
            cpt.name[locale] = value
        elif subs.tag == "summary":
            cpt.summary[locale] = value
        elif subs.tag == "description":
            desc = _parse_description_tag(subs)
            cpt.description = desc
        elif subs.tag == "screenshots":
            screen = _parse_screenshots_tag(subs)
            cpt.screenshots = screen
        elif subs.tag == "provides":
            # each child describes one provided item; note the checks are
            # independent ifs, matching at most one tag name per child
            for ptag in subs:
                ptag_text = None
                if ptag.text:
                    ptag_text = ptag.text.strip()

                if ptag.tag == "binary":
                    cpt.add_provided_item(
                        ProvidedItemType.BINARY, ptag_text
                    )
                if ptag.tag == 'library':
                    cpt.add_provided_item(
                        ProvidedItemType.LIBRARY, ptag_text
                    )
                if ptag.tag == 'dbus':
                    bus_kind = ptag.attrib.get('type')
                    # DEP-11 uses "user" where DBus says "session"
                    if bus_kind == "session":
                        bus_kind = "user"
                    if bus_kind:
                        cpt.add_provided_item(ProvidedItemType.DBUS, {'type': bus_kind, 'service': ptag_text})
                if ptag.tag == 'firmware':
                    fw_type = ptag.attrib.get('type')
                    fw_data = {'type': fw_type}
                    fw_valid = True
                    if fw_type == "flashed":
                        fw_data['guid'] = ptag_text
                    elif fw_type == "runtime":
                        fw_data['fname'] = ptag_text
                    else:
                        # unknown firmware type - drop the entry
                        fw_valid = False
                    if fw_valid:
                        cpt.add_provided_item(ProvidedItemType.FIRMWARE, fw_data)
                if ptag.tag == 'python2':
                    cpt.add_provided_item(
                        ProvidedItemType.PYTHON_2, ptag_text
                    )
                if ptag.tag == 'python3':
                    cpt.add_provided_item(
                        ProvidedItemType.PYTHON_3, ptag_text
                    )
                if ptag.tag == 'modalias':
                    cpt.add_provided_item(
                        ProvidedItemType.MODALIAS, ptag_text
                    )
                if ptag.tag == 'mimetype':
                    cpt.add_provided_item(
                        ProvidedItemType.MIMETYPE, ptag_text
                    )
                if ptag.tag == 'font':
                    font_file = ptag.attrib.get('file')
                    if font_file:
                        cpt.add_provided_item(ProvidedItemType.FONT, {'file': font_file, 'name': ptag_text})
        elif subs.tag == "mimetypes":
            # top-level <mimetypes> list; mapped to provided mimetype items
            for mimetag in subs:
                if mimetag.tag == "mimetype":
                    cpt.add_provided_item(
                        ProvidedItemType.MIMETYPE, mimetag.text
                    )
        elif subs.tag == "url":
            if cpt.url:
                cpt.url.update({subs.attrib['type']: value})
            else:
                cpt.url = {subs.attrib['type']: value}
        elif subs.tag == "project_license":
            cpt.project_license = value
        elif subs.tag == "project_group":
            cpt.project_group = value
        elif subs.tag == "developer_name":
            cpt.developer_name[locale] = value
        elif subs.tag == "extends":
            cpt.extends.append(value)
        elif subs.tag == "compulsory_for_desktop":
            cpt.compulsory_for_desktops.append(value)
        elif subs.tag == "releases":
            releases = _parse_releases_tag(subs)
            cpt.releases = releases
| lgpl-3.0 |
RachitKansal/scikit-learn | examples/ensemble/plot_gradient_boosting_regularization.py | 355 | 2843 | """
================================
Gradient Boosting regularization
================================
Illustration of the effect of different regularization strategies
for Gradient Boosting. The example is taken from Hastie et al 2009.
The loss function used is binomial deviance. Regularization via
shrinkage (``learning_rate < 1.0``) improves performance considerably.
In combination with shrinkage, stochastic gradient boosting
(``subsample < 1.0``) can produce more accurate models by reducing the
variance via bagging.
Subsampling without shrinkage usually does poorly.
Another strategy to reduce the variance is by subsampling the features
analogous to the random splits in Random Forests
(via the ``max_features`` parameter).
.. [1] T. Hastie, R. Tibshirani and J. Friedman, "Elements of Statistical
Learning Ed. 2", Springer, 2009.
"""
print(__doc__)

# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
#
# License: BSD 3 clause

import numpy as np
import matplotlib.pyplot as plt

from sklearn import ensemble
from sklearn import datasets


X, y = datasets.make_hastie_10_2(n_samples=12000, random_state=1)
X = X.astype(np.float32)

# map labels from {-1, 1} to {0, 1}
labels, y = np.unique(y, return_inverse=True)

# first 2000 samples train, remainder test (as in Hastie et al. 2009)
X_train, X_test = X[:2000], X[2000:]
y_train, y_test = y[:2000], y[2000:]

original_params = {'n_estimators': 1000, 'max_leaf_nodes': 4, 'max_depth': None, 'random_state': 2,
                   'min_samples_split': 5}

# (legend label, plot color, parameter overrides) per regularization strategy
strategies = [
    ('No shrinkage', 'orange',
     {'learning_rate': 1.0, 'subsample': 1.0}),
    ('learning_rate=0.1', 'turquoise',
     {'learning_rate': 0.1, 'subsample': 1.0}),
    ('subsample=0.5', 'blue',
     {'learning_rate': 1.0, 'subsample': 0.5}),
    ('learning_rate=0.1, subsample=0.5', 'gray',
     {'learning_rate': 0.1, 'subsample': 0.5}),
    ('learning_rate=0.1, max_features=2', 'magenta',
     {'learning_rate': 0.1, 'max_features': 2}),
]

plt.figure()

for label, color, overrides in strategies:
    params = dict(original_params)
    params.update(overrides)

    clf = ensemble.GradientBoostingClassifier(**params)
    clf.fit(X_train, y_train)

    # compute the test-set deviance at every boosting iteration
    test_deviance = np.zeros((params['n_estimators'],), dtype=np.float64)
    for i, y_pred in enumerate(clf.staged_decision_function(X_test)):
        # clf.loss_ assumes that y_test[i] in {0, 1}
        test_deviance[i] = clf.loss_(y_test, y_pred)

    # plot every 5th iteration to keep the figure readable
    plt.plot((np.arange(test_deviance.shape[0]) + 1)[::5], test_deviance[::5],
             '-', color=color, label=label)

plt.legend(loc='upper left')
plt.xlabel('Boosting Iterations')
plt.ylabel('Test Set Deviance')

plt.show()
| bsd-3-clause |
dfalt974/SickRage | lib/sqlalchemy/connectors/mxodbc.py | 79 | 5362 | # connectors/mxodbc.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Provide an SQLALchemy connector for the eGenix mxODBC commercial
Python adapter for ODBC. This is not a free product, but eGenix
provides SQLAlchemy with a license for use in continuous integration
testing.
This has been tested for use with mxODBC 3.1.2 on SQL Server 2005
and 2008, using the SQL Server Native driver. However, it is
possible for this to be used on other database platforms.
For more info on mxODBC, see http://www.egenix.com/
"""
import sys
import re
import warnings
from . import Connector
class MxODBCConnector(Connector):
    """SQLAlchemy connector for the eGenix mxODBC DBAPI.

    Selects the platform-specific mx.ODBC submodule at dbapi() time and
    adapts mxODBC's connection settings and error handling to what
    SQLAlchemy expects.
    """

    driver = 'mxodbc'

    supports_sane_multi_rowcount = False
    supports_unicode_statements = True
    supports_unicode_binds = True

    supports_native_decimal = True

    @classmethod
    def dbapi(cls):
        # this classmethod will normally be replaced by an instance
        # attribute of the same name, so this is normally only called once.
        cls._load_mx_exceptions()
        platform = sys.platform
        if platform == 'win32':
            from mx.ODBC import Windows as module
        # this can be the string "linux2", and possibly others
        elif 'linux' in platform:
            from mx.ODBC import unixODBC as module
        elif platform == 'darwin':
            from mx.ODBC import iODBC as module
        else:
            raise ImportError("Unrecognized platform for mxODBC import")
        return module

    @classmethod
    def _load_mx_exceptions(cls):
        """ Import mxODBC exception classes into the module namespace,
        as if they had been imported normally. This is done here
        to avoid requiring all SQLAlchemy users to install mxODBC.
        """
        global InterfaceError, ProgrammingError
        from mx.ODBC import InterfaceError
        from mx.ODBC import ProgrammingError

    def on_connect(self):
        def connect(conn):
            conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
            conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT
            conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
            conn.errorhandler = self._error_handler()
        return connect

    def _error_handler(self):
        """ Return a handler that adjusts mxODBC's raised Warnings to
        emit Python standard warnings.
        """
        from mx.ODBC.Error import Warning as MxOdbcWarning

        def error_handler(connection, cursor, errorclass, errorvalue):
            if issubclass(errorclass, MxOdbcWarning):
                # rebase the mxODBC warning class onto the builtin Warning
                # so the standard warnings machinery accepts it
                errorclass.__bases__ = (Warning,)
                warnings.warn(message=str(errorvalue),
                              category=errorclass,
                              stacklevel=2)
            else:
                raise errorclass(errorvalue)
        return error_handler

    def create_connect_args(self, url):
        """ Return a tuple of *args,**kwargs for creating a connection.

        The mxODBC 3.x connection constructor looks like this:

            connect(dsn, user='', password='',
                    clear_auto_commit=1, errorhandler=None)

        This method translates the values in the provided uri
        into args and kwargs needed to instantiate an mxODBC Connection.

        The arg 'errorhandler' is not used by SQLAlchemy and will
        not be populated.
        """
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        args = opts.pop('host')
        # mxODBC connects by DSN only; port/database are not part of its
        # constructor signature
        opts.pop('port', None)
        opts.pop('database', None)
        return (args,), opts

    def is_disconnect(self, e, connection, cursor):
        # TODO: eGenix recommends checking connection.closed here
        # Does that detect dropped connections ?
        if isinstance(e, self.dbapi.ProgrammingError):
            return "connection already closed" in str(e)
        elif isinstance(e, self.dbapi.Error):
            return '[08S01]' in str(e)
        else:
            return False

    def _get_server_version_info(self, connection):
        # eGenix suggests using conn.dbms_version instead
        # of what we're doing here
        dbapi_con = connection.connection
        version = []
        # raw string: the previous '[.\-]' literal contained the invalid
        # escape sequence '\-' (DeprecationWarning; SyntaxWarning in 3.12+)
        r = re.compile(r'[.\-]')
        # 18 == pyodbc.SQL_DBMS_VER
        for n in r.split(dbapi_con.getinfo(18)[1]):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _get_direct(self, context):
        if context:
            native_odbc_execute = context.execution_options.\
                get('native_odbc_execute', 'auto')
            # default to direct=True in all cases, is more generally
            # compatible especially with SQL Server
            return False if native_odbc_execute is True else True
        else:
            return True

    def do_executemany(self, cursor, statement, parameters, context=None):
        cursor.executemany(
            statement, parameters, direct=self._get_direct(context))

    def do_execute(self, cursor, statement, parameters, context=None):
        cursor.execute(statement, parameters, direct=self._get_direct(context))
| gpl-3.0 |
jeremypogue/ansible | lib/ansible/plugins/lookup/subelements.py | 6 | 4311 | # (c) 2013, Serge van Ginderachter <serge@vanginderachter.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.six import string_types
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.boolean import boolean
FLAGS = ('skip_missing',)
class LookupModule(LookupBase):
    """Lookup plugin pairing each item of a list with the items of one of
    its (possibly nested, dot-separated) sub-list keys."""

    def run(self, terms, variables, **kwargs):

        def _raise_terms_error(msg=""):
            raise AnsibleError(
                "subelements lookup expects a list of two or three items, "
                + msg)

        terms[0] = listify_lookup_plugin_terms(terms[0], templar=self._templar, loader=self._loader)

        # check lookup terms - check number of terms
        if not isinstance(terms, list) or not 2 <= len(terms) <= 3:
            _raise_terms_error()

        # first term should be a list (or dict), second a string holding the subkey
        if not isinstance(terms[0], (list, dict)) or not isinstance(terms[1], string_types):
            _raise_terms_error("first a dict or a list, second a string pointing to the subkey")
        subelements = terms[1].split(".")

        if isinstance(terms[0], dict):  # convert to list:
            if terms[0].get('skipped', False) is not False:
                # the registered result was completely skipped
                return []
            elementlist = []
            # FIX: dict.iterkeys() only exists on Python 2 and raises
            # AttributeError on Python 3; iterating the dict directly
            # yields keys on both versions.
            for key in terms[0]:
                elementlist.append(terms[0][key])
        else:
            elementlist = terms[0]

        # check for optional flags in third term
        flags = {}
        if len(terms) == 3:
            flags = terms[2]
        if not isinstance(flags, dict) and not all([isinstance(key, string_types) and key in FLAGS for key in flags]):
            _raise_terms_error("the optional third item must be a dict with flags %s" % FLAGS)

        # build_items
        ret = []
        for item0 in elementlist:
            if not isinstance(item0, dict):
                raise AnsibleError("subelements lookup expects a dictionary, got '%s'" % item0)
            if item0.get('skipped', False) is not False:
                # this particular item is to be skipped
                continue

            skip_missing = boolean(flags.get('skip_missing', False))
            subvalue = item0
            lastsubkey = False
            sublist = []
            # walk the dot-separated subkey path down into the item
            for subkey in subelements:
                if subkey == subelements[-1]:
                    lastsubkey = True
                if subkey not in subvalue:
                    if skip_missing:
                        continue
                    else:
                        raise AnsibleError("could not find '%s' key in iterated item '%s'" % (subkey, subvalue))
                if not lastsubkey:
                    if not isinstance(subvalue[subkey], dict):
                        if skip_missing:
                            continue
                        else:
                            raise AnsibleError("the key %s should point to a dictionary, got '%s'" % (subkey, subvalue[subkey]))
                    else:
                        subvalue = subvalue[subkey]
                else:  # lastsubkey
                    if not isinstance(subvalue[subkey], list):
                        raise AnsibleError("the key %s should point to a list, got '%s'" % (subkey, subvalue[subkey]))
                    else:
                        sublist = subvalue.pop(subkey, [])
            # one (parent, child) pair per element of the sub-list
            for item1 in sublist:
                ret.append((item0, item1))

        return ret
| gpl-3.0 |
bhargav/scikit-learn | doc/conf.py | 26 | 8446 | # -*- coding: utf-8 -*-
#
# scikit-learn documentation build configuration file, created by
# sphinx-quickstart on Fri Jan 8 09:13:42 2010.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import print_function
import sys
import os
from sklearn.externals.six import u
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.insert(0, os.path.abspath('sphinxext'))
from github_link import make_linkcode_resolve
# -- General configuration ---------------------------------------------------

# Try to override the matplotlib configuration as early as possible
try:
    import gen_rst
except:
    # gen_rst is optional at configuration-load time; a failed import is
    # deliberately ignored so the docs can still be configured without it
    pass
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['gen_rst',
              'sphinx.ext.autodoc', 'sphinx.ext.autosummary',
              'sphinx.ext.pngmath', 'numpy_ext.numpydoc',
              'sphinx.ext.linkcode',
              ]

# generate autosummary even if no references
# (this flag was previously assigned twice; the duplicate was removed)
autosummary_generate = True

autodoc_default_flags = ['members', 'inherited-members']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['templates']

# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'

# Generate the plots for the gallery
plot_gallery = True

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u('scikit-learn')
copyright = u('2010 - 2015, scikit-learn developers (BSD License)')

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# (sklearn is imported here, mid-file, so the docs report the version of
# the package actually being documented)
import sklearn
version = sklearn.__version__
# The full version, including alpha/beta/rc tags.
release = sklearn.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of documents that shouldn't be included in the build.
#unused_docs = []

# List of directories, relative to source directory, that shouldn't be
# searched for source files.
exclude_trees = ['_build', 'templates', 'includes']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = False

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'scikit-learn'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {'oldversion': False, 'collapsiblesidebar': True,
                      'google_analytics': True, 'surveybanner': False,
                      'sprintbanner': True}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['themes']

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'scikit-learn'

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'logos/scikit-learn-logo-small.png'

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'logos/favicon.ico'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['images']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
html_domain_indices = False

# If false, no index is generated.
html_use_index = False

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''

# Output file base name for HTML help builder.
htmlhelp_basename = 'scikit-learndoc'


# -- Options for LaTeX output ------------------------------------------------

# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'

# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [('index', 'user_guide.tex', u('scikit-learn user guide'),
                    u('scikit-learn developers'), 'manual'), ]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = "logos/scikit-learn-logo.png"

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# Additional stuff for the LaTeX preamble.
latex_preamble = r"""
\usepackage{amsmath}\usepackage{amsfonts}\usepackage{bm}\usepackage{morefloats}
\usepackage{enumitem} \setlistdepth{10}
"""

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
latex_domain_indices = False

trim_doctests_flags = True
def generate_example_rst(app, what, name, obj, options, lines):
    """Ensure an (possibly empty) ``<name>.examples`` file exists.

    Registered as an ``autodoc-process-docstring`` hook: generated pages
    include their ``.examples`` file, so an empty placeholder is created
    up front to avoid inclusion errors when a class / module has no
    examples.
    """
    target = os.path.join(
        app.srcdir, "modules", "generated", "%s.examples" % name)
    if os.path.exists(target):
        return
    # Create an empty placeholder file ("touch").
    with open(target, 'w'):
        pass
def setup(app):
    """Sphinx extension entry point: register assets and autodoc hooks."""
    # to hide/show the prompt in code examples:
    app.add_javascript('js/copybutton.js')
    # Create empty *.examples placeholder files for documented objects.
    app.connect('autodoc-process-docstring', generate_example_rst)
# The following is used by sphinx.ext.linkcode to provide links to github
linkcode_resolve = make_linkcode_resolve('sklearn',
u'https://github.com/scikit-learn/'
'scikit-learn/blob/{revision}/'
'{package}/{path}#L{lineno}')
| bsd-3-clause |
luser/socorro | scripts/hive_adi_test.py | 14 | 3016 | #!/usr/bin/env python
import codecs
import datetime
import optparse
import os
import pyhs2
import tempfile
import unicodedata
import urllib2
# Example command-line usage:
# $ . /etc/socorro/socorrorc
# $ $PYTHON hive_adi_test.py -d '2015-01-21' -s peach-gw.peach.metrics.scl3.mozilla.com -o /tmp/output_adi.txt
def main():
    """Query Hive for one day of blocklist-ping (ADI) rows and dump them.

    Connects to a HiveServer2 instance (``--host``), runs an aggregation
    over ``v2_raw_logs`` for ``--target-date`` and writes the resulting
    rows tab-separated to ``--output-filename``.

    NOTE: this is a Python 2 script (uses ``unicode``/``basestring`` and
    ``urllib2``).
    """
    storage_date = datetime.date.today().isoformat()
    # Defaulting to creating a temp file for output
    raw_adi_logs_pathname = os.path.join(
        tempfile.gettempdir(),
        "%s.raw_adi_logs.TEMPORARY%s" % (
            storage_date,
            '.txt'
        )
    )
    p = optparse.OptionParser()
    p.add_option('--target-date', '-d', default=storage_date)
    p.add_option('--host', '-s', default='localhost')
    p.add_option('--user', '-u', default='socorro')
    p.add_option('--output-filename', '-o', default=raw_adi_logs_pathname)
    options, arguments = p.parse_args()
    # '%%' sequences survive the single '%' substitution of the target
    # date below and reach Hive as literal '%' characters.
    query = """
        select
            ds,
            split(request_url,'/')[5],
            split(split(request_url,'/')[10], '%%20')[0],
            split(split(request_url,'/')[10], '%%20')[1],
            split(request_url,'/')[4],
            split(request_url,'/')[6],
            split(request_url,'/')[9],
            split(request_url,'/')[3],
            count(*)
        FROM v2_raw_logs
        WHERE
            (domain='addons.mozilla.org' OR domain='blocklist.addons.mozilla.org')
            and http_status_code = '200'
            and request_url like '/blocklist/3/%%'
            and ds='%s'
        GROUP BY
            ds,
            split(request_url,'/')[5],
            split(split(request_url,'/')[10], '%%20')[0],
            split(split(request_url,'/')[10], '%%20')[1],
            split(request_url,'/')[4],
            split(request_url,'/')[6],
            split(request_url,'/')[9],
            split(request_url,'/')[3]
    """
    hive = pyhs2.connect(
        host=options.host,
        port=10000,
        authMechanism='PLAIN',
        user=options.user,
        password='ignored',
        database='default',
        # the underlying TSocket setTimeout() wants milliseconds
        timeout=30 * 60 * 1000
    )
    def remove_control_characters(s):
        # Strip Unicode category "C*" (control) characters which would
        # corrupt the tab-separated output.
        if isinstance(s, str):
            s = unicode(s, 'utf-8', errors='replace')
        return ''.join(c for c in s if unicodedata.category(c)[0] != "C")
    with codecs.open(options.output_filename, 'w', 'utf-8') as f:
        cur = hive.cursor()
        query = query % options.target_date
        cur.execute(query)
        for row in cur:
            # Rows with any NULL column are skipped entirely.
            if None in row:
                continue
            f.write(
                "\t"
                .join(
                    remove_control_characters(
                        urllib2.unquote(v)
                    ).replace('\\', '\\\\')
                    if isinstance(v, basestring) else str(v)
                    for v in row
                )
            )
            f.write("\n")
if __name__ == '__main__':
main()
| mpl-2.0 |
caioserra/apiAdwords | examples/adspygoogle/dfp/v201308/activity_service/update_activities.py | 2 | 2132 | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates activities.
To determine which activities exist, run get_all_activities.py.
Tags: ActivityService.getActivity
Tags: ActivityService.updateActivities
"""
__author__ = 'Vincent Tsao'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Set the ID of the activity to update.
ACTIVITY_ID = 'INSERT_ACTIVITY_ID_HERE'
def main(client, activity_id):
  """Update the expected URL of one DFP activity.

  Args:
    client: an initialized DfpClient.
    activity_id: ID of the activity to update.

  NOTE: Python 2 example code (print statements).
  """
  # Initialize appropriate service.
  activity_service = client.GetService('ActivityService', version='v201308')
  # Get the activity.
  activity = activity_service.GetActivity(activity_id)[0]
  if activity:
    # Update the expected URL.
    activity['expectedURL'] = 'https://google.com'
    # Update the activity on the server.
    activities = activity_service.UpdateActivities([activity])
    # Display results.
    if activities:
      for updated_activity in activities:
        print (('Activity with ID \'%s\' and name \'%s\' was updated.')
               % (updated_activity['id'], updated_activity['name']))
    else:
      print 'No activities were updated.'
  else:
    print 'No activities found to update.'
if __name__ == '__main__':
# Initialize client object.
dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
main(dfp_client, ACTIVITY_ID)
| apache-2.0 |
Drooids/odoo | addons/pad/pad.py | 84 | 4296 | # -*- coding: utf-8 -*-
from openerp.osv import fields, osv
import random
import re
import string
import urllib2
import logging
from openerp import SUPERUSER_ID
from openerp.tools.translate import _
from openerp.tools import html2plaintext
from py_etherpad import EtherpadLiteClient
_logger = logging.getLogger(__name__)
class pad_common(osv.osv_memory):
    """Mixin giving odoo models an Etherpad-backed text field.

    Models mix this in and declare a field with a ``pad_content_field``
    attribute; pad URLs are generated per record and the pad content is
    copied back into the real HTML field on write/create.
    (Python 2 / old-API odoo code.)
    """
    _name = 'pad.common'
    def pad_is_configured(self, cr, uid, context=None):
        """Return True when the user's company has a pad server set."""
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        return bool(user.company_id.pad_server)
    def pad_generate_url(self, cr, uid, context=None):
        """Build a unique pad URL (and optionally seed the pad content).

        Returns a dict with ``server``, ``path`` and ``url`` keys; only
        ``server`` (falsy) when no pad server is configured.
        NOTE(review): assumes ``context`` is a dict when the
        field_name/model/object_id seeding keys are expected -- a None
        context would raise here; confirm callers always pass one.
        """
        company = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context).company_id
        pad = {
            "server" : company.pad_server,
            "key" : company.pad_key,
        }
        # make sure pad server in the form of http://hostname
        if not pad["server"]:
            return pad
        if not pad["server"].startswith('http'):
            pad["server"] = 'http://' + pad["server"]
        pad["server"] = pad["server"].rstrip('/')
        # generate a salt
        s = string.ascii_uppercase + string.digits
        salt = ''.join([s[random.randint(0, len(s) - 1)] for i in range(10)])
        #path
        # etherpad hardcodes pad id length limit to 50
        path = '-%s-%s' % (self._name, salt)
        path = '%s%s' % (cr.dbname.replace('_','-')[0:50 - len(path)], path)
        # contruct the url
        url = '%s/p/%s' % (pad["server"], path)
        #if create with content
        if "field_name" in context and "model" in context and "object_id" in context:
            myPad = EtherpadLiteClient( pad["key"], pad["server"]+'/api')
            try:
                myPad.createPad(path)
            except urllib2.URLError:
                raise osv.except_osv(_("Error"), _("Pad creation failed, \
either there is a problem with your pad server URL or with your connection."))
            #get attr on the field model
            model = self.pool[context["model"]]
            field = model._fields[context['field_name']]
            real_field = field.pad_content_field
            #get content of the real field
            for record in model.browse(cr, uid, [context["object_id"]]):
                if record[real_field]:
                    myPad.setText(path, (html2plaintext(record[real_field]).encode('utf-8')))
                #Etherpad for html not functional
                #myPad.setHTML(path, record[real_field])
        return {
            "server": pad["server"],
            "path": path,
            "url": url,
        }
    def pad_get_content(self, cr, uid, url, context=None):
        """Fetch the pad's exported HTML body; '' on any failure.

        NOTE(review): the bare ``except`` swallows every error (including
        parse/network issues) and only logs -- deliberate best-effort.
        """
        content = ''
        if url:
            try:
                page = urllib2.urlopen('%s/export/html'%url).read()
                mo = re.search('<body>(.*)</body>',page)
                if mo:
                    content = mo.group(1)
            except:
                _logger.warning("No url found '%s'.", url)
        return content
    # TODO
    # reverse engineer protocol to be setHtml without using the api key
    def write(self, cr, uid, ids, vals, context=None):
        # Sync pad content into the real field before the ORM write.
        self._set_pad_value(cr, uid, vals, context)
        return super(pad_common, self).write(cr, uid, ids, vals, context=context)
    def create(self, cr, uid, vals, context=None):
        # Sync pad content into the real field before the ORM create.
        self._set_pad_value(cr, uid, vals, context)
        return super(pad_common, self).create(cr, uid, vals, context=context)
    # Set the pad content in vals
    def _set_pad_value(self, cr, uid, vals, context=None):
        """For each pad field in vals, pull the pad text into its content field (mutates vals)."""
        for k,v in vals.items():
            field = self._fields[k]
            if hasattr(field,'pad_content_field'):
                vals[field.pad_content_field] = self.pad_get_content(cr, uid, v, context=context)
    def copy(self, cr, uid, id, default=None, context=None):
        """Duplicate the record with fresh pad URLs (pads are not shared between copies)."""
        if not default:
            default = {}
        for k, field in self._fields.iteritems():
            if hasattr(field,'pad_content_field'):
                pad = self.pad_generate_url(cr, uid, context)
                default[k] = pad.get('url')
        return super(pad_common, self).copy(cr, uid, id, default, context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
virtualnobi/MediaFiler | nobi/ProductTraderPattern.py | 1 | 2526 | """Product Trader Pattern
This class implements a simple version of the Product Trader Pattern:
A SimpleProductTrader manages a registry mapping specifications to classes.
Strings are used as Specification.
For each Product, a SimpleProductTrader is created.
Subclasses of Product register with this SimpleProductTrader.
To instantiate a (subclass of) Product, the appropriate class is retrieved
from the SimpleProductTrader using the Specification.
(c) by nobisoft 2015-
"""
# Imports
## Standard
from __future__ import absolute_import
import logging
## Contributed
## nobi
## Project
# Package Variables
Logger = logging.getLogger(__name__)
class SimpleProductTrader(object):
    """Simple Product Trader mapping specification strings to classes.

    Subclasses of a Product register themselves under a specification
    string; clients look up the class by that string to instantiate it.
    """
    # Constants
    # Class Methods
    # Lifecycle
    def __init__(self):
        """Create a SimpleProductTrader with empty registry.
        """
        # inheritance
        super(SimpleProductTrader, self).__init__()
        # internal state: mapping specification String -> Class
        self.productRegistry = {}
    # Getters
    def isKnown(self, specString):
        """Return True is specString is a known specification, i.e., getClassFor() would return a valid class.
        String specString
        Return Boolean
        """
        return(specString in self.productRegistry)
    def getClassFor(self, specString):
        """Return the class to which specString is mapped.
        Raises KeyError when specString was not registered.  (KeyError is
        a subclass of Exception, so handlers that caught the historical
        BaseException still catch it.)
        Returns Class
        """
        try:
            # Single dict lookup instead of isKnown() + subscript.
            return(self.productRegistry[specString])
        except KeyError:
            raise KeyError('Specification "%s" not found in registry of SimpleProductTrader' % specString)
    def getClasses(self):
        """Return the set of classes registered.
        """
        return(set(self.productRegistry.values()))
    # Setters
    def registerClassFor(self, clas, specString):
        """Inform the product trader that clas handles specString.
        An existing registration is overwritten with a warning.
        """
        if (specString in self.productRegistry):
            Logger.warning('Overwriting specification "%s" in SimpleProductTrader' % specString)
        self.productRegistry[specString] = clas
| gpl-3.0 |
KohlsTechnology/ansible | test/units/modules/network/f5/test_bigip_monitor_tcp_half_open.py | 26 | 10303 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
import pytest
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_monitor_tcp_half_open import Parameters
from library.modules.bigip_monitor_tcp_half_open import ModuleManager
from library.modules.bigip_monitor_tcp_half_open import ArgumentSpec
from library.modules.bigip_monitor_tcp_half_open import HAS_F5SDK
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_monitor_tcp_half_open import Parameters
from ansible.modules.network.f5.bigip_monitor_tcp_half_open import ModuleManager
from ansible.modules.network.f5.bigip_monitor_tcp_half_open import ArgumentSpec
from ansible.modules.network.f5.bigip_monitor_tcp_half_open import HAS_F5SDK
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
# Directory holding the JSON/text fixtures used by these tests, plus a
# per-path cache so each fixture file is read and parsed only once.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    """Load (and cache) fixture file *name* from ``fixture_path``.

    The file content is parsed as JSON when possible; otherwise the raw
    text is returned unchanged (some fixtures are plain text).
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except ValueError:
        # Not JSON -- deliberately fall back to the raw file content.
        # (json.JSONDecodeError is a ValueError subclass; the previous
        # blanket `except Exception` hid unrelated errors as well.)
        pass
    fixture_data[path] = data
    return data
class TestParameters(unittest.TestCase):
    """Unit tests for the tcp_half_open monitor Parameters adapter.

    Checks that module-style and API-style argument dicts both normalize
    to the same canonical attribute values.
    """
    def test_module_parameters(self):
        """Module args (ints) normalize: parent gets partition prefix, destination is ip:port."""
        args = dict(
            name='foo',
            parent='parent',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            time_until_up=60,
            partition='Common'
        )
        p = Parameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/parent'
        assert p.ip == '10.10.10.10'
        assert p.port == 80
        assert p.type == 'tcp_half_open'
        assert p.destination == '10.10.10.10:80'
        assert p.interval == 20
        assert p.timeout == 30
        assert p.time_until_up == 60
    def test_module_parameters_ints_as_strings(self):
        """String-typed numeric args are coerced to ints by the adapter."""
        args = dict(
            name='foo',
            parent='parent',
            ip='10.10.10.10',
            port=80,
            interval='20',
            timeout='30',
            time_until_up='60',
            partition='Common'
        )
        p = Parameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/parent'
        assert p.ip == '10.10.10.10'
        assert p.port == 80
        assert p.type == 'tcp_half_open'
        assert p.destination == '10.10.10.10:80'
        assert p.interval == 20
        assert p.timeout == 30
        assert p.time_until_up == 60
    def test_api_parameters(self):
        """BIG-IP API field names (defaultsFrom, timeUntilUp, destination) map to the same attributes."""
        args = dict(
            name='foo',
            defaultsFrom='/Common/parent',
            destination='10.10.10.10:80',
            interval=20,
            timeout=30,
            timeUntilUp=60
        )
        p = Parameters(params=args)
        assert p.name == 'foo'
        assert p.parent == '/Common/parent'
        assert p.ip == '10.10.10.10'
        assert p.port == 80
        assert p.type == 'tcp_half_open'
        assert p.destination == '10.10.10.10:80'
        assert p.interval == 20
        assert p.timeout == 30
        assert p.time_until_up == 60
class TestManager(unittest.TestCase):
    """Unit tests for ModuleManager of the tcp_half_open monitor module.

    Device access is stubbed out with Mocks (exists / read / create /
    update), so only the manager's decision logic is exercised.
    """
    def setUp(self):
        self.spec = ArgumentSpec()
    def test_create_monitor(self, *args):
        """Monitor absent -> create path runs and reports changed."""
        set_module_args(dict(
            name='foo',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            time_until_up=60,
            server='localhost',
            password='password',
            user='admin'
        ))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        # exists() is consulted twice: before create and to verify after.
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
    def test_create_monitor_idempotent(self, *args):
        """Monitor already matching the desired state -> no change."""
        set_module_args(dict(
            name='foo',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            time_until_up=60,
            server='localhost',
            password='password',
            user='admin'
        ))
        current = Parameters(params=load_fixture('load_ltm_monitor_tcp_half_open.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        results = mm.exec_module()
        assert results['changed'] is False
    def test_update_interval(self, *args):
        """Changing only the interval triggers an update."""
        set_module_args(dict(
            name='foo',
            interval=10,
            server='localhost',
            password='password',
            user='admin'
        ))
        current = Parameters(params=load_fixture('load_ltm_monitor_tcp_half_open.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
        assert results['interval'] == 10
    def test_update_interval_larger_than_existing_timeout(self, *args):
        """interval >= existing timeout must be rejected with F5ModuleError."""
        set_module_args(dict(
            name='foo',
            interval=30,
            server='localhost',
            password='password',
            user='admin'
        ))
        current = Parameters(params=load_fixture('load_ltm_monitor_tcp_half_open.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)
        with pytest.raises(F5ModuleError) as ex:
            mm.exec_module()
        # NOTE(review): str(ex) stringifies pytest's ExceptionInfo wrapper;
        # str(ex.value) is the exception message itself -- confirm intended.
        assert "must be less than" in str(ex)
    def test_update_interval_larger_than_new_timeout(self, *args):
        """interval >= newly supplied timeout must also be rejected."""
        set_module_args(dict(
            name='foo',
            interval=10,
            timeout=5,
            server='localhost',
            password='password',
            user='admin'
        ))
        current = Parameters(params=load_fixture('load_ltm_monitor_tcp_half_open.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)
        with pytest.raises(F5ModuleError) as ex:
            mm.exec_module()
        assert "must be less than" in str(ex)
    def test_update_timeout(self, *args):
        """Changing only the timeout triggers an update."""
        set_module_args(dict(
            name='foo',
            timeout=300,
            server='localhost',
            password='password',
            user='admin'
        ))
        current = Parameters(params=load_fixture('load_ltm_monitor_tcp_half_open.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
        assert results['timeout'] == 300
    def test_update_time_until_up(self, *args):
        """Changing only time_until_up triggers an update."""
        set_module_args(dict(
            name='foo',
            time_until_up=300,
            server='localhost',
            password='password',
            user='admin'
        ))
        current = Parameters(params=load_fixture('load_ltm_monitor_tcp_half_open.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )
        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)
        results = mm.exec_module()
        assert results['changed'] is True
        assert results['time_until_up'] == 300
| gpl-3.0 |
Alignak-monitoring/alignak | tests/test_satellite_link.py | 1 | 3229 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2018: Alignak team, see AUTHORS.txt file for contributors
#
# This file is part of Alignak.
#
# Alignak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Alignak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Alignak. If not, see <http://www.gnu.org/licenses/>.
from .alignak_test import AlignakTest
from alignak.objects.arbiterlink import ArbiterLink
from alignak.objects.schedulerlink import SchedulerLink
from alignak.objects.brokerlink import BrokerLink
from alignak.objects.reactionnerlink import ReactionnerLink
from alignak.objects.receiverlink import ReceiverLink
from alignak.objects.pollerlink import PollerLink
class template_DaemonLink_get_name():
    """Shared test template for every satellite link type.

    Concrete test classes must provide a ``daemon_link`` attribute
    (the link class under test) readable via ``self.daemon_link``.
    """
    def get_link(self):
        # Instantiate the link class under test from an empty definition.
        cls = self.daemon_link
        return cls({})
    def test_get_name(self):
        """A defaulted, unnamed link reports 'Unnamed <type>' without raising."""
        link = self.get_link()
        print(("Link: %s / %s" % (type(link), link)))
        link.fill_default()
        print(("Name: %s / %s / %s" % (link.type, link.name, link.get_name())))
        print(("Config: %s" % (link.give_satellite_cfg())))
        print(("Config: %s" % (link.have_conf)))
        # Fresh link has not received a configuration yet.
        assert False == link.have_conf
        try:
            self.assertEqual("Unnamed {0}".format(self.daemon_link.my_type), link.get_name())
        except AttributeError:
            self.assertTrue(False, "get_name should not raise AttributeError")
class Test_ArbiterLink_get_name(template_DaemonLink_get_name, AlignakTest):
    """Test satellite link arbiter"""
    # Bug fix: the original assigned `daemon_link = ArbiterLink` as a
    # *local* variable inside setUp(), which was discarded immediately and
    # left self.daemon_link undefined for the template's get_link().
    # A class attribute is what the template actually reads.
    daemon_link = ArbiterLink

    def setUp(self):
        super(Test_ArbiterLink_get_name, self).setUp()


class Test_SchedulerLink_get_name(template_DaemonLink_get_name, AlignakTest):
    """Test satellite link scheduler"""
    daemon_link = SchedulerLink

    def setUp(self):
        super(Test_SchedulerLink_get_name, self).setUp()


class Test_BrokerLink_get_name(template_DaemonLink_get_name, AlignakTest):
    """Test satellite link broker"""
    daemon_link = BrokerLink

    def setUp(self):
        super(Test_BrokerLink_get_name, self).setUp()


class Test_ReactionnerLink_get_name(template_DaemonLink_get_name, AlignakTest):
    """Test satellite link reactionner"""
    daemon_link = ReactionnerLink

    def setUp(self):
        super(Test_ReactionnerLink_get_name, self).setUp()


class Test_ReceiverLink_get_name(template_DaemonLink_get_name, AlignakTest):
    """Test satellite link receiver"""
    daemon_link = ReceiverLink

    def setUp(self):
        super(Test_ReceiverLink_get_name, self).setUp()


class Test_PollerLink_get_name(template_DaemonLink_get_name, AlignakTest):
    """Test satellite link poller"""
    daemon_link = PollerLink

    def setUp(self):
        super(Test_PollerLink_get_name, self).setUp()
| agpl-3.0 |
ttsirkia/a-plus | exercise/cache/hierarchy.py | 3 | 10105 | from course.models import CourseModule, LearningObjectCategory
from ..models import LearningObject
class NoSuchContent(Exception):
    """Raised when a module/exercise/category lookup finds no match."""
    pass
class HierarchyIterator(object):
    """Base cursor over a nested list-of-dicts content hierarchy.

    State: ``idx`` is the index path into nested ``children`` lists,
    ``levels`` the corresponding list stack.  Subclasses must provide
    ``_default_start`` and ``__next__``.
    """
    def __init__(self, children, idx=None, tree=None, visited=False, enclosed=True):
        if idx is None:
            # Start from the beginning/end as the subclass defines.
            self._default_start(children)
        else:
            # Resume from a known position; rebuild the level stack from tree.
            self.idx = idx.copy()
            self.levels = [children]
            if tree and len(tree) > 1:
                for entry in tree[:-1]:
                    self.levels.append(entry['children'])
        self.visited = visited
        # When enclosed, emit level down/up marker entries around the walk.
        self.enclose_begun = not enclosed
        self.enclose_ended = not enclosed
    def __iter__(self):
        return self
class NextIterator(HierarchyIterator):
    """Depth-first forward iterator; yields entries plus {'type':'level'} markers on descend/ascend."""
    def _default_start(self, children):
        # Begin at the first entry of the top level.
        self.idx = [0]
        self.levels = [children]
    def __next__(self):
        # Optional opening 'level down' marker for enclosed iteration.
        if not self.enclose_begun:
            self.enclose_begun = True
            return {'type':'level','down':True}
        i = self.idx[-1]
        level = self.levels[-1]
        if not self.visited:
            # Current entry not yet yielded: yield it first.
            if i < len(level):
                self.visited = True
                return level[i]
        else:
            # Entry already yielded: descend into its children, if any.
            children = level[i].get('children')
            if children:
                self.levels.append(children)
                self.idx.append(0)
                self.visited = False
                return {'type':'level','down':True}
            i += 1
            if i < len(level):
                self.idx[-1] = i
                return level[i]
        # Level exhausted: pop back up and advance the parent cursor.
        if len(self.idx) > 1:
            self.idx = self.idx[:-1]
            self.levels = self.levels[:-1]
            self.idx[-1] += 1
            self.visited = False
            return {'type':'level','up':True}
        # Optional closing 'level up' marker for enclosed iteration.
        if not self.enclose_ended:
            self.enclose_ended = True
            return {'type':'level','up':True}
        raise StopIteration()
class PreviousIterator(HierarchyIterator):
    """Depth-first backward iterator over the hierarchy (no level markers)."""
    def _default_start(self, children):
        # Begin at the deepest last descendant of the structure.
        self.idx = []
        self.levels = []
        self._goto_last(children)
    def _goto_last(self, children):
        # Push the last entry of every nested level onto the stacks.
        level = children
        while level:
            i = len(level) - 1
            self.idx.append(i)
            self.levels.append(level)
            level = level[i].get('children')
    def __next__(self):
        i = self.idx[-1]
        level = self.levels[-1]
        if not self.visited:
            # Yield the current entry before stepping backwards.
            self.visited = True
            return level[i]
        elif i > 0:
            # Step to the previous sibling's deepest last descendant.
            i -= 1
            self.idx[-1] = i
            self._goto_last(level[i].get('children'))
            return self.levels[-1][self.idx[-1]]
        elif len(self.idx) > 1:
            # Level exhausted: yield the parent entry.
            self.idx = self.idx[:-1]
            self.levels = self.levels[:-1]
            return self.levels[-1][self.idx[-1]]
        raise StopIteration()
class ContentMixin(object):
    """Query helpers over a cached course-content tree.

    Expects ``self.data`` to be a dict with keys like 'modules',
    'categories', 'paths', 'module_index', 'exercise_index', where
    modules/exercises are nested dicts carrying 'type', 'id', 'number',
    'status' and 'children'.
    """
    def created(self):
        return self.data['created']
    def total(self):
        return self.data['total']
    def modules(self):
        return self.data['modules']
    def modules_flatted(self):
        """Attach a flat iterator to each module entry and return the modules."""
        for module in self.data['modules']:
            module['flatted'] = self.flat_module(module)
        return self.data['modules']
    def categories(self):
        """Return category entries sorted by name."""
        categories = list(self.data['categories'].values())
        categories.sort(key=lambda entry: entry['name'])
        return categories
    def flat_module(self, module, enclosed=True):
        """Return a NextIterator over one module's content."""
        modules = self.modules()
        idx = self._model_idx(module)
        tree = self._by_idx(modules, idx)
        return NextIterator(tree[0]['children'], enclosed=enclosed)
    def flat_full(self):
        """Return a NextIterator over the whole course content."""
        return NextIterator(self.modules(), enclosed=False)
    def begin(self):
        """Return the first exercise entry of the course, or None."""
        for entry in self.flat_full():
            if entry['type'] == 'exercise':
                return entry
        return None
    def find_path(self, module_id, path):
        """Resolve a URL path within a module to an entry id; raises NoSuchContent."""
        paths = self.data['paths'].get(module_id, {})
        if path in paths:
            return paths[path]
        raise NoSuchContent()
    def find_number(self, number):
        """Resolve a dotted number like '2.3.1' level by level; raises NoSuchContent.

        NOTE(review): `hit` keeps its previous value across levels, so a
        miss on a deeper level returns the last matched ancestor instead
        of raising -- confirm this lenient behavior is intended.
        """
        hit = None
        search = self.modules()
        parts = number.split('.')
        for i in range(len(parts)):
            number = '.'.join(parts[0:i+1])
            for s in search:
                if s['number'] == number:
                    hit = s
                    search = hit['children']
                    break
            if not hit:
                raise NoSuchContent()
        return hit
    def find_category(self, category_id):
        """Return the category entry for category_id; raises NoSuchContent."""
        categories = self.data['categories']
        if category_id in categories:
            return categories[category_id]
        raise NoSuchContent()
    def find(self, model):
        """Locate a model in the tree; return (entry, ancestor tree, previous, next)."""
        modules = self.modules()
        idx = self._model_idx(model)
        tree = self._by_idx(modules, idx)
        return (
            tree[-1],
            tree,
            self._previous(idx, tree),
            self._next(idx, tree),
        )
    def search_exercises(self, **kwargs):
        """Like search_entries, but return only the exercise entries."""
        _, entries = self.search_entries(**kwargs)
        return [e for e in entries if e['type'] == 'exercise']
    def search_entries(self, number=None, category_id=None, module_id=None,
            exercise_id=None, filter_for_assistant=False, best=False):
        """Collect module/exercise entries under an optional anchor with optional filters.

        Returns (anchor entry or None, list of matching entries).
        """
        entry = None
        if number:
            try:
                # A dotted number narrows the search to one module/exercise.
                entry = self.find_number(number)
                if entry['type'] == 'module':
                    module_id = entry['id']
                elif entry['type'] == 'exercise':
                    exercise_id = entry['id']
            except NoSuchContent:
                pass
        search = None
        if not exercise_id is None:
            search = { 'type': 'exercise', 'id': int(exercise_id) }
        elif not module_id is None:
            search = { 'type': 'module', 'id': int(module_id) }
        if search:
            idx = self._model_idx(search)
            tree = self._by_idx(self.modules(), idx)
        else:
            # No anchor: search the whole course via a synthetic root.
            tree = [{ 'type': 'all', 'children': self.modules() }]
        exercises = []
        def recursion(entry):
            # Depth-first collection applying category/assistant filters
            # to exercises; modules are always included.
            if (
                entry['type'] == 'module' or (
                    entry['type'] == 'exercise' and
                    (category_id is None or entry['category_id'] == category_id) and
                    (not filter_for_assistant or entry['allow_assistant_viewing'])
                )
            ):
                exercises.append(entry)
            for child in entry['children']:
                recursion(child)
        recursion(tree[-1])
        return entry, exercises
    def _previous(self, idx, tree):
        """Return the previous listed entry before position idx, or None."""
        for entry in PreviousIterator(self.modules(), idx, tree, visited=True):
            if self.is_listed(entry):
                return entry
        return None
    def _next(self, idx, tree):
        """Return the next listed entry after position idx, or None."""
        for entry in NextIterator(self.modules(), idx, tree, visited=True, enclosed=False):
            if self.is_listed(entry):
                return entry
        return None
    def _model_idx(self, model):
        """Map a model (dict with type/id, or ORM object) to its index path; raises NoSuchContent."""
        def find(index, search):
            if search in index:
                return index[search]
            raise NoSuchContent()
        entry_type = None
        if isinstance(model, dict):
            entry_type = model.get('type', None)
        if entry_type == 'module':
            return find(self.data['module_index'], model['id'])
        elif entry_type == 'exercise':
            return find(self.data['exercise_index'], model['id'])
        elif isinstance(model, CourseModule):
            return find(self.data['module_index'], model.id)
        elif isinstance(model, LearningObject):
            return find(self.data['exercise_index'], model.id)
        else:
            raise NoSuchContent()
    @classmethod
    def _by_idx(cls, hierarchy, idx):
        """Follow index path idx through nested children; return the entries passed."""
        tree = []
        for i in idx:
            entry = hierarchy[i]
            hierarchy = entry['children']
            tree.append(entry)
        return tree
    @classmethod
    def _add_by_difficulty(cls, to, difficulty, points):
        """Accumulate points into the per-difficulty totals dict `to`."""
        if difficulty in to:
            to[difficulty] += points
        else:
            to[difficulty] = points
    @classmethod
    def is_visible(cls, entry):
        """True when the entry is visible to students per its (and its parents') status."""
        t = entry['type']
        if t == 'exercise':
            return (
                entry.get('category_status') != LearningObjectCategory.STATUS.HIDDEN
                and entry.get('module_status') != CourseModule.STATUS.HIDDEN
                and not entry['status'] in (
                    LearningObject.STATUS.HIDDEN,
                    LearningObject.STATUS.ENROLLMENT,
                    LearningObject.STATUS.ENROLLMENT_EXTERNAL,
                )
            )
        if t == 'module':
            return entry['status'] != CourseModule.STATUS.HIDDEN
        if t == 'category':
            return not entry['status'] in (
                LearningObjectCategory.STATUS.HIDDEN,
                LearningObjectCategory.STATUS.NOTOTAL,
            )
        return False
    @classmethod
    def is_listed(cls, entry):
        """True when the entry is visible and also shown in listings (not UNLISTED)."""
        if not cls.is_visible(entry):
            return False
        t = entry['type']
        if t == 'exercise':
            return (
                entry.get('category_status') != LearningObjectCategory.STATUS.HIDDEN
                and entry.get('module_status') != CourseModule.STATUS.UNLISTED
                and entry['status'] != LearningObject.STATUS.UNLISTED
            )
        if t == 'module':
            return entry['status'] != CourseModule.STATUS.UNLISTED
        if t == 'category':
            return entry['status'] != LearningObjectCategory.STATUS.HIDDEN
        return True
    @classmethod
    def is_in_maintenance(cls, entry):
        """True when the entry (or its module) is flagged for maintenance."""
        t = entry['type']
        if t == 'exercise':
            return (
                entry['module_status'] == CourseModule.STATUS.MAINTENANCE
                or entry['status'] == LearningObject.STATUS.MAINTENANCE
            )
        if t == 'module':
            return entry['status'] == CourseModule.STATUS.MAINTENANCE
        return False
| gpl-3.0 |
vietdh85/vh-utility | script/rcb/graspgold.py | 1 | 1037 | import sys
import os.path
from pyquery import PyQuery as pq
import time
import common
def getValues(item):
	"""Scrape the monitor page at item[3] and insert rebate rows via common.insertUserRcb.

	item: sequence where [0]=site_id, [2]=monitor name, [3]=list-page URL.
	Cells are read 5 per row from the page's ``.list`` table; stops early
	when insertUserRcb signals a duplicate/stop with -1.
	"""
	url = item[3]
	print("getValues(): ", url)
	format = "%b %d %Y %H:%M:%S"
	d = pq(url=url)
	list = d(".list td")
	index = 0
	while index < len(list):
		try :
			obj = {}
			obj['date'] = common.removeNumberString(list[index].text_content())
			obj['time'] = common.dateStringToTimestamp(obj['date'], format=format)
			obj['time'] = common.formatTimestamp(obj['time'])
			obj['user'] = list[index + 1].text_content()
			obj['deposit'] = list[index + 2].text_content().split("/")[0].replace("$", "")
			obj['site_id'] = item[0]
			obj['monitor'] = item[2]
			print("{0} - {1} - {2} - {3} - {4} - {5}".format(obj['site_id'], obj['monitor'], obj['date'], obj['time'], obj['user'], obj['deposit']))
			if common.insertUserRcb(obj) == -1:
				return
		except Exception:
			# Malformed rows are skipped on purpose (best-effort scraping).
			pass
		index += 5
def run(item):
	"""Entry point called by the RCB dispatcher for the graspgold monitor (Python 2)."""
	print "\n========== RUN graspgold.run() ============"
	# try :
	getValues(item)
	# except Exception:
	# 	pass
| gpl-3.0 |
JamieFBousfield/heekscnc | pycnc/DepthOp.py | 25 | 2388 | from SpeedOp import SpeedOp
from consts import *
from CNCConfig import CNCConfig
import HeeksCNC
class DepthOp(SpeedOp):
    """Machining operation with depth parameters (clearance, step down, final depth).

    Persists its parameters through CNCConfig and emits the corresponding
    variable assignments into the generated post-processor Python program.
    """
    def __init__(self):
        SpeedOp.__init__(self)
    def ReadDefaultValues(self):
        """Load depth parameters from the config, with built-in fallbacks."""
        SpeedOp.ReadDefaultValues(self)
        config = CNCConfig()
        self.abs_mode = config.ReadInt("DepthOpAbsMode", ABS_MODE_ABSOLUTE)
        self.clearance_height = config.ReadFloat("DepthOpClearance", 5.0)
        self.start_depth = config.ReadFloat("DepthOpStartDepth", 0.0)
        self.step_down = config.ReadFloat("DepthOpStepDown", 1.0)
        self.final_depth = config.ReadFloat("DepthOpFinalDepth", -1.0)
        self.rapid_safety_space = config.ReadFloat("DepthOpRapidSpace", 2.0)
    def WriteDefaultValues(self):
        """Persist the current depth parameters back to the config."""
        SpeedOp.WriteDefaultValues(self)
        config = CNCConfig()
        config.WriteInt("DepthOpAbsMode", self.abs_mode)
        config.WriteFloat("DepthOpClearance", self.clearance_height)
        config.WriteFloat("DepthOpStartDepth", self.start_depth)
        config.WriteFloat("DepthOpStepDown", self.step_down)
        config.WriteFloat("DepthOpFinalDepth", self.final_depth)
        config.WriteFloat("DepthOpRapidSpace", self.rapid_safety_space)
    def AppendTextToProgram(self):
        """Append depth-variable assignments (scaled to program units) to the generated program."""
        SpeedOp.AppendTextToProgram(self)
        HeeksCNC.program.python_program += "clearance = float(" + str(self.clearance_height / HeeksCNC.program.units) + ")\n"
        HeeksCNC.program.python_program += "rapid_safety_space = float(" + str(self.rapid_safety_space / HeeksCNC.program.units) + ")\n"
        HeeksCNC.program.python_program += "start_depth = float(" + str(self.start_depth / HeeksCNC.program.units) + ")\n"
        HeeksCNC.program.python_program += "step_down = float(" + str(self.step_down / HeeksCNC.program.units) + ")\n"
        HeeksCNC.program.python_program += "final_depth = float(" + str(self.final_depth / HeeksCNC.program.units) + ")\n"
        tool = HeeksCNC.program.tools.FindTool(self.tool_number)
        if tool != None:
            HeeksCNC.program.python_program += "tool_diameter = float(" + str(tool.diameter) + ")\n"
        if self.abs_mode == ABS_MODE_ABSOLUTE:
            HeeksCNC.program.python_program += "#absolute() mode\n"
        else:
            # Incremental mode: retract to clearance before switching.
            HeeksCNC.program.python_program += "rapid(z=clearance)\n"
            HeeksCNC.program.python_program += "incremental()\n"
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.