repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
DavidGuben/rcbplayspokemon | app/pywin32-220/com/win32comext/shell/demos/browse_for_folder.py | 47 | 1661 | # A couple of samples using SHBrowseForFolder
import sys, os
from win32com.shell import shell, shellcon
import win32gui
# A callback procedure - called by SHBrowseForFolder
# A callback procedure - called by SHBrowseForFolder as the dialog runs.
def BrowseCallbackProc(hwnd, msg, lp, data):
    """Browse-dialog callback: preselect `data` when the dialog is
    initialized, and mirror the currently selected path into the dialog's
    status line whenever the selection changes."""
    if msg == shellcon.BFFM_INITIALIZED:
        # wParam=1 tells BFFM_SETSELECTION that `data` is a path string
        # rather than a PIDL.
        win32gui.SendMessage(hwnd, shellcon.BFFM_SETSELECTION, 1, data)
    elif msg == shellcon.BFFM_SELCHANGED:
        # Set the status text of the dialog to the selected path.
        # For this message, 'lp' is the address of the PIDL.
        pidl = shell.AddressAsPIDL(lp)
        try:
            path = shell.SHGetPathFromIDList(pidl)
            win32gui.SendMessage(hwnd, shellcon.BFFM_SETSTATUSTEXT, 0, path)
        except shell.error:
            # No filesystem path for this PIDL - leave the status text as-is.
            pass
if __name__=='__main__':
    # Demonstrate a dialog with the cwd selected as the default - this
    # must be done via a callback function.
    flags = shellcon.BIF_STATUSTEXT
    shell.SHBrowseForFolder(0, # parent HWND
                            None, # root PIDL.
                            "Default of %s" % os.getcwd(), # title
                            flags, # flags
                            BrowseCallbackProc, # callback function
                            os.getcwd() # 'data' param for the callback
                            )
    # Browse from this directory down only.
    # Get the PIDL for the cwd; ParseDisplayName returns (chars-consumed,
    # pidl, attributes) - only the PIDL is needed here.
    desktop = shell.SHGetDesktopFolder()
    cb, pidl, extra = desktop.ParseDisplayName(0, None, os.getcwd())
    shell.SHBrowseForFolder(0, # parent HWND
                            pidl, # root PIDL.
                            "From %s down only" % os.getcwd(), # title
                            )
| mit |
tungvx/deploy | .google_appengine/lib/django_1_2/tests/modeltests/defer/tests.py | 92 | 5272 | from django.db.models.query_utils import DeferredAttribute
from django.test import TestCase
from models import Secondary, Primary, Child, BigChild
class DeferTests(TestCase):

    def assert_delayed(self, obj, num):
        """Assert that exactly `num` fields of `obj` are deferred.

        A field is deferred when its class-level attribute has been replaced
        by a DeferredAttribute descriptor (i.e. it will be lazily loaded)."""
        count = 0
        for field in obj._meta.fields:
            if isinstance(obj.__class__.__dict__.get(field.attname),
                DeferredAttribute):
                count += 1
        self.assertEqual(count, num)

    def test_defer(self):
        # To all outward appearances, instances with deferred fields look the
        # same as normal instances when we examine attribute values. Therefore
        # we test for the number of deferred fields on returned instances (by
        # poking at the internals), as a way to observe what is going on.
        s1 = Secondary.objects.create(first="x1", second="y1")
        p1 = Primary.objects.create(name="p1", value="xx", related=s1)
        qs = Primary.objects.all()
        self.assert_delayed(qs.defer("name")[0], 1)
        self.assert_delayed(qs.only("name")[0], 2)
        self.assert_delayed(qs.defer("related__first")[0], 0)
        obj = qs.select_related().only("related__first")[0]
        self.assert_delayed(obj, 2)
        self.assertEqual(obj.related_id, s1.pk)
        # defer() composes with extra() in either order.
        self.assert_delayed(qs.defer("name").extra(select={"a": 1})[0], 1)
        self.assert_delayed(qs.extra(select={"a": 1}).defer("name")[0], 1)
        # Chained defer()s accumulate; chained only()s replace each other.
        self.assert_delayed(qs.defer("name").defer("value")[0], 2)
        self.assert_delayed(qs.only("name").only("value")[0], 2)
        self.assert_delayed(qs.only("name").defer("value")[0], 2)
        self.assert_delayed(qs.only("name", "value").defer("value")[0], 2)
        self.assert_delayed(qs.defer("name").only("value")[0], 2)
        # NOTE(review): result unused - presumably only() with no arguments
        # should simply not blow up; confirm intent.
        obj = qs.only()[0]
        # defer(None) clears any accumulated deferrals.
        self.assert_delayed(qs.defer(None)[0], 0)
        self.assert_delayed(qs.only("name").defer(None)[0], 0)
        # Using values() won't defer anything (you get the full list of
        # dictionaries back), but it still works.
        self.assertEqual(qs.defer("name").values()[0], {
            "id": p1.id,
            "name": "p1",
            "value": "xx",
            "related_id": s1.id,
        })
        self.assertEqual(qs.only("name").values()[0], {
            "id": p1.id,
            "name": "p1",
            "value": "xx",
            "related_id": s1.id,
        })
        # Using defer() and only() with get() is also valid.
        self.assert_delayed(qs.defer("name").get(pk=p1.pk), 1)
        self.assert_delayed(qs.only("name").get(pk=p1.pk), 2)
        # NOTE(review): upstream marked the next two with "DOES THIS WORK?" -
        # only()/defer() combined with select_related() should be confirmed.
        self.assert_delayed(qs.only("name").select_related("related")[0], 1)
        self.assert_delayed(qs.defer("related").select_related("related")[0], 0)
        # Saving models with deferred fields is possible (but inefficient,
        # since every field has to be retrieved first).
        obj = Primary.objects.defer("value").get(name="p1")
        obj.name = "a new name"
        obj.save()
        self.assertQuerysetEqual(
            Primary.objects.all(), [
                "a new name",
            ],
            lambda p: p.name
        )
        # Regression for #10572 - A subclass with no extra fields can defer
        # fields from the base class
        Child.objects.create(name="c1", value="foo", related=s1)
        # You can defer a field on a baseclass when the subclass has no fields
        obj = Child.objects.defer("value").get(name="c1")
        self.assert_delayed(obj, 1)
        self.assertEqual(obj.name, "c1")
        self.assertEqual(obj.value, "foo")
        obj.name = "c2"
        obj.save()
        # You can retrieve a single column on a base class with no fields
        obj = Child.objects.only("name").get(name="c2")
        self.assert_delayed(obj, 3)
        self.assertEqual(obj.name, "c2")
        self.assertEqual(obj.value, "foo")
        obj.name = "cc"
        obj.save()
        BigChild.objects.create(name="b1", value="foo", related=s1, other="bar")
        # You can defer a field on a baseclass
        obj = BigChild.objects.defer("value").get(name="b1")
        self.assert_delayed(obj, 1)
        self.assertEqual(obj.name, "b1")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
        obj.name = "b2"
        obj.save()
        # You can defer a field on a subclass
        obj = BigChild.objects.defer("other").get(name="b2")
        self.assert_delayed(obj, 1)
        self.assertEqual(obj.name, "b2")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
        obj.name = "b3"
        obj.save()
        # You can retrieve a single field on a baseclass
        obj = BigChild.objects.only("name").get(name="b3")
        self.assert_delayed(obj, 4)
        self.assertEqual(obj.name, "b3")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
        obj.name = "b4"
        obj.save()
        # You can retrieve a single field on a subclass
        obj = BigChild.objects.only("other").get(name="b4")
        self.assert_delayed(obj, 4)
        self.assertEqual(obj.name, "b4")
        self.assertEqual(obj.value, "foo")
        self.assertEqual(obj.other, "bar")
        obj.name = "bb"
        obj.save()
| apache-2.0 |
cpritam/moose | gui/utils/mergeLists.py | 34 | 1682 | #!/usr/bin/python
''' Merge the lists together preserving the ordering of master. "master" will be the merged list after completion. '''
def mergeLists(master, slave):
    """Merge `slave` into `master` in place, preserving the ordering of
    `master`.

    Each value of `slave` not already present in `master` is inserted at the
    position whose neighbourhood best matches the value's relative ordering
    in `slave` (scored below).  `master` is the merged list on return; the
    function itself returns None, in keeping with in-place mutators.

    Fix: uses `range`/`enumerate` instead of the Python-2-only `xrange`,
    with identical iteration behaviour.
    """
    for slave_pos, value in enumerate(slave):
        if value in master:
            continue
        # slave values that come before / after the one being inserted.
        before = slave[0:slave_pos]
        after = slave[slave_pos + 1:]
        best_score = 0
        best_position = 0
        # Try every possible insertion point and keep the best-scoring one.
        for insert_at in range(0, len(master) + 1):
            score = 0
            master_before = master[0:insert_at]
            master_after = master[insert_at:]
            # Reward predecessors of `value` already sitting before the
            # insertion point; nearer predecessors weigh more.
            for i in range(len(before)):
                if before[i] in master_before:
                    score += 1.0 / float(len(before) - i + 1)
            # Likewise reward successors already after the insertion point.
            for i in range(len(after)):
                if after[i] in master_after:
                    score += 1.0 / float(i + 1)
            if score > best_score:
                best_position = insert_at
                best_score = score
        master.insert(best_position, value)
if __name__ == '__main__':
    # Smoke test: merge a template's section list into a user's list.
    # Fixes: print() call form (the py2-only print statement breaks on
    # Python 3; the output is identical on both), and the local no longer
    # shadows the builtin `input`.
    current = ['Variables', 'Functions', 'Kernels', 'BCs', 'Executioner', 'Outputs']
    template = ['Variables', 'AuxVariables', 'Kernels', 'AuxKernels', 'BCs',
                'AuxBCs', 'Postprocessors', 'Executioner', 'Outputs']
    mergeLists(current, template)
    print(current)
| lgpl-2.1 |
mapio/prettytable-mirror | prettytable.py | 42 | 62469 | #!/usr/bin/env python
#
# Copyright (c) 2009-2014, Luke Maurits <luke@maurits.id.au>
# All rights reserved.
# With contributions from:
# * Chris Clark
# * Klein Stephane
# * John Filleau
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__version__ = "trunk"
import copy
import csv
import itertools
import math
import random
import re
import sys
import textwrap
import unicodedata
py3k = sys.version_info[0] >= 3
if py3k:
unicode = str
basestring = str
itermap = map
iterzip = zip
uni_chr = chr
from html.parser import HTMLParser
else:
itermap = itertools.imap
iterzip = itertools.izip
uni_chr = unichr
from HTMLParser import HTMLParser
if py3k and sys.version_info[1] >= 2:
from html import escape
else:
from cgi import escape
# hrule styles
FRAME = 0
ALL = 1
NONE = 2
HEADER = 3
# Table styles
DEFAULT = 10
MSWORD_FRIENDLY = 11
PLAIN_COLUMNS = 12
RANDOM = 20
_re = re.compile("\033\[[0-9;]*m")
def _get_size(text):
    """Return (width, height) of a block of text: the widest line (as
    measured by _str_block_width, which ignores ANSI colour codes) and the
    number of lines."""
    rows = text.split("\n")
    widest = max(_str_block_width(row) for row in rows)
    return (widest, len(rows))
class PrettyTable(object):

    def __init__(self, field_names=None, **kwargs):
        """Return a new PrettyTable instance

        Arguments:

        encoding - Unicode encoding scheme used to decode any encoded input
        title - optional table title
        field_names - list or tuple of field names
        fields - list or tuple of field names to include in displays
        start - index of first data row to include in output
        end - index of last data row to include in output PLUS ONE (list slice style)
        header - print a header showing field names (True or False)
        header_style - stylisation to apply to field names in header ("cap", "title", "upper", "lower" or None)
        border - print a border around the table (True or False)
        hrules - controls printing of horizontal rules after rows. Allowed values: FRAME, HEADER, ALL, NONE
        vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE
        int_format - controls formatting of integer data
        float_format - controls formatting of floating point data
        min_table_width - minimum desired table width, in characters
        max_table_width - maximum desired table width, in characters
        padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
        left_padding_width - number of spaces on left hand side of column data
        right_padding_width - number of spaces on right hand side of column data
        vertical_char - single character string used to draw vertical lines
        horizontal_char - single character string used to draw horizontal lines
        junction_char - single character string used to draw line junctions
        sortby - name of field to sort rows by
        sort_key - sorting key function, applied to data points before sorting
        valign - default valign for each row (None, "t", "m" or "b")
        reversesort - True or False to sort in descending or ascending order
        oldsortslice - Slice rows before sorting in the "old style" """

        self.encoding = kwargs.get("encoding", "UTF-8")

        # Data
        self._field_names = []
        self._rows = []
        # These go through the property setters, which normalise them to
        # per-field dictionaries.
        self.align = {}
        self.valign = {}
        self.max_width = {}
        self.min_width = {}
        self.int_format = {}
        self.float_format = {}
        if field_names:
            self.field_names = field_names
        else:
            self._widths = []

        # Options: the full list of recognised keyword arguments.
        # Recognised kwargs are validated; missing ones are defaulted to
        # None so the "kwargs[...] or default" logic below can run.
        self._options = "title start end fields header border sortby reversesort sort_key attributes format hrules vrules".split()
        self._options.extend("int_format float_format min_table_width max_table_width padding_width left_padding_width right_padding_width".split())
        self._options.extend("vertical_char horizontal_char junction_char header_style valign xhtml print_empty oldsortslice".split())
        self._options.extend("align valign max_width min_width".split())
        for option in self._options:
            if option in kwargs:
                self._validate_option(option, kwargs[option])
            else:
                kwargs[option] = None

        self._title = kwargs["title"] or None
        self._start = kwargs["start"] or 0
        self._end = kwargs["end"] or None
        self._fields = kwargs["fields"] or None

        # Tri-state booleans: an explicit True/False wins, anything else
        # (including the None default) falls back to the documented default.
        if kwargs["header"] in (True, False):
            self._header = kwargs["header"]
        else:
            self._header = True
        self._header_style = kwargs["header_style"] or None
        if kwargs["border"] in (True, False):
            self._border = kwargs["border"]
        else:
            self._border = True
        self._hrules = kwargs["hrules"] or FRAME
        self._vrules = kwargs["vrules"] or ALL

        self._sortby = kwargs["sortby"] or None
        if kwargs["reversesort"] in (True, False):
            self._reversesort = kwargs["reversesort"]
        else:
            self._reversesort = False
        self._sort_key = kwargs["sort_key"] or (lambda x: x)

        # Column specific arguments, use property.setters
        self.align = kwargs["align"] or {}
        self.valign = kwargs["valign"] or {}
        self.max_width = kwargs["max_width"] or {}
        self.min_width = kwargs["min_width"] or {}
        self.int_format = kwargs["int_format"] or {}
        self.float_format = kwargs["float_format"] or {}

        self._min_table_width = kwargs["min_table_width"] or None
        self._max_table_width = kwargs["max_table_width"] or None
        self._padding_width = kwargs["padding_width"] or 1
        self._left_padding_width = kwargs["left_padding_width"] or None
        self._right_padding_width = kwargs["right_padding_width"] or None

        self._vertical_char = kwargs["vertical_char"] or self._unicode("|")
        self._horizontal_char = kwargs["horizontal_char"] or self._unicode("-")
        self._junction_char = kwargs["junction_char"] or self._unicode("+")

        if kwargs["print_empty"] in (True, False):
            self._print_empty = kwargs["print_empty"]
        else:
            self._print_empty = True
        if kwargs["oldsortslice"] in (True, False):
            self._oldsortslice = kwargs["oldsortslice"]
        else:
            self._oldsortslice = False
        self._format = kwargs["format"] or False
        self._xhtml = kwargs["xhtml"] or False
        self._attributes = kwargs["attributes"] or {}
def _unicode(self, value):
    """Coerce any value to unicode (py3: str): non-strings are first
    stringified, then byte strings are decoded with the table's configured
    encoding using "strict" error handling."""
    if not isinstance(value, basestring):
        value = str(value)
    if not isinstance(value, unicode):
        value = unicode(value, self.encoding, "strict")
    return value
def _justify(self, text, width, align):
    """Pad `text` with spaces to `width` printable columns.

    align is "l" (left), "r" (right) or anything else for centred; widths
    are measured with _str_block_width, so ANSI colour codes don't count."""
    excess = width - _str_block_width(text)
    if align == "l":
        return text + excess * " "
    elif align == "r":
        return excess * " " + text
    else:
        if excess % 2:
            # Uneven padding
            # Put more space on right if text is of odd length...
            if _str_block_width(text) % 2:
                return (excess//2)*" " + text + (excess//2 + 1)*" "
            # and more space on left if text is of even length
            else:
                return (excess//2 + 1)*" " + text + (excess//2)*" "
            # Why distribute extra space this way? To match the behaviour of
            # the inbuilt str.center() method.
        else:
            # Equal padding on either side
            return (excess//2)*" " + text + (excess//2)*" "
def __getattr__(self, name):
if name == "rowcount":
return len(self._rows)
elif name == "colcount":
if self._field_names:
return len(self._field_names)
elif self._rows:
return len(self._rows[0])
else:
return 0
else:
raise AttributeError(name)
def __getitem__(self, index):
    """Return a new PrettyTable containing the selected row(s).

    index - an integer or slice selecting rows.  All persistent options and
    the per-field alignments are copied onto the new table."""
    new = PrettyTable()
    new.field_names = self.field_names
    for attr in self._options:
        setattr(new, "_"+attr, getattr(self, "_"+attr))
    setattr(new, "_align", getattr(self, "_align"))
    if isinstance(index, slice):
        for row in self._rows[index]:
            new.add_row(row)
    elif isinstance(index, int):
        new.add_row(self._rows[index])
    else:
        raise Exception("Index %s is invalid, must be an integer or slice" % str(index))
    return new
if py3k:
    def __str__(self):
        # py3: str() is the unicode rendering directly.
        return self.__unicode__()
else:
    def __str__(self):
        # py2: str() must return bytes, so encode the unicode rendering
        # with the table's configured encoding.
        return self.__unicode__().encode(self.encoding)

def __unicode__(self):
    # Canonical text rendering of the whole table.
    return self.get_string()
##############################
# ATTRIBUTE VALIDATORS #
##############################
# The method _validate_option is all that should be used elsewhere in the code base to validate options.
# It will call the appropriate validation method for that option. The individual validation methods should
# never need to be called directly (although nothing bad will happen if they *are*).
# Validation happens in TWO places.
# Firstly, in the property setters defined in the ATTRIBUTE MANAGMENT section.
# Secondly, in the _get_options method, where keyword arguments are mixed with persistent settings
def _validate_option(self, option, val):
    """Dispatch `val` to the appropriate validator for `option`.

    Fix: the original used tests like ``option in ("sortby")`` - ``("x")``
    is a plain string, not a 1-tuple, so ``in`` performed a *substring*
    match rather than an equality test.  Single-name checks now use ``==``
    (genuine multi-name checks keep real tuples)."""
    if option == "field_names":
        self._validate_field_names(val)
    elif option in ("start", "end", "max_width", "min_width", "min_table_width", "max_table_width", "padding_width", "left_padding_width", "right_padding_width", "format"):
        self._validate_nonnegative_int(option, val)
    elif option == "sortby":
        self._validate_field_name(option, val)
    elif option == "sort_key":
        self._validate_function(option, val)
    elif option == "hrules":
        self._validate_hrules(option, val)
    elif option == "vrules":
        self._validate_vrules(option, val)
    elif option == "fields":
        self._validate_all_field_names(option, val)
    elif option in ("header", "border", "reversesort", "xhtml", "print_empty", "oldsortslice"):
        self._validate_true_or_false(option, val)
    elif option == "header_style":
        self._validate_header_style(val)
    elif option == "int_format":
        self._validate_int_format(option, val)
    elif option == "float_format":
        self._validate_float_format(option, val)
    elif option in ("vertical_char", "horizontal_char", "junction_char"):
        self._validate_single_char(option, val)
    elif option == "attributes":
        self._validate_attributes(option, val)
    # Unknown options fall through unvalidated, matching the original.
def _validate_field_names(self, val):
# Check for appropriate length
if self._field_names:
try:
assert len(val) == len(self._field_names)
except AssertionError:
raise Exception("Field name list has incorrect number of values, (actual) %d!=%d (expected)" % (len(val), len(self._field_names)))
if self._rows:
try:
assert len(val) == len(self._rows[0])
except AssertionError:
raise Exception("Field name list has incorrect number of values, (actual) %d!=%d (expected)" % (len(val), len(self._rows[0])))
# Check for uniqueness
try:
assert len(val) == len(set(val))
except AssertionError:
raise Exception("Field names must be unique!")
def _validate_header_style(self, val):
try:
assert val in ("cap", "title", "upper", "lower", None)
except AssertionError:
raise Exception("Invalid header style, use cap, title, upper, lower or None!")
def _validate_align(self, val):
try:
assert val in ["l","c","r"]
except AssertionError:
raise Exception("Alignment %s is invalid, use l, c or r!" % val)
def _validate_valign(self, val):
try:
assert val in ["t","m","b",None]
except AssertionError:
raise Exception("Alignment %s is invalid, use t, m, b or None!" % val)
def _validate_nonnegative_int(self, name, val):
try:
assert int(val) >= 0
except AssertionError:
raise Exception("Invalid value for %s: %s!" % (name, self._unicode(val)))
def _validate_true_or_false(self, name, val):
try:
assert val in (True, False)
except AssertionError:
raise Exception("Invalid value for %s! Must be True or False." % name)
def _validate_int_format(self, name, val):
if val == "":
return
try:
assert type(val) in (str, unicode)
assert val.isdigit()
except AssertionError:
raise Exception("Invalid value for %s! Must be an integer format string." % name)
def _validate_float_format(self, name, val):
if val == "":
return
try:
assert type(val) in (str, unicode)
assert "." in val
bits = val.split(".")
assert len(bits) <= 2
assert bits[0] == "" or bits[0].isdigit()
assert bits[1] == "" or bits[1].isdigit()
except AssertionError:
raise Exception("Invalid value for %s! Must be a float format string." % name)
def _validate_function(self, name, val):
try:
assert hasattr(val, "__call__")
except AssertionError:
raise Exception("Invalid value for %s! Must be a function." % name)
def _validate_hrules(self, name, val):
try:
assert val in (ALL, FRAME, HEADER, NONE)
except AssertionError:
raise Exception("Invalid value for %s! Must be ALL, FRAME, HEADER or NONE." % name)
def _validate_vrules(self, name, val):
try:
assert val in (ALL, FRAME, NONE)
except AssertionError:
raise Exception("Invalid value for %s! Must be ALL, FRAME, or NONE." % name)
def _validate_field_name(self, name, val):
try:
assert (val in self._field_names) or (val is None)
except AssertionError:
raise Exception("Invalid field name: %s!" % val)
def _validate_all_field_names(self, name, val):
try:
for x in val:
self._validate_field_name(name, x)
except AssertionError:
raise Exception("fields must be a sequence of field names!")
def _validate_single_char(self, name, val):
try:
assert _str_block_width(val) == 1
except AssertionError:
raise Exception("Invalid value for %s! Must be a string of length 1." % name)
def _validate_attributes(self, name, val):
try:
assert isinstance(val, dict)
except AssertionError:
raise Exception("attributes must be a dictionary of name/value pairs!")
##############################
# ATTRIBUTE MANAGEMENT #
##############################
@property
def field_names(self):
    """List or tuple of field names"""
    return self._field_names

@field_names.setter
def field_names(self, val):
    val = [self._unicode(x) for x in val]
    self._validate_option("field_names", val)
    if self._field_names:
        old_names = self._field_names[:]
    self._field_names = val
    # NOTE: when there were no previous names, `old_names` is unbound; the
    # `self._align and ...` short-circuit (align/valign are empty then)
    # keeps that safe, as in the original.
    if self._align and old_names:
        # Carry each column's alignment over to the new name at the same
        # position, then purge entries for names that no longer exist.
        for old_name, new_name in zip(old_names, val):
            self._align[new_name] = self._align[old_name]
        for old_name in old_names:
            # Fix: the original tested ``old_name not in self._align`` and
            # then popped - a branch that could only raise KeyError and
            # never removed stale entries.  Test against the new name list
            # instead, and pop defensively.
            if old_name not in self._field_names:
                self._align.pop(old_name, None)
    else:
        self.align = "c"
    if self._valign and old_names:
        for old_name, new_name in zip(old_names, val):
            self._valign[new_name] = self._valign[old_name]
        for old_name in old_names:
            if old_name not in self._field_names:
                self._valign.pop(old_name, None)
    else:
        self.valign = "t"
@property
def align(self):
    """Controls alignment of fields

    Arguments:

    align - alignment, one of "l", "c", or "r" """
    return self._align

@align.setter
def align(self, val):
    # None or an empty dict resets every column to centred; otherwise the
    # single alignment code is applied to every column.
    # (Fix: was ``len(val) is 0`` - identity comparison of ints, which is
    # implementation-dependent and a SyntaxWarning on modern Pythons.)
    if not self._field_names:
        self._align = {}
    elif val is None or (isinstance(val, dict) and len(val) == 0):
        for field in self._field_names:
            self._align[field] = "c"
    else:
        self._validate_align(val)
        for field in self._field_names:
            self._align[field] = val

@property
def valign(self):
    """Controls vertical alignment of fields

    Arguments:

    valign - vertical alignment, one of "t", "m", or "b" """
    return self._valign

@valign.setter
def valign(self, val):
    # Same reset-or-broadcast behaviour as `align`, defaulting to top.
    if not self._field_names:
        self._valign = {}
    elif val is None or (isinstance(val, dict) and len(val) == 0):
        for field in self._field_names:
            self._valign[field] = "t"
    else:
        self._validate_valign(val)
        for field in self._field_names:
            self._valign[field] = val
@property
def max_width(self):
    """Controls maximum width of fields

    Arguments:

    max_width - maximum width integer"""
    return self._max_width

@max_width.setter
def max_width(self, val):
    # None or empty dict clears all per-column maxima; otherwise the one
    # width applies to every column.
    # (Fix: was ``len(val) is 0`` - identity comparison of ints.)
    if val is None or (isinstance(val, dict) and len(val) == 0):
        self._max_width = {}
    else:
        self._validate_option("max_width", val)
        for field in self._field_names:
            self._max_width[field] = val

@property
def min_width(self):
    """Controls minimum width of fields

    Arguments:

    min_width - minimum width integer"""
    return self._min_width

@min_width.setter
def min_width(self, val):
    if val is None or (isinstance(val, dict) and len(val) == 0):
        self._min_width = {}
    else:
        self._validate_option("min_width", val)
        for field in self._field_names:
            self._min_width[field] = val
@property
def min_table_width(self):
    """Minimum desired table width, in characters"""
    return self._min_table_width

@min_table_width.setter
def min_table_width(self, val):
    self._validate_option("min_table_width", val)
    self._min_table_width = val

@property
def max_table_width(self):
    """Maximum desired table width, in characters"""
    return self._max_table_width

@max_table_width.setter
def max_table_width(self, val):
    self._validate_option("max_table_width", val)
    self._max_table_width = val

@property
def fields(self):
    """List or tuple of field names to include in displays"""
    return self._fields

@fields.setter
def fields(self, val):
    self._validate_option("fields", val)
    self._fields = val
@property
def title(self):
    """Optional table title

    Arguments:

    title - table title"""
    return self._title

@title.setter
def title(self, val):
    # Unlike most options the title is not validated; any value is simply
    # coerced to unicode.
    self._title = self._unicode(val)

@property
def start(self):
    """Start index of the range of rows to print

    Arguments:

    start - index of first data row to include in output"""
    return self._start

@start.setter
def start(self, val):
    self._validate_option("start", val)
    self._start = val

@property
def end(self):
    """End index of the range of rows to print

    Arguments:

    end - index of last data row to include in output PLUS ONE (list slice style)"""
    return self._end

@end.setter
def end(self, val):
    self._validate_option("end", val)
    self._end = val

@property
def sortby(self):
    """Name of field by which to sort rows

    Arguments:

    sortby - field name to sort by"""
    return self._sortby

@sortby.setter
def sortby(self, val):
    self._validate_option("sortby", val)
    self._sortby = val

@property
def reversesort(self):
    """Controls direction of sorting (ascending vs descending)

    Arguments:

    reversesort - set to True to sort by descending order, or False to sort by ascending order"""
    return self._reversesort

@reversesort.setter
def reversesort(self, val):
    self._validate_option("reversesort", val)
    self._reversesort = val

@property
def sort_key(self):
    """Sorting key function, applied to data points before sorting

    Arguments:

    sort_key - a function which takes one argument and returns something to be sorted"""
    return self._sort_key

@sort_key.setter
def sort_key(self, val):
    self._validate_option("sort_key", val)
    self._sort_key = val
@property
def header(self):
    """Controls printing of table header with field names

    Arguments:

    header - print a header showing field names (True or False)"""
    return self._header

@header.setter
def header(self, val):
    self._validate_option("header", val)
    self._header = val

@property
def header_style(self):
    """Controls stylisation applied to field names in header

    Arguments:

    header_style - stylisation to apply to field names in header ("cap", "title", "upper", "lower" or None)"""
    return self._header_style

@header_style.setter
def header_style(self, val):
    self._validate_header_style(val)
    self._header_style = val

@property
def border(self):
    """Controls printing of border around table

    Arguments:

    border - print a border around the table (True or False)"""
    return self._border

@border.setter
def border(self, val):
    self._validate_option("border", val)
    self._border = val

@property
def hrules(self):
    """Controls printing of horizontal rules after rows

    Arguments:

    hrules - horizontal rules style. Allowed values: FRAME, ALL, HEADER, NONE"""
    return self._hrules

@hrules.setter
def hrules(self, val):
    self._validate_option("hrules", val)
    self._hrules = val

@property
def vrules(self):
    """Controls printing of vertical rules between columns

    Arguments:

    vrules - vertical rules style. Allowed values: FRAME, ALL, NONE"""
    return self._vrules

@vrules.setter
def vrules(self, val):
    self._validate_option("vrules", val)
    self._vrules = val
@property
def int_format(self):
    """Controls formatting of integer data

    Arguments:

    int_format - integer format string"""
    return self._int_format

@int_format.setter
def int_format(self, val):
    # None or empty dict clears all per-column formats; otherwise the one
    # format string is applied to every column.
    # (Fix: was ``len(val) is 0`` - identity comparison of ints, which is
    # implementation-dependent and a SyntaxWarning on modern Pythons.)
    if val is None or (isinstance(val, dict) and len(val) == 0):
        self._int_format = {}
    else:
        self._validate_option("int_format", val)
        for field in self._field_names:
            self._int_format[field] = val

@property
def float_format(self):
    """Controls formatting of floating point data

    Arguments:

    float_format - floating point format string"""
    return self._float_format

@float_format.setter
def float_format(self, val):
    if val is None or (isinstance(val, dict) and len(val) == 0):
        self._float_format = {}
    else:
        self._validate_option("float_format", val)
        for field in self._field_names:
            self._float_format[field] = val
@property
def padding_width(self):
    """The number of empty spaces between a column's edge and its content

    Arguments:

    padding_width - number of spaces, must be a positive integer"""
    return self._padding_width

@padding_width.setter
def padding_width(self, val):
    self._validate_option("padding_width", val)
    self._padding_width = val

@property
def left_padding_width(self):
    """The number of empty spaces between a column's left edge and its content

    Arguments:

    left_padding - number of spaces, must be a positive integer"""
    return self._left_padding_width

@left_padding_width.setter
def left_padding_width(self, val):
    self._validate_option("left_padding_width", val)
    self._left_padding_width = val

@property
def right_padding_width(self):
    """The number of empty spaces between a column's right edge and its content

    Arguments:

    right_padding - number of spaces, must be a positive integer"""
    return self._right_padding_width

@right_padding_width.setter
def right_padding_width(self, val):
    self._validate_option("right_padding_width", val)
    self._right_padding_width = val
@property
def vertical_char(self):
    """The character used when printing table borders to draw vertical lines

    Arguments:

    vertical_char - single character string used to draw vertical lines"""
    return self._vertical_char

@vertical_char.setter
def vertical_char(self, val):
    val = self._unicode(val)
    self._validate_option("vertical_char", val)
    self._vertical_char = val

@property
def horizontal_char(self):
    """The character used when printing table borders to draw horizontal lines

    Arguments:

    horizontal_char - single character string used to draw horizontal lines"""
    return self._horizontal_char

@horizontal_char.setter
def horizontal_char(self, val):
    val = self._unicode(val)
    self._validate_option("horizontal_char", val)
    self._horizontal_char = val

@property
def junction_char(self):
    """The character used when printing table borders to draw line junctions

    Arguments:

    junction_char - single character string used to draw line junctions"""
    return self._junction_char

@junction_char.setter
def junction_char(self, val):
    val = self._unicode(val)
    # Fix: copy-paste bug - this setter previously validated under the
    # name "vertical_char", so error messages blamed the wrong option.
    self._validate_option("junction_char", val)
    self._junction_char = val
@property
def format(self):
    """Controls whether or not HTML tables are formatted to match styling options

    Arguments:

    format - True or False"""
    return self._format

@format.setter
def format(self, val):
    self._validate_option("format", val)
    self._format = val

@property
def print_empty(self):
    """Controls whether or not empty tables produce a header and frame or just an empty string

    Arguments:

    print_empty - True or False"""
    return self._print_empty

@print_empty.setter
def print_empty(self, val):
    self._validate_option("print_empty", val)
    self._print_empty = val

@property
def attributes(self):
    """A dictionary of HTML attribute name/value pairs to be included in the <table> tag when printing HTML

    Arguments:

    attributes - dictionary of attributes"""
    return self._attributes

@attributes.setter
def attributes(self, val):
    self._validate_option("attributes", val)
    self._attributes = val

@property
def oldsortslice(self):
    """ oldsortslice - Slice rows before sorting in the "old style" """
    return self._oldsortslice

@oldsortslice.setter
def oldsortslice(self, val):
    self._validate_option("oldsortslice", val)
    self._oldsortslice = val
##############################
# OPTION MIXER #
##############################
def _get_options(self, kwargs):
options = {}
for option in self._options:
if option in kwargs:
self._validate_option(option, kwargs[option])
options[option] = kwargs[option]
else:
options[option] = getattr(self, "_"+option)
return options
##############################
# PRESET STYLE LOGIC #
##############################
def set_style(self, style):
if style == DEFAULT:
self._set_default_style()
elif style == MSWORD_FRIENDLY:
self._set_msword_style()
elif style == PLAIN_COLUMNS:
self._set_columns_style()
elif style == RANDOM:
self._set_random_style()
else:
raise Exception("Invalid pre-set style!")
def _set_default_style(self):
self.header = True
self.border = True
self._hrules = FRAME
self._vrules = ALL
self.padding_width = 1
self.left_padding_width = 1
self.right_padding_width = 1
self.vertical_char = "|"
self.horizontal_char = "-"
self.junction_char = "+"
    def _set_msword_style(self):
        # Preset tuned for pasting into Microsoft Word: vertical bars only,
        # no horizontal rules, symmetric single-space padding.
        self.header = True
        self.border = True
        self._hrules = NONE
        self.padding_width = 1
        self.left_padding_width = 1
        self.right_padding_width = 1
        self.vertical_char = "|"
    def _set_columns_style(self):
        # Preset with no borders at all: columns are separated purely by
        # padding, with a wide gap on the right of each column.
        self.header = True
        self.border = False
        self.padding_width = 1
        self.left_padding_width = 0
        self.right_padding_width = 8
    def _set_random_style(self):
        # Just for fun!
        # Every visual option is randomised; the three drawing characters are
        # each drawn independently from the same pool of punctuation.
        self.header = random.choice((True, False))
        self.border = random.choice((True, False))
        self._hrules = random.choice((ALL, FRAME, HEADER, NONE))
        self._vrules = random.choice((ALL, FRAME, NONE))
        self.left_padding_width = random.randint(0,5)
        self.right_padding_width = random.randint(0,5)
        self.vertical_char = random.choice("~!@#$%^&*()_+|-=\{}[];':\",./;<>?")
        self.horizontal_char = random.choice("~!@#$%^&*()_+|-=\{}[];':\",./;<>?")
        self.junction_char = random.choice("~!@#$%^&*()_+|-=\{}[];':\",./;<>?")
##############################
# DATA INPUT METHODS #
##############################
def add_row(self, row):
"""Add a row to the table
Arguments:
row - row of data, should be a list with as many elements as the table
has fields"""
if self._field_names and len(row) != len(self._field_names):
raise Exception("Row has incorrect number of values, (actual) %d!=%d (expected)" %(len(row),len(self._field_names)))
if not self._field_names:
self.field_names = [("Field %d" % (n+1)) for n in range(0,len(row))]
self._rows.append(list(row))
def del_row(self, row_index):
"""Delete a row to the table
Arguments:
row_index - The index of the row you want to delete. Indexing starts at 0."""
if row_index > len(self._rows)-1:
raise Exception("Cant delete row at index %d, table only has %d rows!" % (row_index, len(self._rows)))
del self._rows[row_index]
def add_column(self, fieldname, column, align="c", valign="t"):
"""Add a column to the table.
Arguments:
fieldname - name of the field to contain the new column of data
column - column of data, should be a list with as many elements as the
table has rows
align - desired alignment for this column - "l" for left, "c" for centre and "r" for right
valign - desired vertical alignment for new columns - "t" for top, "m" for middle and "b" for bottom"""
if len(self._rows) in (0, len(column)):
self._validate_align(align)
self._validate_valign(valign)
self._field_names.append(fieldname)
self._align[fieldname] = align
self._valign[fieldname] = valign
for i in range(0, len(column)):
if len(self._rows) < i+1:
self._rows.append([])
self._rows[i].append(column[i])
else:
raise Exception("Column length %d does not match number of rows %d!" % (len(column), len(self._rows)))
    def clear_rows(self):
        """Delete all rows from the table but keep the current field names"""
        # Cached widths and per-field alignment settings are left untouched.
        self._rows = []
    def clear(self):
        """Delete all rows and field names from the table, maintaining nothing but styling options"""
        self._rows = []
        self._field_names = []
        self._widths = []
        # NOTE(review): per-field dicts such as _align/_valign are not reset
        # here, so entries for old field names may linger - confirm intent.
##############################
# MISC PUBLIC METHODS #
##############################
    def copy(self):
        """Return an independent deep copy of this table."""
        return copy.deepcopy(self)
##############################
# MISC PRIVATE METHODS #
##############################
def _format_value(self, field, value):
if isinstance(value, int) and field in self._int_format:
value = self._unicode(("%%%sd" % self._int_format[field]) % value)
elif isinstance(value, float) and field in self._float_format:
value = self._unicode(("%%%sf" % self._float_format[field]) % value)
return self._unicode(value)
    def _compute_table_width(self, options):
        """Return the total printed width of the table in characters.
        Relies on self._widths having been populated by _compute_widths."""
        # Two characters for the outer frame when vertical rules are drawn.
        table_width = 2 if options["vrules"] in (FRAME, ALL) else 0
        per_col_padding = sum(self._get_padding_widths(options))
        for index, fieldname in enumerate(self.field_names):
            # Only count columns that will actually be printed.
            if not options["fields"] or (options["fields"] and fieldname in options["fields"]):
                table_width += self._widths[index] + per_col_padding
        return table_width
    def _compute_widths(self, rows, options):
        """Compute per-column print widths for *rows* and store them in self._widths.
        Widths honour per-field max_width/min_width and are then rescaled to
        respect max_table_width, min_table_width and the title width."""
        if options["header"]:
            # Start from the header text sizes so no column is narrower
            # than its field name.
            widths = [_get_size(field)[0] for field in self._field_names]
        else:
            widths = len(self.field_names) * [0]
        for row in rows:
            for index, value in enumerate(row):
                fieldname = self.field_names[index]
                if fieldname in self.max_width:
                    # Cap the contribution of each cell at the field's max.
                    widths[index] = max(widths[index], min(_get_size(value)[0], self.max_width[fieldname]))
                else:
                    widths[index] = max(widths[index], _get_size(value)[0])
                if fieldname in self.min_width:
                    widths[index] = max(widths[index], self.min_width[fieldname])
        self._widths = widths
        # Are we exceeding max_table_width?
        if self._max_table_width:
            table_width = self._compute_table_width(options)
            if table_width > self._max_table_width:
                # Shrink widths in proportion
                scale = 1.0*self._max_table_width / table_width
                widths = [int(math.floor(w*scale)) for w in widths]
                self._widths = widths
        # Are we under min_table_width or title width?
        if self._min_table_width or options["title"]:
            if options["title"]:
                # Title must fit between the outer frame characters.
                title_width = len(options["title"])+sum(self._get_padding_widths(options))
                if options["vrules"] in (FRAME, ALL):
                    title_width += 2
            else:
                title_width = 0
            min_table_width = self.min_table_width or 0
            min_width = max(title_width, min_table_width)
            table_width = self._compute_table_width(options)
            if table_width < min_width:
                # Grow widths in proportion
                scale = 1.0*min_width / table_width
                widths = [int(math.ceil(w*scale)) for w in widths]
                self._widths = widths
def _get_padding_widths(self, options):
if options["left_padding_width"] is not None:
lpad = options["left_padding_width"]
else:
lpad = options["padding_width"]
if options["right_padding_width"] is not None:
rpad = options["right_padding_width"]
else:
rpad = options["padding_width"]
return lpad, rpad
    def _get_rows(self, options):
        """Return only those data rows that should be printed, based on slicing and sorting.
        Arguments:
        options - dictionary of option settings."""
        if options["oldsortslice"]:
            # Legacy behaviour: apply start/end slicing *before* sorting.
            rows = copy.deepcopy(self._rows[options["start"]:options["end"]])
        else:
            rows = copy.deepcopy(self._rows)
        # Sort
        if options["sortby"]:
            sortindex = self._field_names.index(options["sortby"])
            # Decorate
            rows = [[row[sortindex]]+row for row in rows]
            # Sort
            # NOTE: sort_key (if any) receives the *decorated* row, whose
            # first element is the sort column's value.
            rows.sort(reverse=options["reversesort"], key=options["sort_key"])
            # Undecorate
            rows = [row[1:] for row in rows]
        # Slice if necessary
        if not options["oldsortslice"]:
            rows = rows[options["start"]:options["end"]]
        return rows
    def _format_row(self, row, options):
        # Format each cell against its field; *options* is accepted for
        # signature symmetry with the other per-row helpers but unused here.
        return [self._format_value(field, value) for (field, value) in zip(self._field_names, row)]
    def _format_rows(self, rows, options):
        # Apply per-field int/float formatting to every row via _format_row.
        return [self._format_row(row, options) for row in rows]
##############################
# PLAIN TEXT STRING METHODS #
##############################
    def get_string(self, **kwargs):
        """Return string representation of table in current state.
        Arguments:
        title - optional table title
        start - index of first data row to include in output
        end - index of last data row to include in output PLUS ONE (list slice style)
        fields - names of fields (columns) to include
        header - print a header showing field names (True or False)
        border - print a border around the table (True or False)
        hrules - controls printing of horizontal rules after rows. Allowed values: ALL, FRAME, HEADER, NONE
        vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE
        int_format - controls formatting of integer data
        float_format - controls formatting of floating point data
        padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
        left_padding_width - number of spaces on left hand side of column data
        right_padding_width - number of spaces on right hand side of column data
        vertical_char - single character string used to draw vertical lines
        horizontal_char - single character string used to draw horizontal lines
        junction_char - single character string used to draw line junctions
        sortby - name of field to sort rows by
        sort_key - sorting key function, applied to data points before sorting
        reversesort - True or False to sort in descending or ascending order
        print_empty - if True, stringify just the header for an empty table, if False return an empty string"""
        options = self._get_options(kwargs)
        lines = []
        # Don't think too hard about an empty table
        # Is this the desired behaviour? Maybe we should still print the header?
        if self.rowcount == 0 and (not options["print_empty"] or not options["border"]):
            return ""
        # Get the rows we need to print, taking into account slicing, sorting, etc.
        rows = self._get_rows(options)
        # Turn all data in all rows into Unicode, formatted as desired
        formatted_rows = self._format_rows(rows, options)
        # Compute column widths
        self._compute_widths(formatted_rows, options)
        # Cache the horizontal rule; the _stringify_* helpers reuse it.
        self._hrule = self._stringify_hrule(options)
        # Add title
        title = options["title"] or self._title
        if title:
            lines.append(self._stringify_title(title, options))
        # Add header or top of border
        if options["header"]:
            lines.append(self._stringify_header(options))
        elif options["border"] and options["hrules"] in (ALL, FRAME):
            lines.append(self._hrule)
        # Add rows
        for row in formatted_rows:
            lines.append(self._stringify_row(row, options))
        # Add bottom of border
        # (When hrules is ALL, each row already appended its own rule.)
        if options["border"] and options["hrules"] == FRAME:
            lines.append(self._hrule)
        return self._unicode("\n").join(lines)
    def _stringify_hrule(self, options):
        """Build one horizontal-rule line, with junction characters wherever
        a vertical rule would cross it.  Returns "" when borders are off."""
        if not options["border"]:
            return ""
        lpad, rpad = self._get_padding_widths(options)
        # Left edge: junction if the frame draws a vertical rule there.
        if options['vrules'] in (ALL, FRAME):
            bits = [options["junction_char"]]
        else:
            bits = [options["horizontal_char"]]
        # For tables with no data or fieldnames
        if not self._field_names:
            bits.append(options["junction_char"])
            return "".join(bits)
        for field, width in zip(self._field_names, self._widths):
            if options["fields"] and field not in options["fields"]:
                continue
            bits.append((width+lpad+rpad)*options["horizontal_char"])
            if options['vrules'] == ALL:
                bits.append(options["junction_char"])
            else:
                bits.append(options["horizontal_char"])
        # With FRAME rules the loop ended with a plain horizontal char;
        # swap it for the closing junction.
        if options["vrules"] == FRAME:
            bits.pop()
            bits.append(options["junction_char"])
        return "".join(bits)
    def _stringify_title(self, title, options):
        """Build the title banner: an optional top rule plus the centred
        title line.  Relies on self._hrule already being cached."""
        lines = []
        lpad, rpad = self._get_padding_widths(options)
        if options["border"]:
            if options["vrules"] == ALL:
                # Temporarily downgrade vrules to FRAME so the rule above
                # the title has no internal junctions, then restore it.
                options["vrules"] = FRAME
                lines.append(self._stringify_hrule(options))
                options["vrules"] = ALL
            elif options["vrules"] == FRAME:
                lines.append(self._stringify_hrule(options))
        bits = []
        endpoint = options["vertical_char"] if options["vrules"] in (ALL, FRAME) else " "
        bits.append(endpoint)
        title = " "*lpad + title + " "*rpad
        # Centre the padded title between the two edge characters.
        bits.append(self._justify(title, len(self._hrule)-2, "c"))
        bits.append(endpoint)
        lines.append("".join(bits))
        return "\n".join(lines)
    def _stringify_header(self, options):
        """Build the header block: optional top rule, the field-name line
        (styled per _header_style), and an optional rule underneath."""
        bits = []
        lpad, rpad = self._get_padding_widths(options)
        if options["border"]:
            if options["hrules"] in (ALL, FRAME):
                bits.append(self._hrule)
                bits.append("\n")
            if options["vrules"] in (ALL, FRAME):
                bits.append(options["vertical_char"])
            else:
                bits.append(" ")
        # For tables with no data or field names
        if not self._field_names:
            if options["vrules"] in (ALL, FRAME):
                bits.append(options["vertical_char"])
            else:
                bits.append(" ")
        for field, width, in zip(self._field_names, self._widths):
            if options["fields"] and field not in options["fields"]:
                continue
            # Apply the configured capitalisation style to the field name.
            if self._header_style == "cap":
                fieldname = field.capitalize()
            elif self._header_style == "title":
                fieldname = field.title()
            elif self._header_style == "upper":
                fieldname = field.upper()
            elif self._header_style == "lower":
                fieldname = field.lower()
            else:
                fieldname = field
            bits.append(" " * lpad + self._justify(fieldname, width, self._align[field]) + " " * rpad)
            if options["border"]:
                if options["vrules"] == ALL:
                    bits.append(options["vertical_char"])
                else:
                    bits.append(" ")
        # If vrules is FRAME, then we just appended a space at the end
        # of the last field, when we really want a vertical character
        if options["border"] and options["vrules"] == FRAME:
            bits.pop()
            bits.append(options["vertical_char"])
        if options["border"] and options["hrules"] != NONE:
            bits.append("\n")
            bits.append(self._hrule)
        return "".join(bits)
    def _stringify_row(self, row, options):
        """Render one data row (possibly several physical lines tall).
        NOTE: *row* is mutated in place - over-wide cells are re-wrapped
        with textwrap before rendering."""
        for index, field, value, width, in zip(range(0,len(row)), self._field_names, row, self._widths):
            # Enforce max widths
            lines = value.split("\n")
            new_lines = []
            for line in lines:
                if _str_block_width(line) > width:
                    line = textwrap.fill(line, width)
                new_lines.append(line)
            lines = new_lines
            value = "\n".join(lines)
            row[index] = value
        # The row's physical height is that of its tallest cell.
        row_height = 0
        for c in row:
            h = _get_size(c)[1]
            if h > row_height:
                row_height = h
        bits = []
        lpad, rpad = self._get_padding_widths(options)
        # One list of fragments per physical line, seeded with the left edge.
        for y in range(0, row_height):
            bits.append([])
            if options["border"]:
                if options["vrules"] in (ALL, FRAME):
                    bits[y].append(self.vertical_char)
                else:
                    bits[y].append(" ")
        for field, value, width, in zip(self._field_names, row, self._widths):
            valign = self._valign[field]
            lines = value.split("\n")
            # Pad short cells with blank lines per the field's vertical align.
            dHeight = row_height - len(lines)
            if dHeight:
                if valign == "m":
                    lines = [""] * int(dHeight / 2) + lines + [""] * (dHeight - int(dHeight / 2))
                elif valign == "b":
                    lines = [""] * dHeight + lines
                else:
                    lines = lines + [""] * dHeight
            y = 0
            for l in lines:
                if options["fields"] and field not in options["fields"]:
                    continue
                bits[y].append(" " * lpad + self._justify(l, width, self._align[field]) + " " * rpad)
                if options["border"]:
                    if options["vrules"] == ALL:
                        bits[y].append(self.vertical_char)
                    else:
                        bits[y].append(" ")
                y += 1
        # If vrules is FRAME, then we just appended a space at the end
        # of the last field, when we really want a vertical character
        for y in range(0, row_height):
            if options["border"] and options["vrules"] == FRAME:
                bits[y].pop()
                bits[y].append(options["vertical_char"])
        if options["border"] and options["hrules"]== ALL:
            bits[row_height-1].append("\n")
            bits[row_height-1].append(self._hrule)
        for y in range(0, row_height):
            bits[y] = "".join(bits[y])
        return "\n".join(bits)
def paginate(self, page_length=58, **kwargs):
pages = []
kwargs["start"] = kwargs.get("start", 0)
true_end = kwargs.get("end", self.rowcount)
while True:
kwargs["end"] = min(kwargs["start"] + page_length, true_end)
pages.append(self.get_string(**kwargs))
if kwargs["end"] == true_end:
break
kwargs["start"] += page_length
return "\f".join(pages)
##############################
# HTML STRING METHODS #
##############################
def get_html_string(self, **kwargs):
"""Return string representation of HTML formatted version of table in current state.
Arguments:
title - optional table title
start - index of first data row to include in output
end - index of last data row to include in output PLUS ONE (list slice style)
fields - names of fields (columns) to include
header - print a header showing field names (True or False)
border - print a border around the table (True or False)
hrules - controls printing of horizontal rules after rows. Allowed values: ALL, FRAME, HEADER, NONE
vrules - controls printing of vertical rules between columns. Allowed values: FRAME, ALL, NONE
int_format - controls formatting of integer data
float_format - controls formatting of floating point data
padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
left_padding_width - number of spaces on left hand side of column data
right_padding_width - number of spaces on right hand side of column data
sortby - name of field to sort rows by
sort_key - sorting key function, applied to data points before sorting
attributes - dictionary of name/value pairs to include as HTML attributes in the <table> tag
xhtml - print <br/> tags if True, <br> tags if false"""
options = self._get_options(kwargs)
if options["format"]:
string = self._get_formatted_html_string(options)
else:
string = self._get_simple_html_string(options)
return string
    def _get_simple_html_string(self, options):
        """Render the table as minimal, unstyled HTML.
        NOTE(review): field names and cell data are HTML-escaped, but the
        title and attribute values are not - confirm inputs are trusted."""
        lines = []
        if options["xhtml"]:
            linebreak = "<br/>"
        else:
            linebreak = "<br>"
        open_tag = []
        open_tag.append("<table")
        if options["attributes"]:
            for attr_name in options["attributes"]:
                open_tag.append(" %s=\"%s\"" % (attr_name, options["attributes"][attr_name]))
        open_tag.append(">")
        lines.append("".join(open_tag))
        # Title
        title = options["title"] or self._title
        if title:
            cols = len(options["fields"]) if options["fields"] else len(self.field_names)
            lines.append("    <tr>")
            lines.append("        <td colspan=%d>%s</td>" % (cols, title))
            lines.append("    </tr>")
        # Headers
        if options["header"]:
            lines.append("    <tr>")
            for field in self._field_names:
                if options["fields"] and field not in options["fields"]:
                    continue
                lines.append("        <th>%s</th>" % escape(field).replace("\n", linebreak))
            lines.append("    </tr>")
        # Data
        rows = self._get_rows(options)
        formatted_rows = self._format_rows(rows, options)
        for row in formatted_rows:
            lines.append("    <tr>")
            for field, datum in zip(self._field_names, row):
                if options["fields"] and field not in options["fields"]:
                    continue
                lines.append("        <td>%s</td>" % escape(datum).replace("\n", linebreak))
            lines.append("    </tr>")
        lines.append("</table>")
        return self._unicode("\n").join(lines)
    def _get_formatted_html_string(self, options):
        """Render the table as styled HTML: hrules/vrules map onto the
        <table> frame/rules attributes and padding/alignment become inline
        CSS on each cell.
        NOTE(review): field names and cell data are HTML-escaped, but the
        title and attribute values are not - confirm inputs are trusted."""
        lines = []
        lpad, rpad = self._get_padding_widths(options)
        if options["xhtml"]:
            linebreak = "<br/>"
        else:
            linebreak = "<br>"
        open_tag = []
        open_tag.append("<table")
        if options["border"]:
            # Translate the hrules/vrules combination into the closest
            # HTML frame= / rules= attribute pair.
            if options["hrules"] == ALL and options["vrules"] == ALL:
                open_tag.append(" frame=\"box\" rules=\"all\"")
            elif options["hrules"] == FRAME and options["vrules"] == FRAME:
                open_tag.append(" frame=\"box\"")
            elif options["hrules"] == FRAME and options["vrules"] == ALL:
                open_tag.append(" frame=\"box\" rules=\"cols\"")
            elif options["hrules"] == FRAME:
                open_tag.append(" frame=\"hsides\"")
            elif options["hrules"] == ALL:
                open_tag.append(" frame=\"hsides\" rules=\"rows\"")
            elif options["vrules"] == FRAME:
                open_tag.append(" frame=\"vsides\"")
            elif options["vrules"] == ALL:
                open_tag.append(" frame=\"vsides\" rules=\"cols\"")
        if options["attributes"]:
            for attr_name in options["attributes"]:
                open_tag.append(" %s=\"%s\"" % (attr_name, options["attributes"][attr_name]))
        open_tag.append(">")
        lines.append("".join(open_tag))
        # Title
        title = options["title"] or self._title
        if title:
            cols = len(options["fields"]) if options["fields"] else len(self.field_names)
            lines.append("    <tr>")
            lines.append("        <td colspan=%d>%s</td>" % (cols, title))
            lines.append("    </tr>")
        # Headers
        if options["header"]:
            lines.append("    <tr>")
            for field in self._field_names:
                if options["fields"] and field not in options["fields"]:
                    continue
                lines.append("        <th style=\"padding-left: %dem; padding-right: %dem; text-align: center\">%s</th>" % (lpad, rpad, escape(field).replace("\n", linebreak)))
            lines.append("    </tr>")
        # Data
        rows = self._get_rows(options)
        formatted_rows = self._format_rows(rows, options)
        # Pre-translate the single-letter align/valign codes into CSS values.
        aligns = []
        valigns = []
        for field in self._field_names:
            aligns.append({ "l" : "left", "r" : "right", "c" : "center" }[self._align[field]])
            valigns.append({"t" : "top", "m" : "middle", "b" : "bottom"}[self._valign[field]])
        for row in formatted_rows:
            lines.append("    <tr>")
            for field, datum, align, valign in zip(self._field_names, row, aligns, valigns):
                if options["fields"] and field not in options["fields"]:
                    continue
                lines.append("        <td style=\"padding-left: %dem; padding-right: %dem; text-align: %s; vertical-align: %s\">%s</td>" % (lpad, rpad, align, valign, escape(datum).replace("\n", linebreak)))
            lines.append("    </tr>")
        lines.append("</table>")
        return self._unicode("\n").join(lines)
##############################
# UNICODE WIDTH FUNCTIONS #
##############################
def _char_block_width(char):
    """Return the terminal-cell width of the code point *char* (an int):
    2 for wide CJK/full-width characters, 0 for combining/control
    characters, -1 for backspace/delete, else 1."""
    # Basic Latin, which is probably the most common case
    #if char in xrange(0x0021, 0x007e):
    #if char >= 0x0021 and char <= 0x007e:
    if 0x0021 <= char <= 0x007e:
        return 1
    # Chinese, Japanese, Korean (common)
    if 0x4e00 <= char <= 0x9fff:
        return 2
    # Hangul
    if 0xac00 <= char <= 0xd7af:
        return 2
    # Combining?
    if unicodedata.combining(uni_chr(char)):
        return 0
    # Hiragana and Katakana
    if 0x3040 <= char <= 0x309f or 0x30a0 <= char <= 0x30ff:
        return 2
    # Full-width Latin characters
    if 0xff01 <= char <= 0xff60:
        return 2
    # CJK punctuation
    if 0x3000 <= char <= 0x303e:
        return 2
    # Backspace and delete
    if char in (0x0008, 0x007f):
        return -1
    # Other control characters
    # NOTE(review): this is a membership test on three specific code points,
    # not the ranges 0x00-0x0f/0x1f it appears to intend - confirm.
    elif char in (0x0000, 0x000f, 0x001f):
        return 0
    # Take a guess
    return 1
def _str_block_width(val):
    # Display width of *val* in terminal cells: strip characters matched by
    # the module-level _re pattern (defined elsewhere; presumably escape
    # sequences - confirm), then sum per-character widths.  itermap is the
    # module's map alias.
    return sum(itermap(_char_block_width, itermap(ord, _re.sub("", val))))
##############################
# TABLE FACTORIES #
##############################
def from_csv(fp, field_names = None, **kwargs):
    """Build a PrettyTable from the CSV data in file object *fp*.
    Recognised csv.reader format parameters are split out of **kwargs;
    remaining kwargs go to the PrettyTable constructor.  If *field_names*
    is not given, the first CSV row supplies the field names."""
    fmtparams = {}
    for param in ["delimiter", "doublequote", "escapechar", "lineterminator",
                  "quotechar", "quoting", "skipinitialspace", "strict"]:
        if param in kwargs:
            fmtparams[param] = kwargs.pop(param)
    if fmtparams:
        reader = csv.reader(fp, **fmtparams)
    else:
        # No explicit format given: sniff the dialect from a sample,
        # then rewind so the reader sees the whole stream.
        dialect = csv.Sniffer().sniff(fp.read(1024))
        fp.seek(0)
        reader = csv.reader(fp, dialect)
    table = PrettyTable(**kwargs)
    if field_names:
        table.field_names = field_names
    else:
        # Python 2/3 compatibility for advancing the reader one row.
        if py3k:
            table.field_names = [x.strip() for x in next(reader)]
        else:
            table.field_names = [x.strip() for x in reader.next()]
    for row in reader:
        table.add_row([x.strip() for x in row])
    return table
def from_db_cursor(cursor, **kwargs):
    """Build a PrettyTable from an executed DB-API cursor.
    Returns None when the cursor has no result set (no description)."""
    if not cursor.description:
        return None
    table = PrettyTable(**kwargs)
    table.field_names = [column[0] for column in cursor.description]
    for db_row in cursor.fetchall():
        table.add_row(db_row)
    return table
class TableHandler(HTMLParser):
    """HTML parser that turns each <table> element it sees into a PrettyTable.

    Finished PrettyTable objects accumulate in ``self.tables`` in document
    order.  Cells are gathered as text; <th> cells mark their row as a
    header row, and a colspan=N cell contributes N-1 extra empty cells so
    all rows line up.
    """
    def __init__(self, **kwargs):
        HTMLParser.__init__(self)
        self.kwargs = kwargs            # forwarded to every PrettyTable built
        self.tables = []                # finished PrettyTable objects
        self.last_row = []              # cells of the row currently being read
        self.rows = []                  # (cells, is_header) tuples for current table
        self.max_row_width = 0          # widest row seen in the current table
        self.active = None              # tag currently open, if any
        self.last_content = ""         # text accumulated inside the open tag
        self.is_last_row_header = False
        self.colspan = 0
    def handle_starttag(self, tag, attrs):
        self.active = tag
        if tag == "th":
            self.is_last_row_header = True
        for (key, value) in attrs:
            if key == "colspan":
                self.colspan = int(value)
    def handle_endtag(self, tag):
        if tag in ["th", "td"]:
            stripped_content = self.last_content.strip()
            self.last_row.append(stripped_content)
            if self.colspan:
                # A cell spanning N columns yields N-1 extra empty cells.
                for i in range(1, self.colspan):
                    self.last_row.append("")
                self.colspan = 0
        if tag == "tr":
            self.rows.append(
                (self.last_row, self.is_last_row_header))
            self.max_row_width = max(self.max_row_width, len(self.last_row))
            self.last_row = []
            self.is_last_row_header = False
        if tag == "table":
            table = self.generate_table(self.rows)
            self.tables.append(table)
            self.rows = []
        self.last_content = " "
        self.active = None
    def handle_data(self, data):
        # Text may arrive in several chunks; accumulate until the end tag.
        self.last_content += data
    def generate_table(self, rows):
        """
        Generates from a list of rows a PrettyTable object.
        """
        table = PrettyTable(**self.kwargs)
        # Bug fixes: iterate over the `rows` argument (previously self.rows
        # was read, silently ignoring the parameter), and pad the FULL
        # shortfall - the old range(1, appends) added one cell too few,
        # which made PrettyTable.add_row raise on any short row.
        for row in rows:
            if len(row[0]) < self.max_row_width:
                appends = self.max_row_width - len(row[0])
                for i in range(appends):
                    row[0].append("-")
            if row[1]:
                self.make_fields_unique(row[0])
                table.field_names = row[0]
            else:
                table.add_row(row[0])
        return table
    def make_fields_unique(self, fields):
        """
        iterates over the row and make each field unique
        """
        # Append apostrophes to later duplicates so field names stay unique.
        for i in range(0, len(fields)):
            for j in range(i+1, len(fields)):
                if fields[i] == fields[j]:
                    fields[j] += "'"
def from_html(html_code, **kwargs):
    """
    Generates a list of PrettyTables from a string of HTML code. Each <table> in
    the HTML becomes one PrettyTable object.
    """
    handler = TableHandler(**kwargs)
    handler.feed(html_code)
    return handler.tables
def from_html_one(html_code, **kwargs):
    """
    Generates a PrettyTable from a string of HTML code which contains only a
    single <table>
    """
    tables = from_html(html_code, **kwargs)
    # Explicit check instead of `assert`: assertions are stripped when
    # Python runs with -O, which would have silently returned the first
    # of several tables instead of raising.
    if len(tables) != 1:
        raise Exception("More than one <table> in provided HTML code! Use from_html instead.")
    return tables[0]
##############################
# MAIN (TEST FUNCTION) #
##############################
def main():
    """Demo: build the same table twice, once via option setters and once
    via constructor keyword arguments, and print both."""
    print("Generated using setters:")
    x = PrettyTable(["City name", "Area", "Population", "Annual Rainfall"])
    x.title = "Australian capital cities"
    x.sortby = "Population"
    x.reversesort = True
    x.int_format["Area"] = "04"
    x.float_format = "6.1"
    x.align["City name"] = "l" # Left align city names
    x.add_row(["Adelaide", 1295, 1158259, 600.5])
    x.add_row(["Brisbane", 5905, 1857594, 1146.4])
    x.add_row(["Darwin", 112, 120900, 1714.7])
    x.add_row(["Hobart", 1357, 205556, 619.5])
    x.add_row(["Sydney", 2058, 4336374, 1214.8])
    x.add_row(["Melbourne", 1566, 3806092, 646.9])
    x.add_row(["Perth", 5386, 1554769, 869.4])
    print(x)
    # Bug fix: this was a bare `print` statement, which under Python 3 (the
    # dialect the rest of this file uses, via print() calls) evaluates the
    # builtin and prints nothing.  print() emits the intended blank line.
    print()
    print("Generated using constructor arguments:")
    y = PrettyTable(["City name", "Area", "Population", "Annual Rainfall"],
        title = "Australian capital cities",
        sortby = "Population",
        reversesort = True,
        int_format = "04",
        float_format = "6.1",
        max_width = 12,
        min_width = 4,
        align = "c",
        valign = "t")
    y.align["City name"] = "l" # Left align city names
    y.add_row(["Adelaide", 1295, 1158259, 600.5])
    y.add_row(["Brisbane", 5905, 1857594, 1146.4])
    y.add_row(["Darwin", 112, 120900, 1714.7])
    y.add_row(["Hobart", 1357, 205556, 619.5])
    y.add_row(["Sydney", 2058, 4336374, 1214.8])
    y.add_row(["Melbourne", 1566, 3806092, 646.9])
    y.add_row(["Perth", 5386, 1554769, 869.4])
    print(y)
if __name__ == "__main__":
    main()
| bsd-3-clause |
indashnet/InDashNet.Open.UN2000 | android/bionic/libc/tools/generate-NOTICE.py | 5 | 5062 | #!/usr/bin/python
# Run with directory arguments from any directory, with no special setup required.
# Or:
# for i in libc libdl libm linker libstdc++ libthread_db ; do ./libc/tools/generate-NOTICE.py $i > $i/NOTICE ; done
import ftplib
import hashlib
import os
import re
import shutil
import string
import subprocess
import sys
import tarfile
import tempfile
def IsUninteresting(path):
    """Return True for paths whose contents never carry a copyright notice
    worth extracting (build files, scripts, docs, tzdata, etc.)."""
    lowered = path.lower()
    boring_suffixes = (".mk", ".py", ".pyc", ".txt", ".3",
                       "/notice", "/readme", "/caveats",
                       "/tzdata", "/zoneinfo/generate")
    return lowered.endswith(boring_suffixes)
def IsAutoGenerated(content):
    """Return True for file contents produced by a generator rather than
    written by hand (their notices live with the generator, not here)."""
    markers = (
        "generated by gensyscalls.py",
        "generated by genserv.py",
        "This header was automatically generated from a Linux kernel header",
    )
    return any(marker in content for marker in markers)
# Global accumulator: every distinct cleaned copyright text seen so far.
copyrights = set()
def ExtractCopyrightAt(lines, i):
    """Extract the copyright comment block containing line *i*, add its
    cleaned text to the global `copyrights` set, and return the index of
    the last line consumed."""
    hash = lines[i].startswith("#")
    # Do we need to back up to find the start of the copyright header?
    start = i
    if not hash:
        while start > 0:
            if "/*" in lines[start - 1]:
                break
            start -= 1
    # Read comment lines until we hit something that terminates a
    # copyright header.
    while i < len(lines):
        if "*/" in lines[i]:
            break
        if hash and len(lines[i]) == 0:
            break
        # BSD-style version/ID tags mark the end of the notice proper.
        if "\t@(#)" in lines[i] or "\tfrom: @(#)" in lines[i] or "From: @(#)" in lines[i] or "from OpenBSD:" in lines[i]:
            break
        if "\tcitrus Id: " in lines[i]:
            break
        if "\t$OpenBSD: " in lines[i] or " $FreeBSD: " in lines[i] or "\t$NetBSD: " in lines[i]:
            break
        if "$FreeBSD$" in lines[i] or "$Citrus$" in lines[i]:
            break
        i += 1
    end = i
    # Trim trailing cruft.
    while end > 0:
        if lines[end - 1] != " *" and lines[end - 1] != " * ====================================================":
            break
        end -= 1
    # Remove C/assembler comment formatting, pulling out just the text.
    clean_lines = []
    for line in lines[start:end]:
        line = line.replace("\t", " ")
        line = line.replace("/* ", "")
        line = line.replace(" * ", "")
        line = line.replace("** ", "")
        line = line.replace("# ", "")
        if line.startswith("++Copyright++"):
            continue
        line = line.replace("--Copyright--", "")
        line = line.rstrip()
        # These come last and take care of "blank" comment lines.
        if line == "#" or line == " *" or line == "**" or line == "-":
            line = ""
        clean_lines.append(line)
    # Trim blank lines from head and tail.
    while clean_lines[0] == "":
        clean_lines = clean_lines[1:]
    while clean_lines[len(clean_lines) - 1] == "":
        clean_lines = clean_lines[0:(len(clean_lines) - 1)]
    copyright = "\n".join(clean_lines)
    copyrights.add(copyright)
    return i
# Script entry: walk every directory named on the command line (default:
# the current directory), extract copyright headers from interesting
# source files, and print the de-duplicated, sorted notices to stdout.
# NOTE: this file is Python 2 (str.decode on byte strings, print
# statements below) and must be run with a Python 2 interpreter.
args = sys.argv[1:]
if len(args) == 0:
    args = [ "." ]
for arg in args:
    sys.stderr.write('Searching for source files in "%s"...\n' % arg)
    for directory, sub_directories, filenames in os.walk(arg):
        if ".git" in sub_directories:
            sub_directories.remove(".git")
        # Sort for deterministic output ordering across runs.
        sub_directories = sorted(sub_directories)
        for filename in sorted(filenames):
            path = os.path.join(directory, filename)
            if IsUninteresting(path):
                #print "ignoring uninteresting file %s" % path
                continue
            try:
                content = open(path, 'r').read().decode('utf-8')
            except:
                # TODO: update hash.h, md5.c, and md5.h; upstream is probably UTF-8 already.
                sys.stderr.write('warning: bad UTF-8 in %s\n' % path)
                content = open(path, 'r').read().decode('iso-8859-1')
            lines = content.split("\n")
            if len(lines) <= 4:
                #print "ignoring short file %s" % path
                continue
            if IsAutoGenerated(content):
                #print "ignoring auto-generated file %s" % path
                continue
            if not "Copyright" in content:
                if "public domain" in content.lower():
                    #print "ignoring public domain file %s" % path
                    continue
                sys.stderr.write('warning: no copyright notice found in "%s" (%d lines)\n' % (path, len(lines)))
                continue
            # A file may contain several notices; extract each one.
            i = 0
            while i < len(lines):
                if "Copyright" in lines[i]:
                    i = ExtractCopyrightAt(lines, i)
                i += 1
            #print path
# Emit the collected notices, blank-line separated, with a trailing rule.
for copyright in sorted(copyrights):
    print copyright.encode('utf-8')
    print
print '-------------------------------------------------------------------'
print
sys.exit(0)
| apache-2.0 |
renyi533/tensorflow | tensorflow/python/ops/control_flow_v2_func_graphs.py | 8 | 1402 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""FuncGraphs for V2 control flow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework.func_graph import FuncGraph
class CondBranchFuncGraph(FuncGraph):
  """FuncGraph for branches of tf.cond().
  This is used to distinguish cond branches from other functions.
  """
  # Marker subclass: adds no behaviour of its own.
  pass
class WhileCondFuncGraph(FuncGraph):
  """FuncGraph for the condition of tf.while_loop().
  This is used to distinguish while conditions from other functions.
  """
  # Marker subclass: adds no behaviour of its own.
  pass
class WhileBodyFuncGraph(FuncGraph):
  """FuncGraph for the body of tf.while_loop().
  This is used to distinguish while bodies from other functions.
  """
  # Marker subclass: adds no behaviour of its own.
  pass
| apache-2.0 |
petrjasek/superdesk-core | apps/publish/publish_service_tests.py | 2 | 3235 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from bson import ObjectId
from nose.tools import assert_raises
from apps.publish import init_app
from superdesk.errors import PublishQueueError
from superdesk.publish import SUBSCRIBER_TYPES
from superdesk.publish.publish_service import PublishService
from superdesk.tests import TestCase
class PublishServiceTests(TestCase):
    """Tests that PublishService closes a subscriber's transmitter only for
    errors the subscriber has marked as critical."""
    # Fixture: one queue item already "in-progress" for subscriber "1".
    queue_items = [
        {
            "_id": "571075791d41c81e204c5c8c",
            "destination": {"name": "NITF", "delivery_type": "ftp", "format": "nitf", "config": {}},
            "subscriber_id": "1",
            "state": "in-progress",
            "item_id": 1,
            "formatted_item": "",
        }
    ]
    # Fixture: a single active wire subscriber; error code 9004 is flagged
    # as critical, so raising it should deactivate the subscriber.
    subscribers = [
        {
            "_id": "1",
            "name": "Test",
            "subscriber_type": SUBSCRIBER_TYPES.WIRE,
            "media_type": "media",
            "is_active": True,
            "sequence_num_settings": {"max": 10, "min": 1},
            "critical_errors": {"9004": True},
            "destinations": [{"name": "NITF", "delivery_type": "ftp", "format": "nitf", "config": {}}],
        }
    ]
    def setUp(self):
        # Load both fixtures into the app's datastore before each test.
        with self.app.app_context():
            self.app.data.insert("subscribers", self.subscribers)
            self.queue_items[0]["_id"] = ObjectId(self.queue_items[0]["_id"])
            self.app.data.insert("publish_queue", self.queue_items)
        init_app(self.app)
    def test_close_subscriber_doesnt_close(self):
        # A non-critical error (unknown format) must leave the subscriber active.
        with self.app.app_context():
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertTrue(subscriber.get("is_active"))
            PublishService().close_transmitter(subscriber, PublishQueueError.unknown_format_error())
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertTrue(subscriber.get("is_active"))
    def test_close_subscriber_does_close(self):
        # A critical error (bad schedule, code 9004) must deactivate the subscriber.
        with self.app.app_context():
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertTrue(subscriber.get("is_active"))
            PublishService().close_transmitter(subscriber, PublishQueueError.bad_schedule_error())
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertFalse(subscriber.get("is_active"))
    def test_transmit_closes_subscriber(self):
        # A critical error raised during transmit() must both propagate and
        # close the subscriber, recording when it was closed.
        def mock_transmit(*args):
            raise PublishQueueError.bad_schedule_error()
        with self.app.app_context():
            subscriber = self.app.data.find_one("subscribers", None)
            publish_service = PublishService()
            publish_service._transmit = mock_transmit
            with assert_raises(PublishQueueError):
                publish_service.transmit(self.queue_items[0])
            subscriber = self.app.data.find_one("subscribers", None)
            self.assertFalse(subscriber.get("is_active"))
            self.assertIsNotNone(subscriber.get("last_closed"))
| agpl-3.0 |
whereismyjetpack/ansible | test/units/plugins/strategy/test_strategy_base.py | 32 | 20994 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import uuid
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.plugins.strategy import StrategyBase
from ansible.executor.process.worker import WorkerProcess
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.executor.task_result import TaskResult
from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.inventory.host import Host
from six.moves import queue as Queue
from units.mock.loader import DictDataLoader
class TestStrategyBase(unittest.TestCase):
    # Unit tests for ansible.plugins.strategy.StrategyBase.  Every test
    # builds its own mock TaskQueueManager/iterator scaffolding, so the
    # tests are heavily order-dependent: mock attributes must be assigned
    # before StrategyBase reads them.

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_strategy_base_init(self):
        # StrategyBase() should construct and clean up against a mocked
        # TaskQueueManager whose final queue is backed by a plain list.
        queue_items = []
        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0
        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()
        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

        mock_queue = MagicMock()
        mock_queue.empty.side_effect = _queue_empty
        mock_queue.get.side_effect = _queue_get
        mock_queue.put.side_effect = _queue_put

        mock_tqm = MagicMock(TaskQueueManager)
        mock_tqm._final_q = mock_queue
        mock_tqm._options = MagicMock()
        mock_tqm._notified_handlers = {}
        mock_tqm._listening_handlers = {}
        strategy_base = StrategyBase(tqm=mock_tqm)
        strategy_base.cleanup()

    def test_strategy_base_run(self):
        # run() should translate the failed/unreachable host state of the
        # TQM into the matching RUN_* result code.
        queue_items = []
        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0
        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()
        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

        mock_queue = MagicMock()
        mock_queue.empty.side_effect = _queue_empty
        mock_queue.get.side_effect = _queue_get
        mock_queue.put.side_effect = _queue_put

        mock_tqm = MagicMock(TaskQueueManager)
        mock_tqm._final_q = mock_queue
        mock_tqm._stats = MagicMock()
        mock_tqm._notified_handlers = {}
        mock_tqm._listening_handlers = {}
        mock_tqm.send_callback.return_value = None

        # Copy the real RUN_* constants onto the mock so run() can return them.
        for attr in ('RUN_OK', 'RUN_ERROR', 'RUN_FAILED_HOSTS', 'RUN_UNREACHABLE_HOSTS'):
            setattr(mock_tqm, attr, getattr(TaskQueueManager, attr))

        mock_iterator = MagicMock()
        mock_iterator._play = MagicMock()
        mock_iterator._play.handlers = []

        mock_play_context = MagicMock()

        mock_tqm._failed_hosts = dict()
        mock_tqm._unreachable_hosts = dict()
        mock_tqm._options = MagicMock()
        mock_tqm._notified_handlers = {}
        mock_tqm._listening_handlers = {}
        strategy_base = StrategyBase(tqm=mock_tqm)

        mock_host = MagicMock()
        mock_host.name = 'host1'

        self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context), mock_tqm.RUN_OK)
        self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=TaskQueueManager.RUN_ERROR), mock_tqm.RUN_ERROR)
        mock_tqm._failed_hosts = dict(host1=True)
        mock_iterator.get_failed_hosts.return_value = [mock_host]
        self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_FAILED_HOSTS)
        mock_tqm._unreachable_hosts = dict(host1=True)
        mock_iterator.get_failed_hosts.return_value = []
        self.assertEqual(strategy_base.run(iterator=mock_iterator, play_context=mock_play_context, result=False), mock_tqm.RUN_UNREACHABLE_HOSTS)
        strategy_base.cleanup()

    def test_strategy_base_get_hosts(self):
        # get_hosts_remaining()/get_failed_hosts() should filter the
        # inventory by the TQM's failed and unreachable host lists.
        queue_items = []
        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0
        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()
        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

        mock_queue = MagicMock()
        mock_queue.empty.side_effect = _queue_empty
        mock_queue.get.side_effect = _queue_get
        mock_queue.put.side_effect = _queue_put

        mock_hosts = []
        for i in range(0, 5):
            mock_host = MagicMock()
            mock_host.name = "host%02d" % (i+1)
            mock_host.has_hostkey = True
            mock_hosts.append(mock_host)

        mock_inventory = MagicMock()
        mock_inventory.get_hosts.return_value = mock_hosts

        mock_tqm = MagicMock()
        mock_tqm._final_q = mock_queue
        mock_tqm._notified_handlers = {}
        mock_tqm._listening_handlers = {}
        mock_tqm.get_inventory.return_value = mock_inventory

        mock_play = MagicMock()
        mock_play.hosts = ["host%02d" % (i+1) for i in range(0, 5)]

        strategy_base = StrategyBase(tqm=mock_tqm)

        mock_tqm._failed_hosts = []
        mock_tqm._unreachable_hosts = []
        self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts)

        mock_tqm._failed_hosts = ["host01"]
        self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[1:])
        self.assertEqual(strategy_base.get_failed_hosts(play=mock_play), [mock_hosts[0]])

        mock_tqm._unreachable_hosts = ["host02"]
        self.assertEqual(strategy_base.get_hosts_remaining(play=mock_play), mock_hosts[2:])
        strategy_base.cleanup()

    @patch.object(WorkerProcess, 'run')
    def test_strategy_base_queue_task(self, mock_worker):
        # _queue_task() should round-robin over the 3 worker slots and
        # bump _pending_results for each queued task.
        def fake_run(self):
            return
        mock_worker.run.side_effect = fake_run

        fake_loader = DictDataLoader()
        mock_var_manager = MagicMock()
        mock_host = MagicMock()
        mock_host.has_hostkey = True
        mock_inventory = MagicMock()
        mock_options = MagicMock()
        mock_options.module_path = None

        tqm = TaskQueueManager(
            inventory=mock_inventory,
            variable_manager=mock_var_manager,
            loader=fake_loader,
            options=mock_options,
            passwords=None,
        )
        tqm._initialize_processes(3)
        tqm.hostvars = dict()

        try:
            strategy_base = StrategyBase(tqm=tqm)
            strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock())
            self.assertEqual(strategy_base._cur_worker, 1)
            self.assertEqual(strategy_base._pending_results, 1)
            strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock())
            self.assertEqual(strategy_base._cur_worker, 2)
            self.assertEqual(strategy_base._pending_results, 2)
            strategy_base._queue_task(host=mock_host, task=MagicMock(), task_vars=dict(), play_context=MagicMock())
            # Third queue wraps the worker index back around to 0.
            self.assertEqual(strategy_base._cur_worker, 0)
            self.assertEqual(strategy_base._pending_results, 3)
        finally:
            tqm.cleanup()

    def test_strategy_base_process_pending_results(self):
        # Drives _wait_on_pending_results() through each TaskResult
        # flavor (ok/failed/unreachable/skipped/add_host/add_group/notify)
        # and checks blocked-host and pending-result bookkeeping.
        mock_tqm = MagicMock()
        mock_tqm._terminated = False
        mock_tqm._failed_hosts = dict()
        mock_tqm._unreachable_hosts = dict()
        mock_tqm.send_callback.return_value = None
        mock_tqm._notified_handlers = {}
        mock_tqm._listening_handlers = {}

        queue_items = []
        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0
        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()
        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

        mock_queue = MagicMock()
        mock_queue.empty.side_effect = _queue_empty
        mock_queue.get.side_effect = _queue_get
        mock_queue.put.side_effect = _queue_put
        mock_tqm._final_q = mock_queue

        mock_tqm._stats = MagicMock()
        mock_tqm._stats.increment.return_value = None

        mock_play = MagicMock()

        mock_host = MagicMock()
        mock_host.name = 'test01'
        mock_host.vars = dict()
        mock_host.has_hostkey = True

        mock_task = MagicMock()
        mock_task._role = None
        mock_task._parent = None
        mock_task.ignore_errors = False
        mock_task._uuid = uuid.uuid4()
        mock_task.loop = None
        mock_task.copy.return_value = mock_task

        mock_handler_task = MagicMock(Handler)
        mock_handler_task.name = 'test handler'
        mock_handler_task.action = 'foo'
        mock_handler_task._parent = None
        mock_handler_task.get_name.return_value = "test handler"
        mock_handler_task.has_triggered.return_value = False
        mock_handler_task._uuid = 'xxxxxxxxxxxxx'
        mock_handler_task.copy.return_value = mock_handler_task

        mock_iterator = MagicMock()
        mock_iterator._play = mock_play
        mock_iterator.mark_host_failed.return_value = None
        mock_iterator.get_next_task_for_host.return_value = (None, None)
        mock_iterator.get_original_task.return_value = mock_task

        mock_handler_block = MagicMock()
        mock_handler_block.block = [mock_handler_task]
        mock_handler_block.rescue = []
        mock_handler_block.always = []
        mock_play.handlers = [mock_handler_block]

        mock_tqm._notified_handlers = {mock_handler_task._uuid: []}
        mock_tqm._listening_handlers = {}

        mock_group = MagicMock()
        mock_group.add_host.return_value = None

        def _get_host(host_name):
            if host_name == 'test01':
                return mock_host
            return None
        def _get_group(group_name):
            if group_name in ('all', 'foo'):
                return mock_group
            return None

        mock_inventory = MagicMock()
        mock_inventory._hosts_cache = dict()
        mock_inventory.get_host.side_effect = _get_host
        mock_inventory.get_group.side_effect = _get_group
        mock_inventory.clear_pattern_cache.return_value = None
        mock_inventory.clear_group_dict_cache.return_value = None
        mock_inventory.get_host_vars.return_value = {}

        mock_var_mgr = MagicMock()
        mock_var_mgr.set_host_variable.return_value = None
        mock_var_mgr.set_host_facts.return_value = None
        mock_var_mgr.get_vars.return_value = dict()

        strategy_base = StrategyBase(tqm=mock_tqm)
        strategy_base._inventory = mock_inventory
        strategy_base._variable_manager = mock_var_mgr
        strategy_base._blocked_hosts = dict()

        def _has_dead_workers():
            return False

        strategy_base._tqm.has_dead_workers.side_effect = _has_dead_workers

        # Empty queue: nothing to collect.
        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 0)

        # A normal (changed) result unblocks the host and drains pending.
        task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True))
        queue_items.append(task_result)
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0], task_result)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)

        # A failed result (iterator reports the host as failed).
        task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"failed":true}')
        queue_items.append(task_result)
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        mock_iterator.is_failed.return_value = True
        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0], task_result)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)
        #self.assertIn('test01', mock_tqm._failed_hosts)
        #del mock_tqm._failed_hosts['test01']
        mock_iterator.is_failed.return_value = False

        # An unreachable result registers the host as unreachable.
        task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"unreachable": true}')
        queue_items.append(task_result)
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0], task_result)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)
        self.assertIn('test01', mock_tqm._unreachable_hosts)
        del mock_tqm._unreachable_hosts['test01']

        # A skipped result is collected without further side effects.
        task_result = TaskResult(host=mock_host.name, task=mock_task._uuid, return_data='{"skipped": true}')
        queue_items.append(task_result)
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0], task_result)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)

        # add_host / add_group results are processed inline.
        queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_host=dict(host_name='newhost01', new_groups=['foo']))))
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)

        queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(add_group=dict(group_name='foo'))))
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)

        # A notifying result records the host against the handler's uuid.
        queue_items.append(TaskResult(host=mock_host.name, task=mock_task._uuid, return_data=dict(changed=True, _ansible_notify=['test handler'])))
        strategy_base._blocked_hosts['test01'] = True
        strategy_base._pending_results = 1

        results = strategy_base._wait_on_pending_results(iterator=mock_iterator)
        self.assertEqual(len(results), 1)
        self.assertEqual(strategy_base._pending_results, 0)
        self.assertNotIn('test01', strategy_base._blocked_hosts)
        self.assertIn(mock_handler_task._uuid, strategy_base._notified_handlers)
        self.assertIn(mock_host, strategy_base._notified_handlers[mock_handler_task._uuid])

        # NOTE(review): the scenarios below were disabled at some point
        # (presumably when the result-queue message format changed) and
        # are kept for reference.
        #queue_items.append(('set_host_var', mock_host, mock_task, None, 'foo', 'bar'))
        #results = strategy_base._process_pending_results(iterator=mock_iterator)
        #self.assertEqual(len(results), 0)
        #self.assertEqual(strategy_base._pending_results, 1)

        #queue_items.append(('set_host_facts', mock_host, mock_task, None, 'foo', dict()))
        #results = strategy_base._process_pending_results(iterator=mock_iterator)
        #self.assertEqual(len(results), 0)
        #self.assertEqual(strategy_base._pending_results, 1)

        #queue_items.append(('bad'))
        #self.assertRaises(AnsibleError, strategy_base._process_pending_results, iterator=mock_iterator)
        strategy_base.cleanup()

    def test_strategy_base_load_included_file(self):
        # _load_included_file() should parse a valid include and return
        # an empty list for an empty/bad one.
        fake_loader = DictDataLoader({
            "test.yml": """
            - debug: msg='foo'
            """,
            "bad.yml": """
            """,
        })

        queue_items = []
        def _queue_empty(*args, **kwargs):
            return len(queue_items) == 0
        def _queue_get(*args, **kwargs):
            if len(queue_items) == 0:
                raise Queue.Empty
            else:
                return queue_items.pop()
        def _queue_put(item, *args, **kwargs):
            queue_items.append(item)

        mock_queue = MagicMock()
        mock_queue.empty.side_effect = _queue_empty
        mock_queue.get.side_effect = _queue_get
        mock_queue.put.side_effect = _queue_put

        mock_tqm = MagicMock()
        mock_tqm._final_q = mock_queue
        mock_tqm._notified_handlers = {}
        mock_tqm._listening_handlers = {}

        strategy_base = StrategyBase(tqm=mock_tqm)
        strategy_base._loader = fake_loader
        strategy_base.cleanup()

        mock_play = MagicMock()

        mock_block = MagicMock()
        mock_block._play = mock_play
        mock_block.vars = dict()

        mock_task = MagicMock()
        mock_task._block = mock_block
        mock_task._role = None
        mock_task._parent = None

        mock_iterator = MagicMock()
        mock_iterator.mark_host_failed.return_value = None

        mock_inc_file = MagicMock()
        mock_inc_file._task = mock_task

        mock_inc_file._filename = "test.yml"
        res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)

        mock_inc_file._filename = "bad.yml"
        res = strategy_base._load_included_file(included_file=mock_inc_file, iterator=mock_iterator)
        self.assertEqual(res, [])

    @patch.object(WorkerProcess, 'run')
    def test_strategy_base_run_handlers(self, mock_worker):
        # run_handlers() should execute a notified handler against a real
        # TaskQueueManager with patched-out worker processes.
        def fake_run(*args):
            return
        mock_worker.side_effect = fake_run

        mock_play_context = MagicMock()

        mock_handler_task = MagicMock(Handler)
        mock_handler_task.action = 'foo'
        mock_handler_task.get_name.return_value = "test handler"
        mock_handler_task.has_triggered.return_value = False
        mock_handler_task.listen = None
        mock_handler_task._role = None
        mock_handler_task._parent = None
        mock_handler_task._uuid = 'xxxxxxxxxxxxxxxx'

        mock_handler = MagicMock()
        mock_handler.block = [mock_handler_task]
        mock_handler.flag_for_host.return_value = False

        mock_play = MagicMock()
        mock_play.handlers = [mock_handler]

        mock_host = MagicMock(Host)
        mock_host.name = "test01"
        mock_host.has_hostkey = True

        mock_inventory = MagicMock()
        mock_inventory.get_hosts.return_value = [mock_host]

        mock_var_mgr = MagicMock()
        mock_var_mgr.get_vars.return_value = dict()

        mock_iterator = MagicMock()
        mock_iterator._play = mock_play
        mock_iterator.get_original_task.return_value = mock_handler_task

        fake_loader = DictDataLoader()
        mock_options = MagicMock()
        mock_options.module_path = None

        tqm = TaskQueueManager(
            inventory=mock_inventory,
            variable_manager=mock_var_mgr,
            loader=fake_loader,
            options=mock_options,
            passwords=None,
        )
        tqm._initialize_processes(3)
        tqm._initialize_notified_handlers(mock_play)
        tqm.hostvars = dict()

        try:
            strategy_base = StrategyBase(tqm=tqm)

            strategy_base._inventory = mock_inventory
            strategy_base._notified_handlers = {mock_handler_task._uuid: [mock_host]}

            task_result = TaskResult(Host('host01'), Handler(), dict(changed=False))
            tqm._final_q.put(task_result)

            result = strategy_base.run_handlers(iterator=mock_iterator, play_context=mock_play_context)
        finally:
            strategy_base.cleanup()
            tqm.cleanup()
| gpl-3.0 |
a-slide/ContaVect | ContaVect_src/Contavect_csv_sum.py | 3 | 2177 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from sys import argv as argv
import os, csv, glob
""" Draft function to summarize the number of reads mapped in distribution files from several runs
of Contavect in a single comprehensive csv file"""
def main():
    """Merge the read-count columns of every matching CSV into one table.

    The output table's first two columns (reference name, reference
    length) are taken from the alphabetically-first input file, followed
    by one read-count column per input file (headed by the file name
    minus the shared suffix).  Command line: argv[1] is the common file
    suffix, argv[2] the output path.
    """
    # Matching files in deterministic alphabetical order (the original
    # materialized iglob() then sorted in place).
    suffix = argv[1]
    ref_files = sorted(glob.glob("*" + suffix))
    print(ref_files)

    # Reference names and lengths (columns 0 and 1) from the first file,
    # read in a single pass instead of one pass per column.
    with open(ref_files[0], newline='') as csvfile:
        rows = list(csv.reader(csvfile, delimiter='\t'))
    columns = [[row[0] for row in rows], [row[1] for row in rows]]

    # Read-count column (column 2) from every file; the header cell is
    # replaced by the file name minus the shared suffix.
    for path in ref_files:
        with open(path, newline='') as csvfile:
            counts = [row[2] for row in csv.reader(csvfile, delimiter='\t')]
        counts[0] = path.replace(suffix, '')
        columns.append(counts)

    # Transpose columns -> rows and write the merged table.
    # (Replaces the manual index-based transpose and per-row writerow loop;
    # the trailing exit(0) was dropped so main() returns normally.)
    with open(argv[2], 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter='\t', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        writer.writerows(zip(*columns))
    print("done")
def usage():
    """Print command-line usage and an example invocation."""
    prog = argv[0]
    print("Usage: ", prog, "<Patern of csv files to match> <output name of the csv file>")
    print("\tExample : ", prog, " _Reference_distribution.csv ALL_Reference_distribution.csv")
if __name__ == '__main__':
    # Both the file pattern (argv[1]) and the output name (argv[2]) are
    # required, so fewer than 3 argv entries is a usage error.  The old
    # check (`< 2`) let "prog pattern" through and main() then crashed
    # with an IndexError on argv[2].
    if len(argv) < 3:  # if not enough args, show the usage message
        usage()
    else:
        main()  # else call the main function
| gpl-2.0 |
mwiencek/picard | picard/ui/cdlookup.py | 1 | 3277 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from PyQt4 import QtCore, QtGui
from picard.ui.ui_cdlookup import Ui_Dialog
from picard.mbxml import artist_credit_from_node, label_info_from_node
class CDLookupDialog(QtGui.QDialog):
    """Dialog listing the MusicBrainz releases that match a looked-up CD.

    Accepting the dialog loads the selected release (associated with the
    disc id); the lookup button opens the disc submission URL instead.
    """

    def __init__(self, releases, disc, parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.releases = releases
        self.disc = disc
        self.ui = Ui_Dialog()
        self.ui.setupUi(self)
        self.ui.release_list.setSortingEnabled(True)
        self.ui.release_list.setHeaderLabels([_(u"Album"), _(u"Artist"), _(u"Date"), _(u"Country"),
                                              _(u"Labels"), _(u"Catalog #s"), _(u"Barcode")])
        if self.releases:
            # One row per candidate release; missing fields default to "".
            for release in self.releases:
                labels, catalog_numbers = label_info_from_node(release.label_info_list[0])
                date = release.date[0].text if "date" in release.children else ""
                country = release.country[0].text if "country" in release.children else ""
                barcode = release.barcode[0].text if "barcode" in release.children else ""
                item = QtGui.QTreeWidgetItem(self.ui.release_list)
                item.setText(0, release.title[0].text)
                item.setText(1, artist_credit_from_node(release.artist_credit[0], self.config)[0])
                item.setText(2, date)
                item.setText(3, country)
                item.setText(4, ", ".join(labels))
                item.setText(5, ", ".join(catalog_numbers))
                item.setText(6, barcode)
                # Stash the release id on the row for retrieval in accept().
                item.setData(0, QtCore.Qt.UserRole, QtCore.QVariant(release.id))
            self.ui.release_list.setCurrentItem(self.ui.release_list.topLevelItem(0))
            self.ui.ok_button.setEnabled(True)
        # Plain loop instead of the previous list comprehension that was
        # used purely for its side effects.
        for i in range(self.ui.release_list.columnCount() - 1):
            self.ui.release_list.resizeColumnToContents(i)
        # Sort by descending date, then ascending country
        self.ui.release_list.sortByColumn(3, QtCore.Qt.AscendingOrder)
        self.ui.release_list.sortByColumn(2, QtCore.Qt.DescendingOrder)
        self.ui.lookup_button.clicked.connect(self.lookup)

    def accept(self):
        """Load the selected release into the tagger and close the dialog."""
        release_id = str(self.ui.release_list.currentItem().data(0, QtCore.Qt.UserRole).toString())
        self.tagger.load_album(release_id, discid=self.disc.id)
        QtGui.QDialog.accept(self)

    def lookup(self):
        """Open the disc submission lookup in the browser and close."""
        lookup = self.tagger.get_file_lookup()
        lookup.discLookup(self.disc.submission_url)
        QtGui.QDialog.accept(self)
| gpl-2.0 |
BorisJeremic/Real-ESSI-Examples | analytic_solution/test_cases/Contact/Coupled_Contact/Steady_State_Single_Foundation_Sysytem_Under_Tension/CoupledSoftContact/n_0.5/compare_txt.py | 637 | 2094 | #!/usr/bin/python
import h5py
import sys
import numpy as np
import os
import re
import random
# find the path to my own python function:
cur_dir=os.getcwd()
sep='test_cases'
test_DIR=cur_dir.split(sep,1)[0]
scriptDIR=test_DIR+'compare_function'
sys.path.append(scriptDIR)
# import my own function for color and comparator
from mycomparator import *
from mycolor_fun import *
# analytic_solution = sys.argv[1]
# numeric_result = sys.argv[2]
analytic_solution = 'analytic_solution.txt'
numeric_result = 'numeric_result.txt'
analytic_sol = np.loadtxt(analytic_solution)
numeric_res = np.loadtxt(numeric_result)
abs_error = abs(analytic_sol - numeric_res)
rel_error = abs_error/analytic_sol
analytic_sol = float(analytic_sol)
numeric_res = float(numeric_res)
rel_error = float(rel_error)
# print the results
case_flag=1
print headrun() , "-----------Testing results-----------------"
print headstep() ,'{0} {1} {2} '.format('analytic_solution ','numeric_result ','error[%]')
print headOK() ,'{0:+e} {1:+e} {2:+0.2f} '.format(analytic_sol, numeric_res, rel_error )
if(case_flag==1):
print headOKCASE(),"-----------Done this case!-----------------"
# legacy backup
# find . -name 'element.fei' -exec bash -c 'mv $0 ${0/element.fei/add_element.include}' {} \;
# find . -name 'constraint.fei' -exec bash -c 'mv $0 ${0/constraint.fei/add_constraint.include}' {} \;
# find . -name 'node.fei' -exec bash -c 'mv $0 ${0/node.fei/add_node.include}' {} \;
# find . -name 'add_node.fei' -exec bash -c 'mv $0 ${0/add_node.fei/add_node.include}' {} \;
# find . -name 'elementLT.fei' -exec bash -c 'mv $0 ${0/elementLT.fei/add_elementLT.include}' {} \;
# sed -i "s/node\.fei/add_node.include/" main.fei
# sed -i "s/add_node\.fei/add_node.include/" main.fei
# sed -i "s/element\.fei/add_element.include/" main.fei
# sed -i "s/elementLT\.fei/add_elementLT.include/" main.fei
# sed -i "s/constraint\.fei/add_constraint.include/" main.fei
# find . -name '*_bak.h5.feioutput' -exec bash -c 'mv $0 ${0/\_bak.h5.feioutput/\_original\.h5.feioutput}' {} \;
| cc0-1.0 |
p2pu/mechanical-mooc | twitter/views.py | 1 | 1825 | from django import http
from django.conf import settings
from django.views.decorators.http import require_http_methods
import json
from twitter import utils
@require_http_methods(['POST'])
def get_data(request):
    """Return Twitter profile data (avatar, name, bio) for a handle as JSON.

    Responds 500 when no ``twitter_handle`` is posted and 404 when the
    handle cannot be resolved through the Twitter API.
    """
    # Membership test on the QueryDict itself instead of .keys().
    if 'twitter_handle' not in request.POST:
        return http.HttpResponseServerError()
    twitter_handle = request.POST.get('twitter_handle')
    creds = (settings.TWITTER_ACCESS_TOKEN, settings.TWITTER_ACCESS_TOKEN_SECRET)
    try:
        user_data = utils.get_user_data(twitter_handle, creds)
        bio_data = {
            'avatar': user_data['profile_image_url'],
            'name': user_data['name'],
            'bio': user_data['description']
        }
        # Twitter serves a small thumbnail by default; stripping the
        # "_normal" suffix yields the full-size avatar URL.
        if '_normal' in bio_data['avatar']:
            bio_data['avatar'] = bio_data['avatar'].replace('_normal', '')
        return http.HttpResponse(json.dumps(bio_data))
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any API or key-lookup failure is
        # reported to the client as "not found".
        return http.HttpResponseNotFound()
def old(request):
    """Start the Twitter OAuth dance and redirect to the authenticate URL."""
    token = utils.get_request_token()
    request.session['oauth_token'] = token['oauth_token']
    request.session['oauth_token_secret'] = token['oauth_token_secret']
    redirect_url = 'https://api.twitter.com/oauth/authenticate?oauth_token={0}'.format(
        token['oauth_token']
    )
    return http.HttpResponseRedirect(redirect_url)
def oauth_callback(request):
    # OAuth return leg: exchange the verifier for an access token, then
    # fetch the authenticated user's profile data.
    oauth_token = request.GET.get('oauth_token')
    oauth_verifier = request.GET.get('oauth_verifier')
    oauth_token_secret = request.session['oauth_token_secret']
    access_token_dict = utils.get_access_token(oauth_verifier, (oauth_token, oauth_token_secret))
    # `user` is fetched but never used before the raise below.
    user = utils.get_user_data(
        access_token_dict['screen_name'],
        (access_token_dict['oauth_token'], access_token_dict['oauth_token_secret'])
    )
    # NOTE(review): unconditional raise — this view appears unfinished or
    # left in a debugging state; it can never return a response.
    raise Exception()
| mit |
gsehub/edx-platform | openedx/tests/completion_integration/test_models.py | 10 | 9583 | """
Test models, managers, and validators.
"""
from __future__ import absolute_import, division, unicode_literals
from completion import models, waffle
from completion.test_utils import CompletionWaffleTestMixin, submit_completions_for_testing
from django.core.exceptions import ValidationError
from django.test import TestCase
from opaque_keys.edx.keys import CourseKey, UsageKey
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import CourseEnrollmentFactory, UserFactory
@skip_unless_lms
class PercentValidatorTestCase(TestCase):
    """
    Test that validate_percent only allows floats (and ints) between 0.0 and 1.0.
    """
    def test_valid_percents(self):
        """Values inside [0.0, 1.0] (ints included) pass validation."""
        for good in (1.0, 0.0, 1, 0, 0.5, 0.333081348071397813987230871):
            models.validate_percent(good)

    def test_invalid_percent(self):
        """Out-of-range values, None, inf and nan are all rejected."""
        bad_values = (-0.00000000001, 1.0000000001, 47.1, 1000, None, float('inf'), float('nan'))
        for bad in bad_values:
            with self.assertRaises(ValidationError):
                models.validate_percent(bad)
class CompletionSetUpMixin(CompletionWaffleTestMixin):
    """
    Mixin that provides helper to create test BlockCompletion object.
    """
    def set_up_completion(self):
        """Create a user plus a single half-complete video BlockCompletion."""
        block_id = u'block-v1:edx+test+run+type@video+block@doggos'
        self.user = UserFactory()
        self.block_key = UsageKey.from_string(block_id)
        self.completion = models.BlockCompletion.objects.create(
            user=self.user,
            course_key=self.block_key.course_key,
            block_type=self.block_key.block_type,
            block_key=self.block_key,
            completion=0.5,
        )
@skip_unless_lms
class SubmitCompletionTestCase(CompletionSetUpMixin, TestCase):
    """
    Test that BlockCompletion.objects.submit_completion has the desired
    semantics.
    """
    def setUp(self):
        super(SubmitCompletionTestCase, self).setUp()
        self.override_waffle_switch(True)
        self.set_up_completion()

    def _submit(self, user, block_key, value):
        """Shortcut around BlockCompletion.objects.submit_completion."""
        return models.BlockCompletion.objects.submit_completion(
            user=user,
            course_key=block_key.course_key,
            block_key=block_key,
            completion=value,
        )

    def test_changed_value(self):
        with self.assertNumQueries(4):  # Get, update, 2 * savepoints
            record, created = self._submit(self.user, self.block_key, 0.9)
        record.refresh_from_db()
        self.assertEqual(record.completion, 0.9)
        self.assertFalse(created)
        self.assertEqual(models.BlockCompletion.objects.count(), 1)

    def test_unchanged_value(self):
        with self.assertNumQueries(1):  # Get
            record, created = self._submit(self.user, self.block_key, 0.5)
        record.refresh_from_db()
        self.assertEqual(record.completion, 0.5)
        self.assertFalse(created)
        self.assertEqual(models.BlockCompletion.objects.count(), 1)

    def test_new_user(self):
        other_user = UserFactory()
        with self.assertNumQueries(4):  # Get, update, 2 * savepoints
            _, created = self._submit(other_user, self.block_key, 0.0)
        self.assertTrue(created)
        self.assertEqual(models.BlockCompletion.objects.count(), 2)

    def test_new_block(self):
        other_block = UsageKey.from_string(u'block-v1:edx+test+run+type@video+block@puppers')
        with self.assertNumQueries(4):  # Get, update, 2 * savepoints
            _, created = self._submit(self.user, other_block, 1.0)
        self.assertTrue(created)
        self.assertEqual(models.BlockCompletion.objects.count(), 2)

    def test_invalid_completion(self):
        # Out-of-range values must be rejected and leave the stored
        # completion untouched.
        with self.assertRaises(ValidationError):
            self._submit(self.user, self.block_key, 1.2)
        stored = models.BlockCompletion.objects.get(user=self.user, block_key=self.block_key)
        self.assertEqual(stored.completion, 0.5)
        self.assertEqual(models.BlockCompletion.objects.count(), 1)
@skip_unless_lms
class CompletionDisabledTestCase(CompletionSetUpMixin, TestCase):
    """
    Tests that completion API is not called when the feature is disabled.
    """
    def setUp(self):
        super(CompletionDisabledTestCase, self).setUp()
        # insert one completion record...
        self.set_up_completion()
        # ...then disable the feature.
        self.override_waffle_switch(False)

    def test_cannot_call_submit_completion(self):
        submit_kwargs = dict(
            user=self.user,
            course_key=self.block_key.course_key,
            block_key=self.block_key,
            completion=0.9,
        )
        self.assertEqual(models.BlockCompletion.objects.count(), 1)
        with self.assertRaises(RuntimeError):
            models.BlockCompletion.objects.submit_completion(**submit_kwargs)
        # The failed call must not have touched the stored record count.
        self.assertEqual(models.BlockCompletion.objects.count(), 1)
@skip_unless_lms
class SubmitBatchCompletionTestCase(CompletionWaffleTestMixin, TestCase):
    """
    Test that BlockCompletion.objects.submit_batch_completion has the desired
    semantics.
    """
    def setUp(self):
        super(SubmitBatchCompletionTestCase, self).setUp()
        self.override_waffle_switch(True)

        self.block_key = UsageKey.from_string('block-v1:edx+test+run+type@video+block@doggos')
        self.course_key_obj = CourseKey.from_string('course-v1:edx+test+run')
        self.user = UserFactory()
        CourseEnrollmentFactory.create(user=self.user, course_id=unicode(self.course_key_obj))

    def test_submit_batch_completion(self):
        # A batch of one (block, value) pair creates exactly one row.
        blocks = [(self.block_key, 1.0)]
        models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
        self.assertEqual(models.BlockCompletion.objects.count(), 1)
        self.assertEqual(models.BlockCompletion.objects.last().completion, 1.0)

    def test_submit_batch_completion_without_waffle(self):
        # Batch submission must refuse to run with the feature disabled.
        with waffle.waffle().override(waffle.ENABLE_COMPLETION_TRACKING, False):
            with self.assertRaises(RuntimeError):
                blocks = [(self.block_key, 1.0)]
                models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)

    def test_submit_batch_completion_with_same_block_new_completion_value(self):
        # Resubmitting the same block with a new value updates the existing
        # row in place rather than creating a second one.
        blocks = [(self.block_key, 0.0)]
        self.assertEqual(models.BlockCompletion.objects.count(), 0)
        models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
        self.assertEqual(models.BlockCompletion.objects.count(), 1)
        model = models.BlockCompletion.objects.first()
        self.assertEqual(model.completion, 0.0)
        blocks = [
            (UsageKey.from_string('block-v1:edx+test+run+type@video+block@doggos'), 1.0),
        ]
        models.BlockCompletion.objects.submit_batch_completion(self.user, self.course_key_obj, blocks)
        self.assertEqual(models.BlockCompletion.objects.count(), 1)
        model = models.BlockCompletion.objects.first()
        self.assertEqual(model.completion, 1.0)
@skip_unless_lms
class BatchCompletionMethodTests(CompletionWaffleTestMixin, TestCase):
    """
    Tests for the classmethods that retrieve course/block completion data.
    """
    def setUp(self):
        super(BatchCompletionMethodTests, self).setUp()
        self.override_waffle_switch(True)

        self.user = UserFactory.create()
        self.other_user = UserFactory.create()
        self.course_key = CourseKey.from_string("edX/MOOC101/2049_T2")
        self.other_course_key = CourseKey.from_string("course-v1:ReedX+Hum110+1904")
        self.block_keys = [UsageKey.from_string("i4x://edX/MOOC101/video/{}".format(number)) for number in xrange(5)]

        # Overlapping completion sets across two users and two courses so
        # the per-user/per-course filtering below is meaningful.
        submit_completions_for_testing(self.user, self.course_key, self.block_keys[:3])
        submit_completions_for_testing(self.other_user, self.course_key, self.block_keys[2:])
        submit_completions_for_testing(self.user, self.other_course_key, [self.block_keys[4]])

    def test_get_course_completions_missing_runs(self):
        actual_completions = models.BlockCompletion.get_course_completions(self.user, self.course_key)
        # Old-style (run-less) block keys come back with the course run
        # filled in from the course key.
        expected_block_keys = [key.replace(course_key=self.course_key) for key in self.block_keys[:3]]
        # 1.0, 0.8, 0.6 are presumably the values assigned by
        # submit_completions_for_testing (defined elsewhere) -- TODO confirm.
        expected_completions = dict(zip(expected_block_keys, [1.0, 0.8, 0.6]))
        self.assertEqual(expected_completions, actual_completions)

    def test_get_course_completions_empty_result_set(self):
        # A user with no completions in a course gets an empty dict.
        self.assertEqual(
            models.BlockCompletion.get_course_completions(self.other_user, self.other_course_key),
            {}
        )

    def test_get_latest_block_completed(self):
        self.assertEqual(
            models.BlockCompletion.get_latest_block_completed(self.user, self.course_key).block_key,
            self.block_keys[2]
        )

    def test_get_latest_completed_none_exist(self):
        self.assertIsNone(models.BlockCompletion.get_latest_block_completed(self.other_user, self.other_course_key))
| agpl-3.0 |
noironetworks/nova | nova/scheduler/filters/isolated_hosts_filter.py | 60 | 3357 | # Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.scheduler import filters
# Config knobs driving image/host isolation; consumed by
# IsolatedHostsFilter below and registered on the global CONF.
isolated_opts = [
    cfg.ListOpt('isolated_images',
                default=[],
                help='Images to run on isolated host'),
    cfg.ListOpt('isolated_hosts',
                default=[],
                help='Host reserved for specific images'),
    cfg.BoolOpt('restrict_isolated_hosts_to_isolated_images',
                default=True,
                help='Whether to force isolated hosts to run only isolated '
                     'images'),
]

CONF = cfg.CONF
CONF.register_opts(isolated_opts)
class IsolatedHostsFilter(filters.BaseHostFilter):
    """Restrict "isolated" images to "isolated" hosts.

    Driven by three config values: ``CONF.isolated_images``,
    ``CONF.isolated_hosts`` and
    ``CONF.restrict_isolated_hosts_to_isolated_images``.

    With the restrictive flag set (the default), isolated hosts run only
    isolated images and isolated images run only on isolated hosts.  With
    it unset, isolated hosts may run any image, but isolated images are
    still confined to isolated hosts.
    """

    # The configuration values do not change within a request.
    run_filter_once_per_request = True

    def host_passes(self, host_state, filter_properties):
        """Return True if this host may run the requested image.

        Truth table with 'restrict_isolated_hosts_to_isolated_images'
        set to True::

            |              | isolated_image | non_isolated_image
            | -------------+----------------+-------------------
            | iso_host     | True           | False
            | non_iso_host | False          | True

        and with it set to False::

            |              | isolated_image | non_isolated_image
            | -------------+----------------+-------------------
            | iso_host     | True           | True
            | non_iso_host | False          | True
        """
        iso_hosts = CONF.isolated_hosts
        iso_images = CONF.isolated_images
        restricted = (CONF.
                      restrict_isolated_hosts_to_isolated_images)

        host_is_isolated = host_state.host in iso_hosts

        if not iso_images:
            # No isolated images are configured -- most likely a config
            # error, so let everything through unless the restrictive flag
            # is set, in which case isolated hosts accept nothing.
            return not restricted or not host_is_isolated

        request_spec = filter_properties.get('request_spec', {})
        instance_props = request_spec.get('instance_properties', {})
        image_is_isolated = instance_props.get('image_ref') in iso_images

        if restricted:
            # Isolated images and isolated hosts must pair up exactly.
            return image_is_isolated == host_is_isolated
        # Non-restrictive mode: isolated hosts run anything; isolated
        # images still require an isolated host.
        return host_is_isolated or not image_is_isolated
| apache-2.0 |
bolshoibooze/sympy_gamma | app/logic/utils.py | 5 | 16740 | from __future__ import division
import difflib
import collections
import traceback
import sys
import ast
import re
from StringIO import StringIO
import sympy
from sympy.core.relational import Relational
import sympy.parsing.sympy_tokenize as sympy_tokenize
from token import NAME
# Lowercase names that are genuine SymPy callables and therefore should not
# be wrapped in \mathrm{...} by LatexVisitor.visit_Call.
OTHER_SYMPY_FUNCTIONS = ('sqrt',)

# Parsed call description: function name, positional args, keyword args.
Arguments = collections.namedtuple('Arguments', 'function args kwargs')
class Eval(object):
    """Evaluate Python source strings inside a persistent namespace.

    The namespace dict is reused across calls, so assignments made by one
    ``eval`` are visible to the next.
    """

    def __init__(self, namespace=None):
        # Fix: the original used a mutable default argument (namespace={}),
        # which made every Eval() created without an explicit namespace
        # share -- and mutate -- the same dict.
        self._namespace = {} if namespace is None else namespace

    def get(self, name):
        """Return the value bound to ``name``, or None if unbound."""
        return self._namespace.get(name)

    def set(self, name, value):
        """Bind ``name`` to ``value`` in the namespace."""
        self._namespace[name] = value

    def eval_node(self, node):
        """Evaluate a single ast expression node in the namespace."""
        tree = ast.fix_missing_locations(ast.Expression(node))
        return eval(compile(tree, '<string>', 'eval'), self._namespace)

    def eval(self, x, use_none_for_exceptions=False, repr_expression=True):
        """Exec all but the last line of ``x``, then eval the last line.

        Returns captured stdout plus (by default) the repr of the last
        line's value.  On error, returns the formatted traceback (or None
        when ``use_none_for_exceptions`` is set).
        """
        globals = self._namespace
        try:
            x = x.strip()
            x = x.replace("\r", "")
            y = x.split('\n')
            if len(y) == 0:
                return ''
            # s: everything except the last line (executed as statements);
            # t: the last line, evaluated as an expression if possible.
            s = '\n'.join(y[:-1]) + '\n'
            t = y[-1]
            try:
                z = compile(t + '\n', '', 'eval')
            except SyntaxError:
                # Last line is a statement, not an expression: execute it
                # with the rest and produce no expression value.
                s += '\n' + t
                z = None

            try:
                # Capture anything printed during execution.
                old_stdout = sys.stdout
                sys.stdout = StringIO()
                eval(compile(s, '', 'exec', division.compiler_flag), globals, globals)

                if not z is None:
                    r = eval(z, globals)

                    if repr_expression:
                        r = repr(r)
                else:
                    r = ''

                if repr_expression:
                    sys.stdout.seek(0)
                    r = sys.stdout.read() + r
            finally:
                sys.stdout = old_stdout
            return r
        except:
            if use_none_for_exceptions:
                return
            etype, value, tb = sys.exc_info()
            # If we decide in the future to remove the first frame fromt he
            # traceback (since it links to our code, so it could be confusing
            # to the user), it's easy to do:
            #tb = tb.tb_next
            s = "".join(traceback.format_exception(etype, value, tb))
            return s
class LatexVisitor(ast.NodeVisitor):
    """AST visitor that renders a parsed expression as LaTeX.

    An ``evaluator`` attribute (an Eval instance) must be assigned before
    visiting: sub-nodes are evaluated with it and the results rendered via
    sympy.latex.  The result is left in ``self.latex``.
    """

    # Function names mapped to unevaluated SymPy classes, so e.g.
    # integrate(...) renders as an Integral rather than its antiderivative.
    EXCEPTIONS = {'integrate': sympy.Integral, 'diff': sympy.Derivative}
    # Registry of function name -> formatter, populated at import time by
    # the @formats_function decorator; shared by all instances.
    formatters = {}

    @staticmethod
    def formats_function(name):
        # Decorator factory: register ``f`` as the formatter for ``name``.
        def _formats_function(f):
            LatexVisitor.formatters[name] = f
            return f
        return _formats_function

    def format(self, name, node):
        """Apply the registered formatter for ``name``; None if absent."""
        formatter = LatexVisitor.formatters.get(name)

        if not formatter:
            return None

        return formatter(node, self)

    def visit_Call(self, node):
        """Render a function call, special-casing known function names."""
        buffer = []
        fname = node.func.id

        # Only apply to lowercase names (i.e. functions, not classes)
        if fname in self.__class__.EXCEPTIONS:
            # Rewrite the call in place to the unevaluated SymPy class.
            node.func.id = self.__class__.EXCEPTIONS[fname].__name__
            self.latex = sympy.latex(self.evaluator.eval_node(node))
        else:
            result = self.format(fname, node)
            if result:
                self.latex = result
            elif fname[0].islower() and fname not in OTHER_SYMPY_FUNCTIONS:
                # Unknown lowercase function: typeset its name literally.
                buffer.append("\\mathrm{%s}" % fname.replace('_', '\\_'))
                buffer.append('(')

                latexes = []
                for arg in node.args:
                    if isinstance(arg, ast.Call) and getattr(arg.func, 'id', None) and arg.func.id[0].lower() == arg.func.id[0]:
                        latexes.append(self.visit_Call(arg))
                    else:
                        latexes.append(sympy.latex(self.evaluator.eval_node(arg)))

                buffer.append(', '.join(latexes))
                buffer.append(')')

                self.latex = ''.join(buffer)
            else:
                self.latex = sympy.latex(self.evaluator.eval_node(node))
        return self.latex
@LatexVisitor.formats_function('solve')
def format_solve(node, visitor):
    # Render solve(expr, *vars) as "solve <expr> [=0] for v1, v2, ...".
    expr = visitor.evaluator.eval_node(node.args[0])
    buffer = [r'\mathrm{solve}\;', sympy.latex(expr)]

    if not isinstance(expr, Relational):
        # A bare expression is implicitly an equation against zero.
        buffer.append('=0')

    if len(node.args) > 1:
        buffer.append(r'\;\mathrm{for}\;')
    for arg in node.args[1:]:
        buffer.append(sympy.latex(visitor.evaluator.eval_node(arg)))
        buffer.append(r',\, ')
    if len(node.args) > 1:
        # Drop the trailing ",\, " separator after the last variable.
        buffer.pop()

    return ''.join(buffer)
@LatexVisitor.formats_function('limit')
def format_limit(node, visitor):
    # Render as an unevaluated Limit.  With fewer than three arguments the
    # call is malformed for Limit, so return None (implicitly) and let
    # visit_Call fall back to its default rendering.
    if len(node.args) >= 3:
        return sympy.latex(
            sympy.Limit(*[visitor.evaluator.eval_node(arg) for arg in node.args]))
@LatexVisitor.formats_function('prime')
def format_prime(node, visitor):
    # prime(n) -> "n^th prime number" with the correct ordinal suffix.
    number = sympy.latex(visitor.evaluator.eval_node(node.args[0]))
    return ''.join([number,
                    r'^\mathrm{',
                    ordinal(int(number)),
                    r'}\; \mathrm{prime~number}'])
@LatexVisitor.formats_function('isprime')
def format_isprime(node, visitor):
    # isprime(n) -> "Is n prime?"
    number = sympy.latex(visitor.evaluator.eval_node(node.args[0]))
    return ''.join([r'\mathrm{Is~}', number, r'\mathrm{~prime?}'])
@LatexVisitor.formats_function('nextprime')
def format_nextprime(node, visitor):
    # nextprime(n) -> "Least prime greater than n".
    number = sympy.latex(visitor.evaluator.eval_node(node.args[0]))
    return r'\mathrm{Least~prime~greater~than~}' + number
@LatexVisitor.formats_function('factorint')
def format_factorint(node, visitor):
    # factorint(n) -> "Prime factorization of n".
    number = sympy.latex(visitor.evaluator.eval_node(node.args[0]))
    return r'\mathrm{Prime~factorization~of~}' + number
@LatexVisitor.formats_function('factor')
def format_factor(node, visitor):
    # factor(expr) -> "Factorization of expr".
    expression = sympy.latex(visitor.evaluator.eval_node(node.args[0]))
    return r'\mathrm{Factorization~of~}' + expression
@LatexVisitor.formats_function('solve_poly_system')
def format_solve_poly_system(node, visitor):
    """Render solve_poly_system(eqns, *vars) as a LaTeX "cases" system.

    Renamed from ``format_factorint``: the original definition reused the
    name of the factorint formatter above, shadowing it at module level.
    Registration was unaffected (it goes through the ``formatters`` dict,
    keyed by 'solve_poly_system'), but the duplicate name was misleading.
    """
    equations = visitor.evaluator.eval_node(node.args[0])
    variables = tuple(map(visitor.evaluator.eval_node, node.args[1:]))

    if len(variables) == 1:
        # Unwrap a lone variable so it renders as "x", not "(x,)".
        variables = variables[0]

    return ''.join([r'\mathrm{Solve~} \begin{cases} ',
                    r'\\'.join(map(sympy.latex, equations)),
                    r'\end{cases} \mathrm{~for~}',
                    sympy.latex(variables)])
@LatexVisitor.formats_function('plot')
def format_plot(node, visitor):
    # plot(expr) or plot(key=value, ...) -> "Plot <args>".
    if node.args:
        function = sympy.latex(visitor.evaluator.eval_node(node.args[0]))
    else:
        # Keyword-only call: show the evaluated keyword dict instead.
        keywords = {}
        for keyword in node.keywords:
            keywords[keyword.arg] = visitor.evaluator.eval_node(keyword.value)
        function = sympy.latex(keywords)
    return r'\mathrm{Plot~}' + function
@LatexVisitor.formats_function('rsolve')
def format_rsolve(node, visitor):
    # rsolve(expr[, func, conds]) -> "Solve the recurrence <expr>=0",
    # optionally followed by the initial conditions from the third arg.
    recurrence = sympy.latex(sympy.Eq(visitor.evaluator.eval_node(node.args[0]), 0))
    if len(node.args) == 3:
        conds = visitor.evaluator.eval_node(node.args[2])
        initconds = '\\\\\n'.join('&' + sympy.latex(sympy.Eq(eqn, val)) for eqn, val in conds.items())
        text = r'&\mathrm{Solve~the~recurrence~}' + recurrence + r'\\'
        condstext = r'&\mathrm{with~initial~conditions}\\'
        return r'\begin{align}' + text + condstext + initconds + r'\end{align}'
    else:
        return r'\mathrm{Solve~the~recurrence~}' + recurrence
# str.format template wrapping a diophantine prompt with the caveat that
# the listed symbols are integers; braces are doubled to survive format().
diophantine_template = (r"\begin{{align}}&{}\\&\mathrm{{where~}}"
                        r"{}\mathrm{{~are~integers}}\end{{align}}")
@LatexVisitor.formats_function('diophantine')
def format_diophantine(node, visitor):
    # diophantine(expr) -> "Solve the diophantine equation <expr>=0",
    # annotated with the free symbols that must be integers.
    expression = visitor.evaluator.eval_node(node.args[0])
    symbols = None
    if isinstance(expression, sympy.Basic):
        symbols = expression.free_symbols
    equation = sympy.latex(sympy.Eq(expression, 0))

    result = r'\mathrm{Solve~the~diophantine~equation~}' + equation
    if symbols:
        result = diophantine_template.format(result, tuple(symbols))

    return result
@LatexVisitor.formats_function('summation')
@LatexVisitor.formats_function('product')
def format_summation_product(node, visitor):
    """Render summation/product calls as an unevaluated Sum or Product.

    Renamed from ``format_diophantine``: the original definition reused
    the name of the diophantine formatter above, shadowing it at module
    level.  Registration through the ``formatters`` dict was unaffected.
    """
    if node.func.id == 'summation':
        klass = sympy.Sum
    else:
        klass = sympy.Product
    return sympy.latex(klass(*map(visitor.evaluator.eval_node, node.args)))
@LatexVisitor.formats_function('help')
def format_help(node, visitor):
    # help(f) -> "Show documentation for f"; argument-less help gets a
    # usage reminder instead.
    if node.args:
        function = visitor.evaluator.eval_node(node.args[0])
        return r'\mathrm{Show~documentation~for~}' + function.__name__
    return r'\mathrm{Show~documentation~(requires~1~argument)}'
class TopCallVisitor(ast.NodeVisitor):
    """Record the outermost function call (or bare name) in a parse tree.

    After ``visit``, ``self.call`` holds the last top-level ast.Call seen,
    or an ast.Name if only bare names occurred, or None for neither.
    Children of a recorded call are never descended into.
    """

    def __init__(self):
        super(TopCallVisitor, self).__init__()
        self.call = None

    def visit_Call(self, node):
        # A call always wins, even if a bare name was recorded earlier;
        # not recursing keeps nested calls out of the result.
        self.call = node

    def visit_Name(self, node):
        # A bare name only counts when nothing was recorded yet.
        if self.call is None:
            self.call = node
# From http://stackoverflow.com/a/739301/262727
# From http://stackoverflow.com/a/739301/262727
def ordinal(n):
    """Return the English ordinal suffix ('st'/'nd'/'rd'/'th') for n."""
    # The teens (11th-13th, 111th-113th, ...) are irregular: always 'th'.
    if 10 <= n % 100 < 20:
        return 'th'
    suffixes = {1: 'st', 2: 'nd', 3: 'rd'}
    return suffixes.get(n % 10, 'th')
# TODO: modularize all of this
# TODO: modularize all of this
def latexify(string, evaluator):
    """Parse ``string`` and render it as LaTeX using ``evaluator``."""
    a = LatexVisitor()
    a.evaluator = evaluator
    a.visit(ast.parse(string))
    return a.latex
def topcall(string):
    """Return the name of the outermost call (or bare name) in ``string``.

    Returns None when the expression contains neither a call nor a name,
    or when the called object has no simple name (e.g. ``a.b()``).

    Fixes for the original: ``hasattr(a, 'call')`` was always True (the
    attribute is set in __init__), so ``a.call.func`` raised
    AttributeError when no call was found (call is None) or when the
    visitor recorded a bare ast.Name (which has no ``func``).
    """
    a = TopCallVisitor()
    a.visit(ast.parse(string))
    node = a.call
    if isinstance(node, ast.Call):
        return getattr(node.func, 'id', None)
    if isinstance(node, ast.Name):
        # Mirrors arguments(): a bare name maps to its identifier.
        return node.id
    return None
def arguments(string_or_node, evaluator):
    """Describe the top-level call in a string or ast.Call node.

    Returns an Arguments namedtuple (function name, evaluated positional
    args, evaluated keyword args), or None if no call/name was found.
    """
    node = None
    if not isinstance(string_or_node, ast.Call):
        a = TopCallVisitor()
        a.visit(ast.parse(string_or_node))

        if hasattr(a, 'call'):
            node = a.call
    else:
        node = string_or_node

    if node:
        if isinstance(node, ast.Call):
            name = getattr(node.func, 'id', None)  # when is it undefined?
            args, kwargs = None, None
            if node.args:
                args = list(map(evaluator.eval_node, node.args))

            kwargs = node.keywords
            if kwargs:
                kwargs = {kwarg.arg: evaluator.eval_node(kwarg.value) for kwarg in kwargs}

            return Arguments(name, args, kwargs)
        elif isinstance(node, ast.Name):
            # A bare name is treated as a zero-argument "call".
            return Arguments(node.id, [], {})
    return None
# Matches SymPy constructor calls such as Integer(2), Symbol('x') or
# Float(1.5) so they can be reduced back to the bare literal/name.
re_calls = re.compile(r'(Integer|Symbol|Float|Rational)\s*\([\'\"]?([a-zA-Z0-9\.]+)[\'\"]?\s*\)')

def re_calls_sub(match):
    # Keep only the second capture group: the wrapped literal or name.
    return match.groups()[1]
def removeSymPy(string):
    """Strip SymPy constructor wrappers (Integer(2) -> 2) from a string."""
    try:
        return re_calls.sub(re_calls_sub, string)
    except IndexError:
        # Defensive: re_calls always captures two groups, so this is not
        # expected to trigger -- TODO confirm and possibly remove.
        return string
from sympy.parsing.sympy_parser import (
AppliedFunction, implicit_multiplication, split_symbols,
function_exponentiation, implicit_application, OP, NAME,
_group_parentheses, _apply_functions, _flatten, _token_callable)
def _implicit_multiplication(tokens, local_dict, global_dict):
    """Insert '*' tokens where multiplication is implied.

    Token-level pass over pairs of adjacent tokens; order of the branches
    matters, and branch conditions are deliberately overlapping, so treat
    this as order-dependent and modify with care.
    """
    result = []
    for tok, nextTok in zip(tokens, tokens[1:]):
        result.append(tok)
        if (isinstance(tok, AppliedFunction) and
              isinstance(nextTok, AppliedFunction)):
            result.append((OP, '*'))
        elif (isinstance(tok, AppliedFunction) and
              nextTok[0] == OP and nextTok[1] == '('):
            # Applied function followed by an open parenthesis
            if (tok.function[1] == 'Symbol' and
                len(tok.args[1][1]) == 3):
                # Allow implicit function symbol creation
                # TODO XXX need some way to offer alternative parsing here -
                # sometimes we want this and sometimes not, hard to tell when
                # (making it context-sensitive based on input function best)
                continue
            result.append((OP, '*'))
        elif (tok[0] == OP and tok[1] == ')' and
              isinstance(nextTok, AppliedFunction)):
            # Close parenthesis followed by an applied function
            result.append((OP, '*'))
        elif (tok[0] == OP and tok[1] == ')' and
              nextTok[0] == NAME):
            # Close parenthesis followed by an implicitly applied function
            result.append((OP, '*'))
        elif (tok[0] == nextTok[0] == OP
              and tok[1] == ')' and nextTok[1] == '('):
            # Close parenthesis followed by an open parenthesis
            result.append((OP, '*'))
        elif (isinstance(tok, AppliedFunction) and nextTok[0] == NAME):
            # Applied function followed by implicitly applied function
            result.append((OP, '*'))
        elif (tok[0] == NAME and
              not _token_callable(tok, local_dict, global_dict) and
              nextTok[0] == OP and nextTok[1] == '('):
            # Constant followed by parenthesis
            result.append((OP, '*'))
        elif (tok[0] == NAME and
              not _token_callable(tok, local_dict, global_dict) and
              nextTok[0] == NAME and
              not _token_callable(nextTok, local_dict, global_dict)):
            # Constant followed by constant
            result.append((OP, '*'))
        elif (tok[0] == NAME and
              not _token_callable(tok, local_dict, global_dict) and
              (isinstance(nextTok, AppliedFunction) or nextTok[0] == NAME)):
            # Constant followed by (implicitly applied) function
            result.append((OP, '*'))
    if tokens:
        # zip() above drops the final token; restore it.
        result.append(tokens[-1])
    return result
def implicit_multiplication(result, local_dict, global_dict):
    """Makes the multiplication operator optional in most cases.

    NOTE: deliberately overrides the same-named transformation imported
    from sympy.parsing.sympy_parser above, so that the local
    _implicit_multiplication (with Gamma-specific tweaks) is used.

    Use this before :func:`implicit_application`, otherwise expressions like
    ``sin 2x`` will be parsed as ``x * sin(2)`` rather than ``sin(2*x)``.

    Example:

    >>> from sympy.parsing.sympy_parser import (parse_expr,
    ... standard_transformations, implicit_multiplication)
    >>> transformations = standard_transformations + (implicit_multiplication,)
    >>> parse_expr('3 x y', transformations=transformations)
    3*x*y
    """
    for step in (_group_parentheses(implicit_multiplication),
                 _apply_functions,
                 _implicit_multiplication):
        result = step(result, local_dict, global_dict)

    result = _flatten(result)
    return result
def custom_implicit_transformation(result, local_dict, global_dict):
    """Allows a slightly relaxed syntax.

    - Parentheses for single-argument method calls are optional.

    - Multiplication is implicit.

    - Symbol names can be split (i.e. spaces are not needed between
      symbols).

    - Functions can be exponentiated.

    Example:

    >>> from sympy.parsing.sympy_parser import (parse_expr,
    ... standard_transformations, implicit_multiplication_application)
    >>> parse_expr("10sin**2 x**2 + 3xyz + tan theta",
    ... transformations=(standard_transformations +
    ... (implicit_multiplication_application,)))
    3*x*y*z + 10*sin(x**2)**2 + tan(theta)
    """
    # Order matters: symbol splitting must precede implicit
    # multiplication, which must precede implicit application.
    for step in (split_symbols, implicit_multiplication,
                 implicit_application, function_exponentiation):
        result = step(result, local_dict, global_dict)

    return result
# User-facing aliases mapped to their canonical SymPy function names;
# applied at the token level by synonyms() below.
SYNONYMS = {
    u'derivative': 'diff',
    u'derive': 'diff',
    u'integral': 'integrate',
    u'antiderivative': 'integrate',
    u'factorize': 'factor',
    u'graph': 'plot',
    u'draw': 'plot'
}
def synonyms(tokens, local_dict, global_dict):
    """Make some names synonyms for others.

    This is done at the token level so that the "stringified" output that
    Gamma displays shows the correct function name. Must be applied before
    auto_symbol.
    """
    rewritten = []
    for token in tokens:
        # Only NAME tokens with a registered synonym are rewritten;
        # everything else passes through untouched (whatever its shape).
        if token[0] == NAME and token[1] in SYNONYMS:
            rewritten.append((NAME, SYNONYMS[token[1]]))
        else:
            rewritten.append(token)
    return rewritten
def close_matches(s, global_dict):
    """
    Checks undefined names to see if they are close matches to a defined name.

    Returns the corrected expression string, or None when nothing changed.
    """
    tokens = sympy_tokenize.generate_tokens(StringIO(s.strip()).readline)
    result = []
    has_result = False
    all_names = set(global_dict).union(SYNONYMS)

    # strip the token location info to avoid strange untokenize results
    tokens = [(tok[0], tok[1]) for tok in tokens]

    for token in tokens:
        # Only consider names longer than one character: single letters
        # are almost certainly intended as symbols, not typos.
        if (token[0] == NAME and
            token[1] not in all_names and
            len(token[1]) > 1):
            matches = difflib.get_close_matches(token[1], all_names)

            if matches and matches[0] == token[1]:
                # Best match is the name itself; use the next suggestion.
                matches = matches[1:]  # first match can be the same token

            if matches:
                result.append((NAME, matches[0]))
                has_result = True
                continue
        result.append(token)

    if has_result:
        return sympy_tokenize.untokenize(result).strip()
    return None
| bsd-3-clause |
fschwiet/letscodejavascript | node_modules/npm/node_modules/node-gyp/gyp/test/mac/gyptest-postbuild-fail.py | 84 | 1594 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a failing postbuild step lets the build fail.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
  # set |match| to ignore build stderr output.
  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
                         match = lambda a, b: True)

  test.run_gyp('test.gyp', chdir='postbuild-fail')

  # Each generator reports build failure with a different exit status.
  build_error_code = {
    'xcode': 1,
    'make': 2,
    'ninja': 1,
  }[test.format]

  # If a postbuild fails, all postbuilds should be re-run on the next build.
  # However, even if the first postbuild fails the other postbuilds are still
  # executed.


  # Non-bundles
  test.build('test.gyp', 'nonbundle', chdir='postbuild-fail',
             status=build_error_code)
  test.built_file_must_exist('static_touch',
                             chdir='postbuild-fail')
  # Check for non-up-to-date-ness by checking if building again produces an
  # error.
  test.build('test.gyp', 'nonbundle', chdir='postbuild-fail',
             status=build_error_code)


  # Bundles
  test.build('test.gyp', 'bundle', chdir='postbuild-fail',
             status=build_error_code)
  test.built_file_must_exist('dynamic_touch',
                             chdir='postbuild-fail')
  # Check for non-up-to-date-ness by checking if building again produces an
  # error.
  test.build('test.gyp', 'bundle', chdir='postbuild-fail',
             status=build_error_code)

  test.pass_test()
| mit |
richardxx/mongoctl-service | build/lib.linux-x86_64-2.7/mongoctl/commands/misc/install.py | 2 | 9046 | __author__ = 'abdul'
import os
import platform
import urllib
import shutil
from mongoctl.mongo_version import is_valid_version
import mongoctl.config as config
from mongoctl.prompt import prompt_execute_task, is_interactive_mode
from mongoctl.utils import ensure_dir, dir_exists
from mongoctl.mongoctl_logging import log_info, log_error, log_exception
from mongoctl.errors import MongoctlException
from mongoctl.utils import call_command, which
from mongoctl.mongo_version import version_obj
from mongoctl.commands.command_utils import find_all_executables
###############################################################################
# CONSTS
###############################################################################
# Location of the latest-stable version string; fetched when the user does
# not specify a MongoDB version explicitly.
LATEST_VERSION_FILE_URL = "https://raw.github.com/mongolab/mongoctl/master/" \
                          "mongo_latest_stable_version.txt"
###############################################################################
# install command
###############################################################################
def install_command(parsed_options):
    # CLI entry point for "mongoctl install"; version may be None.
    install_mongodb(version=parsed_options.version)
###############################################################################
# uninstall command
###############################################################################
def uninstall_command(parsed_options):
    # CLI entry point for "mongoctl uninstall"; version is required.
    uninstall_mongodb(version=parsed_options.version)
###############################################################################
# list-versions command
###############################################################################
def list_versions_command(parsed_options):
    # Print a table of every MongoDB installation found on this machine.
    mongo_installations = find__all_mongo_installations()

    bar = "-" * 80
    print bar
    formatter = "%-20s %s"
    print formatter % ("VERSION", "LOCATION")
    print bar

    for install_dir,version in mongo_installations:
        print formatter % (version, install_dir)
    print "\n"
###############################################################################
# install_mongodb
###############################################################################
def install_mongodb(version):
    """Install the given MongoDB version for the current platform.

    ``version`` may be None, in which case the latest stable release
    (looked up remotely) is installed.
    """
    bits = platform.architecture()[0].replace("bit", "")
    os_name = platform.system().lower()
    # platform.system() reports 'darwin' on OS X; mongodb.org calls it 'osx'.
    if os_name == 'darwin' and platform.mac_ver():
        os_name = "osx"
    return do_install_mongodb(os_name, bits, version)
###############################################################################
def do_install_mongodb(os_name, bits, version):
    """Download, extract and install MongoDB ``version`` for a platform.

    Returns the installation directory, or None on failure (errors are
    logged, not raised, once the download phase has begun).
    """

    if version is None:
        version = fetch_latest_stable_version()
        log_info("Installing latest stable MongoDB version '%s'..." % version)
    # validate version string
    elif not is_valid_version(version):
        raise MongoctlException("Invalid version '%s'. Please provide a"
                                " valid MongoDB version." % version)

    mongodb_installs_dir = config.get_mongodb_installs_dir()
    if not mongodb_installs_dir:
        raise MongoctlException("No mongoDBInstallationsDirectory configured"
                                " in mongoctl.config")

    platform_spec = get_validate_platform_spec(os_name, bits)

    log_info("Running install for %s %sbit to "
             "mongoDBInstallationsDirectory (%s)..." % (os_name, bits,
                                                  mongodb_installs_dir))


    mongo_installation = get_mongo_installation(version)

    if mongo_installation is not None: # no-op
        log_info("You already have MongoDB %s installed ('%s'). "
                 "Nothing to do." % (version, mongo_installation))
        return mongo_installation

    archive_name = "mongodb-%s-%s.tgz" % (platform_spec, version)
    url = "http://fastdl.mongodb.org/%s/%s" % (os_name, archive_name)

    # Validate if the version exists
    response = urllib.urlopen(url)

    if response.getcode() != 200:
        msg = ("Unable to download from url '%s' (response code '%s'). "
               "It could be that version '%s' you specified does not exist."
               " Please double check the version you provide" %
               (url, response.getcode(), version))
        raise MongoctlException(msg)

    mongo_dir_name = "mongodb-%s-%s" % (platform_spec, version)
    install_dir = os.path.join(mongodb_installs_dir, mongo_dir_name)

    ensure_dir(mongodb_installs_dir)

    # XXX LOOK OUT! Two processes installing same version simultaneously => BAD.
    # TODO: mutex to protect the following

    if not dir_exists(install_dir):
        try:
            ## download the url
            download(url)
            extract_archive(archive_name)
            log_info("Moving extracted folder to %s" % mongodb_installs_dir)
            shutil.move(mongo_dir_name, mongodb_installs_dir)

            os.remove(archive_name)
            log_info("Deleting archive %s" % archive_name)

            log_info("MongoDB %s installed successfully!" % version)
            return install_dir
        except Exception, e:
            log_exception(e)
            log_error("Failed to install MongoDB '%s'. Cause: %s" %
                      (version, e))
###############################################################################
# uninstall_mongodb
###############################################################################
def uninstall_mongodb(version):
    """Remove the installation matching ``version``, prompting first."""

    # validate version string
    if not is_valid_version(version):
        raise MongoctlException("Invalid version '%s'. Please provide a"
                                " valid MongoDB version." % version)

    mongo_installation = get_mongo_installation(version)

    if mongo_installation is None: # no-op
        msg = ("Cannot find a MongoDB installation for version '%s'. Please"
               " use list-versions to see all possible versions " % version)
        log_info(msg)
        return

    log_info("Found MongoDB '%s' in '%s'" % (version, mongo_installation))

    def rm_mongodb():
        # Deletes the whole installation directory tree.
        log_info("Deleting '%s'" % mongo_installation)
        shutil.rmtree(mongo_installation)
        log_info("MongoDB '%s' Uninstalled successfully!" % version);

    prompt_execute_task("Proceed uninstall?" , rm_mongodb)
###############################################################################
def fetch_latest_stable_version():
    """Fetch the latest stable MongoDB version string from the network."""
    response = urllib.urlopen(LATEST_VERSION_FILE_URL)
    if response.getcode() == 200:
        return response.read().strip()
    else:
        raise MongoctlException("Unable to fetch MongoDB latest stable version"
                                " from '%s' (Response code %s)" %
                                (LATEST_VERSION_FILE_URL, response.getcode()))
###############################################################################
def get_mongo_installation(version_str):
    """Return the install dir whose version equals ``version_str``, or None."""
    wanted = version_obj(version_str)
    matching = (location for location, installed
                in find__all_mongo_installations()
                if installed == wanted)
    return next(matching, None)
###############################################################################
def find__all_mongo_installations():
    """Return a list of (install_dir, version) for every mongod found.

    NOTE: the double underscore in the name appears to be a historical
    typo; kept because external callers reference this exact name.
    """
    all_installs = []
    all_mongod_exes = find_all_executables('mongod')
    for exe_path, exe_version in all_mongod_exes:
        # install dir is exe parent's (bin) parent
        install_dir = os.path.dirname(os.path.dirname(exe_path))
        all_installs.append((install_dir,exe_version))

    return all_installs
###############################################################################
def get_validate_platform_spec(os_name, bits):
    """Validate ``os_name`` and return the mongodb.org platform spec string.

    Raises MongoctlException for an unsupported OS.  ``bits`` is the
    architecture width as a string ('64' or '32').
    """
    if os_name not in ("linux", "osx", "win32", "sunos5"):
        raise MongoctlException("Unsupported OS %s" % os_name)

    if bits == "64":
        return "%s-x86_64" % os_name
    # 32-bit specs are irregular per OS.
    if os_name == "linux":
        return "linux-i686"
    if os_name == "sunos5":
        return "i86pc"
    # Remaining cases: osx and win32.
    return "%s-i386" % os_name
###############################################################################
def download(url):
    """Download ``url`` into the current directory via curl or wget."""
    log_info("Downloading %s..." % url)

    if which("curl"):
        download_cmd = ['curl', '-O']
        if not is_interactive_mode():
            # Silence progress output when not attached to a terminal.
            download_cmd.append('-Ss')
    elif which("wget"):
        download_cmd = ['wget']
    else:
        msg = ("Cannot download file.You need to have 'curl' or 'wget"
               "' command in your path in order to proceed.")
        raise MongoctlException(msg)

    download_cmd.append(url)
    call_command(download_cmd)
###############################################################################
def extract_archive(archive_name):
    """Extract a .tgz archive into the current directory using tar."""
    log_info("Extracting %s..." % archive_name)
    if not which("tar"):
        msg = ("Cannot extract archive.You need to have 'tar' command in your"
               " path in order to proceed.")
        raise MongoctlException(msg)

    tar_cmd = ['tar', 'xvf', archive_name]
    call_command(tar_cmd)
| mit |
begoldsm/autorest | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/Header/autorestswaggerbatheaderservice/operations/header_operations.py | 14 | 54924 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from .. import models
class HeaderOperations(object):
"""HeaderOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
    def __init__(self, client, config, serializer, deserializer):
        # AutoRest-generated plumbing: store the injected service client,
        # serializers and configuration for use by the operation methods.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer

        self.config = config
    def param_existing_key(
            self, user_agent, custom_headers=None, raw=False, **operation_config):
        """Send a post request with header value "User-Agent": "overwrite".

        NOTE: generated by AutoRest; manual edits will be lost on
        regeneration.

        :param user_agent: Send a post request with header value "User-Agent":
         "overwrite"
        :type user_agent: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: None
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises:
         :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
        """
        # Construct URL
        url = '/header/param/existingkey'

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)
        # The serialized user_agent is set after custom_headers so it wins
        # over any caller-supplied User-Agent.
        header_parameters['User-Agent'] = self._serialize.header("user_agent", user_agent, 'str')

        # Construct and send request
        request = self._client.post(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
def response_existing_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "User-Agent": "overwrite".
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/existingkey'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'User-Agent': 'str',
})
return client_raw_response
def param_protected_key(
self, content_type, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header value "Content-Type": "text/html".
:param content_type: Send a post request with header value
"Content-Type": "text/html"
:type content_type: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/protectedkey'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_protected_key(
self, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "Content-Type": "text/html".
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/protectedkey'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'Content-Type': 'str',
})
return client_raw_response
def param_integer(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "positive", "value":
1 or "scenario": "negative", "value": -2 .
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param value: Send a post request with header values 1 or -2
:type value: int
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/integer'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'int')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_integer(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "value": 1 or -2.
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/integer'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'int',
})
return client_raw_response
def param_long(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "positive", "value":
105 or "scenario": "negative", "value": -2 .
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param value: Send a post request with header values 105 or -2
:type value: long
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/long'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'long')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_long(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "value": 105 or -2.
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/long'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'long',
})
return client_raw_response
def param_float(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "positive", "value":
0.07 or "scenario": "negative", "value": -3.0.
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param value: Send a post request with header values 0.07 or -3.0
:type value: float
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/float'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'float')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_float(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "value": 0.07 or -3.0.
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/float'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'float',
})
return client_raw_response
def param_double(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "positive", "value":
7e120 or "scenario": "negative", "value": -3.0.
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param value: Send a post request with header values 7e120 or -3.0
:type value: float
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/double'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'float')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_double(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "value": 7e120 or -3.0.
:param scenario: Send a post request with header values "scenario":
"positive" or "negative"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/double'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'float',
})
return client_raw_response
def param_bool(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "true", "value":
true or "scenario": "false", "value": false.
:param scenario: Send a post request with header values "scenario":
"true" or "false"
:type scenario: str
:param value: Send a post request with header values true or false
:type value: bool
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/bool'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'bool')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_bool(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header value "value": true or false.
:param scenario: Send a post request with header values "scenario":
"true" or "false"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/bool'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'bool',
})
return client_raw_response
def param_string(
self, scenario, value=None, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "valid", "value":
"The quick brown fox jumps over the lazy dog" or "scenario": "null",
"value": null or "scenario": "empty", "value": "".
:param scenario: Send a post request with header values "scenario":
"valid" or "null" or "empty"
:type scenario: str
:param value: Send a post request with header values "The quick brown
fox jumps over the lazy dog" or null or ""
:type value: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/string'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
if value is not None:
header_parameters['value'] = self._serialize.header("value", value, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_string(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header values "The quick brown fox jumps over the
lazy dog" or null or "".
:param scenario: Send a post request with header values "scenario":
"valid" or "null" or "empty"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/string'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'str',
})
return client_raw_response
def param_date(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "valid", "value":
"2010-01-01" or "scenario": "min", "value": "0001-01-01".
:param scenario: Send a post request with header values "scenario":
"valid" or "min"
:type scenario: str
:param value: Send a post request with header values "2010-01-01" or
"0001-01-01"
:type value: date
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/date'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'date')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_date(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header values "2010-01-01" or "0001-01-01".
:param scenario: Send a post request with header values "scenario":
"valid" or "min"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/date'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'date',
})
return client_raw_response
def param_datetime(
self, scenario, value, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "valid", "value":
"2010-01-01T12:34:56Z" or "scenario": "min", "value":
"0001-01-01T00:00:00Z".
:param scenario: Send a post request with header values "scenario":
"valid" or "min"
:type scenario: str
:param value: Send a post request with header values
"2010-01-01T12:34:56Z" or "0001-01-01T00:00:00Z"
:type value: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/datetime'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
header_parameters['value'] = self._serialize.header("value", value, 'iso-8601')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_datetime(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header values "2010-01-01T12:34:56Z" or
"0001-01-01T00:00:00Z".
:param scenario: Send a post request with header values "scenario":
"valid" or "min"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/datetime'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'iso-8601',
})
return client_raw_response
def param_datetime_rfc1123(
self, scenario, value=None, custom_headers=None, raw=False, **operation_config):
"""Send a post request with header values "scenario": "valid", "value":
"Wed, 01 Jan 2010 12:34:56 GMT" or "scenario": "min", "value": "Mon, 01
Jan 0001 00:00:00 GMT".
:param scenario: Send a post request with header values "scenario":
"valid" or "min"
:type scenario: str
:param value: Send a post request with header values "Wed, 01 Jan 2010
12:34:56 GMT" or "Mon, 01 Jan 0001 00:00:00 GMT"
:type value: datetime
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/param/prim/datetimerfc1123'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
if value is not None:
header_parameters['value'] = self._serialize.header("value", value, 'rfc-1123')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def response_datetime_rfc1123(
self, scenario, custom_headers=None, raw=False, **operation_config):
"""Get a response with header values "Wed, 01 Jan 2010 12:34:56 GMT" or
"Mon, 01 Jan 0001 00:00:00 GMT".
:param scenario: Send a post request with header values "scenario":
"valid" or "min"
:type scenario: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: None
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises:
:class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
"""
# Construct URL
url = '/header/response/prim/datetimerfc1123'
# Construct parameters
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if custom_headers:
header_parameters.update(custom_headers)
header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
client_raw_response.add_headers({
'value': 'rfc-1123',
})
return client_raw_response
def param_duration(
        self, scenario, value, custom_headers=None, raw=False, **operation_config):
    """Send a post request with header values "scenario": "valid", "value":
    "P123DT22H14M12.011S".

    :param scenario: Send a post request with header values "scenario":
     "valid"
    :type scenario: str
    :param value: Send a post request with header values
     "P123DT22H14M12.011S"
    :type value: timedelta
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    url = '/header/param/prim/duration'
    query_parameters = {}

    # Base headers first; caller-supplied headers may extend them, then the
    # operation's own headers are serialized on top.
    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
    header_parameters['value'] = self._serialize.header("value", value, 'duration')

    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
def response_duration(
        self, scenario, custom_headers=None, raw=False, **operation_config):
    """Get a response with header values "P123DT22H14M12.011S".

    :param scenario: Send a post request with header values "scenario":
     "valid"
    :type scenario: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    # Construct URL
    url = '/header/response/prim/duration'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        # Expose the 'value' response header, deserialized as a duration.
        client_raw_response = ClientRawResponse(None, response)
        client_raw_response.add_headers({
            'value': 'duration',
        })
        return client_raw_response
def param_byte(
        self, scenario, value, custom_headers=None, raw=False, **operation_config):
    """Send a post request with header values "scenario": "valid", "value":
    "啊齄丂狛狜隣郎隣兀﨩".

    :param scenario: Send a post request with header values "scenario":
     "valid"
    :type scenario: str
    :param value: Send a post request with header values "啊齄丂狛狜隣郎隣兀﨩"
    :type value: bytearray
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    # Construct URL
    url = '/header/param/prim/byte'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
    # 'value' is required here (unlike param_enum), so it is set unconditionally.
    header_parameters['value'] = self._serialize.header("value", value, 'bytearray')

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
def response_byte(
        self, scenario, custom_headers=None, raw=False, **operation_config):
    """Get a response with header values "啊齄丂狛狜隣郎隣兀﨩".

    :param scenario: Send a post request with header values "scenario":
     "valid"
    :type scenario: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    # Construct URL
    url = '/header/response/prim/byte'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        # Expose the 'value' response header, deserialized as a bytearray.
        client_raw_response = ClientRawResponse(None, response)
        client_raw_response.add_headers({
            'value': 'bytearray',
        })
        return client_raw_response
def param_enum(
        self, scenario, value=None, custom_headers=None, raw=False, **operation_config):
    """Send a post request with header values "scenario": "valid", "value":
    "GREY" or "scenario": "null", "value": null.

    :param scenario: Send a post request with header values "scenario":
     "valid" or "null" or "empty"
    :type scenario: str
    :param value: Send a post request with header values 'GREY'. Possible
     values include: 'White', 'black', 'GREY'
    :type value: str or :class:`GreyscaleColors
     <fixtures.acceptancetestsheader.models.GreyscaleColors>`
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    # Construct URL
    url = '/header/param/prim/enum'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')
    # 'value' is optional: the "null" scenario omits the header entirely.
    if value is not None:
        header_parameters['value'] = self._serialize.header("value", value, 'GreyscaleColors')

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
def response_enum(
        self, scenario, custom_headers=None, raw=False, **operation_config):
    """Get a response with header values "GREY" or null.

    :param scenario: Send a post request with header values "scenario":
     "valid" or "null" or "empty"
    :type scenario: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    # Construct URL
    url = '/header/response/prim/enum'

    # Construct parameters
    query_parameters = {}

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    header_parameters['scenario'] = self._serialize.header("scenario", scenario, 'str')

    # Construct and send request
    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        # Expose the 'value' response header, deserialized into the
        # GreyscaleColors enum model.
        client_raw_response = ClientRawResponse(None, response)
        client_raw_response.add_headers({
            'value': models.GreyscaleColors,
        })
        return client_raw_response
def custom_request_id(
        self, custom_headers=None, raw=False, **operation_config):
    """Send x-ms-client-request-id = 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0 in
    the header of the request.

    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :rtype: None
    :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
     if raw=true
    :raises:
     :class:`ErrorException<fixtures.acceptancetestsheader.models.ErrorException>`
    """
    # The request id under test is baked into the route on the test server.
    url = '/header/custom/x-ms-client-request-id/9C4D50EE-2D56-4CD3-8152-34347DC9F2B0'
    query_parameters = {}

    header_parameters = {'Content-Type': 'application/json; charset=utf-8'}
    if custom_headers:
        header_parameters.update(custom_headers)

    request = self._client.post(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    if response.status_code not in [200]:
        raise models.ErrorException(self._deserialize, response)

    if raw:
        return ClientRawResponse(None, response)
| mit |
DavidNorman/tensorflow | third_party/toolchains/preconfig/ubuntu14.04/gcc-nvcc-cuda10.0/windows/msvc_wrapper_for_nvcc.py | 13 | 6700 | #!/usr/bin/env python
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Crosstool wrapper for compiling CUDA programs with nvcc on Windows.
DESCRIPTION:
This script is the Windows version of //third_party/gpus/crosstool/crosstool_wrapper_is_not_gcc
"""
from __future__ import print_function
from argparse import ArgumentParser
import os
import subprocess
import re
import sys
import pipes
# Template values set by cuda_autoconf.
CPU_COMPILER = ('/usr/bin/gcc')
GCC_HOST_COMPILER_PATH = ('/usr/bin/gcc')
NVCC_PATH = '/usr/local/cuda-10.0/bin/nvcc'
NVCC_VERSION = '10.0'
NVCC_TEMP_DIR = "C:\\Windows\\Temp\\nvcc_inter_files_tmp_dir"
supported_cuda_compute_capabilities = [ "3.0", "6.0" ]
def Log(s):
    """Emit a diagnostic message tagged with the crosstool prefix."""
    message = 'gpus/crosstool: {0}'.format(s)
    print(message)
def GetOptionValue(argv, option):
    """Extract the list of values for option from options.

    Args:
      argv: A list of strings, possibly the argv passed to main().
      option: The option whose value to extract, without the leading '/'.

    Returns:
      1. A list of values, either directly following the option,
      (eg., /opt val1 val2) or values collected from multiple occurrences of
      the option (eg., /opt val1 /opt val2).
      2. The leftover options.
    """
    # MSVC-style flags start with '/', so that is the parser's prefix char.
    parser = ArgumentParser(prefix_chars='/')
    parser.add_argument('/' + option, nargs='*', action='append')
    parsed, remaining = parser.parse_known_args(argv)
    collected = getattr(parsed, option, None)
    if not collected:
        return ([], remaining)
    # 'append' + nargs='*' yields a list of lists; flatten it.
    values = []
    for chunk in collected:
        values.extend(chunk)
    return (values, remaining)
def _update_options(nvcc_options):
    """Translate nvcc option names that were renamed after CUDA 7.0."""
    # CUDA 7.0 still used the old spellings; later versions renamed them.
    if NVCC_VERSION in ("7.0",):
        return nvcc_options
    renames = {"relaxed-constexpr": "expt-relaxed-constexpr"}
    return [renames.get(opt, opt) for opt in nvcc_options]
def GetNvccOptions(argv):
    """Collect the -nvcc_options values from argv.

    Args:
      argv: A list of strings, possibly the argv passed to main().

    Returns:
      1. The string that can be passed directly to nvcc.
      2. The leftover options.
    """
    parser = ArgumentParser()
    parser.add_argument('-nvcc_options', nargs='*', action='append')
    parsed, remaining = parser.parse_known_args(argv)
    if not parsed.nvcc_options:
        return ([], remaining)
    # Flatten the list-of-lists and normalize option names for this
    # CUDA version, then prefix each with '--' for nvcc.
    options = _update_options(sum(parsed.nvcc_options, []))
    return (['--' + opt for opt in options], remaining)
def InvokeNvcc(argv, log=False):
    """Call nvcc with arguments assembled from argv.

    Args:
      argv: A list of strings, possibly the argv passed to main().
      log: True if logging is requested.

    Returns:
      The exit code of the spawned nvcc process.

    Raises:
      RuntimeError: if no CUDA source file, or not exactly one /Fo output
        file, is present in argv.
    """
    # Raw strings so the regex escapes are not interpreted by Python
    # (non-raw '\.' is a DeprecationWarning and fragile).
    src_files = [f for f in argv if
                 re.search(r'\.cpp$|\.cc$|\.c$|\.cxx$|\.C$', f)]
    if len(src_files) == 0:
        # Fixed: 'Error' was previously an undefined name here (it would
        # have surfaced as a NameError); raise a concrete exception type.
        raise RuntimeError('No source files found for cuda compilation.')

    out_file = [f for f in argv if f.startswith('/Fo')]
    if len(out_file) != 1:
        raise RuntimeError(
            'Please specify exactly one output file for cuda compilation.')
    out = ['-o', out_file[0][len('/Fo'):]]

    nvcc_compiler_options, argv = GetNvccOptions(argv)

    opt_option, argv = GetOptionValue(argv, 'O')
    # Default to a debug build; '/O<anything but d>' selects optimization.
    opt = ['-g', '-G']
    if (len(opt_option) > 0 and opt_option[0] != 'd'):
        opt = ['-O2']

    include_options, argv = GetOptionValue(argv, 'I')
    includes = ["-I " + include for include in include_options]

    defines, argv = GetOptionValue(argv, 'D')
    defines = ['-D' + define for define in defines]

    undefines, argv = GetOptionValue(argv, 'U')
    undefines = ['-U' + define for define in undefines]

    # The rest of the unrecognized options should be passed to host compiler
    host_compiler_options = [option for option in argv if option not in (src_files + out_file)]

    m_options = ["-m64"]

    nvccopts = ['-D_FORCE_INLINES']
    for capability in supported_cuda_compute_capabilities:
        capability = capability.replace('.', '')
        nvccopts += [r'-gencode=arch=compute_%s,"code=sm_%s,compute_%s"' % (
            capability, capability, capability)]
    nvccopts += nvcc_compiler_options
    nvccopts += undefines
    nvccopts += defines
    nvccopts += m_options
    nvccopts += ['--compiler-options="' + " ".join(host_compiler_options) + '"']
    nvccopts += ['-x', 'cu'] + opt + includes + out + ['-c'] + src_files
    # If we don't specify --keep-dir, nvcc will generate intermediate files under TEMP
    # Put them under NVCC_TEMP_DIR instead, then Bazel can ignore files under NVCC_TEMP_DIR during dependency check
    # http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#options-for-guiding-compiler-driver
    # Different actions are sharing NVCC_TEMP_DIR, so we cannot remove it if the directory already exists.
    if os.path.isfile(NVCC_TEMP_DIR):
        os.remove(NVCC_TEMP_DIR)
    if not os.path.exists(NVCC_TEMP_DIR):
        os.makedirs(NVCC_TEMP_DIR)
    nvccopts += ['--keep', '--keep-dir', NVCC_TEMP_DIR]
    cmd = [NVCC_PATH] + nvccopts
    if log:
        Log(cmd)
    proc = subprocess.Popen(cmd,
                            stdout=sys.stdout,
                            stderr=sys.stderr,
                            env=os.environ.copy(),
                            shell=True)
    proc.wait()
    return proc.returncode
def main():
    """Dispatch to nvcc for '-x cuda' invocations, else to the CPU compiler."""
    parser = ArgumentParser()
    parser.add_argument('-x', nargs=1)
    parser.add_argument('--cuda_log', action='store_true')
    args, leftover = parser.parse_known_args(sys.argv[1:])

    if args.x and args.x[0] == 'cuda':
        if args.cuda_log: Log('-x cuda')
        # Quote each remaining argument so it survives the shell invocation.
        leftover = [pipes.quote(s) for s in leftover]
        if args.cuda_log: Log('using nvcc')
        return InvokeNvcc(leftover, log=args.cuda_log)

    # Strip our flags before passing through to the CPU compiler for files which
    # are not -x cuda. We can't just pass 'leftover' because it also strips -x.
    # We not only want to pass -x to the CPU compiler, but also keep it in its
    # relative location in the argv list (the compiler is actually sensitive to
    # this).
    cpu_compiler_flags = [flag for flag in sys.argv[1:]
                          if not flag.startswith(('--cuda_log'))
                          and not flag.startswith(('-nvcc_options'))]

    return subprocess.call([CPU_COMPILER] + cpu_compiler_flags)
if __name__ == '__main__':
    # Propagate the compiler's exit code back to the build system.
    sys.exit(main())
| apache-2.0 |
OCA/carrier-delivery | base_delivery_carrier_label/models/delivery_carrier.py | 1 | 1276 | # -*- coding: utf-8 -*-
# Copyright 2012 Akretion <http://www.akretion.com>.
# Copyright 2013-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
class DeliveryCarrier(models.Model):
    """Extend delivery methods with label-printing metadata and options."""

    _inherit = 'delivery.carrier'

    @api.model
    def _get_carrier_type_selection(self):
        """Hook for modules that register concrete carrier types."""
        return []

    carrier_type = fields.Selection(
        selection='_get_carrier_type_selection',
        string='Type',
        help="Carrier type (combines several delivery methods)",
        oldname='type',
    )
    code = fields.Char(
        help="Delivery Method Code (according to carrier)",
    )
    description = fields.Text()
    available_option_ids = fields.One2many(
        comodel_name='delivery.carrier.option',
        inverse_name='carrier_id',
        string='Option',
    )

    @api.multi
    def default_options(self):
        """ Returns default and available options for a carrier """
        defaults = self.env['delivery.carrier.option'].browse()
        for option in self.available_option_ids:
            if option.mandatory or option.by_default:
                defaults |= option
        return defaults
| agpl-3.0 |
nmayorov/scipy | benchmarks/benchmarks/signal_filtering.py | 3 | 2439 | import numpy as np
import timeit
from concurrent.futures import ThreadPoolExecutor, wait
try:
from scipy.signal import lfilter, firwin, decimate, butter, sosfilt
except ImportError:
pass
from .common import Benchmark
class Decimate(Benchmark):
    # Benchmark scipy.signal.decimate over downsampling factor, filter
    # design, and the zero_phase (filtfilt-style) switch.
    param_names = ['q', 'ftype', 'zero_phase']
    params = [
        [2, 10, 30],
        ['iir', 'fir'],
        [True, False]
    ]

    def setup(self, q, ftype, zero_phase):
        np.random.seed(123456)
        sample_rate = 10000.
        # 1e6-sample two-tone signal: 500 Hz plus a weaker 4 kHz component.
        t = np.arange(int(1e6), dtype=np.float64) / sample_rate
        self.sig = np.sin(2*np.pi*500*t) + 0.3 * np.sin(2*np.pi*4e3*t)

    def time_decimate(self, q, ftype, zero_phase):
        decimate(self.sig, q, ftype=ftype, zero_phase=zero_phase)
class Lfilter(Benchmark):
    """Benchmark scipy.signal.lfilter with FIR taps of varying length."""

    param_names = ['n_samples', 'numtaps']
    params = [
        [1e3, 50e3, 1e6],
        [9, 23, 51]
    ]

    def setup(self, n_samples, numtaps):
        np.random.seed(125678)
        fs = 25000.
        t = np.arange(n_samples, dtype=np.float64) / fs
        cutoff_hz = 3000.0
        # Two-tone input: 500 Hz plus a weaker 11 kHz component.
        self.sig = np.sin(2*np.pi*500*t) + 0.3 * np.sin(2*np.pi*11e3*t)
        # Low-pass FIR design; firwin takes the cutoff relative to Nyquist.
        self.coeff = firwin(numtaps, cutoff_hz / (fs / 2.))

    def time_lfilter(self, n_samples, numtaps):
        lfilter(self.coeff, 1.0, self.sig)
class ParallelSosfilt(Benchmark):
    # Benchmark sosfilt fanned out across a thread pool — presumably to
    # measure multi-threaded scaling; confirm against asv docs if changed.
    timeout = 100
    timer = timeit.default_timer

    param_names = ['n_samples', 'threads']
    params = [
        [1e3, 10e3],
        [1, 2, 4]
    ]

    def setup(self, n_samples, threads):
        self.filt = butter(8, 8e-6, "lowpass", output="sos")
        self.data = np.arange(int(n_samples) * 3000).reshape(int(n_samples), 3000)
        # One chunk per worker thread.
        self.chunks = np.array_split(self.data, threads)

    def time_sosfilt(self, n_samples, threads):
        with ThreadPoolExecutor(max_workers=threads) as pool:
            futures = []
            for i in range(threads):
                futures.append(pool.submit(sosfilt, self.filt, self.chunks[i]))
            # Block until every chunk has been filtered.
            wait(futures)
class Sosfilt(Benchmark):
    """Benchmark second-order-section filtering of random noise."""

    param_names = ['n_samples', 'order']
    params = [
        [1000, 1000000],
        [6, 20]
    ]

    def setup(self, n_samples, order):
        # Narrow band-pass design expressed as second-order sections.
        self.sos = butter(order, [0.1575, 0.1625], 'band', output='sos')
        rng = np.random.RandomState(0)
        self.y = rng.randn(n_samples)

    def time_sosfilt_basic(self, n_samples, order):
        sosfilt(self.sos, self.y)
| bsd-3-clause |
mixturemodel-flow/tensorflow | tensorflow/contrib/slim/nets.py | 191 | 1609 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TF-Slim Nets.
## Standard Networks.
@@alexnet_v2
@@inception_v1
@@inception_v1_base
@@inception_v2
@@inception_v2_base
@@inception_v3
@@inception_v3_base
@@overfeat
@@vgg_a
@@vgg_16
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,
# Collapse nets into a single namespace.
from tensorflow.contrib.slim.python.slim.nets import alexnet
from tensorflow.contrib.slim.python.slim.nets import inception
from tensorflow.contrib.slim.python.slim.nets import overfeat
from tensorflow.contrib.slim.python.slim.nets import resnet_utils
from tensorflow.contrib.slim.python.slim.nets import resnet_v1
from tensorflow.contrib.slim.python.slim.nets import resnet_v2
from tensorflow.contrib.slim.python.slim.nets import vgg
from tensorflow.python.util.all_util import make_all
# pylint: enable=unused-import
__all__ = make_all(__name__)
| apache-2.0 |
bigmonachus/Delaunay | site_scons/site_tools/scons_qt4/test/basic/reentrant/sconstest-reentrant.py | 6 | 1599 | #!/usr/bin/env python
#
# Copyright (c) 2001-2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""
Test creation from a copied environment that already has QT variables.
This makes sure the tool initialization is re-entrant.
"""
import TestSCons
test = TestSCons.TestSCons()
test.dir_fixture('image')
test.file_fixture('../../qtenv.py')
test.file_fixture('../../../__init__.py','site_scons/site_tools/qt4/__init__.py')
test.run()
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-3.0 |
lovexiaov/SandwichApp | venv/lib/python2.7/site-packages/wheel/signatures/__init__.py | 565 | 3779 | """
Create and verify jws-js format Ed25519 signatures.
"""
__all__ = [ 'sign', 'verify' ]
import json
from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary
ed25519ll = None
ALG = "Ed25519"
def get_ed25519ll():
    """Lazy import-and-test of ed25519 module.

    Prefers the compiled `ed25519ll` package; falls back to the bundled
    pure-Python implementation when it is unavailable. The result is
    cached in the module-level `ed25519ll` global.
    """
    global ed25519ll
    if not ed25519ll:
        try:
            # 'global' above makes this import rebind the module-level name.
            import ed25519ll  # fast (thousands / s)
        except (ImportError, OSError):  # pragma nocover
            from . import ed25519py as ed25519ll  # pure Python (hundreds / s)
        # Self-check the chosen backend once with a sign/verify round trip.
        test()
    return ed25519ll
def sign(payload, keypair):
    """Return a JWS-JS format signature given a JSON-serializable payload and
    an Ed25519 keypair."""
    get_ed25519ll()

    header = {
        "alg": ALG,
        "jwk": {
            "kty": ALG,  # alg -> kty in jwk-08.
            "vk": native(urlsafe_b64encode(keypair.vk))
        }
    }

    # sort_keys makes the JSON serialization deterministic, so the signed
    # bytes are reproducible for the same header/payload.
    encoded_header = urlsafe_b64encode(binary(json.dumps(header, sort_keys=True)))
    encoded_payload = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True)))
    secured_input = b".".join((encoded_header, encoded_payload))
    sig_msg = ed25519ll.crypto_sign(secured_input, keypair.sk)
    # crypto_sign prepends the signature to the message; keep only the signature.
    signature = sig_msg[:ed25519ll.SIGNATUREBYTES]
    encoded_signature = urlsafe_b64encode(signature)

    return {"recipients":
            [{"header": native(encoded_header),
              "signature": native(encoded_signature)}],
            "payload": native(encoded_payload)}
def assertTrue(condition, message=""):
    """Raise ValueError(message) unless *condition* is truthy."""
    if condition:
        return
    raise ValueError(message)
def verify(jwsjs):
    """Return (decoded headers, payload) if all signatures in jwsjs are
    consistent, else raise ValueError.

    Caller must decide whether the keys are actually trusted."""
    get_ed25519ll()
    # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+)
    recipients = jwsjs["recipients"]
    encoded_payload = binary(jwsjs["payload"])
    headers = []
    for recipient in recipients:
        assertTrue(len(recipient) == 2, "Unknown recipient key {0}".format(recipient))
        h = binary(recipient["header"])
        s = binary(recipient["signature"])
        header = json.loads(native(urlsafe_b64decode(h)))
        assertTrue(header["alg"] == ALG,
                   "Unexpected algorithm {0}".format(header["alg"]))
        if "alg" in header["jwk"] and not "kty" in header["jwk"]:
            header["jwk"]["kty"] = header["jwk"]["alg"]  # b/w for JWK < -08
        assertTrue(header["jwk"]["kty"] == ALG,  # true for Ed25519
                   "Unexpected key type {0}".format(header["jwk"]["kty"]))
        vk = urlsafe_b64decode(binary(header["jwk"]["vk"]))
        secured_input = b".".join((h, encoded_payload))
        sig = urlsafe_b64decode(s)
        # crypto_sign_open expects signature||message and raises on mismatch.
        sig_msg = sig + secured_input
        verified_input = native(ed25519ll.crypto_sign_open(sig_msg, vk))
        verified_header, verified_payload = verified_input.split('.')
        verified_header = binary(verified_header)
        decoded_header = native(urlsafe_b64decode(verified_header))
        headers.append(json.loads(decoded_header))

    # NOTE(review): uses verified_payload from the *last* loop iteration;
    # all recipients sign the same payload, so any iteration's value works.
    verified_payload = binary(verified_payload)

    # only return header, payload that have passed through the crypto library.
    payload = json.loads(native(urlsafe_b64decode(verified_payload)))

    return headers, payload
def test():
    """Round-trip self-check: sign a payload, verify it, then make sure a
    tampered payload is rejected."""
    kp = ed25519ll.crypto_sign_keypair()
    payload = {'test': 'onstartup'}
    # Serialize/deserialize to mimic a signature that crossed the wire.
    jwsjs = json.loads(json.dumps(sign(payload, kp)))
    verify(jwsjs)
    # Corrupt the payload; verification must now fail with ValueError.
    jwsjs['payload'] += 'x'
    try:
        verify(jwsjs)
    except ValueError:
        pass
    else:  # pragma no cover
        raise RuntimeError("No error from bad wheel.signatures payload.")
| apache-2.0 |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.5.0/Lib/ctypes/test/test_callbacks.py | 77 | 7848 | import unittest
from ctypes import *
from ctypes.test import need_symbol
import _ctypes_test
class Callbacks(unittest.TestCase):
    # Round-trips each fundamental C type through a ctypes callback: the
    # Python callback is wrapped in a CFUNCTYPE prototype, called through
    # ctypes, and must receive and echo the argument unchanged.
    functype = CFUNCTYPE

##    def tearDown(self):
##        import gc
##        gc.collect()

    def callback(self, *args):
        # Record the marshalled arguments for inspection and echo the last one.
        self.got_args = args
        return args[-1]

    def check_type(self, typ, arg):
        # Round-trip through a single-argument prototype "typ f(typ)"...
        PROTO = self.functype.__func__(typ, typ)
        result = PROTO(self.callback)(arg)
        if typ == c_float:
            # float loses precision on the double -> float -> double round trip.
            self.assertAlmostEqual(result, arg, places=5)
        else:
            self.assertEqual(self.got_args, (arg,))
            self.assertEqual(result, arg)

        # ...and through a two-argument prototype "typ f(c_byte, typ)".
        PROTO = self.functype.__func__(typ, c_byte, typ)
        result = PROTO(self.callback)(-3, arg)
        if typ == c_float:
            self.assertAlmostEqual(result, arg, places=5)
        else:
            self.assertEqual(self.got_args, (-3, arg))
            self.assertEqual(result, arg)

    ################

    def test_byte(self):
        self.check_type(c_byte, 42)
        self.check_type(c_byte, -42)

    def test_ubyte(self):
        self.check_type(c_ubyte, 42)

    def test_short(self):
        self.check_type(c_short, 42)
        self.check_type(c_short, -42)

    def test_ushort(self):
        self.check_type(c_ushort, 42)

    def test_int(self):
        self.check_type(c_int, 42)
        self.check_type(c_int, -42)

    def test_uint(self):
        self.check_type(c_uint, 42)

    def test_long(self):
        self.check_type(c_long, 42)
        self.check_type(c_long, -42)

    def test_ulong(self):
        self.check_type(c_ulong, 42)

    def test_longlong(self):
        self.check_type(c_longlong, 42)
        self.check_type(c_longlong, -42)

    def test_ulonglong(self):
        self.check_type(c_ulonglong, 42)

    def test_float(self):
        # only almost equal: double -> float -> double
        import math
        self.check_type(c_float, math.e)
        self.check_type(c_float, -math.e)

    def test_double(self):
        self.check_type(c_double, 3.14)
        self.check_type(c_double, -3.14)

    def test_longdouble(self):
        self.check_type(c_longdouble, 3.14)
        self.check_type(c_longdouble, -3.14)

    def test_char(self):
        self.check_type(c_char, b"x")
        self.check_type(c_char, b"a")

    # disabled: would now (correctly) raise a RuntimeWarning about
    # a memory leak.  A callback function cannot return a non-integral
    # C type without causing a memory leak.
    @unittest.skip('test disabled')
    def test_char_p(self):
        self.check_type(c_char_p, "abc")
        self.check_type(c_char_p, "def")

    def test_pyobject(self):
        o = ()
        from sys import getrefcount as grc
        for o in (), [], object():
            initial = grc(o)  # NOTE(review): unused; kept for reference
            # This call leaks a reference to 'o'...
            self.check_type(py_object, o)
            before = grc(o)
            # ...but this call doesn't leak any more.  Where is the refcount?
            self.check_type(py_object, o)
            after = grc(o)
            self.assertEqual((after, o), (before, o))

    def test_unsupported_restype_1(self):
        # Only "fundamental" result types are supported for callback
        # functions, the type must have a non-NULL stgdict->setfunc.
        # POINTER(c_double), for example, is not supported.
        prototype = self.functype.__func__(POINTER(c_double))
        # The type is checked when the prototype is called
        self.assertRaises(TypeError, prototype, lambda: None)

    def test_unsupported_restype_2(self):
        prototype = self.functype.__func__(object)
        self.assertRaises(TypeError, prototype, lambda: None)

    def test_issue_7959(self):
        # Callback objects created from bound methods must not keep their
        # instances alive forever.
        proto = self.functype.__func__(None)

        class X(object):
            def func(self): pass
            def __init__(self):
                self.v = proto(self.func)

        import gc
        for i in range(32):
            X()
        gc.collect()
        live = [x for x in gc.get_objects()
                if isinstance(x, X)]
        self.assertEqual(len(live), 0)

    def test_issue12483(self):
        # A GC run triggered from a __del__ during callback creation must
        # not crash the interpreter.
        import gc
        class Nasty:
            def __del__(self):
                gc.collect()
        CFUNCTYPE(None)(lambda x=Nasty(): None)
@need_symbol('WINFUNCTYPE')
class StdcallCallbacks(Callbacks):
    # Re-run the whole Callbacks suite through the stdcall calling
    # convention; skipped on platforms without WINFUNCTYPE.
    try:
        functype = WINFUNCTYPE
    except NameError:
        pass
################################################################
class SampleCallbacksTestCase(unittest.TestCase):
    # End-to-end callback scenarios against the _ctypes_test helper dll
    # and libc, covering regressions from real bug reports.

    def test_integrate(self):
        # Derived from some then non-working code, posted by David Foster
        dll = CDLL(_ctypes_test.__file__)

        # The function prototype called by 'integrate': double func(double);
        CALLBACK = CFUNCTYPE(c_double, c_double)

        # The integrate function itself, exposed from the _ctypes_test dll
        integrate = dll.integrate
        integrate.argtypes = (c_double, c_double, CALLBACK, c_long)
        integrate.restype = c_double

        def func(x):
            return x**2

        result = integrate(0.0, 1.0, CALLBACK(func), 10)
        diff = abs(result - 1./3.)

        self.assertLess(diff, 0.01, "%s not less than 0.01" % diff)

    def test_issue_8959_a(self):
        from ctypes.util import find_library
        libc_path = find_library("c")
        if not libc_path:
            self.skipTest('could not find libc')
        libc = CDLL(libc_path)

        @CFUNCTYPE(c_int, POINTER(c_int), POINTER(c_int))
        def cmp_func(a, b):
            return a[0] - b[0]

        array = (c_int * 5)(5, 1, 99, 7, 33)

        libc.qsort(array, len(array), sizeof(c_int), cmp_func)
        self.assertEqual(array[:], [1, 5, 7, 33, 99])

    @need_symbol('WINFUNCTYPE')
    def test_issue_8959_b(self):
        from ctypes.wintypes import BOOL, HWND, LPARAM
        global windowCount
        windowCount = 0

        @WINFUNCTYPE(BOOL, HWND, LPARAM)
        def EnumWindowsCallbackFunc(hwnd, lParam):
            global windowCount
            windowCount += 1
            return True  # Allow windows to keep enumerating

        windll.user32.EnumWindows(EnumWindowsCallbackFunc, 0)

    def test_callback_register_int(self):
        # Issue #8275: buggy handling of callback args under Win64
        # NOTE: should be run on release builds as well
        dll = CDLL(_ctypes_test.__file__)
        CALLBACK = CFUNCTYPE(c_int, c_int, c_int, c_int, c_int, c_int)

        # All this function does is call the callback with its args squared
        func = dll._testfunc_cbk_reg_int
        func.argtypes = (c_int, c_int, c_int, c_int, c_int, CALLBACK)
        func.restype = c_int

        def callback(a, b, c, d, e):
            return a + b + c + d + e

        result = func(2, 3, 4, 5, 6, CALLBACK(callback))
        self.assertEqual(result, callback(2*2, 3*3, 4*4, 5*5, 6*6))

    def test_callback_register_double(self):
        # Issue #8275: buggy handling of callback args under Win64
        # NOTE: should be run on release builds as well
        dll = CDLL(_ctypes_test.__file__)
        CALLBACK = CFUNCTYPE(c_double, c_double, c_double, c_double,
                             c_double, c_double)

        # All this function does is call the callback with its args squared
        func = dll._testfunc_cbk_reg_double
        func.argtypes = (c_double, c_double, c_double,
                         c_double, c_double, CALLBACK)
        func.restype = c_double

        def callback(a, b, c, d, e):
            return a + b + c + d + e

        result = func(1.1, 2.2, 3.3, 4.4, 5.5, CALLBACK(callback))
        self.assertEqual(result,
                         callback(1.1*1.1, 2.2*2.2, 3.3*3.3, 4.4*4.4, 5.5*5.5))
################################################################
# Allow running this test module directly (python test_callbacks.py).
if __name__ == '__main__':
    unittest.main()
| mit |
kornicameister/ansible-modules-extras | cloud/amazon/cloudtrail.py | 130 | 8364 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# DOCUMENTATION is parsed by Ansible's doc tooling (ansible-doc).
# BUGFIX: the bucket option was documented as "s3_bucket_prefix" while the
# module's argument spec and the EXAMPLES use "s3_bucket_name"; "name" is
# required by the argument spec, so it is documented as required here too.
DOCUMENTATION = """
---
module: cloudtrail
short_description: manage CloudTrail creation and deletion
description:
  - Creates or deletes CloudTrail configuration. Ensures logging is also enabled.
version_added: "2.0"
author:
    - "Ansible Core Team"
    - "Ted Timmons"
requirements:
  - "boto >= 2.21"
options:
  state:
    description:
      - add or remove CloudTrail configuration.
    required: true
    choices: ['enabled', 'disabled']
  name:
    description:
      - name for given CloudTrail configuration.
      - This is a primary key and is used to identify the configuration.
    required: true
  s3_bucket_name:
    description:
      - bucket to place CloudTrail in.
      - this bucket should exist and have the proper policy. See U(http://docs.aws.amazon.com/awscloudtrail/latest/userguide/aggregating_logs_regions_bucket_policy.html)
      - required when state=enabled.
    required: false
  s3_key_prefix:
    description:
      - prefix to keys in bucket. A trailing slash is not necessary and will be removed.
    required: false
  include_global_events:
    description:
      - record API calls from global services such as IAM and STS?
    required: false
    default: false
    choices: ["true", "false"]
  aws_secret_key:
    description:
      - AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
    required: false
    default: null
    aliases: [ 'ec2_secret_key', 'secret_key' ]
    version_added: "1.5"
  aws_access_key:
    description:
      - AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
    required: false
    default: null
    aliases: [ 'ec2_access_key', 'access_key' ]
    version_added: "1.5"
  region:
    description:
      - The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
    required: false
    aliases: ['aws_region', 'ec2_region']
    version_added: "1.5"
extends_documentation_fragment: aws
"""
EXAMPLES = """
- name: enable cloudtrail
local_action: cloudtrail
state=enabled name=main s3_bucket_name=ourbucket
s3_key_prefix=cloudtrail region=us-east-1
- name: enable cloudtrail with different configuration
local_action: cloudtrail
state=enabled name=main s3_bucket_name=ourbucket2
s3_key_prefix='' region=us-east-1
- name: remove cloudtrail
local_action: cloudtrail state=disabled name=main region=us-east-1
"""
# Import boto defensively so the module can report a clean error through
# fail_json (in main) instead of crashing when boto is not installed.
HAS_BOTO = False
try:
    import boto
    import boto.cloudtrail
    from boto.regioninfo import RegionInfo
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
class CloudTrailManager:
    """Handles cloudtrail configuration"""
    def __init__(self, module, region=None, **aws_connect_params):
        # NOTE: Python 2 except syntax below; this module predates py3 support.
        self.module = module
        self.region = region
        self.aws_connect_params = aws_connect_params
        self.changed = False
        try:
            self.conn = connect_to_aws(boto.cloudtrail, self.region, **self.aws_connect_params)
        except boto.exception.NoAuthHandlerFound, e:
            # Abort the module run with the boto auth error message.
            self.module.fail_json(msg=str(e))
    def view_status(self, name):
        '''Return the raw get_trail_status dict (includes IsLogging).'''
        return self.conn.get_trail_status(name)
    def view(self, name):
        '''Return the trail description dict for `name`, or None if absent.'''
        ret = self.conn.describe_trails(trail_name_list=[name])
        trailList = ret.get('trailList', [])
        if len(trailList) == 1:
            return trailList[0]
        return None
    def exists(self, name=None):
        '''True if a trail called `name` exists.'''
        ret = self.view(name)
        if ret:
            return True
        return False
    def enable_logging(self, name):
        '''Turn on logging for a cloudtrail that already exists. Throws Exception on error.'''
        self.conn.start_logging(name)
    def enable(self, **create_args):
        '''Create a new trail; kwargs are passed straight to boto create_trail.'''
        return self.conn.create_trail(**create_args)
    def update(self, **create_args):
        '''Update an existing trail; kwargs are passed straight to boto update_trail.'''
        return self.conn.update_trail(**create_args)
    def delete(self, name):
        '''Delete a given cloudtrail configuration. Throws Exception on error.'''
        self.conn.delete_trail(name)
def main():
    """Ansible entry point.

    Builds the argument spec, connects to AWS CloudTrail, then creates,
    updates or deletes the named trail so it matches the requested state,
    and makes sure logging is running for an enabled trail.  Honours
    check mode throughout; exits via module.exit_json/fail_json.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        state={'required': True, 'choices': ['enabled', 'disabled'] },
        name={'required': True, 'type': 'str' },
        s3_bucket_name={'required': False, 'type': 'str' },
        s3_key_prefix={'default':'', 'required': False, 'type': 'str' },
        include_global_events={'default':True, 'required': False, 'type': 'bool' },
    ))
    # BUGFIX: required_together must be a sequence of sequences.  The old
    # value (['state', 's3_bucket_name']) was a flat list, so AnsibleModule
    # iterated each string as a "group" and the constraint never worked.
    required_together = [['state', 's3_bucket_name']]
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_together=required_together)
    if not HAS_BOTO:
        module.fail_json(msg='boto is required.')
    ec2_url, access_key, secret_key, region = get_ec2_creds(module)
    aws_connect_params = dict(aws_access_key_id=access_key,
                              aws_secret_access_key=secret_key)
    if not region:
        module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
    ct_name = module.params['name']
    s3_bucket_name = module.params['s3_bucket_name']
    # remove trailing slash from the key prefix, really messes up the key structure.
    s3_key_prefix = module.params['s3_key_prefix'].rstrip('/')
    include_global_events = module.params['include_global_events']
    cf_man = CloudTrailManager(module, region=region, **aws_connect_params)
    results = { 'changed': False }
    if module.params['state'] == 'enabled':
        results['exists'] = cf_man.exists(name=ct_name)
        if results['exists']:
            results['view'] = cf_man.view(ct_name)
            # only update if the values have changed.
            if results['view']['S3BucketName'] != s3_bucket_name or \
                    results['view']['S3KeyPrefix'] != s3_key_prefix or \
                    results['view']['IncludeGlobalServiceEvents'] != include_global_events:
                if not module.check_mode:
                    results['update'] = cf_man.update(name=ct_name, s3_bucket_name=s3_bucket_name, s3_key_prefix=s3_key_prefix, include_global_service_events=include_global_events)
                results['changed'] = True
        else:
            if not module.check_mode:
                # doesn't exist. create it.
                results['enable'] = cf_man.enable(name=ct_name, s3_bucket_name=s3_bucket_name, s3_key_prefix=s3_key_prefix, include_global_service_events=include_global_events)
            results['changed'] = True
        # given cloudtrail should exist now. Enable the logging.
        results['view_status'] = cf_man.view_status(ct_name)
        results['was_logging_enabled'] = results['view_status'].get('IsLogging', False)
        if not results['was_logging_enabled']:
            if not module.check_mode:
                cf_man.enable_logging(ct_name)
                results['logging_enabled'] = True
            results['changed'] = True
    # delete the cloudtrail
    elif module.params['state'] == 'disabled':
        # check to see if it exists before deleting.
        results['exists'] = cf_man.exists(name=ct_name)
        if results['exists']:
            # it exists, so we should delete it and mark changed.
            if not module.check_mode:
                cf_man.delete(ct_name)
            results['changed'] = True
    module.exit_json(**results)
# import module snippets
# NOTE: Ansible replaces these wildcard imports with the real snippet code
# at module build time; they must remain star imports at the bottom of file.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 |
xyguo/scikit-learn | examples/cross_decomposition/plot_compare_cross_decomposition.py | 55 | 4761 | """
===================================
Compare cross decomposition methods
===================================
Simple usage of various cross decomposition algorithms:
- PLSCanonical
- PLSRegression, with multivariate response, a.k.a. PLS2
- PLSRegression, with univariate response, a.k.a. PLS1
- CCA
Given 2 multivariate covarying two-dimensional datasets, X, and Y,
PLS extracts the 'directions of covariance', i.e. the components of each
datasets that explain the most shared variance between both datasets.
This is apparent on the **scatterplot matrix** display: components 1 in
dataset X and dataset Y are maximally correlated (points lie around the
first diagonal). This is also true for components 2 in both dataset,
however, the correlation across datasets for different components is
weak: the point cloud is very spherical.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cross_decomposition import PLSCanonical, PLSRegression, CCA
###############################################################################
# Dataset based latent variables model
n = 500
# 2 latents vars:
l1 = np.random.normal(size=n)
l2 = np.random.normal(size=n)
# Each latent drives two of the four observed columns in each dataset.
latents = np.array([l1, l1, l2, l2]).T
X = latents + np.random.normal(size=4 * n).reshape((n, 4))
Y = latents + np.random.normal(size=4 * n).reshape((n, 4))
# BUGFIX: use floor division so the slice index is an int under Python 3
# (n / 2 is a float there and raises TypeError when used as an index).
X_train = X[:n // 2]
Y_train = Y[:n // 2]
X_test = X[n // 2:]
Y_test = Y[n // 2:]
print("Corr(X)")
print(np.round(np.corrcoef(X.T), 2))
print("Corr(Y)")
print(np.round(np.corrcoef(Y.T), 2))
###############################################################################
# Canonical (symmetric) PLS
# Transform data
# ~~~~~~~~~~~~~~
plsca = PLSCanonical(n_components=2)
plsca.fit(X_train, Y_train)
# Project both train and test sets onto the two shared latent components.
X_train_r, Y_train_r = plsca.transform(X_train, Y_train)
X_test_r, Y_test_r = plsca.transform(X_test, Y_test)
# Scatter plot of scores
# ~~~~~~~~~~~~~~~~~~~~~~
# 1) On diagonal plot X vs Y scores on each components
plt.figure(figsize=(12, 8))
plt.subplot(221)
plt.plot(X_train_r[:, 0], Y_train_r[:, 0], "ob", label="train")
plt.plot(X_test_r[:, 0], Y_test_r[:, 0], "or", label="test")
plt.xlabel("x scores")
plt.ylabel("y scores")
plt.title('Comp. 1: X vs Y (test corr = %.2f)' %
          np.corrcoef(X_test_r[:, 0], Y_test_r[:, 0])[0, 1])
plt.xticks(())
plt.yticks(())
plt.legend(loc="best")
plt.subplot(224)
plt.plot(X_train_r[:, 1], Y_train_r[:, 1], "ob", label="train")
plt.plot(X_test_r[:, 1], Y_test_r[:, 1], "or", label="test")
plt.xlabel("x scores")
plt.ylabel("y scores")
plt.title('Comp. 2: X vs Y (test corr = %.2f)' %
          np.corrcoef(X_test_r[:, 1], Y_test_r[:, 1])[0, 1])
plt.xticks(())
plt.yticks(())
plt.legend(loc="best")
# 2) Off diagonal plot components 1 vs 2 for X and Y
plt.subplot(222)
plt.plot(X_train_r[:, 0], X_train_r[:, 1], "*b", label="train")
plt.plot(X_test_r[:, 0], X_test_r[:, 1], "*r", label="test")
plt.xlabel("X comp. 1")
plt.ylabel("X comp. 2")
plt.title('X comp. 1 vs X comp. 2 (test corr = %.2f)'
          % np.corrcoef(X_test_r[:, 0], X_test_r[:, 1])[0, 1])
plt.legend(loc="best")
plt.xticks(())
plt.yticks(())
plt.subplot(223)
plt.plot(Y_train_r[:, 0], Y_train_r[:, 1], "*b", label="train")
plt.plot(Y_test_r[:, 0], Y_test_r[:, 1], "*r", label="test")
plt.xlabel("Y comp. 1")
plt.ylabel("Y comp. 2")
plt.title('Y comp. 1 vs Y comp. 2 , (test corr = %.2f)'
          % np.corrcoef(Y_test_r[:, 0], Y_test_r[:, 1])[0, 1])
plt.legend(loc="best")
plt.xticks(())
plt.yticks(())
plt.show()
###############################################################################
# PLS regression, with multivariate response, a.k.a. PLS2
n = 1000
q = 3
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
# True coefficient matrix: only the first two features matter.
B = np.array([[1, 2] + [0] * (p - 2)] * q).T
# each Yj = 1*X1 + 2*X2 + noize
Y = np.dot(X, B) + np.random.normal(size=n * q).reshape((n, q)) + 5
pls2 = PLSRegression(n_components=3)
pls2.fit(X, Y)
print("True B (such that: Y = XB + Err)")
print(B)
# compare pls2.coef_ with B
print("Estimated B")
print(np.round(pls2.coef_, 1))
pls2.predict(X)
###############################################################################
# PLS regression, with univariate response, a.k.a. PLS1
n = 1000
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
y = X[:, 0] + 2 * X[:, 1] + np.random.normal(size=n * 1) + 5
pls1 = PLSRegression(n_components=3)
pls1.fit(X, y)
# note that the number of components exceeds 1 (the dimension of y)
print("Estimated betas")
print(np.round(pls1.coef_, 1))
###############################################################################
# CCA (PLS mode B with symmetric deflation)
cca = CCA(n_components=2)
cca.fit(X_train, Y_train)
# BUGFIX: project with the CCA model that was just fitted; the original
# copy-pasted the earlier PLSCanonical estimator (plsca.transform), so the
# CCA fit was never actually used.
X_train_r, Y_train_r = cca.transform(X_train, Y_train)
X_test_r, Y_test_r = cca.transform(X_test, Y_test)
| bsd-3-clause |
cloudbase/neutron-virtualbox | neutron/tests/unit/notifiers/test_notifiers_nova.py | 2 | 15128 | # Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from novaclient import exceptions as nova_exceptions
from sqlalchemy.orm import attributes as sql_attr
from oslo_config import cfg
from neutron.common import constants
from neutron.db import models_v2
from neutron.notifiers import nova
from neutron.openstack.common import uuidutils
from neutron.tests import base
class TestNovaNotify(base.BaseTestCase):
    """Unit tests for the Nova notifier: port-status change events,
    floating IP network-changed events, and the batched send machinery."""
    def setUp(self, plugin=None):
        super(TestNovaNotify, self).setUp()
        # Minimal plugin stub: every port lookup returns the same compute
        # port so the notifier can resolve device_id/device_owner.
        class FakePlugin(object):
            def get_port(self, context, port_id):
                device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
                return {'device_id': device_id,
                        'device_owner': 'compute:None'}
        self.nova_notifier = nova.Notifier()
        self.nova_notifier._plugin_ref = FakePlugin()
    def test_notify_port_status_all_values(self):
        # Exercise every (previous, current) status transition pair.
        states = [constants.PORT_STATUS_ACTIVE, constants.PORT_STATUS_DOWN,
                  constants.PORT_STATUS_ERROR, constants.PORT_STATUS_BUILD,
                  sql_attr.NO_VALUE]
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        # test all combinations
        for previous_port_status in states:
            for current_port_status in states:
                port = models_v2.Port(id='port-uuid', device_id=device_id,
                                      device_owner="compute:",
                                      status=current_port_status)
                self._record_port_status_changed_helper(current_port_status,
                                                        previous_port_status,
                                                        port)
    def test_port_without_uuid_device_id_no_notify(self):
        # device_id is not a UUID -> no event should be recorded.
        port = models_v2.Port(id='port-uuid', device_id='compute_probe:',
                              device_owner='compute:',
                              status=constants.PORT_STATUS_ACTIVE)
        self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
                                                sql_attr.NO_VALUE,
                                                port)
    def test_port_without_device_owner_no_notify(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        port = models_v2.Port(id='port-uuid', device_id=device_id,
                              status=constants.PORT_STATUS_ACTIVE)
        self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
                                                sql_attr.NO_VALUE,
                                                port)
    def test_port_without_device_id_no_notify(self):
        port = models_v2.Port(id='port-uuid', device_owner="network:dhcp",
                              status=constants.PORT_STATUS_ACTIVE)
        self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
                                                sql_attr.NO_VALUE,
                                                port)
    def test_port_without_id_no_notify(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        port = models_v2.Port(device_id=device_id,
                              device_owner="compute:",
                              status=constants.PORT_STATUS_ACTIVE)
        self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
                                                sql_attr.NO_VALUE,
                                                port)
    def test_non_compute_instances_no_notify(self):
        # Non compute-owned ports (e.g. DHCP) never generate nova events.
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        port = models_v2.Port(id='port-uuid', device_id=device_id,
                              device_owner="network:dhcp",
                              status=constants.PORT_STATUS_ACTIVE)
        self._record_port_status_changed_helper(constants.PORT_STATUS_ACTIVE,
                                                sql_attr.NO_VALUE,
                                                port)
    def _record_port_status_changed_helper(self, current_port_status,
                                           previous_port_status, port):
        # Mirror the notifier's own eligibility rules: if the port cannot
        # produce an event, just return without asserting anything.
        if not (port.device_id and port.id and port.device_owner and
                port.device_owner.startswith('compute:') and
                uuidutils.is_uuid_like(port.device_id)):
            return
        if (previous_port_status == constants.PORT_STATUS_ACTIVE and
                current_port_status == constants.PORT_STATUS_DOWN):
            event_name = nova.VIF_UNPLUGGED
        elif (previous_port_status in [sql_attr.NO_VALUE,
                                       constants.PORT_STATUS_DOWN,
                                       constants.PORT_STATUS_BUILD]
              and current_port_status in [constants.PORT_STATUS_ACTIVE,
                                          constants.PORT_STATUS_ERROR]):
            event_name = nova.VIF_PLUGGED
        else:
            return
        status = nova.NEUTRON_NOVA_EVENT_STATUS_MAP.get(current_port_status)
        self.nova_notifier.record_port_status_changed(port,
                                                      current_port_status,
                                                      previous_port_status,
                                                      None)
        event = {'server_uuid': port.device_id, 'status': status,
                 'name': event_name, 'tag': 'port-uuid'}
        self.assertEqual(event, port._notify_event)
    def test_update_fixed_ip_changed(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        returned_obj = {'port':
                        {'device_owner': u'compute:dfd',
                         'id': u'bee50827-bcee-4cc8-91c1-a27b0ce54222',
                         'device_id': device_id}}
        expected_event = {'server_uuid': device_id,
                          'name': 'network-changed'}
        event = self.nova_notifier.create_port_changed_event('update_port',
                                                             {}, returned_obj)
        self.assertEqual(event, expected_event)
    def test_create_floatingip_notify(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        returned_obj = {'floatingip':
                        {'port_id': u'bee50827-bcee-4cc8-91c1-a27b0ce54222'}}
        expected_event = {'server_uuid': device_id,
                          'name': 'network-changed'}
        event = self.nova_notifier.create_port_changed_event(
            'create_floatingip', {}, returned_obj)
        self.assertEqual(event, expected_event)
    def test_create_floatingip_no_port_id_no_notify(self):
        # Floating IP not associated with any port -> no event.
        returned_obj = {'floatingip':
                        {'port_id': None}}
        event = self.nova_notifier.create_port_changed_event(
            'create_floatingip', {}, returned_obj)
        self.assertFalse(event, None)
    def test_delete_floatingip_notify(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        returned_obj = {'floatingip':
                        {'port_id': u'bee50827-bcee-4cc8-91c1-a27b0ce54222'}}
        expected_event = {'server_uuid': device_id,
                          'name': 'network-changed'}
        event = self.nova_notifier.create_port_changed_event(
            'delete_floatingip', {}, returned_obj)
        self.assertEqual(expected_event, event)
    def test_delete_floatingip_no_port_id_no_notify(self):
        returned_obj = {'floatingip':
                        {'port_id': None}}
        event = self.nova_notifier.create_port_changed_event(
            'delete_floatingip', {}, returned_obj)
        self.assertEqual(event, None)
    def test_associate_floatingip_notify(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        returned_obj = {'floatingip':
                        {'port_id': u'5a39def4-3d3f-473d-9ff4-8e90064b9cc1'}}
        original_obj = {'port_id': None}
        expected_event = {'server_uuid': device_id,
                          'name': 'network-changed'}
        event = self.nova_notifier.create_port_changed_event(
            'update_floatingip', original_obj, returned_obj)
        self.assertEqual(expected_event, event)
    def test_disassociate_floatingip_notify(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        returned_obj = {'floatingip': {'port_id': None}}
        original_obj = {'port_id': '5a39def4-3d3f-473d-9ff4-8e90064b9cc1'}
        expected_event = {'server_uuid': device_id,
                          'name': 'network-changed'}
        event = self.nova_notifier.create_port_changed_event(
            'update_floatingip', original_obj, returned_obj)
        self.assertEqual(expected_event, event)
    def test_no_notification_notify_nova_on_port_data_changes_false(self):
        # With the config flag off, send_events must never be reached.
        cfg.CONF.set_override('notify_nova_on_port_data_changes', False)
        with mock.patch.object(self.nova_notifier,
                               'send_events') as send_events:
            self.nova_notifier.send_network_change('update_floatingip',
                                                   {}, {})
            self.assertFalse(send_events.called, False)
    def test_nova_send_events_returns_bad_list(self):
        # Nova returning garbage must not raise out of send_events.
        with mock.patch.object(
            self.nova_notifier.nclient.server_external_events,
                'create') as nclient_create:
            nclient_create.return_value = 'i am a string!'
            self.nova_notifier.send_events()
    def test_nova_send_event_rasies_404(self):
        with mock.patch.object(
            self.nova_notifier.nclient.server_external_events,
                'create') as nclient_create:
            nclient_create.side_effect = nova_exceptions.NotFound
            self.nova_notifier.send_events()
    def test_nova_send_events_raises(self):
        with mock.patch.object(
            self.nova_notifier.nclient.server_external_events,
                'create') as nclient_create:
            nclient_create.side_effect = Exception
            self.nova_notifier.send_events()
    def test_nova_send_events_returns_non_200(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        with mock.patch.object(
            self.nova_notifier.nclient.server_external_events,
                'create') as nclient_create:
            nclient_create.return_value = [{'code': 404,
                                            'name': 'network-changed',
                                            'server_uuid': device_id}]
            self.nova_notifier.pending_events.append(
                {'name': 'network-changed', 'server_uuid': device_id})
            self.nova_notifier.send_events()
    def test_nova_send_events_return_200(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        with mock.patch.object(
            self.nova_notifier.nclient.server_external_events,
                'create') as nclient_create:
            nclient_create.return_value = [{'code': 200,
                                            'name': 'network-changed',
                                            'server_uuid': device_id}]
            self.nova_notifier.pending_events.append(
                {'name': 'network-changed', 'server_uuid': device_id})
            self.nova_notifier.send_events()
    def test_nova_send_events_multiple(self):
        device_id = '32102d7b-1cf4-404d-b50a-97aae1f55f87'
        with mock.patch.object(
            self.nova_notifier.nclient.server_external_events,
                'create') as nclient_create:
            nclient_create.return_value = [{'code': 200,
                                            'name': 'network-changed',
                                            'server_uuid': device_id},
                                           {'code': 200,
                                            'name': 'network-changed',
                                            'server_uuid': device_id}]
            self.nova_notifier.pending_events.append(
                {'name': 'network-changed', 'server_uuid': device_id})
            self.nova_notifier.pending_events.append(
                {'name': 'network-changed', 'server_uuid': device_id})
            self.nova_notifier.send_events()
    def test_queue_event_no_event(self):
        with mock.patch('eventlet.spawn_n') as spawn_n:
            self.nova_notifier.queue_event(None)
            self.assertEqual(0, len(self.nova_notifier.pending_events))
            self.assertEqual(0, spawn_n.call_count)
    def test_queue_event_first_event(self):
        with mock.patch('eventlet.spawn_n') as spawn_n:
            self.nova_notifier.queue_event(mock.Mock())
            self.assertEqual(1, len(self.nova_notifier.pending_events))
            self.assertEqual(1, spawn_n.call_count)
    def test_queue_event_multiple_events(self):
        # Several queued events share a single greenthread spawn (batching).
        with mock.patch('eventlet.spawn_n') as spawn_n:
            events = 6
            for i in range(0, events):
                self.nova_notifier.queue_event(mock.Mock())
            self.assertEqual(events, len(self.nova_notifier.pending_events))
            self.assertEqual(1, spawn_n.call_count)
    def test_queue_event_call_send_events(self):
        with mock.patch.object(self.nova_notifier,
                               'send_events') as send_events:
            with mock.patch('eventlet.spawn_n') as spawn_n:
                # Run the spawned callable synchronously for the assertion.
                spawn_n.side_effect = lambda func: func()
                self.nova_notifier.queue_event(mock.Mock())
                self.assertFalse(self.nova_notifier._waiting_to_send)
                send_events.assert_called_once_with()
    def test_reassociate_floatingip_without_disassociate_event(self):
        # Reassociating a floating IP must emit two events (disassociate
        # from the old port, then associate to the new one), in order.
        returned_obj = {'floatingip':
                        {'port_id': 'f5348a16-609a-4971-b0f0-4b8def5235fb'}}
        original_obj = {'port_id': '5a39def4-3d3f-473d-9ff4-8e90064b9cc1'}
        self.nova_notifier._waiting_to_send = True
        self.nova_notifier.send_network_change(
            'update_floatingip', original_obj, returned_obj)
        self.assertEqual(2, len(self.nova_notifier.pending_events))
        returned_obj_non = {'floatingip': {'port_id': None}}
        event_dis = self.nova_notifier.create_port_changed_event(
            'update_floatingip', original_obj, returned_obj_non)
        event_assoc = self.nova_notifier.create_port_changed_event(
            'update_floatingip', original_obj, returned_obj)
        self.assertEqual(self.nova_notifier.pending_events[0], event_dis)
        self.assertEqual(self.nova_notifier.pending_events[1], event_assoc)
| apache-2.0 |
hardikk/newfies-dialer | newfies/dialer_campaign/tests.py | 3 | 15255 | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2013 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.test import TestCase
from dialer_campaign.models import Campaign, Subscriber, \
common_contact_authorization
from dialer_campaign.forms import CampaignForm
from dialer_campaign.views import campaign_list, campaign_add, \
campaign_change, campaign_del, notify_admin, \
update_campaign_status_admin, \
get_url_campaign_status, campaign_duplicate
from dialer_campaign.tasks import campaign_running, \
collect_subscriber, campaign_expire_check
from dialer_settings.models import DialerSetting
from dialer_campaign.constants import SUBSCRIBER_STATUS
from common.utils import BaseAuthenticatedClient
class DialerCampaignView(BaseAuthenticatedClient):
    """Test cases for Campaign, Subscriber Admin Interface."""
    # NOTE: failUnlessEqual (a deprecated alias removed in Python 3.12)
    # has been replaced with assertEqual throughout; behavior is identical.
    def test_admin_campaign_view_list(self):
        """Test Function to check admin campaign list"""
        response = self.client.get('/admin/dialer_campaign/campaign/')
        self.assertEqual(response.status_code, 200)
    def test_admin_campaign_view_add(self):
        """Test Function to check admin campaign add"""
        response = self.client.get('/admin/dialer_campaign/campaign/add/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            '/admin/dialer_campaign/campaign/add/',
            data={
                "name": "mycampaign_admin",
                "description": "xyz",
                "startingdate": "1301392136.0",
                "expirationdate": "1301332136.0",
                "frequency": "20",
                "callmaxduration": "50",
                "maxretry": "3",
                "intervalretry": "3000",
                "calltimeout": "60",
                "aleg_gateway": "1",
                "user": "1",
                "content_object": "type:32-id:1",
                "extra_data": "2000"})
        self.assertEqual(response.status_code, 200)
    def test_admin_subscriber_view_list(self):
        """Test Function to check admin subscriber list"""
        response =\
            self.client.get('/admin/dialer_campaign/subscriber/')
        self.assertEqual(response.status_code, 200)
    def test_admin_subscriber_view_add(self):
        """Test Function to check admin subscriber add"""
        response =\
            self.client.get('/admin/dialer_campaign/subscriber/add/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post(
            '/admin/dialer_campaign/subscriber/add/',
            data={
                "status": "1",
                "campaign": "1",
                "duplicate_contact": "1234567",
                "count_attempt": "1",
                "completion_count_attempt": "1",
            })
        self.assertEqual(response.status_code, 200)
class DialerCampaignCustomerView(BaseAuthenticatedClient):
    """Test cases for Campaign, Subscriber Customer Interface."""
    # Fixtures provide an authenticated user plus the campaign/contact
    # graph the customer-facing views operate on.
    fixtures = ['auth_user.json', 'gateway.json', 'dialer_setting.json',
                'user_profile.json', 'contenttype.json',
                'phonebook.json', 'contact.json', 'survey.json',
                'dnc_list.json', 'dnc_contact.json',
                'campaign.json', 'subscriber.json',]
    def test_campaign_view_list(self):
        """Test Function to check campaign list"""
        response = self.client.get('/campaign/')
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'frontend/campaign/list.html')
        # Repeat through the view callable directly (RequestFactory path).
        request = self.factory.get('/campaign/')
        request.user = self.user
        request.session = {}
        response = campaign_list(request)
        self.assertEqual(response.status_code, 200)
    def test_campaign_view_add(self):
        """Test Function to check add campaign"""
        request = self.factory.get('/campaign/add/')
        request.user = self.user
        request.session = {}
        response = campaign_add(request)
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/campaign/add/', data={
            "name": "mylittlecampaign",
            "description": "xyz",
            "startingdate": "1301392136.0",
            "expirationdate": "1301332136.0",
            "frequency": "20",
            "callmaxduration": "50",
            "maxretry": "3",
            "intervalretry": "3000",
            "calltimeout": "60",
            "aleg_gateway": "1",
            "content_object": "type:43-id:1",
            "extra_data": "2000",
            "ds_user": self.user}, follow=True)
        self.assertEqual(response.status_code, 200)
        request = self.factory.post('/campaign/add/', {
            "name": "mylittlecampaign",
            "description": "xyz",
            "startingdate": "1301392136.0",
            "expirationdate": "1301332136.0",
            "frequency": "20",
            "callmaxduration": "50",
            "maxretry": "3",
            "intervalretry": "3000",
            "calltimeout": "60",
            "aleg_gateway": "1",
            "content_object": "type:43-id:1",
            "extra_data": "2000",
            "ds_user": self.user}, follow=True)
        request.user = self.user
        request.session = {}
        response = campaign_add(request)
        self.assertEqual(response.status_code, 200)
        request = self.factory.post('/campaign/add/', {
            "name": "mycampaign",
            "description": "xyz",
            "startingdate": "1301392136.0",
            "expirationdate": "1301332136.0",
            "frequency": "20",
            "callmaxduration": "50",
            "maxretry": "3",
            "intervalretry": "3000",
            "calltimeout": "60",
            "aleg_gateway": "1",
            "content_object": "type:43-id:1",
            "extra_data": "2000",
            "ds_user": self.user,}, follow=True)
        request.user = self.user
        request.session = {}
        response = campaign_add(request)
        self.assertEqual(response.status_code, 200)
    def test_campaign_view_update(self):
        """Test Function to check update campaign"""
        request = self.factory.post('/campaign/1/', {
            "name": "Sample campaign",
            "content_object": "type:43-id:1",
            "ds_user": self.user,
            }, follow=True)
        request.user = self.user
        request.session = {}
        response = campaign_change(request, 1)
        self.assertEqual(response.status_code, 200)
        # POSTing with 'delete' removes the campaign and redirects (302).
        request = self.factory.post('/campaign/1/',
                                    {'delete': True}, follow=True)
        request.user = self.user
        request.session = {}
        response = campaign_change(request, 1)
        self.assertEqual(response.status_code, 302)
    def test_campaign_view_delete(self):
        """Test Function to check delete campaign"""
        # delete campaign through campaign_change
        request = self.factory.post('/campaign/del/1/', follow=True)
        request.user = self.user
        request.session = {}
        response = campaign_del(request, 1)
        self.assertEqual(response['Location'], '/campaign/')
        self.assertEqual(response.status_code, 302)
        request = self.factory.post('/campaign/del/', {'select': '1'})
        request.user = self.user
        request.session = {}
        response = campaign_del(request, 0)
        self.assertEqual(response['Location'], '/campaign/')
        self.assertEqual(response.status_code, 302)
        request = self.factory.post(
            '/campaign/del/0/?stop_campaign=True', {'select': '1'})
        request.user = self.user
        request.session = {}
        response = campaign_del(request, 0)
        self.assertEqual(response['Location'], '/campaign/')
        self.assertEqual(response.status_code, 302)
    def test_notify_admin(self):
        """Test Function to check notify_admin"""
        request = self.factory.post('/notify/admin/', follow=True)
        request.user = self.user
        request.session = {}
        request.session['has_notified'] = False
        response = notify_admin(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'], '/dashboard/')
    def test_update_campaign_status_admin(self):
        # Admin status toggle should bounce back to the admin change list.
        request = self.factory.post('update_campaign_status_admin/1/1/',
                                    follow=True)
        request.user = self.user
        request.session = {}
        response = update_campaign_status_admin(request, 1, 1)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'],
                         '/admin/dialer_campaign/campaign/')
    def test_campaign_duplicate(self):
        """test duplicate campaign"""
        request = self.factory.get('campaign_duplicate/1/')
        request.user = self.user
        request.session = {}
        response = campaign_duplicate(request, 1)
        self.assertEqual(response.status_code, 200)
        request = self.factory.post('campaign_duplicate/1/',
                                    {'name': 'duplicate', 'campaign_code': 'ZUXSA'},
                                    follow=True)
        request.user = self.user
        request.session = {}
        response = campaign_duplicate(request, 1)
        self.assertEqual(response.status_code, 302)
class DialerCampaignCeleryTaskTestCase(TestCase):
    """Exercise the dialer_campaign celery tasks end to end."""
    fixtures = ['auth_user.json', 'gateway.json',
                'dialer_setting.json', 'user_profile.json',
                'phonebook.json', 'contact.json', 'survey.json',
                'dnc_list.json', 'dnc_contact.json',
                'campaign.json', 'subscriber.json',
                ]

    #def test_check_campaign_pendingcall(self):
    #    """Test that the ``check_campaign_pendingcall``
    #    task runs with no errors, and returns the correct result."""
    #    result = check_campaign_pendingcall.delay(1)
    #    self.assertEqual(result.successful(), True)

    def test_campaign_running(self):
        """The ``campaign_running`` periodic task completes without error."""
        outcome = campaign_running.delay()
        self.assertEqual(outcome.successful(), True)

    def test_collect_subscriber(self):
        """The ``collect_subscriber`` task completes without error."""
        outcome = collect_subscriber.delay(1)
        self.assertEqual(outcome.successful(), True)

    def test_campaign_expire_check(self):
        """The ``campaign_expire_check`` periodic task completes without error."""
        outcome = campaign_expire_check.delay()
        self.assertEqual(outcome.successful(), True)
class DialerCampaignModel(TestCase):
    """Test Campaign, Subscriber models."""
    fixtures = ['auth_user.json', 'gateway.json',
                'dialer_setting.json', 'user_profile.json',
                'phonebook.json', 'contact.json', 'survey.json',
                'dnc_list.json', 'dnc_contact.json',
                'campaign.json', 'subscriber.json',
                ]

    def setUp(self):
        """Create one campaign and one subscriber for the tests below."""
        self.user = User.objects.get(username='admin')
        # Campaign model
        try:
            self.content_type_id = \
                ContentType.objects.get(model='survey_template').id
        except Exception:
            # Deliberate best-effort: fall back to a fixed id when the
            # fixture does not provide the survey_template content type.
            # (Was a bare "except:", which also swallowed SystemExit.)
            self.content_type_id = 1
        self.campaign = Campaign(
            name="sample_campaign",
            user=self.user,
            aleg_gateway_id=1,
            content_type_id=self.content_type_id,
            object_id=1,
            status=1
        )
        self.campaign.save()
        self.assertEqual(self.campaign.__unicode__(), u'sample_campaign')
        # Subscriber model
        self.subscriber = Subscriber(
            contact_id=1,
            campaign=self.campaign,
            count_attempt=0,
            completion_count_attempt=0,
            status=1
        )
        self.subscriber.save()
        self.assertTrue(self.subscriber.__unicode__())
        # Test mgt command
        call_command("create_subscriber", "123456|1")
        call_command("create_subscriber", "123456|3")

    def test_campaign_form(self):
        """Walk the Campaign model helpers and the CampaignForm validation."""
        self.assertEqual(self.campaign.name, "sample_campaign")
        Campaign.objects.get_running_campaign()
        Campaign.objects.get_expired_campaign()
        dialersetting = DialerSetting.objects.get(pk=1)
        #self.user.get_profile().dialersetting
        common_contact_authorization(dialersetting, '1234567890')
        # Exercise update_campaign_status / get_url_campaign_status for every
        # status value (1..4).
        self.campaign.update_campaign_status()
        get_url_campaign_status(self.campaign.pk, self.campaign.status)
        self.campaign.status = 2
        self.campaign.save()
        self.campaign.update_campaign_status()
        get_url_campaign_status(self.campaign.pk, self.campaign.status)
        self.campaign.status = 3
        self.campaign.save()
        self.campaign.update_campaign_status()
        get_url_campaign_status(self.campaign.pk, self.campaign.status)
        self.campaign.status = 4
        self.campaign.save()
        self.campaign.update_campaign_status()
        get_url_campaign_status(self.campaign.pk, self.campaign.status)
        self.campaign.is_authorized_contact(dialersetting, '123456789')
        self.campaign.get_active_max_frequency()
        self.campaign.get_active_callmaxduration()
        self.campaign.get_active_contact()
        self.campaign.progress_bar()
        self.campaign.subscriber_detail()
        self.campaign.get_pending_subscriber_update(10, SUBSCRIBER_STATUS.IN_PROCESS)
        self.assertEqual(self.subscriber.campaign, self.campaign)
        # Saving an unbound form instance with the required fields filled in.
        form = CampaignForm(self.user)
        obj = form.save(commit=False)
        obj.name = "new_campaign"
        obj.user = self.user
        obj.phonebook_id = 1
        obj.aleg_gateway_id = 1
        obj.content_type_id = self.content_type_id
        obj.object_id = 1
        obj.save()
        form = CampaignForm(self.user, instance=self.campaign)
        self.assertTrue(isinstance(form.instance, Campaign))
        # An expiration date earlier than the starting date must not validate.
        form = CampaignForm(self.user, data={
            "name": "mylittle_campaign",
            "description": "xyz",
            "startingdate": "1301392136.0",
            "expirationdate": "1301332136.0",
            "frequency": "120",
            "callmaxduration": "50",
            "maxretry": "3",
            "intervalretry": "2000",
            "calltimeout": "60",
            "aleg_gateway": "1",
            "content_object": "type:32-id:1",
            "extra_data": "2000",
            "ds_user": self.user})
        # assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(form.is_valid(), False)

    def tearDown(self):
        """Clean up the objects created in setUp.

        BUGFIX: this hook was named ``teardown`` (lowercase), so unittest
        never called it and the objects were never deleted.
        """
        self.campaign.delete()
        self.subscriber.delete()
# ===== end of previous file (license: mpl-2.0) =====
# ===== file: steelrumors/settings.py (repo: arocks/steel-rumors) =====
# Django settings for steelrumors project.
# NOTE(review): development defaults — DEBUG must be False in production.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'database.db',                  # Or path to database file if using sqlite3.
        # The following settings are not used with sqlite3:
        'USER': '',
        'PASSWORD': '',
        'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
        'PORT': '', # Set to empty string for default.
    }
}

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''

# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

# Make this unique, and don't share it with anybody.
# NOTE(review): a SECRET_KEY committed to source control should be rotated
# and loaded from the environment in production deployments.
SECRET_KEY = 'g3pn)t)h(k8ek8%rkeh+hhhklxhx+!n03b9kl0@^xuf9pzcqt!'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'steelrumors.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'steelrumors.wsgi.application'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'registration',
    'links',
    'steelrumors',
)

# NOTE(review): django.core.urlresolvers was removed in Django 2.0
# (moved to django.urls) — this import pins the project to Django < 2.0.
from django.core.urlresolvers import reverse_lazy
# reverse_lazy is required here because the URLconf is not loaded yet
# when settings are evaluated.
LOGIN_URL=reverse_lazy("login")
LOGIN_REDIRECT_URL=reverse_lazy("home")
LOGOUT_URL=reverse_lazy("logout")

# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
# ===== end of file: steelrumors/settings.py (license: mit) =====
# ===== file: test/units/modules/network/f5/test_bigip_monitor_tcp.py (repo: lmprice/ansible) =====
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
import pytest
from nose.plugins.skip import SkipTest
# Skip the entire test module early on interpreters older than 2.7; the
# F5 modules (and these tests) rely on 2.7+ features.
if sys.version_info < (2, 7):
    raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_monitor_tcp import Parameters
from library.modules.bigip_monitor_tcp import ModuleManager
from library.modules.bigip_monitor_tcp import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_monitor_tcp import Parameters
from ansible.modules.network.f5.bigip_monitor_tcp import ModuleManager
from ansible.modules.network.f5.bigip_monitor_tcp import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
# Directory containing the JSON fixtures, plus a module-level cache so each
# fixture file is read and parsed at most once per test run.
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load a fixture file by name, caching the result in ``fixture_data``.

    Returns the decoded object when the file contains valid JSON, otherwise
    the raw file contents (some fixtures are plain text). Results are
    memoized keyed by the full path.
    """
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except ValueError:
        # Narrowed from a blanket "except Exception": json.loads signals a
        # parse failure with ValueError (JSONDecodeError is its subclass).
        # Non-JSON fixtures deliberately fall back to the raw text.
        pass

    fixture_data[path] = data
    return data
class TestParameters(unittest.TestCase):
    """Check translation of playbook args and API payloads into Parameters."""

    def test_module_parameters(self):
        """Playbook-style arguments map onto the expected attributes."""
        raw = dict(
            name='foo',
            parent='parent',
            send='this is a send string',
            receive='this is a receive string',
            ip='10.10.10.10',
            type='TTYPE_TCP',
            port=80,
            interval=20,
            timeout=30,
            time_until_up=60,
            partition='Common'
        )
        params = Parameters(params=raw)
        assert params.name == 'foo'
        assert params.parent == '/Common/parent'
        assert params.send == 'this is a send string'
        assert params.receive == 'this is a receive string'
        assert params.ip == '10.10.10.10'
        assert params.type == 'tcp'
        assert params.port == 80
        assert params.destination == '10.10.10.10:80'
        assert params.interval == 20
        assert params.timeout == 30
        assert params.time_until_up == 60

    def test_module_parameters_ints_as_strings(self):
        """Numeric arguments given as strings are coerced to integers."""
        raw = dict(
            name='foo',
            parent='parent',
            send='this is a send string',
            receive='this is a receive string',
            ip='10.10.10.10',
            type='TTYPE_TCP',
            port='80',
            interval='20',
            timeout='30',
            time_until_up='60',
            partition='Common'
        )
        params = Parameters(params=raw)
        assert params.name == 'foo'
        assert params.parent == '/Common/parent'
        assert params.send == 'this is a send string'
        assert params.receive == 'this is a receive string'
        assert params.ip == '10.10.10.10'
        assert params.type == 'tcp'
        assert params.port == 80
        assert params.destination == '10.10.10.10:80'
        assert params.interval == 20
        assert params.timeout == 30
        assert params.time_until_up == 60

    def test_api_parameters(self):
        """REST API payload keys (defaultsFrom, recv, ...) map correctly."""
        raw = dict(
            name='foo',
            defaultsFrom='/Common/parent',
            send='this is a send string',
            recv='this is a receive string',
            destination='10.10.10.10:80',
            interval=20,
            timeout=30,
            timeUntilUp=60
        )
        params = Parameters(params=raw)
        assert params.name == 'foo'
        assert params.parent == '/Common/parent'
        assert params.send == 'this is a send string'
        assert params.receive == 'this is a receive string'
        assert params.ip == '10.10.10.10'
        assert params.type == 'tcp'
        assert params.port == 80
        assert params.destination == '10.10.10.10:80'
        assert params.interval == 20
        assert params.timeout == 30
        assert params.time_until_up == 60
class TestManager(unittest.TestCase):
    """Exercise ModuleManager create/update flows with all device I/O mocked.

    Each test wires up module arguments via ``set_module_args``, replaces the
    device-facing methods (``exists``, ``create_on_device``,
    ``read_current_from_device``, ``update_on_device``) with mocks, and
    asserts on the result dict of ``exec_module``. The exact mock wiring is
    part of the behavior under test.
    """

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create_monitor(self, *args):
        """Creating a new monitor reports changed and the qualified parent."""
        set_module_args(dict(
            name='foo',
            parent='parent',
            send='this is a send string',
            receive='this is a receive string',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            time_until_up=60,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        # exists() is consulted twice: before (absent) and after (present).
        mm.exists = Mock(side_effect=[False, True])
        mm.create_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['parent'] == '/Common/parent'

    def test_create_monitor_idempotent(self, *args):
        """Re-applying the fixture's current settings reports no change."""
        set_module_args(dict(
            name='foo',
            parent='tcp',
            send='this is a send string',
            receive='this is a receive string',
            ip='10.10.10.10',
            port=80,
            interval=20,
            timeout=30,
            time_until_up=60,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)

        results = mm.exec_module()

        assert results['changed'] is False

    def test_update_port(self, *args):
        """Changing only the port triggers an update and reports it."""
        set_module_args(dict(
            name='foo',
            port=800,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['port'] == 800

    def test_update_interval(self, *args):
        """Changing only the interval triggers an update and reports it."""
        set_module_args(dict(
            name='foo',
            interval=10,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['interval'] == 10

    def test_update_interval_larger_than_existing_timeout(self, *args):
        """An interval >= the device's current timeout must be rejected."""
        set_module_args(dict(
            name='foo',
            interval=30,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        with pytest.raises(F5ModuleError) as ex:
            mm.exec_module()

        assert "must be less than" in str(ex)

    def test_update_interval_larger_than_new_timeout(self, *args):
        """An interval >= the timeout supplied in the same call is rejected."""
        set_module_args(dict(
            name='foo',
            interval=10,
            timeout=5,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        with pytest.raises(F5ModuleError) as ex:
            mm.exec_module()

        assert "must be less than" in str(ex)

    def test_update_send(self, *args):
        """Changing only the send string triggers an update and reports it."""
        set_module_args(dict(
            name='foo',
            send='this is another send string',
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['send'] == 'this is another send string'

    def test_update_receive(self, *args):
        """Changing only the receive string triggers an update and reports it."""
        set_module_args(dict(
            name='foo',
            receive='this is another receive string',
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['receive'] == 'this is another receive string'

    def test_update_timeout(self, *args):
        """Changing only the timeout triggers an update and reports it."""
        set_module_args(dict(
            name='foo',
            timeout=300,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['timeout'] == 300

    def test_update_time_until_up(self, *args):
        """Changing only time_until_up triggers an update and reports it."""
        set_module_args(dict(
            name='foo',
            time_until_up=300,
            partition='Common',
            server='localhost',
            password='password',
            user='admin'
        ))

        current = Parameters(params=load_fixture('load_ltm_monitor_tcp.json'))
        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        mm = ModuleManager(module=module)
        mm.exists = Mock(return_value=True)
        mm.read_current_from_device = Mock(return_value=current)
        mm.update_on_device = Mock(return_value=True)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['time_until_up'] == 300
# ===== end of file: test_bigip_monitor_tcp.py (license: gpl-3.0) =====
# ===== file: mysqlmtop/global_functions.py (repo: favcode/pydht) =====
#!/bin/env python
#-*-coding:utf-8-*-
import MySQLdb
import string
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import ConfigParser
import smtplib
from email.mime.text import MIMEText
from email.message import Message
from email.header import Header
def get_config(group, config_name):
    """Return option ``config_name`` from section ``group`` of etc/config.ini.

    Surrounding whitespace and single/double quotes are stripped from the
    raw value before it is returned.
    """
    config = ConfigParser.ConfigParser()
    # BUGFIX: the file was opened in the bogus mode 'rw' and never closed;
    # open read-only inside a context manager so the handle is released.
    with open('./etc/config.ini') as config_file:
        config.readfp(config_file)
    return config.get(group, config_name).strip(' ').strip('\'').strip('\"')
# Connection settings for the monitoring database, read once at import time.
host = get_config('monitor_server','host')
port = get_config('monitor_server','port')
user = get_config('monitor_server','user')
passwd = get_config('monitor_server','passwd')
dbname = get_config('monitor_server','dbname')
def mysql_exec(sql, param):
    """Execute a write statement against the monitor database and commit.

    ``param`` is handed to the driver for safe interpolation; pass '' to
    run ``sql`` without parameters.
    """
    conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                           port=int(port), connect_timeout=5, charset='utf8')
    conn.select_db(dbname)
    cursor = conn.cursor()
    try:
        # BUGFIX: '!=' replaces the '<>' operator, which was removed in
        # Python 3 (both spellings work in Python 2).
        if param != '':
            cursor.execute(sql, param)
        else:
            cursor.execute(sql)
        conn.commit()
    finally:
        # Always release the cursor and connection, even when the query fails.
        cursor.close()
        conn.close()
def mysql_query(sql):
    """Run a read-only query against the monitor database.

    Returns all rows as a tuple of tuples, or the integer 0 when the query
    matched nothing (historical contract kept for existing callers).
    """
    conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                           port=int(port), connect_timeout=5, charset='utf8')
    conn.select_db(dbname)
    cursor = conn.cursor()
    try:
        count = cursor.execute(sql)
        if count == 0:
            result = 0
        else:
            result = cursor.fetchall()
    finally:
        # BUGFIX: the original placed these close() calls *after* the
        # return statement, so they never ran and every call leaked a
        # connection.
        cursor.close()
        conn.close()
    return result
def get_option(key):
    """Return the 'value' column of the options row named ``key``.

    Returns 0 when no such row exists.
    """
    conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
                           port=int(port), connect_timeout=5, charset='utf8')
    conn.select_db(dbname)
    cursor = conn.cursor()
    try:
        # BUGFIX: the query used to be built by string concatenation
        # ("...name=+'" + key + "'"), which is an SQL-injection hole;
        # let the driver interpolate the parameter instead.
        count = cursor.execute("select value from options where name=%s",
                               (key,))
        if count == 0:
            # BUGFIX: the original did "result = 0" then "return result[0]",
            # which raised TypeError for a missing key.
            value = 0
        else:
            value = cursor.fetchone()[0]
    finally:
        # BUGFIX: close() calls were after the return and never executed.
        cursor.close()
        conn.close()
    return value
# SMTP settings used by send_mail(), read once at import time.
mail_host = get_config('mail_server','mail_host')
mail_user = get_config('mail_server','mail_user')
mail_pass = get_config('mail_server','mail_pass')
mail_postfix = get_config('mail_server','mail_postfix')
def send_mail(to_list, sub, content):
    """Send an HTML mail through the configured SMTP server.

    :param to_list: list of recipient addresses
    :param sub: subject line
    :param content: HTML body
    :returns: True on success, False when sending failed (the error is
        printed, matching the original best-effort behaviour)

    Example: send_mail(["aaa@126.com"], "sub", "content")
    """
    me = mail_user
    msg = MIMEText(content, _subtype='html', _charset='utf8')
    msg['Subject'] = Header(sub, 'utf8')
    msg['From'] = Header(me, 'utf8')
    msg['To'] = ";".join(to_list)
    try:
        s = smtplib.SMTP()
        s.connect(mail_host)
        s.login(mail_user, mail_pass)
        s.sendmail(me, to_list, msg.as_string())
        s.close()
        return True
    except Exception as e:
        # BUGFIX: "except Exception, e" is Python-2-only syntax; the
        # "as e" form works on Python 2.6+ and Python 3.
        print(str(e))
        return False
# ===== end of file: mysqlmtop/global_functions.py (license: gpl-2.0) =====
# ===== file: pootle/apps/pootle_app/management/commands/schema_commands/__init__.py (repo: ta2-1/pootle) =====
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import os
# Point Django at the project settings before any Django imports below run.
os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings'
from django.core.management.base import BaseCommand, CommandError
from pootle.core.schema.base import SchemaTool
from pootle.core.utils.json import jsonify
class SchemaCommand(BaseCommand):
    """Shared behaviour for the schema inspection commands.

    Subclasses implement ``handle()`` and must assign ``self.schema_tool``
    before calling :meth:`handle_table`.
    """

    def add_arguments(self, parser):
        """Register the section-selection flags shared by all subclasses."""
        for flag, help_text in (
                ('--fields', 'Table fields.'),
                ('--indices', 'Table indices.'),
                ('--constraints', 'Table constraints.')):
            parser.add_argument(
                flag,
                action='store_true',
                default=False,
                help=help_text,
            )
        super(SchemaCommand, self).add_arguments(parser)

    def handle_table(self, table_name, **options):
        """Return ``{table_name: {...}}`` with the requested schema sections."""
        table_info = {}
        # When no explicit section flag was given, report every section.
        show_everything = not (options['fields']
                               or options['indices']
                               or options['constraints'])
        if options['fields'] or show_everything:
            table_info['fields'] = \
                self.schema_tool.get_table_fields(table_name)
        if options['indices'] or show_everything:
            table_info['indices'] = \
                self.schema_tool.get_table_indices(table_name)
        if options['constraints'] or show_everything:
            table_info['constraints'] = \
                self.schema_tool.get_table_constraints(table_name)
        return {table_name: table_info}
class SchemaTableCommand(SchemaCommand):
    """Dump schema details for explicitly named tables as JSON."""

    def add_arguments(self, parser):
        """Require one or more table names as positional arguments."""
        parser.add_argument(
            'args',
            metavar='table_name',
            nargs='+',
            help='Table names.'
        )
        super(SchemaTableCommand, self).add_arguments(parser)

    def handle(self, *args, **options):
        """Validate the requested tables, then print their schema as JSON."""
        self.schema_tool = SchemaTool()
        known_tables = set(self.schema_tool.get_tables())
        unknown_tables = set(args) - known_tables
        if unknown_tables:
            raise CommandError("Unrecognized tables: %s" %
                               list(unknown_tables))
        table_map = {}
        for table_name in args:
            table_map.update(self.handle_table(table_name, **options))
        self.stdout.write(jsonify(table_map))
class SchemaAppCommand(SchemaCommand):
    """Dump schema details for every table of the given applications."""

    def add_arguments(self, parser):
        """Require app labels; optionally only list table names."""
        parser.add_argument(
            'args',
            metavar='app_label',
            nargs='+',
            help='Application labels.'
        )
        parser.add_argument(
            '--tables',
            action='store_true',
            default=False,
            dest='tables',
            help='Print all table names.',
        )
        super(SchemaAppCommand, self).add_arguments(parser)

    def handle(self, *args, **options):
        """Print a JSON document describing the apps' tables (or schemas)."""
        try:
            self.schema_tool = SchemaTool(*args)
        except (LookupError, ImportError) as e:
            raise CommandError("%s. Are you sure your INSTALLED_APPS "
                               "setting is correct?" % e)

        if options['tables']:
            result = dict(apps={})
            for app_label in args:
                # BUGFIX: accumulate one entry per application. The
                # original assigned result['apps'] = {...} on every
                # iteration, so only the last app's tables were reported;
                # key by app_label to mirror the non---tables branch below.
                result['apps'][app_label] = {
                    'name': app_label,
                    'tables': self.schema_tool.get_app_tables(app_label),
                }
            self.stdout.write(jsonify(result))
        else:
            result = dict(apps={})
            for app_label in args:
                result['apps'][app_label] = {
                    'name': app_label,
                    'tables': {}
                }
                for table_name in self.schema_tool.get_app_tables(app_label):
                    result['apps'][app_label]['tables'].update(
                        self.handle_table(table_name, **options)
                    )
            self.stdout.write(jsonify(result))
# ===== end of file: schema_commands/__init__.py (license: gpl-3.0) =====
# ===== file: lib/ansible/modules/storage/netapp/netapp_e_amg.py (repo: alvaroaleman/ansible) =====
#!/usr/bin/python
# (c) 2016, NetApp, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module maturity metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
# Module documentation consumed by ansible-doc. Fixes: "mirroing" typo and
# the garbled "last good failures point" (should be "recovery point").
DOCUMENTATION = """
---
module: netapp_e_amg
short_description: Create, Remove, and Update Asynchronous Mirror Groups
description:
    - Allows for the creation, removal and updating of Asynchronous Mirror Groups for NetApp E-series storage arrays
version_added: '2.2'
author: Kevin Hulquest (@hulquest)
options:
    api_username:
        required: true
        description:
        - The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
    api_password:
        required: true
        description:
        - The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
    api_url:
        required: true
        description:
        - The url to the SANtricity WebServices Proxy or embedded REST API.
        example:
        - https://prod-1.wahoo.acme.com/devmgr/v2
    validate_certs:
        required: false
        default: true
        description:
        - Should https certificates be validated?
    name:
        description:
            - The name of the async array you wish to target, or create.
            - If C(state) is present and the name isn't found, it will attempt to create.
        required: yes
    secondaryArrayId:
        description:
            - The ID of the secondary array to be used in the mirroring process
        required: yes
    syncIntervalMinutes:
        description:
            - The synchronization interval in minutes
        required: no
        default: 10
    manualSync:
        description:
            - Setting this to true will cause other synchronization values to be ignored
        required: no
        default: no
    recoveryWarnThresholdMinutes:
        description:
            - Recovery point warning threshold (minutes). The user will be warned when the age of the last good recovery point exceeds this value
        required: no
        default: 20
    repoUtilizationWarnThreshold:
        description:
            - Recovery point warning threshold
        required: no
        default: 80
    interfaceType:
        description:
            - The intended protocol to use if both Fibre and iSCSI are available.
        choices:
            - iscsi
            - fibre
        required: no
        default: null
    syncWarnThresholdMinutes:
        description:
            - The threshold (in minutes) for notifying the user that periodic synchronization has taken too long to complete.
        required: no
        default: 10
    ssid:
        description:
            - The ID of the primary storage array for the async mirror action
        required: yes
    state:
        description:
            - A C(state) of present will either create or update the async mirror group.
            - A C(state) of absent will remove the async mirror group.
        required: yes
"""
# Usage examples consumed by ansible-doc. Fix: the removal example invoked
# the task as "na_eseries_amg", which does not match this module's name.
EXAMPLES = """
- name: AMG removal
  netapp_e_amg:
    state: absent
    ssid: "{{ ssid }}"
    secondaryArrayId: "{{amg_secondaryArrayId}}"
    api_url: "{{ netapp_api_url }}"
    api_username: "{{ netapp_api_username }}"
    api_password: "{{ netapp_api_password }}"
    new_name: "{{amg_array_name}}"
    name: "{{amg_name}}"
  when: amg_create

- name: AMG create
  netapp_e_amg:
    state: present
    ssid: "{{ ssid }}"
    secondaryArrayId: "{{amg_secondaryArrayId}}"
    api_url: "{{ netapp_api_url }}"
    api_username: "{{ netapp_api_username }}"
    api_password: "{{ netapp_api_password }}"
    new_name: "{{amg_array_name}}"
    name: "{{amg_name}}"
  when: amg_create
"""
# Return-value documentation consumed by ansible-doc. Fix: the original
# declared the "msg" key twice, which is an invalid (duplicate-key) YAML
# mapping; the two entries are merged into one.
RETURN = """
msg:
    description: Successful creation or removal. On removal a short message
                 is returned; on creation the new mirror group's properties
                 are returned as a JSON document.
    returned: success
    type: string
    sample: "Async mirror group removed."
"""
import json
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule, get_exception
from ansible.module_utils.urls import open_url
from ansible.module_utils.six.moves.urllib.error import HTTPError
# Default headers for SANtricity REST calls: send JSON and expect JSON back.
HEADERS = {
    "Content-Type": "application/json",
    "Accept": "application/json",
}
def request(url, data=None, headers=None, method='GET', use_proxy=True,
            force=False, last_mod_time=None, timeout=10, validate_certs=True,
            url_username=None, url_password=None, http_agent=None, force_basic_auth=False, ignore_errors=False):
    """Issue an HTTP request and return ``(status_code, parsed_json)``.

    HTTP error responses (raised by ``open_url`` as :class:`HTTPError`) are
    not treated as fatal here: their body is read and parsed like a normal
    response so callers can inspect the API's error payload.

    :param url: full URL to request.
    :param data: request body to send (already serialized), or None.
    :param ignore_errors: when True, suppress JSON-parse failures and
        do not raise on HTTP status >= 400.
    :returns: tuple of (HTTP status code, decoded JSON body or None).
    :raises Exception: on an unparsable body (unless ignore_errors), or
        with ``(status, data)`` when status >= 400 (unless ignore_errors).
    """
    try:
        r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
                     force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
                     url_username=url_username, url_password=url_password, http_agent=http_agent,
                     force_basic_auth=force_basic_auth)
    except HTTPError:
        # Error responses still carry a useful JSON body; fall through and
        # read it from the error's file-like payload.
        err = get_exception()
        r = err.fp
    # BUG FIX: raw_data must exist even if r.read() itself raises, otherwise
    # the `raise Exception(raw_data)` below would mask the real error with a
    # NameError.
    raw_data = None
    try:
        raw_data = r.read()
        if raw_data:
            data = json.loads(raw_data)
        else:
            data = None
    except Exception:  # narrowed from bare `except:` so SystemExit/KeyboardInterrupt propagate
        if ignore_errors:
            pass
        else:
            raise Exception(raw_data)
    resp_code = r.getcode()
    if resp_code >= 400 and not ignore_errors:
        raise Exception(resp_code, data)
    else:
        return resp_code, data
def has_match(module, ssid, api_url, api_pwd, api_usr, body):
    """Check whether an async mirror group with the requested name exists.

    Queries ``storage-systems/<ssid>/async-mirrors`` and compares the found
    group (if any) against the desired settings in ``body``.

    Returns a 4-tuple ``(label_exists, matches_spec, api_data, async_id)``:
    whether a group with the desired name exists, whether its settings match
    the desired spec, the raw API dict for that group (or None), and its
    ``groupRef`` id (or None).
    """
    # Only these settings participate in the "matches desired state" check.
    compare_keys = ['syncIntervalMinutes', 'syncWarnThresholdMinutes',
                    'recoveryWarnThresholdMinutes', 'repoUtilizationWarnThreshold']
    desired_state = dict((x, (body.get(x))) for x in compare_keys)
    label_exists = False
    matches_spec = False
    current_state = None
    async_id = None
    api_data = None
    desired_name = body.get('name')
    endpoint = 'storage-systems/%s/async-mirrors' % ssid
    url = api_url + endpoint
    try:
        rc, data = request(url, url_username=api_usr, url_password=api_pwd, headers=HEADERS)
    except Exception:
        # NOTE(review): exit_json on a failure path looks like it should be
        # fail_json — confirm intended behavior before changing.
        error = get_exception()
        module.exit_json(exception="Error finding a match. Message: %s" % str(error))
    for async_group in data:
        if async_group['label'] == desired_name:
            label_exists = True
            api_data = async_group
            async_id = async_group['groupRef']
            # Map the API's field names back onto the module's option names
            # so the two dicts can be compared directly.
            current_state = dict(
                syncIntervalMinutes=async_group['syncIntervalMinutes'],
                syncWarnThresholdMinutes=async_group['syncCompletionTimeAlertThresholdMinutes'],
                recoveryWarnThresholdMinutes=async_group['recoveryPointAgeAlertThresholdMinutes'],
                repoUtilizationWarnThreshold=async_group['repositoryUtilizationWarnThreshold'],
            )
    if current_state == desired_state:
        matches_spec = True
    return label_exists, matches_spec, api_data, async_id
def create_async(module, ssid, api_url, api_pwd, api_usr, body):
    """Create a new async mirror group via POST and return the API response.

    :param ssid: storage system identifier.
    :param body: dict describing the desired AMG (name, thresholds, ...).
    :returns: decoded JSON response from the array.
    """
    endpoint = 'storage-systems/%s/async-mirrors' % ssid
    url = api_url + endpoint
    post_data = json.dumps(body)
    try:
        rc, data = request(url, data=post_data, method='POST', url_username=api_usr, url_password=api_pwd,
                           headers=HEADERS)
    except Exception:
        error = get_exception()
        # BUG FIX: corrected typo in the failure message ("aysnc" -> "async").
        module.exit_json(exception="Exception while creating async mirror group. Message: %s" % str(error))
    return data
def update_async(module, ssid, api_url, pwd, user, body, new_name, async_id):
    """Update an existing async mirror group's settings (optionally renaming it).

    Builds a payload containing only the tunable threshold/interval settings
    from ``body`` plus an optional ``new_name``, POSTs it to the group's
    endpoint, and returns the decoded API response.
    """
    url = api_url + 'storage-systems/%s/async-mirrors/%s' % (ssid, async_id)
    tunables = ('syncIntervalMinutes', 'syncWarnThresholdMinutes',
                'recoveryWarnThresholdMinutes', 'repoUtilizationWarnThreshold')
    payload = {key: body.get(key) for key in tunables}
    if new_name:
        payload['new_name'] = new_name
    try:
        rc, data = request(url, data=json.dumps(payload), method='POST', headers=HEADERS,
                           url_username=user, url_password=pwd)
    except Exception:
        error = get_exception()
        module.exit_json(exception="Exception while updating async mirror group. Message: %s" % str(error))
    return data
def remove_amg(module, ssid, api_url, pwd, user, async_id):
    """Delete the async mirror group identified by ``async_id``."""
    url = api_url + 'storage-systems/%s/async-mirrors/%s' % (ssid, async_id)
    try:
        request(url, method='DELETE', url_username=user, url_password=pwd,
                headers=HEADERS)
    except Exception:
        error = get_exception()
        module.exit_json(exception="Exception while removing async mirror group. Message: %s" % str(error))
    return
def main():
    """Module entry point: create, update or remove an async mirror group."""
    argument_spec = basic_auth_argument_spec()
    argument_spec.update(dict(
        api_username=dict(type='str', required=True),
        api_password=dict(type='str', required=True, no_log=True),
        api_url=dict(type='str', required=True),
        name=dict(required=True, type='str'),
        new_name=dict(required=False, type='str'),
        secondaryArrayId=dict(required=True, type='str'),
        syncIntervalMinutes=dict(required=False, default=10, type='int'),
        manualSync=dict(required=False, default=False, type='bool'),
        recoveryWarnThresholdMinutes=dict(required=False, default=20, type='int'),
        repoUtilizationWarnThreshold=dict(required=False, default=80, type='int'),
        interfaceType=dict(required=False, choices=['fibre', 'iscsi'], type='str'),
        ssid=dict(required=True, type='str'),
        state=dict(required=True, choices=['present', 'absent']),
        syncWarnThresholdMinutes=dict(required=False, default=10, type='int')
    ))
    module = AnsibleModule(argument_spec=argument_spec)
    p = module.params
    # Pop the connection/control parameters; whatever remains in `p` is the
    # desired AMG specification forwarded to the API helpers.
    ssid = p.pop('ssid')
    api_url = p.pop('api_url')
    user = p.pop('api_username')
    pwd = p.pop('api_password')
    new_name = p.pop('new_name')
    state = p.pop('state')
    if not api_url.endswith('/'):
        api_url += '/'
    name_exists, spec_matches, api_data, async_id = has_match(module, ssid, api_url, pwd, user, p)
    if state == 'present':
        if name_exists and spec_matches:
            module.exit_json(changed=False, msg="Desired state met", **api_data)
        elif name_exists and not spec_matches:
            results = update_async(module, ssid, api_url, pwd, user,
                                   p, new_name, async_id)
            module.exit_json(changed=True,
                            msg="Async mirror group updated", async_id=async_id,
                            **results)
        elif not name_exists:
            # BUG FIX: credentials were previously passed as (user, pwd) into
            # create_async's (api_pwd, api_usr) slots, i.e. swapped; the
            # correct order is (pwd, user).
            results = create_async(module, ssid, api_url, pwd, user, p)
            module.exit_json(changed=True, **results)
    elif state == 'absent':
        if name_exists:
            remove_amg(module, ssid, api_url, pwd, user, async_id)
            module.exit_json(changed=True, msg="Async mirror group removed.",
                            async_id=async_id)
        else:
            module.exit_json(changed=False,
                            msg="Async Mirror group: %s already absent" % p['name'])
if __name__ == '__main__':
main()
| gpl-3.0 |
tersmitten/ansible | lib/ansible/modules/cloud/azure/azure_rm_mysqlserver.py | 13 | 13278 | #!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mysqlserver
version_added: "2.5"
short_description: Manage MySQL Server instance.
description:
- Create, update and delete instance of MySQL Server.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
name:
description:
- The name of the server.
required: True
sku:
description:
- The SKU (pricing tier) of the server.
suboptions:
name:
description:
- The name of the sku, typically, a letter + Number code, e.g. P3.
tier:
description:
- The tier of the particular SKU, e.g. Basic.
choices: ['basic', 'standard']
capacity:
description:
- "The scale up/out capacity, representing server's compute units."
size:
description:
- The size code, to be interpreted by resource as appropriate.
location:
description:
- Resource location. If not set, location from the resource group will be used as default.
storage_mb:
description:
- The maximum storage allowed for a server.
version:
description:
- Server version.
choices: ['5.6', '5.7']
enforce_ssl:
description:
- Enable SSL enforcement.
type: bool
default: False
admin_username:
description:
- "The administrator's login name of a server. Can only be specified when the server is being created (and is required for creation)."
admin_password:
description:
- The password of the administrator login.
create_mode:
description:
- Create mode of SQL Server
default: Default
state:
description:
- Assert the state of the MySQL Server. Use C(present) to create or update a server and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- "Zim Kalinowski (@zikalino)"
'''
EXAMPLES = '''
- name: Create (or update) MySQL Server
azure_rm_mysqlserver:
resource_group: myResourceGroup
name: testserver
sku:
name: B_Gen5_1
tier: Basic
location: eastus
storage_mb: 1024
enforce_ssl: True
version: 5.6
admin_username: cloudsa
admin_password: password
'''
RETURN = '''
id:
description:
- Resource ID
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DBforMySQL/servers/mysqlsrv1b6dd89593
version:
description:
- 'Server version. Possible values include: C(5.6), C(5.7)'
returned: always
type: str
sample: 5.6
state:
description:
- 'A state of a server that is visible to user. Possible values include: C(Ready), C(Dropping), C(Disabled)'
returned: always
type: str
sample: Ready
fully_qualified_domain_name:
description:
- The fully qualified domain name of a server.
returned: always
type: str
sample: mysqlsrv1b6dd89593.mysql.database.azure.com
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from azure.mgmt.rdbms.mysql import MySQLManagementClient
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
    """Enumeration of the possible operations to perform on the resource."""
    NoAction = 0
    Create = 1
    Update = 2
    Delete = 3
class AzureRMMySqlServers(AzureRMModuleBase):
    """Configuration class for an Azure RM MySQL Server resource.

    Drives a create/update/delete state machine: compare the requested state
    against the server currently in Azure, decide an Action, apply it, and
    report changed/unchanged plus key server facts.
    """
    def __init__(self):
        # Module argument schema; common auth/tag options come from
        # AzureRMModuleBase (supports_tags=True below).
        self.module_arg_spec = dict(
            resource_group=dict(
                type='str',
                required=True
            ),
            name=dict(
                type='str',
                required=True
            ),
            sku=dict(
                type='dict'
            ),
            location=dict(
                type='str'
            ),
            storage_mb=dict(
                type='int'
            ),
            version=dict(
                type='str',
                choices=['5.6', '5.7']
            ),
            enforce_ssl=dict(
                type='bool',
                default=False
            ),
            create_mode=dict(
                type='str',
                default='Default'
            ),
            admin_username=dict(
                type='str'
            ),
            admin_password=dict(
                type='str',
                no_log=True
            ),
            state=dict(
                type='str',
                default='present',
                choices=['present', 'absent']
            )
        )
        self.resource_group = None
        self.name = None
        # Azure API request body, accumulated from module params in exec_module.
        self.parameters = dict()
        self.tags = None
        self.results = dict(changed=False)
        self.state = None
        self.to_do = Actions.NoAction
        super(AzureRMMySqlServers, self).__init__(derived_arg_spec=self.module_arg_spec,
                                                  supports_check_mode=True,
                                                  supports_tags=True)
    def exec_module(self, **kwargs):
        """Main module execution method"""
        # Copy simple params onto self; map the rest into the nested Azure
        # request-body shape expected by the SDK.
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            if hasattr(self, key):
                setattr(self, key, kwargs[key])
            elif kwargs[key] is not None:
                if key == "sku":
                    ev = kwargs[key]
                    if 'tier' in ev:
                        # Normalize lowercase tier aliases to Azure's casing.
                        if ev['tier'] == 'basic':
                            ev['tier'] = 'Basic'
                        elif ev['tier'] == 'standard':
                            ev['tier'] = 'Standard'
                    self.parameters["sku"] = ev
                elif key == "location":
                    self.parameters["location"] = kwargs[key]
                elif key == "storage_mb":
                    self.parameters.setdefault("properties", {}).setdefault("storage_profile", {})["storage_mb"] = kwargs[key]
                elif key == "version":
                    self.parameters.setdefault("properties", {})["version"] = kwargs[key]
                elif key == "enforce_ssl":
                    self.parameters.setdefault("properties", {})["ssl_enforcement"] = 'Enabled' if kwargs[key] else 'Disabled'
                elif key == "create_mode":
                    self.parameters.setdefault("properties", {})["create_mode"] = kwargs[key]
                elif key == "admin_username":
                    self.parameters.setdefault("properties", {})["administrator_login"] = kwargs[key]
                elif key == "admin_password":
                    self.parameters.setdefault("properties", {})["administrator_login_password"] = kwargs[key]
        old_response = None
        response = None
        resource_group = self.get_resource_group(self.resource_group)
        # Default the server location to the resource group's location.
        if "location" not in self.parameters:
            self.parameters["location"] = resource_group.location
        old_response = self.get_mysqlserver()
        # Decide which Action to take by comparing desired vs current state.
        if not old_response:
            self.log("MySQL Server instance doesn't exist")
            if self.state == 'absent':
                self.log("Old instance didn't exist")
            else:
                self.to_do = Actions.Create
        else:
            self.log("MySQL Server instance already exists")
            if self.state == 'absent':
                self.to_do = Actions.Delete
            elif self.state == 'present':
                self.log("Need to check if MySQL Server instance has to be deleted or may be updated")
                update_tags, newtags = self.update_tags(old_response.get('tags', {}))
                if update_tags:
                    self.tags = newtags
                self.to_do = Actions.Update
        if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
            self.log("Need to Create / Update the MySQL Server instance")
            if self.check_mode:
                self.results['changed'] = True
                return self.results
            response = self.create_update_mysqlserver()
            if not old_response:
                self.results['changed'] = True
            else:
                # NOTE(review): dunder call; `old_response != response` would
                # be the idiomatic spelling — behavior is the same here.
                self.results['changed'] = old_response.__ne__(response)
            self.log("Creation / Update done")
        elif self.to_do == Actions.Delete:
            self.log("MySQL Server instance deleted")
            self.results['changed'] = True
            if self.check_mode:
                return self.results
            self.delete_mysqlserver()
            # make sure instance is actually deleted, for some Azure resources, instance is hanging around
            # for some time after deletion -- this should be really fixed in Azure
            while self.get_mysqlserver():
                time.sleep(20)
        else:
            self.log("MySQL Server instance unchanged")
            self.results['changed'] = False
            response = old_response
        # Surface key facts about the (remaining) server to the caller.
        if response:
            self.results["id"] = response["id"]
            self.results["version"] = response["version"]
            self.results["state"] = response["user_visible_state"]
            self.results["fully_qualified_domain_name"] = response["fully_qualified_domain_name"]
        return self.results
    def create_update_mysqlserver(self):
        '''
        Creates or updates MySQL Server with the specified configuration.

        :return: deserialized MySQL Server instance state dictionary
        '''
        self.log("Creating / Updating the MySQL Server instance {0}".format(self.name))
        try:
            self.parameters['tags'] = self.tags
            if self.to_do == Actions.Create:
                response = self.mysql_client.servers.create(resource_group_name=self.resource_group,
                                                            server_name=self.name,
                                                            parameters=self.parameters)
            else:
                # structure of parameters for update must be changed
                # (update expects the "properties" sub-dict flattened to top level)
                self.parameters.update(self.parameters.pop("properties", {}))
                response = self.mysql_client.servers.update(resource_group_name=self.resource_group,
                                                            server_name=self.name,
                                                            parameters=self.parameters)
            # Long-running operation: block until it completes.
            if isinstance(response, LROPoller):
                response = self.get_poller_result(response)
        except CloudError as exc:
            self.log('Error attempting to create the MySQL Server instance.')
            self.fail("Error creating the MySQL Server instance: {0}".format(str(exc)))
        return response.as_dict()
    def delete_mysqlserver(self):
        '''
        Deletes specified MySQL Server instance in the specified subscription and resource group.

        :return: True
        '''
        self.log("Deleting the MySQL Server instance {0}".format(self.name))
        try:
            response = self.mysql_client.servers.delete(resource_group_name=self.resource_group,
                                                        server_name=self.name)
        except CloudError as e:
            self.log('Error attempting to delete the MySQL Server instance.')
            self.fail("Error deleting the MySQL Server instance: {0}".format(str(e)))
        return True
    def get_mysqlserver(self):
        '''
        Gets the properties of the specified MySQL Server.

        :return: deserialized MySQL Server instance state dictionary,
                 or False when the server does not exist
        '''
        self.log("Checking if the MySQL Server instance {0} is present".format(self.name))
        found = False
        try:
            response = self.mysql_client.servers.get(resource_group_name=self.resource_group,
                                                     server_name=self.name)
            found = True
            self.log("Response : {0}".format(response))
            self.log("MySQL Server instance : {0} found".format(response.name))
        except CloudError as e:
            # Not-found surfaces as CloudError; treated as "does not exist".
            self.log('Did not find the MySQL Server instance.')
        if found is True:
            return response.as_dict()
        return False
def main():
    """Entry point: instantiating the module class runs the whole workflow."""
    AzureRMMySqlServers()
if __name__ == '__main__':
main()
| gpl-3.0 |
techhat/libcloud | demos/gce_demo.py | 13 | 37094 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This example performs several tasks on Google Compute Platform. It can be
# run directly or can be imported into an interactive python session. This
# can also serve as live integration tests.
#
# To run directly, use python 2.7 or greater:
# - $ python gce_demo.py --help # to see the help screen
# - $ python gce_demo.py # to run all demos / tests
#
# To run interactively:
# - Make sure you have valid values in secrets.py
# (For more information about setting up your credentials, see the
# libcloud/common/google.py docstring)
# - Run 'python' in this directory, then:
# import gce_demo
# gce = gce_demo.get_gce_driver()
# gce.list_nodes()
# etc.
# - Or, to run the full demo from the interactive python shell:
# import gce_demo
# gce_demo.CLEANUP = False # optional
# gce_demo.MAX_NODES = 4 # optional
# gce_demo.DATACENTER = 'us-central1-a' # optional
# gce_demo.main_compute() # 'compute' only demo
# gce_demo.main_load_balancer() # 'load_balancer' only demo
# gce_demo.main_dns() # 'dns only demo
# gce_demo.main() # all demos / tests
import os.path
import sys
import datetime
import time
try:
import argparse
except:
print('This script uses the python "argparse" module. Please use Python '
'2.7 or greater.')
raise
try:
import secrets
except ImportError:
print('"demos/secrets.py" not found.\n\n'
'Please copy secrets.py-dist to secrets.py and update the GCE* '
'values with appropriate authentication information.\n'
'Additional information about setting these values can be found '
'in the docstring for:\n'
'libcloud/common/google.py\n')
sys.exit(1)
# Add parent dir of this file's dir to sys.path (OS-agnostically)
sys.path.append(
os.path.normpath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import ResourceNotFoundError
from libcloud.loadbalancer.types import Provider as Provider_lb
from libcloud.loadbalancer.providers import get_driver as get_driver_lb
from libcloud.dns.types import Provider as Provider_dns
from libcloud.dns.providers import get_driver as get_driver_dns
from libcloud.dns.base import Record, Zone
from libcloud.utils.py3 import PY3
if PY3:
import urllib.request as url_req # pylint: disable=no-name-in-module
else:
import urllib2 as url_req
# Maximum number of 1-CPU nodes to allow to run simultaneously
MAX_NODES = 5
# String that all resource names created by the demo will start with
# WARNING: Any resource that has a matching name will be destroyed.
DEMO_BASE_NAME = 'lct'
# Datacenter to create resources in
DATACENTER = 'us-central1-f'
BACKUP_DATACENTER = 'us-east1-c'
# Clean up resources at the end (can be set to false in order to
# inspect resources at the end of the run). Resources will be cleaned
# at the beginning regardless.
CLEANUP = True
args = getattr(secrets, 'GCE_PARAMS', ())
kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
# Add datacenter to kwargs for Python 2.5 compatibility
kwargs = kwargs.copy()
kwargs['datacenter'] = DATACENTER
# ==== HELPER FUNCTIONS ====
def get_gce_driver():
    """Build and return a GCE compute driver using the secrets.py credentials."""
    return get_driver(Provider.GCE)(*args, **kwargs)
def get_gcelb_driver(gce_driver=None):
    """Return a GCE load-balancer driver.

    The LB driver performs all of its API calls through a GCE compute
    driver: pass an existing one to reuse it, otherwise a fresh compute
    driver is built internally from the same credentials.
    """
    lb_cls = get_driver_lb(Provider_lb.GCE)
    if not gce_driver:
        return lb_cls(*args, **kwargs)
    return lb_cls(gce_driver=gce_driver)
def get_dns_driver(gce_driver=None):
    """Return a Google DNS driver.

    The DNS driver performs all of its API calls through a GCE compute
    driver: pass an existing one to reuse it, otherwise a fresh compute
    driver is built internally from the same credentials.
    """
    dns_cls = get_driver_dns(Provider_dns.GOOGLE)
    if not gce_driver:
        return dns_cls(*args, **kwargs)
    return dns_cls(gce_driver=gce_driver)
def create_mig(gce, mig_base_name, zone, template, postfix, num_instances=2):
    """
    Creates MIG, sets named ports, modifies various text with 'postfix'.
    :param gce: An initialized GCE driver.
    :type gce: :class`GCENodeDriver`
    :param zone: Zone to create Managed Instance Group in.
    :type zone: :class:`GCEZone` or ``str``
    :param template: Instance Template to use in creating MIG.
    :type template: :class:`GCEInstanceTemplate`
    :param postfix: string to append to mig name, etc. Example: 'east',
    'central'
    :type postfix: ``str``
    :param num_instances: number of instances to create in MIG. Default is 2.
    :type num_instances: ``int``
    :returns: initialized Managed Instance Group.
    :rtype: :class:`GCEInstanceGroupManager`
    """
    mig_name = '%s-%s' % (mig_base_name, postfix)
    # The MIG name doubles as the base name for the instances it manages.
    mig = gce.ex_create_instancegroupmanager(
        mig_name, zone, template, num_instances, base_instance_name=mig_name,
        description='Demo for %s' % postfix)
    display(' Managed Instance Group [%s] "%s" created' % (postfix.upper(),
                                                           mig.name))
    display(' ... MIG instances created: %s' %
            ','.join([x['name'] for x in mig.list_managed_instances()]))
    # set the named_ports on the Instance Group.
    # (Named ports let load-balancer backends reference port 80 by name.)
    named_ports = [{'name': '%s-http' % DEMO_BASE_NAME, 'port': 80}]
    mig.set_named_ports(named_ports=named_ports)
    display(' ... MIG ports set: %s' % named_ports)
    return mig
def display(title, resource_list=None):
    """
    Display a list of resources, prefixing demo-owned ones with '=>'.

    :param title: String to be printed at the heading of the list.
    :type title: ``str``

    :param resource_list: List of resources to display; ``None`` is treated
                          as an empty list. (A mutable default argument is
                          deliberately avoided here.)
    :type resource_list: Any ``object`` with a C{name} attribute
    """
    # BUG FIX: the original default was a mutable `[]` shared across calls.
    if resource_list is None:
        resource_list = []
    print('=> %s' % title)
    for item in resource_list:
        if isinstance(item, Record):
            if item.name.startswith(DEMO_BASE_NAME):
                print('=> name=%s, type=%s' % (item.name, item.type))
            else:
                print(' name=%s, type=%s' % (item.name, item.type))
        elif isinstance(item, Zone):
            if item.domain.startswith(DEMO_BASE_NAME):
                print('=> name=%s, dnsname=%s' % (item.id, item.domain))
            else:
                print(' name=%s, dnsname=%s' % (item.id, item.domain))
        elif hasattr(item, 'name'):
            if item.name.startswith(DEMO_BASE_NAME):
                print('=> %s' % item.name)
            else:
                print(' %s' % item.name)
        else:
            if item.startswith(DEMO_BASE_NAME):
                print('=> %s' % item)
            else:
                print(' %s' % item)
def cleanup_only():
    """List every resource in the project, then destroy all resources whose
    name begins with DEMO_BASE_NAME. Load-balancer objects are removed first
    (they reference the nodes), then nodes, volumes and the rest.
    """
    start_time = datetime.datetime.now()
    display('Clean-up start time: %s' % str(start_time))
    gce = get_gce_driver()
    # Get project info and print name
    project = gce.ex_get_project()
    display('Project: %s' % project.name)
    # == Get Lists of Everything and Display the lists (up to 10) ==
    # These can either just return values for the current datacenter (zone)
    # or for everything.
    all_nodes = gce.list_nodes(ex_zone='all')
    display('Nodes:', all_nodes)
    all_addresses = gce.ex_list_addresses(region='all')
    display('Addresses:', all_addresses)
    all_volumes = gce.list_volumes(ex_zone='all')
    display('Volumes:', all_volumes)
    # This can return everything, but there is a large amount of overlap,
    # so we'll just get the sizes from the current zone.
    sizes = gce.list_sizes()
    display('Sizes:', sizes)
    # These are global
    firewalls = gce.ex_list_firewalls()
    display('Firewalls:', firewalls)
    networks = gce.ex_list_networks()
    display('Networks:', networks)
    images = gce.list_images()
    display('Images:', images)
    locations = gce.list_locations()
    display('Locations:', locations)
    zones = gce.ex_list_zones()
    display('Zones:', zones)
    snapshots = gce.ex_list_snapshots()
    display('Snapshots:', snapshots)
    gfrs = gce.ex_list_forwarding_rules(global_rules=True)
    display("Global Forwarding Rules", gfrs)
    targetproxies = gce.ex_list_targethttpproxies()
    display("Target HTTP Proxies", targetproxies)
    urlmaps = gce.ex_list_urlmaps()
    display("URLMaps", urlmaps)
    bes = gce.ex_list_backendservices()
    display("Backend Services", bes)
    migs = gce.ex_list_instancegroupmanagers(zone='all')
    display("Instance Group Managers", migs)
    its = gce.ex_list_instancetemplates()
    display("Instance Templates", its)
    hcs = gce.ex_list_healthchecks()
    display("Health Checks", hcs)
    # == Clean up any old demo resources ==
    display('Cleaning up any "%s" resources' % DEMO_BASE_NAME)
    # LB-layer resources first: they hold references to nodes/groups below.
    clean_up(gce, DEMO_BASE_NAME, None,
             gfrs + targetproxies + urlmaps + bes + hcs + migs + its)
    # == Pause to let cleanup occur and repopulate volume and node lists ==
    if len(migs):
        time.sleep(10)
    all_volumes = gce.list_volumes(ex_zone='all')
    all_nodes = gce.list_nodes(ex_zone='all')
    clean_up(gce, DEMO_BASE_NAME, all_nodes,
             all_addresses + all_volumes + firewalls + networks + snapshots)
    volumes = gce.list_volumes()
    clean_up(gce, DEMO_BASE_NAME, None, volumes)
    end_time = datetime.datetime.now()
    display('Total runtime: %s' % str(end_time - start_time))
def clean_up(gce, base_name, node_list=None, resource_list=None):
    """
    Destroy all resources that have a name beginning with 'base_name'.

    Nodes are destroyed in one batch via ex_destroy_multiple_nodes; every
    other resource is destroyed individually. A resource that has already
    vanished (ResourceNotFoundError) is reported and skipped; any other
    failure is reported and re-raised.

    :param base_name: String with the first part of the name of resources
                      to destroy
    :type base_name: ``str``

    :keyword node_list: List of nodes to consider for deletion
    :type node_list: ``list`` of :class:`Node`

    :keyword resource_list: List of resources to consider for deletion
    :type resource_list: ``list`` of I{Resource Objects}
    """
    if node_list is None:
        node_list = []
    if resource_list is None:
        resource_list = []
    # Use ex_destroy_multiple_nodes to destroy nodes
    del_nodes = []
    for node in node_list:
        if node.name.startswith(base_name):
            del_nodes.append(node)
    result = gce.ex_destroy_multiple_nodes(del_nodes)
    for i, success in enumerate(result):
        if success:
            display(' Deleted %s' % del_nodes[i].name)
        else:
            display(' Failed to delete %s' % del_nodes[i].name)
    # Destroy everything else with just the destroy method
    for resrc in resource_list:
        if resrc.name.startswith(base_name):
            try:
                resrc.destroy()
                class_name = resrc.__class__.__name__
                display(' Deleted %s (%s)' % (resrc.name, class_name))
            except ResourceNotFoundError:
                display(' Not found: %s (%s)' % (resrc.name,
                                                 resrc.__class__.__name__))
            except Exception:
                # BUG FIX: narrowed from a bare `except:` so that
                # SystemExit/KeyboardInterrupt are no longer intercepted;
                # the exception is still re-raised after reporting.
                class_name = resrc.__class__.__name__
                display(' Failed to Delete %s (%s)' % (resrc.name,
                                                       class_name))
                raise
def main_compute():
start_time = datetime.datetime.now()
display('Compute demo/test start time: %s' % str(start_time))
gce = get_gce_driver()
# Get project info and print name
project = gce.ex_get_project()
display('Project: %s' % project.name)
# == Get Lists of Everything and Display the lists (up to 10) ==
# These can either just return values for the current datacenter (zone)
# or for everything.
all_nodes = gce.list_nodes(ex_zone='all')
display('Nodes:', all_nodes)
all_addresses = gce.ex_list_addresses(region='all')
display('Addresses:', all_addresses)
all_volumes = gce.list_volumes(ex_zone='all')
display('Volumes:', all_volumes)
# This can return everything, but there is a large amount of overlap,
# so we'll just get the sizes from the current zone.
sizes = gce.list_sizes()
display('Sizes:', sizes)
# These are global
firewalls = gce.ex_list_firewalls()
display('Firewalls:', firewalls)
subnetworks = gce.ex_list_subnetworks()
display('Subnetworks:', subnetworks)
networks = gce.ex_list_networks()
display('Networks:', networks)
images = gce.list_images()
display('Images:', images)
locations = gce.list_locations()
display('Locations:', locations)
zones = gce.ex_list_zones()
display('Zones:', zones)
snapshots = gce.ex_list_snapshots()
display('Snapshots:', snapshots)
# == Clean up any old demo resources ==
display('Cleaning up any "%s" resources' % DEMO_BASE_NAME)
# Delete subnetworks first, networks last
clean_up(gce, DEMO_BASE_NAME, None, subnetworks)
clean_up(gce, DEMO_BASE_NAME, all_nodes,
all_addresses + all_volumes + firewalls + snapshots + networks)
# == Create a Legacy Network ==
display('Creating Legacy Network:')
name = '%s-legacy-network' % DEMO_BASE_NAME
cidr = '10.10.0.0/16'
network_legacy = gce.ex_create_network(name, cidr)
display(' Network %s created' % name)
# == Delete the Legacy Network ==
display('Delete Legacy Network:')
network_legacy.destroy()
display(' Network %s delete' % name)
# == Create an auto network ==
display('Creating Auto Network:')
name = '%s-auto-network' % DEMO_BASE_NAME
network_auto = gce.ex_create_network(name, cidr=None, mode='auto')
display(' AutoNetwork %s created' % network_auto.name)
# == Display subnetworks from the auto network ==
subnets = []
for sn in network_auto.subnetworks:
subnets.append(gce.ex_get_subnetwork(sn))
display('Display subnetworks:', subnets)
# == Delete the auto network ==
display('Delete Auto Network:')
network_auto.destroy()
display(' AutoNetwork %s deleted' % name)
# == Create an custom network ==
display('Creating Custom Network:')
name = '%s-custom-network' % DEMO_BASE_NAME
network_custom = gce.ex_create_network(name, cidr=None, mode='custom')
display(' Custom Network %s created' % network_custom.name)
# == Create a subnetwork ==
display('Creating Subnetwork:')
sname = '%s-subnetwork' % DEMO_BASE_NAME
region = 'us-central1'
cidr = '192.168.17.0/24'
subnet = gce.ex_create_subnetwork(sname, cidr, network_custom, region)
display(' Subnetwork %s created' % subnet.name)
# Refresh object, now that it has a subnet
network_custom = gce.ex_get_network(name)
# == Display subnetworks from the auto network ==
subnets = []
for sn in network_custom.subnetworks:
subnets.append(gce.ex_get_subnetwork(sn))
display('Display custom subnetworks:', subnets)
# == Launch instance in custom subnetwork ==
display('Creating Node in custom subnetwork:')
name = '%s-subnet-node' % DEMO_BASE_NAME
node_1 = gce.create_node(name, 'g1-small', 'debian-8',
ex_disk_auto_delete=True,
ex_network=network_custom, ex_subnetwork=subnet)
display(' Node %s created' % name)
# == Destroy instance in custom subnetwork ==
display('Destroying Node in custom subnetwork:')
node_1.destroy()
display(' Node %s destroyed' % name)
# == Delete an subnetwork ==
display('Delete Custom Subnetwork:')
subnet.destroy()
display(' Custom Subnetwork %s deleted' % sname)
is_deleted = False
while not is_deleted:
time.sleep(3)
try:
subnet = gce.ex_get_subnetwork(sname, region)
except ResourceNotFoundError:
is_deleted = True
# == Delete the auto network ==
display('Delete Custom Network:')
network_custom.destroy()
display(' Custom Network %s deleted' % name)
# == Create Node with disk auto-created ==
if MAX_NODES > 1:
display('Creating a node with boot/local-ssd using GCE structure:')
name = '%s-gstruct' % DEMO_BASE_NAME
img_url = "projects/debian-cloud/global/images/"
img_url += "backports-debian-7-wheezy-v20141205"
disk_type_url = "projects/%s/zones/us-central1-f/" % project.name
disk_type_url += "diskTypes/local-ssd"
gce_disk_struct = [
{
"type": "PERSISTENT",
"deviceName": '%s-gstruct' % DEMO_BASE_NAME,
"initializeParams": {
"diskName": '%s-gstruct' % DEMO_BASE_NAME,
"sourceImage": img_url
},
"boot": True,
"autoDelete": True
}, {
"type": "SCRATCH",
"deviceName": '%s-gstruct-lssd' % DEMO_BASE_NAME,
"initializeParams": {
"diskType": disk_type_url
},
"autoDelete": True
}
]
node_gstruct = gce.create_node(name, 'n1-standard-1', None,
'us-central1-f',
ex_disks_gce_struct=gce_disk_struct)
num_disks = len(node_gstruct.extra['disks'])
display(' Node %s created with %d disks' % (node_gstruct.name,
num_disks))
display('Creating Node with auto-created SSD:')
name = '%s-np-node' % DEMO_BASE_NAME
node_1 = gce.create_node(name, 'n1-standard-1', 'debian-7',
ex_tags=['libcloud'], ex_disk_type='pd-ssd',
ex_disk_auto_delete=False)
display(' Node %s created' % name)
# Stop the node and change to a custom machine type (e.g. size)
display('Stopping node, setting custom size, starting node:')
name = '%s-np-node' % DEMO_BASE_NAME
gce.ex_stop_node(node_1)
gce.ex_set_machine_type(node_1, 'custom-2-4096') # 2 vCPU, 4GB RAM
gce.ex_start_node(node_1)
node_1 = gce.ex_get_node(name)
display(' %s: state=%s, size=%s' % (name, node_1.extra['status'],
node_1.size))
# == Create, and attach a disk ==
display('Creating a new disk:')
disk_name = '%s-attach-disk' % DEMO_BASE_NAME
volume = gce.create_volume(10, disk_name)
if gce.attach_volume(node_1, volume, ex_auto_delete=True):
display(' Attached %s to %s' % (volume.name, node_1.name))
display(' Disabled auto-delete for %s on %s' % (volume.name,
node_1.name))
gce.ex_set_volume_auto_delete(volume, node_1, auto_delete=False)
if CLEANUP:
# == Detach the disk ==
if gce.detach_volume(volume, ex_node=node_1):
display(' Detached %s from %s' % (volume.name, node_1.name))
# == Create Snapshot ==
display('Creating a snapshot from existing disk:')
# Create a disk to snapshot
vol_name = '%s-snap-template' % DEMO_BASE_NAME
image = gce.ex_get_image('debian-7')
vol = gce.create_volume(None, vol_name, image=image)
display('Created disk %s to shapshot:' % DEMO_BASE_NAME)
# Snapshot volume
snapshot = vol.snapshot('%s-snapshot' % DEMO_BASE_NAME)
display(' Snapshot %s created' % snapshot.name)
# == Create Node with existing disk ==
display('Creating Node with existing disk:')
name = '%s-persist-node' % DEMO_BASE_NAME
# Use objects this time instead of names
# Get latest Debian 7 image
image = gce.ex_get_image('debian-7')
# Get Machine Size
size = gce.ex_get_size('n1-standard-1')
# Create Disk from Snapshot created above
volume_name = '%s-boot-disk' % DEMO_BASE_NAME
volume = gce.create_volume(None, volume_name, snapshot=snapshot)
display(' Created %s from snapshot' % volume.name)
# Create Node with Disk
node_2 = gce.create_node(name, size, image, ex_tags=['libcloud'],
ex_boot_disk=volume, ex_disk_auto_delete=False)
display(' Node %s created with attached disk %s' % (node_2.name,
volume.name))
# == Update Tags for Node ==
display('Updating Tags for %s:' % node_2.name)
tags = node_2.extra['tags']
tags.append('newtag')
if gce.ex_set_node_tags(node_2, tags):
display(' Tags updated for %s' % node_2.name)
check_node = gce.ex_get_node(node_2.name)
display(' New tags: %s' % check_node.extra['tags'])
# == Setting Metadata for Node ==
display('Setting Metadata for %s:' % node_2.name)
if gce.ex_set_node_metadata(node_2, {'foo': 'bar', 'baz': 'foobarbaz'}):
display(' Metadata updated for %s' % node_2.name)
check_node = gce.ex_get_node(node_2.name)
display(' New Metadata: %s' % check_node.extra['metadata'])
# == Create Multiple nodes at once ==
base_name = '%s-multiple-nodes' % DEMO_BASE_NAME
number = MAX_NODES - 2
if number > 0:
display('Creating Multiple Nodes (%s):' % number)
multi_nodes = gce.ex_create_multiple_nodes(
base_name, size, image, number, ex_tags=['libcloud'],
ex_disk_auto_delete=True)
for node in multi_nodes:
display(' Node %s created' % node.name)
# == Create a Network ==
display('Creating Network:')
name = '%s-network' % DEMO_BASE_NAME
cidr = '10.10.0.0/16'
network_1 = gce.ex_create_network(name, cidr)
display(' Network %s created' % network_1.name)
# == Create a Firewall ==
display('Creating a Firewall:')
name = '%s-firewall' % DEMO_BASE_NAME
allowed = [{'IPProtocol': 'tcp', 'ports': ['3141']}]
firewall_1 = gce.ex_create_firewall(name, allowed, network=network_1,
source_tags=['libcloud'])
display(' Firewall %s created' % firewall_1.name)
# == Create a Static Address ==
display('Creating an Address:')
name = '%s-address' % DEMO_BASE_NAME
address_1 = gce.ex_create_address(name)
display(' Address %s created with IP %s' % (address_1.name,
address_1.address))
# == List Updated Resources in current zone/region ==
display('Updated Resources in current zone/region')
nodes = gce.list_nodes()
display('Nodes:', nodes)
addresses = gce.ex_list_addresses()
display('Addresses:', addresses)
firewalls = gce.ex_list_firewalls()
display('Firewalls:', firewalls)
subnetworks = gce.ex_list_subnetworks()
display('Subnetworks:', subnetworks)
networks = gce.ex_list_networks()
display('Networks:', networks)
snapshots = gce.ex_list_snapshots()
display('Snapshots:', snapshots)
if CLEANUP:
display('Cleaning up %s resources created' % DEMO_BASE_NAME)
clean_up(gce, DEMO_BASE_NAME, None, subnetworks)
clean_up(gce, DEMO_BASE_NAME, nodes,
addresses + firewalls + snapshots + networks)
volumes = gce.list_volumes()
clean_up(gce, DEMO_BASE_NAME, None, volumes)
end_time = datetime.datetime.now()
display('Total runtime: %s' % str(end_time - start_time))
# ==== LOAD BALANCER CODE STARTS HERE ====
def main_load_balancer():
    """Demo / live test of the GCE load-balancer (gcelb) driver.

    Follows the GCE Load Balancing Quickstart: creates three Apache web
    nodes, a firewall opening port 80, an HTTP health check and a TCP
    balancer; exercises member attach/detach; then repeatedly fetches the
    balancer's IP to show requests being served by different members.
    All created resources are removed at the end when CLEANUP is set.
    """
    start_time = datetime.datetime.now()
    display('Load-balancer demo/test start time: %s' % str(start_time))
    gce = get_gce_driver()
    gcelb = get_gcelb_driver(gce)
    # Get project info and print name
    project = gce.ex_get_project()
    display('Project: %s' % project.name)
    # Existing Balancers
    balancers = gcelb.list_balancers()
    display('Load Balancers', balancers)
    # Protocols
    protocols = gcelb.list_protocols()
    display('Protocols', protocols)
    # Healthchecks
    healthchecks = gcelb.ex_list_healthchecks()
    display('Health Checks', healthchecks)
    # This demo is based on the GCE Load Balancing Quickstart described here:
    # https://developers.google.com/compute/docs/load-balancing/lb-quickstart
    # == Clean-up and existing demo resources ==
    all_nodes = gce.list_nodes(ex_zone='all')
    firewalls = gce.ex_list_firewalls()
    display('Cleaning up any "%s" resources' % DEMO_BASE_NAME)
    clean_up(gce, DEMO_BASE_NAME, all_nodes,
             balancers + healthchecks + firewalls)
    # == Create 3 nodes to balance between ==
    # The startup script installs apache and publishes each node's hostname
    # so we can tell which member served a given request.
    startup_script = ('apt-get -y update && '
                      'apt-get -y install apache2 && '
                      'hostname > /var/www/index.html')
    tag = '%s-www' % DEMO_BASE_NAME
    base_name = '%s-www' % DEMO_BASE_NAME
    image = gce.ex_get_image('debian-7')
    size = gce.ex_get_size('n1-standard-1')
    number = 3
    display('Creating %d nodes' % number)
    metadata = {'items': [{'key': 'startup-script', 'value': startup_script}]}
    lb_nodes = gce.ex_create_multiple_nodes(
        base_name, size, image, number, ex_tags=[tag], ex_metadata=metadata,
        ex_disk_auto_delete=True, ignore_errors=False)
    display('Created Nodes', lb_nodes)
    # == Create a Firewall for instances ==
    display('Creating a Firewall')
    name = '%s-firewall' % DEMO_BASE_NAME
    allowed = [{'IPProtocol': 'tcp', 'ports': ['80']}]
    firewall = gce.ex_create_firewall(name, allowed, target_tags=[tag])
    display(' Firewall %s created' % firewall.name)
    # == Create a Health Check ==
    display('Creating a HealthCheck')
    name = '%s-healthcheck' % DEMO_BASE_NAME
    # These are all the default values, but listed here as an example. To
    # create a healthcheck with the defaults, only name is required.
    hc = gcelb.ex_create_healthcheck(
        name, host=None, path='/', port='80', interval=5, timeout=5,
        unhealthy_threshold=2, healthy_threshold=2)
    display('Healthcheck %s created' % hc.name)
    # == Create Load Balancer ==
    display('Creating Load Balancer')
    name = '%s-lb' % DEMO_BASE_NAME
    port = 80
    protocol = 'tcp'
    algorithm = None
    members = lb_nodes[:2]  # Only attach the first two initially
    healthchecks = [hc]
    balancer = gcelb.create_balancer(name, port, protocol, algorithm, members,
                                     ex_healthchecks=healthchecks)
    display(' Load Balancer %s created' % balancer.name)
    # == Attach third Node ==
    display('Attaching additional node to Load Balancer')
    member = balancer.attach_compute_node(lb_nodes[2])
    display(' Attached %s to %s' % (member.id, balancer.name))
    # == Show Balancer Members ==
    members = balancer.list_members()
    display('Load Balancer Members')
    for member in members:
        display(' ID: %s IP: %s' % (member.id, member.ip))
    # == Remove a Member ==
    display('Removing a Member')
    detached = members[0]
    detach = balancer.detach_member(detached)
    if detach:
        display(' Member %s detached from %s' % (detached.id,
                                                 balancer.name))
    # == Show Updated Balancer Members ==
    members = balancer.list_members()
    display('Updated Load Balancer Members')
    for member in members:
        display(' ID: %s IP: %s' % (member.id, member.ip))
    # == Reattach Member ==
    display('Reattaching Member')
    member = balancer.attach_member(detached)
    display(' Member %s attached to %s' % (member.id, balancer.name))
    # == Test Load Balancer by connecting to it multiple times ==
    # Give the (re)attached members time to pass health checks first.
    PAUSE = 60
    display('Sleeping for %d seconds for LB members to serve...' % PAUSE)
    time.sleep(PAUSE)
    rounds = 200
    url = 'http://%s/' % balancer.ip
    line_length = 75
    display('Connecting to %s %s times' % (url, rounds))
    for x in range(rounds):
        response = url_req.urlopen(url)
        if PY3:
            output = str(response.read(), encoding='utf-8').strip()
        else:
            output = response.read().strip()
        # Pad each hostname differently (center/right/left) so it is
        # visually obvious on one terminal line when the serving member
        # changes between requests.
        if 'www-001' in output:
            padded_output = output.center(line_length)
        elif 'www-002' in output:
            padded_output = output.rjust(line_length)
        else:
            padded_output = output.ljust(line_length)
        sys.stdout.write('\r%s' % padded_output)
        sys.stdout.flush()
        time.sleep(.25)
    print('')
    if CLEANUP:
        balancers = gcelb.list_balancers()
        healthchecks = gcelb.ex_list_healthchecks()
        nodes = gce.list_nodes(ex_zone='all')
        firewalls = gce.ex_list_firewalls()
        display('Cleaning up %s resources created' % DEMO_BASE_NAME)
        clean_up(gce, DEMO_BASE_NAME, nodes,
                 balancers + healthchecks + firewalls)
    end_time = datetime.datetime.now()
    display('Total runtime: %s' % str(end_time - start_time))
# ==== BACKEND SERVICE LOAD BALANCER CODE STARTS HERE ====
def main_backend_service():
    """Demo / live test of an HTTP load balancer built from a
    BackendService plus a global forwarding rule.

    Creates, in order: an instance template, one managed instance group
    per zone, a health check, a backend service spanning both groups, a
    URL map, a target HTTP proxy, a global static address, a global
    forwarding rule and a firewall.  Everything is deleted at the end
    when CLEANUP is set.
    """
    start_time = datetime.datetime.now()
    display('Backend Service w/Global Forwarding Rule demo/test start time: %s'
            % str(start_time))
    gce = get_gce_driver()
    # Get project info and print name
    project = gce.ex_get_project()
    display('Project: %s' % project.name)
    # Based on the instructions at:
    # https://cloud.google.com/compute/docs/load-balancing/http/#overview
    zone_central = DATACENTER
    zone_east = BACKUP_DATACENTER
    it_name = '%s-instancetemplate' % DEMO_BASE_NAME
    mig_name = '%s-mig' % DEMO_BASE_NAME
    hc_name = '%s-healthcheck' % DEMO_BASE_NAME
    bes_name = '%s-bes' % DEMO_BASE_NAME
    urlmap_name = '%s-urlmap' % DEMO_BASE_NAME
    targethttpproxy_name = '%s-httptargetproxy' % DEMO_BASE_NAME
    address_name = '%s-address' % DEMO_BASE_NAME
    gfr_name = '%s-gfr' % DEMO_BASE_NAME
    firewall_name = '%s-firewall' % DEMO_BASE_NAME
    startup_script = ('apt-get -y update && '
                      'apt-get -y install apache2 && '
                      'echo "$(hostname)" > /var/www/html/index.html')
    tag = '%s-mig-www' % DEMO_BASE_NAME
    metadata = {'items': [{'key': 'startup-script', 'value': startup_script}]}
    # Pre-declare every resource handle so the clean-up list below is valid
    # even if an intermediate creation step fails.
    mig_central = None
    mig_east = None
    bes = None
    urlmap = None
    tp = None
    address = None
    gfr = None
    firewall = None
    display('Create a BackendService')
    # == Create an Instance Template ==
    it = gce.ex_create_instancetemplate(it_name, size='n1-standard-1',
                                        image='debian-8', network='default',
                                        metadata=metadata, tags=[tag])
    display(' InstanceTemplate "%s" created' % it.name)
    # == Create a MIG ==
    mig_central = create_mig(gce, mig_name, zone_central, it, 'central')
    mig_east = create_mig(gce, mig_name, zone_east, it, 'east')
    # == Create a Health Check ==
    hc = gce.ex_create_healthcheck(hc_name, host=None, path='/', port='80',
                                   interval=30, timeout=10,
                                   unhealthy_threshold=10, healthy_threshold=1)
    display(' Healthcheck %s created' % hc.name)
    # == Create a Backend Service ==
    be_central = gce.ex_create_backend(
        instance_group=mig_central.instance_group)
    be_east = gce.ex_create_backend(instance_group=mig_east.instance_group)
    bes = gce.ex_create_backendservice(
        bes_name, [hc], backends=[be_central, be_east], port_name='%s-http' %
        DEMO_BASE_NAME, protocol='HTTP', description='%s bes desc' %
        DEMO_BASE_NAME, timeout_sec=60, enable_cdn=False)
    display(' Backend Service "%s" created' % bes.name)
    # == Create a URLMap ==
    urlmap = gce.ex_create_urlmap(urlmap_name, default_service=bes)
    display(' URLMap "%s" created' % urlmap.name)
    # == Create a Target (HTTP) Proxy ==
    tp = gce.ex_create_targethttpproxy(targethttpproxy_name, urlmap)
    display(' TargetProxy "%s" created' % tp.name)
    # == Create a Static Address ==
    address = gce.ex_create_address(address_name, region='global')
    display(' Address "%s" created with IP "%s"' % (address.name,
                                                    address.address))
    # == Create a Global Forwarding Rule ==
    gfr = gce.ex_create_forwarding_rule(
        gfr_name, target=tp, address=address, port_range='80',
        description='%s libcloud forwarding rule http test' % DEMO_BASE_NAME,
        global_rule=True)
    display(' Global Forwarding Rule "%s" created' % (gfr.name))
    # == Create a Firewall for instances ==
    allowed = [{'IPProtocol': 'tcp', 'ports': ['80']}]
    firewall = gce.ex_create_firewall(firewall_name, allowed,
                                      target_tags=[tag])
    display(' Firewall %s created' % firewall.name)
    # TODO(supertom): launch instances to demostrate that it works
    # take backends out of service. Adding in this functionality
    # will also add 10-15 minutes to the demo.
    # display("Sleeping for 10 minutes, starting at %s" %
    # str(datetime.datetime.now()))
    # time.sleep(600)
    if CLEANUP:
        # Deletion order matters: dependents (forwarding rule, proxy, urlmap)
        # are listed before the resources they reference.
        display('Cleaning up %s resources created' % DEMO_BASE_NAME)
        clean_up(gce, DEMO_BASE_NAME, None,
                 resource_list=[firewall, gfr, address, tp, urlmap, bes, hc,
                                mig_central, mig_east, it])
    end_time = datetime.datetime.now()
    display('Total runtime: %s' % str(end_time - start_time))
# ==== GOOGLE DNS CODE STARTS HERE ====
def main_dns():
    """Demo / live test of the Google Cloud DNS driver.

    Read-only: lists the project's managed zones and the records in each
    zone.  Record creation/modification is not yet exercised (see the
    TODO below).
    """
    start_time = datetime.datetime.now()
    display('DNS demo/test start time: %s' % str(start_time))
    gce = get_gce_driver()
    gdns = get_dns_driver()
    # Get project info and print name
    project = gce.ex_get_project()
    display('Project: %s' % project.name)
    # Get list of managed zones
    zones = gdns.iterate_zones()
    display('Zones', zones)
    # Get list of records
    # Fetch a fresh iterator -- the first one was presumably consumed by the
    # display() call above (TODO confirm iterate_zones returns a generator).
    zones = gdns.iterate_zones()
    for z in zones:
        records = gdns.iterate_records(z)
        display('Records for managed zone "%s"' % z.id, records)
    # TODO(erjohnso): Finish this DNS section. Challenging in that you need to
    # own a domain, so testing will require user customization. Perhaps a new
    # command-line required flag unless --skip-dns is supplied. Also, real
    # e2e testing should try to do DNS lookups on new records, but DNS TTL
    # and propagation delays will introduce limits on what can be tested.
    end_time = datetime.datetime.now()
    display('Total runtime: %s' % str(end_time - start_time))
if __name__ == '__main__':
    # Command-line entry point.  Each demo suite is opt-in via its own flag;
    # --cleanup-only skips every test and just removes demo resources.
    parser = argparse.ArgumentParser(
        description='Google Cloud Platform Demo / Live Test Script')
    parser.add_argument("--compute", help="perform compute demo / live tests",
                        dest="compute", action="store_true")
    parser.add_argument("--load-balancer",
                        help="perform load-balancer demo / live tests",
                        dest="lb", action="store_true")
    parser.add_argument("--backend-service",
                        help="perform backend-service demo / live tests",
                        dest="bes", action="store_true")
    parser.add_argument("--dns", help="perform DNS demo / live tests",
                        dest="dns", action="store_true")
    parser.add_argument("--cleanup-only",
                        help="perform clean-up (skips all tests)",
                        dest="cleanup", action="store_true")
    cl_args = parser.parse_args()
    if cl_args.cleanup:
        cleanup_only()
    else:
        # The flags are not mutually exclusive; any combination may run.
        if cl_args.compute:
            main_compute()
        if cl_args.lb:
            main_load_balancer()
        if cl_args.dns:
            main_dns()
        if cl_args.bes:
            main_backend_service()
| apache-2.0 |
yashodhank/erpnext | erpnext/config/buying.py | 1 | 4418 | from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the Buying module's desk configuration.

    Each dict in the returned list describes one card on the Buying
    module page: a translated ``label`` (optionally an ``icon``) plus
    ``items`` linking to doctypes, tree views, pages, query reports or
    help videos.  Consumed by frappe's module-page renderer.
    """
    return [
        {
            "label": _("Purchasing"),
            "icon": "fa fa-star",
            "items": [
                {
                    "type": "doctype",
                    "name": "Material Request",
                    "description": _("Request for purchase."),
                },
                {
                    "type": "doctype",
                    "name": "Request for Quotation",
                    "description": _("Request for quotation."),
                },
                {
                    "type": "doctype",
                    "name": "Supplier Quotation",
                    "description": _("Quotations received from Suppliers."),
                },
                {
                    "type": "doctype",
                    "name": "Purchase Order",
                    "description": _("Purchase Orders given to Suppliers."),
                },
            ]
        },
        {
            "label": _("Supplier"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Supplier",
                    "description": _("Supplier database."),
                },
                {
                    "type": "doctype",
                    "name": "Supplier Type",
                    "description": _("Supplier Type master.")
                },
                {
                    "type": "doctype",
                    "name": "Contact",
                    "description": _("All Contacts."),
                },
                {
                    "type": "doctype",
                    "name": "Address",
                    "description": _("All Addresses."),
                },
            ]
        },
        {
            "label": _("Setup"),
            "icon": "fa fa-cog",
            "items": [
                {
                    "type": "doctype",
                    "name": "Buying Settings",
                    "description": _("Default settings for buying transactions.")
                },
                {
                    "type": "doctype",
                    "name": "Terms and Conditions",
                    "label": _("Terms and Conditions Template"),
                    "description": _("Template of terms or contract.")
                },
                {
                    "type": "doctype",
                    "name": "Purchase Taxes and Charges Template",
                    "description": _("Tax template for buying transactions.")
                },
            ]
        },
        {
            "label": _("Items and Pricing"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Item",
                    "description": _("All Products or Services."),
                },
                {
                    "type": "doctype",
                    "name": "Product Bundle",
                    "description": _("Bundle items at time of sale."),
                },
                {
                    "type": "doctype",
                    "name": "Price List",
                    "description": _("Price List master.")
                },
                {
                    # Rendered as a tree view rather than a list view.
                    "type": "doctype",
                    "name": "Item Group",
                    "icon": "fa fa-sitemap",
                    "label": _("Item Group"),
                    "link": "Tree/Item Group",
                    "description": _("Tree of Item Groups."),
                },
                {
                    "type": "doctype",
                    "name": "Item Price",
                    "description": _("Multiple Item prices."),
                    "route": "Report/Item Price"
                },
                {
                    "type": "doctype",
                    "name": "Pricing Rule",
                    "description": _("Rules for applying pricing and discount.")
                },
            ]
        },
        {
            "label": _("Analytics"),
            "icon": "fa fa-table",
            "items": [
                {
                    "type": "page",
                    "name": "purchase-analytics",
                    "label": _("Purchase Analytics"),
                    "icon": "fa fa-bar-chart",
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Supplier-Wise Sales Analytics",
                    "doctype": "Stock Ledger Entry"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Purchase Order Trends",
                    "doctype": "Purchase Order"
                },
            ]
        },
        {
            "label": _("Other Reports"),
            "icon": "fa fa-list",
            "items": [
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Items To Be Requested",
                    "doctype": "Item"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Requested Items To Be Ordered",
                    "doctype": "Material Request"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Material Requests for which Supplier Quotations are not created",
                    "doctype": "Material Request"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Item-wise Purchase History",
                    "doctype": "Item"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Supplier Addresses and Contacts",
                    "doctype": "Supplier"
                },
            ]
        },
        {
            "label": _("Help"),
            "items": [
                # "youtube_id" entries render as embedded tutorial videos.
                {
                    "type": "help",
                    "label": _("Customer and Supplier"),
                    "youtube_id": "anoGi_RpQ20"
                },
                {
                    "type": "help",
                    "label": _("Material Request to Purchase Order"),
                    "youtube_id": "4TN9kPyfIqM"
                },
                {
                    "type": "help",
                    "label": _("Purchase Order to Payment"),
                    "youtube_id": "EK65tLdVUDk"
                },
                {
                    "type": "help",
                    "label": _("Managing Subcontracting"),
                    "youtube_id": "ThiMCC2DtKo"
                },
            ]
        },
    ]
| agpl-3.0 |
gavinp/chromium | third_party/protobuf/python/google/protobuf/message.py | 261 | 9669 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# TODO(robinson): We should just make these methods all "pure-virtual" and move
# all implementation out, into reflection.py for now.
"""Contains an abstract base class for protocol messages."""
__author__ = 'robinson@google.com (Will Robinson)'
class Error(Exception):
  """Base class for errors raised by this module."""
  pass


class DecodeError(Error):
  """Raised when parsing serialized protocol buffer data fails."""
  pass


class EncodeError(Error):
  """Raised when serializing a message fails (e.g. the message is not
  initialized -- see SerializeToString)."""
  pass
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler.  These generated types subclass Message and implement the methods
  shown below.

  TODO(robinson): Link to an HTML document here.

  TODO(robinson): Document that instances of this class will also
  have an Extensions attribute with __getitem__ and __setitem__.
  Again, not sure how to best convey this.

  TODO(robinson): Document that the class must also have a static
    RegisterExtension(extension_field) method.
    Not sure how to best express at this point.
  """

  # TODO(robinson): Document these fields and methods.

  # Empty slots: the abstract base itself carries no instance attributes.
  __slots__ = []

  # None here; intended to be provided by concrete (generated) subclasses.
  DESCRIPTOR = None

  def __deepcopy__(self, memo=None):
    # Deep copy via MergeFrom into a fresh instance of the same type;
    # |memo| is accepted for the copy protocol but unused.
    clone = type(self)()
    clone.MergeFrom(self)
    return clone

  def __eq__(self, other_msg):
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __hash__(self):
    # Messages are mutable, so they are deliberately unhashable.
    raise TypeError('unhashable object')

  def __str__(self):
    raise NotImplementedError

  def __unicode__(self):
    raise NotImplementedError

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg: Message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg: Message to copy into the current one.
    """
    if self is other_msg:
      # Copying a message onto itself would otherwise Clear() and lose data.
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def SetInParent(self):
    """Mark this as present in the parent.

    This normally happens automatically when you assign a field of a
    sub-message, but sometimes you want to make the sub-message
    present while keeping it empty.  If you find yourself using this,
    you may want to reconsider your design."""
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      The method returns True if the message is initialized (i.e. all of its
      required fields are set).
    """
    raise NotImplementedError

  # TODO(robinson): MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read.  Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the no-return
  # MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in |serialized| that is already present
    in this message:
      - If it's a "repeated" field, we append to the end of our list.
      - Else, if it's a scalar, we overwrite our field.
      - Else, (it's a nonrepeated composite), we recursively merge
        into the existing composite.

    TODO(robinson): Document handling of unknown fields.

    Args:
      serialized: Any object that allows us to call buffer(serialized)
        to access a string of bytes using the buffer interface.

    TODO(robinson): When we switch to a helper, this will return None.

    Returns:
      The number of bytes read from |serialized|.
      For non-group messages, this will always be len(serialized),
      but for messages which are actually groups, this will
      generally be less than len(serialized), since we must
      stop when we reach an END_GROUP tag.  Note that if
      we *do* stop because of an END_GROUP tag, the number
      of bytes returned does not include the bytes
      for the END_GROUP tag information.
    """
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Like MergeFromString(), except we clear the object first."""
    self.Clear()
    self.MergeFromString(serialized)

  def SerializeToString(self):
    """Serializes the protocol message to a binary string.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      message.EncodeError if the message isn't initialized.
    """
    raise NotImplementedError

  def SerializePartialToString(self):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Returns:
      A string representation of the partial message.
    """
    raise NotImplementedError

  # TODO(robinson): Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves.  This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO(robinson): Be sure to document (and test) exactly
  # which field names are accepted here.  Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """

  def ListFields(self):
    """Returns a list of (FieldDescriptor, value) tuples for all
    fields in the message which are not empty.  A singular field is non-empty
    if HasField() would return true, and a repeated field is non-empty if
    it contains at least one element.  The fields are ordered by field
    number"""
    raise NotImplementedError

  def HasField(self, field_name):
    """Checks if a certain field is set for the message. Note if the
    field_name is not defined in the message descriptor, ValueError will be
    raised."""
    raise NotImplementedError

  def ClearField(self, field_name):
    raise NotImplementedError

  def HasExtension(self, extension_handle):
    raise NotImplementedError

  def ClearExtension(self, extension_handle):
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.
    Recursively calls ByteSize() on all contained messages.
    """
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.
    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child or
    descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener.  Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError
| bsd-3-clause |
kkragenbrink/node-gyp | gyp/pylib/gyp/msvs_emulation.py | 73 | 31885 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
  """Quote a command line argument so that it appears as one argument when
  processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
  Windows programs)."""
  # CommandLineToArgvW requires 2n+1 backslashes before a literal '"' to
  # produce n backslashes plus the quote, so double each run of backslashes
  # that precedes a quote and escape the quote itself.  See http://goo.gl/cuFbX
  # and http://goo.gl/dhPnp for the gory details.
  escaped = windows_quoter_regex.sub(lambda m: m.group(1) * 2 + '\\"', arg)

  # Double '%' so cmd.exe batch processing passes a literal percent through
  # instead of treating it as a positional argument or environment reference.
  # (These strings go into rsp files, so no shell ^-escaping is needed.)
  escaped = escaped.replace('%', '%%')

  # Finally wrap the whole thing in quotes so the escaping above applies and
  # embedded whitespace is not a word break.
  return '"%s"' % escaped
def EncodeRspFileList(args):
  """Process a list of arguments using QuoteCmdExeArgument."""
  # The first argument is the command; leave it unquoted so built-ins like
  # 'echo' keep working.  For 'call ../x.bat' normalize only the path part,
  # otherwise the whole string would be (mis)treated as one path.
  if not args:
    return ''
  first = args[0]
  if first.startswith('call '):
    call, script = first.split(' ', 1)
    program = '%s %s' % (call, os.path.normpath(script))
  else:
    program = os.path.normpath(first)
  quoted_rest = [QuoteForRspFile(item) for item in args[1:]]
  return program + ' ' + ' '.join(quoted_rest)
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
  """Try to find an installation location for the DirectX SDK. Check for the
  standard environment variable, and if that doesn't exist, try to find
  via the registry. May return None if not found in either location."""
  # Return previously calculated value, if there is one (memoized on the
  # function object itself so the registry is queried at most once).
  if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
    return _FindDirectXInstallation.dxsdk_dir

  dxsdk_dir = os.environ.get('DXSDK_DIR')
  if not dxsdk_dir:
    # Setup params to pass to and attempt to launch reg.exe.
    cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in p.communicate()[0].splitlines():
      if 'InstallPath' in line:
        # NOTE(review): assumes reg.exe's 'InstallPath' line puts the path at
        # index 3 when split on single spaces -- output-format dependent;
        # the trailing backslash lets callers append paths directly.
        dxsdk_dir = line.split(' ')[3] + "\\"

  # Cache return value (even when it is None, so we don't re-query).
  _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
  return dxsdk_dir
class MsvsSettings(object):
  """A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
  class helps map those settings to command line options."""

  def __init__(self, spec, generator_flags):
    self.spec = spec
    self.vs_version = GetVSVersion(generator_flags)
    self.dxsdk_dir = _FindDirectXInstallation()

    # Try to find an installation location for the Windows DDK by checking
    # the WDK_DIR environment variable, may be None.
    self.wdk_dir = os.environ.get('WDK_DIR')

    # For each supported 'msvs_*' key, build a per-configuration dict; a
    # missing key defaults to an empty instance of its declared type.
    supported_fields = [
        ('msvs_configuration_attributes', dict),
        ('msvs_settings', dict),
        ('msvs_system_include_dirs', list),
        ('msvs_disabled_warnings', list),
        ('msvs_precompiled_header', str),
        ('msvs_precompiled_source', str),
        ('msvs_target_platform', str),
        ]
    configs = spec['configurations']
    for field, default in supported_fields:
      setattr(self, field, {})
      for configname, config in configs.iteritems():
        getattr(self, field)[configname] = config.get(field, default())

    self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])

  def GetVSMacroEnv(self, base_to_build=None, config=None):
    """Get a dict of variables mapping internal VS macro names to their gyp
    equivalents."""
    target_platform = self.GetTargetPlatform(config)
    # VS spells the 32-bit platform 'Win32' where gyp uses 'x86'.
    target_platform = {'x86': 'Win32'}.get(target_platform, target_platform)
    replacements = {
        '$(VSInstallDir)': self.vs_version.Path(),
        '$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\',
        '$(OutDir)\\': base_to_build + '\\' if base_to_build else '',
        '$(IntDir)': '$!INTERMEDIATE_DIR',
        '$(InputPath)': '${source}',
        '$(InputName)': '${root}',
        '$(ProjectName)': self.spec['target_name'],
        '$(PlatformName)': target_platform,
        '$(ProjectDir)\\': '',
    }
    # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
    # set. This happens when the SDK is sync'd via src-internal, rather than
    # by typical end-user installation of the SDK. If it's not set, we don't
    # want to leave the unexpanded variable in the path, so simply strip it.
    replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
    replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
    return replacements

  def ConvertVSMacros(self, s, base_to_build=None, config=None):
    """Convert from VS macro names to something equivalent."""
    env = self.GetVSMacroEnv(base_to_build, config=config)
    return ExpandMacros(s, env)

  def AdjustLibraries(self, libraries):
    """Strip -l from library if it's specified with that."""
    return [lib[2:] if lib.startswith('-l') else lib for lib in libraries]

  def _GetAndMunge(self, field, path, default, prefix, append, map):
    """Retrieve a value from |field| at |path| or return |default|. If
    |append| is specified, and the item is found, it will be appended to that
    object instead of returned. If |map| is specified, results will be
    remapped through |map| before being returned or appended."""
    result = _GenericRetrieve(field, default, path)
    result = _DoRemapping(result, map)
    result = _AddPrefix(result, prefix)
    return _AppendOrReturn(append, result)

  class _GetWrapper(object):
    # Curries _GetAndMunge over a fixed settings dict and tool name so call
    # sites read like: cl('WarningLevel', prefix='/W').
    def __init__(self, parent, field, base_path, append=None):
      self.parent = parent
      self.field = field
      self.base_path = [base_path]
      self.append = append

    def __call__(self, name, map=None, prefix='', default=None):
      return self.parent._GetAndMunge(self.field, self.base_path + [name],
          default=default, prefix=prefix, append=self.append, map=map)

  def GetTargetPlatform(self, config):
    # Empty/unset msvs_target_platform means the VS default, 'Win32'.
    target_platform = self.msvs_target_platform.get(config, '')
    if not target_platform:
      target_platform = 'Win32'
    # Normalize the VS spelling 'Win32' back to gyp's 'x86'.
    return {'Win32': 'x86'}.get(target_platform, target_platform)

  def _RealConfig(self, config):
    # x64 builds are stored under a '<config>_x64' configuration name; map
    # the user-visible config name to the real one.
    target_platform = self.GetTargetPlatform(config)
    if target_platform == 'x64' and not config.endswith('_x64'):
      config += '_x64'
    return config

  def _Setting(self, path, config,
               default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_settings."""
    config = self._RealConfig(config)
    return self._GetAndMunge(
        self.msvs_settings[config], path, default, prefix, append, map)

  def _ConfigAttrib(self, path, config,
                    default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_configuration_attributes."""
    config = self._RealConfig(config)
    return self._GetAndMunge(
        self.msvs_configuration_attributes[config],
        path, default, prefix, append, map)

  def AdjustIncludeDirs(self, include_dirs, config):
    """Updates include_dirs to expand VS specific paths, and adds the system
    include dirs used for platform SDK and similar."""
    config = self._RealConfig(config)
    includes = include_dirs + self.msvs_system_include_dirs[config]
    includes.extend(self._Setting(
        ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
    return [self.ConvertVSMacros(p, config=config) for p in includes]

  def GetComputedDefines(self, config):
    """Returns the set of defines that are injected to the defines list based
    on other VS settings."""
    config = self._RealConfig(config)
    defines = []
    # CharacterSet uses the VS IDE encoding: '1' == Unicode, '2' == MBCS.
    if self._ConfigAttrib(['CharacterSet'], config) == '1':
      defines.extend(('_UNICODE', 'UNICODE'))
    if self._ConfigAttrib(['CharacterSet'], config) == '2':
      defines.append('_MBCS')
    defines.extend(self._Setting(
        ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
    return defines

  def GetOutputName(self, config, expand_special):
    """Gets the explicitly overridden output name for a target or returns None
    if it's not overridden."""
    config = self._RealConfig(config)
    type = self.spec['type']
    root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
    # TODO(scottmg): Handle OutputDirectory without OutputFile.
    output_file = self._Setting((root, 'OutputFile'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetPDBName(self, config, expand_special):
    """Gets the explicitly overridden pdb name for a target or returns None
    if it's not overridden."""
    config = self._RealConfig(config)
    output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetCflags(self, config):
    """Returns the flags that need to be added to .c and .cc compilations."""
    config = self._RealConfig(config)
    cflags = []
    cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
    cl = self._GetWrapper(self, self.msvs_settings[config],
                          'VCCLCompilerTool', append=cflags)
    # Each cl(...) call maps one VS IDE setting to its cl.exe flag spelling;
    # the wrapper appends the result to |cflags|.
    cl('Optimization',
       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O')
    cl('InlineFunctionExpansion', prefix='/Ob')
    cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
    cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
    cl('WholeProgramOptimization', map={'true': '/GL'})
    cl('WarningLevel', prefix='/W')
    cl('WarnAsError', map={'true': '/WX'})
    cl('DebugInformationFormat',
       map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
    cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
    cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
    cl('MinimalRebuild', map={'true': '/Gm'})
    cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
    cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
    cl('RuntimeLibrary',
       map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
    cl('EnablePREfast', map={'true': '/analyze'})
    cl('AdditionalOptions', prefix='')
    # ninja handles parallelism by itself, don't have the compiler do it too.
    cflags = filter(lambda x: not x.startswith('/MP'), cflags)
    return cflags

  def GetPrecompiledHeader(self, config, gyp_to_build_path):
    """Returns an object that handles the generation of precompiled header
    build steps."""
    config = self._RealConfig(config)
    # NOTE(review): _PchHelper is presumably an alias for PrecompiledHeader
    # defined elsewhere in this file -- confirm.
    return _PchHelper(self, config, gyp_to_build_path)

  def _GetPchFlags(self, config, extension):
    """Get the flags to be added to the cflags for precompiled header support.
    """
    config = self._RealConfig(config)
    # The PCH is only built once by a particular source file. Usage of PCH must
    # only be for the same language (i.e. C vs. C++), so only include the pch
    # flags when the language matches.
    if self.msvs_precompiled_header[config]:
      source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
      if _LanguageMatchesForPch(source_ext, extension):
        pch = os.path.split(self.msvs_precompiled_header[config])[1]
        return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
    return []

  def GetCflagsC(self, config):
    """Returns the flags that need to be added to .c compilations."""
    config = self._RealConfig(config)
    return self._GetPchFlags(config, '.c')

  def GetCflagsCC(self, config):
    """Returns the flags that need to be added to .cc compilations."""
    config = self._RealConfig(config)
    return ['/TP'] + self._GetPchFlags(config, '.cc')

  def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
    """Get and normalize the list of paths in AdditionalLibraryDirectories
    setting."""
    config = self._RealConfig(config)
    libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
                             config, default=[])
    libpaths = [os.path.normpath(
                    gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
                for p in libpaths]
    return ['/LIBPATH:"' + p + '"' for p in libpaths]

  def GetLibFlags(self, config, gyp_to_build_path):
    """Returns the flags that need to be added to lib commands."""
    config = self._RealConfig(config)
    libflags = []
    lib = self._GetWrapper(self, self.msvs_settings[config],
                           'VCLibrarianTool', append=libflags)
    libflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLibrarianTool', config, gyp_to_build_path))
    lib('AdditionalOptions')
    return libflags

  def _GetDefFileAsLdflags(self, spec, ldflags, gyp_to_build_path):
    """.def files get implicitly converted to a ModuleDefinitionFile for the
    linker in the VS generator. Emulate that behaviour here."""
    # NOTE(review): def_file is assigned but never used -- looks vestigial.
    def_file = ''
    if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
      def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
      if len(def_files) == 1:
        ldflags.append('/DEF:"%s"' % gyp_to_build_path(def_files[0]))
      elif len(def_files) > 1:
        raise Exception("Multiple .def files")

  def GetLdflags(self, config, gyp_to_build_path, expand_special,
                 manifest_base_name, is_executable):
    """Returns the flags that need to be added to link commands, and the
    manifest files."""
    config = self._RealConfig(config)
    ldflags = []
    ld = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLinkerTool', append=ldflags)
    self._GetDefFileAsLdflags(self.spec, ldflags, gyp_to_build_path)
    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
    ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
    ldflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLinkerTool', config, gyp_to_build_path))
    ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
    out = self.GetOutputName(config, expand_special)
    if out:
      ldflags.append('/OUT:' + out)
    pdb = self.GetPDBName(config, expand_special)
    if pdb:
      ldflags.append('/PDB:' + pdb)
    ld('AdditionalOptions', prefix='')
    ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
    ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
    ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
    ld('RandomizedBaseAddress',
       map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
    ld('DataExecutionPrevention',
       map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
    ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
    ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
    ld('LinkTimeCodeGeneration', map={'1': '/LTCG'})
    ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
    ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
    ld('EntryPointSymbol', prefix='/ENTRY:')
    ld('Profile', map={ 'true': '/PROFILE'})
    # TODO(scottmg): This should sort of be somewhere else (not really a flag).
    ld('AdditionalDependencies', prefix='')
    # TODO(scottmg): These too.
    ldflags.extend(('kernel32.lib', 'user32.lib', 'gdi32.lib', 'winspool.lib',
                    'comdlg32.lib', 'advapi32.lib', 'shell32.lib', 'ole32.lib',
                    'oleaut32.lib', 'uuid.lib', 'odbc32.lib', 'DelayImp.lib'))

    # If the base address is not specifically controlled, DYNAMICBASE should
    # be on by default.
    base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
                        ldflags)
    if not base_flags:
      ldflags.append('/DYNAMICBASE')

    # If the NXCOMPAT flag has not been specified, default to on. Despite the
    # documentation that says this only defaults to on when the subsystem is
    # Vista or greater (which applies to the linker), the IDE defaults it on
    # unless it's explicitly off.
    if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
      ldflags.append('/NXCOMPAT')

    # Targets with a .def file control their own exports, so isolation is
    # only allowed for executables without one.
    have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
    manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags(
        config, manifest_base_name, is_executable and not have_def_file)
    ldflags.extend(manifest_flags)
    manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path)
    manifest_files.append(intermediate_manifest_file)

    return ldflags, manifest_files

  def _GetLdManifestFlags(self, config, name, allow_isolation):
    """Returns the set of flags that need to be added to the link to generate
    a default manifest, as well as the name of the generated file."""
    # Add manifest flags that mirror the defaults in VS. Chromium dev builds
    # do not currently use any non-default settings, but we could parse
    # VCManifestTool blocks if Chromium or other projects need them in the
    # future. Of particular note, we do not yet support EmbedManifest because
    # it complicates incremental linking.
    output_name = name + '.intermediate.manifest'
    flags = [
      '/MANIFEST',
      '/ManifestFile:' + output_name,
      '''/MANIFESTUAC:"level='asInvoker' uiAccess='false'"'''
    ]
    if allow_isolation:
      flags.append('/ALLOWISOLATION')
    return flags, output_name

  def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
    """Gets additional manifest files that are added to the default one
    generated by the linker."""
    files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
                          default=[])
    if (self._Setting(
        ('VCManifestTool', 'EmbedManifest'), config, default='') == 'true'):
      print 'gyp/msvs_emulation.py: "EmbedManifest: true" not yet supported.'
    # The setting may be either a semicolon-separated string or a list.
    if isinstance(files, str):
      files = files.split(';')
    return [os.path.normpath(
                gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
            for f in files]

  def IsUseLibraryDependencyInputs(self, config):
    """Returns whether the target should be linked via Use Library Dependency
    Inputs (using component .objs of a given .lib)."""
    config = self._RealConfig(config)
    uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
    return uldi == 'true'

  def GetRcflags(self, config, gyp_to_ninja_path):
    """Returns the flags that need to be added to invocations of the resource
    compiler."""
    config = self._RealConfig(config)
    rcflags = []
    rc = self._GetWrapper(self, self.msvs_settings[config],
                          'VCResourceCompilerTool', append=rcflags)
    rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
    rcflags.append('/I' + gyp_to_ninja_path('.'))
    rc('PreprocessorDefinitions', prefix='/d')
    # /l arg must be in hex without leading '0x'
    rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
    return rcflags

  def BuildCygwinBashCommandLine(self, args, path_to_base):
    """Build a command line that runs args via cygwin bash. We assume that all
    incoming paths are in Windows normpath'd form, so they need to be
    converted to posix style for the part of the command line that's passed to
    bash. We also have to do some Visual Studio macro emulation here because
    various rules use magic VS names for things. Also note that rules that
    contain ninja variables cannot be fixed here (for example ${source}), so
    the outer generator needs to make sure that the paths that are written out
    are in posix style, if the command line will be used here."""
    cygwin_dir = os.path.normpath(
        os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
    cd = ('cd %s' % path_to_base).replace('\\', '/')
    # Escape backslashes/quotes for the outer cmd.exe quoting, then wrap each
    # argument in single quotes for bash.
    args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
    args = ["'%s'" % a.replace("'", "'\\''") for a in args]
    bash_cmd = ' '.join(args)
    cmd = (
        'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
        'bash -c "%s ; %s"' % (cd, bash_cmd))
    return cmd

  def IsRuleRunUnderCygwin(self, rule):
    """Determine if an action should be run under cygwin. If the variable is
    unset, or set to 1 we use cygwin."""
    # The rule-level setting wins over the target-level one; default is 1.
    return int(rule.get('msvs_cygwin_shell',
                        self.spec.get('msvs_cygwin_shell', 1))) != 0

  def HasExplicitIdlRules(self, spec):
    """Determine if there's an explicit rule for idl files. When there isn't we
    need to generate implicit rules to build MIDL .idl files."""
    for rule in spec.get('rules', []):
      if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
        return True
    return False

  def GetIdlBuildData(self, source, config):
    """Determine the implicit outputs for an idl file. Returns output
    directory, outputs, and variables and flags that are required."""
    config = self._RealConfig(config)
    midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
    def midl(name, default=None):
      # Look up a VCMIDLTool setting and expand any VS macros in the value.
      return self.ConvertVSMacros(midl_get(name, default=default),
                                  config=config)
    tlb = midl('TypeLibraryName', default='${root}.tlb')
    header = midl('HeaderFileName', default='${root}.h')
    dlldata = midl('DLLDataFileName', default='dlldata.c')
    iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
    proxy = midl('ProxyFileName', default='${root}_p.c')
    # Note that .tlb is not included in the outputs as it is not always
    # generated depending on the content of the input idl file.
    outdir = midl('OutputDirectory', default='')
    output = [header, dlldata, iid, proxy]
    variables = [('tlb', tlb),
                 ('h', header),
                 ('dlldata', dlldata),
                 ('iid', iid),
                 ('proxy', proxy)]
    # TODO(scottmg): Are there configuration settings to set these flags?
    flags = ['/char', 'signed', '/env', 'win32', '/Oicf']
    return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
  """Return whether |source_ext| and |pch_source_ext| belong to the same
  language: both C ('.c') or both C++ ('.cc', '.cxx', '.cpp')."""
  language_groups = (('.c',), ('.cc', '.cxx', '.cpp'))
  for group in language_groups:
    if source_ext in group and pch_source_ext in group:
      return True
  return False
class PrecompiledHeader(object):
  """Helper to generate dependencies and build rules to handle generation of
  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
  """

  def __init__(self, settings, config, gyp_to_build_path):
    self.settings = settings
    self.config = config
    self.gyp_to_build_path = gyp_to_build_path

  def _PchHeader(self):
    """Header name as it appears in #include lines (no directory part)."""
    header_setting = self.settings.msvs_precompiled_header[self.config]
    return os.path.split(header_setting)[1]

  def _PchSource(self):
    """Build-relative path of the source compiled once to produce the pch."""
    source_setting = self.settings.msvs_precompiled_source[self.config]
    return self.gyp_to_build_path(source_setting)

  def _PchOutput(self):
    """Name of the compiled pch output file."""
    return '${pchprefix}.' + self._PchHeader() + '.pch'

  def GetObjDependencies(self, sources, objs):
    """Given a list of sources files and the corresponding object files,
    returns a list of the pch files that should be depended upon. The
    additional wrapping in the return value is for interface compatability
    with make.py on Mac, and xcode_emulation.py."""
    if not self._PchHeader():
      return []
    source = self._PchSource()
    assert source
    pch_ext = os.path.splitext(source)[1]
    # Depend on the pch only when at least one source shares its language.
    uses_pch = any(
        _LanguageMatchesForPch(os.path.splitext(candidate)[1], pch_ext)
        for candidate in sources)
    return [(None, None, self._PchOutput())] if uses_pch else []

  def GetPchBuildCommands(self):
    """Returns [(path_to_pch, language_flag, language, header)].
    |path_to_gch| and |header| are relative to the build directory."""
    header = self._PchHeader()
    source = self._PchSource()
    if not (source and header):
      return []
    language = 'c' if os.path.splitext(source)[1] == '.c' else 'cc'
    return [(self._PchOutput(), '/Yc' + header, language, source)]
vs_version = None  # Module-level cache; version detection is expensive.

def GetVSVersion(generator_flags):
  """Return the (cached) VisualStudioVersion selected for this build."""
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'))
  return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
  """Return the command that runs the VS environment setup script.

  NOTE(review): |arch| is accepted but not forwarded to SetupScript();
  it looks like this should be vs.SetupScript(arch) -- confirm with callers.
  """
  vs = GetVSVersion(generator_flags)
  return vs.SetupScript()
def ExpandMacros(string, expansions):
  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
  for the canonical way to retrieve a suitable dict.

  Arguments:
    string: the text to expand; returned unchanged when it contains no '$'.
    expansions: dict mapping macro spellings (e.g. '$(OutDir)\\') to their
        replacement text; replacements must not themselves contain macros.
  """
  if '$' in string:
    # .items() works on both Python 2 and 3; iteritems() is Python-2-only.
    for old, new in expansions.items():
      # Guard against recursive/unexpanded macros in replacement values.
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string
def _ExtractImportantEnvironment(output_of_set):
  """Extracts environment variables required for the toolchain to run from
  a textual dump output by the cmd.exe 'set' command."""
  envvars_to_save = (
      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
      'include',
      'lib',
      'libpath',
      'path',
      'pathext',
      'systemroot',
      'temp',
      'tmp',
      )
  env = {}
  for line in output_of_set.splitlines():
    # Find the first pattern this line's variable name matches, if any.
    pattern = next((candidate for candidate in envvars_to_save
                    if re.match(candidate + '=', line.lower())), None)
    if pattern is None:
      continue
    var, setting = line.split('=', 1)
    if pattern == 'path':
      # Our own rules (for running gyp-win-tool) and other actions in
      # Chromium rely on python being in the path. Add the path to this
      # python here so that if it's not in the path when ninja is run
      # later, python will still be found.
      setting = os.path.dirname(sys.executable) + os.pathsep + setting
    env[var.upper()] = setting
  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
    if required not in env:
      raise Exception('Environment variable "%s" '
                      'required to be set to valid path' % required)
  return env
def _FormatAsEnvironmentBlock(envvar_dict):
  """Format as an 'environment block' directly suitable for CreateProcess.
  Briefly this is a list of key=value\0, terminated by an additional \0. See
  CreateProcess documentation for more details.

  Arguments:
    envvar_dict: dict of environment variable name -> value (both strings).
  Returns:
    A single string: 'KEY=value\0' for every entry, plus a trailing '\0'.
  """
  block = ''
  nul = '\0'
  # .items() works on both Python 2 and 3; iteritems() is Python-2-only.
  for key, value in envvar_dict.items():
    block += key + '=' + value + nul
  block += nul
  return block
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path."""
  vs = GetVSVersion(generator_flags)
  for arch in ('x86', 'x64'):
    # Run the vcvars setup script, then 'set' to dump the resulting
    # environment; persist the interesting subset as 'environment.<arch>'.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    env_block = _FormatAsEnvironmentBlock(env)
    # open_out is injected so generators can control output file creation.
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
  generator: Check that all regular source files, i.e. not created at run time,
  exist on disk. Missing files cause needless recompilation when building via
  VS, and we want this check to match for people/bots that build using ninja,
  so they're not surprised when the VS build fails.

  Raises:
    Exception: listing the missing files, when the flag is enabled.
  """
  if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
    # Sources containing '$' are generated at build time; skip them. Use list
    # comprehensions rather than filter(): under Python 3 filter() returns an
    # always-truthy iterator, which made 'if missing:' fire unconditionally.
    no_specials = [s for s in sources if '$' not in s]
    relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
    missing = [path for path in relative if not os.path.exists(path)]
    if missing:
      # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
      # path for a slightly less crazy looking output.
      cleaned_up = [os.path.normpath(x) for x in missing]
      raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
| mit |
sgraham/nope | third_party/cython/src/pyximport/test/test_pyximport.py | 103 | 1988 | import pyximport; pyximport.install(reload_support=True)
import os, sys
import time, shutil
import tempfile
def make_tempdir():
    """Create a fresh 'pyrex_temp' scratch directory and return its path.

    Any directory left over from a previous run is removed first so each
    test starts from a clean slate.
    """
    tempdir = os.path.join(tempfile.gettempdir(), "pyrex_temp")
    if os.path.exists(tempdir):
        remove_tempdir(tempdir)
    os.mkdir(tempdir)
    return tempdir
def remove_tempdir(tempdir):
    """Recursively delete |tempdir|, reporting (not raising) failures."""
    # ignore_errors=0 so failures are routed to on_remove_file_error
    # instead of being silently discarded.
    shutil.rmtree(tempdir, 0, on_remove_file_error)
def on_remove_file_error(func, path, excinfo):
    # shutil.rmtree onerror callback (Python 2 file, hence print statements):
    # report the failing path and exception details, but keep going.
    print "Sorry! Could not remove a temp file:", path
    print "Extra information."
    print func, excinfo
    print "You may want to delete this yourself when you get a chance."
def test():
    """Exercise the pyximport install hook: building a .pyx module, reloads,
    and .pyxdep / .pyxbld handling (Python 2; uses the builtin reload)."""
    pyximport._test_files = []
    tempdir = make_tempdir()
    sys.path.append(tempdir)
    filename = os.path.join(tempdir, "dummy.pyx")
    open(filename, "w").write("print 'Hello world from the Pyrex install hook'")
    import dummy
    reload(dummy)

    # Declare dependencies: every .txt file in the directory plus foo.bar.
    depend_filename = os.path.join(tempdir, "dummy.pyxdep")
    depend_file = open(depend_filename, "w")
    depend_file.write("*.txt\nfoo.bar")
    depend_file.close()

    # Custom build script consulted by pyximport when building dummy.pyx.
    build_filename = os.path.join(tempdir, "dummy.pyxbld")
    build_file = open(build_filename, "w")
    build_file.write("""
from distutils.extension import Extension
def make_ext(name, filename):
    return Extension(name=name, sources=[filename])
""")
    build_file.close()

    # Create the dependency files referenced by dummy.pyxdep.
    open(os.path.join(tempdir, "foo.bar"), "w").write(" ")
    open(os.path.join(tempdir, "1.txt"), "w").write(" ")
    open(os.path.join(tempdir, "abc.txt"), "w").write(" ")
    reload(dummy)

    # New dependencies should trigger exactly one rebuild.
    assert len(pyximport._test_files)==1, pyximport._test_files
    reload(dummy)

    time.sleep(1) # sleep a second to get safer mtimes
    # Touching a dependency must trigger another rebuild on reload.
    open(os.path.join(tempdir, "abc.txt"), "w").write(" ")
    print "Here goes the reolad"
    reload(dummy)
    assert len(pyximport._test_files) == 1, pyximport._test_files

    # With nothing changed, a further reload must not rebuild.
    reload(dummy)
    assert len(pyximport._test_files) ==0, pyximport._test_files

    remove_tempdir(tempdir)
if __name__=="__main__":
    # Run the smoke test when this file is executed directly.
    test()
| bsd-3-clause |
heatherleaf/MCFParser.py | test/convert_mcfg_to_py.py | 1 | 1550 |
import re
import json
import fileinput
# Example input lines this script understands (MCFG rules with the original
# category names in the trailing (* ... *) comment):
# F --> AO a [0,0;1,0] (* C --> /T,C T [0,0;1,0] *)
# A --> AL H [0,0;1,0] (* D,-case --> /N,D,-case N [0,0;1,0] *)
# I --> m [0,2;0,0][0,1] (* PastPart,-aux;-case --> +v,PastPart,-aux;-case;-v [0,2;0,0][0,1] *)
# I --> p [0,1;0,0][0,2] (* PastPart,-aux;-case --> +v,PastPart,-aux;-v;-case [0,1;0,0][0,2] *)
# E --> "laugh" (* /D,V,-v --> "laugh" *)
# E --> "love" (* /D,V,-v --> "love" *)

# Non-lexical rule: category, argument categories, and the linearization
# record [arg,component;...][...] describing each output component.
_rule_re = re.compile(r'''
    ^ (\w+) \s+ --> \s+ ([\w\s]+?) \s+ \[ ([][\d,;]+) \]
''', re.VERBOSE)

# Lexical rule: category and a double-quoted token (possibly empty).
_lex_re = re.compile(r'''
    ^ (\w+) \s+ --> \s+ "([^"]*)"
''', re.VERBOSE)

# Target format, as consumed by MCFParser:
# >>> grammar = [('f', 'S', ['A'], [[(0,0), (0,1)]]),
# ... ('g', 'A', ['A'], [['a', (0,0), 'b'], ['c', (0,1), 'd']]),
# ... ('h', 'A', [], [['a', 'b'], ['c', 'd']])]
# fun, cat, args, rhss = split_mcfrule(mcfrule)

grammar = []
functr = 1
for line in fileinput.input():
    mrule = re.match(_rule_re, line)
    mlex = re.match(_lex_re, line)
    if mrule:
        # Grammar rule: split argument categories, then parse the record
        # '[a,c;a,c][...]' into lists of (argument, component) int pairs.
        cat, args, rhss = mrule.groups()
        args = tuple(args.split())
        rhss = [[tuple(int(i) for i in sym.split(',')) for sym in rhs.split(';')] for rhs in rhss.split('][')]
    elif mlex:
        # Lexical rule: no arguments; an empty token yields an empty RHS.
        args = ()
        cat, token = mlex.groups()
        if token:
            rhss = [[token]]
        else:
            rhss = [[]]
    else:
        # Not a rule line (comment, blank, etc.) -- skip it.
        continue
    # Synthesize a unique function label per rule, e.g. 'NP-00042'.
    fun = f"{cat}-{functr:05d}"
    grammar.append((fun, cat, args, rhss))
    functr += 1

# Emit the collected rules as a Python module assigning 'grammar'.
print('grammar = [')
for rule in grammar:
    print(f" {rule},")
print(']')
| gpl-3.0 |
meredith-digops/awsops | volumecleanup/volumecleanup.py | 1 | 3967 | #!/usr/bin/env python
from __future__ import print_function
import boto3
from botocore.exceptions import ClientError
from datetime import datetime
from datetime import timedelta
from datetime import tzinfo
DEFAULT_RETENTION_DAYS = None
"""If None, no default retention is applied"""

ZERO = timedelta(0)


class UTC(tzinfo):
    """Minimal concrete tzinfo for UTC: fixed zero offset and no DST.

    Needed for timezone-aware datetime comparisons against the
    timezone-aware timestamps returned by the EC2 API.
    """

    def utcoffset(self, dt):
        """UTC is, by definition, zero offset from UTC."""
        return ZERO

    def tzname(self, dt):
        """Canonical name of the zone."""
        return "UTC"

    def dst(self, dt):
        """UTC never observes daylight-saving time."""
        return ZERO
def fetch_available_volumes(ec2, filters=None):
    """
    Generator of available EBS volumes

    :param ec2: EC2 resource
    :type ec2: boto3.resources.factory.ec2.ServiceResource
    :param filters: Optional list of filters; the caller's list is NOT
        modified by this call
    :type filters: None|list
    :returns: volumes collection
    :rtype: boto3.resources.collection.ec2.volumesCollection
    """
    # Work on a copy: the previous implementation appended to the caller's
    # list in place, so reusing a filter list across calls accumulated
    # duplicate 'status' entries.
    filters = [] if filters is None else list(filters)

    # Append the filter for finding only volumes that are in the 'available'
    # state (i.e. not attached to any instance).
    # Ref: http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVolumes.html
    filters.append({
        'Name': 'status',
        'Values': ['available'],
    })

    return ec2.volumes.filter(
        Filters=filters
    )
def get_abandoned_volumes(since, *args, **kwargs):
    """
    Yield available EBS volumes created at or before |since|.

    Extra positional/keyword arguments are forwarded to
    fetch_available_volumes.

    :param since: volumes created after this datetime are considered
        still in use and skipped
    :type since: datetime.datetime
    :returns: (iterator) of volumes
    :rtype: boto3.resources.factory.ec2.Volume
    """
    for volume in fetch_available_volumes(*args, **kwargs):
        # Old enough to be considered abandoned?
        if volume.meta.data['CreateTime'] <= since:
            yield volume
def lambda_handler(event, context):
    """
    Delete abandoned EBS volumes that exceed reasonable retention

    :param event: invocation payload; recognized keys:
        Retention -- retention period in days (falls back to
            DEFAULT_RETENTION_DAYS; an effective None raises AttributeError)
        DryRun -- when True, issue DryRun delete calls only (default False)
        Filters -- EC2 volume filters (defaults to tag-key 'ops:retention')
    :param context: lambda context object (unused)
    :raises AttributeError: when no retention period is configured
    """
    # Set the default retention period if none was provided to the lambda
    # invocation
    if 'Retention' not in event:
        event['Retention'] = DEFAULT_RETENTION_DAYS

    if event['Retention'] is None:
        # Don't delete anything
        raise AttributeError("No Retention specified")

    if 'DryRun' not in event:
        event['DryRun'] = False

    if 'Filters' not in event:
        event['Filters'] = [{
            'Name': 'tag-key',
            'Values': [
                'ops:retention'
            ]
        }]

    # Volumes created before this timestamp are eligible for deletion.
    since = datetime.now(UTC()) - timedelta(float(event['Retention']))

    ec2 = boto3.resource('ec2')
    old_volumes = get_abandoned_volumes(since,
                                        ec2=ec2,
                                        filters=event['Filters'])

    for volume in old_volumes:
        print("Deleting: {id}".format(
            id=volume.id
        ))

        try:
            volume.delete(DryRun=event['DryRun'])
        except ClientError as e:
            # A DryRun delete "fails" with DryRunOperation by design and is
            # expected. Previously every other ClientError was silently
            # swallowed too; re-raise those so real failures surface.
            if e.response['Error']['Code'] != 'DryRunOperation':
                raise
if __name__ == '__main__':
    # Ad-hoc report mode: list (without deleting) available volumes older
    # than roughly three months, rendered as an ASCII table.
    from terminaltables import AsciiTable

    # NOTE(review): 3*365/12 is integer division (91 days) under Python 2
    # but 91.25 under Python 3 -- confirm which is intended.
    since = datetime.now(UTC()) - timedelta(3*365/12)
    print("Since: {}".format(
        since.isoformat()))

    table_headers = [
        [
            'created',
            'id',
            'size',
            'type',
            'tags',
        ]
    ]
    table_data = []

    vols = get_abandoned_volumes(
        since,
        ec2=boto3.resource('ec2'))

    for v in vols:
        table_data.append([
            v.meta.data['CreateTime'].isoformat(),
            v.id,
            v.size,
            v.volume_type,
            # Render tags as 'Key: Value' lines; empty string when untagged.
            "" if v.tags is None else
            "\n".join("{k}: {v}".format(
                k=i['Key'],
                v=i['Value']
            ) for i in v.tags),
        ])

    # Sort rows by creation time (first column) for readability.
    table_data.sort(key=lambda x: x[0])
    print(AsciiTable(table_headers + table_data).table)
gjbex/parameter-weaver | src/vsc/parameter_weaver/c/types.py | 1 | 6550 | #
# ParameterWeaver: a code generator to handle command line parameters
# and configuration files for C/C++/Fortran/R/Octave
# Copyright (C) 2013 Geert Jan Bex <geertjan.bex@uhasselt.be>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
'''C types and implementation of their methods for validation and formatting'''
import re
from vsc.parameter_weaver.base_validator import ParameterDefinitionError
from vsc.parameter_weaver.params import VarType
from vsc.util import Indenter
class CType(VarType):
    '''Base class for C types: identifier validation and formatting helpers
    shared by the concrete Int/Long/Float/Double types.'''

    def __init__(self, name, enduser_name):
        super(CType, self).__init__(name, enduser_name)

    def is_valid_var_name(self, var_name):
        '''Raise ParameterDefinitionError unless var_name is a legal C
        identifier (letter or underscore, then word characters).'''
        if re.match(r'^[A-Za-z_]\w*$', var_name) is None:
            raise ParameterDefinitionError(
                "not a valid name for type '{0}'".format(self.name))

    def validation_function(self, name):
        '''C expression validating a value of this type; trivially true here,
        subclasses override with a real check.'''
        return '1'

    @property
    def struct_sep(self):
        '''Member-access separator for struct pointers in generated C code.'''
        return '->'

    @property
    def input_format_string(self):
        '''Format string used when reading configuration input.

        NOTE(review): subclasses define format_string as a property, so the
        call syntax here looks suspect -- confirm against VarType.
        '''
        return self.format_string()

    def input_tmpl(self, name):
        '''scanf template reading a '<name> = <rest of line>' assignment.'''
        return '"{0} = %[^\\n]"'.format(name)
class Int(CType):
    """C ``int`` command line parameter type."""

    def __init__(self):
        super(Int, self).__init__('int', enduser_name='integer')

    def is_of_type(self, value):
        """Raise ParameterDefinitionError unless value is an integer literal."""
        if re.match(r'^(?:\+|-)?[0-9]+$', value) is None:
            raise ParameterDefinitionError(
                "value '{0}' is invalid for type '{1}'".format(value,
                                                               self.name))

    def input_conversion(self, var, indent=None):
        """C statement converting the raw argument string into ``var``."""
        return '%s = atoi(argv_str);' % var

    @property
    def format_string(self):
        """printf conversion specifier for an int."""
        return '%d'

    def validation_function(self, name):
        """C expression validating ``name`` as an int."""
        return 'isIntCL(%s, 0)' % name
class Long(CType):
    """C ``long`` parameter type; accepts an optional trailing ``L``."""

    # Signed decimal digits, optionally suffixed with the C 'L' marker.
    _PATTERN = re.compile(r'^(?:\+|-)?[0-9]+L?$')

    def __init__(self):
        super(Long, self).__init__('long', enduser_name='long integer')

    def is_of_type(self, value):
        """Raise ParameterDefinitionError unless value is a long literal."""
        if self._PATTERN.match(value) is None:
            raise ParameterDefinitionError(
                "value '{0}' is invalid for type '{1}'".format(value,
                                                               self.name))

    def input_conversion(self, var, indent=None):
        """C statement converting the raw argument string into ``var``."""
        return '%s = atol(argv_str);' % var

    @property
    def format_string(self):
        """printf conversion specifier for a long."""
        return '%ld'

    def validation_function(self, name):
        """C expression validating ``name`` as a long."""
        return 'isLongCL(%s, 0)' % name
class Float(CType):
    """C single-precision ``float`` command line parameter type."""

    def __init__(self):
        super(Float, self).__init__('float', 'SP float')

    def is_of_type(self, value):
        """Raise ParameterDefinitionError unless value is a float literal."""
        # Two forms: digits with optional fraction, or fraction-led
        # (e.g. '.5'); both allow an optional exponent.
        patterns = (
            r'^(?:\+|-)?[0-9]+(\.[0-9]*)?((e|E)(\+|-)?[0-9]+)?$',
            r'^(?:\+|-)?[0-9]*\.[0-9]+((e|E)(\+|-)?[0-9]+)?$',
        )
        if not any(re.match(p, value) for p in patterns):
            raise ParameterDefinitionError(
                "value '{0}' is invalid for type '{1}'".format(value,
                                                               self.name))

    def input_conversion(self, var, indent=None):
        """C statement converting the raw argument string into ``var``."""
        return '%s = atof(argv_str);' % var

    @property
    def format_string(self):
        """printf specifier with full single-precision significance."""
        return '%.7f'

    def validation_function(self, name):
        """C expression validating ``name`` as a float."""
        return 'isFloatCL(%s, 0)' % name
class Double(CType):
    """C double-precision ``double`` command line parameter type."""

    def __init__(self):
        super(Double, self).__init__('double', 'DP float')

    def is_of_type(self, value):
        """Raise ParameterDefinitionError unless value is a float literal."""
        # Same grammar as Float: integer-led or fraction-led decimal,
        # each with an optional exponent part.
        patterns = (
            r'^(?:\+|-)?[0-9]+(\.[0-9]*)?((e|E)(\+|-)?[0-9]+)?$',
            r'^(?:\+|-)?[0-9]*\.[0-9]+((e|E)(\+|-)?[0-9]+)?$',
        )
        if not any(re.match(p, value) for p in patterns):
            raise ParameterDefinitionError(
                "value '{0}' is invalid for type '{1}'".format(value,
                                                               self.name))

    def input_conversion(self, var, indent=None):
        """C statement converting the raw argument string into ``var``."""
        return '%s = atof(argv_str);' % var

    @property
    def format_string(self):
        """printf specifier with full double-precision significance."""
        return '%.16lf'

    def validation_function(self, name):
        """C expression validating ``name`` as a double."""
        return 'isDoubleCL(%s, 0)' % name
class Bool(CType):
    """C99 ``bool`` parameter type; accepts 'true', 'false' or an integer."""

    def __init__(self):
        super(Bool, self).__init__('bool', 'boolean')

    def is_of_type(self, value):
        """Raise ParameterDefinitionError unless value is a boolean literal."""
        if value in ('true', 'false'):
            return
        if re.match(r'^(?:\+|-)?[0-9]+$', value):
            return
        raise ParameterDefinitionError(
            "value '{0}' is invalid for type '{1}'".format(value, self.name))

    def input_conversion(self, var, indent):
        """Emit C code mapping 'false'/'true'/integer strings onto ``var``."""
        code = Indenter(indent)
        code.add('if (!strncmp("false", argv_str, 6)) {').incr()
        code.add('%s = false;' % var)
        code.decr().add('} else if (!strncmp("true", argv_str, 5)) {').incr()
        code.add('%s = true;' % var)
        code.decr().add('} else {').incr()
        # Anything that is not a literal true/false is parsed as an int.
        code.add('%s = atoi(argv_str);' % var)
        code.decr().add('}')
        return code.text()

    @property
    def format_string(self):
        """Booleans are printed as integers in C."""
        return '%d'
class CharPtr(CType):
    """C string (``char *``) parameter type; literals must be quoted."""

    def __init__(self):
        super(CharPtr, self).__init__('char *', 'string')

    def is_of_type(self, value):
        """Raise ParameterDefinitionError unless value is a quoted string."""
        # The backreference forces matching opening and closing quotes.
        if re.match(r'^("|\').*\1$', value) is None:
            raise ParameterDefinitionError(
                "value '{0}' is invalid for type '{1}'".format(value,
                                                               self.name))

    def input_conversion(self, var, indent=None):
        """Emit C code copying the argument string into freshly
        allocated storage owned by ``var``."""
        code = Indenter('\t')
        code.add('char *tmp;')
        code.add('int len = strlen(argv_str);')
        # Release the previous value before replacing it.
        code.add('free({var});')
        code.add('if (!(tmp = (char *) calloc(len + 1, sizeof(char))))')
        code.incr().add('errx(EXIT_CL_ALLOC_FAIL, "can not allocate char* field");')
        code.decr()
        code.add('{var} = strncpy(tmp, argv_str, len + 1);')
        return code.text().format(var=var)

    def transform(self, value):
        """Strip matching quotes from ``value``; raise if it is not quoted."""
        for quote in ('"', "'"):
            if value.startswith(quote) and value.endswith(quote):
                return value.strip(quote)
        raise ParameterDefinitionError(
            "value '{0}' is invalid for type '{1}'".format(value, self.name))

    @property
    def format_string(self):
        """printf conversion specifier for a string."""
        return '%s'
| gpl-3.0 |
Affix/CouchPotatoServer | libs/chardet/constants.py | 3008 | 1335 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Module-wide debug switch (0 = off); read by the prober modules.
_debug = 0
# Overall detector verdicts.
eDetecting = 0  # still consuming input, no verdict yet
eFoundIt = 1    # a prober is confident it found the charset
eNotMe = 2      # this charset has been ruled out
# Coding state machine states.
eStart = 0  # initial / accepting state
eError = 1  # byte sequence is illegal for this charset
eItsMe = 2  # byte sequence is unique to this charset
# Confidence level above which detection may stop early.
SHORTCUT_THRESHOLD = 0.95
| gpl-3.0 |
varunarya10/rally | rally/benchmark/context/sahara/sahara_image.py | 2 | 3536 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.benchmark.context import base
from rally.benchmark.context.cleanup import manager as resource_manager
from rally.benchmark.scenarios import base as scenarios_base
from rally.benchmark.scenarios.glance import utils as glance_utils
from rally.common.i18n import _
from rally.common import log as logging
from rally.common import utils as rutils
from rally import consts
from rally import osclients
LOG = logging.getLogger(__name__)
@base.context(name="sahara_image", order=440)
class SaharaImage(base.Context):
    """Context class for adding and tagging Sahara images."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "image_url": {"type": "string"},
            "username": {"type": "string"},
            "plugin_name": {"type": "string"},
            "hadoop_version": {"type": "string"},
        },
        "additionalProperties": False,
        "required": ["image_url", "username", "plugin_name", "hadoop_version"]
    }

    def __init__(self, context):
        super(SaharaImage, self).__init__(context)
        # Populated with per-tenant image ids by setup().
        self.context["sahara_images"] = {}

    @rutils.log_task_wrapper(LOG.info, _("Enter context: `Sahara Image`"))
    def setup(self):
        """Upload and tag one Sahara image for every benchmark tenant."""
        url = self.config["image_url"]
        plugin = self.config["plugin_name"]
        version = self.config["hadoop_version"]
        image_user = self.config["username"]

        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            clients = osclients.Clients(user["endpoint"])
            glance_scenario = glance_utils.GlanceScenario(
                clients=clients)
            name = scenarios_base.Scenario._generate_random_name(
                prefix="sahara_image_", length=15)
            # Upload the image itself through Glance ...
            image = glance_scenario._create_image(name,
                                                  "bare",
                                                  url,
                                                  "qcow2")
            # ... then register it with Sahara and tag it so the
            # plugin/version pair can locate it.
            clients.sahara().images.update_image(image_id=image.id,
                                                 user_name=image_user,
                                                 desc="")
            clients.sahara().images.update_tags(image_id=image.id,
                                                new_tags=[plugin, version])
            self.context["tenants"][tenant_id]["sahara_image"] = image.id

    @rutils.log_task_wrapper(LOG.info, _("Exit context: `Sahara Image`"))
    def cleanup(self):
        """Delete the Glance images created during setup()."""
        # TODO(boris-42): Delete only resources created by this context
        resource_manager.cleanup(names=["glance.images"],
                                 users=self.context.get("users", []))
| apache-2.0 |
viveksh13/gymkhana | venv/lib/python2.7/site-packages/pip/req/req_set.py | 30 | 26547 | from __future__ import absolute_import
from collections import defaultdict
import functools
import itertools
import logging
import os
from pip._vendor import pkg_resources
from pip._vendor import requests
from pip.download import (url_to_path, unpack_url)
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
DistributionNotFound, PreviousBuildDirError)
from pip.req.req_install import InstallRequirement
from pip.utils import (
display_path, dist_in_usersite, ensure_dir, normalize_path)
from pip.utils.logging import indent_log
from pip.vcs import vcs
logger = logging.getLogger(__name__)
class Requirements(object):
    """An insertion-ordered mapping of requirement name -> requirement.

    Order is tracked explicitly because plain dicts give no ordering
    guarantee on the interpreters this code targets.
    """

    def __init__(self):
        self._keys = []   # names, in first-insertion order
        self._dict = {}   # name -> requirement object

    def keys(self):
        """Return the names in insertion order."""
        return self._keys

    def values(self):
        """Return the requirements in insertion order."""
        return [self._dict[name] for name in self._keys]

    def __contains__(self, item):
        return item in self._keys

    def __setitem__(self, key, value):
        # Remember the position only on first insertion; re-assignment
        # keeps the original ordering slot.
        if key not in self._keys:
            self._keys.append(key)
        self._dict[key] = value

    def __getitem__(self, key):
        return self._dict[key]

    def __repr__(self):
        pairs = ('%s: %s' % (repr(name), repr(self[name]))
                 for name in self.keys())
        return 'Requirements({%s})' % ', '.join(pairs)
class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel prepare_files logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req_to_install):
        self.req_to_install = req_to_install

    def dist(self, finder):
        """Return a setuptools Dist object."""
        raise NotImplementedError(self.dist)

    def prep_for_dist(self):
        """Ensure that we can get a Dist for this requirement."""
        # BUG FIX: previously raised NotImplementedError(self.dist),
        # naming the wrong method in the error; name this method instead.
        raise NotImplementedError(self.prep_for_dist)
def make_abstract_dist(req_to_install):
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    # Editable checkouts are always handled as source trees; wheel links
    # get the wheel-specific abstraction; everything else is an sdist.
    if req_to_install.editable:
        return IsSDist(req_to_install)
    link = req_to_install.link
    if link and link.is_wheel:
        return IsWheel(req_to_install)
    return IsSDist(req_to_install)
class IsWheel(DistAbstraction):
    """A requirement backed by an unpacked wheel."""

    def dist(self, finder):
        """Return the single distribution found in the unpacked wheel."""
        found = list(pkg_resources.find_distributions(
            self.req_to_install.source_dir))
        return found[0]

    def prep_for_dist(self):
        # Metadata is already present in the wheel; nothing to build.
        # FIXME:https://github.com/pypa/pip/issues/1112
        pass
class IsSDist(DistAbstraction):
    """A requirement backed by an unpacked source distribution."""

    def dist(self, finder):
        """Return the sdist's distribution metadata."""
        distribution = self.req_to_install.get_dist()
        # FIXME: shouldn't be globally added:
        if distribution.has_metadata('dependency_links.txt'):
            links = distribution.get_metadata_lines('dependency_links.txt')
            finder.add_dependency_links(links)
        return distribution

    def prep_for_dist(self):
        """Generate egg-info so metadata becomes available."""
        self.req_to_install.run_egg_info()
        self.req_to_install.assert_source_matches_version()
class Installed(DistAbstraction):
    """A requirement already satisfied by an installed distribution."""

    def dist(self, finder):
        # Reuse the distribution already found on the system.
        return self.req_to_install.satisfied_by

    def prep_for_dist(self):
        # Already installed, so there is nothing to prepare.
        pass
class RequirementSet(object):
    """The set of requirements pip is resolving, downloading and installing.

    Tracks user-supplied and discovered requirements, prepares their
    source trees / wheels, and installs them in dependency order.
    """

    def __init__(self, build_dir, src_dir, download_dir, upgrade=False,
                 ignore_installed=False, as_egg=False, target_dir=None,
                 ignore_dependencies=False, force_reinstall=False,
                 use_user_site=False, session=None, pycompile=True,
                 isolated=False, wheel_download_dir=None,
                 wheel_cache=None):
        """Create a RequirementSet.

        :param wheel_download_dir: Where still-packed .whl files should be
            written to. If None they are written to the download_dir parameter.
            Separate to download_dir to permit only keeping wheel archives for
            pip wheel.
        :param download_dir: Where still packed archives should be written to.
            If None they are not saved, and are deleted immediately after
            unpacking.
        :param wheel_cache: The pip wheel cache, for passing to
            InstallRequirement.
        """
        if session is None:
            raise TypeError(
                "RequirementSet() missing 1 required keyword argument: "
                "'session'"
            )
        self.build_dir = build_dir
        self.src_dir = src_dir
        # XXX: download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.
        self.download_dir = download_dir
        self.upgrade = upgrade
        self.ignore_installed = ignore_installed
        self.force_reinstall = force_reinstall
        self.requirements = Requirements()
        # Mapping of alias: real_name
        self.requirement_aliases = {}
        # Requirements given by URL/path with no name (no egg fragment).
        self.unnamed_requirements = []
        self.ignore_dependencies = ignore_dependencies
        self.successfully_downloaded = []
        self.successfully_installed = []
        # Requirements whose temporary source dirs must be removed at the end.
        self.reqs_to_cleanup = []
        self.as_egg = as_egg
        self.use_user_site = use_user_site
        self.target_dir = target_dir  # set from --target option
        self.session = session
        self.pycompile = pycompile
        self.isolated = isolated
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir
        self._wheel_cache = wheel_cache
        # Maps from install_req -> dependencies_of_install_req
        self._dependencies = defaultdict(list)

    def __str__(self):
        """Space-separated string of the user-supplied requirements only."""
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(self, install_req, parent_req_name=None):
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name
        if not install_req.match_markers():
            # Environment markers (PEP 426/508) excluded this requirement.
            logger.warning("Ignoring %s: markers %r don't match your "
                           "environment", install_req.name,
                           install_req.markers)
            return []

        install_req.as_egg = self.as_egg
        install_req.use_user_site = self.use_user_site
        install_req.target_dir = self.target_dir
        install_req.pycompile = self.pycompile
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req]
        else:
            if parent_req_name is None and self.has_requirement(name):
                raise InstallationError(
                    'Double requirement given: %s (already in %s, name=%r)'
                    % (install_req, self.get_requirement(name), name))
            if not self.has_requirement(name):
                # Add requirement
                self.requirements[name] = install_req
                # FIXME: what about other normalizations?  E.g., _ vs. -?
                if name.lower() != name:
                    self.requirement_aliases[name.lower()] = name
                result = [install_req]
            else:
                # Canonicalise to the already-added object
                install_req = self.get_requirement(name)
                # No need to scan, this is a duplicate requirement.
                result = []
            if parent_req_name:
                parent_req = self.get_requirement(parent_req_name)
                self._dependencies[parent_req].append(install_req)
            return result

    def has_requirement(self, project_name):
        """Return True if a requirement with this name (case-insensitively)
        has already been added."""
        for name in project_name, project_name.lower():
            if name in self.requirements or name in self.requirement_aliases:
                return True
        return False

    @property
    def has_requirements(self):
        # NOTE: returns a list, not a bool -- callers rely on truthiness.
        return list(self.requirements.values()) or self.unnamed_requirements

    @property
    def is_download(self):
        """True when a download dir is configured; validates that it exists
        and raises InstallationError otherwise."""
        if self.download_dir:
            self.download_dir = os.path.expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def get_requirement(self, project_name):
        """Look up a requirement by name (exact, lower-cased, or via alias);
        raises KeyError when unknown."""
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def uninstall(self, auto_confirm=False):
        """Uninstall every named requirement in this set."""
        for req in self.requirements.values():
            req.uninstall(auto_confirm=auto_confirm)
            req.commit_uninstall()

    def _walk_req_to_install(self, handler):
        """Call handler for all pending reqs.

        :param handler: Handle a single requirement. Should take a requirement
            to install. Can optionally return an iterable of additional
            InstallRequirements to cover.
        """
        # The list() here is to avoid potential mutate-while-iterating bugs.
        discovered_reqs = []
        reqs = itertools.chain(
            list(self.unnamed_requirements), list(self.requirements.values()),
            discovered_reqs)
        for req_to_install in reqs:
            more_reqs = handler(req_to_install)
            if more_reqs:
                discovered_reqs.extend(more_reqs)

    def prepare_files(self, finder):
        """
        Prepare process. Create temp directories, download and/or unpack files.
        """
        # make the wheelhouse
        if self.wheel_download_dir:
            ensure_dir(self.wheel_download_dir)

        self._walk_req_to_install(
            functools.partial(self._prepare_file, finder))

    def _check_skip_installed(self, req_to_install, finder):
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        # Check whether to upgrade/reinstall this req or not.
        req_to_install.check_if_exists()
        if req_to_install.satisfied_by:
            skip_reason = 'satisfied (use --upgrade to upgrade)'
            if self.upgrade:
                best_installed = False
                # For link based requirements we have to pull the
                # tree down and inspect to assess the version #, so
                # its handled way down.
                if not (self.force_reinstall or req_to_install.link):
                    try:
                        finder.find_requirement(req_to_install, self.upgrade)
                    except BestVersionAlreadyInstalled:
                        skip_reason = 'up-to-date'
                        best_installed = True
                    except DistributionNotFound:
                        # No distribution found, so we squash the
                        # error - it will be raised later when we
                        # re-try later to do the install.
                        # Why don't we just raise here?
                        pass

                if not best_installed:
                    # don't uninstall conflict if user install and
                    # conflict is not user install
                    if not (self.use_user_site and not
                            dist_in_usersite(req_to_install.satisfied_by)):
                        req_to_install.conflicts_with = \
                            req_to_install.satisfied_by
                    req_to_install.satisfied_by = None
            return skip_reason
        else:
            return None

    def _prepare_file(self, finder, req_to_install):
        """Prepare a single requirement: download/unpack it and collect its
        run-time dependencies.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.editable:
            logger.info('Obtaining %s', req_to_install)
        else:
            # satisfied_by is only evaluated by calling _check_skip_installed,
            # so it must be None here.
            assert req_to_install.satisfied_by is None
            if not self.ignore_installed:
                skip_reason = self._check_skip_installed(
                    req_to_install, finder)

            if req_to_install.satisfied_by:
                assert skip_reason is not None, (
                    '_check_skip_installed returned None but '
                    'req_to_install.satisfied_by is set to %r'
                    % (req_to_install.satisfied_by,))
                logger.info(
                    'Requirement already %s: %s', skip_reason,
                    req_to_install)
            else:
                if (req_to_install.link and
                        req_to_install.link.scheme == 'file'):
                    path = url_to_path(req_to_install.link.url)
                    logger.info('Processing %s', display_path(path))
                else:
                    logger.info('Collecting %s', req_to_install)

        with indent_log():
            # ################################ #
            # # vcs update or unpack archive # #
            # ################################ #
            if req_to_install.editable:
                req_to_install.ensure_has_source_dir(self.src_dir)
                req_to_install.update_editable(not self.is_download)
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    req_to_install.archive(self.download_dir)
            elif req_to_install.satisfied_by:
                # Already installed and we are not upgrading: nothing to
                # download or unpack.
                abstract_dist = Installed(req_to_install)
            else:
                # @@ if filesystem packages are not marked
                # editable in a req, a non deterministic error
                # occurs when the script attempts to unpack the
                # build directory
                req_to_install.ensure_has_source_dir(self.build_dir)
                # If a checkout exists, it's unwise to keep going. version
                # inconsistencies are logged later, but do not fail the
                # installation.
                # FIXME: this won't upgrade when there's an existing
                # package unpacked in `req_to_install.source_dir`
                if os.path.exists(
                        os.path.join(req_to_install.source_dir, 'setup.py')):
                    raise PreviousBuildDirError(
                        "pip can't proceed with requirements '%s' due to a"
                        " pre-existing build directory (%s). This is "
                        "likely due to a previous installation that failed"
                        ". pip is being responsible and not assuming it "
                        "can delete this. Please delete it and try again."
                        % (req_to_install, req_to_install.source_dir)
                    )
                req_to_install.populate_link(finder, self.upgrade)
                # We can't hit this spot and have populate_link return None.
                # req_to_install.satisfied_by is None here (because we're
                # guarded) and upgrade has no impact except when satisfied_by
                # is not None.
                # Then inside find_requirement existing_applicable -> False
                # If no new versions are found, DistributionNotFound is raised,
                # otherwise a result is guaranteed.
                assert req_to_install.link
                try:
                    download_dir = self.download_dir
                    # We always delete unpacked sdists after pip ran.
                    autodelete_unpacked = True
                    if req_to_install.link.is_wheel \
                            and self.wheel_download_dir:
                        # when doing 'pip wheel` we download wheels to a
                        # dedicated dir.
                        download_dir = self.wheel_download_dir
                    if req_to_install.link.is_wheel:
                        if download_dir:
                            # When downloading, we only unpack wheels to get
                            # metadata.
                            autodelete_unpacked = True
                        else:
                            # When installing a wheel, we use the unpacked
                            # wheel.
                            autodelete_unpacked = False
                    unpack_url(
                        req_to_install.link, req_to_install.source_dir,
                        download_dir, autodelete_unpacked,
                        session=self.session)
                except requests.HTTPError as exc:
                    logger.critical(
                        'Could not install requirement %s because '
                        'of error %s',
                        req_to_install,
                        exc,
                    )
                    raise InstallationError(
                        'Could not install requirement %s because '
                        'of HTTP error %s for URL %s' %
                        (req_to_install, exc, req_to_install.link)
                    )
                abstract_dist = make_abstract_dist(req_to_install)
                abstract_dist.prep_for_dist()
                if self.is_download:
                    # Make a .zip of the source_dir we already created.
                    if req_to_install.link.scheme in vcs.all_schemes:
                        req_to_install.archive(self.download_dir)
                # req_to_install.req is only avail after unpack for URL
                # pkgs repeat check_if_exists to uninstall-on-upgrade
                # (#14)
                if not self.ignore_installed:
                    req_to_install.check_if_exists()
                if req_to_install.satisfied_by:
                    if self.upgrade or self.ignore_installed:
                        # don't uninstall conflict if user install and
                        # conflict is not user install
                        if not (self.use_user_site and not
                                dist_in_usersite(
                                    req_to_install.satisfied_by)):
                            req_to_install.conflicts_with = \
                                req_to_install.satisfied_by
                        req_to_install.satisfied_by = None
                    else:
                        logger.info(
                            'Requirement already satisfied (use '
                            '--upgrade to upgrade): %s',
                            req_to_install,
                        )

            # ###################### #
            # # parse dependencies # #
            # ###################### #
            dist = abstract_dist.dist(finder)
            more_reqs = []

            def add_req(subreq):
                # Wrap a discovered dependency and register it, crediting
                # req_to_install as the parent.
                sub_install_req = InstallRequirement(
                    str(subreq),
                    req_to_install,
                    isolated=self.isolated,
                    wheel_cache=self._wheel_cache,
                )
                more_reqs.extend(self.add_requirement(
                    sub_install_req, req_to_install.name))

            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not self.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                self.add_requirement(req_to_install, None)

            if not self.ignore_dependencies:
                if (req_to_install.extras):
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )
                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq)

            # cleanup tmp src
            self.reqs_to_cleanup.append(req_to_install)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                self.successfully_downloaded.append(req_to_install)

        return more_reqs

    def cleanup_files(self):
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()

    def _to_install(self):
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()

        def schedule(req):
            # Depth-first walk; ordered_reqs doubles as the cycle breaker.
            if req.satisfied_by or req in ordered_reqs:
                return
            ordered_reqs.add(req)
            for dep in self._dependencies[req]:
                schedule(dep)
            order.append(req)
        for install_req in self.requirements.values():
            schedule(install_req)
        return order

    def install(self, install_options, global_options=(), *args, **kwargs):
        """
        Install everything in this set (after having downloaded and unpacked
        the packages)
        """
        to_install = self._to_install()

        if to_install:
            logger.info(
                'Installing collected packages: %s',
                ', '.join([req.name for req in to_install]),
            )

        with indent_log():
            for requirement in to_install:
                if requirement.conflicts_with:
                    logger.info(
                        'Found existing installation: %s',
                        requirement.conflicts_with,
                    )
                    with indent_log():
                        requirement.uninstall(auto_confirm=True)
                try:
                    requirement.install(
                        install_options,
                        global_options,
                        *args,
                        **kwargs
                    )
                except:
                    # if install did not succeed, rollback previous uninstall
                    if (requirement.conflicts_with and not
                            requirement.install_succeeded):
                        requirement.rollback_uninstall()
                    raise
                else:
                    if (requirement.conflicts_with and
                            requirement.install_succeeded):
                        requirement.commit_uninstall()
                requirement.remove_temporary_source()

        self.successfully_installed = to_install
| apache-2.0 |
elingg/tensorflow | tensorflow/contrib/factorization/examples/mnist.py | 26 | 11701 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example mnist model with jointly computed k-means clustering.
This is a toy example of how clustering can be embedded into larger tensorflow
graphs. In this case, we learn a clustering on-the-fly and transform the input
into the 'distance to clusters' space. These are then fed into hidden layers to
learn the supervised objective.
To train this model on real mnist data, run this model as follows:
mnist --nofake_data --max_steps=2000
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import math
import tempfile
import time
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from tensorflow.examples.tutorials.mnist import mnist
# Parsed command-line flags; presumably assigned by an argparse handler
# later in the file (not visible in this chunk) -- TODO confirm.
FLAGS = None
# The MNIST dataset has 10 classes, representing the digits 0 through 9.
NUM_CLASSES = 10
# The MNIST images are always 28x28 pixels.
IMAGE_SIZE = 28
# Length of one flattened image vector.
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
def placeholder_inputs():
  """Generate placeholder variables to represent the input tensors.

  Returns:
    images_placeholder: Images placeholder.
    labels_placeholder: Labels placeholder.
  """
  # Batch dimension is left open (None) so any batch size can be fed.
  images = tf.placeholder(tf.float32,
                          shape=(None, mnist.IMAGE_PIXELS))
  labels = tf.placeholder(tf.int32, shape=(None))
  return images, labels
def fill_feed_dict(data_set, images_pl, labels_pl, batch_size):
  """Fills the feed_dict for training the given step.

  Args:
    data_set: The set of images and labels, from input_data.read_data_sets()
    images_pl: The images placeholder, from placeholder_inputs().
    labels_pl: The labels placeholder, from placeholder_inputs().
    batch_size: Batch size of data to feed.

  Returns:
    feed_dict: The feed dictionary mapping from placeholders to values.
  """
  # Pull the next `batch_size` examples and map them onto the placeholders.
  images, labels = data_set.next_batch(batch_size, FLAGS.fake_data)
  return {
      images_pl: images,
      labels_pl: labels,
  }
def do_eval(sess,
            eval_correct,
            images_placeholder,
            labels_placeholder,
            data_set):
  """Runs one evaluation against the full epoch of data.

  Args:
    sess: The session in which the model has been trained.
    eval_correct: The Tensor that returns the number of correct predictions.
    images_placeholder: The images placeholder.
    labels_placeholder: The labels placeholder.
    data_set: The set of images and labels to evaluate, from
      input_data.read_data_sets().

  Returns:
    Precision value on the dataset.
  """
  correct = 0  # running count of correct predictions
  # Only evaluate whole batches; any remainder examples are dropped.
  steps_per_epoch = data_set.num_examples // FLAGS.batch_size
  num_examples = steps_per_epoch * FLAGS.batch_size
  for _ in xrange(steps_per_epoch):
    feed = fill_feed_dict(data_set,
                          images_placeholder,
                          labels_placeholder,
                          FLAGS.batch_size)
    correct += sess.run(eval_correct, feed_dict=feed)
  precision = correct / num_examples
  print(' Num examples: %d Num correct: %d Precision @ 1: %0.04f' %
        (num_examples, correct, precision))
  return precision
def inference(inp, num_clusters, hidden1_units, hidden2_units):
  """Build the MNIST model up to where it may be used for inference.

  Args:
    inp: input data
    num_clusters: number of clusters of input features to train.
    hidden1_units: Size of the first hidden layer.
    hidden2_units: Size of the second hidden layer.

  Returns:
    logits: Output tensor with the computed logits.
    clustering_loss: Clustering loss.
    kmeans_training_op: An op to train the clustering.
  """
  # Clustering: mini-batch cosine k-means over the raw input features;
  # the per-cluster scores become the network input representation.
  kmeans = tf.contrib.factorization.KMeans(
      inp,
      num_clusters,
      distance_metric=tf.contrib.factorization.COSINE_DISTANCE,
      # TODO(agarwal): kmeans++ is currently causing crash in dbg mode.
      # Enable this after fixing.
      # initial_clusters=tf.contrib.factorization.KMEANS_PLUS_PLUS_INIT,
      use_mini_batch=True)

  all_scores, _, clustering_scores, kmeans_training_op = kmeans.training_graph()
  # Some heuristics to approximately whiten this output.
  all_scores = (all_scores[0] - 0.5) * 5
  # Here we avoid passing the gradients from the supervised objective back to
  # the clusters by creating a stop_gradient node.
  all_scores = tf.stop_gradient(all_scores)
  clustering_loss = tf.reduce_sum(clustering_scores[0])
  # Hidden 1: fully-connected ReLU layer over the whitened cluster scores.
  # NOTE(review): stddev scales by IMAGE_PIXELS although this layer's fan-in
  # is num_clusters -- confirm intended.
  with tf.name_scope('hidden1'):
    weights = tf.Variable(
        tf.truncated_normal([num_clusters, hidden1_units],
                            stddev=1.0 / math.sqrt(float(IMAGE_PIXELS))),
        name='weights')
    biases = tf.Variable(tf.zeros([hidden1_units]),
                         name='biases')
    hidden1 = tf.nn.relu(tf.matmul(all_scores, weights) + biases)
  # Hidden 2
  with tf.name_scope('hidden2'):
    weights = tf.Variable(
        tf.truncated_normal([hidden1_units, hidden2_units],
                            stddev=1.0 / math.sqrt(float(hidden1_units))),
        name='weights')
    biases = tf.Variable(tf.zeros([hidden2_units]),
                         name='biases')
    hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
  # Linear output layer producing un-normalised class logits.
  with tf.name_scope('softmax_linear'):
    weights = tf.Variable(
        tf.truncated_normal([hidden2_units, NUM_CLASSES],
                            stddev=1.0 / math.sqrt(float(hidden2_units))),
        name='weights')
    biases = tf.Variable(tf.zeros([NUM_CLASSES]),
                         name='biases')
    logits = tf.matmul(hidden2, weights) + biases
  return logits, clustering_loss, kmeans_training_op
def run_training():
  """Train MNIST for a number of steps.

  Returns the best test-set precision observed during training.
  """
  # Get the sets of images and labels for training, validation, and
  # test on MNIST. A throwaway temp directory holds the download.
  train_dir = tempfile.mkdtemp()
  data_sets = input_data.read_data_sets(train_dir, FLAGS.fake_data)
  # Tell TensorFlow that the model will be built into the default Graph.
  with tf.Graph().as_default():
    # Generate placeholders for the images and labels.
    images_placeholder, labels_placeholder = placeholder_inputs()
    # Build a Graph that computes predictions from the inference model.
    logits, clustering_loss, kmeans_training_op = inference(images_placeholder,
                                                            FLAGS.num_clusters,
                                                            FLAGS.hidden1,
                                                            FLAGS.hidden2)
    # Add to the Graph the Ops for loss calculation.
    loss = mnist.loss(logits, labels_placeholder)
    # Add to the Graph the Ops that calculate and apply gradients.
    # Supervised training and the k-means update run as one grouped op.
    train_op = tf.group(mnist.training(loss, FLAGS.learning_rate),
                        kmeans_training_op)
    # Add the Op to compare the logits to the labels during evaluation.
    eval_correct = mnist.evaluation(logits, labels_placeholder)
    # Add the variable initializer Op.
    init = tf.global_variables_initializer()
    # Create a session for running Ops on the Graph.
    sess = tf.Session()
    # A large feed is used here because the k-means initializer reads
    # input data while the variables are being initialized.
    feed_dict = fill_feed_dict(data_sets.train,
                               images_placeholder,
                               labels_placeholder,
                               batch_size=5000)
    # Run the Op to initialize the variables.
    sess.run(init, feed_dict=feed_dict)
    # Start the training loop.
    max_test_prec = 0
    for step in xrange(FLAGS.max_steps):
      start_time = time.time()
      # Fill a feed dictionary with the actual set of images and labels
      # for this particular training step.
      feed_dict = fill_feed_dict(data_sets.train,
                                 images_placeholder,
                                 labels_placeholder,
                                 FLAGS.batch_size)
      # Run one step of the model.
      _, loss_value, clustering_loss_value = sess.run([train_op,
                                                       loss,
                                                       clustering_loss],
                                                      feed_dict=feed_dict)
      duration = time.time() - start_time
      if step % 100 == 0:
        # Print status to stdout.
        print('Step %d: loss = %.2f, clustering_loss = %.2f (%.3f sec)' % (
            step, loss_value, clustering_loss_value, duration))
      # Save a checkpoint and evaluate the model periodically.
      if (step + 1) % 1000 == 0 or (step + 1) == FLAGS.max_steps:
        # Evaluate against the training set.
        print('Training Data Eval:')
        do_eval(sess,
                eval_correct,
                images_placeholder,
                labels_placeholder,
                data_sets.train)
        # Evaluate against the validation set.
        print('Validation Data Eval:')
        do_eval(sess,
                eval_correct,
                images_placeholder,
                labels_placeholder,
                data_sets.validation)
        # Evaluate against the test set.
        print('Test Data Eval:')
        test_prec = do_eval(sess,
                            eval_correct,
                            images_placeholder,
                            labels_placeholder,
                            data_sets.test)
        # Track the best test precision seen so far.
        max_test_prec = max(max_test_prec, test_prec)
  return max_test_prec
class MnistTest(tf.test.TestCase):
  """Smoke test: a short training run must reach a minimal test precision."""

  def test_train(self):
    # assertGreater reports the actual precision on failure, unlike
    # assertTrue on a pre-computed boolean which only prints `False`.
    self.assertGreater(run_training(), 0.6)
if __name__ == '__main__':
  # Command-line flags; parsed values land in the module-level FLAGS
  # namespace read by the functions above.
  parser = argparse.ArgumentParser(
      description='Basic model parameters as external flags.'
  )
  parser.add_argument(
      '--learning_rate',
      type=float,
      default=0.3,
      help='Initial learning rate.'
  )
  parser.add_argument(
      '--max_steps',
      type=int,
      default=200,
      help='Number of steps to run trainer.'
  )
  parser.add_argument(
      '--num_clusters',
      type=int,
      default=384,
      help='Number of input feature clusters'
  )
  parser.add_argument(
      '--hidden1',
      type=int,
      default=256,
      help='Number of units in hidden layer 1.'
  )
  parser.add_argument(
      '--hidden2',
      type=int,
      default=32,
      help='Number of units in hidden layer 2.'
  )
  parser.add_argument(
      '--batch_size',
      type=int,
      default=100,
      help='Batch size. Must divide evenly into the dataset sizes.'
  )
  parser.add_argument(
      '--train_dir',
      type=str,
      default='data',
      help='Directory to put the training data.'
  )
  parser.add_argument(
      '--fake_data',
      type=bool,
      default=True,
      help='Use fake input data.'
  )
  # NOTE(review): `unparsed` is discarded, so unrecognised flags are
  # silently ignored by tf.test.main() -- confirm that is intended.
  FLAGS, unparsed = parser.parse_known_args()
  tf.test.main()
| apache-2.0 |
hiepgia/android_kernel_lge_geeb | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";

# Optional filter: a single argument is treated as a pid when it parses
# as an integer, otherwise as a command name.
for_comm = None
for_pid = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        for_comm = sys.argv[1]

# syscalls[comm][pid][syscall_id] -> invocation count (autovivified dict).
syscalls = autodict()
def trace_begin():
    # Called once by perf before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the event stream ends (or on Ctrl+C).
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # Tracepoint handler invoked by perf for every syscall entry.
    # Skip events that do not match the requested comm/pid filter.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # First occurrence: the autodict leaf is still a dict, so the
        # += above raises TypeError; initialise the counter instead.
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Dump the accumulated counts, grouped by comm/pid, each group
    # sorted by descending count.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",

    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
                            "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort by (count, id) descending so the busiest syscalls lead.
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                                  key = lambda(k, v): (v, k), reverse = True):
                print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
johncosta/private-readthedocs.org | readthedocs/core/models.py | 1 | 1611 | from django.db import models
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from django.contrib.auth.models import User
# Fallback address used for VCS attribution when a user opts out of
# exposing their real email (see UserProfile.get_contribution_details).
STANDARD_EMAIL = "anonymous@readthedocs.org"
class UserProfile (models.Model):
    """Additional information about a User.
    """
    # One profile per user (legacy one-to-one via unique ForeignKey).
    user = models.ForeignKey(User, unique=True, related_name='profile')
    # No default declared; callers pass whitelisted explicitly on create.
    whitelisted = models.BooleanField()
    homepage = models.CharField(max_length=100, blank=True)
    # When False, get_contribution_details() substitutes STANDARD_EMAIL.
    allow_email = models.BooleanField(help_text='Show your email on VCS contributions.', default=True)

    def get_absolute_url(self):
        # Resolved through the named URL pattern via models.permalink below.
        return ('profiles_profile_detail', (), {'username': self.user.username})
    get_absolute_url = models.permalink(get_absolute_url)

    def __unicode__(self):
        return "%s's profile" % self.user.username

    def get_contribution_details(self):
        """
        Gets the line to put into commits to attribute the author.

        Returns a tuple (name, email)
        """
        # Prefer the real name when both parts are set, else the username.
        if self.user.first_name and self.user.last_name:
            name = '%s %s' % (self.user.first_name, self.user.last_name)
        else:
            name = self.user.username
        if self.allow_email:
            email = self.user.email
        else:
            email = STANDARD_EMAIL
        return (name, email)
@receiver(post_save, sender=User)
def create_profile(sender, **kwargs):
    """Auto-create a blank ``UserProfile`` whenever a ``User`` is inserted.

    Connected to ``post_save`` on ``User``; does nothing on updates.
    """
    # Truthiness via .get() instead of `kwargs['created'] is True`:
    # identity comparison with True is un-idiomatic and a missing key
    # would have raised KeyError.
    if not kwargs.get('created'):
        return
    try:
        UserProfile.objects.create(user_id=kwargs['instance'].id,
                                   whitelisted=False)
    except DatabaseError:
        # Deliberate best-effort: during initial syncdb the profile table
        # may not exist yet; skip rather than crash user creation.
        pass
| mit |
nvoron23/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Tools/scripts/cleanfuture.py | 102 | 8578 | #! /usr/bin/env python
"""cleanfuture [-d][-r][-v] path ...
-d Dry run. Analyze, but don't make any changes to, files.
-r Recurse. Search for all .py files in subdirectories too.
-v Verbose. Print informative msgs.
Search Python (.py) files for future statements, and remove the features
from such statements that are already mandatory in the version of Python
you're using.
Pass one or more file and/or directory paths. When a directory path, all
.py files within the directory will be examined, and, if the -r option is
given, likewise recursively for subdirectories.
Overwrites files in place, renaming the originals with a .bak extension. If
cleanfuture finds nothing to change, the file is left alone. If cleanfuture
does change a file, the changed file is a fixed-point (i.e., running
cleanfuture on the resulting .py file won't change it again, at least not
until you try it again with a later Python release).
Limitations: You can do these things, but this tool won't help you then:
+ A future statement cannot be mixed with any other statement on the same
physical line (separated by semicolon).
+ A future statement cannot contain an "as" clause.
Example: Assuming you're using Python 2.2, if a file containing
from __future__ import nested_scopes, generators
is analyzed by cleanfuture, the line is rewritten to
from __future__ import generators
because nested_scopes is no longer optional in 2.2 but generators is.
"""
import __future__
import tokenize
import os
import sys
# Command-line flag state, set in main(); ints are used as counters
# so repeating a flag increments it.
dryrun = 0
recurse = 0
verbose = 0
def errprint(*args):
    """Write the arguments to stderr, space-separated, newline-terminated."""
    msg = ' '.join(str(arg) for arg in args)
    if not msg.endswith('\n'):
        msg += '\n'
    sys.stderr.write(msg)
def main():
    # Parse -d/-r/-v flags, then run check() on every remaining argument.
    import getopt
    global verbose, recurse, dryrun
    try:
        opts, args = getopt.getopt(sys.argv[1:], "drv")
    except getopt.error, msg:
        errprint(msg)
        return
    for o, a in opts:
        if o == '-d':
            dryrun += 1
        elif o == '-r':
            recurse += 1
        elif o == '-v':
            verbose += 1
    if not args:
        # No paths given: show the module docstring as usage text.
        errprint("Usage:", __doc__)
        return
    for arg in args:
        check(arg)
def check(file):
    # Directory (but never a symlink): scan its .py entries, recursing
    # into subdirectories only when -r was given.
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "listing directory", file
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            if ((recurse and os.path.isdir(fullname) and
                 not os.path.islink(fullname))
                or name.lower().endswith(".py")):
                check(fullname)
        return

    if verbose:
        print "checking", file, "...",
    try:
        f = open(file)
    except IOError, msg:
        errprint("%r: I/O Error: %s" % (file, str(msg)))
        return

    # Analyze the future statements; only read the rest of the file when
    # something actually needs rewriting.
    ff = FutureFinder(f, file)
    changed = ff.run()
    if changed:
        ff.gettherest()
    f.close()
    if changed:
        if verbose:
            print "changed."
            if dryrun:
                print "But this is a dry run, so leaving it alone."
        # Report every rewritten span (indices are 0-based internally).
        for s, e, line in changed:
            print "%r lines %d-%d" % (file, s+1, e+1)
            for i in range(s, e+1):
                print ff.lines[i],
            if line is None:
                print "-- deleted"
            else:
                print "-- change to:"
                print line,
        if not dryrun:
            # Keep the original as <file>.bak, then write the new content.
            bak = file + ".bak"
            if os.path.exists(bak):
                os.remove(bak)
            os.rename(file, bak)
            if verbose:
                print "renamed", file, "to", bak
            g = open(file, "w")
            ff.write(g)
            g.close()
            if verbose:
                print "wrote new", file
    else:
        if verbose:
            print "unchanged."
class FutureFinder:
    """Tokenize a source file, find its leading ``from __future__ import``
    statements, and compute rewrites that drop features already mandatory
    in the running Python version.
    """

    def __init__(self, f, fname):
        self.f = f
        self.fname = fname
        self.ateof = 0
        self.lines = [] # raw file lines

        # List of (start_index, end_index, new_line) triples.
        self.changed = []

    # Line-getter for tokenize.
    def getline(self):
        if self.ateof:
            return ""
        line = self.f.readline()
        if line == "":
            self.ateof = 1
        else:
            self.lines.append(line)
        return line

    def run(self):
        """Scan the file header and return the list of change triples."""
        STRING = tokenize.STRING
        NL = tokenize.NL
        NEWLINE = tokenize.NEWLINE
        COMMENT = tokenize.COMMENT
        NAME = tokenize.NAME
        OP = tokenize.OP

        changed = self.changed
        get = tokenize.generate_tokens(self.getline).next
        type, token, (srow, scol), (erow, ecol), line = get()

        # Chew up initial comments and blank lines (if any).
        while type in (COMMENT, NL, NEWLINE):
            type, token, (srow, scol), (erow, ecol), line = get()

        # Chew up docstring (if any -- and it may be implicitly catenated!).
        while type is STRING:
            type, token, (srow, scol), (erow, ecol), line = get()

        # Analyze the future stmts.
        while 1:
            # Chew up comments and blank lines (if any).
            while type in (COMMENT, NL, NEWLINE):
                type, token, (srow, scol), (erow, ecol), line = get()

            if not (type is NAME and token == "from"):
                break
            startline = srow - 1 # tokenize is one-based
            type, token, (srow, scol), (erow, ecol), line = get()

            if not (type is NAME and token == "__future__"):
                break
            type, token, (srow, scol), (erow, ecol), line = get()

            if not (type is NAME and token == "import"):
                break
            type, token, (srow, scol), (erow, ecol), line = get()

            # Get the list of features.
            features = []
            while type is NAME:
                features.append(token)
                type, token, (srow, scol), (erow, ecol), line = get()

                if not (type is OP and token == ','):
                    break
                type, token, (srow, scol), (erow, ecol), line = get()

            # A trailing comment?
            comment = None
            if type is COMMENT:
                comment = token
                type, token, (srow, scol), (erow, ecol), line = get()

            if type is not NEWLINE:
                # Multi-line or otherwise unexpected statement shape;
                # bail out on the whole file rather than guess.
                errprint("Skipping file %r; can't parse line %d:\n%s" %
                         (self.fname, srow, line))
                return []

            endline = srow - 1

            # Check for obsolete features.
            okfeatures = []
            for f in features:
                object = getattr(__future__, f, None)
                if object is None:
                    # A feature we don't know about yet -- leave it in.
                    # They'll get a compile-time error when they compile
                    # this program, but that's not our job to sort out.
                    okfeatures.append(f)
                else:
                    released = object.getMandatoryRelease()
                    if released is None or released <= sys.version_info:
                        # Withdrawn or obsolete.
                        pass
                    else:
                        okfeatures.append(f)

            # Rewrite the line if at least one future-feature is obsolete.
            if len(okfeatures) < len(features):
                if len(okfeatures) == 0:
                    # Every feature is mandatory now: delete the statement.
                    line = None
                else:
                    line = "from __future__ import "
                    line += ', '.join(okfeatures)
                    if comment is not None:
                        line += ' ' + comment
                    line += '\n'
                changed.append((startline, endline, line))

            # Loop back for more future statements.

        return changed

    def gettherest(self):
        # Slurp whatever follows the header so write() can copy it over.
        if self.ateof:
            self.therest = ''
        else:
            self.therest = self.f.read()

    def write(self, f):
        """Write the rewritten header lines plus the untouched remainder."""
        changed = self.changed
        assert changed
        # Prevent calling this again.
        self.changed = []
        # Apply changes in reverse order so earlier indices stay valid.
        changed.reverse()
        for s, e, line in changed:
            if line is None:
                # pure deletion
                del self.lines[s:e+1]
            else:
                self.lines[s:e+1] = [line]
        f.writelines(self.lines)
        # Copy over the remainder of the file.
        if self.therest:
            f.write(self.therest)
# Standard script entry point.
if __name__ == '__main__':
    main()
| apache-2.0 |
yencarnacion/jaikuengine | appengine_django/management/commands/startapp.py | 64 | 1371 | # Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import django
from django.core.management.commands import startapp
import appengine_django
class Command(startapp.Command):
  """startapp variant that loads app templates from appengine_django."""

  def handle_label(self, *args, **kwds):
    """Temporary adjust django.__path__ to load app templates from the
    helpers directory.
    """
    old_path = django.__path__
    django.__path__ = appengine_django.__path__
    # try/finally guarantees django.__path__ is restored even when the
    # underlying command raises; the original leaked the patched path on
    # error, breaking subsequent django template/command lookups.
    try:
      startapp.Command.handle_label(self, *args, **kwds)
    finally:
      django.__path__ = old_path
django.__path__ = old_path
class ProjectCommand(Command):
  """Command bound to a fixed project directory.

  Lets callers omit the directory argument: it is captured at
  construction time and injected into every handle_label call.
  """

  def __init__(self, project_directory):
    super(ProjectCommand, self).__init__()
    self.project_directory = project_directory

  def handle_label(self, app_name, **options):
    # Delegate to Command.handle_label with the stored directory.
    super(ProjectCommand, self).handle_label(app_name, self.project_directory,
                                             **options)
| apache-2.0 |
codoo/clouder | clouder/clouder_base.py | 2 | 31386 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2015, TODAY Clouder SASU
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License with Attribution
# clause as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License with
# Attribution clause along with this program. If not, see
# <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
from openerp.exceptions import except_orm
import re
from datetime import datetime, timedelta
import clouder_model
class ClouderDomain(models.Model):
    """
    Define the domain object, which represent all domains which can be linked
    to the bases hosted in this clouder.
    """

    _name = 'clouder.domain'
    _inherit = ['clouder.model']

    name = fields.Char('Domain name', size=64, required=True)
    organisation = fields.Char('Organisation', size=64, required=True)
    # Container running the DNS server authoritative for this domain.
    dns_id = fields.Many2one('clouder.container', 'DNS Server', required=True)
    # Optional wildcard SSL certificate shared by the domain's bases.
    cert_key = fields.Text('Wildcard Cert Key')
    cert_cert = fields.Text('Wildcart Cert')
    public = fields.Boolean('Public?')
    partner_id = fields.Many2one(
        'res.partner', 'Manager',
        default=lambda self: self.env.user.partner_id)

    _sql_constraints = [
        ('name_uniq', 'unique(name)', 'Name must be unique!'),
    ]

    @api.one
    @api.constrains('name')
    def _validate_data(self):
        """
        Check that the domain name does not contain any forbidden
        characters.
        """
        if not re.match("^[\w\d.-]*$", self.name):
            raise except_orm(_('Data error!'), _(
                "Name can only contains letters, digits - and dot"))
class ClouderBase(models.Model):
    """
    Define the base object, which represent all websites hosted in this clouder
    with a specific url and a specific database.
    """

    _name = 'clouder.base'
    _inherit = ['clouder.model']

    # Identity and placement.
    name = fields.Char('Name', size=64, required=True)
    title = fields.Char('Title', size=64, required=True)
    application_id = fields.Many2one('clouder.application', 'Application',
                                     required=True)
    domain_id = fields.Many2one('clouder.domain', 'Domain name', required=True)
    service_id = fields.Many2one('clouder.service', 'Service', required=True)
    service_ids = fields.Many2many('clouder.service',
                                   'clouder_base_service_rel', 'base_id',
                                   'service_id', 'Alternative Services')
    # Credentials created inside the hosted application.
    # NOTE(review): generate_random_password(...) is evaluated once at
    # module import, so every record created by this process shares the
    # same default password; a lambda default would give per-record
    # randomness -- confirm which is intended.
    admin_name = fields.Char('Admin name', size=64, required=True)
    admin_password = fields.Char(
        'Admin password', size=64, required=True,
        default=clouder_model.generate_random_password(20))
    admin_email = fields.Char('Admin email', size=64, required=True)
    poweruser_name = fields.Char('PowerUser name', size=64)
    poweruser_password = fields.Char(
        'PowerUser password', size=64,
        default=clouder_model.generate_random_password(12))
    poweruser_email = fields.Char('PowerUser email', size=64)
    # Deployment behaviour.
    build = fields.Selection(
        [('none', 'No action'), ('build', 'Build'), ('restore', 'Restore')],
        'Build?', default='build')
    ssl_only = fields.Boolean('SSL Only?')
    test = fields.Boolean('Test?')
    lang = fields.Selection(
        [('en_US', 'en_US'), ('fr_FR', 'fr_FR')],
        'Language', required=True, default='en_US')
    state = fields.Selection([
        ('installing', 'Installing'), ('enabled', 'Enabled'),
        ('blocked', 'Blocked'), ('removing', 'Removing')],
        'State', readonly=True)
    option_ids = fields.One2many('clouder.base.option', 'base_id', 'Options')
    link_ids = fields.One2many('clouder.base.link', 'base_id', 'Links')
    # Save/backup scheduling and retention.
    save_repository_id = fields.Many2one('clouder.save.repository',
                                         'Save repository')
    time_between_save = fields.Integer('Minutes between each save')
    saverepo_change = fields.Integer('Days before saverepo change')
    saverepo_expiration = fields.Integer('Days before saverepo expiration')
    save_expiration = fields.Integer('Days before save expiration')
    date_next_save = fields.Datetime('Next save planned')
    save_comment = fields.Text('Save Comment')
    nosave = fields.Boolean('No save?')
    reset_each_day = fields.Boolean('Reset each day?')
    # Per-base SSL certificate (overrides the domain wildcard).
    cert_key = fields.Text('Cert Key')
    cert_cert = fields.Text('Cert')
    parent_id = fields.Many2one('clouder.base', 'Parent Base')
    backup_ids = fields.Many2many(
        'clouder.container', 'clouder_base_backup_rel',
        'base_id', 'backup_id', 'Backup containers', required=True)
    # Access control.
    public = fields.Boolean('Public?')
    partner_id = fields.Many2one(
        'res.partner', 'Manager',
        default=lambda self: self.user_partner)
    partner_ids = fields.Many2many('res.partner', 'clouder_base_partner_rel',
                                   'base_id', 'partner_id', 'Users')
@property
def fullname(self):
"""
Property returning the full name of the base.
"""
return (self.application_id.code + '-' + self.name + '-'
+ self.domain_id.name).replace('.', '-')
@property
def fullname_(self):
"""
Property returning the full name of the base with all - replace by
underscore (databases compatible names).
"""
return self.fullname.replace('-', '_')
@property
def fulldomain(self):
"""
Property returning the full url of the base.
"""
return self.name + '.' + self.domain_id.name
@property
def databases(self):
"""
Property returning all databases names used for this base, in a dict.
"""
databases = {'single': self.fullname_}
if self.application_id.type_id.multiple_databases:
databases = {}
for database in \
self.application_id.type_id.multiple_databases.split(','):
databases[database] = self.fullname_ + '_' + database
return databases
@property
def databases_comma(self):
"""
Property returning all databases names used for this base,
separated by a comma.
"""
return ','.join([d for k, d in self.databases.iteritems()])
    @property
    def options(self):
        """
        Property returning a dictionary containing the value of all options
        for this base, even is they are not defined here.

        Type-level options provide the defaults; options stored on the
        base itself override them.
        """
        options = {}
        # Start from the application type's base-scoped options with their
        # default values.
        # NOTE(review): 'name' is set to option.id here but to
        # option.name.id in the override loop below -- confirm which is
        # intended.
        for option in \
                self.service_id.container_id.application_id.type_id.option_ids:
            if option.type == 'base':
                options[option.name] = {'id': option.id, 'name': option.id,
                                        'value': option.default}
        # Overlay the options explicitly set on this base.
        for option in self.option_ids:
            options[option.name.name] = {'id': option.id,
                                         'name': option.name.id,
                                         'value': option.value}
        return options
_sql_constraints = [
('name_uniq', 'unique (name,domain_id)',
'Name must be unique per domain !')
]
    @api.one
    @api.constrains('name', 'admin_name', 'admin_email', 'poweruser_email')
    def _validate_data(self):
        """
        Check that the base name and some other fields does not contain any
        forbidden characters.

        Raises except_orm when a field fails its whitelist regex.
        """
        if not re.match("^[\w\d-]*$", self.name):
            raise except_orm(_('Data error!'), _(
                "Name can only contains letters, digits and -"))
        if self.admin_name and not re.match("^[\w\d_]*$", self.admin_name):
            raise except_orm(_('Data error!'), _(
                "Admin name can only contains letters, digits and underscore"))
        if self.admin_email\
                and not re.match("^[\w\d_.@-]*$", self.admin_email):
            raise except_orm(_('Data error!'), _(
                "Admin email can only contains letters, "
                "digits, underscore, - and @"))
        if self.poweruser_email \
                and not re.match("^[\w\d_.@-]*$", self.poweruser_email):
            raise except_orm(_('Data error!'), _(
                "Poweruser email can only contains letters, "
                "digits, underscore, - and @"))
@api.one
@api.constrains('service_id', 'service_ids', 'application_id')
def _check_application(self):
"""
Check that the application of the base is the same than application
of services.
"""
if self.application_id.id != \
self.service_id.container_id.application_id.id:
raise except_orm(_('Data error!'),
_("The application of base must be the same "
"than the application of service."))
for s in self.service_ids:
if self.application_id.id != s.container_idapplication_id.id:
raise except_orm(
_('Data error!'),
_("The application of base must be the "
"same than the application of service.")
)
    @api.one
    @api.constrains('option_ids')
    def _check_option_ids(self):
        """
        Check that the required options are filled.

        Every base-scoped required option of the application type must
        have a non-empty value on this base.
        """
        for type_option in self.application_id.type_id.option_ids:
            if type_option.type == 'base' and type_option.required:
                test = False
                for option in self.option_ids:
                    if option.name == type_option and option.value:
                        test = True
                if not test:
                    raise except_orm(
                        _('Data error!'),
                        _("You need to specify a value for the option "
                          + type_option.name + " for the base " +
                          self.name + ".")
                    )
    @api.one
    @api.constrains('link_ids')
    def _check_link_ids(self):
        """
        Check that the required links are specified.

        Every base-scoped required link of the application must have a
        target on this base.
        """
        for app_link in self.application_id.link_ids:
            if app_link.base and app_link.required:
                test = False
                for link in self.link_ids:
                    if link.name == app_link and link.target:
                        test = True
                if not test:
                    raise except_orm(
                        _('Data error!'),
                        _("You need to specify a link to " + app_link.name
                          + " for the container " + self.name)
                    )
    @api.multi
    @api.onchange('application_id')
    def onchange_application_id(self):
        """
        Update the options, links and some other fields when we change
        the application_id field.
        """
        if self.application_id:
            self.admin_name = self.application_id.admin_name
            # Fall back to the sysadmin address when the application has
            # no admin email configured.
            self.admin_email = self.application_id.admin_email \
                and self.application_id.admin_email \
                or self.email_sysadmin
            # Add the application type's auto options that are not already
            # present on this base.
            options = []
            for type_option in self.application_id.type_id.option_ids:
                if type_option.type == 'base' and type_option.auto:
                    test = False
                    for option in self.option_ids:
                        if option.name == type_option:
                            test = True
                    if not test:
                        options.append((0, 0, {
                            'name': type_option,
                            'value': type_option.default}))
            self.option_ids = options
            # Same for the application's auto links.
            links = []
            for app_link in self.application_id.link_ids:
                if app_link.base and app_link.auto:
                    test = False
                    for link in self.link_ids:
                        if link.name == app_link:
                            test = True
                    if not test:
                        links.append((0, 0, {'name': app_link,
                                             'target': app_link.next}))
            self.link_ids = links
            # Default backups and save scheduling from the application.
            self.backup_ids = [(6, 0, [
                b.id for b in self.application_id.base_backup_ids])]
            self.time_between_save = self.application_id.base_time_between_save
            self.saverepo_change = self.application_id.base_saverepo_change
            self.saverepo_expiration = \
                self.application_id.base_saverepo_expiration
            self.save_expiration = self.application_id.base_save_expiration
    @api.model
    def create(self, vals):
        """
        Override create method to create a container and a service if none
        are specified.

        :param vals: The values needed to create the record.
        """
        if (not 'service_id' in vals) or (not vals['service_id']):
            application_obj = self.env['clouder.application']
            domain_obj = self.env['clouder.domain']
            container_obj = self.env['clouder.container']
            service_obj = self.env['clouder.service']
            # Validate everything needed for the autocreate before
            # touching the database.
            if 'application_id' not in vals or not vals['application_id']:
                raise except_orm(_('Error!'), _(
                    "You need to specify the application of the base."))
            application = application_obj.browse(vals['application_id'])
            if not application.next_server_id:
                raise except_orm(_('Error!'), _(
                    "You need to specify the next server in "
                    "application for the container autocreate."))
            if not application.default_image_id.version_ids:
                raise except_orm(_('Error!'), _(
                    "No version for the image linked to the application, "
                    "abandoning container autocreate..."))
            if not application.version_ids:
                raise except_orm(_('Error!'), _(
                    "No version for the application, "
                    "abandoning service autocreate..."))
            if 'domain_id' not in vals or not vals['domain_id']:
                raise except_orm(_('Error!'), _(
                    "You need to specify the domain of the base."))
            domain = domain_obj.browse(vals['domain_id'])
            # Container named after the base and its (sanitised) domain.
            container_vals = {
                'name': vals['name'] + '_' +
                        domain.name.replace('.', '_').replace('-', '_'),
                'server_id': application.next_server_id.id,
                'application_id': application.id,
                'image_id': application.default_image_id.id,
                'image_version_id':
                    application.default_image_id.version_ids[0].id,
            }
            container_id = container_obj.create(container_vals)
            # NOTE(review): create() returns a recordset in the new API;
            # passing it directly as the many2one value relies on implicit
            # coercion -- confirm `.id` is not needed here and below.
            service_vals = {
                'name': 'production',
                'container_id': container_id,
                'application_version_id': application.version_ids[0].id,
            }
            vals['service_id'] = service_obj.create(service_vals)
        return super(ClouderBase, self).create(vals)
    @api.multi
    def write(self, vals):
        """
        Override write method to move base if we change the service.

        Moving is implemented as: forced save on the old service, purge,
        write, redeploy, then restore the save onto the new service.

        :param vals: The values to update.
        """
        save = False
        if 'service_id' in vals:
            self = self.with_context(self.create_log('service change'))
            self = self.with_context(save_comment='Before service change')
            self = self.with_context(forcesave=True)
            save = self.save()
            self = self.with_context(forcesave=False)
            self.purge()

        res = super(ClouderBase, self).write(vals)

        if save:
            # Point the save at the new service before restoring into it.
            save.service_id = vals['service_id']
            self = self.with_context(base_restoration=True)
            self.deploy()
            save.restore()
            self.end_log()
        if 'nosave' in vals or 'ssl_only' in vals:
            # These flags influence the published links, so redeploy them.
            self.deploy_links()
        return res
@api.one
def unlink(self):
"""
Override unlink method to make a save before we delete a base.
"""
self = self.with_context(save_comment='Before unlink')
self.save()
return super(ClouderBase, self).unlink()
    @api.multi
    def save(self):
        """
        Make a new save.

        Ensures a valid save repository exists (reusing or rotating it),
        then creates one save record per configured backup container and
        schedules the next save. Returns the last save record created, or
        None when saving is skipped.
        """
        save_obj = self.env['clouder.save.save']
        repo_obj = self.env['clouder.save.repository']

        save = False
        now = datetime.now()

        # Reuse an existing repository matching this base, if any.
        if not self.save_repository_id:
            repo_ids = repo_obj.search([
                ('base_name', '=', self.name),
                ('base_domain', '=', self.domain_id.name)])
            if repo_ids:
                self.save_repository_id = repo_ids[0]

        # (Re)create the repository when missing or past its change date.
        if not self.save_repository_id or datetime.strptime(
                self.save_repository_id.date_change,
                "%Y-%m-%d") < now or False:
            repo_vals = {
                'name': now.strftime(
                    "%Y-%m-%d") + '_' + self.name + '_' + self.domain_id.name,
                'type': 'base',
                'date_change': (now + timedelta(days=self.saverepo_change
                                or self.application_id.base_saverepo_change)
                                ).strftime("%Y-%m-%d"),
                'date_expiration': (now + timedelta(
                    days=self.saverepo_expiration
                    or self.application_id.base_saverepo_expiration)
                ).strftime("%Y-%m-%d"),
                'base_name': self.name,
                'base_domain': self.domain_id.name,
            }
            repo_id = repo_obj.create(repo_vals)
            self.save_repository_id = repo_id

        # Skip when the context or the nosave flag forbids saving
        # (forcesave in context overrides the record flag).
        if 'nosave' in self.env.context \
                or (self.nosave and not 'forcesave' in self.env.context):
            self.log(
                'This base shall not be saved or the backup '
                'isnt configured in conf, skipping save base')
            return

        self = self.with_context(self.create_log('save'))

        if not self.backup_ids:
            self.log('The backup isnt configured in conf, skipping save base')
        # One save record per configured backup container.
        for backup_server in self.backup_ids:
            save_vals = {
                'name': self.now_bup + '_' + self.fullname,
                'backup_id': backup_server.id,
                'repo_id': self.save_repository_id.id,
                'date_expiration': (now + timedelta(
                    days=self.save_expiration
                    or self.application_id.base_save_expiration)
                ).strftime("%Y-%m-%d"),
                'comment': 'save_comment' in self.env.context
                           and self.env.context['save_comment']
                           or self.save_comment or 'Manual',
                'now_bup': self.now_bup,
                'container_id': self.service_id.container_id.id,
                'service_id': self.service_id.id,
                'base_id': self.id,
            }
            save = save_obj.create(save_vals)
        # Schedule the next automatic save.
        date_next_save = (datetime.now() + timedelta(
            minutes=self.time_between_save
            or self.application_id.base_time_between_save)
        ).strftime("%Y-%m-%d %H:%M:%S")
        self.write({'save_comment': False, 'date_next_save': date_next_save})

        self.end_log()
        return save
@api.multi
def post_reset(self):
"""
Hook which can be called by submodules to execute commands after we
reset a base.
"""
self.deploy_links()
return
@api.multi
def reset_base(self, base_name=False, service_id=False):
"""
Reset the base with the parent base.
:param base_name: Specify another base name
if the reset need to be done in a new base.
:param service_id: Specify the service_id is the reset
need to be done in another service.
"""
base_parent_id = self.parent_id and self.parent_id or self
if not 'save_comment' in self.env.context:
self = self.with_context(save_comment='Reset base')
self.with_context(forcesave=True)
save = base_parent_id.save()
self.with_context(forcesave=False)
self.with_context(nosave=True)
vals = {'base_id': self.id, 'base_restore_to_name': self.name,
'base_restore_to_domain_id': self.domain_id.id,
'service_id': self.service_id.id, 'base_nosave': True}
if base_name and service_id:
vals = {'base_id': False, 'base_restore_to_name': base_name,
'base_restore_to_domain_id': self.domain_id.id,
'service_id': service_id.id, 'base_nosave': True}
save.write(vals)
base = save.restore()
base.write({'parent_id': base_parent_id.id})
base = base.with_context(
base_parent_fullname_=base_parent_id.fullname_)
base = base.with_context(
service_parent_name=base_parent_id.service_id.name)
base.update_base()
base.post_reset()
base.deploy_post()
@api.multi
def deploy_create_database(self):
"""
Hook which can be called by submodules to execute commands when we
want to create the database. If return False, the database will be
created by default method.
"""
return False
@api.multi
def deploy_build(self):
"""
Hook which can be called by submodules to execute commands when we
want to build the database.
"""
return
@api.multi
def deploy_post_restore(self):
"""
Hook which can be called by submodules to execute commands after we
restore a database.
"""
return
@api.multi
def deploy_create_poweruser(self):
"""
Hook which can be called by submodules to execute commands when we
want to create a poweruser.
"""
return
@api.multi
def deploy_test(self):
"""
Hook which can be called by submodules to execute commands when we
want to deploy test datas.
"""
return
@api.multi
def deploy_post(self):
"""
Hook which can be called by submodules to execute commands after we
deploy a base.
"""
return
    @api.multi
    def deploy(self):
        """
        Deploy the base: purge any previous deployment, create the
        database(s), then build or restore the data and run the
        post-deployment hooks.
        """
        self.purge()
        if 'base_restoration' in self.env.context:
            # A restoration flow drives the rest of the deployment itself.
            return
        res = self.deploy_create_database()
        if not res:
            # No submodule created the database; create one database per
            # entry in self.databases with the default method.
            for key, database in self.databases.iteritems():
                if self.service_id.database_type != 'mysql':
                    # PostgreSQL-style: run createdb from the application
                    # container against the database server.
                    ssh = self.connect(
                        self.service_id.container_id.fullname,
                        username=self.application_id.type_id.system_user)
                    self.execute(ssh, ['createdb', '-h',
                                       self.service_id.database_server, '-U',
                                       self.service_id.db_user, database])
                    ssh.close()
                else:
                    # MySQL: create the schema and grant the service user,
                    # as root, from the database container.
                    ssh = self.connect(
                        self.service_id.database.fullname)
                    self.execute(ssh, [
                        "mysql -u root -p'"
                        + self.service_id.database.root_password
                        + "' -se \"create database " + database + ";\""
                    ])
                    self.execute(ssh, [
                        "mysql -u root -p'"
                        + self.service_id.database.root_password
                        + "' -se \"grant all on " + database
                        + ".* to '" + self.service_id.db_user + "';\""
                    ])
                    ssh.close()
            self.log('Database created')
        if self.build == 'build':
            self.deploy_build()
        elif self.build == 'restore':
            # Restore the build.sql dump shipped with the application
            # version sources.
            if self.service_id.database_type != 'mysql':
                ssh = self.connect(
                    self.service_id.container_id.fullname,
                    username=self.application_id.type_id.system_user)
                self.execute(ssh, [
                    'pg_restore', '-h', self.service_id.database_server,
                    '-U', self.service_id.db_user, '--no-owner',
                    '-Fc', '-d', self.fullname_,
                    self.service_id.application_version_id.full_localpath
                    + '/' + self.service_id.database_type + '/build.sql'
                ])
                ssh.close()
            else:
                ssh = self.connect(
                    self.service_id.container_id.fullname,
                    username=self.application_id.type_id.system_user)
                # NOTE(review): this logs in as db_user but passes the
                # *root* password -- confirm those credentials match.
                self.execute(ssh, [
                    'mysql', '-h', self.service_id.database_server,
                    '-u', self.service_id.db_user,
                    '-p' + self.service_id.database.root_password,
                    self.fullname_, '<',
                    self.service_id.application_version_id.full_localpath
                    + '/' + self.service_id.database_type + '/build.sql'
                ])
                ssh.close()
            self.deploy_post_restore()
        if self.build != 'none':
            if self.poweruser_name and self.poweruser_email \
                    and self.admin_name != self.poweruser_name:
                self.deploy_create_poweruser()
            if self.test:
                self.deploy_test()
            self.deploy_post()
        # For shinken: trigger a save right after deploy.
        self.save()
@api.multi
def purge_post(self):
"""
Hook which can be called by submodules to execute commands after we
purge a base.
"""
return
    @api.multi
    def purge_db(self):
        """
        Drop every database attached to this base.
        """
        for key, database in self.databases.iteritems():
            if self.service_id.database_type != 'mysql':
                # PostgreSQL: forbid new connections, terminate the
                # remaining backends, then drop the database.
                ssh = self.connect(self.service_id.database.fullname,
                                   username='postgres')
                self.execute(ssh, [
                    'psql', '-c',
                    '"update pg_database set datallowconn = \'false\' '
                    'where datname = \'' + database + '\'; '
                    'SELECT pg_terminate_backend(pid) '
                    'FROM pg_stat_activity WHERE datname = \''
                    + database + '\';"'
                ])
                self.execute(ssh, ['dropdb', database])
                ssh.close()
            else:
                # MySQL: drop the schema as root on the database container.
                ssh = self.connect(self.service_id.database.fullname)
                self.execute(ssh, [
                    "mysql -u root -p'"
                    + self.service_id.database.root_password
                    + "' -se \"drop database " + database + ";\""
                ])
                ssh.close()
        return
@api.multi
def purge(self):
"""
Purge the base.
"""
self.purge_db()
self.purge_post()
def update_base(self):
"""
Hook which can be called by submodules to execute commands when we
want to update a base.
"""
return
class ClouderBaseOption(models.Model):
    """
    Define the base.option object, used to define custom values specific
    to a base.
    """
    _name = 'clouder.base.option'
    base_id = fields.Many2one('clouder.base', 'Base', ondelete="cascade",
                              required=True)
    name = fields.Many2one('clouder.application.type.option', 'Option',
                           required=True)
    value = fields.Text('Value')
    _sql_constraints = [
        ('name_uniq', 'unique(base_id,name)',
         'Option name must be unique per base!'),
    ]
    @api.one
    @api.constrains('base_id')
    def _check_required(self):
        """
        Check that a value is specified when the selected option is
        marked as required.

        Raises except_orm when the option is required and has no value.
        """
        if self.name.required and not self.value:
            # Use a placeholder-based template so the message is a single
            # translatable string; _() on concatenated fragments can never
            # match a translation entry.
            raise except_orm(
                _('Data error!'),
                _("You need to specify a value for the option "
                  "%s for the base %s.")
                % (self.name.name, self.base_id.name)
            )
class ClouderBaseLink(models.Model):
    """
    Define the base.link object, used to specify the applications linked
    to a base.
    """
    _name = 'clouder.base.link'
    _inherit = ['clouder.model']
    base_id = fields.Many2one('clouder.base', 'Base', ondelete="cascade",
                              required=True)
    name = fields.Many2one('clouder.application.link', 'Application Link',
                           required=True)
    target = fields.Many2one('clouder.container', 'Target')
    @property
    def target_base(self):
        """
        First base hosted on the target container, or a falsy value when
        the target has no service or its first service has no base.
        """
        return self.target.service_ids and \
            self.target.service_ids[0].base_ids and \
            self.target.service_ids[0].base_ids[0]
    @api.one
    @api.constrains('base_id')
    def _check_required(self):
        """
        Check that a target is specified when the link is required.

        Raises except_orm when the link is required and has no target.
        """
        if self.name.required and not self.target:
            # Placeholder-based translation template instead of string
            # concatenation, so the message can actually be translated.
            raise except_orm(
                _('Data error!'),
                _("You need to specify a link to %s for the base %s")
                % (self.name.application_id.name, self.base_id.name)
            )
    @api.multi
    def deploy_link(self):
        """
        Hook which can be called by submodules to execute commands when we
        deploy a link.
        """
        return
    @api.multi
    def purge_link(self):
        """
        Hook which can be called by submodules to execute commands when we
        purge a link.
        """
        return
    def control(self):
        """
        Return True when the deploy/purge of this link may proceed.

        NOTE(review): unlike the hooks above, this method carries no
        @api.multi decorator -- confirm that is intentional.
        """
        if not self.target:
            self.log(
                'The target isnt configured in the link, skipping deploy link')
            return False
        if not self.name.base:
            self.log('This application isnt for base, skipping deploy link')
            return False
        return True
    @api.multi
    def deploy_(self):
        """
        Purge any previous deployment, then control and call the hook to
        deploy the link.
        """
        self.purge_()
        self.control() and self.deploy_link()
    @api.multi
    def purge_(self):
        """
        Control and call the hook to purge the link.
        """
        self.control() and self.purge_link()
| gpl-3.0 |
mbayon/TFG-MachineLearning | vbig/lib/python2.7/site-packages/pkg_resources/__init__.py | 30 | 103308 | # coding: utf-8
"""
Package resource API
--------------------
A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.
The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""
from __future__ import absolute_import
import sys
import os
import io
import time
import re
import types
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import operator
import platform
import collections
import plistlib
import email.parser
import tempfile
import textwrap
import itertools
from pkgutil import get_importer
try:
import _imp
except ImportError:
# Python 3.2 compatibility
import imp as _imp
from pkg_resources.extern import six
from pkg_resources.extern.six.moves import urllib, map, filter
# capture these to bypass sandboxing
from os import utime
try:
from os import mkdir, rename, unlink
WRITE_SUPPORT = True
except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
from os import open as os_open
from os.path import isdir, split
try:
import importlib.machinery as importlib_machinery
# access attribute to force import under delayed import mechanisms.
importlib_machinery.__name__
except ImportError:
importlib_machinery = None
from pkg_resources.extern import appdirs
from pkg_resources.extern import packaging
__import__('pkg_resources.extern.packaging.version')
__import__('pkg_resources.extern.packaging.specifiers')
__import__('pkg_resources.extern.packaging.requirements')
__import__('pkg_resources.extern.packaging.markers')
# Refuse to run on the Python 3.0-3.2 series; Python 2 and 3.3+ are allowed.
if (3, 0) < sys.version_info < (3, 3):
    raise RuntimeError("Python 3.3 or later is required")
# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
class PEP440Warning(RuntimeWarning):
    """
    Warning emitted when a version or specifier does not comply with
    PEP 440.
    """
class _SetuptoolsVersionMixin(object):
    """
    Mixin that grafts legacy (pre-setuptools 8.0) behaviour onto the
    packaging version classes: comparison against plain tuples,
    indexing, and iteration over the old-style parsed-version tuple.
    """
    def __hash__(self):
        return super(_SetuptoolsVersionMixin, self).__hash__()
    def __lt__(self, other):
        # Tuples are compared against the legacy tuple form of self;
        # anything else defers to the PEP 440 comparison.
        if isinstance(other, tuple):
            return tuple(self) < other
        else:
            return super(_SetuptoolsVersionMixin, self).__lt__(other)
    def __le__(self, other):
        if isinstance(other, tuple):
            return tuple(self) <= other
        else:
            return super(_SetuptoolsVersionMixin, self).__le__(other)
    def __eq__(self, other):
        if isinstance(other, tuple):
            return tuple(self) == other
        else:
            return super(_SetuptoolsVersionMixin, self).__eq__(other)
    def __ge__(self, other):
        if isinstance(other, tuple):
            return tuple(self) >= other
        else:
            return super(_SetuptoolsVersionMixin, self).__ge__(other)
    def __gt__(self, other):
        if isinstance(other, tuple):
            return tuple(self) > other
        else:
            return super(_SetuptoolsVersionMixin, self).__gt__(other)
    def __ne__(self, other):
        if isinstance(other, tuple):
            return tuple(self) != other
        else:
            return super(_SetuptoolsVersionMixin, self).__ne__(other)
    def __getitem__(self, key):
        # Legacy indexing into the old-style parsed-version tuple.
        return tuple(self)[key]
    def __iter__(self):
        # Reproduce the pre-8.0 parse_version() tuple, one part at a time,
        # warning because this behaviour is deprecated.
        component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
        replace = {
            # normalize pre-release/dev spellings to single sort keys
            'pre': 'c',
            'preview': 'c',
            '-': 'final-',
            'rc': 'c',
            'dev': '@',
        }.get
        def _parse_version_parts(s):
            # Numeric parts are zero-padded so string comparison matches
            # numeric ordering; alphabetic parts get a '*' prefix so they
            # sort before numbers.
            for part in component_re.split(s):
                part = replace(part, part)
                if not part or part == '.':
                    continue
                if part[:1] in '0123456789':
                    # pad for numeric comparison
                    yield part.zfill(8)
                else:
                    yield '*' + part
            # ensure that alpha/beta/candidate are before final
            yield '*final'
        def old_parse_version(s):
            parts = []
            for part in _parse_version_parts(s.lower()):
                if part.startswith('*'):
                    # remove '-' before a prerelease tag
                    if part < '*final':
                        while parts and parts[-1] == '*final-':
                            parts.pop()
                    # remove trailing zeros from each series of numeric parts
                    while parts and parts[-1] == '00000000':
                        parts.pop()
                parts.append(part)
            return tuple(parts)
        # Warn for use of this function
        warnings.warn(
            "You have iterated over the result of "
            "pkg_resources.parse_version. This is a legacy behavior which is "
            "inconsistent with the new version class introduced in setuptools "
            "8.0. In most cases, conversion to a tuple is unnecessary. For "
            "comparison of versions, sort the Version instances directly. If "
            "you have another use case requiring the tuple, please file a "
            "bug with the setuptools project describing that need.",
            RuntimeWarning,
            stacklevel=1,
        )
        for part in old_parse_version(str(self)):
            yield part
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
    """A PEP 440 version that also supports legacy tuple comparison."""

class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
                              packaging.version.LegacyVersion):
    """A non-PEP 440 version that also supports legacy tuple comparison."""

def parse_version(v):
    """
    Parse *v* into a comparable version object: a SetuptoolsVersion when
    it is PEP 440 compliant, otherwise a SetuptoolsLegacyVersion.
    """
    try:
        return SetuptoolsVersion(v)
    except packaging.version.InvalidVersion:
        return SetuptoolsLegacyVersion(v)
# Registry mapping a module-global's name to its "state type" ('dict' or
# 'object'), used by __getstate__/__setstate__ below to snapshot and
# restore module-level state.
_state_vars = {}
def _declare_state(vartype, **kw):
    """Install *kw* as module globals and register their state type."""
    globals().update(kw)
    _state_vars.update(dict.fromkeys(kw, vartype))
def __getstate__():
    """Capture every registered global via its type's _sget_* helper."""
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        state[k] = g['_sget_' + v](g[k])
    return state
def __setstate__(state):
    """Restore globals captured by __getstate__ via the _sset_* helpers."""
    g = globals()
    for k, v in state.items():
        g['_sset_' + _state_vars[k]](k, g[k], v)
    return state
def _sget_dict(val):
    # Snapshot a dict-typed state variable as a shallow copy.
    return val.copy()
def _sset_dict(key, ob, state):
    # Restore a dict-typed state variable in place.
    ob.clear()
    ob.update(state)
def _sget_object(val):
    # Snapshot an object that defines its own __getstate__.
    return val.__getstate__()
def _sset_object(key, ob, state):
    # Restore an object that defines its own __setstate__.
    ob.__setstate__(state)
# 'none'-typed state is neither captured nor restored.
_sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils reports the *minimum* Mac OS X version required to use an
    extension, but compatibility checks need the version actually being
    run, so on darwin the version component is replaced with the live
    one from _macosx_vers().  Other platforms pass through unchanged.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is None or sys.platform != "darwin":
        return plat
    try:
        current = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (current, m.group(3))
    except ValueError:
        # not Mac OS X
        pass
    return plat
# Public API of this module; everything else is an implementation detail.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',
    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',
    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',
    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',
    # Warnings
    'PEP440Warning',
    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
    # filesystem utilities
    'ensure_directory', 'normalize_path',
    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',
    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
    """Base class for dependency resolution errors."""
    def __repr__(self):
        return '%s%r' % (self.__class__.__name__, self.args)
class VersionConflict(ResolutionError):
    """
    An already-installed distribution conflicts with the requested
    requirement.

    Construct with the installed Distribution and the requested
    Requirement.
    """
    _template = "{self.dist} is installed but {self.req} is required"
    @property
    def dist(self):
        """The installed Distribution."""
        return self.args[0]
    @property
    def req(self):
        """The conflicting Requirement."""
        return self.args[1]
    def report(self):
        """Return a human-readable description of the conflict."""
        return self._template.format(self=self)
    def with_context(self, required_by):
        """
        Return self unchanged when *required_by* is empty, otherwise a
        ContextualVersionConflict carrying that set as extra context.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict enriched with a third argument: the set of
    requirements that pulled in the installed distribution.
    """
    _template = VersionConflict._template + ' by {self.required_by}'
    @property
    def required_by(self):
        """The set of requirement names that required the distribution."""
        return self.args[2]
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found."""
    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")
    @property
    def req(self):
        """The unmet Requirement."""
        return self.args[0]
    @property
    def requirers(self):
        """Names of the things that required it (may be None/empty)."""
        return self.args[1]
    @property
    def requirers_str(self):
        """Human-readable list of requirers for the error message."""
        if self.requirers:
            return ', '.join(self.requirers)
        return 'the application'
    def report(self):
        """Return a human-readable description of the failure."""
        return self._template.format(self=self)
    def __str__(self):
        return self.report()
class UnknownExtra(ResolutionError):
    """Raised when a distribution lacks a requested "extra" feature."""
# Registry of loader type -> provider factory, populated by
# register_loader_type().
_provider_factories = {}

# Major.minor version of the running interpreter, e.g. '2.7' or '3.11'.
# The historical ``sys.version[:3]`` truncates two-digit minor versions
# (yielding '3.1' on Python 3.10+), so build it from sys.version_info.
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" codes, from most to least preferred.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
def register_loader_type(loader_type, provider_factory):
    """
    Associate *provider_factory* with modules whose ``__loader__`` is an
    instance of *loader_type* (a PEP 302 loader class).

    The factory is called with the module object and must return an
    ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement."""
    if isinstance(moduleOrReq, Requirement):
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # Import the module on first reference, then adapt its loader.
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
    # Return the macOS version as a list of string components.
    # The mutable default argument is intentional: it memoizes the
    # result for the life of the process.
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts builds where mac_ver() returns ''
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']
        _cache.append(version.split('.'))
    return _cache[0]
def _macosx_arch(machine):
    """Normalize Mac machine names: both PowerPC spellings map to 'ppc'."""
    if machine in ('PowerPC', 'Power_Macintosh'):
        return 'ppc'
    return machine
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        # Older interpreters may report a bare darwin platform; rebuild
        # the 'macosx-<major>.<minor>-<arch>' spelling from uname.
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                                        _macosx_arch(machine))
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat
# Patterns matching platform strings such as 'macosx-10.9-x86_64' and the
# pre-setuptools-0.6 spelling 'darwin-8.11.1-i386'.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True
    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)
        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                # NOTE: lexicographic string comparison of versions; it is
                # only applied to the historical 10.3/10.4 range here.
                if dversion == 7 and macosversion >= "10.3" or \
                        dversion == 8 and macosversion >= "10.4":
                    return True
            # egg isn't macosx or legacy darwin
            return False
        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
                provMac.group(3) != reqMac.group(3):
            return False
        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False
        return True
    # XXX Linux and other platforms' special cases should go here
    return False
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script inside the *caller's* module namespace, wiped clean
    # so the script behaves like a fresh __main__; only __name__ survives.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)
# backward compatibility
run_main = run_script
def get_distribution(dist):
    """Return a current Distribution object for a Requirement or string."""
    # Coerce step by step: string -> Requirement -> Distribution.
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if isinstance(dist, Distribution):
        return dist
    raise TypeError("Expected string, Requirement, or Distribution", dist)
# Convenience wrappers: each coerces its first argument through
# get_distribution() and forwards to the Distribution method of the
# same name.
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider:
    """Interface for objects that supply a distribution's metadata."""
    # NOTE: the methods below are declared without 'self' -- this class is
    # interface documentation only and is never instantiated.
    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""
    def get_metadata(name):
        """The named metadata resource as a string"""
    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

       Leading and trailing whitespace is stripped from each line, and lines
       with ``#`` as the first non-blank character are omitted."""
    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""
    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # NOTE: like IMetadataProvider, the methods are declared without
    # 'self'; this class documents the provider interface only.
    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""
    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""
    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""
    def has_resource(resource_name):
        """Does the package contain the named resource?"""
    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
"""A collection of active distributions on sys.path (or a similar list)"""
def __init__(self, entries=None):
"""Create working set from list of path entries (default=sys.path)"""
self.entries = []
self.entry_keys = {}
self.by_key = {}
self.callbacks = []
if entries is None:
entries = sys.path
for entry in entries:
self.add_entry(entry)
    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set (the one backed by sys.path).
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws
        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is cannot satisfy __requires__; rebuild a
            # working set (and sys.path) from the requirements alone.
            return cls._build_from_requirements(__requires__)
        return ws
    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec.

        Side effect: rewrites ``sys.path`` so it exactly matches the
        resulting working set's entries.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)
        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)
        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws
    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        # scan the entry and activate every distribution found on it
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)
def __contains__(self, dist):
"""True if `dist` is the active distribution for its project"""
return self.by_key.get(dist.key) == dist
def find(self, req):
"""Find a distribution matching requirement `req`
If there is an active distribution for the requested project, this
returns it as long as it meets the version requirement specified by
`req`. But, if there is an active distribution for the project and it
does *not* meet the `req` requirement, ``VersionConflict`` is raised.
If there is no active distribution for the requested project, ``None``
is returned.
"""
dist = self.by_key.get(req.key)
if dist is not None and dist not in req:
# XXX add more info
raise VersionConflict(dist, req)
return dist
def iter_entry_points(self, group, name=None):
"""Yield entry point objects from `group` matching `name`
If `name` is None, yields all entry points in `group` from all
distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order).
"""
for dist in self:
entries = dist.get_entry_map(group)
if name is None:
for ep in entries.values():
yield ep
elif name in entries:
yield entries[name]
    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Execute in the *caller's* module namespace, cleared so the
        # script sees a fresh dict with only __name__ preserved.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)
def __iter__(self):
"""Yield distributions for non-duplicate projects in the working set
The yield order is the order in which the items' path entries were
added to the working set.
"""
seen = {}
for item in self.entries:
if item not in self.entry_keys:
# workaround a cache issue
continue
for key in self.entry_keys[item]:
if key not in seen:
seen[key] = 1
yield self.by_key[key]
    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            # splice the dist's location into self.entries ordering
            dist.insert_on(self.entries, entry, replace=replace)
        if entry is None:
            entry = dist.location
        # index the dist's key under both the given entry and its location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return
        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify activation listeners
        self._added_new(dist)
    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        """
        # set up the stack (reversed so pop(0) is breadth-first)
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []
        req_extras = _ReqExtras()
        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)
        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            if not req_extras.markers_pass(req):
                # environment markers rule this requirement out
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)
            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)
            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras
            processed[req] = True
        # return list of distros to activate
        return to_activate
    def find_plugins(self, plugin_env, full_env=None, installer=None,
            fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()
        error_info = {}
        # used as an insertion-ordered set of resolved distributions
        distributions = {}
        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env
        # Resolve against a scratch copy of this working set, so a plugin
        # that fails to resolve leaves the live working set untouched.
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))
        for project_name in plugin_projects:
            # plugin_env[project_name] is newest-first, so the first version
            # that resolves cleanly wins.
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                try:
                    resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break
                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))
                    # success, no need to try any more versions of this project
                    break
        distributions = list(distributions)
        distributions.sort()
        return distributions, error_info
def require(self, *requirements):
"""Ensure that distributions matching `requirements` are activated
`requirements` must be a string or a (possibly-nested) sequence
thereof, specifying the distributions and versions required. The
return value is a sequence of the distributions that needed to be
activated to fulfill the requirements; all relevant distributions are
included, even if they were already activated in this working set.
"""
needed = self.resolve(parse_requirements(requirements))
for dist in needed:
self.add(dist)
return needed
def subscribe(self, callback, existing=True):
"""Invoke `callback` for all distributions
If `existing=True` (default),
call on all existing ones, as well.
"""
if callback in self.callbacks:
return
self.callbacks.append(callback)
if not existing:
return
for dist in self:
callback(dist)
    def _added_new(self, dist):
        # Notify every subscribed callback that `dist` was just activated.
        for callback in self.callbacks:
            callback(dist)
    def __getstate__(self):
        # Pickle shallow copies, so later mutation of this working set cannot
        # alter the captured state (and restoring cannot alias our containers).
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )
    def __setstate__(self, e_k_b_c):
        # Inverse of __getstate__: unpack the 4-tuple and store defensive
        # copies of each component.
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
class _ReqExtras(dict):
"""
Map each requirement to the extras that demanded it.
"""
def markers_pass(self, req):
"""
Evaluate markers for req against each extra that
demanded it.
Return False if the req has a marker and fails
evaluation. Otherwise, return True.
"""
extra_evals = (
req.marker.evaluate({'extra': extra})
for extra in self.get(req, ()) + (None,)
)
return not req.marker or any(extra_evals)
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # key (lower-cased project name) -> list of Distributions,
        # kept sorted best-first (see add()).
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A dist with py_version None is version-agnostic and always passes
        # the Python check; likewise self.python None disables the check.
        return (self.python is None or dist.py_version is None
            or dist.py_version == self.python) \
            and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path
        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.
        """
        distribution_key = project_name.lower()
        # Missing projects yield an empty list rather than KeyError.
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep each bucket ordered best/newest first so that
                # __getitem__ and best_match see preferred dists early.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        # self[req.key] is newest-first, so the first match is the best one.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose bucket has been emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # The result is fully permissive (no platform/python filtering);
        # each operand already filtered its own contents on the way in.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
# XXX backward compatibility: legacy alias so older code importing
# `AvailableDistributions` keeps working.
AvailableDistributions = Environment
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Raised by ``ResourceManager.extraction_error()``.  The following
    attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extracted resources; None means use
    # get_default_cache() (see set_extraction_path()).
    extraction_path = None

    def __init__(self):
        # Maps each extraction target path handed out by get_cache_path()
        # to 1, for potential cleanup later.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        # Always raises ExtractionError; must be called from inside an
        # `except` block so sys.exc_info() still holds the original failure.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()
        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache
            The following error occurred while trying to extract file(s) to the Python egg
            cache:
            {old_exc}
            The Python egg cache directory is currently set to:
            {cache_path}
            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except:
            # NOTE(review): bare except routes *every* failure (including
            # KeyboardInterrupt) into extraction_error(), which re-raises as
            # ExtractionError; confirm intent before narrowing to Exception.
            self.extraction_error()
        self._warn_unsafe_extraction_path(extract_path)
        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )
        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX not implemented: always returns None despite the documented
        # return value.
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    # Fall back to the per-user, platform-appropriate cache location.
    return appdirs.user_cache_dir(appname='Python-Eggs')
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    invalid_run = re.compile(r'[^A-Za-z0-9.]+')
    return invalid_run.sub('-', name)
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer the canonical PEP 440 normalization when it parses.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        pass
    # Legacy fallback: spaces become dots, other invalid runs become '-'.
    dotted = version.replace(' ', '.')
    return re.sub(r'[^A-Za-z0-9.]+', '-', dotted)
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub(r'[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # Strip location info that is meaningless for a marker string.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        # Parse and evaluate in one step; parse failures surface as
        # InvalidMarker, which callers expect as SyntaxError.
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as exc:
        raise SyntaxError(exc)
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by EggProvider subclasses when the module lives in an egg.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Default: wrap the raw bytes; subclasses may stream directly.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        # _get returns bytes; metadata consumers expect text on Python 3.
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Execute a script stored in the distribution's EGG-INFO/scripts dir.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # Script only exists inside a zip: seed linecache so tracebacks
            # can still show source lines, then exec from the text.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir/_get primitives below define the abstract
    # storage interface; concrete providers override them.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto `base` using os separators.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

# Fallback provider for any loader type without a more specific registration.
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        # Walk upward one path component at a time until we either find a
        # ".egg" ancestor (recording its name, EGG-INFO dir, and root) or
        # reach the filesystem root, where os.path.split is a fixed point.
        while path != old:
            if _is_unpacked_egg(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Binary mode: callers decode themselves if they want text.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # SourceFileLoader may be absent on older Pythons; type(None) never
        # matches a real loader, making registration a harmless no-op there.
        loader_cls = getattr(importlib_machinery, 'SourceFileLoader',
            type(None))
        register_loader_type(loader_cls, cls)

DefaultProvider._register()
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []
empty_provider = EmptyProvider()
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            return {
                name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()
            }

    load = build
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """

    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        key = os.path.normpath(path)
        current_mtime = os.stat(key).st_mtime
        cached = self.get(key)
        # Rebuild whenever the archive is new to us or has been modified.
        if cached is None or cached.mtime != current_mtime:
            cached = self.manifest_mod(self.build(key), current_mtime)
            self[key] = cached
        return cached.manifest
class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        # When ZipFile is already a context manager, return a plain ZipFile
        # directly from __new__; since the returned object is not an
        # instance of this class, our __init__ is skipped entirely.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    # Shared cache of zip manifests, keyed by archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # zip subpath -> ZipInfo, lazily (re)loaded from the archive.
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        # If the requested resource is "eager", extract all eager resources
        # together (e.g. native libs that must sit beside each other).
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):
        # Directories extract recursively, one member at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                'on this platform')
        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )
            if self._is_current(real_path, zip_path):
                return real_path
            # Write to a unique temp name, then rename into place; this makes
            # concurrent extraction of the same resource safe without locks.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)
            try:
                rename(tmpnam, real_path)
            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise
        except os.error:
            # report a user-friendly error
            manager.extraction_error()
        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Cached union of native_libs.txt and eager_resources.txt entries.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Lazily build (and cache) a directory index: each directory subpath
        # maps to the list of names directly inside it.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        # A path exists if it is a file in the manifest or a known directory.
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))

register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        # PKG-INFO is the only metadata this provider ever exposes.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        with io.open(self.path, encoding='utf-8', errors="replace") as stream:
            text = stream.read()
        self._warn_on_replacement(text)
        return text

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def _warn_on_replacement(self, metadata):
        # Python 2.6 and 3.2 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char in metadata:
            tmpl = "{self.path} could not be properly decoded in UTF-8"
            msg = tmpl.format(**locals())
            warnings.warn(msg)
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` holds the importable code; `egg_info` is the directory
        # containing the distribution's metadata files.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Mirror ZipProvider's state, but derive it from the zipimporter
        # itself rather than from an already-imported module.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        if importer.prefix:
            # Importer rooted at a subdirectory inside the archive.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
_declare_state('dict', _distribution_finders={})
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Dispatch to whichever finder is registered for this path item's
    # importer type.
    item_importer = get_importer(path_item)
    locate = _find_adapter(_distribution_finders, item_importer)
    return locate(item_importer, path_item, only)
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    # Recurse into ".egg" entries packaged inside this zip (basket layout).
    for subitem in metadata.resource_listdir('/'):
        if _is_unpacked_egg(subitem):
            subpath = os.path.join(path_item, subitem)
            for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist

register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
    # Catch-all finder for importer types that cannot contain distributions.
    return ()

register_finder(object, find_nothing)
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _version_key(name):
        # Parse every dash-separated component (plus the extension) so the
        # comparison follows version ordering rather than plain strings.
        stem, ext = os.path.splitext(name)
        components = stem.split('-') + [ext]
        return [packaging.version.parse(component) for component in components]

    return sorted(names, key=_version_key, reverse=True)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory.

    Handles three cases inside a readable directory: ``*.egg-info`` /
    ``*.dist-info`` entries (file or directory metadata), unpacked
    ``*.egg`` directories, and ``*.egg-link`` pointer files.  If the path
    item is itself an unpacked egg, it is yielded directly.
    """
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if _is_unpacked_egg(path_item):
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item, 'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            path_item_entries = _by_version_descending(os.listdir(path_item))
            for entry in path_item_entries:
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        if len(os.listdir(fullpath)) == 0:
                            # Empty egg directory, skip.
                            continue
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and _is_unpacked_egg(entry):
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link files point at a development checkout;
                    # follow only the first non-blank line.
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break
# Plain filesystem path entries use the directory-scanning finder.
register_finder(pkgutil.ImpImporter, find_on_path)
if hasattr(importlib_machinery, 'FileFinder'):
    # Python 3's importlib file-based importer
    register_finder(importlib_machinery.FileFinder, find_on_path)
# Global registries backing namespace-package support, recorded via
# _declare_state (defined earlier in this module) so they participate in
# module state save/restore.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages
    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::
        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages
    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Lookups go through _find_adapter over this registry (see _handle_ns).
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed).

    Returns the subpath appended to the package's ``__path__``, or ``None``
    if the path item has no importer/loader for the package.  Creates the
    module object on first sight and binds it into its parent package.
    """
    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # First time we see this namespace package: create an empty module
        # whose __path__ will be extended below.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]
    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        # Entries not on sys.path sort last.
        try:
            return sys_path.index(entry)
        except ValueError:
            return float('inf')
    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # Strip the trailing package-name components to recover the
        # sys.path entry this __path__ element came from.
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
    if not isinstance(orig_path, list):
        # Is this behavior useful when module.__path__ is not a list?
        return
    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package.

    Recursively declares parent packages first, then walks the relevant
    search path to splice matching subdirectories into the package's
    ``__path__``.  Guarded by the import lock for thread safety.
    """
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return
        path, parent = sys.path, None
        if '.' in packageName:
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)
        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])
        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)
    finally:
        _imp.release_lock()
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item.

    Recurses into child namespace packages for any subpath found.  Guarded
    by the import lock, like ``declare_namespace``.
    """
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer.

    Returns ``path_item/<last-name-component>`` unless an equivalent
    (normalized) entry is already present in ``module.__path__``.
    """
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item) == normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
# Filesystem and zipfile importers share the same namespace handler.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath."""
    return None
register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes.

    Resolves symlinks via ``realpath`` and applies the platform's case
    normalization so that equivalent paths compare equal.
    """
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
def _normalize_cached(filename, _cache={}):
    """Memoized ``normalize_path``.

    The mutable default argument is intentional: it acts as a per-process
    cache shared across all calls.
    """
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
def _is_unpacked_egg(path):
"""
Determine if given path appears to be an unpacked egg.
"""
return (
path.lower().endswith('.egg')
)
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence.

    Accepts a single string (split on line boundaries) or any
    possibly-nested iterable of strings, which is flattened recursively.
    Lines are stripped; blank lines and ``#`` comment lines are skipped.
    """
    if isinstance(strs, six.string_types):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines/comments
            if s and not s.startswith('#'):
                yield s
    else:
        # Recursively flatten nested iterables of strings.
        for ss in strs:
            for s in yield_lines(ss):
                yield s
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
r"""
(?P<name>[^-]+) (
-(?P<ver>[^-]+) (
-py(?P<pyver>[^-]+) (
-(?P<plat>.+)
)?
)?
)?
""",
re.VERBOSE | re.IGNORECASE,
).match
class EntryPoint(object):
    """Object representing an advertised importable object"""
    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Round-trip through Requirement.parse to validate/normalize extras.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist
    def __str__(self):
        # Inverse of parse(): "name = module:attr.attr [extra1,extra2]"
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s
    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)
    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()
    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attribute chain, e.g. "SomeClass.method".
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))
    def require(self, env=None, installer=None):
        # Resolve this entry point's distribution requirements (including
        # extras) and activate them on the global working set.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer)
        list(map(working_set.add, items))
    # Matches "name = module[:attrs][ [extras]]" entry point lines.
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )
    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`
        Entry point syntax follows the form::
            name = some.module:some.attr [extra1, extra2]
        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)
    @classmethod
    def _parse_extras(cls, extras_spec):
        # Validate a "[extra1,extra2]" suffix by parsing it as a requirement;
        # version specifiers are not allowed here.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras
    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this
    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Content before the first [section] header is only legal
                # if it is empty.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
def _remove_md5_fragment(location):
if not location:
return ''
parsed = urllib.parse.urlparse(location)
if parsed[-1].startswith('md5='):
return urllib.parse.urlunparse(parsed[:-1] + ('',))
return location
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    is_version_line = lambda line: line.lower().startswith('version:')
    version_lines = filter(is_version_line, lines)
    # Only the first matching "Version:" line is considered.
    line = next(iter(version_lines), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the metadata file carrying the Version header; overridden
    # by DistInfoDistribution ('METADATA').
    PKG_INFO = 'PKG-INFO'
    def __init__(self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider
    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """Build a Distribution, picking the subclass and parsing the
        project name/version/pyver/platform from `basename`."""
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()
    def _reload_version(self):
        # Hook for subclasses to re-read the version from metadata
        # (see EggInfoDistribution).
        return self
    @property
    def hashcmp(self):
        # Tuple used for all hashing/ordering comparisons below.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )
    def __hash__(self):
        return hash(self.hashcmp)
    def __lt__(self, other):
        return self.hashcmp < other.hashcmp
    def __le__(self, other):
        return self.hashcmp <= other.hashcmp
    def __gt__(self, other):
        return self.hashcmp > other.hashcmp
    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp
    def __ne__(self, other):
        return not self == other
    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)
    @property
    def key(self):
        # Lowercased project name; cached on first access.
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key
    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)
        return self._parsed_version
    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return
        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return
        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')
        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
    @property
    def version(self):
        # Fall back to the metadata file when no version was given at
        # construction time.
        try:
            return self._version
        except AttributeError:
            version = _version_from_file(self._get_metadata(self.PKG_INFO))
            if version is None:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
            return version
    @property
    def _dep_map(self):
        # Lazily-built map of extra-name (or None) -> list of Requirements,
        # parsed from requires.txt/depends.txt.  Note: __dep_map is
        # name-mangled per-class.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        # "extra:marker" sections are conditional on the
                        # environment marker evaluating true.
                        if ':' in extra:
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs = []
                            elif not evaluate_marker(marker):
                                reqs = []
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra, []).extend(parse_requirements(reqs))
            return dm
    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps
    def _get_metadata(self, name):
        # Generator over the lines of metadata file `name`, empty if absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line
    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)
    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )
        if self.platform:
            filename += '-' + self.platform
        return filename
    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)
    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)
    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)
    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        # Convenience wrapper around from_location using the normalized path.
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # Legacy (non-PEP 440) versions need the arbitrary-equality
            # operator to round-trip.
            spec = "%s===%s" % (self.project_name, self.parsed_version)
        return Requirement.parse(spec)
    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()
    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map
    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)
    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path
        If replace=False (default):
        - If location is already in path anywhere, do nothing.
        - Else:
          - If it's an egg and its parent directory is on path,
            insert just ahead of the parent.
          - Else: add to the end of path.
        If replace=True:
        - If location is already on path anywhere (not eggs)
          or higher priority than its parent (eggs)
          do nothing.
        - Else:
          - If it's an egg and its parent directory is on path,
            insert just ahead of the parent,
            removing any lower-priority entries.
          - Else: add it to the front of path.
        """
        loc = loc or self.location
        if not loc:
            return
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # npath mirrors path with normalized entries for comparison.
        npath = [(p and _normalize_cached(p) or p) for p in path]
        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return
        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np
        return
    def check_version_conflict(self):
        # Warn when a top-level module of this distribution was already
        # imported from a different location than the one being added.
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return
        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )
    def has_version(self):
        # True if a version is known; warns and returns False otherwise.
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True
    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)
    @property
    def extras(self):
        # All extra names this distribution advertises dependencies for.
        return [dep for dep in self._dep_map if dep]
class EggInfoDistribution(Distribution):
    """Distribution backed by an ``.egg-info`` metadata directory/file."""
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        """
        md_version = _version_from_file(self._get_metadata(self.PKG_INFO))
        if md_version:
            self._version = md_version
        return self
class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA is an email-style (RFC 822) header block.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info
    @property
    def _dep_map(self):
        # NOTE: __dep_map is name-mangled per-class, so this cache is
        # distinct from the one in Distribution._dep_map.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map
    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}
        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))
        def reqs_for_extra(extra):
            # Requirements whose marker (if any) holds for this extra.
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req
        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)
        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # Store only what the extra adds beyond the base requirements.
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)
        return dm
# Maps metadata extension to the Distribution subclass used for it
# (consulted by Distribution.from_location).
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # Ran off the top of the stack; fall back to the deepest level seen.
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""
    def __str__(self):
        return ' '.join(self.args)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`
    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))
    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # NOTE(review): [:-2] removes the backslash AND the character
            # before it, which is lossless only when a space precedes the
            # backslash -- confirm against upstream before changing.
            line = line[:-2].strip()
            line += next(lines)
        yield Requirement(line)
class Requirement(packaging.requirements.Requirement):
    """A project requirement with pkg_resources compatibility attributes
    (``project_name``, ``key``, ``specs``, ``extras``) layered on top of
    ``packaging.requirements.Requirement``."""
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Tuple used for equality and hashing; marker stringified so the
        # tuple itself is hashable.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)
    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )
    def __ne__(self, other):
        return not self == other
    def __contains__(self, item):
        # Accepts a Distribution (matched by key, then version) or a
        # bare version string.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version
        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)
    def __hash__(self):
        return self.__hash
    def __repr__(self): return "Requirement.parse(%r)" % str(self)
    @staticmethod
    def parse(s):
        req, = parse_requirements(s)
        return req
def _get_mro(cls):
    """Get an mro for a type or classic class"""
    if not isinstance(cls, type):
        # Python 2 classic class: wrap it in a throwaway new-style subclass
        # to obtain an __mro__, then drop the synthetic class itself.
        class cls(cls, object):
            pass
        return cls.__mro__[1:]
    return cls.__mro__
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk ob's MRO and return the first registered match; returns None
    # implicitly when no type in the MRO is registered.
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
        if t in registry:
            return registry[t]
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Creates intermediate directories as needed.  Safe against the
    check-then-create race: if another process creates the directory
    between the ``isdir`` test and ``makedirs``, the error is swallowed
    as long as the directory now exists.  A bare filename (no directory
    component) is a no-op instead of an error.
    """
    dirname = os.path.dirname(path)
    if not dirname:
        # Nothing to create for a path with no parent component.
        return
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # Re-check: another process may have created it concurrently.
            if not os.path.isdir(dirname):
                raise
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    # NOTE(review): `split`, `isdir` and `mkdir` are bare names here --
    # presumably bound from os/os.path at module import time (not visible
    # in this excerpt) to bypass the setuptools sandbox; confirm upstream.
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # Recursively create parents first, then this directory.
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs
    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                # Emit the previous section before starting a new one.
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)
    # wrap up last segment
    yield section, content
def _mkstemp(*args, **kw):
    """``tempfile.mkstemp`` that temporarily restores the real ``os.open``.

    NOTE(review): `os_open` is presumably the original ``os.open`` captured
    before sandboxing, defined elsewhere in this module -- confirm.
    """
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
# (PEP440Warning is emitted for legacy version strings; see
# Distribution._warn_legacy_version.)
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
f(*args, **kwargs)
return f
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    # Runs once at import time (via @_call_aside): installs a ResourceManager
    # and re-exports its public methods as module-level functions.
    manager = ResourceManager()
    g['_manager'] = manager
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.
    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.
    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)
    # Module-level convenience aliases bound to the master working set.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(lambda dist: dist.activate(replace=True), existing=False)
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # Export every local defined above as a module-level name.
    globals().update(locals())
| mit |
qtekfun/htcDesire820Kernel | external/chromium_org/build/android/pylib/uiautomator/setup.py | 25 | 1088 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates test runner factory and tests for uiautomator tests."""
import logging
import test_package
import test_runner
def Setup(test_options):
  """Runs uiautomator tests on connected device(s).
  Args:
    test_options: A UIAutomatorOptions object.
  Returns:
    A tuple of (TestRunnerFactory, tests).
  """
  test_pkg = test_package.TestPackage(test_options.uiautomator_jar,
                                      test_options.uiautomator_info_jar)
  # NOTE(review): relies on TestPackage's private _GetAllMatchingTests --
  # confirm there is no public equivalent.
  tests = test_pkg._GetAllMatchingTests(test_options.annotations,
                                        test_options.exclude_annotations,
                                        test_options.test_filter)
  if not tests:
    logging.error('No uiautomator tests to run with current args.')
  def TestRunnerFactory(device, shard_index):
    # Each shard gets its own TestRunner bound to a specific device.
    return test_runner.TestRunner(
        test_options, device, shard_index, test_pkg)
  return (TestRunnerFactory, tests)
| gpl-2.0 |
FernanOrtega/DAT210x | Module2/assignment3.py | 1 | 1065 | import pandas as pd
# Python 2 script (uses print statements) answering slicing questions
# over the UCI "servo" dataset.
# TODO: Load up the dataset
# Ensuring you set the appropriate header column names
#
df = pd.read_csv('Datasets/servo.data', names=['motor', 'screw', 'pgain', 'vgain', 'class'])
print df.head()
# TODO: Create a slice that contains all entries
# having a vgain equal to 5. Then print the
# length of (# of samples in) that slice:
#
df_vgain = df[df.vgain == 5]
# .count() on the first column gives the number of non-null rows in the slice.
print df_vgain.iloc[:,0].count()
# TODO: Create a slice that contains all entries
# having a motor equal to E and screw equal
# to E. Then print the length of (# of
# samples in) that slice:
#
# .. your code here ..
df_eq = df[(df.motor == 'E') & (df.screw == 'E')]
print df_eq.iloc[:,0].count()
# TODO: Create a slice that contains all entries
# having a pgain equal to 4. Use one of the
# various methods of finding the mean vgain
# value for the samples in that slice. Once
# you've found it, print it:
#
df_pgain = df[df.pgain == 4]
print df_pgain.vgain.mean(0)
# TODO: (Bonus) See what happens when you run
# the .dtypes method on your dataframe!
print df.dtypes
| mit |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/django-1.8.2/django/core/mail/backends/filebased.py | 558 | 2771 | """Email backend that writes messages to a file."""
import datetime
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import \
EmailBackend as ConsoleEmailBackend
from django.utils import six
class EmailBackend(ConsoleEmailBackend):
    """Email backend that appends each message to a log file on disk.

    Reuses the console backend's sending machinery, but directs the
    output stream to a per-instance file under ``file_path`` (or the
    ``EMAIL_FILE_PATH`` setting).
    """
    def __init__(self, *args, **kwargs):
        self._fname = None
        if 'file_path' in kwargs:
            self.file_path = kwargs.pop('file_path')
        else:
            self.file_path = getattr(settings, 'EMAIL_FILE_PATH', None)
        # Make sure self.file_path is a string.
        if not isinstance(self.file_path, six.string_types):
            raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
        self.file_path = os.path.abspath(self.file_path)
        # Make sure that self.file_path is a directory if it exists.
        if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
            raise ImproperlyConfigured(
                'Path for saving email messages exists, but is not a directory: %s' % self.file_path
            )
        # Try to create it, if it does not exist.
        elif not os.path.exists(self.file_path):
            try:
                os.makedirs(self.file_path)
            except OSError as err:
                raise ImproperlyConfigured(
                    'Could not create directory for saving email messages: %s (%s)' % (self.file_path, err)
                )
        # Make sure that self.file_path is writable.
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
        # Finally, call super().
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs['stream'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)
    def write_message(self, message):
        # Messages are written as raw bytes, separated by a dashed line.
        self.stream.write(message.message().as_bytes() + b'\n')
        self.stream.write(b'-' * 79)
        self.stream.write(b'\n')
    def _get_filename(self):
        """Return a unique file name."""
        # Uniqueness comes from the timestamp plus this instance's id;
        # computed once and cached for the backend's lifetime.
        if self._fname is None:
            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            fname = "%s-%s.log" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname
    def open(self):
        # Returns True only when a new stream was opened (append-binary).
        if self.stream is None:
            self.stream = open(self._get_filename(), 'ab')
            return True
        return False
    def close(self):
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            self.stream = None
| mit |
stacybird/lemur | lemur/defaults/views.py | 9 | 1826 | """
.. module: lemur.status.views
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
"""
from flask import current_app, Blueprint
from flask.ext.restful import Api
from lemur.auth.service import AuthenticatedResource
# Flask blueprint plus its restful API wrapper; the /defaults resource
# below is registered against this pair.
mod = Blueprint('default', __name__)
api = Api(mod)
class LemurDefaults(AuthenticatedResource):
    """ Defines the 'defaults' endpoint """

    def __init__(self):
        # Bug fix: the original statement was ``super(LemurDefaults)``,
        # which only constructs an unbound super object and discards it —
        # the parent constructor never ran.  Call it properly so whatever
        # initialization AuthenticatedResource performs actually happens.
        super(LemurDefaults, self).__init__()

    def get(self):
        """
        .. http:get:: /defaults

           Returns defaults needed to generate CSRs

           **Example request**:

           .. sourcecode:: http

              GET /defaults HTTP/1.1
              Host: example.com
              Accept: application/json, text/javascript

           **Example response**:

           .. sourcecode:: http

              HTTP/1.1 200 OK
              Vary: Accept
              Content-Type: text/javascript

              {
                "country": "US",
                "state": "CA",
                "location": "Los Gatos",
                "organization": "Netflix",
                "organizationalUnit": "Operations"
              }

           :reqheader Authorization: OAuth token to authenticate
           :statuscode 200: no error
           :statuscode 403: unauthenticated
        """
        # All values come straight from the Flask app configuration.
        return dict(
            country=current_app.config.get('LEMUR_DEFAULT_COUNTRY'),
            state=current_app.config.get('LEMUR_DEFAULT_STATE'),
            location=current_app.config.get('LEMUR_DEFAULT_LOCATION'),
            organization=current_app.config.get('LEMUR_DEFAULT_ORGANIZATION'),
            organizationalUnit=current_app.config.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT')
        )
# Expose the resource: clients GET /defaults for the CSR default values.
api.add_resource(LemurDefaults, '/defaults', endpoint='default')
| apache-2.0 |
perryjrandall/arsenalsuite | cpp/lib/PyQt4/examples/widgets/calculator.py | 20 | 12001 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
import math
from PyQt4 import QtCore, QtGui
class Button(QtGui.QToolButton):
    """Calculator key: expands horizontally and reports a padded size hint."""

    def __init__(self, text, parent=None):
        super(Button, self).__init__(parent)
        self.setSizePolicy(QtGui.QSizePolicy.Expanding,
                QtGui.QSizePolicy.Preferred)
        self.setText(text)

    def sizeHint(self):
        """Return the base hint, 20px taller and never narrower than tall."""
        hint = super(Button, self).sizeHint()
        hint.setHeight(hint.height() + 20)
        hint.setWidth(max(hint.width(), hint.height()))
        return hint
class Calculator(QtGui.QDialog):
    """A four-function calculator dialog with memory keys.

    State machine: ``waitingForOperand`` tells the digit/point handlers
    whether the next keystroke starts a fresh operand.  Pending additive
    and multiplicative operators are tracked separately so that the
    multiplicative ones bind more tightly (``factorSoFar`` accumulates
    products/quotients, ``sumSoFar`` accumulates sums/differences).
    """

    NumDigitButtons = 10

    def __init__(self, parent=None):
        super(Calculator, self).__init__(parent)
        self.pendingAdditiveOperator = ''
        self.pendingMultiplicativeOperator = ''
        self.sumInMemory = 0.0
        self.sumSoFar = 0.0
        self.factorSoFar = 0.0
        self.waitingForOperand = True
        self.display = QtGui.QLineEdit('0')
        self.display.setReadOnly(True)
        self.display.setAlignment(QtCore.Qt.AlignRight)
        self.display.setMaxLength(15)
        font = self.display.font()
        font.setPointSize(font.pointSize() + 8)
        self.display.setFont(font)
        self.digitButtons = []
        for i in range(Calculator.NumDigitButtons):
            self.digitButtons.append(self.createButton(str(i),
                    self.digitClicked))
        self.pointButton = self.createButton(".", self.pointClicked)
        self.changeSignButton = self.createButton("\261",
                self.changeSignClicked)
        self.backspaceButton = self.createButton("Backspace",
                self.backspaceClicked)
        self.clearButton = self.createButton("Clear", self.clear)
        self.clearAllButton = self.createButton("Clear All", self.clearAll)
        self.clearMemoryButton = self.createButton("MC", self.clearMemory)
        self.readMemoryButton = self.createButton("MR", self.readMemory)
        self.setMemoryButton = self.createButton("MS", self.setMemory)
        self.addToMemoryButton = self.createButton("M+", self.addToMemory)
        self.divisionButton = self.createButton("\367",
                self.multiplicativeOperatorClicked)
        self.timesButton = self.createButton("\327",
                self.multiplicativeOperatorClicked)
        self.minusButton = self.createButton("-", self.additiveOperatorClicked)
        self.plusButton = self.createButton("+", self.additiveOperatorClicked)
        self.squareRootButton = self.createButton("Sqrt",
                self.unaryOperatorClicked)
        self.powerButton = self.createButton("x\262",
                self.unaryOperatorClicked)
        self.reciprocalButton = self.createButton("1/x",
                self.unaryOperatorClicked)
        self.equalButton = self.createButton("=", self.equalClicked)
        mainLayout = QtGui.QGridLayout()
        mainLayout.setSizeConstraint(QtGui.QLayout.SetFixedSize)
        mainLayout.addWidget(self.display, 0, 0, 1, 6)
        mainLayout.addWidget(self.backspaceButton, 1, 0, 1, 2)
        mainLayout.addWidget(self.clearButton, 1, 2, 1, 2)
        mainLayout.addWidget(self.clearAllButton, 1, 4, 1, 2)
        mainLayout.addWidget(self.clearMemoryButton, 2, 0)
        mainLayout.addWidget(self.readMemoryButton, 3, 0)
        mainLayout.addWidget(self.setMemoryButton, 4, 0)
        mainLayout.addWidget(self.addToMemoryButton, 5, 0)
        for i in range(1, Calculator.NumDigitButtons):
            # Bug fix: use explicit floor division.  The original '/'
            # produced an int only under Python 2; under Python 3 it would
            # hand addWidget() a float row index.  '//' is identical on
            # Python 2 and correct on Python 3.
            row = ((9 - i) // 3) + 2
            column = ((i - 1) % 3) + 1
            mainLayout.addWidget(self.digitButtons[i], row, column)
        mainLayout.addWidget(self.digitButtons[0], 5, 1)
        mainLayout.addWidget(self.pointButton, 5, 2)
        mainLayout.addWidget(self.changeSignButton, 5, 3)
        mainLayout.addWidget(self.divisionButton, 2, 4)
        mainLayout.addWidget(self.timesButton, 3, 4)
        mainLayout.addWidget(self.minusButton, 4, 4)
        mainLayout.addWidget(self.plusButton, 5, 4)
        mainLayout.addWidget(self.squareRootButton, 2, 5)
        mainLayout.addWidget(self.powerButton, 3, 5)
        mainLayout.addWidget(self.reciprocalButton, 4, 5)
        mainLayout.addWidget(self.equalButton, 5, 5)
        self.setLayout(mainLayout)
        self.setWindowTitle("Calculator")

    def digitClicked(self):
        """Append the clicked digit to the display, starting a new operand
        if one was pending."""
        clickedButton = self.sender()
        digitValue = int(clickedButton.text())
        if self.display.text() == '0' and digitValue == 0.0:
            return
        if self.waitingForOperand:
            self.display.clear()
            self.waitingForOperand = False
        self.display.setText(self.display.text() + str(digitValue))

    def unaryOperatorClicked(self):
        """Apply Sqrt / x^2 / 1/x to the current operand immediately."""
        clickedButton = self.sender()
        clickedOperator = clickedButton.text()
        operand = float(self.display.text())
        if clickedOperator == "Sqrt":
            if operand < 0.0:
                self.abortOperation()
                return
            result = math.sqrt(operand)
        elif clickedOperator == "x\262":
            result = math.pow(operand, 2.0)
        elif clickedOperator == "1/x":
            if operand == 0.0:
                self.abortOperation()
                return
            result = 1.0 / operand
        self.display.setText(str(result))
        self.waitingForOperand = True

    def additiveOperatorClicked(self):
        """Handle + / -: first settle any pending *,/ (higher precedence),
        then fold the operand into the running sum."""
        clickedButton = self.sender()
        clickedOperator = clickedButton.text()
        operand = float(self.display.text())
        if self.pendingMultiplicativeOperator:
            if not self.calculate(operand, self.pendingMultiplicativeOperator):
                self.abortOperation()
                return
            self.display.setText(str(self.factorSoFar))
            operand = self.factorSoFar
            self.factorSoFar = 0.0
            self.pendingMultiplicativeOperator = ''
        if self.pendingAdditiveOperator:
            if not self.calculate(operand, self.pendingAdditiveOperator):
                self.abortOperation()
                return
            self.display.setText(str(self.sumSoFar))
        else:
            self.sumSoFar = operand
        self.pendingAdditiveOperator = clickedOperator
        self.waitingForOperand = True

    def multiplicativeOperatorClicked(self):
        """Handle * and /: fold the operand into the running factor."""
        clickedButton = self.sender()
        clickedOperator = clickedButton.text()
        operand = float(self.display.text())
        if self.pendingMultiplicativeOperator:
            if not self.calculate(operand, self.pendingMultiplicativeOperator):
                self.abortOperation()
                return
            self.display.setText(str(self.factorSoFar))
        else:
            self.factorSoFar = operand
        self.pendingMultiplicativeOperator = clickedOperator
        self.waitingForOperand = True

    def equalClicked(self):
        """Resolve all pending operators and show the final result."""
        operand = float(self.display.text())
        if self.pendingMultiplicativeOperator:
            if not self.calculate(operand, self.pendingMultiplicativeOperator):
                self.abortOperation()
                return
            operand = self.factorSoFar
            self.factorSoFar = 0.0
            self.pendingMultiplicativeOperator = ''
        if self.pendingAdditiveOperator:
            if not self.calculate(operand, self.pendingAdditiveOperator):
                self.abortOperation()
                return
            self.pendingAdditiveOperator = ''
        else:
            self.sumSoFar = operand
        self.display.setText(str(self.sumSoFar))
        self.sumSoFar = 0.0
        self.waitingForOperand = True

    def pointClicked(self):
        """Insert a decimal point, at most once per operand."""
        if self.waitingForOperand:
            self.display.setText('0')
        if "." not in self.display.text():
            self.display.setText(self.display.text() + ".")
        self.waitingForOperand = False

    def changeSignClicked(self):
        """Toggle the sign of the displayed value (zero is left alone)."""
        text = self.display.text()
        value = float(text)
        if value > 0.0:
            text = "-" + text
        elif value < 0.0:
            text = text[1:]
        self.display.setText(text)

    def backspaceClicked(self):
        """Delete the last character; an emptied display reverts to '0'."""
        if self.waitingForOperand:
            return
        text = self.display.text()[:-1]
        if not text:
            text = '0'
            self.waitingForOperand = True
        self.display.setText(text)

    def clear(self):
        """Clear only the current operand."""
        if self.waitingForOperand:
            return
        self.display.setText('0')
        self.waitingForOperand = True

    def clearAll(self):
        """Reset the whole calculation state (memory is kept)."""
        self.sumSoFar = 0.0
        self.factorSoFar = 0.0
        self.pendingAdditiveOperator = ''
        self.pendingMultiplicativeOperator = ''
        self.display.setText('0')
        self.waitingForOperand = True

    def clearMemory(self):
        """MC: forget the stored memory value."""
        self.sumInMemory = 0.0

    def readMemory(self):
        """MR: recall the stored memory value into the display."""
        self.display.setText(str(self.sumInMemory))
        self.waitingForOperand = True

    def setMemory(self):
        """MS: finish the current calculation and store its result."""
        self.equalClicked()
        self.sumInMemory = float(self.display.text())

    def addToMemory(self):
        """M+: finish the current calculation and add its result to memory."""
        self.equalClicked()
        self.sumInMemory += float(self.display.text())

    def createButton(self, text, member):
        """Build a Button labelled *text* whose click triggers *member*."""
        button = Button(text)
        button.clicked.connect(member)
        return button

    def abortOperation(self):
        """Reset state and flag an invalid operation on the display."""
        self.clearAll()
        self.display.setText("####")

    def calculate(self, rightOperand, pendingOperator):
        """Fold *rightOperand* into the running sum or factor.

        Returns False only on division by zero; callers then abort."""
        if pendingOperator == "+":
            self.sumSoFar += rightOperand
        elif pendingOperator == "-":
            self.sumSoFar -= rightOperand
        elif pendingOperator == "\327":
            self.factorSoFar *= rightOperand
        elif pendingOperator == "\367":
            if rightOperand == 0.0:
                return False
            self.factorSoFar /= rightOperand
        return True
if __name__ == '__main__':
    # Stand-alone entry point: run the Qt event loop with one Calculator
    # dialog and exit with the loop's return code.
    import sys
    app = QtGui.QApplication(sys.argv)
    calc = Calculator()
    sys.exit(calc.exec_())
| gpl-2.0 |
inspirehep/invenio | modules/webauthorprofile/lib/webauthorprofile_webinterface.py | 2 | 35083 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011, 2018, 2019 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebAuthorProfile web interface logic and URL handler
"""
# pylint: disable=W0105
# pylint: disable=C0301
# pylint: disable=W0613
from sys import hexversion
from urllib import urlencode
from datetime import datetime, timedelta
from invenio import webinterface_handler_config
from invenio.bibauthorid_webauthorprofileinterface import is_valid_canonical_id, \
is_valid_bibref, get_person_id_from_paper, get_person_id_from_canonical_id, \
search_person_ids_by_name, get_papers_by_person_id, get_person_redirect_link, \
author_has_papers, get_authors_by_name
from invenio.bibauthorid_webapi import history_log_visit
from invenio.config import CFG_BASE_URL, CFG_LABS_HOSTNAME
from invenio.webauthorprofile_corefunctions import get_pubs, get_person_names_dicts, \
get_institute_pubs, get_pubs_per_year, get_coauthors, get_summarize_records, \
get_total_downloads, get_kwtuples, get_fieldtuples, get_veryfy_my_pubs_list_link, \
get_hepnames_data, get_self_pubs, get_collabtuples, get_internal_publications, \
get_external_publications, expire_all_cache_for_person, get_person_oldest_date, \
get_datasets, get_canonical_name_of_author
from invenio.webauthorprofile_config import deserialize
from invenio.bibauthorid_general_utils import get_doi_url, get_arxiv_url, get_inspire_record_url
from invenio.webpage import page
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.urlutils import redirect_to_url
from invenio.jsonutils import json_unicode_to_utf8
from invenio.bibauthorid_templates import WebProfileMenu, WebProfilePage
from invenio.bibauthorid_webinterface import WebInterfaceAuthorTicketHandling
import invenio.bibauthorid_webapi as webapi
from invenio.bibauthorid_dbinterface import get_canonical_name_of_author
from invenio.bibauthorid_config import CFG_BIBAUTHORID_ENABLED, AID_VISIBILITY
from invenio.bibformat import format_record
import invenio.template
import cProfile, pstats, cStringIO
# Invenio template engines used to render the individual profile boxes.
websearch_templates = invenio.template.load('websearch')
webauthorprofile_templates = invenio.template.load('webauthorprofile')
bibauthorid_template = invenio.template.load('bibauthorid')
from invenio.search_engine import page_end, perform_request_search
# Probe for a JSON implementation: before Python 2.6 the stdlib had no
# json module, so fall back to the external simplejson package there.
JSON_OK = False
if hexversion < 0x2060000:
    try:
        import simplejson as json
        JSON_OK = True
    except ImportError:
        # Okay, no Ajax app will be possible, but continue anyway,
        # since this package is only recommended, not mandatory.
        JSON_OK = False
else:
    try:
        import json
        JSON_OK = True
    except ImportError:
        JSON_OK = False
from webauthorprofile_config import CFG_SITE_LANG, CFG_SITE_URL
# Minimum age of the cache before a non-admin may force a recompute.
RECOMPUTE_ALLOWED_DELAY = timedelta(minutes=30)
# Bootstrap's grid is 12 units wide; element widths below are in units.
BOOTSTRAP_WIDTH_WHOLE = 12
BOOTSTRAP_WIDTH_HALF = 6
def wrap_json_req_profiler(func):
    """Decorator for JSON request handlers.

    When the request form carries an "ajaxProfile" field, the wrapped
    handler runs under cProfile and the top-100 cumulative-time report is
    embedded in the JSON payload under "profilerStats"; otherwise the
    handler is called straight through.
    """
    def json_req_profiler(self, req, form):
        if "ajaxProfile" not in form:
            return func(self, req, form)
        profile = cProfile.Profile()
        raw_response = profile.runcall(func, self, req, form)
        report = cStringIO.StringIO()
        pstats.Stats(profile, stream=report).sort_stats('cumulative').print_stats(100)
        payload = json.loads(raw_response)
        payload.update({"profilerStats": "<pre style='overflow: scroll'>" + report.getvalue() + "</pre>"})
        return json.dumps(payload)
    return json_req_profiler
class WebAuthorPages(WebInterfaceDirectory):
'''
Handles webauthorpages /author/profile/
'''
_exports = ['',
('affiliations', 'create_authorpage_affiliations'),
'create_authorpage_authors_pubs',
('citations-summary', 'create_authorpage_citations'),
('co-authors', 'create_authorpage_coauthors'),
('collaborations', 'create_authorpage_collaborations'),
('papers-summary', 'create_authorpage_combined_papers'),
('subject-categories', 'create_authorpage_fieldcodes'),
('hepnames', 'create_authorpage_hepdata'),
('keywords', 'create_authorpage_keywords'),
('name-variants', 'create_authorpage_name_variants'),
'create_authorpage_pubs',
('publications-graph', 'create_authorpage_pubs_graph'),
('publications-list', 'create_authorpage_pubs_list'),
('announcements', 'create_announcements_box')]
def __init__(self, identifier=None):
'''
Constructor of the web interface.
@param identifier: identifier of an author. Can be one of:
- an author id: e.g. "14"
- a canonical id: e.g. "J.R.Ellis.1"
- a bibrefrec: e.g. "100:1442,155"
@type identifier: str
'''
self.person_id = -1 # -1 is a non valid author identifier
self.cid = None
self.original_search_parameter = identifier
self._prepare_render_variables()
if (not CFG_BIBAUTHORID_ENABLED or
identifier is None or
not isinstance(identifier, str)):
return
# check if it's a canonical id: e.g. "J.R.Ellis.1"
pid = int(get_person_id_from_canonical_id(identifier))
if pid >= 0:
self.person_id = pid
self.cid = get_person_redirect_link(self.person_id)
return
# check if it's an author id: e.g. "14"
try:
self.person_id = int(identifier)
cid = get_person_redirect_link(pid)
# author may not have a canonical id
if is_valid_canonical_id(cid):
self.cid = cid
return
except ValueError:
pass
# check if it's a bibrefrec: e.g. "100:1442,155"
if is_valid_bibref(identifier):
pid = int(get_person_id_from_paper(identifier))
if pid >= 0:
self.person_id = pid
self.cid = get_person_redirect_link(self.person_id)
return
def _lookup(self, component, path):
'''
This handler parses dynamic URLs:
- /author/profile/1332 shows the page of author with id: 1332
- /author/profile/100:5522,1431 shows the page of the author
identified by the bibrefrec: '100:5522,1431'
'''
if not component in self._exports:
return WebAuthorPages(component), path
    def _prepare_render_variables(self):
        '''
        Computes variables for rendering the profile.
        Bootstrap's grid model divides the whole screen into 12 units.
        Here we compute width in units for some of the elements.
        It allows customising profile page.
        '''
        # Coauthors box shares a row with the papers box: full width when
        # papers is hidden, half width otherwise (AID_VISIBILITY values are
        # booleans, multiplied as 0/1 here).
        if AID_VISIBILITY['coauthors']:
            coauthors_width = (BOOTSTRAP_WIDTH_WHOLE - BOOTSTRAP_WIDTH_HALF *
                               AID_VISIBILITY['papers'])
        else:
            coauthors_width = 0
        # Same pairing for subject categories vs. frequent keywords.
        if AID_VISIBILITY['subject_categories']:
            subject_categories_width = (BOOTSTRAP_WIDTH_WHOLE -
                                        BOOTSTRAP_WIDTH_HALF *
                                        AID_VISIBILITY['frequent_keywords'])
        else:
            subject_categories_width = 0
        # Indicates whether a box for publications is visible or not
        publication_box = (AID_VISIBILITY['publications'] or
                           AID_VISIBILITY['datasets'] or
                           AID_VISIBILITY['external'])
        # Those three indicate if a column in 'large' view is visible.
        personal_info_column = (AID_VISIBILITY['personal_details'] or
                                AID_VISIBILITY['name_variants'] or
                                AID_VISIBILITY['affiliations'] or
                                AID_VISIBILITY['collaborations'])
        publications_column = (publication_box or
                               AID_VISIBILITY['coauthors'] or
                               AID_VISIBILITY['papers'] or
                               AID_VISIBILITY['subject_categories'] or
                               AID_VISIBILITY['frequent_keywords'])
        stats_column = (AID_VISIBILITY['citations_summary'] or
                        AID_VISIBILITY['publications_graph'])
        # Here we compute width for three columns in 'large' view. They
        # must sum up to 12.  One branch per combination of visible
        # columns; hidden columns get width '0'.
        if publications_column and personal_info_column and stats_column:
            columns_large_width = {
                'personal_info' : '3',
                'publications' : '5',
                'stats' : '4'
            }
        elif publications_column and personal_info_column:
            columns_large_width = {
                'personal_info' : '5',
                'publications' : '7',
                'stats' : '0'
            }
        elif publications_column and stats_column:
            columns_large_width = {
                'personal_info' : '0',
                'publications' : '7',
                'stats' : '5'
            }
        elif personal_info_column and stats_column:
            columns_large_width = {
                'personal_info' : '6',
                'publications' : '0',
                'stats' : '6'
            }
        elif publications_column:
            columns_large_width = {
                'personal_info' : '0',
                'publications' : str(BOOTSTRAP_WIDTH_WHOLE),
                'stats' : '0'
            }
        elif personal_info_column:
            columns_large_width = {
                'personal_info' : str(BOOTSTRAP_WIDTH_WHOLE),
                'publications' : '0',
                'stats' : '0'
            }
        elif stats_column:
            columns_large_width = {
                'personal_info' : '0',
                'publications' : '0',
                'stats' : str(BOOTSTRAP_WIDTH_WHOLE)
            }
        else:
            # Every column disabled: the profile page cannot be laid out.
            raise ValueError("You can not disable all author identification " +
                             "module's fields in bibauthorid_config.py!")
        # This dictionary will be passed to html template
        self.render_width_dict = {
            'coauthors': str(coauthors_width),
            'papers': str(BOOTSTRAP_WIDTH_WHOLE - coauthors_width),
            'subject_categories': str(subject_categories_width),
            'frequent_keywords': str(BOOTSTRAP_WIDTH_WHOLE -
                                     subject_categories_width),
            'publication_box': publication_box,
            'columns_large': columns_large_width
        }
def _is_profile_owner(self, pid):
return self.person_id == int(pid)
def _is_admin(self, pinfo):
return pinfo['ulevel'] == 'admin'
def _possible_to_recompute(self, pinfo):
oldest_cache_date = self.last_computed()
delay = datetime.now() - oldest_cache_date
if self._is_admin(pinfo) or (delay > RECOMPUTE_ALLOWED_DELAY):
return True
else:
return False
    def __call__(self, req, form):
        '''
        Serves the main person page.
        Will use the object's person id to get a person's information.

        @param req: apache request object
        @type req: apache request object
        @param form: POST/GET variables of the request
        @type form: dict

        @return: a full page formatted in HTML
        @rtype: str
        '''
        if not CFG_BIBAUTHORID_ENABLED:
            self.person_id = self.original_search_parameter
            return self.index(req, form)
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
                                   'recid': (int, -1),
                                   'verbose': (int, 0)})
        ln = argd['ln']
        verbose = argd['verbose']
        # Carry non-default ln/verbose values over into any redirect URL.
        url_args = dict()
        if ln != CFG_SITE_LANG:
            url_args['ln'] = ln
        if verbose:
            url_args['verbose'] = str(verbose)
        encoded = urlencode(url_args)
        if encoded:
            encoded = '?' + encoded
        # __init__ resolved a canonical id but the request used some other
        # identifier: redirect to the canonical profile URL.
        if self.cid is not None and self.original_search_parameter != self.cid:
            return redirect_to_url(req, '%s/author/profile/%s%s' % (CFG_SITE_URL, self.cid, encoded))
        # author may have only author identifier and not a canonical id
        if self.person_id > -1:
            return self.index(req, form)
        recid = argd['recid']
        if recid > -1:
            # Unresolved identifier: try to disambiguate the free-text name
            # using the record it was clicked from.
            possible_authors = get_authors_by_name(self.original_search_parameter,
                                                   limit_to_recid=recid)
            if len(possible_authors) == 1:
                self.person_id = possible_authors.pop()
                self.cid = get_person_redirect_link(self.person_id)
                # NOTE(review): no 'return' here — this presumably relies on
                # redirect_to_url() interrupting the request; confirm.
                redirect_to_url(req, '%s/author/profile/%s%s' % (CFG_SITE_URL, self.cid, encoded))
        # Fall back to author search; re-encode with '&' because the query
        # string already starts with '?q='.
        encoded = urlencode(url_args)
        if encoded:
            encoded = '&' + encoded
        return redirect_to_url(req, '%s/author/search?q=%s%s' %
                               (CFG_SITE_URL, self.original_search_parameter, encoded))
def index(self, req, form):
'''
Serve the main person page.
Will use the object's person id to get a person's information.
@param req: apache request object
@type req: apache request object
@param form: POST/GET variables of the request
@type form: dict
@return: a full page formatted in HTML
@return: str
'''
webapi.session_bareinit(req)
session = webapi.get_session(req)
pinfo = session['personinfo']
ulevel = pinfo['ulevel']
argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG),
'recompute': (int, 0),
'verbose': (int, 0),
'trial': (str, None)})
ln = argd['ln']
debug = "verbose" in argd and argd["verbose"] > 0
# Create Page Markup and Menu
try:
int(self.person_id)
except ValueError:
cname = self.person_id
else:
cname = webapi.get_canonical_id_from_person_id(self.person_id)
menu = WebProfileMenu(str(cname), "profile", ln, self._is_profile_owner(pinfo['pid']), self._is_admin(pinfo))
profile_page = WebProfilePage("profile", webapi.get_longest_name_from_pid(self.person_id))
profile_page.add_profile_menu(menu)
if 'form_email' in pinfo:
gFormEmail = pinfo['form_email']
else:
gFormEmail = ""
profile_page.add_bootstrapped_data(json.dumps({
"backbone": """
(function(ticketbox) {
var app = ticketbox.app;
app.userops.set(%s);
app.bodyModel.set({userLevel: "%s"});
})(ticketbox);""" % (WebInterfaceAuthorTicketHandling.bootstrap_status(pinfo, "user"), ulevel),
"other": "var gUserLevel = '%s'; var gFormEmail = '%s';" % (ulevel, gFormEmail)
}))
if debug:
profile_page.add_debug_info(pinfo)
last_computed = str(self.last_computed())
context = {
'person_id': self.person_id,
'last_computed': last_computed,
'citation_fine_print_link': "%s/help/citation-metrics" % CFG_BASE_URL,
'search_form_url': "%s/author/search" % CFG_BASE_URL,
'possible_to_recompute': self._possible_to_recompute(pinfo)
}
verbose = argd['verbose']
url_args = dict()
if ln != CFG_SITE_LANG:
url_args['ln'] = ln
if verbose:
url_args['verbose'] = str(verbose)
encoded = urlencode(url_args)
if encoded:
encoded = '&' + encoded
if CFG_BIBAUTHORID_ENABLED:
if self.person_id < 0:
return redirect_to_url(req, '%s/author/search?q=%s%s' %
(CFG_SITE_URL, self.original_search_parameter, encoded))
else:
self.person_id = self.original_search_parameter
profile_page.menu = None
assert not form.has_key('jsondata'), "Content type should be only text/html."
full_name = webapi.get_longest_name_from_pid(self.person_id)
page_title = '%s - Profile' % full_name
if argd['recompute'] and req.get_method() == 'POST':
expire_all_cache_for_person(self.person_id)
context['last_computed'] = str(datetime.now().replace(microsecond=0))
history_log_visit(req, 'profile', pid=self.person_id)
meta = profile_page.get_head()
context["visible"] = AID_VISIBILITY
context["element_width"] = self.render_width_dict
hndata = get_hepnames_data(self.person_id)
if hndata[1] is True:
context["hnrecid"] = get_hepnames_data(self.person_id)[0].get('record', {}).get('record_id')
body = profile_page.get_wrapped_body("profile_page", context)
return page(title=page_title,
metaheaderadd=meta.encode('utf-8'),
body=body.encode('utf-8'),
req=req,
language=ln,
show_title_p=False)
@wrap_json_req_profiler
def create_authorpage_name_variants(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
namesdict, namesdictStatus = get_person_names_dicts(person_id)
if not namesdict:
namesdict = dict()
try:
db_names_dict = namesdict['db_names_dict']
except (IndexError, KeyError):
db_names_dict = dict()
person_link, person_linkStatus = get_veryfy_my_pubs_list_link(person_id)
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': None}
if person_link and person_linkStatus:
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': person_link}
json_response = {'status': namesdictStatus, 'html': webauthorprofile_templates.tmpl_author_name_variants_box(db_names_dict, bibauthorid_data, ln='en', add_box=False, loading=not db_names_dict)}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_combined_papers(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
pubs, pubsStatus = get_pubs(person_id)
if not pubs:
pubs = list()
selfpubs, selfpubsStatus = get_self_pubs(person_id)
if not selfpubs:
selfpubs = list()
person_link, person_linkStatus = get_veryfy_my_pubs_list_link(person_id)
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': None}
if person_link and person_linkStatus:
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': person_link}
totaldownloads, totaldownloadsStatus = get_total_downloads(person_id)
if not totaldownloads:
totaldownloads = 0
json_response = {'status': selfpubsStatus, 'html': webauthorprofile_templates.tmpl_papers_with_self_papers_box(pubs, selfpubs, bibauthorid_data, totaldownloads, ln='en', add_box=False, loading=not selfpubsStatus)}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_keywords(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
kwtuples, kwtuplesStatus = get_kwtuples(person_id)
if kwtuples:
pass
# kwtuples = kwtuples[0:MAX_KEYWORD_LIST]
else:
kwtuples = list()
person_link, person_linkStatus = get_veryfy_my_pubs_list_link(person_id)
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': None}
if person_link and person_linkStatus:
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': person_link}
json_response = {'status': kwtuplesStatus, 'html': webauthorprofile_templates.tmpl_keyword_box(kwtuples, bibauthorid_data, ln='en', add_box=False, loading=not kwtuplesStatus)}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_fieldcodes(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
fieldtuples, fieldtuplesStatus = get_fieldtuples(person_id)
if fieldtuples:
pass
# fieldtuples = fieldtuples[0:MAX_FIELDCODE_LIST]
else:
fieldtuples = list()
person_link, person_linkStatus = get_veryfy_my_pubs_list_link(person_id)
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': None}
if person_link and person_linkStatus:
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': person_link}
json_response = {'status': fieldtuplesStatus, 'html': webauthorprofile_templates.tmpl_fieldcode_box(fieldtuples, bibauthorid_data, ln='en', add_box=False, loading=not fieldtuplesStatus)}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_affiliations(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
author_aff_pubs, author_aff_pubsStatus = get_institute_pubs(person_id)
if not author_aff_pubs:
author_aff_pubs = dict()
json_response = {'status': author_aff_pubsStatus, 'html': webauthorprofile_templates.tmpl_affiliations_box(author_aff_pubs, ln='en', add_box=False, loading=not author_aff_pubsStatus)}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_coauthors(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
person_link, person_linkStatus = get_veryfy_my_pubs_list_link(person_id)
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': None}
if person_link and person_linkStatus:
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': person_link}
coauthors, coauthorsStatus = get_coauthors(person_id)
if not coauthors:
coauthors = dict()
json_response = {'status': coauthorsStatus, 'html': webauthorprofile_templates.tmpl_coauthor_box(bibauthorid_data, coauthors, ln='en', loading=not coauthorsStatus)}
req.content_type = 'application/json'
return json.dumps(json_response)
    @wrap_json_req_profiler
    def create_authorpage_citations(self, req, form):
        """Ajax endpoint: render the citations-summary box for a person.

        Replies with JSON ``{'status': <bool>, 'html': <content>}``; status
        is True only when both the citation-summary and the publication
        caches are ready.
        """
        if 'jsondata' in form:
            json_data = json.loads(str(form['jsondata']))
            json_data = json_unicode_to_utf8(json_data)
            if 'personId' in json_data:
                person_id = json_data['personId']
                citation_data, cache_status = get_summarize_records(person_id)
                records, records_cache_status = get_pubs(person_id)
                datasets = get_datasets(person_id)
                # Reduce get_datasets()' result to a count of its first
                # element (0 when absent or empty).
                if datasets and datasets[0]:
                    datasets = len(datasets[0])
                else:
                    datasets = 0

                citations = {'breakdown_categories': ['Renowned papers (500+)', 'Famous papers (250-499)',
                                                      'Very well-known papers (100-249)',
                                                      'Well-known papers (50-99)',
                                                      'Known papers (10-49)', 'Less known papers (1-9)',
                                                      'Unknown papers (0)']}

                content = "Data not ready. Please wait..."
                if cache_status and citation_data and records and records_cache_status:
                    citations['papers_num'] = len(records)
                    try:
                        citations['papers'], citations['data'] = citation_data[0]
                    except IndexError:
                        pass
                    result = get_canonical_name_of_author(person_id)
                    if result:
                        canonical_name = result[0][0]
                    else:
                        canonical_name = ""

                    content = WebProfilePage.render_citations_summary_content(citations, canonical_name, datasets)
                elif not citation_data and not records:
                    # Neither citation data nor records — presumably the
                    # author has nothing to cite; TODO confirm semantics.
                    content = "No citations data."
                json_response = {'status': (cache_status and records_cache_status), 'html': content}
                req.content_type = 'application/json'
                return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_pubs_graph(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
pubs_per_year, pubs_per_yearStatus = get_pubs_per_year(person_id)
if not pubs_per_year:
pubs_per_year = dict()
securelinks = req.is_https()
json_response = {'status': pubs_per_yearStatus, 'html': webauthorprofile_templates.tmpl_graph_box(pubs_per_year, ln='en', loading=not pubs_per_yearStatus, https=securelinks)}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_hepdata(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
webapi.session_bareinit(req)
session = webapi.get_session(req)
ulevel = session['personinfo']['ulevel']
if json_data.has_key('personId'):
person_id = json_data['personId']
context, hepdictStatus = get_hepnames_data(person_id)
if not hepdictStatus:
return json.dumps({'status': False, 'html': ''})
context.update({
'cname': webapi.get_canonical_id_from_person_id(person_id),
'link_to_record': ulevel == "admin",
'hepnames_link': "%s/%s/" % (CFG_BASE_URL, "record"),
'new_record_link': 'https://%s/submissions/authors?bai=%s' % (CFG_LABS_HOSTNAME, webapi.get_canonical_id_from_person_id(person_id)),
'update_link_prefix': "https://%s/submissions/authors/" % CFG_LABS_HOSTNAME,
'update_link_suffix': "",
'profile_link': "%s/%s" % (CFG_BASE_URL, "author/profile/")
})
content = WebProfilePage.render_template('personal_details_box', context)
json_response = {'status': hepdictStatus, 'html': content}
req.content_type = 'application/json'
return json.dumps(json_response)
@wrap_json_req_profiler
def create_authorpage_collaborations(self, req, form):
if form.has_key('jsondata'):
json_data = json.loads(str(form['jsondata']))
json_data = json_unicode_to_utf8(json_data)
if json_data.has_key('personId'):
person_id = json_data['personId']
collab, collabStatus = get_collabtuples(person_id)
person_link, person_linkStatus = get_veryfy_my_pubs_list_link(person_id)
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': None}
if person_link and person_linkStatus:
bibauthorid_data = {'is_baid': True, 'pid': person_id, 'cid': person_link}
json_response = {'status': collabStatus, 'html': webauthorprofile_templates.tmpl_collab_box(collab, bibauthorid_data, ln='en', add_box=False, loading=not collabStatus)}
req.content_type = 'application/json'
return json.dumps(json_response)
    @wrap_json_req_profiler
    def create_authorpage_pubs_list(self, req, form):
        """Return, as a JSON string, the publications-list box of an author.

        Merges internal records, external (arXiv/DOI) records and datasets
        into one rendered box; status is True only when all three caches
        were ready.
        """
        def get_unsorted_datasets(items):
            # First ten datasets in whatever order the dict happens to yield.
            return [(title, get_inspire_record_url(recid),
                     recid) for recid, title
                    in items.iteritems()][0:10]

        def get_sorted_internal_pubs(items, order):
            # Pick at most ten records from `items`, following the ranking in
            # `order` (recids returned by the search engine); recids present
            # in `order` but missing from `items` are skipped.
            result = []
            current_index = 0
            offset = 0
            while(current_index < 10 and current_index + offset < len(order)):
                recid = order[current_index + offset]
                if recid in items:
                    title = items[recid]
                    result.append((title, get_inspire_record_url(recid),
                                   recid))
                    current_index += 1
                else:
                    offset += 1
            return result

        if 'jsondata' in form:
            json_data = json.loads(str(form['jsondata']))
            json_data = json_unicode_to_utf8(json_data)
            if 'personId' in json_data:
                person_id = json_data['personId']
                try:
                    canonical_name = get_canonical_name_of_author(person_id)[0][0]
                    internal_search_pubs = perform_request_search(p="exactauthor:%s" % canonical_name,
                                                                  sf="earliestdate",
                                                                  so="d")
                except IndexError:
                    # Author without a canonical name: no ranking available.
                    canonical_name = None
                    internal_search_pubs = []
                internal_pubs, internal_pubsStatus = get_internal_publications(person_id)
                external_pubs, external_pubsStatus = get_external_publications(person_id)
                datasets_pubs, datasets_pubsStatus = get_datasets(person_id)
                if internal_pubs is not None and internal_pubsStatus is True:
                    internal_pubs = \
                        get_sorted_internal_pubs(internal_pubs, internal_search_pubs)
                else:
                    internal_pubs = list()
                if datasets_pubs is not None and datasets_pubsStatus is True:
                    datasets_pubs_to_display = get_unsorted_datasets(datasets_pubs)
                else:
                    datasets_pubs_to_display = list()
                arxiv_pubs = list()
                doi_pubs = list()
                if external_pubs is not None and external_pubsStatus is True:
                    if 'arxiv' in external_pubs:
                        arxiv_pubs = [(title, get_arxiv_url(arxiv_id), 'arxiv')
                                      for arxiv_id, title
                                      in external_pubs['arxiv'].iteritems()]
                    if 'doi' in external_pubs:
                        doi_pubs = [(title, get_doi_url(doi_id), 'doi')
                                    for doi_id, title
                                    in external_pubs['doi'].iteritems()]
                external_pubs = arxiv_pubs + doi_pubs
                # '%%3A' survives %-interpolation as '%3A', the url-encoded ':'.
                all_pubs_search_link = "%s/search?p=exactauthor%%3A%s&sf=earliestdate" % (CFG_BASE_URL, canonical_name)
                if datasets_pubs:
                    datasets_pubs_recs = ['recid%%3A%s' % pub for pub in datasets_pubs]
                else:
                    datasets_pubs_recs = list()
                #TODO An operator should be introduced as this will not work for authors with many records.
                datasets_search_link = "%s/search?cc=Data&p=%s" % (CFG_BASE_URL, '+or+'.join(datasets_pubs_recs))
                json_response = {
                    'status': (internal_pubsStatus and external_pubsStatus and datasets_pubsStatus),
                    'html': WebProfilePage.render_publications_box_content({
                        "internal_pubs": internal_pubs,
                        "external_pubs": external_pubs,
                        "datasets": datasets_pubs_to_display,
                        "all_pubs_search_link": all_pubs_search_link,
                        "data_sets_search_link": datasets_search_link,
                        "base_url": CFG_BASE_URL
                    })
                }
                req.content_type = 'application/json'
                return json.dumps(json_response)
    def last_computed(self):
        """Return the oldest computation date for this author's cached data.

        Pure delegation to get_person_oldest_date; presumably used to show
        when the profile data was last (re)computed -- TODO confirm against
        callers.
        """
        return get_person_oldest_date(self.person_id)
| gpl-2.0 |
noplay/aiohttp | tests/test_web_request.py | 1 | 5998 | import asyncio
import unittest
from unittest import mock
from aiohttp.web import Request
from aiohttp.multidict import MultiDict, CIMultiDict
from aiohttp.protocol import HttpVersion
from aiohttp.protocol import RawRequestMessage
class TestWebRequest(unittest.TestCase):
    """Unit tests for aiohttp.web.Request construction and accessors."""

    def setUp(self):
        # Private loop per test; the global loop is cleared so nothing can
        # accidentally rely on it.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(None)

    def tearDown(self):
        self.loop.close()

    def make_request(self, method, path, headers=None, *,
                     version=HttpVersion(1, 1), closing=False):
        """Build a Request wired to mocked transport/reader/writer objects.

        ``headers`` defaults to a fresh CIMultiDict per call: the previous
        ``headers=CIMultiDict()`` default was a mutable default argument,
        shared across every invocation that omitted the parameter.
        """
        if headers is None:
            headers = CIMultiDict()
        self.app = mock.Mock()
        message = RawRequestMessage(method, path, version, headers, closing,
                                    False)
        self.payload = mock.Mock()
        self.transport = mock.Mock()
        self.writer = mock.Mock()
        self.reader = mock.Mock()
        req = Request(self.app, message, self.payload,
                      self.transport, self.reader, self.writer)
        return req

    def test_ctor(self):
        """Basic accessors reflect the raw request message."""
        req = self.make_request('GET', '/path/to?a=1&b=2')
        self.assertIs(self.app, req.app)
        self.assertEqual('GET', req.method)
        self.assertEqual(HttpVersion(1, 1), req.version)
        self.assertEqual(None, req.host)
        self.assertEqual('/path/to?a=1&b=2', req.path_qs)
        self.assertEqual('/path/to', req.path)
        self.assertEqual('a=1&b=2', req.query_string)
        get = req.GET
        self.assertEqual(MultiDict([('a', '1'), ('b', '2')]), get)
        # second call should return the same object
        self.assertIs(get, req.GET)
        with self.assertWarns(DeprecationWarning):
            self.assertIs(self.payload, req.payload)
        self.assertIs(self.payload, req.content)
        self.assertIs(self.transport, req.transport)
        self.assertTrue(req.keep_alive)

    def test_POST(self):
        """POST raises until the body is parsed; afterwards it is cached."""
        req = self.make_request('POST', '/')
        with self.assertRaises(RuntimeError):
            req.POST
        marker = object()
        req._post = marker
        self.assertIs(req.POST, marker)
        self.assertIs(req.POST, marker)

    def test_content_type_not_specified(self):
        """Missing Content-Type defaults to application/octet-stream."""
        req = self.make_request('Get', '/')
        self.assertEqual('application/octet-stream', req.content_type)

    def test_content_type_from_spec(self):
        """Content-Type header is surfaced as-is."""
        req = self.make_request(
            'Get', '/',
            CIMultiDict([('CONTENT-TYPE', 'application/json')]))
        self.assertEqual('application/json', req.content_type)

    def test_content_type_from_spec_with_charset(self):
        """charset parameter is split off the media type."""
        req = self.make_request(
            'Get', '/',
            CIMultiDict([('CONTENT-TYPE', 'text/html; charset=UTF-8')]))
        self.assertEqual('text/html', req.content_type)
        self.assertEqual('UTF-8', req.charset)

    def test_calc_content_type_on_getting_charset(self):
        """Reading charset first still populates content_type."""
        req = self.make_request(
            'Get', '/',
            CIMultiDict([('CONTENT-TYPE', 'text/html; charset=UTF-8')]))
        self.assertEqual('UTF-8', req.charset)
        self.assertEqual('text/html', req.content_type)

    def test_urlencoded_querystring(self):
        """Percent-encoded query values are decoded."""
        req = self.make_request(
            'GET',
            '/yandsearch?text=%D1%82%D0%B5%D0%BA%D1%81%D1%82')
        self.assertEqual({'text': 'текст'}, req.GET)

    def test_non_ascii_path(self):
        """Non-ASCII paths are preserved."""
        req = self.make_request('GET', '/путь')
        self.assertEqual('/путь', req.path)

    def test_content_length(self):
        """Content-Length header is exposed as an int."""
        req = self.make_request(
            'Get', '/',
            CIMultiDict([('CONTENT-LENGTH', '123')]))
        self.assertEqual(123, req.content_length)

    def test_non_keepalive_on_http10(self):
        """HTTP/1.0 requests are not keep-alive by default."""
        req = self.make_request('GET', '/', version=HttpVersion(1, 0))
        self.assertFalse(req.keep_alive)

    def test_non_keepalive_on_closing(self):
        """A closing message disables keep-alive."""
        req = self.make_request('GET', '/', closing=True)
        self.assertFalse(req.keep_alive)

    def test_call_POST_on_GET_request(self):
        """post() on a bodyless GET yields an empty multidict."""
        req = self.make_request('GET', '/')
        ret = self.loop.run_until_complete(req.post())
        self.assertEqual(CIMultiDict(), ret)

    def test_call_POST_on_weird_content_type(self):
        """post() ignores bodies with unparseable content types."""
        req = self.make_request(
            'POST', '/',
            headers=CIMultiDict({'CONTENT-TYPE': 'something/weird'}))
        ret = self.loop.run_until_complete(req.post())
        self.assertEqual(CIMultiDict(), ret)

    def test_call_POST_twice(self):
        """post() caches its result across calls."""
        req = self.make_request('GET', '/')
        ret1 = self.loop.run_until_complete(req.post())
        ret2 = self.loop.run_until_complete(req.post())
        self.assertIs(ret1, ret2)

    def test_no_request_cookies(self):
        """Absent Cookie header maps to an empty, cached cookies dict."""
        req = self.make_request('GET', '/')
        self.assertEqual(req.cookies, {})
        cookies = req.cookies
        self.assertIs(cookies, req.cookies)

    def test_request_cookie(self):
        """Cookie header is parsed into a name/value mapping."""
        headers = CIMultiDict(COOKIE='cookie1=value1; cookie2=value2')
        req = self.make_request('GET', '/', headers=headers)
        self.assertEqual(req.cookies, {
            'cookie1': 'value1',
            'cookie2': 'value2',
        })

    def test_request_cookie__set_item(self):
        """Request cookies are read-only."""
        headers = CIMultiDict(COOKIE='name=value')
        req = self.make_request('GET', '/', headers=headers)
        self.assertEqual(req.cookies, {'name': 'value'})
        with self.assertRaises(TypeError):
            req.cookies['my'] = 'value'

    def test_match_info(self):
        """match_info is None until routing assigns it."""
        req = self.make_request('GET', '/')
        self.assertIsNone(req.match_info)
        match = {'a': 'b'}
        req._match_info = match
        self.assertIs(match, req.match_info)

    def test_request_is_dict(self):
        """A Request behaves as a dict for per-request storage."""
        req = self.make_request('GET', '/')
        self.assertTrue(isinstance(req, dict))
        req['key'] = 'value'
        self.assertEqual('value', req['key'])

    def test___repr__(self):
        """repr() shows method and path."""
        req = self.make_request('GET', '/path/to')
        self.assertEqual("<Request GET /path/to >", repr(req))
| apache-2.0 |
sankhesh/VTK | ThirdParty/Twisted/twisted/names/test/test_hosts.py | 22 | 8550 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the I{hosts(5)}-based resolver, L{twisted.names.hosts}.
"""
from __future__ import division, absolute_import
from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.internet.defer import gatherResults
from twisted.names.dns import (
A, AAAA, IN, DomainError, RRHeader, Query, Record_A, Record_AAAA)
from twisted.names.hosts import Resolver, searchFileFor, searchFileForAll
class GoodTempPathMixin(object):
    """
    Mixin for test cases that need a temporary, not-yet-created path as a
    L{FilePath} with a C{bytes} name.
    """
    def path(self):
        """
        Return a L{FilePath} wrapping a fresh temporary name from C{mktemp}.
        """
        temp_name = self.mktemp()
        return FilePath(temp_name.encode('utf-8'))
class SearchHostsFileTests(TestCase, GoodTempPathMixin):
    """
    Tests for L{searchFileFor}, a helper which finds the first address for a
    particular hostname in a I{hosts(5)}-style file.
    """
    def test_findAddress(self):
        """
        If there is an IPv4 address for the hostname passed to L{searchFileFor},
        it is returned.
        """
        hosts = self.path()
        hosts.setContent(
            b"10.2.3.4 foo.example.com\n")
        # Note: searchFileFor takes a plain (bytes) path, not a FilePath.
        self.assertEqual(
            "10.2.3.4", searchFileFor(hosts.path, b"foo.example.com"))

    def test_notFoundAddress(self):
        """
        If there is no address information for the hostname passed to
        L{searchFileFor}, C{None} is returned.
        """
        hosts = self.path()
        hosts.setContent(
            b"10.2.3.4 foo.example.com\n")
        self.assertIs(None, searchFileFor(hosts.path, b"bar.example.com"))

    def test_firstAddress(self):
        """
        The first address associated with the given hostname is returned.
        """
        hosts = self.path()
        # The IPv6 entry appears first in the file, so it wins over the
        # later IPv4 and link-local entries.
        hosts.setContent(
            b"::1 foo.example.com\n"
            b"10.1.2.3 foo.example.com\n"
            b"fe80::21b:fcff:feee:5a1d foo.example.com\n")
        self.assertEqual("::1", searchFileFor(hosts.path, b"foo.example.com"))

    def test_searchFileForAliases(self):
        """
        For a host with a canonical name and one or more aliases,
        L{searchFileFor} can find an address given any of the names.
        """
        hosts = self.path()
        # Canonical name and aliases share one line; comments are ignored.
        hosts.setContent(
            b"127.0.1.1\thelmut.example.org\thelmut\n"
            b"# a comment\n"
            b"::1 localhost ip6-localhost ip6-loopback\n")
        self.assertEqual(searchFileFor(hosts.path, b'helmut'), '127.0.1.1')
        self.assertEqual(
            searchFileFor(hosts.path, b'helmut.example.org'), '127.0.1.1')
        self.assertEqual(searchFileFor(hosts.path, b'ip6-localhost'), '::1')
        self.assertEqual(searchFileFor(hosts.path, b'ip6-loopback'), '::1')
        self.assertEqual(searchFileFor(hosts.path, b'localhost'), '::1')
class SearchHostsFileForAllTests(TestCase, GoodTempPathMixin):
    """
    Tests for L{searchFileForAll}, a helper which finds all addresses for a
    particular hostname in a I{hosts(5)}-style file.
    """
    def test_allAddresses(self):
        """
        L{searchFileForAll} returns a list of all addresses associated with the
        name passed to it.
        """
        hosts = self.path()
        hosts.setContent(
            b"127.0.0.1     foobar.example.com\n"
            b"127.0.0.2     foobar.example.com\n"
            b"::1           foobar.example.com\n")
        # Unlike searchFileFor above, this helper receives the FilePath
        # itself rather than its path attribute.
        self.assertEqual(
            ["127.0.0.1", "127.0.0.2", "::1"],
            searchFileForAll(hosts, b"foobar.example.com"))

    def test_caseInsensitively(self):
        """
        L{searchFileForAll} searches for names case-insensitively.
        """
        hosts = self.path()
        hosts.setContent(b"127.0.0.1     foobar.EXAMPLE.com\n")
        self.assertEqual(
            ["127.0.0.1"], searchFileForAll(hosts, b"FOOBAR.example.com"))

    def test_readError(self):
        """
        If there is an error reading the contents of the hosts file,
        L{searchFileForAll} returns an empty list.
        """
        # self.path() names a file that is never created, so reading it
        # fails and the error branch is exercised.
        self.assertEqual(
            [], searchFileForAll(self.path(), b"example.com"))
class HostsTestCase(TestCase, GoodTempPathMixin):
    """
    Tests for the I{hosts(5)}-based L{twisted.names.hosts.Resolver}.
    """
    def setUp(self):
        """
        Create a hosts file with IPv4, IPv6, aliased and multi-address
        entries, and a Resolver reading it with a fixed TTL.
        """
        f = self.path()
        f.setContent(b'''
1.1.1.1    EXAMPLE EXAMPLE.EXAMPLETHING
::2        mixed
1.1.1.2    MIXED
::1        ip6thingy
1.1.1.3    multiple
1.1.1.4    multiple
::3        ip6-multiple
::4        ip6-multiple
''')
        # Arbitrary TTL; it is applied to every record the resolver
        # synthesizes, and the assertions below depend on it.
        self.ttl = 4200
        self.resolver = Resolver(f.path, self.ttl)

    def test_defaultPath(self):
        """
        The default hosts file used by L{Resolver} is I{/etc/hosts} if no value
        is given for the C{file} initializer parameter.
        """
        resolver = Resolver()
        self.assertEqual(b"/etc/hosts", resolver.file)

    def test_getHostByName(self):
        """
        L{hosts.Resolver.getHostByName} returns a L{Deferred} which fires with a
        string giving the address of the queried name as found in the resolver's
        hosts file.
        """
        data = [(b'EXAMPLE', '1.1.1.1'),
                (b'EXAMPLE.EXAMPLETHING', '1.1.1.1'),
                (b'MIXED', '1.1.1.2'),
                ]
        ds = [self.resolver.getHostByName(n).addCallback(self.assertEqual, ip)
              for n, ip in data]
        return gatherResults(ds)

    def test_lookupAddress(self):
        """
        L{hosts.Resolver.lookupAddress} returns a L{Deferred} which fires with A
        records from the hosts file.
        """
        d = self.resolver.lookupAddress(b'multiple')
        def resolved(results):
            answers, authority, additional = results
            self.assertEqual(
                (RRHeader(b"multiple", A, IN, self.ttl,
                          Record_A("1.1.1.3", self.ttl)),
                 RRHeader(b"multiple", A, IN, self.ttl,
                          Record_A("1.1.1.4", self.ttl))),
                answers)
        d.addCallback(resolved)
        return d

    def test_lookupIPV6Address(self):
        """
        L{hosts.Resolver.lookupIPV6Address} returns a L{Deferred} which fires
        with AAAA records from the hosts file.
        """
        d = self.resolver.lookupIPV6Address(b'ip6-multiple')
        def resolved(results):
            answers, authority, additional = results
            self.assertEqual(
                (RRHeader(b"ip6-multiple", AAAA, IN, self.ttl,
                          Record_AAAA("::3", self.ttl)),
                 RRHeader(b"ip6-multiple", AAAA, IN, self.ttl,
                          Record_AAAA("::4", self.ttl))),
                answers)
        d.addCallback(resolved)
        return d

    def test_lookupAllRecords(self):
        """
        L{hosts.Resolver.lookupAllRecords} returns a L{Deferred} which fires
        with A records from the hosts file.
        """
        # 'mixed' has both an AAAA (::2) and an A (1.1.1.2) entry; only the
        # A record is expected in the result here.
        d = self.resolver.lookupAllRecords(b'mixed')
        def resolved(results):
            answers, authority, additional = results
            self.assertEqual(
                (RRHeader(b"mixed", A, IN, self.ttl,
                          Record_A("1.1.1.2", self.ttl)),),
                answers)
        d.addCallback(resolved)
        return d

    def testNotImplemented(self):
        """
        MX lookups against the hosts-file resolver fail with
        L{NotImplementedError}.
        """
        return self.assertFailure(self.resolver.lookupMailExchange(b'EXAMPLE'),
                                  NotImplementedError)

    def testQuery(self):
        """
        Querying the resolver for C{EXAMPLE} yields its hosts-file address.
        """
        d = self.resolver.query(Query(b'EXAMPLE'))
        d.addCallback(lambda x: self.assertEqual(x[0][0].payload.dottedQuad(),
                                                 '1.1.1.1'))
        return d

    def test_lookupAddressNotFound(self):
        """
        L{hosts.Resolver.lookupAddress} returns a L{Deferred} which fires with
        L{dns.DomainError} if the name passed in has no addresses in the hosts
        file.
        """
        return self.assertFailure(self.resolver.lookupAddress(b'foueoa'),
                                  DomainError)

    def test_lookupIPV6AddressNotFound(self):
        """
        Like L{test_lookupAddressNotFound}, but for
        L{hosts.Resolver.lookupIPV6Address}.
        """
        return self.assertFailure(self.resolver.lookupIPV6Address(b'foueoa'),
                                  DomainError)

    def test_lookupAllRecordsNotFound(self):
        """
        Like L{test_lookupAddressNotFound}, but for
        L{hosts.Resolver.lookupAllRecords}.
        """
        return self.assertFailure(self.resolver.lookupAllRecords(b'foueoa'),
                                  DomainError)
| bsd-3-clause |
ITNano/WikiSubtitleReader | raw_to_ass.py | 1 | 3753 | # -*- coding: utf-8 -*-
#Python class to parse lyrics on the form
#Singer 1: I am so happy, hear me sing
#And write it to an .ass file. (Advanced SubStation Alpha subtitle file)
#The string before the separator ':' is used to format the text by mapping it to
#a predefined format, the remainder is the actual text to sing.
import math
def time_to_seconds(time):
    """Convert an 'H:MM:SS.ss' timestamp string into total seconds (float)."""
    parts = time.split(':')
    return 3600 * float(parts[0]) + 60 * float(parts[1]) + float(parts[2])
def seconds_to_time(seconds):
    """Format a number of seconds as an 'H:MM:SS.ss' timestamp string.

    Hours take at least one digit, minutes are zero-padded to two, and
    seconds are rendered with exactly two decimal places.
    """
    whole_hours = math.floor(seconds / 3600)
    remainder = seconds - 3600 * whole_hours
    whole_minutes = math.floor(remainder / 60)
    remainder = remainder - 60 * whole_minutes
    # Round to centiseconds first; the rounding may carry into the next
    # unit (e.g. 59.999s -> 60.00s -> +1 minute).
    remainder = float("{0:05.2f}".format(remainder))
    if remainder == 60:
        remainder = 0
        whole_minutes = whole_minutes + 1
        if whole_minutes == 60:
            whole_minutes = 0
            whole_hours = whole_hours + 1
    return "{0:01.0f}:{1:02.0f}:{2:05.2f}".format(
        whole_hours, whole_minutes, remainder)
class Raw_to_ass_parser():
    """Convert 'Singer: lyric' lines into .ass Dialogue events.

    The singer prefix is mapped through style_dictionary (lower-cased keys)
    to a style name; timestamps advance by a fixed step per emitted line.
    """

    def parse_line_to_ass(self, line, delimiter, allowEmptyLines):
        """Return one 'Dialogue: ...' event for *line*, or '' for a skipped
        empty line.

        A line ending in the multi-line keyword is the upper half of a
        simultaneous pair; the following line becomes the lower half and
        shares the same time slot.
        """
        is_blank = len(line) == 0
        if is_blank:
            if not allowEmptyLines:
                return ""
            # Render skipped-but-allowed empty lines via the comment style.
            line = "kommentar:"
        pieces = line.split(delimiter, 1)
        if len(pieces) == 1:
            # No singer prefix: reuse the one from the previous line.
            pieces = [self.empty_style, pieces[0]]
        singer, lyric = pieces
        # Several singers at once fall back to "everyone" when unmapped.
        default_singer = r"ALLA" if ("," in singer or "+" in singer) else r"OKÄND"
        # Second half of a simultaneous pair is placed at the bottom.
        extra_stylepart = " NERE" if self.multi_line else ""
        stripped_lyric = lyric.strip()
        if stripped_lyric.endswith(self.multi_line_keyword):
            if self.multi_line:
                print("WARNING: Found 3+ multiline!")
            extra_stylepart = " UPPE"
            lyric = stripped_lyric[:-len(self.multi_line_keyword)]
            self.multi_line = True
        else:
            self.multi_line = False
        style = self.style_dictionary.get(singer.lower(), default_singer)
        outline = ('Dialogue: 0,' + self.time_start + ',' + self.time_end +
                   ',' + style + extra_stylepart + ',,0,0,0,,' + lyric.strip())
        if not is_blank:
            self.empty_style = singer
        # The upper half of a pair keeps the clock still so the lower half
        # lands in the same slot.
        if len(outline) > 0 and not self.multi_line:
            self.increment_time()
        return outline

    def increment_time(self):
        """Advance both timestamps by one time step."""
        new_start = time_to_seconds(self.time_start) + self.time_step
        new_end = time_to_seconds(self.time_end) + self.time_step
        self.time_start = seconds_to_time(new_start)
        self.time_end = seconds_to_time(new_end)

    def __init__(self, start_time, increment_time):
        """Start the clock at *start_time* seconds with *increment_time*-second
        steps."""
        self.time_step = float(increment_time)
        self.time_start = seconds_to_time(start_time)
        self.time_end = seconds_to_time(
            time_to_seconds(self.time_start) + self.time_step)
        self.style_dictionary = {}
        self.empty_style = ""
        self.multi_line = False
        self.multi_line_keyword = "[samtidigt]"
| apache-2.0 |
group-policy/rally | rally/plugins/openstack/scenarios/heat/stacks.py | 5 | 10683 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.heat import utils
from rally.task import atomic
from rally.task import types
from rally.task import validation
class HeatStacks(utils.HeatScenario):
    """Benchmark scenarios for Heat stacks."""

    @types.set(template_path=types.FileType, files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_and_list_stack(self, template_path, parameters=None,
                              files=None, environment=None):
        """Create a stack and then list all stacks.

        Measure the "heat stack-create" and "heat stack-list" commands
        performance.

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        self._create_stack(template_path, parameters, files, environment)
        self._list_stacks()

    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure()
    def list_stacks_and_resources(self):
        """List all resources from tenant stacks."""
        stacks = self._list_stacks()
        with atomic.ActionTimer(
                self, "heat.list_resources_of_%s_stacks" % len(stacks)):
            for stack in stacks:
                self.clients("heat").resources.list(stack.id)

    @types.set(template_path=types.FileType, files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_and_delete_stack(self, template_path, parameters=None,
                                files=None, environment=None):
        """Create and then delete a stack.

        Measure the "heat stack-create" and "heat stack-delete" commands
        performance.

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        stack = self._create_stack(template_path, parameters,
                                   files, environment)
        self._delete_stack(stack)

    @types.set(template_path=types.FileType, files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_check_delete_stack(self, template_path, parameters=None,
                                  files=None, environment=None):
        """Create, check and delete a stack.

        Measure the performance of the following commands:
        - heat stack-create
        - heat action-check
        - heat stack-delete

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        stack = self._create_stack(template_path, parameters,
                                   files, environment)
        self._check_stack(stack)
        self._delete_stack(stack)

    @types.set(template_path=types.FileType,
               updated_template_path=types.FileType,
               files=types.FileTypeDict,
               updated_files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_update_delete_stack(self, template_path,
                                   updated_template_path,
                                   parameters=None, updated_parameters=None,
                                   files=None, updated_files=None,
                                   environment=None, updated_environment=None):
        """Create, update and then delete a stack.

        Measure the "heat stack-create", "heat stack-update"
        and "heat stack-delete" commands performance.

        :param template_path: path to stack template file
        :param updated_template_path: path to updated stack template file
        :param parameters: parameters to use in heat template
        :param updated_parameters: parameters to use in updated heat template
                                   If not specified then parameters will be
                                   used instead
        :param files: files used in template
        :param updated_files: files used in updated template. If not specified
                              files value will be used instead
        :param environment: stack environment definition
        :param updated_environment: environment definition for updated stack
        """
        stack = self._create_stack(template_path, parameters,
                                   files, environment)
        # Each "updated_*" argument falls back to its creation-time value.
        self._update_stack(stack, updated_template_path,
                           updated_parameters or parameters,
                           updated_files or files,
                           updated_environment or environment)
        self._delete_stack(stack)

    @types.set(template_path=types.FileType, files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_stack_and_scale(self, template_path, output_key, delta,
                               parameters=None, files=None,
                               environment=None):
        """Create an autoscaling stack and invoke a scaling policy.

        Measure the performance of autoscaling webhooks.

        :param template_path: path to template file that includes an
                              OS::Heat::AutoScalingGroup resource
        :param output_key: the stack output key that corresponds to
                           the scaling webhook
        :param delta: the number of instances the stack is expected to
                      change by.
        :param parameters: parameters to use in heat template
        :param files: files used in template (dict of file name to
                      file path)
        :param environment: stack environment definition (dict)
        """
        # TODO(stpierre): Kilo Heat is *much* better than Juno for the
        # requirements of this scenario, so once Juno goes out of
        # support we should update this scenario to suck less. Namely:
        #
        # * Kilo Heat can supply alarm_url attributes without needing
        #   an output key, so instead of getting the output key from
        #   the user, just get the name of the ScalingPolicy to apply.
        # * Kilo Heat changes the status of a stack while scaling it,
        #   so _scale_stack() can check for the stack to have changed
        #   size and for it to be in UPDATE_COMPLETE state, so the
        #   user no longer needs to specify the expected delta.
        stack = self._create_stack(template_path, parameters, files,
                                   environment)
        self._scale_stack(stack, output_key, delta)

    @types.set(template_path=types.FileType, files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_suspend_resume_delete_stack(self, template_path,
                                           parameters=None, files=None,
                                           environment=None):
        """Create, suspend-resume and then delete a stack.

        Measure performance of the following commands:
        heat stack-create
        heat action-suspend
        heat action-resume
        heat stack-delete

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        # Local renamed from "s" to "stack" for consistency with every
        # other scenario in this class.
        stack = self._create_stack(template_path, parameters, files,
                                   environment)
        self._suspend_stack(stack)
        self._resume_stack(stack)
        self._delete_stack(stack)

    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure()
    def list_stacks_and_events(self):
        """List events from tenant stacks."""
        stacks = self._list_stacks()
        with atomic.ActionTimer(
                self, "heat.list_events_of_%s_stacks" % len(stacks)):
            for stack in stacks:
                self.clients("heat").events.list(stack.id)

    @types.set(template_path=types.FileType, files=types.FileTypeDict)
    @validation.required_services(consts.Service.HEAT)
    @validation.required_openstack(users=True)
    @scenario.configure(context={"cleanup": ["heat"]})
    def create_snapshot_restore_delete_stack(self, template_path,
                                             parameters=None, files=None,
                                             environment=None):
        """Create, snapshot-restore and then delete a stack.

        Measure performance of the following commands:
        heat stack-create
        heat stack-snapshot
        heat stack-restore
        heat stack-delete

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        stack = self._create_stack(
            template_path, parameters, files, environment)
        snapshot = self._snapshot_stack(stack)
        self._restore_stack(stack, snapshot["id"])
        self._delete_stack(stack)
petricm/DIRAC | Interfaces/scripts/dirac-admin-service-ports.py | 4 | 1118 | #!/usr/bin/env python
########################################################################
# $HeadURL$
# File : dirac-admin-service-ports
# Author : Stuart Paterson
########################################################################
"""
Print the service ports for the specified setup
"""
from __future__ import print_function
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Base import Script
# Usage text: second line of the module docstring plus argument synopsis.
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     ' %s [option|cfgfile] ... [Setup]' % Script.scriptName,
                                     'Arguments:',
                                     ' Setup: Name of the setup' ] ) )
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
# The setup name is optional; an empty string selects the default.
setup = ''
if args:
  setup = args[0]
# NOTE(review): imported only after parseCommandLine(), apparently so DIRAC
# configuration is initialized before the API module loads -- keep this
# ordering; confirm before moving the import to the top.
from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
diracAdmin = DiracAdmin()
result = diracAdmin.getServicePorts( setup, printOutput = True )
# Exit 0 on success, 2 (with the error message on stdout) on failure.
if result['OK']:
  DIRAC.exit( 0 )
else:
  print(result['Message'])
  DIRAC.exit( 2 )
| gpl-3.0 |
plotly/python-api | packages/python/plotly/plotly/validators/densitymapbox/__init__.py | 1 | 3954 | import sys
# Lazy-import shim for the ``densitymapbox`` validator classes.
#
# Python < 3.7 has no module-level ``__getattr__`` (PEP 562), so on older
# interpreters every validator is imported eagerly.  On 3.7+ the
# ``relative_import`` helper defers each import until the attribute is first
# accessed, keeping plotly's import time down.  Both branches expose the
# same set of names; the list order also defines ``__all__``, so it must
# not be reordered.
if sys.version_info < (3, 7):
    from ._zsrc import ZsrcValidator
    from ._zmin import ZminValidator
    from ._zmid import ZmidValidator
    from ._zmax import ZmaxValidator
    from ._zauto import ZautoValidator
    from ._z import ZValidator
    from ._visible import VisibleValidator
    from ._uirevision import UirevisionValidator
    from ._uid import UidValidator
    from ._textsrc import TextsrcValidator
    from ._text import TextValidator
    from ._subplot import SubplotValidator
    from ._stream import StreamValidator
    from ._showscale import ShowscaleValidator
    from ._showlegend import ShowlegendValidator
    from ._reversescale import ReversescaleValidator
    from ._radiussrc import RadiussrcValidator
    from ._radius import RadiusValidator
    from ._opacity import OpacityValidator
    from ._name import NameValidator
    from ._metasrc import MetasrcValidator
    from ._meta import MetaValidator
    from ._lonsrc import LonsrcValidator
    from ._lon import LonValidator
    from ._legendgroup import LegendgroupValidator
    from ._latsrc import LatsrcValidator
    from ._lat import LatValidator
    from ._idssrc import IdssrcValidator
    from ._ids import IdsValidator
    from ._hovertextsrc import HovertextsrcValidator
    from ._hovertext import HovertextValidator
    from ._hovertemplatesrc import HovertemplatesrcValidator
    from ._hovertemplate import HovertemplateValidator
    from ._hoverlabel import HoverlabelValidator
    from ._hoverinfosrc import HoverinfosrcValidator
    from ._hoverinfo import HoverinfoValidator
    from ._customdatasrc import CustomdatasrcValidator
    from ._customdata import CustomdataValidator
    from ._colorscale import ColorscaleValidator
    from ._colorbar import ColorbarValidator
    from ._coloraxis import ColoraxisValidator
    from ._below import BelowValidator
    from ._autocolorscale import AutocolorscaleValidator
else:
    from _plotly_utils.importers import relative_import

    # relative_import returns (__all__, __getattr__, __dir__) so attribute
    # access triggers the import of the matching "._module.ClassName".
    __all__, __getattr__, __dir__ = relative_import(
        __name__,
        [],
        [
            "._zsrc.ZsrcValidator",
            "._zmin.ZminValidator",
            "._zmid.ZmidValidator",
            "._zmax.ZmaxValidator",
            "._zauto.ZautoValidator",
            "._z.ZValidator",
            "._visible.VisibleValidator",
            "._uirevision.UirevisionValidator",
            "._uid.UidValidator",
            "._textsrc.TextsrcValidator",
            "._text.TextValidator",
            "._subplot.SubplotValidator",
            "._stream.StreamValidator",
            "._showscale.ShowscaleValidator",
            "._showlegend.ShowlegendValidator",
            "._reversescale.ReversescaleValidator",
            "._radiussrc.RadiussrcValidator",
            "._radius.RadiusValidator",
            "._opacity.OpacityValidator",
            "._name.NameValidator",
            "._metasrc.MetasrcValidator",
            "._meta.MetaValidator",
            "._lonsrc.LonsrcValidator",
            "._lon.LonValidator",
            "._legendgroup.LegendgroupValidator",
            "._latsrc.LatsrcValidator",
            "._lat.LatValidator",
            "._idssrc.IdssrcValidator",
            "._ids.IdsValidator",
            "._hovertextsrc.HovertextsrcValidator",
            "._hovertext.HovertextValidator",
            "._hovertemplatesrc.HovertemplatesrcValidator",
            "._hovertemplate.HovertemplateValidator",
            "._hoverlabel.HoverlabelValidator",
            "._hoverinfosrc.HoverinfosrcValidator",
            "._hoverinfo.HoverinfoValidator",
            "._customdatasrc.CustomdatasrcValidator",
            "._customdata.CustomdataValidator",
            "._colorscale.ColorscaleValidator",
            "._colorbar.ColorbarValidator",
            "._coloraxis.ColoraxisValidator",
            "._below.BelowValidator",
            "._autocolorscale.AutocolorscaleValidator",
        ],
    )
| mit |
projecthamster/hamster | waflib/extras/use_config.py | 49 | 5657 | #!/usr/bin/env python
# coding=utf-8
# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
"""
When a project has a lot of options the 'waf configure' command line can be
very long and it becomes a cause of error.
This tool provides a convenient way to load a set of configuration parameters
from a local file or from a remote url.
The configuration parameters are stored in a Python file that is imported as
an extra waf tool can be.
Example:
$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
(or 'http://www.anywhere.org/wafcfg').
If the files are available locally, it could be:
$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
The configuration of 'myconf1.py' is automatically loaded by calling
its 'configure' function. In this example, it defines environment variables and
set options:
def configure(self):
self.env['CC'] = 'gcc-4.8'
self.env.append_value('LIBPATH', [...])
self.options.perlbinary = '/usr/local/bin/perl'
self.options.pyc = False
The corresponding command line should have been:
$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
This is an extra tool, not bundled with the default waf binary.
To add the use_config tool to the waf file:
$ ./waf-light --tools=use_config
When using this tool, the wscript will look like:
def options(opt):
opt.load('use_config')
def configure(conf):
conf.load('use_config')
"""
import sys
import os.path as osp
import os
local_repo = ''
"""Local repository containing additional Waf tools (plugins)"""
remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/'
"""
Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
$ waf configure --download
"""
remote_locs = ['waflib/extras', 'waflib/Tools']
"""
Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
"""
try:
from urllib import request
except ImportError:
from urllib import urlopen
else:
urlopen = request.urlopen
from waflib import Errors, Context, Logs, Utils, Options, Configure
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
DEFAULT_DIR = 'wafcfg'
# add first the current wafcfg subdirectory
sys.path.append(osp.abspath(DEFAULT_DIR))
def options(self):
	"""
	Register the ``--download``, ``--use-config`` and ``--use-config-dir``
	options on the 'configure options' group of the option parser.
	"""
	grp = self.add_option_group('configure options')
	grp.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
	grp.add_option(
		'--use-config', action='store', default=None, metavar='CFG', dest='use_config',
		help='force the configuration parameters by importing CFG.py. Several modules may be provided (comma separated).')
	grp.add_option(
		'--use-config-dir', action='store', default=DEFAULT_DIR, metavar='CFG_DIR', dest='use_config_dir',
		help='path or url where to find the configuration file')
def download_check(node):
	"""
	Hook invoked for every downloaded tool; the default accepts anything.
	Replace with your own function (e.g. a checksum verification) if the
	downloads must be validated.
	"""
	return None
def download_tool(tool, force=False, ctx=None):
	"""
	Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::

		$ waf configure --download

	:param tool: name of the tool module to fetch (without the ``.py`` suffix)
	:param force: unused; kept for backward compatibility of the signature
	:param ctx: context whose root node is used to write the downloaded file
	:return: the imported tool module
	:raises Errors.WafError: if no remote location yields a loadable module
	"""
	# Try every repository URL with every remote sub-directory until one works.
	for x in Utils.to_list(remote_repo):
		for sub in Utils.to_list(remote_locs):
			url = '/'.join((x, sub, tool + '.py'))
			try:
				web = urlopen(url)
				try:
					# Only accept a plain HTTP 200; anything else -> next URL.
					if web.getcode() != 200:
						continue
				except AttributeError:
					pass
			except Exception:
				# on python3 urlopen throws an exception
				# python 2.3 does not have getcode and throws an exception to fail
				continue
			else:
				# Store the tool alongside the bundled extras so Context.load_tool finds it.
				tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
				tmp.write(web.read(), 'wb')
				Logs.warn('Downloaded %s from %s', tool, url)
				download_check(tmp)
				try:
					module = Context.load_tool(tool)
				except Exception:
					# Unusable download: remove it (best effort) and keep searching.
					Logs.warn('The tool %s from %s is unusable', tool, url)
					try:
						tmp.delete()
					except Exception:
						pass
					continue
				return module
	raise Errors.WafError('Could not load the Waf tool')
def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
	"""
	Wrapper around the stock tool loader that, on ImportError, downloads
	the missing tool when ``--download`` was given on the command line.
	"""
	try:
		return Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
	except ImportError as e:
		if not ctx or not hasattr(Options.options, 'download'):
			Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
			raise
		if not Options.options.download:
			ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
			return
		module = download_tool(tool, ctx=ctx)
		if not module:
			ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
		return module
# Install the wrapper: keep the stock loader reachable as load_tool_default,
# then route all Context.load_tool calls through the download-aware version.
Context.load_tool_default = Context.load_tool
Context.load_tool = load_tool
Configure.download_tool = download_tool
def configure(self):
	"""
	Load every configuration module named by ``--use-config`` (comma
	separated).  A remote ``--use-config-dir`` (any URL with a scheme)
	enables downloading; the local ``wafcfg`` directory is always searched
	as a fallback.  Does nothing when ``--use-config`` is unset.
	"""
	use_cfg = self.options.use_config
	if use_cfg is None:
		return
	parsed = urlparse(self.options.use_config_dir)
	extra = {}
	if parsed.scheme:
		# search first with the exact url, else try with +'/wafcfg'
		extra = {'download': True, 'remote_url': parsed.geturl(), 'remote_locs': ['', DEFAULT_DIR]}
	tooldir = parsed.geturl() + ' ' + DEFAULT_DIR
	for cfg in use_cfg.split(','):
		Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
		self.load(cfg, tooldir=tooldir, **extra)
	self.start_msg('Checking for configuration')
	self.end_msg(use_cfg)
| gpl-3.0 |
agry/NGECore2 | scripts/mobiles/tatooine/krayt_cult_ministrant.py | 2 | 1665 | import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
	"""Register the 'krayt_cult_ministrant' mobile template with the spawn service."""
	mobileTemplate = MobileTemplate()
	mobileTemplate.setCreatureName('tatooine_opening_cultist_10')
	mobileTemplate.setLevel(4)
	mobileTemplate.setDifficulty(Difficulty.NORMAL)
	mobileTemplate.setMinSpawnDistance(4)
	mobileTemplate.setMaxSpawnDistance(8)
	mobileTemplate.setDeathblow(False)
	mobileTemplate.setScale(1)
	mobileTemplate.setSocialGroup("krayt cult")
	mobileTemplate.setAssistRange(0)
	mobileTemplate.setStalker(False)

	# Appearance pool: female/male variants of four species, in the same
	# order the original listed them.
	templates = Vector()
	for species in ('human', 'zabrak', 'rodian', 'moncal'):
		for sex in ('female', 'male'):
			templates.add('object/mobile/shared_%s_%s.iff' % (species, sex))
	mobileTemplate.setTemplates(templates)

	weaponTemplates = Vector()
	weaponTemplates.add(WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic'))
	mobileTemplate.setWeaponTemplateVector(weaponTemplates)

	mobileTemplate.setDefaultAttack('meleehit')
	mobileTemplate.setAttacks(Vector())

	core.spawnService.addMobileTemplate('krayt_cult_ministrant', mobileTemplate)
	return
emkael/jfrteamy-playoff | jfr_playoff/gui/tabs.py | 1 | 18307 | #coding=utf-8
import os
from collections import OrderedDict
import Tkinter as tk
import ttk
import tkFileDialog as tkfd
import tkMessageBox as tkmb
from .frames import TraceableText, NumericSpinbox
from .frames.match import *
from .frames.network import *
from .frames.team import *
from .frames.translations import *
from .frames.visual import *
from .variables import NotifyStringVar, NotifyNumericVar, NotifyBoolVar
from ..data import PlayoffData
from ..db import PlayoffDB
class PlayoffTab(ttk.Frame):
    """Base class for the configurator's notebook tabs.

    Subclasses override the hook methods below; the constructor wires up a
    padded inner frame and invokes initData() before renderContent().
    """

    def __init__(self, master):
        ttk.Frame.__init__(self, master)
        inner = ttk.Frame(self)
        inner.pack(fill=tk.BOTH, expand=True, padx=5, pady=5)
        self.frame = inner
        self.initData()
        self.renderContent(self.frame)

    @property
    def title(self):
        """Human-readable tab caption; subclasses must override."""
        pass

    def initData(self):
        """Hook: create tk variables before the widgets are rendered."""
        pass

    def renderContent(self, container):
        """Hook: build the tab's widgets inside *container*."""
        pass

    def setValues(self, config):
        """Hook: populate widgets from a configuration dict."""
        pass

    def getConfig(self):
        """Hook: serialize widget state back into a configuration dict."""
        pass
class MainSettingsTab(PlayoffTab):
    """First tab: output file location and general HTML page settings."""

    # Fallback auto-refresh period (seconds) used when the loaded config
    # has no valid positive 'refresh' value.
    DEFAULT_INTERVAL = 60

    @property
    def title(self):
        return 'Główne ustawienia'

    def initData(self):
        """Create the tk variables backing this tab's widgets."""
        self.outputPath = NotifyStringVar()
        self.pageTitle = NotifyStringVar()
        self.pageLogoh = NotifyStringVar()
        self.favicon = NotifyStringVar()
        self.refresh = NotifyBoolVar()
        # Keep the interval spinbox enabled only while refreshing is on.
        self.refresh.trace('w', self._updateRefreshFields)
        self.refreshInterval = NotifyNumericVar()

    def _chooseOutputPath(self):
        """Ask for the output file via a save dialog, forcing a .html suffix."""
        currentPath = self.outputPath.get()
        filename = tkfd.asksaveasfilename(
            initialdir=os.path.dirname(currentPath) if currentPath else '.',
            title='Wybierz plik wyjściowy',
            filetypes=(('HTML files', '*.html'),))
        if filename:
            if not filename.lower().endswith('.html'):
                filename = filename + '.html'
            self.outputPath.set(filename)

    def _updateRefreshFields(self, *args):
        """Sync the interval spinbox state with the refresh checkbox."""
        # NOTE(review): assumes renderContent() has already created
        # self.intervalField; the trace is registered in initData, which
        # PlayoffTab.__init__ runs first — confirm the variable cannot
        # change before rendering.
        self.intervalField.configure(
            state=tk.NORMAL if self.refresh.get() else tk.DISABLED)

    def setValues(self, config):
        """Populate the widgets from the 'output' and 'page' config sections."""
        self.outputPath.set(config['output'] if 'output' in config else '')
        if 'page' in config:
            self.pageTitle.set(
                config['page'].get('title', ''))
            self.pageLogoh.set(
                config['page'].get('logoh', ''))
            self.favicon.set(
                config['page'].get('favicon', ''))
            try:
                interval = int(config['page']['refresh'])
                if interval > 0:
                    self.refresh.set(1)
                    self.refreshInterval.set(interval)
                else:
                    # refresh <= 0 means auto-refresh is disabled.
                    self.refresh.set(0)
                    self.refreshInterval.set(self.DEFAULT_INTERVAL)
            except (KeyError, TypeError, ValueError):
                # Missing or non-numeric 'refresh': treat as disabled.  The
                # original bare 'except:' also swallowed SystemExit and
                # KeyboardInterrupt; this narrows it to what the dict
                # lookup and int() can actually raise.
                self.refresh.set(0)
                self.refreshInterval.set(self.DEFAULT_INTERVAL)
        else:
            self.pageTitle.set('')
            self.pageLogoh.set('')
            self.favicon.set('')
            self.refresh.set(0)
            self.refreshInterval.set(self.DEFAULT_INTERVAL)

    def renderContent(self, container):
        """Build the tab layout: output-path row plus the page-settings box."""
        (ttk.Label(container, text='Plik wynikowy:')).grid(
            row=0, column=0, sticky=tk.E, pady=2)
        outputPath = tk.Frame(container)
        outputPath.grid(row=0, column=1, sticky=tk.E+tk.W, pady=2)
        (ttk.Entry(outputPath, width=60, textvariable=self.outputPath)).grid(
            row=0, column=0, sticky=tk.W+tk.E)
        (ttk.Button(
            outputPath,
            text='wybierz...', command=self._chooseOutputPath)).grid(
                row=0, column=1)
        outputPath.columnconfigure(0, weight=1)
        (ttk.Separator(container, orient=tk.HORIZONTAL)).grid(
            row=1, column=0, columnspan=2, sticky=tk.E+tk.W, pady=2)
        pageSettings = ttk.LabelFrame(
            container, text='Ustawienia strony')
        pageSettings.grid(
            row=2, column=0, columnspan=2, sticky=tk.W+tk.E+tk.N+tk.S, pady=5)
        pageSettings.columnconfigure(1, weight=1)
        (ttk.Label(pageSettings, text='Tytuł:')).grid(
            row=0, column=0, sticky=tk.E, pady=2)
        (tk.Entry(pageSettings, textvariable=self.pageTitle)).grid(
            row=0, column=1, sticky=tk.W+tk.E, pady=2)
        (ttk.Label(pageSettings, text='Logoh:')).grid(
            row=1, column=0, sticky=tk.E+tk.N, pady=2)
        (TraceableText(pageSettings, width=45, height=10,
                       variable=self.pageLogoh)).grid(
                           row=1, column=1,
                           sticky=tk.W+tk.N+tk.E+tk.S, pady=2)
        (ttk.Label(pageSettings, text='Fawikona:')).grid(
            row=2, column=0, sticky=tk.E, pady=2)
        (tk.Entry(pageSettings, textvariable=self.favicon)).grid(
            row=2, column=1, sticky=tk.W+tk.E, pady=2)
        (ttk.Label(pageSettings, text='Odświeżaj:')).grid(
            row=3, column=0, sticky=tk.E, pady=2)
        refreshPanel = tk.Frame(pageSettings)
        refreshPanel.grid(row=3, column=1, sticky=tk.W+tk.E, pady=2)
        (ttk.Checkbutton(
            refreshPanel,
            command=self._updateRefreshFields, variable=self.refresh)).grid(
                row=0, column=0)
        (ttk.Label(refreshPanel, text='co:')).grid(row=0, column=1)
        self.intervalField = NumericSpinbox(
            refreshPanel, from_=30, to=3600, width=5, justify=tk.RIGHT,
            textvariable=self.refreshInterval)
        self.intervalField.grid(row=0, column=2)
        (ttk.Label(refreshPanel, text='sekund')).grid(row=0, column=3)
        container.columnconfigure(1, weight=1)
        container.rowconfigure(4, weight=1)

    def getConfig(self):
        """Serialize the tab into the 'output' and 'page' config sections."""
        # Insert keys explicitly: passing a plain dict literal to
        # OrderedDict() does not preserve the intended key order on
        # Python 2, which the original code did.
        page = OrderedDict()
        page['title'] = self.pageTitle.get()
        page['logoh'] = self.pageLogoh.get()
        page['refresh'] = self.refreshInterval.get() \
            if self.refresh.get() > 0 else 0
        config = OrderedDict()
        config['output'] = self.outputPath.get()
        config['page'] = page
        favicon = self.favicon.get().strip()
        if len(favicon):
            config['page']['favicon'] = favicon
        return config
class TeamsTab(PlayoffTab):
    """Tab defining tournament participants, aliases and final-order tweaks."""

    @property
    def title(self):
        return 'Uczestnicy'

    def renderContent(self, container):
        """Lay out team settings + aliases on the left, preview on the right."""
        leftFrame = tk.Frame(container)
        leftFrame.pack(side=tk.LEFT, fill=tk.BOTH, expand=True)
        self.settingsFrame = TeamSettingsFrame(
            leftFrame, vertical=True, padx=5, pady=5)
        self.settingsFrame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        (ttk.Separator(
            leftFrame, orient=tk.HORIZONTAL)).pack(
                side=tk.TOP, fill=tk.X)
        self.aliasFrame = TeamAliasFrame(
            leftFrame, vertical=True, padx=5, pady=5)
        self.aliasFrame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        self.previewFrame = TeamPreviewFrame(
            container, vertical=True, padx=5, pady=5)
        self.previewFrame.pack(side=tk.RIGHT, fill=tk.BOTH, expand=True)
        self._teamList = []
        self._teamListFetcher = None
        # Any team-settings change schedules a debounced list re-fetch.
        self.winfo_toplevel().bind(
            '<<TeamSettingsChanged>>', self.onTeamSettingsChange, add='+')

    def onTeamSettingsChange(self, event):
        """Restart the 500 ms debounce timer for the team-list fetch."""
        if self._teamListFetcher is not None:
            self.after_cancel(self._teamListFetcher)
        self._teamListFetcher = self.after(500, self._fetchTeamList)

    def _fetchTeamList(self):
        """Fetch the team list (using the DB when reachable) and broadcast it."""
        config = self.collectConfig()
        dbConfig = self.winfo_toplevel().getDbConfig()
        if dbConfig is not None:
            config['database'] = dbConfig
        data = PlayoffData()
        db = None
        try:
            db = PlayoffDB(dbConfig)
        except Exception:
            # DB access is optional here; fall back to a DB-less fetch.
            pass
        self._teamList = data.fetch_team_list(config['teams'], db)[0]
        self.winfo_toplevel().event_generate(
            '<<TeamListChanged>>', when='tail')

    def getTeams(self):
        """Return the most recently fetched team list."""
        return self._teamList

    def collectConfig(self):
        """Assemble the teams-related part of the configuration.

        Keys are inserted explicitly so the OrderedDict keeps a stable,
        intended order; the original passed a plain dict literal to
        OrderedDict(), which does not preserve order on Python 2.
        """
        config = OrderedDict()
        config['teams'] = self.settingsFrame.getConfig()
        config['team_aliases'] = self.aliasFrame.getConfig()
        tieConfig = self.previewFrame.getTieConfig()
        if tieConfig is not None and isinstance(config['teams'], dict):
            config['teams']['ties'] = tieConfig
        orderConfig = self.previewFrame.getOrderConfig()
        if orderConfig:
            config['custom_final_order'] = orderConfig
        return config

    def setValues(self, config):
        """Populate the three frames from the configuration dict."""
        self.settingsFrame.setValues(
            config['teams'] if 'teams' in config else [])
        self.aliasFrame.setValues(
            config['team_aliases'] if 'team_aliases' in config else {})
        self.previewFrame.setTieConfig(
            config['teams']['ties']
            if 'teams' in config and 'ties' in config['teams'] else [])
        self.previewFrame.setOrderConfig(
            config.get('custom_final_order', []))

    def getConfig(self):
        return self.collectConfig()
class MatchesTab(PlayoffTab):
    """Tab managing tournament phases, each holding a list of matches."""

    @property
    def title(self):
        return 'Mecze'

    def addPhase(self):
        """Append a fresh phase tab, select it and return its numeric key."""
        phase = MatchPhaseFrame(
            self.phaseFrame, vertical=True, padx=10, pady=10)
        # Keys grow monotonically and are never reused after a removal.
        newPhase = max(self.phases.keys()) + 1 if len(self.phases) else 1
        self.phaseFrame.add(phase)
        self.phases[newPhase] = phase
        self._renameTabs()
        self.phaseFrame.select(phase)
        self.winfo_toplevel().event_generate(
            '<<MatchListChanged>>', when='tail')
        self.winfo_toplevel().event_generate(
            '<<ValueChanged>>', when='tail')
        return newPhase

    def removePhase(self, phase=None):
        """Remove the phase with the given key, or the selected tab if None."""
        selected = self.phaseFrame.select() if phase is None \
            else self.phases[phase]
        if selected:
            self.phaseFrame.forget(selected)
            key_to_delete = None
            # Notebook.select() yields a widget path string, so compare via
            # str() rather than object identity.
            for key, tab in self.phases.iteritems():
                if str(selected) == str(tab):
                    key_to_delete = key
                    break
            if key_to_delete:
                self.phases.pop(key_to_delete)
            self.winfo_toplevel().event_generate(
                '<<MatchListChanged>>', when='tail')

    def _renameTabs(self, *args):
        """Refresh every tab caption to '<phase name> (#key)'."""
        for idx, tab in self.phases.iteritems():
            title = tab.name.get().strip()
            self.phaseFrame.tab(
                tab, text=(title if len(title) else '') + ' (#%d)' % (idx))

    def renderContent(self, container):
        """Build the phase notebook plus the add/remove buttons."""
        container.columnconfigure(1, weight=1)
        container.rowconfigure(2, weight=1)
        (ttk.Label(container, text='Fazy rozgrywek:')).grid(
            row=0, column=0, columnspan=2, sticky=tk.W)
        (ttk.Button(
            container, text='+', command=self.addPhase, width=5)).grid(
                row=1, column=0, sticky=tk.W)
        (ttk.Button(
            container, text='-', command=self.removePhase, width=5)).grid(
                row=1, column=1, sticky=tk.W)
        self.phases = {}
        self.phaseFrame = ttk.Notebook(container)
        self.phaseFrame.grid(
            row=2, column=0, columnspan=2, sticky=tk.W+tk.E+tk.N+tk.S)
        self.winfo_toplevel().bind(
            '<<PhaseRenamed>>', self._renameTabs, add='+')

    def getMatches(self):
        """Return every MatchSettingsFrame across all phases."""
        matches = []
        for phase in self.phases.values():
            matches += [w for w in phase.matches.widgets
                        if isinstance(w, MatchSettingsFrame)]
        return matches

    def setValues(self, config):
        """Rebuild all phase tabs from *config* and backfill missing match IDs."""
        phases = config['phases'] if 'phases' in config else []
        for idx in self.phases.keys():
            self.removePhase(idx)
        for phase in phases:
            newPhase = self.addPhase()
            self.phases[newPhase].setValues(phase)
        for phase in self.phases.values():
            for match in phase.matches.widgets:
                # BUG FIX: the original tested "match.getMatchID == 0",
                # comparing the bound method object itself to 0 — which is
                # always False — so freshly loaded matches never received a
                # generated ID.  Call the accessor instead.
                if isinstance(match, MatchSettingsFrame) \
                   and match.getMatchID() == 0:
                    match.matchID.set(
                        self.winfo_toplevel().getNewMatchID(match))

    def getConfig(self):
        """Serialize every phase under the 'phases' key."""
        return OrderedDict({
            'phases': [phase.getConfig() for phase in self.phases.values()]
        })
class SwissesTab(PlayoffTab):
    """Tab holding the swiss-tournament link definitions."""

    @property
    def title(self):
        return 'Swissy'

    def renderContent(self, container):
        # A single frame fills the entire tab.
        frame = SwissesFrame(container, vertical=True)
        frame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        self.swisses = frame

    def setValues(self, config):
        self.swisses.setValues(config.get('swiss', []))

    def getConfig(self):
        # Only emit the section when at least one swiss is configured.
        values = self.swisses.getValues()
        if not values:
            return None
        return OrderedDict({'swiss': values})
class NetworkTab(PlayoffTab):
    """Tab with MySQL, Goniec and remote-source network settings."""

    @property
    def title(self):
        return 'Sieć'

    def _onDBSettingsChange(self, event):
        # Debounce: restart the 1.5 s timer on every DB-settings change.
        if self.dbFetchTimer is not None:
            self.after_cancel(self.dbFetchTimer)
        self.dbFetchTimer = self.after(1500, self._fetchDBList)

    def _fetchDBList(self):
        """Query MySQL for schemas that look like JFR Teamy event databases."""
        self._dbList = []
        try:
            db = PlayoffDB(self.getDB())
            # A schema with an admin.teamcnt column is a JFR Teamy database.
            for row in db.fetch_all(
                    'information_schema',
                    'SELECT TABLE_SCHEMA FROM information_schema.COLUMNS WHERE TABLE_NAME = "admin" AND COLUMN_NAME = "teamcnt" ORDER BY TABLE_SCHEMA;', {}):
                self._dbList.append(row[0])
        except Exception as e:
            # Best effort: any connection/query failure leaves the list empty.
            pass
        self.winfo_toplevel().event_generate('<<DBListChanged>>', when='tail')

    def getDBList(self):
        # Most recently fetched list of candidate schema names.
        return self._dbList

    def getDB(self):
        # Current MySQL connection settings (or None when incomplete).
        return self.mysqlFrame.getConfig()

    def renderContent(self, container):
        """Lay out MySQL + Goniec side by side, remotes underneath."""
        container.columnconfigure(0, weight=1)
        container.columnconfigure(1, weight=1)
        container.rowconfigure(1, weight=1)
        self.mysqlFrame = MySQLConfigurationFrame(container)
        self.mysqlFrame.grid(row=0, column=0, sticky=tk.W+tk.E+tk.N+tk.S)
        self.goniecFrame = GoniecConfigurationFrame(container)
        self.goniecFrame.grid(row=0, column=1, sticky=tk.W+tk.E+tk.N+tk.S)
        self.remoteFrame = RemoteConfigurationFrame(container, vertical=True)
        self.remoteFrame.grid(
            row=1, column=0, columnspan=2, sticky=tk.W+tk.E+tk.N+tk.S)
        self._dbList = []
        self.dbFetchTimer = None
        self.winfo_toplevel().bind(
            '<<DBSettingsChanged>>', self._onDBSettingsChange, add='+')

    def setValues(self, config):
        """Populate the three frames from the configuration dict."""
        self.mysqlFrame.setValues(
            config['database'] if 'database' in config else {})
        self.goniecFrame.setValues(
            config['goniec'] if 'goniec' in config else {})
        self.remoteFrame.setValues(
            config['remotes'] if 'remotes' in config else [])

    def getConfig(self):
        """Serialize to 'database' (optional), 'goniec' and 'remotes' (optional)."""
        config = OrderedDict()
        mysql = self.getDB()
        if mysql is not None:
            config['database'] = mysql
        config['goniec'] = self.goniecFrame.getValues()
        remotes = self.remoteFrame.getValues()
        if len(remotes):
            config['remotes'] = remotes
        return config
class VisualTab(PlayoffTab):
    """Tab with page-appearance settings and bracket-box positioning."""

    @property
    def title(self):
        return 'Wygląd'

    def renderContent(self, container):
        container.columnconfigure(0, weight=1)
        container.rowconfigure(1, weight=1)
        sticky_all = tk.S+tk.N+tk.E+tk.W
        self.settingsFrame = VisualSettingsFrame(container)
        self.settingsFrame.grid(row=0, column=0, sticky=sticky_all)
        self.positionFrame = BoxPositionsFrame(container, vertical=True)
        self.positionFrame.grid(row=1, column=0, sticky=sticky_all)

    def setValues(self, config):
        self.settingsFrame.setValues(config.get('page', {}))
        canvas = config.get('canvas', {})
        if 'box_positioning' in canvas:
            self.positionFrame.setValues(canvas['box_positioning'])
        else:
            self.positionFrame.setValues({})

    def getConfig(self):
        config = OrderedDict({'page': self.settingsFrame.getValues()})
        boxes = self.positionFrame.getValues()
        if boxes:
            canvas = OrderedDict()
            canvas['box_positioning'] = boxes
            config['canvas'] = canvas
        return config
class StyleTab(PlayoffTab):
    """Tab configuring connector-line styles and per-position cell styles."""

    @property
    def title(self):
        return 'Style'

    def renderContent(self, container):
        """Stack the line-style frame, a separator and the position styles."""
        self.linesFrame = LineStylesFrame(container)
        self.linesFrame.pack(side=tk.TOP, anchor=tk.W)
        (ttk.Separator(container, orient=tk.HORIZONTAL)).pack(
            side=tk.TOP, fill=tk.X)
        self.positionStylesFrame = PositionStylesFrame(
            container, vertical=True)
        self.positionStylesFrame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)

    def setValues(self, config):
        """Populate both frames from the 'canvas'/'position_styles' sections."""
        if 'canvas' in config:
            self.linesFrame.setValues(config['canvas'])
        else:
            self.linesFrame.setValues({})
        if 'position_styles' in config:
            self.positionStylesFrame.setValues(config['position_styles'])
        else:
            self.positionStylesFrame.setValues([])

    def getConfig(self):
        """Serialize to the 'canvas' and 'position_styles' sections."""
        # Insert keys explicitly: the original passed a two-key dict literal
        # to OrderedDict(), which does not preserve the intended key order
        # on Python 2 (the literal is already hash-ordered at that point).
        config = OrderedDict()
        config['canvas'] = self.linesFrame.getValues()
        config['position_styles'] = self.positionStylesFrame.getValues()
        return config
class TranslationsTab(PlayoffTab):
    """Tab editing the i18n string table."""

    @property
    def title(self):
        return 'Tłumaczenia'

    def renderContent(self, container):
        # Single frame fills the tab.
        frame = TranslationConfigurationFrame(container, vertical=True)
        frame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        self.translationsFrame = frame

    def setValues(self, config):
        self.translationsFrame.setTranslations(config.get('i18n', {}))

    def getConfig(self):
        return OrderedDict({'i18n': self.translationsFrame.getTranslations()})
__all__ = ['MainSettingsTab', 'TeamsTab', 'MatchesTab', 'SwissesTab',
'NetworkTab', 'VisualTab', 'StyleTab', 'TranslationsTab']
| bsd-2-clause |
LeroViten/LerNex-Ancora-Kernel | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";

# Optional [comm] argument restricts the tally to processes with that name.
for_comm = None

if len(sys.argv) > 2:
	sys.exit(usage)
if len(sys.argv) > 1:
	for_comm = sys.argv[1]

# syscalls[id] -> hit count; autodict allows indexing before initialization.
syscalls = autodict()
def trace_begin():
	# Called once by perf before any events are processed.
	print "Press control+C to stop and show the summary"
def trace_end():
	# Called once by perf after the trace ends; emit the summary table.
	print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# Handler for each raw_syscalls:sys_enter event: count hits per syscall id,
	# optionally filtered to the single command name given on the command line.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	try:
		syscalls[id] += 1
	except TypeError:
		# First hit for this id: autodict returned an empty dict, so += failed.
		syscalls[id] = 1
def print_syscall_totals():
	# Print a two-column table (syscall name, hit count) sorted by count,
	# most frequent first.  Trailing commas on the py2 print statements
	# suppress the extra newline (the strings embed their own "\n").
	if for_comm is not None:
		print "\nsyscall events for %s:\n\n" % (for_comm),
	else:
		print "\nsyscall events:\n\n",

	print "%-40s %10s\n" % ("event", "count"),
	print "%-40s %10s\n" % ("----------------------------------------", \
	"-----------"),

	# Sort by (count, id) descending via the py2-only tuple-unpacking lambda.
	for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
	reverse = True):
		print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
shashank971/edx-platform | common/djangoapps/student/tests/test_email.py | 63 | 17946 |
import json
import django.db
import unittest
from student.tests.factories import UserFactory, RegistrationFactory, PendingEmailChangeFactory
from student.views import (
reactivation_email_for_user, do_email_change_request, confirm_email_change,
validate_new_email, SETTING_CHANGE_INITIATED
)
from student.models import UserProfile, PendingEmailChange
from django.core.urlresolvers import reverse
from django.core import mail
from django.contrib.auth.models import User
from django.test import TestCase, TransactionTestCase
from django.test.client import RequestFactory
from mock import Mock, patch
from django.http import HttpResponse
from django.conf import settings
from edxmako.shortcuts import render_to_string
from edxmako.tests import mako_middleware_process_request
from util.request import safe_get_host
from util.testing import EventTestMixin
class TestException(Exception):
    """Exception type that no production code catches explicitly; raised in
    tests to verify that unexpected errors propagate."""
def mock_render_to_string(template_name, context):
    """Return a deterministic string encoding *template_name* and *context*.

    The context items are sorted so the result does not depend on dict
    iteration order, making it safe to assert against in tests.
    """
    # items() instead of the py2-only iteritems(): identical output under
    # Python 2 (sorted() copies either way) and it keeps the helper working
    # under Python 3 as well.
    return str((template_name, sorted(context.items())))
def mock_render_to_response(template_name, context):
    """Return an HttpResponse with content that encodes template_name and context"""
    # View confirm_email_change uses @transaction.commit_manually.
    # This simulates any db access in the templates.
    UserProfile.objects.exists()
    content = mock_render_to_string(template_name, context)
    return HttpResponse(content)
class EmailTestMixin(object):
    """Adds useful assertions for testing `email_user`"""

    def assertEmailUser(self, email_user, subject_template, subject_context, body_template, body_context):
        """Assert that `email_user` sent an email whose subject came from
        (subject_template, subject_context) and whose body came from
        (body_template, body_context).

        `email_user`: the mocked `django.contrib.auth.models.User.email_user`
        """
        expected_subject = mock_render_to_string(subject_template, subject_context)
        expected_body = mock_render_to_string(body_template, body_context)
        email_user.assert_called_with(expected_subject, expected_body, settings.DEFAULT_FROM_EMAIL)

    def append_allowed_hosts(self, hostname):
        """ Append hostname to settings.ALLOWED_HOSTS """
        settings.ALLOWED_HOSTS.append(hostname)
        # Undo the append when the test finishes.
        self.addCleanup(settings.ALLOWED_HOSTS.pop)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class ActivationEmailTests(TestCase):
    """Test sending of the activation email. """

    ACTIVATION_SUBJECT = "Activate Your edX Account"

    # Text fragments we expect in the body of an email
    # sent from an OpenEdX installation.
    OPENEDX_FRAGMENTS = [
        "Thank you for signing up for {platform}.".format(platform=settings.PLATFORM_NAME),
        "http://edx.org/activate/",
        (
            "if you require assistance, check the help section of the "
            "{platform} website".format(platform=settings.PLATFORM_NAME)
        )
    ]

    # Text fragments we expect in the body of an email
    # sent from an EdX-controlled domain.
    EDX_DOMAIN_FRAGMENTS = [
        "Thank you for signing up for {platform}".format(platform=settings.PLATFORM_NAME),
        "http://edx.org/activate/",
        "https://www.edx.org/contact-us",
        "This email was automatically sent by edx.org"
    ]

    def setUp(self):
        # No extra fixtures needed; kept for symmetry with sibling tests.
        super(ActivationEmailTests, self).setUp()

    def test_activation_email(self):
        # Default (OpenEdX) installation: generic help-section wording.
        self._create_account()
        self._assert_activation_email(self.ACTIVATION_SUBJECT, self.OPENEDX_FRAGMENTS)

    @patch.dict(settings.FEATURES, {'IS_EDX_DOMAIN': True})
    def test_activation_email_edx_domain(self):
        # edX-branded installation: edx.org contact/footer wording.
        self._create_account()
        self._assert_activation_email(self.ACTIVATION_SUBJECT, self.EDX_DOMAIN_FRAGMENTS)

    def _create_account(self):
        """Create an account, triggering the activation email. """
        url = reverse('create_account')
        params = {
            'username': 'test_user',
            'email': 'test_user@example.com',
            'password': 'edx',
            'name': 'Test User',
            'honor_code': True,
            'terms_of_service': True
        }
        resp = self.client.post(url, params)
        self.assertEqual(
            resp.status_code, 200,
            msg=u"Could not create account (status {status}). The response was {response}".format(
                status=resp.status_code,
                response=resp.content
            )
        )

    def _assert_activation_email(self, subject, body_fragments):
        """Verify that the activation email was sent. """
        # Exactly one message, with the given subject and all body fragments.
        self.assertEqual(len(mail.outbox), 1)
        msg = mail.outbox[0]
        self.assertEqual(msg.subject, subject)
        for fragment in body_fragments:
            self.assertIn(fragment, msg.body)
# Class-level patches: every test method receives the email_user mock as its
# extra argument; render_to_string is replaced by the deterministic encoder
# so assertEmailUser can reconstruct the expected subject/body strings.
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
@patch('django.contrib.auth.models.User.email_user')
class ReactivationEmailTests(EmailTestMixin, TestCase):
    """Test sending a reactivation email to a user"""

    def setUp(self):
        super(ReactivationEmailTests, self).setUp()
        self.user = UserFactory.create()
        # A user without a Registration record (never properly registered).
        self.unregisteredUser = UserFactory.create()
        self.registration = RegistrationFactory.create(user=self.user)

    def reactivation_email(self, user):
        """
        Send the reactivation email to the specified user,
        and return the response as json data.
        """
        return json.loads(reactivation_email_for_user(user).content)

    def assertReactivateEmailSent(self, email_user):
        """Assert that the correct reactivation email has been sent"""
        context = {
            'name': self.user.profile.name,
            'key': self.registration.activation_key
        }
        self.assertEmailUser(
            email_user,
            'emails/activation_email_subject.txt',
            context,
            'emails/activation_email.txt',
            context
        )

        # Thorough tests for safe_get_host are elsewhere; here we just want a quick URL sanity check
        request = RequestFactory().post('unused_url')
        request.user = self.user
        request.META['HTTP_HOST'] = "aGenericValidHostName"
        self.append_allowed_hosts("aGenericValidHostName")

        mako_middleware_process_request(request)
        body = render_to_string('emails/activation_email.txt', context)
        host = safe_get_host(request)

        # The activation link in the body must point at the requesting host.
        self.assertIn(host, body)

    def test_reactivation_email_failure(self, email_user):
        # Make the (mocked) mailer raise: the view should report failure
        # even though it attempted to send the correct email.
        self.user.email_user.side_effect = Exception
        response_data = self.reactivation_email(self.user)

        self.assertReactivateEmailSent(email_user)
        self.assertFalse(response_data['success'])

    def test_reactivation_for_unregistered_user(self, email_user):
        """
        Test that trying to send a reactivation email to an unregistered
        user fails without throwing a 500 error.
        """
        response_data = self.reactivation_email(self.unregisteredUser)

        self.assertFalse(response_data['success'])

    def test_reactivation_email_success(self, email_user):
        response_data = self.reactivation_email(self.user)

        self.assertReactivateEmailSent(email_user)
        self.assertTrue(response_data['success'])
class EmailChangeRequestTests(EventTestMixin, TestCase):
    """Test changing a user's email address"""
    def setUp(self):
        super(EmailChangeRequestTests, self).setUp('student.views.tracker')
        self.user = UserFactory.create()
        self.new_email = 'new.email@edx.org'
        self.req_factory = RequestFactory()
        self.request = self.req_factory.post('unused_url', data={
            'password': 'test',
            'new_email': self.new_email
        })
        self.request.user = self.user
        # Replace the real email sender so tests can assert whether it was called.
        self.user.email_user = Mock()
    def do_email_validation(self, email):
        """Executes validate_new_email, returning any resulting error message. """
        # NOTE(review): `err.message` is Python 2 only; on Python 3 this would
        # need `str(err)` — confirm the target runtime.
        try:
            validate_new_email(self.request.user, email)
        except ValueError as err:
            return err.message
    def do_email_change(self, user, email, activation_key=None):
        """Executes do_email_change_request, returning any resulting error message. """
        try:
            do_email_change_request(user, email, activation_key)
        except ValueError as err:
            return err.message
    def assertFailedRequest(self, response_data, expected_error):
        """Assert that `response_data` indicates a failed request that returns `expected_error`"""
        self.assertFalse(response_data['success'])
        self.assertEquals(expected_error, response_data['error'])
        self.assertFalse(self.user.email_user.called)
    @patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
    def test_duplicate_activation_key(self):
        """Assert that if two users change Email address simultaneously, no error is thrown"""
        # New emails for the users
        user1_new_email = "valid_user1_email@example.com"
        user2_new_email = "valid_user2_email@example.com"
        # Create a another user 'user2' & make request for change email
        user2 = UserFactory.create(email=self.new_email, password="test2")
        # Send requests & ensure no error was thrown
        self.assertIsNone(self.do_email_change(self.user, user1_new_email))
        self.assertIsNone(self.do_email_change(user2, user2_new_email))
    def test_invalid_emails(self):
        """
        Assert the expected error message from the email validation method for an invalid
        (improperly formatted) email address.
        """
        for email in ('bad_email', 'bad_email@', '@bad_email'):
            self.assertEqual(self.do_email_validation(email), 'Valid e-mail address required.')
    def test_change_email_to_existing_value(self):
        """ Test the error message if user attempts to change email to the existing value. """
        self.assertEqual(self.do_email_validation(self.user.email), 'Old email is the same as the new email.')
    def test_duplicate_email(self):
        """
        Assert the expected error message from the email validation method for an email address
        that is already in use by another account.
        """
        UserFactory.create(email=self.new_email)
        self.assertEqual(self.do_email_validation(self.new_email), 'An account with this e-mail already exists.')
    @patch('django.core.mail.send_mail')
    @patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
    def test_email_failure(self, send_mail):
        """ Test the return value if sending the email for the user to click fails. """
        # First send_mail call raises; the view must surface a user-facing error
        # and must not emit a setting-change analytics event.
        send_mail.side_effect = [Exception, None]
        self.assertEqual(
            self.do_email_change(self.user, "valid@email.com"),
            'Unable to send email activation link. Please try again later.'
        )
        self.assert_no_events_were_emitted()
    @patch('django.core.mail.send_mail')
    @patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
    def test_email_success(self, send_mail):
        """ Test email was sent if no errors encountered. """
        old_email = self.user.email
        new_email = "valid@example.com"
        registration_key = "test registration key"
        self.assertIsNone(self.do_email_change(self.user, new_email, registration_key))
        context = {
            'key': registration_key,
            'old_email': old_email,
            'new_email': new_email
        }
        # The subject/body templates must be rendered with the exact context above.
        send_mail.assert_called_with(
            mock_render_to_string('emails/email_change_subject.txt', context),
            mock_render_to_string('emails/email_change.txt', context),
            settings.DEFAULT_FROM_EMAIL,
            [new_email]
        )
        self.assert_event_emitted(
            SETTING_CHANGE_INITIATED, user_id=self.user.id, setting=u'email', old=old_email, new=new_email
        )
@patch('django.contrib.auth.models.User.email_user')
@patch('student.views.render_to_response', Mock(side_effect=mock_render_to_response, autospec=True))
@patch('student.views.render_to_string', Mock(side_effect=mock_render_to_string, autospec=True))
class EmailChangeConfirmationTests(EmailTestMixin, TransactionTestCase):
    """Test that confirmation of email change requests function even in the face of exceptions thrown while sending email"""
    # Uses TransactionTestCase (not TestCase) because the code under test
    # performs explicit transaction rollbacks that per-test transactions
    # would interfere with.
    def setUp(self):
        super(EmailChangeConfirmationTests, self).setUp()
        self.user = UserFactory.create()
        self.profile = UserProfile.objects.get(user=self.user)
        self.req_factory = RequestFactory()
        self.request = self.req_factory.get('unused_url')
        self.request.user = self.user
        self.user.email_user = Mock()
        self.pending_change_request = PendingEmailChangeFactory.create(user=self.user)
        self.key = self.pending_change_request.activation_key
    def assertRolledBack(self):
        """Assert that no changes to user, profile, or pending email have been made to the db"""
        self.assertEquals(self.user.email, User.objects.get(username=self.user.username).email)
        self.assertEquals(self.profile.meta, UserProfile.objects.get(user=self.user).meta)
        self.assertEquals(1, PendingEmailChange.objects.count())
    def assertFailedBeforeEmailing(self, email_user):
        """Assert that the function failed before emailing a user"""
        self.assertRolledBack()
        self.assertFalse(email_user.called)
    def check_confirm_email_change(self, expected_template, expected_context):
        """Call `confirm_email_change` and assert that the content was generated as expected

        `expected_template`: The name of the template that should have been used
            to generate the content
        `expected_context`: The context dictionary that should have been used to
            generate the content
        """
        response = confirm_email_change(self.request, self.key)
        self.assertEquals(
            mock_render_to_response(expected_template, expected_context).content,
            response.content
        )
    def assertChangeEmailSent(self, email_user):
        """Assert that the correct email was sent to confirm an email change"""
        context = {
            'old_email': self.user.email,
            'new_email': self.pending_change_request.new_email,
        }
        self.assertEmailUser(
            email_user,
            'emails/email_change_subject.txt',
            context,
            'emails/confirm_email_change.txt',
            context
        )
        # Thorough tests for safe_get_host are elsewhere; here we just want a quick URL sanity check
        request = RequestFactory().post('unused_url')
        request.user = self.user
        request.META['HTTP_HOST'] = "aGenericValidHostName"
        self.append_allowed_hosts("aGenericValidHostName")
        mako_middleware_process_request(request)
        body = render_to_string('emails/confirm_email_change.txt', context)
        url = safe_get_host(request)
        self.assertIn(url, body)
    def test_not_pending(self, email_user):
        """An unknown activation key renders the invalid-key page, untouched DB."""
        self.key = 'not_a_key'
        self.check_confirm_email_change('invalid_email_key.html', {})
        self.assertFailedBeforeEmailing(email_user)
    def test_duplicate_email(self, email_user):
        """If another account claimed the email meanwhile, confirmation fails."""
        UserFactory.create(email=self.pending_change_request.new_email)
        self.check_confirm_email_change('email_exists.html', {})
        self.assertFailedBeforeEmailing(email_user)
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
    def test_old_email_fails(self, email_user):
        # First email (to the old address) raises -> whole change rolls back.
        email_user.side_effect = [Exception, None]
        self.check_confirm_email_change('email_change_failed.html', {
            'email': self.user.email,
        })
        self.assertRolledBack()
        self.assertChangeEmailSent(email_user)
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
    def test_new_email_fails(self, email_user):
        # Second email (to the new address) raises -> whole change rolls back.
        email_user.side_effect = [None, Exception]
        self.check_confirm_email_change('email_change_failed.html', {
            'email': self.pending_change_request.new_email
        })
        self.assertRolledBack()
        self.assertChangeEmailSent(email_user)
    @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', "Test only valid in LMS")
    def test_successful_email_change(self, email_user):
        """Happy path: email updated, old address archived in profile meta,
        pending request consumed."""
        self.check_confirm_email_change('email_change_successful.html', {
            'old_email': self.user.email,
            'new_email': self.pending_change_request.new_email
        })
        self.assertChangeEmailSent(email_user)
        meta = json.loads(UserProfile.objects.get(user=self.user).meta)
        self.assertIn('old_emails', meta)
        self.assertEquals(self.user.email, meta['old_emails'][0][0])
        self.assertEquals(
            self.pending_change_request.new_email,
            User.objects.get(username=self.user.username).email
        )
        self.assertEquals(0, PendingEmailChange.objects.count())
    @patch('student.views.PendingEmailChange.objects.get', Mock(side_effect=TestException))
    @patch('student.views.transaction.rollback', wraps=django.db.transaction.rollback)
    def test_always_rollback(self, rollback, _email_user):
        # Even an unexpected exception inside the view must trigger a rollback.
        with self.assertRaises(TestException):
            confirm_email_change(self.request, self.key)
        rollback.assert_called_with()
| agpl-3.0 |
thaim/ansible | lib/ansible/utils/cmd_functions.py | 233 | 2793 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import select
import shlex
import subprocess
import sys
from ansible.module_utils.six import PY2, PY3
from ansible.module_utils._text import to_bytes
def run_cmd(cmd, live=False, readsize=10):
    """Run a shell command and capture its output.

    :arg cmd: command to run, as a single string (split with shlex)
    :kwarg live: if True, also echo output to stdout as it arrives
    :kwarg readsize: number of bytes to read from each pipe per select wakeup
    :returns: tuple of (returncode, stdout_bytes, stderr_bytes)
    """
    # On python2, shlex needs byte strings
    if PY2:
        cmd = to_bytes(cmd, errors='surrogate_or_strict')
    cmdargs = shlex.split(cmd)

    # subprocess should be passed byte strings. (on python2.6 it must be
    # passed byte strings)
    cmdargs = [to_bytes(a, errors='surrogate_or_strict') for a in cmdargs]

    p = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    stdout = b''
    stderr = b''
    rpipes = [p.stdout, p.stderr]
    while True:
        # Block up to 1s waiting for either pipe to become readable.
        rfd, wfd, efd = select.select(rpipes, [], rpipes, 1)

        if p.stdout in rfd:
            dat = os.read(p.stdout.fileno(), readsize)
            if live:
                # On python3, stdout has a codec to go from text type to bytes
                if PY3:
                    sys.stdout.buffer.write(dat)
                else:
                    sys.stdout.write(dat)
            stdout += dat
            if dat == b'':
                # EOF on this pipe; stop selecting on it.
                rpipes.remove(p.stdout)
        if p.stderr in rfd:
            dat = os.read(p.stderr.fileno(), readsize)
            stderr += dat
            if live:
                # On python3, stdout has a codec to go from text type to bytes
                if PY3:
                    sys.stdout.buffer.write(dat)
                else:
                    sys.stdout.write(dat)
            if dat == b'':
                rpipes.remove(p.stderr)
        # only break out if we've emptied the pipes, or there is nothing to
        # read from and the process has finished.
        if (not rpipes or not rfd) and p.poll() is not None:
            break
        # Calling wait while there are still pipes to read can cause a lock
        elif not rpipes and p.poll() is None:
            p.wait()

    return p.returncode, stdout, stderr
| mit |
sdanielf/dictate | dictation/config.py | 1 | 2826 | # Copyright (C) 2013 S. Daniel Francis <francis@sugarlabs.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import os
from gettext import gettext as _
import ConfigParser
from dictation.espeak import espeak_voices
# Per-user config file location; assumes $HOME is set (Unix environments).
configpath = os.path.join(os.environ['HOME'], '.dictate')
espeak_args = _('Options given to espeak. (See "espeak --help")')
# Option registry: name -> (short flag, long flag, metavar, validation spec,
# help text, default-as-string). The default is always the last tuple element.
settings = {'tbw': ('-t', '--tbw', 'TWB', {'type': 'float', 'min': 0},
                    _('Time Between Words (Word length * TBW)'), '0.5'),
            'espeak_options': ('-e', '--espeak_options', 'ARGS',
                               {'type': 'str'}, espeak_args, ''),
            'language': ('-l', '--language', 'LANG',
                         {'type': 'choice', 'options': espeak_voices},
                         _('Language voice to speak'), 'default'),
            'speed': ('-s', '--speed', 'SPEED',
                      {'type': 'int', 'min': 80, 'max': 450},
                      _('Speed in words per minute. From 80 to 450'), '80')}
options = {}
# First run: write a config file populated with defaults.
if not os.path.exists(configpath):
    config = ConfigParser.RawConfigParser()
    config.add_section('Dictation')
    for setting in settings:
        config.set('Dictation', setting, settings[setting][-1])
    configfile = open(configpath, 'w')
    config.write(configfile)
    configfile.close()
# Load the config, falling back to the registry default for any option the
# file is missing or cannot provide (values stay strings here; the get_*
# accessors below do the parsing).
config = ConfigParser.RawConfigParser()
config.read(configpath)
for i in settings:
    try:
        options[i] = config.get('Dictation', i)
    except:
        options[i] = settings[i][-1]
def get_tbw():
    """Return the time-between-words factor as a float.

    Falls back to the registry default when the configured value is missing
    or not parseable as a float.
    """
    # Narrowed from a bare `except:` which also hid KeyboardInterrupt etc.
    try:
        return float(options['tbw'])
    except (KeyError, TypeError, ValueError):
        return float(settings['tbw'][-1])
def get_espeak_options():
    """Return the extra espeak command-line arguments as a list of tokens.

    Falls back to the registry default when the configured value is missing
    or not a string.
    """
    # Narrowed from a bare `except:` which also hid KeyboardInterrupt etc.
    try:
        return options['espeak_options'].split()
    except (KeyError, AttributeError):
        return settings['espeak_options'][-1].split()
def get_language():
    """Return the configured espeak voice, or the default when the configured
    value is not a known voice."""
    selected = options['language']
    return selected if selected in espeak_voices else settings['language'][-1]
def get_speed():
    """Return the configured speech rate as a string of words per minute.

    The value must parse as an int in the range [80, 450]; otherwise the
    registry default is returned. (The valid value is returned unparsed, as a
    string, matching the historical behaviour.)
    """
    # Previously this raised a bare Exception as a goto into a bare `except:`;
    # replaced with straightforward control flow and a narrowed handler.
    try:
        speed = int(options['speed'])
    except (KeyError, TypeError, ValueError):
        return settings['speed'][-1]
    if 80 <= speed <= 450:
        return options['speed']
    return settings['speed'][-1]
| gpl-3.0 |
jessrosenfield/pants | contrib/node/src/python/pants/contrib/node/targets/node_module.py | 8 | 1589 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.payload import Payload
from pants.contrib.node.targets.npm_package import NpmPackage
class NodeModule(NpmPackage):
  """Represents a Node module."""

  def __init__(self, sources=None, address=None, payload=None, **kwargs):
    """
    :param sources: Javascript and other source code files that make up this module; paths are
                    relative to the BUILD file's directory.
    :type sources: `globs`, `rglobs` or a list of strings
    """
    # TODO(John Sirois): Support devDependencies, etc. The devDependencies case is not
    # clear-cut since pants controlled builds would provide devDependencies as needed to perform
    # tasks. The reality is likely to be though that both pants will never cover all cases, and a
    # back door to execute new tools during development will be desirable and supporting conversion
    # of pre-existing package.json files as node_module targets will require this.
    if not payload:
      payload = Payload()
    sources_field = self.create_sources_field(sources=sources,
                                              sources_rel_path=address.spec_path,
                                              key_arg='sources')
    payload.add_fields({'sources': sources_field})
    super(NodeModule, self).__init__(address=address, payload=payload, **kwargs)
| apache-2.0 |
mouadino/scrapy | scrapy/contrib/logstats.py | 19 | 2065 | from twisted.internet import task
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import NotConfigured
from scrapy.conf import settings
from scrapy import log, signals
class Slot(object):
    """Per-spider page/item counters.

    The *prev fields hold the counts as of the previous logging tick so the
    logger can compute per-interval rates.
    """
    def __init__(self):
        self.pages = 0
        self.pagesprev = 0
        self.items = 0
        self.itemsprev = 0
class LogStats(object):
    """Log basic scraping stats periodically"""

    def __init__(self):
        self.interval = settings.getfloat('LOGSTATS_INTERVAL')
        if not self.interval:
            raise NotConfigured
        self.slots = {}
        # Converts per-interval deltas into per-minute rates.
        self.multiplier = 60.0 / self.interval
        for handler, signal in ((self.item_scraped, signals.item_scraped),
                                (self.response_received, signals.response_received),
                                (self.spider_opened, signals.spider_opened),
                                (self.spider_closed, signals.spider_closed),
                                (self.engine_started, signals.engine_started),
                                (self.engine_stopped, signals.engine_stopped)):
            dispatcher.connect(handler, signal=signal)

    def item_scraped(self, spider):
        self.slots[spider].items += 1

    def response_received(self, spider):
        self.slots[spider].pages += 1

    def spider_opened(self, spider):
        self.slots[spider] = Slot()

    def spider_closed(self, spider):
        del self.slots[spider]

    def engine_started(self):
        self.tsk = task.LoopingCall(self.log)
        self.tsk.start(self.interval)

    def log(self):
        for spider, slot in self.slots.items():
            # Rate = delta since last tick, scaled to per-minute.
            pages_rate = (slot.pages - slot.pagesprev) * self.multiplier
            items_rate = (slot.items - slot.itemsprev) * self.multiplier
            slot.pagesprev, slot.itemsprev = slot.pages, slot.items
            msg = "Crawled %d pages (at %d pages/min), scraped %d items (at %d items/min)" \
                % (slot.pages, pages_rate, slot.items, items_rate)
            log.msg(msg, spider=spider)

    def engine_stopped(self):
        if self.tsk.running:
            self.tsk.stop()
| bsd-3-clause |
ealmansi/incc-tp-final | src/gensim/tut2.py | 1 | 1637 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# https://radimrehurek.com/gensim/tut2.html
import logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
import os
from gensim import corpora, models, similarities
# Reuse the dictionary/corpus produced by tutorial 1, if present.
if (os.path.exists("/tmp/deerwester.dict")):
    dictionary = corpora.Dictionary.load('/tmp/deerwester.dict')
    corpus = corpora.MmCorpus('/tmp/deerwester.mm')
    print("Used files generated from first tutorial")
else:
    print("Please run first tutorial to generate data set")
# NOTE(review): if the files above are missing, `corpus` (and `dictionary`)
# are never assigned and the next line raises NameError instead of exiting
# cleanly.
tfidf = models.TfidfModel(corpus)  # step 1 -- initialize a model
doc_bow = [(0, 1), (1, 1)]
print(tfidf[doc_bow])  # step 2 -- use the model to transform vectors
corpus_tfidf = tfidf[corpus]
for doc in corpus_tfidf:
    print(doc)
lsi = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=2)  # initialize an LSI transformation
corpus_lsi = lsi[corpus_tfidf]  # create a double wrapper over the original corpus: bow->tfidf->fold-in-lsi
lsi.print_topics(2)
for doc in corpus_lsi:  # both bow->tfidf and tfidf->lsi transformations are actually executed here, on the fly
    print(doc)
lsi.save('/tmp/model.lsi')  # same for tfidf, lda, ...
lsi = models.LsiModel.load('/tmp/model.lsi')
# Demonstrate the other transformation models available in gensim.
tfidf_model = models.TfidfModel(corpus, normalize=True)
lsi_model = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=300)
rp_model = models.RpModel(corpus_tfidf, num_topics=500)
lda_model = models.LdaModel(corpus, id2word=dictionary, num_topics=100)
hdp_model = models.HdpModel(corpus, id2word=dictionary)
print(tfidf_model)
print(lsi_model)
print(rp_model)
print(lda_model)
print(hdp_model)
| mit |
milapour/palm | palm/test/test_blink_model.py | 1 | 3374 | import nose.tools
from palm.blink_factory import SingleDarkBlinkFactory,\
DoubleDarkBlinkFactory,\
ConnectedDarkBlinkFactory
from palm.blink_model import BlinkModel
from palm.blink_parameter_set import SingleDarkParameterSet,\
DoubleDarkParameterSet,\
ConnectedDarkParameterSet
from palm.util import n_choose_k
@nose.tools.istest
def SingleDarkModelHasCorrectNumberOfStatesAndRoutes():
    """A single-dark-state model with N fluorophores should have
    C(N+3, 3) states and a non-empty route collection."""
    params = SingleDarkParameterSet()
    params.set_parameter('N', 3)
    model = SingleDarkBlinkFactory().create_model(params)
    N = params.get_parameter('N')
    expected_num_states = n_choose_k(N + 3, 3)
    num_states = model.get_num_states()
    error_message = "Got model with %d states, " \
                    "expected model with %d states.\n %s" % \
                    (num_states, expected_num_states, str(model))
    nose.tools.eq_(num_states, expected_num_states, error_message)
    nose.tools.ok_(model.get_num_routes() > 0, "Model doesn't have routes.")
@nose.tools.istest
def DoubleDarkModelHasCorrectNumberOfStatesAndRoutes():
    """A double-dark-state model with N fluorophores should have
    C(N+4, 4) states and a non-empty route collection."""
    parameter_set = DoubleDarkParameterSet()
    parameter_set.set_parameter('N', 5)
    parameter_set.set_parameter('log_kr_diff', -1.0)
    model_factory = DoubleDarkBlinkFactory()
    model = model_factory.create_model(parameter_set)
    num_states = model.get_num_states()
    N = parameter_set.get_parameter('N')
    # Two dark states -> 5 per-fluorophore states -> C(N+4, 4) compositions.
    expected_num_states = n_choose_k(N+4, 4)
    error_message = "Got model with %d states, " \
                    "expected model with %d states.\n %s" % \
                    (num_states, expected_num_states, str(model))
    nose.tools.eq_(num_states, expected_num_states,
                   error_message)
    num_routes = model.get_num_routes()
    nose.tools.ok_(num_routes > 0, "Model doesn't have routes.")
@nose.tools.istest
def initial_vector_gives_probability_one_to_state_with_all_inactive():
    """The initial probability vector concentrates all mass on the
    all-inactive state."""
    model = SingleDarkBlinkFactory().create_model(SingleDarkParameterSet())
    initial_vector = model.get_initial_probability_vector()
    all_inactive_prob = initial_vector.get_state_probability(
        model.all_inactive_state_id)
    nose.tools.eq_(all_inactive_prob, 1.0)
@nose.tools.istest
def ConnectedDarkModelHasCorrectNumberOfStatesAndRoutes():
    """A connected-dark-state model with N fluorophores should have
    C(N+4, 4) states and a non-empty route collection."""
    parameter_set = ConnectedDarkParameterSet()
    parameter_set.set_parameter('N', 3)
    parameter_set.set_parameter('log_kr2', -1.0)
    model_factory = ConnectedDarkBlinkFactory()
    model = model_factory.create_model(parameter_set)
    num_states = model.get_num_states()
    N = parameter_set.get_parameter('N')
    # Connected dark topology also yields 5 per-fluorophore states.
    expected_num_states = n_choose_k(N+4, 4)
    error_message = "Got model with %d states, " \
                    "expected model with %d states.\n %s" % \
                    (num_states, expected_num_states, str(model))
    nose.tools.eq_(num_states, expected_num_states,
                   error_message)
    num_routes = model.get_num_routes()
    nose.tools.ok_(num_routes > 0, "Model doesn't have routes.")
    # Python 2 print statements (this file predates Python 3).
    print model.state_collection
    print model.route_collection
| bsd-2-clause |
DeepVisionTeam/TensorFlowBook | Titanic/data_processing.py | 2 | 4807 | import os
import re
import pandas as pd
import tensorflow as tf
pjoin = os.path.join
DATA_DIR = pjoin(os.path.dirname(__file__), 'data')
# NOTE: these CSVs are loaded eagerly at import time; importing this module
# fails if data/train.csv or data/test.csv is missing.
train_data = pd.read_csv(pjoin(DATA_DIR, 'train.csv'))
test_data = pd.read_csv(pjoin(DATA_DIR, 'test.csv'))
# Translation:
# Don: an honorific title used in Spain, Portugal, Italy
# Dona: Feminine form for don
# Mme: Madame, Mrs
# Mlle: Mademoiselle, Miss
# Jonkheer (female equivalent: Jonkvrouw) is a Dutch honorific of nobility
# Titles treated as markers of social rank vs. ordinary titles; the split is
# used by preprocess() to derive the 'Honor' feature.
HONORABLE_TITLES = ['sir', 'lady', 'don', 'dona', 'countess', 'jonkheer',
                    'major', 'col', 'dr', 'master', 'capt']
NORMAL_TITLES = ['mr', 'ms', 'mrs', 'miss', 'mme', 'mlle', 'rev']
TITLES = HONORABLE_TITLES + NORMAL_TITLES
def get_title(name):
    """Extract the honorific title from a passenger name, lowercased.

    E.g. "Braund, Mr. Owen Harris" -> "mr".
    Raises AttributeError if no "Word." pattern is present in `name`
    (the Titanic dataset always contains one).
    """
    # Raw string fixes the invalid '\.' escape sequence in the old pattern
    # (a DeprecationWarning on Python 3.6+); the matched text is unchanged.
    title_search = re.search(r'([A-Za-z]+)\.', name)
    return title_search.group(1).lower()
def get_family(row):
    """Derive a family identifier for a passenger row.

    Returns "<surname>_<size>" for families larger than 3 members,
    "nofamily" for smaller parties, and "unknown" when no surname can be
    taken from the Name field.
    """
    surname = row['Name'].split(",")[0]
    if not surname:
        return "unknown"
    family_size = 1 + row['Parch'] + row['SibSp']
    if family_size > 3:
        return "{0}_{1}".format(surname.lower(), family_size)
    return "nofamily"
def get_deck(cabin):
    """Return the deck letter (first character) of a cabin code, or 'U'
    (unknown) for missing/NaN cabins."""
    return 'U' if pd.isnull(cabin) else cabin[:1]
class TitanicDigest(object):
    """Aggregate statistics and category vocabularies computed once from a
    dataset, used by preprocess() to fill missing values and encode
    categorical features."""
    def __init__(self, dataset):
        # Passenger counts split by sex.
        self.count_by_sex = dataset.groupby('Sex')['PassengerId'].count()
        # Fallbacks for missing Age / Fare values.
        self.mean_age = dataset['Age'].mean()
        self.mean_age_by_sex = dataset.groupby("Sex")["Age"].mean()
        self.mean_fare_by_class = dataset.groupby("Pclass")["Fare"].mean()
        # Category vocabularies; preprocess() encodes values by list index.
        self.titles = TITLES
        self.families = dataset.apply(get_family, axis=1).unique().tolist()
        self.decks = dataset["Cabin"].apply(get_deck).unique().tolist()
        self.embarkments = dataset.Embarked.unique().tolist()
        # Most common embarkation port, used to fill missing Embarked values.
        # NOTE(review): `.mode().values` is an ndarray, not a scalar — confirm
        # downstream consumers expect that.
        self.embark_mode = dataset.Embarked.dropna().mode().values
def preprocess(data, digest):
    """Clean and feature-engineer a Titanic DataFrame **in place** and return it.

    Uses statistics/vocabularies from `digest` (a TitanicDigest) to impute
    missing values and to encode categorical columns as integer indexes
    (the *F-suffixed columns).
    """
    # convert ['male', 'female'] values of Sex to [1, 0]
    data['Sex'] = data['Sex'].apply(lambda s: 1 if s == 'male' else 0)
    # fill empty age field with mean age
    data['Age'] = data['Age'].apply(
        lambda age: digest.mean_age if pd.isnull(age) else age)
    # is child flag
    data['Child'] = data['Age'].apply(lambda age: 1 if age <= 15 else 0)
    # fill fare with mean fare of the class
    def get_fare_value(row):
        if pd.isnull(row['Fare']):
            return digest.mean_fare_by_class[row['Pclass']]
        else:
            return row['Fare']
    data['Fare'] = data.apply(get_fare_value, axis=1)
    # fill Embarked with mode
    # NOTE(review): digest.embark_mode is an ndarray (`.mode().values`), so a
    # missing Embarked cell is filled with an array, not a scalar — verify
    # this is intended before the .index() encoding below.
    data['Embarked'] = data['Embarked'].apply(
        lambda e: digest.embark_mode if pd.isnull(e) else e)
    data["EmbarkedF"] = data["Embarked"].apply(digest.embarkments.index)
    #
    data['Cabin'] = data['Cabin'].apply(lambda c: 'U0' if pd.isnull(c) else c)
    # Deck
    data["Deck"] = data["Cabin"].apply(lambda cabin: cabin[0])
    data["DeckF"] = data['Deck'].apply(digest.decks.index)
    data['Title'] = data['Name'].apply(get_title)
    data['TitleF'] = data['Title'].apply(digest.titles.index)
    data['Honor'] = data['Title'].apply(
        lambda title: int(title in HONORABLE_TITLES))
    data['Family'] = data.apply(get_family, axis=1)
    # Training data only: label column for the "did not survive" class.
    if 'Survived' in data.keys():
        data['Deceased'] = data['Survived'].apply(lambda s: int(not s))
    return data
# Digest is computed from the training set only, so test-set imputation uses
# training statistics (standard practice to avoid leakage).
digest = TitanicDigest(train_data)
def get_train_data():
    """Return the preprocessed training DataFrame (mutates the module-level
    `train_data` in place)."""
    return preprocess(train_data, digest)
def get_test_data():
    """Return the preprocessed test DataFrame (mutates the module-level
    `test_data` in place)."""
    return preprocess(test_data, digest)
def _int64_feature(value):
    """Wrap a single int in a TFRecord int64 Feature."""
    return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
    """Wrap a single bytes value in a TFRecord bytes Feature."""
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def transform_to_tfrecord():
    """Convert data/train.csv row-by-row into a TFRecord file at
    data/data.tfrecords, one Example per passenger.

    NOTE(review): values coming out of a pandas DataFrame are typically numpy
    scalars (numpy.int64/float64), which do not satisfy `isinstance(value,
    int)` / `isinstance(value, float)` on Python 2 — columns may silently be
    skipped or mis-typed; confirm against the produced records.
    """
    data = pd.read_csv(pjoin(DATA_DIR, 'train.csv'))
    filepath = pjoin(DATA_DIR, 'data.tfrecords')
    writer = tf.python_io.TFRecordWriter(filepath)
    for i in range(len(data)):
        feature = {}
        for key in data.keys():
            value = data[key][i]
            # `value` is rebound to the wrapping tf Feature object below.
            if isinstance(value, int):
                value = tf.train.Feature(
                    int64_list=tf.train.Int64List(value=[value]))
            elif isinstance(value, float):
                value = tf.train.Feature(
                    float_list=tf.train.FloatList(value=[value])
                )
            elif isinstance(value, str):
                value = tf.train.Feature(
                    bytes_list=tf.train.BytesList(
                        value=[value.encode(encoding="utf-8")])
                )
            feature[key] = value
        example = tf.train.Example(
            features=tf.train.Features(feature=feature))
        writer.write(example.SerializeToString())
    writer.close()
if __name__ == '__main__':
    transform_to_tfrecord()
| apache-2.0 |
holmes/intellij-community | python/lib/Lib/site-packages/django/contrib/admin/actions.py | 83 | 2968 | """
Built-in, globally-available admin actions.
"""
from django import template
from django.core.exceptions import PermissionDenied
from django.contrib.admin import helpers
from django.contrib.admin.util import get_deleted_objects, model_ngettext
from django.db import router
from django.shortcuts import render_to_response
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext_lazy, ugettext as _
def delete_selected(modeladmin, request, queryset):
    """
    Default action which deletes the selected objects.

    This action first displays a confirmation page which shows all the
    deletable objects, or, if the user has no permission for one of the
    related children (foreign keys), a "permission denied" message.

    Next, it deletes all selected objects and redirects back to the change
    list.
    """
    opts = modeladmin.model._meta
    app_label = opts.app_label
    # Check that the user has delete permission for the actual model
    if not modeladmin.has_delete_permission(request):
        raise PermissionDenied
    using = router.db_for_write(modeladmin.model)
    # Populate deletable_objects, a data structure of all related objects that
    # will also be deleted.
    deletable_objects, perms_needed = get_deleted_objects(
        queryset, opts, request.user, modeladmin.admin_site, using)
    # The user has already confirmed the deletion.
    # Do the deletion and return a None to display the change list view again.
    if request.POST.get('post'):
        if perms_needed:
            raise PermissionDenied
        n = queryset.count()
        if n:
            # Log each deletion individually so the admin history is complete.
            for obj in queryset:
                obj_display = force_unicode(obj)
                modeladmin.log_deletion(request, obj, obj_display)
            queryset.delete()
            modeladmin.message_user(request, _("Successfully deleted %(count)d %(items)s.") % {
                "count": n, "items": model_ngettext(modeladmin.opts, n)
            })
        # Return None to display the change list page again.
        return None
    context = {
        "title": _("Are you sure?"),
        "object_name": force_unicode(opts.verbose_name),
        "deletable_objects": [deletable_objects],
        'queryset': queryset,
        "perms_lacking": perms_needed,
        "opts": opts,
        "root_path": modeladmin.admin_site.root_path,
        "app_label": app_label,
        'action_checkbox_name': helpers.ACTION_CHECKBOX_NAME,
    }
    # Display the confirmation page
    return render_to_response(modeladmin.delete_selected_confirmation_template or [
        "admin/%s/%s/delete_selected_confirmation.html" % (app_label, opts.object_name.lower()),
        "admin/%s/delete_selected_confirmation.html" % app_label,
        "admin/delete_selected_confirmation.html"
    ], context, context_instance=template.RequestContext(request))
delete_selected.short_description = ugettext_lazy("Delete selected %(verbose_name_plural)s")
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/requests/packages/chardet/chardistribution.py | 2755 | 9226 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTWCharToFreqOrder, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKRCharToFreqOrder, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312CharToFreqOrder, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (Big5CharToFreqOrder, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JISCharToFreqOrder, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
from .compat import wrap_ord
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3


class CharDistributionAnalysis:
    """Base class for per-encoding character frequency-distribution analysers.

    Subclasses supply the char-order -> frequency-rank table and implement
    get_order() for their encoding; this class accumulates counts and turns
    them into a confidence value.
    """

    def __init__(self):
        # Mapping table from char order (see get_order()) to frequency rank;
        # set by subclasses.
        self._mCharToFreqOrder = None
        self._mTableSize = None  # Size of above table
        # Language-specific constant used when computing confidence. See
        # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
        # for further detail.
        self._mTypicalDistributionRatio = None
        self.reset()

    def reset(self):
        """reset analyser, clear any state"""
        # True once detection is done and a conclusion has been made.
        self._mDone = False
        self._mTotalChars = 0  # Total characters encountered
        # Number of characters whose frequency rank is below 512.
        self._mFreqChars = 0

    def feed(self, aBuf, aCharLen):
        """feed a character with known length"""
        # Only 2-byte characters take part in the distribution analysis.
        order = self.get_order(aBuf) if aCharLen == 2 else -1
        if order < 0:
            return
        self._mTotalChars += 1
        if order < self._mTableSize and self._mCharToFreqOrder[order] < 512:
            self._mFreqChars += 1

    def get_confidence(self):
        """return confidence based on existing data"""
        # Too little data in our consideration range -> no confidence.
        if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
            return SURE_NO
        if self._mTotalChars != self._mFreqChars:
            rare_count = self._mTotalChars - self._mFreqChars
            ratio = self._mFreqChars / (rare_count
                                        * self._mTypicalDistributionRatio)
            if ratio < SURE_YES:
                return ratio
        # Normalize confidence: never claim 100% certainty.
        return SURE_YES

    def got_enough_data(self):
        # A conclusion can be drawn before all data has been seen.
        return self._mTotalChars > ENOUGH_DATA_THRESHOLD

    def get_order(self, aBuf):
        # Subclasses convert an encoded character into a numeric "order" so
        # multiple encodings of one language can share a frequency table.
        # The base class recognises nothing.
        return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
    """EUC-TW flavour of the character distribution analyser."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCTWCharToFreqOrder
        self._mTableSize = EUCTW_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCTW_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # EUC-TW two-byte characters: first byte 0xC4-0xFE, second byte
        # 0xA1-0xFE.  No validation needed here; the state machine has
        # already checked the sequence.
        lead = wrap_ord(aBuf[0])
        if lead < 0xC4:
            return -1
        return 94 * (lead - 0xC4) + wrap_ord(aBuf[1]) - 0xA1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
    """EUC-KR flavour of the character distribution analyser."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = EUCKRCharToFreqOrder
        self._mTableSize = EUCKR_TABLE_SIZE
        self._mTypicalDistributionRatio = EUCKR_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # EUC-KR two-byte characters: first byte 0xB0-0xFE, second byte
        # 0xA1-0xFE.  The state machine has already validated the bytes.
        lead = wrap_ord(aBuf[0])
        if lead < 0xB0:
            return -1
        return 94 * (lead - 0xB0) + wrap_ord(aBuf[1]) - 0xA1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
    """GB2312 flavour of the character distribution analyser."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = GB2312CharToFreqOrder
        self._mTableSize = GB2312_TABLE_SIZE
        self._mTypicalDistributionRatio = GB2312_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # GB2312 two-byte characters: first byte 0xB0-0xFE and second byte
        # 0xA1-0xFE.  Bytes are pre-validated by the state machine.
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if lead >= 0xB0 and trail >= 0xA1:
            return 94 * (lead - 0xB0) + trail - 0xA1
        return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
    """Big5 flavour of the character distribution analyser."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = Big5CharToFreqOrder
        self._mTableSize = BIG5_TABLE_SIZE
        self._mTypicalDistributionRatio = BIG5_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # Big5 two-byte characters: first byte 0xA4-0xFE; second byte is
        # either 0x40-0x7E or 0xA1-0xFE.  Already validated upstream.
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if lead < 0xA4:
            return -1
        row = 157 * (lead - 0xA4)
        if trail >= 0xA1:
            # High trail range sits after the 63 low-range cells of the row.
            return row + trail - 0xA1 + 63
        return row + trail - 0x40
class SJISDistributionAnalysis(CharDistributionAnalysis):
    """Shift-JIS flavour of the character distribution analyser."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # Shift-JIS lead byte ranges 0x81-0x9F and 0xE0-0xEF select the row.
        lead, trail = wrap_ord(aBuf[0]), wrap_ord(aBuf[1])
        if 0x81 <= lead <= 0x9F:
            row = lead - 0x81
        elif 0xE0 <= lead <= 0xEF:
            row = lead - 0xE0 + 31
        else:
            return -1
        # NOTE(review): trail bytes above 0x7F are rejected here although the
        # original comment claims the 0x81-0xFE trail range is valid too;
        # behaviour preserved from the original -- confirm upstream intent.
        if trail > 0x7F:
            return -1
        return 188 * row + trail - 0x40
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
    """EUC-JP flavour of the character distribution analyser."""

    def __init__(self):
        CharDistributionAnalysis.__init__(self)
        self._mCharToFreqOrder = JISCharToFreqOrder
        self._mTableSize = JIS_TABLE_SIZE
        self._mTypicalDistributionRatio = JIS_TYPICAL_DISTRIBUTION_RATIO

    def get_order(self, aBuf):
        # EUC-JP two-byte characters: first byte 0xA0-0xFE, second byte
        # 0xA1-0xFE; validation already done by the state machine.
        lead = wrap_ord(aBuf[0])
        if lead < 0xA0:
            return -1
        # NOTE(review): a lead byte of exactly 0xA0 yields a negative row
        # term (0xA0 - 0xA1); preserved from the original -- confirm intent.
        return 94 * (lead - 0xA1) + wrap_ord(aBuf[1]) - 0xA1
| bsd-3-clause |
ptisserand/ansible | lib/ansible/modules/packaging/os/apt_repository.py | 13 | 19320 | #!/usr/bin/python
# encoding: utf-8
# Copyright: (c) 2012, Matt Wright <matt@nobien.net>
# Copyright: (c) 2013, Alexander Saltanov <asd@mokote.com>
# Copyright: (c) 2014, Rutger Spiertz <rutger@kumina.nl>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: apt_repository
short_description: Add and remove APT repositories
description:
- Add or remove an APT repositories in Ubuntu and Debian.
notes:
- This module works on Debian, Ubuntu and their derivatives.
- This module supports Debian Squeeze (version 6) as well as its successors.
options:
repo:
description:
- A source string for the repository.
required: true
state:
description:
- A source string state.
choices: [ absent, present ]
default: "present"
mode:
description:
- The octal mode for newly created files in sources.list.d
default: 0644
version_added: "1.6"
update_cache:
description:
- Run the equivalent of C(apt-get update) when a change occurs. Cache updates are run after making changes.
type: bool
default: "yes"
validate_certs:
description:
- If C(no), SSL certificates for the target repo will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
type: bool
default: 'yes'
version_added: '1.8'
filename:
description:
- Sets the name of the source list file in sources.list.d.
Defaults to a file name based on the repository source url.
The .list extension will be automatically added.
version_added: '2.1'
codename:
description:
- Override the distribution codename to use for PPA repositories.
Should usually only be set when working with a PPA on a non-Ubuntu target (e.g. Debian or Mint)
version_added: '2.3'
author:
- Alexander Saltanov (@sashka)
version_added: "0.7"
requirements:
- python-apt (python 2)
- python3-apt (python 3)
'''
EXAMPLES = '''
# Add specified repository into sources list.
- apt_repository:
repo: deb http://archive.canonical.com/ubuntu hardy partner
state: present
# Add specified repository into sources list using specified filename.
- apt_repository:
repo: deb http://dl.google.com/linux/chrome/deb/ stable main
state: present
filename: google-chrome
# Add source repository into sources list.
- apt_repository:
repo: deb-src http://archive.canonical.com/ubuntu hardy partner
state: present
# Remove specified repository from sources list.
- apt_repository:
repo: deb http://archive.canonical.com/ubuntu hardy partner
state: absent
# Add nginx stable repository from PPA and install its signing key.
# On Ubuntu target:
- apt_repository:
repo: ppa:nginx/stable
# On Debian target
- apt_repository:
repo: 'ppa:nginx/stable'
codename: trusty
'''
import glob
import json
import os
import re
import sys
import tempfile
try:
import apt
import apt_pkg
import aptsources.distro as aptsources_distro
distro = aptsources_distro.get_distro()
HAVE_PYTHON_APT = True
except ImportError:
distro = None
HAVE_PYTHON_APT = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import fetch_url
if sys.version_info[0] < 3:
PYTHON_APT = 'python-apt'
else:
PYTHON_APT = 'python3-apt'
DEFAULT_SOURCES_PERM = 0o0644
VALID_SOURCE_TYPES = ('deb', 'deb-src')
def install_python_apt(module):
    """Best-effort bootstrap of the python-apt bindings on the target host.

    Runs ``apt-get update`` followed by ``apt-get install python(3)-apt``
    and then re-imports the bindings so the rest of this module can use
    them.  Fails the module run on any error.
    """
    if not module.check_mode:
        apt_get_path = module.get_bin_path('apt-get')
        if apt_get_path:
            rc, so, se = module.run_command([apt_get_path, 'update'])
            if rc != 0:
                module.fail_json(msg="Failed to auto-install %s. Error was: '%s'" % (PYTHON_APT, se.strip()))
            rc, so, se = module.run_command([apt_get_path, 'install', PYTHON_APT, '-y', '-q'])
            if rc == 0:
                # Rebind the module-level names that were left unset when the
                # top-of-file import of apt/apt_pkg failed.
                global apt, apt_pkg, aptsources_distro, distro, HAVE_PYTHON_APT
                import apt
                import apt_pkg
                import aptsources.distro as aptsources_distro
                distro = aptsources_distro.get_distro()
                HAVE_PYTHON_APT = True
            else:
                module.fail_json(msg="Failed to auto-install %s. Error was: '%s'" % (PYTHON_APT, se.strip()))
        else:
            # NOTE(review): this branch fires when apt-get is not found, yet
            # the message talks about check mode -- confirm intent upstream.
            module.fail_json(msg="%s must be installed to use check mode" % PYTHON_APT)
class InvalidSource(Exception):
    """Raised for a line that is not a valid, enabled APT source spec."""
# Simple version of aptsources.sourceslist.SourcesList.
# No advanced logic and no backups inside.
class SourcesList(object):
    """Parse, edit and write back APT sources.list(.d) files.

    Sources are grouped per file in ``self.files`` as lists of
    ``(n, valid, enabled, source, comment)`` tuples.
    """

    def __init__(self, module):
        self.module = module
        self.files = {}  # group sources by file
        # Repositories that we're adding -- used to implement mode param
        self.new_repos = set()
        self.default_file = self._apt_cfg_file('Dir::Etc::sourcelist')
        # read sources.list if it exists
        if os.path.isfile(self.default_file):
            self.load(self.default_file)
        # read sources.list.d
        for file in glob.iglob('%s/*.list' % self._apt_cfg_dir('Dir::Etc::sourceparts')):
            self.load(file)

    def __iter__(self):
        '''Simple iterator to go over all sources. Empty, non-source, and other not valid lines will be skipped.'''
        for file, sources in self.files.items():
            for n, valid, enabled, source, comment in sources:
                if valid:
                    yield file, n, enabled, source, comment
        # BUGFIX: the original ended with ``raise StopIteration``, which PEP
        # 479 turns into a RuntimeError inside a generator on Python 3.7+;
        # simply falling off the end terminates iteration.

    def _expand_path(self, filename):
        """Return an absolute sources.list.d path for bare file names."""
        if '/' in filename:
            return filename
        else:
            return os.path.abspath(os.path.join(self._apt_cfg_dir('Dir::Etc::sourceparts'), filename))

    def _suggest_filename(self, line):
        """Derive a .list file name from a source line, unless the user
        supplied an explicit ``filename`` parameter."""
        def _cleanup_filename(s):
            filename = self.module.params['filename']
            if filename is not None:
                return filename
            return '_'.join(re.sub('[^a-zA-Z0-9]', ' ', s).split())

        def _strip_username_password(s):
            if '@' in s:
                s = s.split('@', 1)
                s = s[-1]
            return s

        # Drop options and protocols.
        line = re.sub(r'\[[^\]]+\]', '', line)
        line = re.sub(r'\w+://', '', line)
        # split line into valid keywords
        parts = [part for part in line.split() if part not in VALID_SOURCE_TYPES]
        # Drop usernames and passwords
        parts[0] = _strip_username_password(parts[0])
        return '%s.list' % _cleanup_filename(' '.join(parts[:1]))

    def _parse(self, line, raise_if_invalid_or_disabled=False):
        """Split one sources.list line into (valid, enabled, source, comment)."""
        valid = False
        enabled = True
        source = ''
        comment = ''
        line = line.strip()
        if line.startswith('#'):
            enabled = False
            line = line[1:]
        # Check for another "#" in the line and treat a part after it as a comment.
        i = line.find('#')
        if i > 0:
            comment = line[i + 1:].strip()
            line = line[:i]
        # Split a source into substring to make sure that it is source spec.
        # Duplicated whitespaces in a valid source spec will be removed.
        source = line.strip()
        if source:
            chunks = source.split()
            if chunks[0] in VALID_SOURCE_TYPES:
                valid = True
                source = ' '.join(chunks)
        if raise_if_invalid_or_disabled and (not valid or not enabled):
            raise InvalidSource(line)
        return valid, enabled, source, comment

    @staticmethod
    def _apt_cfg_file(filespec):
        '''
        Wrapper for `apt_pkg` module for running with Python 2.5
        '''
        try:
            result = apt_pkg.config.find_file(filespec)
        except AttributeError:
            result = apt_pkg.Config.FindFile(filespec)
        return result

    @staticmethod
    def _apt_cfg_dir(dirspec):
        '''
        Wrapper for `apt_pkg` module for running with Python 2.5
        '''
        try:
            result = apt_pkg.config.find_dir(dirspec)
        except AttributeError:
            result = apt_pkg.Config.FindDir(dirspec)
        return result

    def load(self, file):
        """Parse *file* and record its sources in ``self.files``."""
        group = []
        # BUGFIX: close the file handle (the original leaked it).
        with open(file, 'r') as f:
            for n, line in enumerate(f):
                valid, enabled, source, comment = self._parse(line)
                group.append((n, valid, enabled, source, comment))
        self.files[file] = group

    def save(self):
        """Write every tracked file back atomically; delete files whose
        source list became empty."""
        for filename, sources in list(self.files.items()):
            if sources:
                d, fn = os.path.split(filename)
                fd, tmp_path = tempfile.mkstemp(prefix=".%s-" % fn, dir=d)
                # BUGFIX: close (and therefore flush) the temporary file
                # before atomic_move; the original moved it while the
                # buffered writer was still open, risking truncated output.
                with os.fdopen(fd, 'w') as f:
                    for n, valid, enabled, source, comment in sources:
                        chunks = []
                        if not enabled:
                            chunks.append('# ')
                        chunks.append(source)
                        if comment:
                            chunks.append(' # ')
                            chunks.append(comment)
                        chunks.append('\n')
                        line = ''.join(chunks)
                        try:
                            f.write(line)
                        except IOError as err:
                            self.module.fail_json(msg="Failed to write to file %s: %s" % (tmp_path, to_native(err)))
                self.module.atomic_move(tmp_path, filename)
                # allow the user to override the default mode
                if filename in self.new_repos:
                    this_mode = self.module.params.get('mode', DEFAULT_SOURCES_PERM)
                    self.module.set_mode_if_different(filename, this_mode, False)
            else:
                del self.files[filename]
                if os.path.exists(filename):
                    os.remove(filename)

    def dump(self):
        """Return ``{filename: rendered file content}`` for non-empty files."""
        dumpstruct = {}
        for filename, sources in self.files.items():
            if sources:
                lines = []
                for n, valid, enabled, source, comment in sources:
                    chunks = []
                    if not enabled:
                        chunks.append('# ')
                    chunks.append(source)
                    if comment:
                        chunks.append(' # ')
                        chunks.append(comment)
                    chunks.append('\n')
                    lines.append(''.join(chunks))
                dumpstruct[filename] = ''.join(lines)
        return dumpstruct

    def _choice(self, new, old):
        # Keep the old value when no replacement was supplied.
        if new is None:
            return old
        return new

    def modify(self, file, n, enabled=None, source=None, comment=None):
        '''
        This function to be used with iterator, so we don't care of invalid sources.
        If source, enabled, or comment is None, original value from line ``n`` will be preserved.
        '''
        valid, enabled_old, source_old, comment_old = self.files[file][n][1:]
        self.files[file][n] = (n, valid, self._choice(enabled, enabled_old), self._choice(source, source_old), self._choice(comment, comment_old))

    def _add_valid_source(self, source_new, comment_new, file):
        # We'll try to reuse disabled source if we have it.
        # If we have more than one entry, we will enable them all - no advanced logic, remember.
        found = False
        for filename, n, enabled, source, comment in self:
            if source == source_new:
                self.modify(filename, n, enabled=True)
                found = True
        if not found:
            if file is None:
                file = self.default_file
            else:
                file = self._expand_path(file)
            if file not in self.files:
                self.files[file] = []
            files = self.files[file]
            files.append((len(files), True, True, source_new, comment_new))
            self.new_repos.add(file)

    def add_source(self, line, comment='', file=None):
        """Enable *line* wherever it already exists, or append it."""
        source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
        # Prefer separate files for new sources.
        self._add_valid_source(source, comment, file=file or self._suggest_filename(source))

    def _remove_valid_source(self, source):
        # If we have more than one entry, we will remove them all (not comment, remove!)
        # NOTE(review): popping by index while iterating shifts later indices;
        # behaviour preserved from the original -- harmless for single matches.
        for filename, n, enabled, src, comment in self:
            if source == src and enabled:
                self.files[filename].pop(n)

    def remove_source(self, line):
        """Remove every enabled occurrence of *line*."""
        source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
        self._remove_valid_source(source)
class UbuntuSourcesList(SourcesList):
    """SourcesList specialisation that also understands ``ppa:`` lines."""
    # Launchpad REST endpoint used to look up PPA metadata (signing key).
    LP_API = 'https://launchpad.net/api/1.0/~%s/+archive/%s'
    def __init__(self, module, add_ppa_signing_keys_callback=None):
        self.module = module
        self.add_ppa_signing_keys_callback = add_ppa_signing_keys_callback
        # codename param overrides the detected distribution codename
        # (useful for PPAs on non-Ubuntu derivatives).
        self.codename = module.params['codename'] or distro.codename
        super(UbuntuSourcesList, self).__init__(module)
    def _get_ppa_info(self, owner_name, ppa_name):
        """Fetch PPA metadata (JSON) from the Launchpad API."""
        lp_api = self.LP_API % (owner_name, ppa_name)
        headers = dict(Accept='application/json')
        response, info = fetch_url(self.module, lp_api, headers=headers)
        if info['status'] != 200:
            self.module.fail_json(msg="failed to fetch PPA information, error was: %s" % info['msg'])
        return json.loads(to_native(response.read()))
    def _expand_ppa(self, path):
        """Turn ``ppa:owner/name`` into a full deb line plus owner and name."""
        ppa = path.split(':')[1]
        ppa_owner = ppa.split('/')[0]
        try:
            ppa_name = ppa.split('/')[1]
        except IndexError:
            # "ppa:owner" with no archive name defaults to the owner's "ppa".
            ppa_name = 'ppa'
        line = 'deb http://ppa.launchpad.net/%s/%s/ubuntu %s main' % (ppa_owner, ppa_name, self.codename)
        return line, ppa_owner, ppa_name
    def _key_already_exists(self, key_fingerprint):
        # apt-key writes warnings to stderr for unknown keys; empty stderr is
        # taken to mean the key is present.
        rc, out, err = self.module.run_command('apt-key export %s' % key_fingerprint, check_rc=True)
        return len(err) == 0
    def add_source(self, line, comment='', file=None):
        """Add a deb line or a ``ppa:`` spec, importing its signing key."""
        if line.startswith('ppa:'):
            source, ppa_owner, ppa_name = self._expand_ppa(line)
            if source in self.repos_urls:
                # repository already exists
                return
            # The callback is None in check mode, so no key import happens then.
            if self.add_ppa_signing_keys_callback is not None:
                info = self._get_ppa_info(ppa_owner, ppa_name)
                if not self._key_already_exists(info['signing_key_fingerprint']):
                    command = ['apt-key', 'adv', '--recv-keys', '--keyserver', 'hkp://keyserver.ubuntu.com:80', info['signing_key_fingerprint']]
                    self.add_ppa_signing_keys_callback(command)
            file = file or self._suggest_filename('%s_%s' % (line, self.codename))
        else:
            source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
            file = file or self._suggest_filename(source)
        self._add_valid_source(source, comment, file)
    def remove_source(self, line):
        """Remove a deb line or the expansion of a ``ppa:`` spec."""
        if line.startswith('ppa:'):
            source = self._expand_ppa(line)[0]
        else:
            source = self._parse(line, raise_if_invalid_or_disabled=True)[2]
        self._remove_valid_source(source)
    @property
    def repos_urls(self):
        """Return all valid, enabled source lines with ppa: specs expanded."""
        _repositories = []
        for parsed_repos in self.files.values():
            for parsed_repo in parsed_repos:
                valid = parsed_repo[1]
                enabled = parsed_repo[2]
                source_line = parsed_repo[3]
                if not valid or not enabled:
                    continue
                if source_line.startswith('ppa:'):
                    source, ppa_owner, ppa_name = self._expand_ppa(source_line)
                    _repositories.append(source)
                else:
                    _repositories.append(source_line)
        return _repositories
def get_add_ppa_signing_key_callback(module):
    """Return a callable that imports a PPA signing key, or None in check mode."""
    if module.check_mode:
        # Nothing may be executed on the target during check mode.
        return None

    def _run_command(command):
        # check_rc=True aborts the whole module run on a non-zero exit.
        module.run_command(command, check_rc=True)

    return _run_command
def main():
    """Entry point: add or remove one APT repository and report the result."""
    module = AnsibleModule(
        argument_spec=dict(
            repo=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            mode=dict(type='raw'),
            update_cache=dict(type='bool', default=True, aliases=['update-cache']),
            filename=dict(type='str'),
            # This should not be needed, but exists as a failsafe
            install_python_apt=dict(type='bool', default=True),
            validate_certs=dict(type='bool', default=True),
            codename=dict(type='str'),
        ),
        supports_check_mode=True,
    )
    params = module.params
    repo = module.params['repo']
    state = module.params['state']
    update_cache = module.params['update_cache']
    # Note: mode is referenced in SourcesList class via the passed in module (self here)
    sourceslist = None
    # Bootstrap the python-apt bindings if they were missing at import time.
    if not HAVE_PYTHON_APT:
        if params['install_python_apt']:
            install_python_apt(module)
        else:
            module.fail_json(msg='%s is not installed, and install_python_apt is False' % PYTHON_APT)
    if not repo:
        module.fail_json(msg='Please set argument \'repo\' to a non-empty value')
    # Only Debian-family targets (where python-apt detects a Distribution)
    # are supported.
    if isinstance(distro, aptsources_distro.Distribution):
        sourceslist = UbuntuSourcesList(module, add_ppa_signing_keys_callback=get_add_ppa_signing_key_callback(module))
    else:
        module.fail_json(msg='Module apt_repository is not supported on target.')
    # Snapshot before/after to decide whether anything changed.
    sources_before = sourceslist.dump()
    try:
        if state == 'present':
            sourceslist.add_source(repo)
        elif state == 'absent':
            sourceslist.remove_source(repo)
    except InvalidSource as err:
        module.fail_json(msg='Invalid repository string: %s' % to_native(err))
    sources_after = sourceslist.dump()
    changed = sources_before != sources_after
    if changed and module._diff:
        # Build a per-file diff; missing files are shown as /dev/null.
        diff = []
        for filename in set(sources_before.keys()).union(sources_after.keys()):
            diff.append({'before': sources_before.get(filename, ''),
                         'after': sources_after.get(filename, ''),
                         'before_header': (filename, '/dev/null')[filename not in sources_before],
                         'after_header': (filename, '/dev/null')[filename not in sources_after]})
    else:
        diff = {}
    # Persist the edits (and refresh the apt cache) only outside check mode.
    if changed and not module.check_mode:
        try:
            sourceslist.save()
            if update_cache:
                cache = apt.Cache()
                cache.update()
        except OSError as err:
            module.fail_json(msg=to_native(err))
    module.exit_json(changed=changed, repo=repo, state=state, diff=diff)
if __name__ == '__main__':
main()
| gpl-3.0 |
MarkTseng/django-farmersale | farmersale-env/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py | 2930 | 11275 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
TIS620ThaiModel = {
'charToOrderMap': TIS620CharToOrderMap,
'precedenceMatrix': ThaiLangModel,
'mTypicalPositiveRatio': 0.926386,
'keepEnglishLetter': False,
'charsetName': "TIS-620"
}
# flake8: noqa
| mit |
crodrigues96/mtasa-blue | vendor/google-breakpad/src/tools/gyp/test/subdirectory/gyptest-SYMROOT-default.py | 399 | 1260 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target and a subsidiary dependent target from a
.gyp file in a subdirectory, without specifying an explicit output build
directory, and using the generated solution or project file at the top
of the tree as the entry point.
The configuration sets the Xcode SYMROOT variable and uses --depth=
to make Xcode behave like the other build tools--that is, put all
built targets in a single output build directory at the top of the tree.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('prog1.gyp', '-Dset_symroot=1', '--depth=.', chdir='src')
test.relocate('src', 'relocate/src')
# Suppress the test infrastructure's setting SYMROOT on the command line.
test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')
test.run_built_executable('prog1',
stdout="Hello from prog1.c\n",
chdir='relocate/src')
test.run_built_executable('prog2',
stdout="Hello from prog2.c\n",
chdir='relocate/src')
test.pass_test()
| gpl-3.0 |
tomzw11/Pydrone | route.py | 1 | 2000 | import matplotlib.pyplot as plt
import matplotlib.patches as patches
def route(root, x_ratio=0.42, y_top_ratio=0.42, y_bottom_ratio=-0.15):
    """Compute the four child waypoints ("anchors") beneath a parent waypoint.

    The parent hovers at ``root`` = [x, y, height]; each child hovers at half
    the parent's altitude, offset horizontally and vertically by fixed
    fractions of the parent's height (one anchor per corner of the parent's
    field of view). The defaults reproduce the original hard-coded geometry.

    Args:
        root: sequence [x, y, height] of the parent waypoint.
        x_ratio: horizontal anchor offset as a fraction of height.
        y_top_ratio: y offset of the two upper anchors (fraction of height).
        y_bottom_ratio: y offset of the two lower anchors (fraction of height).

    Returns:
        A list of four [x, y, z] coordinates, ordered counter-clockwise
        starting from the upper-right anchor.
    """
    x, y, height = root[0], root[1], root[2]
    # True division: the original `height/2` floor-divided odd integer
    # heights under Python 2, silently dropping half a unit of altitude.
    child_z = height / 2.0
    dx = x_ratio * height
    y_top = y + y_top_ratio * height
    y_bottom = y + y_bottom_ratio * height
    return [
        [x + dx, y_top, child_z],
        [x - dx, y_top, child_z],
        [x - dx, y_bottom, child_z],
        [x + dx, y_bottom, child_z],
    ]
if __name__ == "__main__":
meter_to_feet = 3.28
root = [0,0,16*1]
print 'root',root,'\n'
level1 = route(root)
print 'level 1 \n'
print level1[0],'\n'
print level1[1],'\n'
print level1[2],'\n'
print level1[3],'\n'
print 'level 2 \n'
level2 = [[0]*3]*4
for x in xrange(4):
level2[x] = route(level1[x])
for y in xrange(4):
print 'level2 point[',x+1,y+1,']',level2[x][y],'\n'
fig, ax = plt.subplots()
ball, = plt.plot(6.72+1.52,6.72+1.52,'mo')
plt.plot(0,0,'bo')
plt.plot([level1[0][0],level1[1][0],level1[2][0],level1[3][0]],[level1[0][1],level1[1][1],level1[2][1],level1[3][1]],'ro')
rect_blue = patches.Rectangle((-13.44,-4.8),13.44*2,9.12*2,linewidth=1,edgecolor='b',facecolor='b',alpha = 0.1)
ax.add_patch(rect_blue)
rect_red = patches.Rectangle((0,4.23),13.44,9.12,linewidth=1,edgecolor='r',facecolor='r',alpha = 0.3)
ax.add_patch(rect_red)
plt.plot([level2[0][0][0],level2[0][1][0],level2[0][2][0],level2[0][3][0]],[level2[0][0][1],level2[0][1][1],level2[0][2][1],level2[0][3][1]],'go')
rect_green = patches.Rectangle((6.72,6.72+4.23/2),13.44/2,9.12/2,linewidth=1,edgecolor='g',facecolor='g',alpha = 0.5)
ax.add_patch(rect_green)
linear_s = [12,12]
plt.plot(12,12,'yo')
rect_yellow = patches.Rectangle((10,11),13.44/4,9.12/4,linewidth=1,edgecolor='y',facecolor='y',alpha = 0.5)
ax.add_patch(rect_yellow)
ax.legend([ball,rect_blue,rect_red,rect_green,rect_yellow],['Ball','Root View','Level 1 - 4 anchors','Level 2 - 16 anchors','Linear Search - 64 anchors'])
plt.axis([-13.44, 13.44, -4.8, 13.44])
plt.show()
| mit |
goodfeli/pylearn2 | pylearn2/training_algorithms/tests/test_default.py | 44 | 1798 | import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.models.rbm import RBM
from pylearn2.models.s3c import S3C, E_Step, Grad_M_Step
from pylearn2.training_algorithms.default import DefaultTrainingAlgorithm
from pylearn2.training_algorithms.training_algorithm import NoBatchSizeError
def test_multiple_monitoring_datasets():
    """Smoke-test that DefaultTrainingAlgorithm accepts a dict of several
    named monitoring datasets (not just a single dataset)."""
    # tests that DefaultTrainingAlgorithm can take multiple
    # monitoring datasets.

    BATCH_SIZE = 1
    BATCHES = 3
    dim = 4
    m = 10  # number of examples per dataset

    # Fixed seed so the test data is deterministic across runs.
    rng = np.random.RandomState([2014, 2, 25])
    X = rng.randn(m, dim)
    Y = rng.randn(m, dim)

    train = DenseDesignMatrix(X=X)
    test = DenseDesignMatrix(X=Y)

    # Passing a dict here is the feature under test: each entry should become
    # a separately named set of monitoring channels.
    algorithm = DefaultTrainingAlgorithm(
        batch_size=BATCH_SIZE,
        batches_per_iter=BATCHES,
        monitoring_dataset={'train': train, 'test': test})

    # Tiny S3C model with learning_rate=0 in the M step, so train() exercises
    # the machinery without actually moving parameters -- keeps the test cheap.
    model = S3C(nvis=dim, nhid=1,
                irange=.01, init_bias_hid=0., init_B=1.,
                min_B=1., max_B=1., init_alpha=1.,
                min_alpha=1., max_alpha=1., init_mu=0.,
                m_step=Grad_M_Step(learning_rate=0.),
                e_step=E_Step(h_new_coeff_schedule=[1.]))
    algorithm.setup(model=model, dataset=train)
    algorithm.train(dataset=train)
def test_unspecified_batch_size():
    """Check that calling setup() without ever specifying a batch size raises
    NoBatchSizeError rather than failing in some less informative way."""
    # Test that failing to specify the batch size results in a
    # NoBatchSizeError

    m = 1  # one example is enough to drive setup()
    dim = 2
    rng = np.random.RandomState([2014, 3, 17])
    X = rng.randn(m, dim)
    train = DenseDesignMatrix(X=X)
    rbm = RBM(nvis=dim, nhid=3)
    trainer = DefaultTrainingAlgorithm()
    try:
        trainer.setup(rbm, train)
    except NoBatchSizeError:
        # Expected failure mode: the algorithm refuses to run without a
        # batch size.
        return
    raise AssertionError("Missed the lack of a batch size")
if __name__ == '__main__':
    # Direct invocation runs only the multi-dataset test (the batch-size test
    # is picked up by the test runner, not by this guard).
    test_multiple_monitoring_datasets()
| bsd-3-clause |
davidzchen/tensorflow | tensorflow/compiler/tests/eager_test.py | 4 | 26149 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test cases for eager execution using XLA."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.layers import convolutional
from tensorflow.python.layers import pooling
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_random_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.platform import googletest
from tensorflow.python.training import adam
class EagerTest(xla_test.XLATestCase):
def testBasic(self):
with self.test_scope():
three = constant_op.constant(3)
five = constant_op.constant(5)
product = three * five
self.assertAllEqual(15, product)
def testGradientTape(self):
with self.test_scope():
x = constant_op.constant(1.0)
y = constant_op.constant(10.0)
with backprop.GradientTape(persistent=True) as tape:
tape.watch(x)
tape.watch(y)
a = x + y + x * y
da_dx = tape.gradient(a, x)
da_dy = tape.gradient(a, y)
self.assertEqual(11.0, da_dx.numpy())
self.assertEqual(2.0, da_dy.numpy())
def testExecuteListOutputLen0(self):
with self.test_scope():
empty = constant_op.constant([], dtype=dtypes.float32)
result = array_ops.unstack(empty, 0)
self.assertTrue(isinstance(result, list))
self.assertEqual(0, len(result))
def testExecuteListOutputLen1(self):
with self.test_scope():
split_dim = constant_op.constant(1)
value = constant_op.constant([[0., 1., 2.], [3., 4., 5.]])
result = array_ops.split(value, 1, axis=split_dim)
self.assertTrue(isinstance(result, list))
self.assertEqual(1, len(result))
self.assertAllEqual([[0, 1, 2], [3, 4, 5]], result[0])
def testExecuteListOutputLen3(self):
with self.test_scope():
split_dim = constant_op.constant(1)
value = constant_op.constant([[0., 1., 2.], [3., 4., 5.]])
result = array_ops.split(value, 3, axis=split_dim)
self.assertTrue(isinstance(result, list))
self.assertEqual(3, len(result))
self.assertAllEqual([[0], [3]], result[0])
self.assertAllEqual([[1], [4]], result[1])
self.assertAllEqual([[2], [5]], result[2])
def testBasicGraph(self):
# Run some ops eagerly
with self.test_scope():
three = constant_op.constant(3)
five = constant_op.constant(5)
product = three * five
self.assertAllEqual(15, product)
# Run some ops graphly
with context.graph_mode(), self.session():
with self.test_scope():
three = constant_op.constant(3)
five = constant_op.constant(5)
product = three * five
self.assertAllEqual(15, self.evaluate(product))
def testDegenerateSlices(self):
with self.test_scope():
npt = np.arange(1, 19, dtype=np.float32).reshape(3, 2, 3)
t = constant_op.constant(npt)
# degenerate by offering a forward interval with a negative stride
self.assertAllEqual(npt[0:-1:-1, :, :], t[0:-1:-1, :, :])
# degenerate with a reverse interval with a positive stride
self.assertAllEqual(npt[-1:0, :, :], t[-1:0, :, :])
# empty interval in every dimension
self.assertAllEqual(npt[-1:0, 2:2, 2:3:-1], t[-1:0, 2:2, 2:3:-1])
def testIdentity(self):
with self.test_scope():
self.assertAllEqual(2, array_ops.identity(2))
def testRandomOps(self):
with self.test_scope():
tensor = gen_random_ops.random_uniform((2, 2), dtypes.float32)
row0 = tensor[0].numpy()
row1 = tensor[1].numpy()
# It should be very unlikely to rng to generate two equal rows.
self.assertFalse((row0 == row1).all())
def testIdentityOnVariable(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(True)
i = array_ops.identity(v)
self.assertAllEqual(True, i.numpy())
def testAssignAddVariable(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(1.0)
v.assign_add(2.0)
self.assertEqual(3.0, v.numpy())
def testReadAssignRead(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(1.0)
val1 = v.read_value()
v.assign_add(2.0)
val2 = v.read_value()
self.assertEqual(1.0, val1.numpy())
self.assertEqual(3.0, val2.numpy())
def testGradient(self):
def f(x):
return x
with self.test_scope():
grad_fn = backprop.gradients_function(f)
self.assertAllEqual(2., grad_fn(1., dy=2.)[0])
def testVariableGradient(self):
with self.test_scope():
v0 = resource_variable_ops.ResourceVariable(1.0)
def f():
x = v0 * v0
return x
grads = backprop.implicit_grad(f)()
self.assertEqual(2., grads[0][0].numpy())
def testMultipleVariableReads(self):
# This test makes sure consecutive variable reads don't copy
# the underlying memory.
with self.test_scope():
# Create 128MiB variables
var = resource_variable_ops.ResourceVariable(
array_ops.ones([32, 1024, 1024]))
# Read the same variable 100 times. If the underlying tensor
# is not copied, this is a trivial operation. If it is copied,
# this will eat over 13GB and OOM.
values = []
for _ in range(100):
values.append(var.value())
# The shape, shape_n, size, and rank are tested here because their
# execution kernels (as opposed to compilation only tf2xla kernels)
# are distincts from tf2xla kernels.
def testShape(self):
def const(value):
return array_ops.shape(
constant_op.constant(value)).numpy()
def ones(value):
return array_ops.shape(
array_ops.ones(value)).numpy()
with self.test_scope():
# Shapes of directly constructed tensors
self.assertAllEqual([], const(3))
self.assertAllEqual([3], const([1.0, 2.0, 3.0]))
self.assertAllEqual([2, 2], const([[1.0, 2.0], [3.0, 4.0]]))
self.assertAllEqual([2, 1, 2], const([[[1.0, 2.0]], [[3.0, 4.0]]]))
# Shapes of tensors created by op running on device
# We make this distinction because directly constructed tensors
# are treated differently in a few places that can influence shape:
# - they always have on_host_tensor
# - they and their shapes can be cached
# - they end up on device via a copy, instead of as program output
self.assertAllEqual([], ones([]))
self.assertAllEqual([3], ones([3]))
self.assertAllEqual([2, 2], ones([2, 2]))
self.assertAllEqual([2, 1, 2], ones([2, 1, 2]))
def testShapeN(self):
with self.test_scope():
# Shapes of directly constructed tensors
shapes = array_ops.shape_n([
constant_op.constant(1.0),
constant_op.constant([1.0, 2.0, 3.0]),
constant_op.constant([[1.0, 2.0], [3.0, 4.0]])])
self.assertAllEqual(
[[], [3], [2, 2]],
[x.numpy().tolist() for x in shapes])
# Shapes of tensors created by op running on device
shapes = array_ops.shape_n([
array_ops.ones([]),
array_ops.ones([3]),
array_ops.ones([2, 2])])
self.assertAllEqual(
[[], [3], [2, 2]],
[x.numpy().tolist() for x in shapes])
def testSize(self):
with self.test_scope():
self.assertEqual(
1, array_ops.size(constant_op.constant(1.0)).numpy())
self.assertEqual(
3, array_ops.size(constant_op.constant([1.0, 2.0, 3.0])).numpy())
self.assertEqual(
4, array_ops.size(
constant_op.constant([[1.0, 2.0], [3.0, 4.0]])).numpy())
def testRank(self):
with self.test_scope():
self.assertEqual(
0, array_ops.rank(constant_op.constant(1.0)).numpy())
self.assertEqual(
1, array_ops.rank(constant_op.constant([1.0, 2.0, 3.0])).numpy())
self.assertEqual(
2, array_ops.rank(
constant_op.constant([[1.0, 2.0], [3.0, 4.0]])).numpy())
def testAdam(self):
with self.test_scope():
optimizer = adam.AdamOptimizer(0.1)
x = resource_variable_ops.ResourceVariable(10.0)
with backprop.GradientTape() as tape:
y = x * x
dy_dx = tape.gradient(y, x)
optimizer.apply_gradients([(dy_dx, x)])
self.assertAlmostEqual(9.9, x.numpy(), places=3)
def testAdamSparse(self):
with ops.device('/cpu:0'):
# Create 2-D embedding for 3 objects on CPU because sparse/sliced updates
# are not implemented on TPU.
embedding_matrix = resource_variable_ops.ResourceVariable(
array_ops.ones([3, 2]))
with self.test_scope():
with backprop.GradientTape() as tape:
embedding = embedding_ops.embedding_lookup(embedding_matrix, [1])
y = math_ops.reduce_sum(embedding)
dy_dx = tape.gradient(y, embedding_matrix)
self.assertIsInstance(dy_dx, ops.IndexedSlices)
optimizer = adam.AdamOptimizer(0.1)
# The gradient application operations will run on CPU because optimizer
# updates are always collocated with the variable.
optimizer.apply_gradients([(dy_dx, embedding_matrix)])
# This assign_add will run on CPU because when an input to an
# operation is a resource, this operation is placed on the resource's
# device by the eager runtime.
embedding_matrix.assign_add(array_ops.ones([3, 2]))
self.assertAllClose([[2.0, 2.0],
[1.9, 1.9],
[2.0, 2.0]], embedding_matrix.numpy())
class EagerFunctionTest(xla_test.XLATestCase):
def testBasic(self):
with self.test_scope():
matmul = function.defun(math_ops.matmul)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq = matmul(t, t, transpose_a=True)
self.assertAllEqual(sq.numpy().reshape(-1), [10, 14, 14, 20])
def testConv(self):
if 'GPU' in self.device:
# TODO(b/32333178)
self.skipTest('Current implementation of RandomStandardNormal kernel '
'is very slow on GPU, and has been denylisted.')
with self.test_scope():
data_format = 'channels_last'
conv = convolutional.Conv2D(
filters=1, kernel_size=2, padding='VALID',
data_format=data_format, activation=nn_ops.relu,
kernel_initializer=init_ops.ones_initializer(),
bias_initializer=init_ops.zeros_initializer())
pool = pooling.MaxPooling2D(2, 2, data_format=data_format)
def model(x):
x = conv(x)
return pool(x)
model = function.defun(model)
x = array_ops.ones([1, 4, 4, 1])
y = model(x)
self.assertAllEqual(y.numpy(), [[[[4.]]]])
def testReadVariable(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(1.0)
@function.defun
def f():
return v.read_value()
var = f()
self.assertEqual(1.0, var.numpy())
def testResourceVariableNoInlineReadWrite(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(1.0)
w = resource_variable_ops.ResourceVariable(0.0)
@function.defun_with_attributes(attributes={'_noinline': True})
def g(x):
w.assign(w.read_value() + x)
return v.read_value() + x * w.read_value()
@function.defun_with_attributes(attributes={'_noinline': True})
def f():
return g(1.0) + g(2.0) + g(3.0) + g(4.0) + g(5.0)
# 1 + 1*1 + 1 + 2*3 + 1 + 3*6 + 1 + 4*10 + 1 + 5*15
self.assertEqual(145.0, f().numpy())
self.assertEqual(15.0, w.read_value().numpy())
def testResourceVariableNoInlineReadOnly(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(10.0)
@function.defun_with_attributes(attributes={'_noinline': True})
def g():
return v.read_value()
@function.defun_with_attributes(attributes={'_noinline': True})
def f():
return g() + g() + g() + g() + g()
self.assertEqual(50.0, f().numpy())
def testResourceVariableNoInlineWriteOnly(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(0.0)
@function.defun_with_attributes(attributes={'_noinline': True})
def g(x):
v.assign(x)
@function.defun_with_attributes(attributes={'_noinline': True})
def f():
g(1.0)
g(2.0)
g(3.0)
g(4.0)
g(5.0)
f()
self.assertEqual(5.0, v.read_value().numpy())
def testUpdateVariable(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable(1.0)
def f(v):
v.assign_add(1.0)
return v
f = function.defun(f)
var = f(v)
self.assertEqual(2.0, var.numpy())
def testReturnResourceHandle(self):
with self.test_scope():
v = resource_variable_ops.ResourceVariable([[1.0, 2.0], [3.0, 4.0]])
def f(v):
return v.handle
f = function.defun(f)
handle = f(v)
self.assertAllEqual(v.numpy(),
resource_variable_ops.read_variable_op(
handle, dtypes.float32).numpy())
def testReturnMultipleResourceHandles(self):
with self.test_scope():
v1 = resource_variable_ops.ResourceVariable(1.25)
v2 = resource_variable_ops.ResourceVariable(2.0)
def f(v):
return v.handle, 3.0 * v, v2.handle, v + v2
f = function.defun(f)
v1_handle, v1_times_3, v2_handle, variable_sum = f(v1)
self.assertAllEqual(v1.numpy(),
resource_variable_ops.read_variable_op(
v1_handle, dtypes.float32).numpy())
self.assertEqual(3.75, v1_times_3.numpy())
self.assertAllEqual(v2.numpy(),
resource_variable_ops.read_variable_op(
v2_handle, dtypes.float32).numpy())
self.assertEqual(3.25, variable_sum.numpy())
def testAllArgumentKinds(self):
"""Test a complex function that takes different argument kinds.
tf2xla machinery that translates, compiles, and runs defuns
classifies arguments into: compile-time constants, regular tensors,
and resources. This test creates a function with a mix of all these
kinds. Moreover, the order of function arguments is intentionally mixed up.
This also tests the case when the same argument is a compile-time constant
as well as used in an operation that normally expects its inputs to be
in device memory - addition in this case.
"""
with self.test_scope():
def foo(c1, r1, v1, c2, v2, r2):
# c1 and c2 are compile-time constants
# r1 and r2 are regular tensors
# v1 and v2 are resource variables
a = c1 + r1
b = math_ops.cast(c2, dtypes.float32) + v2
c = array_ops.slice(v1, c1, c2)
d = r2 * v2
return a, b, c, d
foo = function.defun(foo)
c1 = [0, 0]
c2 = array_ops.ones([2], dtype=dtypes.int32)
r1 = array_ops.ones([2])
r2 = [[2., 2.], [3., 3.]]
v1 = resource_variable_ops.ResourceVariable([[1., 2.], [3., 4.]])
v2 = resource_variable_ops.ResourceVariable([[10., 20.], [30., 40.]])
a, b, c, d = foo(c1, r1, v1, c2, v2, r2)
self.assertAllEqual([1, 1], a.numpy())
self.assertAllEqual([[11., 21.], [31., 41.]], b.numpy())
self.assertAllEqual([[1.]], c.numpy())
self.assertAllEqual([[20., 40.], [90., 120.]], d.numpy())
def testDefunInGradientTape(self):
with self.test_scope():
v0 = resource_variable_ops.ResourceVariable(5.0)
@function.defun
def f(x):
x = v0 * v0 * x
return x
x = constant_op.constant(3.0)
with backprop.GradientTape() as tape:
y = f(x)
dy = tape.gradient(y, v0)
self.assertEqual(75, y.numpy())
self.assertEqual(30, dy.numpy())
def testGradientTapeInDefun(self):
with self.test_scope():
v0 = resource_variable_ops.ResourceVariable(5.0)
@function.defun
def f():
x = constant_op.constant(1.0)
with backprop.GradientTape() as tape:
y = v0 * x
dy = tape.gradient(y, v0)
return dy
dy = f()
self.assertEqual(1.0, dy.numpy())
def testSliceInDefun(self):
with self.test_scope():
@function.defun
def f(x, y):
return x[0::2, y:, ...]
x = array_ops.ones([2, 3, 4], dtype=dtypes.float32)
y = array_ops.ones([], dtype=dtypes.int32)
with backprop.GradientTape() as tape:
tape.watch(x)
tape.watch(y)
z = f(x, y)
dz = tape.gradient(z, x)
self.assertAllEqual(np.ones([1, 2, 4]), z.numpy())
self.assertAllEqual((2, 3, 4), dz.shape.as_list())
def testNestedDefun(self):
with self.test_scope():
@function.defun
def times_two(x):
return 2. * x
@function.defun
def two_x_plus_1(x):
return times_two(x) + 1.
x = constant_op.constant([2., 3., 4.])
y = two_x_plus_1(x)
self.assertAllEqual([5., 7., 9.], y.numpy())
def testNestedDefunWithVariable(self):
with self.test_scope():
v0 = resource_variable_ops.ResourceVariable(5.0)
@function.defun
def g(x):
x = v0 * x
return x
@function.defun
def f(x):
x = g(v0 * x)
return x
x = constant_op.constant(3.0)
y = f(x)
self.assertEqual(75.0, y.numpy())
def testNestedDefunInGradientTape(self):
with self.test_scope():
v0 = resource_variable_ops.ResourceVariable(5.0)
@function.defun
def g(x):
x = v0 * x
return x
@function.defun
def f(x):
x = g(v0 * x)
return x
x = constant_op.constant(3.0)
with backprop.GradientTape() as tape:
y = f(x)
dy = tape.gradient(y, v0)
self.assertEqual(75, y.numpy())
self.assertEqual(30, dy.numpy())
def testNestedDefunInGradientTapeDifferentVars(self):
with self.test_scope():
v0 = resource_variable_ops.ResourceVariable(5.0)
v1 = resource_variable_ops.ResourceVariable(3.0)
@function.defun
def g(x):
x = v1 * x
return x
@function.defun
def f(x):
x = g(v0 * x)
return x
x = constant_op.constant(3.0)
with backprop.GradientTape(persistent=True) as tape:
y = f(x)
dy_v0 = tape.gradient(y, v0)
dy_v1 = tape.gradient(y, v1)
self.assertEqual(45, y.numpy())
self.assertEqual(9, dy_v0.numpy())
self.assertEqual(15, dy_v1.numpy())
def testWhileInDefun(self):
with self.test_scope():
@def_function.function
def f(start):
c = lambda x: math_ops.less(x, 13.0)
b = lambda x: math_ops.add(x, 1.0)
return control_flow_ops.while_loop(c, b, [start])
y = f(constant_op.constant(3.0))
self.assertEqual(13.0, y.numpy())
def testAutoGraphWhileInDefun(self):
with self.test_scope():
@def_function.function
def f(start):
x = start
while x < 13.0:
x += 1.0
return x
y = f(constant_op.constant(3.0))
self.assertEqual(13.0, y.numpy())
def testCondInDefun(self):
with self.test_scope():
@def_function.function
def f(pred, value):
fn1 = lambda: math_ops.add(value, 1.0)
fn2 = lambda: math_ops.subtract(value, 1.0)
return control_flow_ops.cond(pred, fn1, fn2)
plus_one = f(constant_op.constant(True), constant_op.constant(10.0))
minus_one = f(constant_op.constant(False), constant_op.constant(10.0))
self.assertEqual(11.0, plus_one.numpy())
self.assertEqual(9.0, minus_one.numpy())
def testAutoGraphCondInDefun(self):
with self.test_scope():
@def_function.function
def f(pred, value):
if pred:
return value + 1.0
else:
return value - 1.0
plus_one = f(constant_op.constant(True), constant_op.constant(10.0))
minus_one = f(constant_op.constant(False), constant_op.constant(10.0))
self.assertEqual(11.0, plus_one.numpy())
self.assertEqual(9.0, minus_one.numpy())
def testScanInDefun(self):
with self.test_scope():
elems = constant_op.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], name='data')
v = constant_op.constant(2.0, name='v')
@def_function.function
def f(y):
# pylint: disable=unnecessary-lambda
return functional_ops.scan(
lambda a, x: math_ops.multiply(a, x), y, initializer=v)
# pylint: enable=unnecessary-lambda
r = f(elems)
self.assertAllEqual([2., 4., 12., 48., 240., 1440.], self.evaluate(r))
def testFeedDeviceMemoryToOpExpectingHostMemory(self):
@function.defun
def f(dims, value):
return array_ops.fill(dims, value)
with self.test_scope():
x = constant_op.constant([4], dtype=dtypes.int64)
y = f(x, 3)
self.assertAllEqual([3, 3, 3, 3], y)
def testRequestNotToCompile(self):
with self.test_scope():
def f(x):
with ops.device('device:CPU:0'):
y = 2.0 * x
return x, y
wholly_compiled_f = def_function.function(f)
op_by_op_f = def_function.function(f, experimental_compile=False)
x = array_ops.identity([0.0, 2.0], name='data')
# When function is wholly compiled, all outputs will be on the
# device on which it is run.
r_x, r_y = wholly_compiled_f(x)
self.assertAllEqual([0.0, 2.0], r_x)
self.assertAllEqual([0.0, 4.0], r_y)
if context.executing_eagerly():
# backing_device is only available for eager tensors.
self.assertRegex(r_x.backing_device, self.device)
self.assertRegex(r_y.backing_device, self.device)
# When function is executed op-by-op, requested devices will be
# respected.
r_x, r_y = op_by_op_f(x)
self.assertAllEqual([0.0, 2.0], r_x)
self.assertAllEqual([0.0, 4.0], r_y)
if context.executing_eagerly():
# backing_device is only available for eager tensors.
self.assertRegex(r_x.backing_device, self.device)
self.assertRegex(r_y.backing_device, 'device:CPU:0')
class ExcessivePaddingTest(xla_test.XLATestCase):
  """Test that eager execution works with TPU flattened tensors.

  Tensors that would normally be excessively padded when written
  to TPU memory are reshaped to 1-D flat tensors.

  This test case verifies that such tensors work with eager execution.
  The flattening currently only happens on TPU, but tests should work
  fine with all backends as flattening is transparent.
  """

  def testFromConstant(self):
    """A directly-constructed constant must reduce correctly per-axis."""
    with self.test_scope():
      # Create constant of shape [100, 2, 1]. This tensor would be
      # excessively padded on TPU.
      tensor = constant_op.constant(100 * [[[10.0], [2.0]]])
      # Use reduce_sum since it requires correctly working with
      # a particular dimension.
      reduced = math_ops.reduce_sum(tensor, axis=1)
      self.assertAllEqual(100 * [[12.0]], reduced)

  def testFromOperation(self):
    """Same check for a tensor produced by an op running on the device."""
    with self.test_scope():
      tensor = array_ops.ones([3, 100, 2, 2])
      reduced = math_ops.reduce_sum(tensor, axis=[0, 2, 3])
      self.assertAllEqual(100 * [12.0], reduced)

  def testAsFunctionInput(self):
    """Flattened tensors must be usable as defun inputs."""
    with self.test_scope():

      @function.defun
      def f(x):
        return math_ops.reduce_sum(x, axis=2)

      tensor = constant_op.constant(100 * [[[10.0, 2.0]]])
      reduced = f(tensor)
      self.assertAllEqual(100 * [[12.0]], reduced)

  def testAsFunctionOutput(self):
    """Flattened tensors must be usable as defun outputs."""
    with self.test_scope():

      @function.defun
      def f(x):
        return x * constant_op.constant(100 * [[[10.0, 2.0]]])

      y = f(3)
      reduced = math_ops.reduce_sum(y, axis=2)
      self.assertAllEqual(100 * [[36.0]], reduced)
def multiple_tpus():
  """Return True iff more than one TPU device is visible to the runtime."""
  tpu_count = sum(
      1 for name in context.context().devices() if 'device:TPU:' in name)
  return tpu_count > 1
class MultiDeviceTest(xla_test.XLATestCase):
  """Test running TPU computation on more than one core."""

  def testBasic(self):
    # Needs at least two TPU cores; skip (not fail) on smaller topologies.
    if not multiple_tpus():
      self.skipTest('MultiDeviceTest requires multiple TPU devices.')

    # Compute 10 on TPU core 0
    with ops.device('device:TPU:0'):
      two = constant_op.constant(2)
      five = constant_op.constant(5)
      ten = two * five
      self.assertAllEqual(10, ten)

    # Compute 6 on TPU core 1
    with ops.device('device:TPU:1'):
      two = constant_op.constant(2)
      three = constant_op.constant(3)
      six = two * three
      self.assertAllEqual(6, six)

    # Copy 10 and 6 to CPU and sum them -- exercises cross-device transfer.
    self.assertAllEqual(16, ten + six)
if __name__ == '__main__':
  # log_device_placement makes per-op device assignment visible in the test
  # log, which helps debug the multi-device cases above.
  ops.enable_eager_execution(
      config=config_pb2.ConfigProto(log_device_placement=True))
  googletest.main()
| apache-2.0 |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyQt4/QtGui/QVector2D.py | 2 | 5675 | # encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python2.7/dist-packages/PyQt4/QtGui.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QVector2D(): # skipped bases: <type 'sip.simplewrapper'>
"""
QVector2D()
QVector2D(float, float)
QVector2D(QPoint)
QVector2D(QPointF)
QVector2D(QVector3D)
QVector2D(QVector4D)
QVector2D(QVector2D)
"""
def dotProduct(self, QVector2D, QVector2D_1): # real signature unknown; restored from __doc__
""" QVector2D.dotProduct(QVector2D, QVector2D) -> float """
return 0.0
def isNull(self): # real signature unknown; restored from __doc__
""" QVector2D.isNull() -> bool """
return False
def length(self): # real signature unknown; restored from __doc__
""" QVector2D.length() -> float """
return 0.0
def lengthSquared(self): # real signature unknown; restored from __doc__
""" QVector2D.lengthSquared() -> float """
return 0.0
def normalize(self): # real signature unknown; restored from __doc__
""" QVector2D.normalize() """
pass
def normalized(self): # real signature unknown; restored from __doc__
""" QVector2D.normalized() -> QVector2D """
return QVector2D
def setX(self, p_float): # real signature unknown; restored from __doc__
""" QVector2D.setX(float) """
pass
def setY(self, p_float): # real signature unknown; restored from __doc__
""" QVector2D.setY(float) """
pass
def toPoint(self): # real signature unknown; restored from __doc__
""" QVector2D.toPoint() -> QPoint """
pass
def toPointF(self): # real signature unknown; restored from __doc__
""" QVector2D.toPointF() -> QPointF """
pass
def toVector3D(self): # real signature unknown; restored from __doc__
""" QVector2D.toVector3D() -> QVector3D """
return QVector3D
def toVector4D(self): # real signature unknown; restored from __doc__
""" QVector2D.toVector4D() -> QVector4D """
return QVector4D
def x(self): # real signature unknown; restored from __doc__
""" QVector2D.x() -> float """
return 0.0
def y(self): # real signature unknown; restored from __doc__
""" QVector2D.y() -> float """
return 0.0
def __add__(self, y): # real signature unknown; restored from __doc__
""" x.__add__(y) <==> x+y """
pass
def __div__(self, y): # real signature unknown; restored from __doc__
""" x.__div__(y) <==> x/y """
pass
def __eq__(self, y): # real signature unknown; restored from __doc__
""" x.__eq__(y) <==> x==y """
pass
def __ge__(self, y): # real signature unknown; restored from __doc__
""" x.__ge__(y) <==> x>=y """
pass
def __gt__(self, y): # real signature unknown; restored from __doc__
""" x.__gt__(y) <==> x>y """
pass
def __iadd__(self, y): # real signature unknown; restored from __doc__
""" x.__iadd__(y) <==> x+=y """
pass
def __idiv__(self, y): # real signature unknown; restored from __doc__
""" x.__idiv__(y) <==> x/=y """
pass
def __imul__(self, y): # real signature unknown; restored from __doc__
""" x.__imul__(y) <==> x*=y """
pass
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
def __isub__(self, y): # real signature unknown; restored from __doc__
""" x.__isub__(y) <==> x-=y """
pass
def __itruediv__(self, y): # real signature unknown; restored from __doc__
""" x.__itruediv__(y) <==> x/y """
pass
def __le__(self, y): # real signature unknown; restored from __doc__
""" x.__le__(y) <==> x<=y """
pass
def __lt__(self, y): # real signature unknown; restored from __doc__
""" x.__lt__(y) <==> x<y """
pass
def __mul__(self, y): # real signature unknown; restored from __doc__
""" x.__mul__(y) <==> x*y """
pass
def __neg__(self): # real signature unknown; restored from __doc__
""" x.__neg__() <==> -x """
pass
def __ne__(self, y): # real signature unknown; restored from __doc__
""" x.__ne__(y) <==> x!=y """
pass
def __radd__(self, y): # real signature unknown; restored from __doc__
""" x.__radd__(y) <==> y+x """
pass
def __rdiv__(self, y): # real signature unknown; restored from __doc__
""" x.__rdiv__(y) <==> y/x """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
def __rmul__(self, y): # real signature unknown; restored from __doc__
""" x.__rmul__(y) <==> y*x """
pass
def __rsub__(self, y): # real signature unknown; restored from __doc__
""" x.__rsub__(y) <==> y-x """
pass
def __rtruediv__(self, y): # real signature unknown; restored from __doc__
""" x.__rtruediv__(y) <==> y/x """
pass
def __sub__(self, y): # real signature unknown; restored from __doc__
""" x.__sub__(y) <==> x-y """
pass
def __truediv__(self, y): # real signature unknown; restored from __doc__
""" x.__truediv__(y) <==> x/y """
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
| gpl-2.0 |
SUSE/azure-sdk-for-python | azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2017_03_01/models/registry_name_status.py | 9 | 1388 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RegistryNameStatus(Model):
    """Outcome of a container-registry name-availability check.

    :param name_available: Whether the requested registry name can be used.
    :type name_available: bool
    :param reason: Reason the name is unavailable, if any.
    :type reason: str
    :param message: Detailed error text for ``reason``, if any.
    :type message: str
    """

    # Maps Python attribute names to their wire (JSON) names and types
    # for msrest (de)serialization.
    _attribute_map = {
        'name_available': {'key': 'nameAvailable', 'type': 'bool'},
        'reason': {'key': 'reason', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, name_available=None, reason=None, message=None):
        for attr_name, attr_value in (
            ('name_available', name_available),
            ('reason', reason),
            ('message', message),
        ):
            setattr(self, attr_name, attr_value)
| mit |
v1bri/gnuradio | grc/gui/external_editor.py | 12 | 2662 | """
Copyright 2015 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import os
import sys
import time
import threading
import tempfile
import subprocess
class ExternalEditor(threading.Thread):
    """Watch a temp file that holds ``value`` and report external edits.

    The value is written to a temp file, an external editor is launched on
    it, and this daemon thread polls the file's mtime; whenever the file is
    saved, ``callback`` is invoked with the new (unicode) contents. The temp
    file is removed when the thread exits.
    """

    def __init__(self, editor, name, value, callback):
        threading.Thread.__init__(self)
        self.daemon = True
        self._stop_event = threading.Event()
        self.editor = editor
        self.callback = callback
        self.filename = self._create_tempfile(name, value)

    def _create_tempfile(self, name, value):
        """Write *value* (unicode) UTF-8-encoded to a new temp file; return its path."""
        with tempfile.NamedTemporaryFile(
            mode='wb', prefix=name + '_', suffix='.py', delete=False,
        ) as fp:
            fp.write(value.encode('utf-8'))
            return fp.name

    def open_editor(self):
        """Launch the configured editor on the temp file; return the Popen object."""
        proc = subprocess.Popen(args=(self.editor, self.filename))
        proc.poll()  # reap immediately in case the editor exited at once
        return proc

    def stop(self):
        """Ask the monitor thread to exit (takes effect within ~1s)."""
        self._stop_event.set()

    def run(self):
        filename = self.filename
        # print "file monitor: started for", filename
        last_change = os.path.getmtime(filename)
        try:
            while not self._stop_event.is_set():
                mtime = os.path.getmtime(filename)
                if mtime > last_change:
                    # print "file monitor: reload trigger for", filename
                    last_change = mtime
                    with open(filename, 'rb') as fp:
                        data = fp.read().decode('utf-8')
                    self.callback(data)
                # Event.wait() instead of time.sleep() so stop() interrupts
                # the poll interval immediately instead of after a full second.
                self._stop_event.wait(1)
        except Exception as e:
            # `print >> sys.stderr, ...` was Python-2-only syntax and is a
            # SyntaxError on Python 3; stderr.write() works on both.
            sys.stderr.write("file monitor crashed: %s\n" % str(e))
        finally:
            try:
                os.remove(self.filename)
            except OSError:
                pass
if __name__ == '__main__':
    # Manual smoke test: open the temp file in gedit, edit and save it, and
    # watch the new contents get printed for ~15 seconds.
    def p(data):
        # print(data) is valid on both Python 2 and 3; the original
        # `print data` statement is a SyntaxError on Python 3.
        print(data)

    e = ExternalEditor('/usr/bin/gedit', "test", "content", p)
    e.open_editor()
    e.start()
    time.sleep(15)
    e.stop()
    e.join()
| gpl-3.0 |
nexusz99/boto | tests/integration/sqs/test_bigmessage.py | 114 | 2688 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some unit tests for the SQSConnection
"""
import time
from threading import Timer
from tests.unit import unittest
import boto
from boto.compat import StringIO
from boto.sqs.bigmessage import BigMessage
from boto.exception import SQSError
class TestBigMessage(unittest.TestCase):
sqs = True
def test_1_basic(self):
c = boto.connect_sqs()
# create a queue so we can test BigMessage
queue_name = 'test%d' % int(time.time())
timeout = 60
queue = c.create_queue(queue_name, timeout)
self.addCleanup(c.delete_queue, queue, True)
queue.set_message_class(BigMessage)
# create a bucket with the same name to store the message in
s3 = boto.connect_s3()
bucket = s3.create_bucket(queue_name)
self.addCleanup(s3.delete_bucket, queue_name)
time.sleep(30)
# now add a message
msg_body = 'This is a test of the big message'
fp = StringIO(msg_body)
s3_url = 's3://%s' % queue_name
message = queue.new_message(fp, s3_url=s3_url)
queue.write(message)
time.sleep(30)
s3_object_name = message.s3_url.split('/')[-1]
# Make sure msg body is in bucket
self.assertTrue(bucket.lookup(s3_object_name))
m = queue.read()
self.assertEqual(m.get_body().decode('utf-8'), msg_body)
m.delete()
time.sleep(30)
# Make sure msg is deleted from bucket
self.assertIsNone(bucket.lookup(s3_object_name))
| mit |
sbalde/edx-platform | lms/djangoapps/verify_student/models.py | 20 | 44086 | # -*- coding: utf-8 -*-
"""
Models for Student Identity Verification
This is where we put any models relating to establishing the real-life identity
of a student over a period of time. Right now, the only models are the abstract
`PhotoVerification`, and its one concrete implementation
`SoftwareSecurePhotoVerification`. The hope is to keep as much of the
photo verification process as generic as possible.
"""
import functools
import json
import logging
from datetime import datetime, timedelta
from email.utils import formatdate
import pytz
import requests
import uuid
from lazy import lazy
from opaque_keys.edx.keys import UsageKey
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext as _, ugettext_lazy
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from config_models.models import ConfigurationModel
from course_modes.models import CourseMode
from model_utils.models import StatusModel
from model_utils import Choices
from verify_student.ssencrypt import (
random_aes_key, encrypt_and_encode,
generate_signed_message, rsa_encrypt
)
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule_django.models import CourseKeyField
log = logging.getLogger(__name__)
def generateUUID():  # pylint: disable=invalid-name
    """Return a freshly generated random (version 4) UUID as a string."""
    new_id = uuid.uuid4()
    return '%s' % new_id
class VerificationException(Exception):
    """Raised when a verification attempt is in the wrong status for a
    requested state transition (see `status_before_must_be`)."""
    pass
def status_before_must_be(*valid_start_statuses):
    """
    Decorator factory: guard a method so it may only run while the object's
    ``status`` attribute is one of *valid_start_statuses*. Use it on a model
    that participates in a status workflow::

        @status_before_must_be("submitted", "approved", "denied")
        def refund_user(self, user_id):
            # Do logic here...

    Calling the decorated method while the object is in any other status
    raises `VerificationException`. This keeps workflow preconditions out of
    the method bodies themselves.
    """
    def _decorate(func):
        """Wrap ``func`` with the status precondition check."""
        @functools.wraps(func)
        def _checked(obj, *args, **kwargs):
            # Guard clause: fail loudly before touching any state.
            if obj.status not in valid_start_statuses:
                raise VerificationException(
                    u"Error calling {} {}: status is '{}', must be one of: {}".format(
                        func, obj, obj.status, valid_start_statuses
                    )
                )
            return func(obj, *args, **kwargs)
        return _checked
    return _decorate
class PhotoVerification(StatusModel):
    """
    Each PhotoVerification represents a Student's attempt to establish
    their identity by uploading a photo of themselves and a picture ID. An
    attempt actually has a number of fields that need to be filled out at
    different steps of the approval process. While it's useful as a Django Model
    for the querying facilities, **you should only edit a `PhotoVerification`
    object through the methods provided**. Initialize them with a user:

        attempt = PhotoVerification(user=user)

    We track this attempt through various states:

    `created`
        Initial creation and state we're in after uploading the images.
    `ready`
        The user has uploaded their images and checked that they can read the
        images. There's a separate state here because it may be the case that we
        don't actually submit this attempt for review until payment is made.
    `submitted`
        Submitted for review. The review may be done by a staff member or an
        external service. The user cannot make changes once in this state.
    `must_retry`
        We submitted this, but there was an error on submission (i.e. we did not
        get a 200 when we POSTed to Software Secure)
    `approved`
        An admin or an external service has confirmed that the user's photo and
        photo ID match up, and that the photo ID's name matches the user's.
    `denied`
        The request has been denied. See `error_msg` for details on why. An
        admin might later override this and change to `approved`, but the
        student cannot re-open this attempt -- they have to create another
        attempt and submit it instead.

    Because this Model inherits from StatusModel, we can also do things like::

        attempt.status == PhotoVerification.STATUS.created
        attempt.status == "created"
        pending_requests = PhotoVerification.submitted.all()
    """
    ######################## Fields Set During Creation ########################
    # See class docstring for description of status states
    STATUS = Choices('created', 'ready', 'submitted', 'must_retry', 'approved', 'denied')
    user = models.ForeignKey(User, db_index=True)

    # They can change their name later on, so we want to copy the value here so
    # we always preserve what it was at the time they requested. We only copy
    # this value during the mark_ready() step. Prior to that, you should be
    # displaying the user's name from their user.profile.name.
    name = models.CharField(blank=True, max_length=255)

    # Where we place the uploaded image files (e.g. S3 URLs)
    face_image_url = models.URLField(blank=True, max_length=255)
    photo_id_image_url = models.URLField(blank=True, max_length=255)

    # Randomly generated UUID so that external services can post back the
    # results of checking a user's photo submission without use exposing actual
    # user IDs or something too easily guessable.
    receipt_id = models.CharField(
        db_index=True,
        # Plain function reference instead of `lambda: generateUUID()`: the
        # lambda was redundant, and Django's migration serializer cannot
        # serialize lambdas as field defaults.
        default=generateUUID,
        max_length=255,
    )

    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True, db_index=True)

    # Indicates whether or not a user wants to see the verification status
    # displayed on their dash. Right now, only relevant for allowing students
    # to "dismiss" a failed midcourse reverification message
    # TODO: This field is deprecated.
    display = models.BooleanField(db_index=True, default=True)

    ######################## Fields Set When Submitting ########################
    submitted_at = models.DateTimeField(null=True, db_index=True)

    #################### Fields Set During Approval/Denial #####################
    # If the review was done by an internal staff member, mark who it was.
    reviewing_user = models.ForeignKey(
        User,
        db_index=True,
        default=None,
        null=True,
        related_name="photo_verifications_reviewed"
    )

    # Mark the name of the service used to evaluate this attempt (e.g
    # Software Secure).
    reviewing_service = models.CharField(blank=True, max_length=255)

    # If status is "denied", this should contain text explaining why.
    error_msg = models.TextField(blank=True)

    # Non-required field. External services can add any arbitrary codes as time
    # goes on. We don't try to define an exhuastive list -- this is just
    # capturing it so that we can later query for the common problems.
    error_code = models.CharField(blank=True, max_length=50)

    class Meta(object):  # pylint: disable=missing-docstring
        abstract = True
        ordering = ['-created_at']
##### Methods listed in the order you'd typically call them
@classmethod
def _earliest_allowed_date(cls):
    """
    Returns the earliest allowed date given the settings
    """
    # Verifications created before now - DAYS_GOOD_FOR are considered expired.
    days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
    return datetime.now(pytz.UTC) - timedelta(days=days_good_for)
@classmethod
def user_is_verified(cls, user, earliest_allowed_date=None):
    """
    Return whether or not a user has satisfactorily proved their identity.
    Depending on the policy, this can expire after some period of time, so
    a user might have to renew periodically.

    This will check for the user's *initial* verification.
    """
    # Only an *approved* attempt still inside the policy window counts.
    return cls.objects.filter(
        user=user,
        status="approved",
        created_at__gte=(earliest_allowed_date
                         or cls._earliest_allowed_date())
    ).exists()
@classmethod
def verification_valid_or_pending(cls, user, earliest_allowed_date=None, queryset=None):
    """
    Check whether the user has a complete verification attempt that is
    or *might* be good. This means that it's approved, been submitted,
    or would have been submitted but had an non-user error when it was
    being submitted.

    It's basically any situation in which the user has signed off on
    the contents of the attempt, and we have not yet received a denial.

    This will check for the user's *initial* verification.

    Arguments:
        user:
        earliest_allowed_date: earliest allowed date given in the
            settings
        queryset: If a queryset is provided, that will be used instead
            of hitting the database.

    Returns:
        queryset: queryset of 'PhotoVerification' sorted by 'created_at' in
        descending order.
    """
    # Anything the user has signed off on and we have not yet denied.
    valid_statuses = ['submitted', 'approved', 'must_retry']
    if queryset is None:
        queryset = cls.objects.filter(user=user)
    return queryset.filter(
        status__in=valid_statuses,
        created_at__gte=(
            earliest_allowed_date
            or cls._earliest_allowed_date()
        )
    ).order_by('-created_at')
@classmethod
def user_has_valid_or_pending(cls, user, earliest_allowed_date=None, queryset=None):
    """
    Check whether the user has an active or pending verification attempt

    Returns:
        bool: True or False according to existence of valid verifications
    """
    # Thin existence wrapper around verification_valid_or_pending().
    return cls.verification_valid_or_pending(user, earliest_allowed_date, queryset).exists()
@classmethod
def active_for_user(cls, user):
    """
    Return the most recent PhotoVerification that is marked ready (i.e. the
    user has said they're set, but we haven't submitted anything yet).

    This checks for the original verification.
    """
    # This should only be one at the most, but just in case we create more
    # by mistake, we'll grab the most recently created one.
    active_attempts = cls.objects.filter(user=user, status='ready').order_by('-created_at')
    if active_attempts:
        return active_attempts[0]
    else:
        return None
@classmethod
def user_status(cls, user):
    """
    Returns the status of the user based on their past verification attempts,
    as a ``(status, error_msg)`` tuple.

    If no such verification exists, returns 'none'
    If verification has expired, returns 'expired'
    If the verification has been approved, returns 'approved'
    If the verification process is still ongoing, returns 'pending'
    If the verification has been denied and the user must resubmit photos, returns 'must_reverify'

    This checks initial verifications
    """
    status = 'none'
    error_msg = ''
    if cls.user_is_verified(user):
        status = 'approved'
    elif cls.user_has_valid_or_pending(user):
        # user_has_valid_or_pending does include 'approved', but if we are
        # here, we know that the attempt is still pending
        status = 'pending'
    else:
        # we need to check the most recent attempt to see if we need to ask them to do
        # a retry
        try:
            attempts = cls.objects.filter(user=user).order_by('-updated_at')
            attempt = attempts[0]
        except IndexError:
            # we return 'none'
            return ('none', error_msg)
        if attempt.created_at < cls._earliest_allowed_date():
            return (
                'expired',
                _("Your {platform_name} verification has expired.").format(platform_name=settings.PLATFORM_NAME)
            )
        # If someone is denied their original verification attempt, they can try to reverify.
        if attempt.status == 'denied':
            status = 'must_reverify'
        if attempt.error_msg:
            error_msg = attempt.parsed_error_msg()
    return (status, error_msg)
@classmethod
def verification_for_datetime(cls, deadline, candidates):
    """Find a verification in a set that applied during a particular datetime.

    A verification counts as "active" at a datetime when it was created
    before that datetime and is set to expire after it. Verification
    *status* is deliberately ignored here -- only the start/expire window
    matters. Candidates are assumed to be ordered newest-first, so when
    several were active at the deadline the most recently created wins.

    Arguments:
        deadline (datetime): The datetime at which the verification applied.
            If `None`, then return the most recently created candidate.
        candidates (list of `PhotoVerification`s): Potential verifications to search through.

    Returns:
        PhotoVerification: A photo verification that was active at the deadline.
        If no verification was active, return None.
    """
    if not candidates:
        return None

    # Without a deadline, the newest candidate is the answer.
    if deadline is None:
        return candidates[0]

    # First candidate whose [created, expiration) window covers the
    # deadline; None when no window matches.
    return next(
        (attempt for attempt in candidates if attempt.active_at_datetime(deadline)),
        None,
    )
@property
def expiration_datetime(self):
    """Datetime that the verification will expire. """
    # Window length comes from deployment settings, not the database.
    days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
    return self.created_at + timedelta(days=days_good_for)
def active_at_datetime(self, deadline):
    """Check whether the verification was active at a particular datetime.

    Arguments:
        deadline (datetime): The date at which the verification was active
            (created before and expired after).

    Returns:
        bool
    """
    # Chained comparison: created_at < deadline AND deadline < expiration.
    return self.created_at < deadline < self.expiration_datetime
def parsed_error_msg(self):
    """
    Sometimes, the error message we've received needs to be parsed into
    something more human readable

    The default behavior is to return the current error message as is.
    """
    # Subclasses (e.g. SoftwareSecurePhotoVerification below) override this
    # to translate service-specific error payloads.
    return self.error_msg
@status_before_must_be("created")
def upload_face_image(self, img):
    # Abstract hook: concrete subclasses decide how/where to store the image.
    raise NotImplementedError
@status_before_must_be("created")
def upload_photo_id_image(self, img):
    # Abstract hook: concrete subclasses decide how/where to store the image.
    raise NotImplementedError
@status_before_must_be("created")
def mark_ready(self):
    """
    Mark that the user data in this attempt is correct. In order to
    succeed, the user must have uploaded the necessary images
    (`face_image_url`, `photo_id_image_url`). This method will also copy
    their name from their user profile. Prior to marking it ready, we read
    this value directly from their profile, since they're free to change it.
    This often happens because people put in less formal versions of their
    name on signup, but realize they want something different to go on a
    formal document.

    Valid attempt statuses when calling this method:
        `created`

    Status after method completes: `ready`

    Other fields that will be set by this method:
        `name`

    State Transitions:

    `created` → `ready`
        This is what happens when the user confirms to us that the pictures
        they uploaded are good. Note that we don't actually do a submission
        anywhere yet.
    """
    # At any point prior to this, they can change their names via their
    # student dashboard. But at this point, we lock the value into the
    # attempt.
    self.name = self.user.profile.name
    self.status = "ready"
    self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def approve(self, user_id=None, service=""):
    """
    Approve this attempt. `user_id`

    Valid attempt statuses when calling this method:
        `submitted`, `approved`, `denied`

    Status after method completes: `approved`

    Other fields that will be set by this method:
        `reviewed_by_user_id`, `reviewed_by_service`, `error_msg`

    State Transitions:

    `submitted` → `approved`
        This is the usual flow, whether initiated by a staff user or an
        external validation service.
    `approved` → `approved`
        No-op. First one to approve it wins.
    `denied` → `approved`
        This might happen if a staff member wants to override a decision
        made by an external service or another staff member (say, in
        response to a support request). In this case, the previous values
        of `reviewed_by_user_id` and `reviewed_by_service` will be changed
        to whoever is doing the approving, and `error_msg` will be reset.
        The only record that this record was ever denied would be in our
        logs. This should be a relatively rare occurence.
    """
    # If someone approves an outdated version of this, the first one wins
    if self.status == "approved":
        return

    log.info(u"Verification for user '{user_id}' approved by '{reviewer}'.".format(
        user_id=self.user, reviewer=user_id
    ))
    self.error_msg = ""  # reset, in case this attempt was denied before
    self.error_code = ""  # reset, in case this attempt was denied before
    self.reviewing_user = user_id
    self.reviewing_service = service
    self.status = "approved"
    self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def deny(self,
         error_msg,
         error_code="",
         reviewing_user=None,
         reviewing_service=""):
    """
    Deny this attempt.

    Valid attempt statuses when calling this method:
        `submitted`, `approved`, `denied`

    Status after method completes: `denied`

    Other fields that will be set by this method:
        `reviewed_by_user_id`, `reviewed_by_service`, `error_msg`,
        `error_code`

    State Transitions:

    `submitted` → `denied`
        This is the usual flow, whether initiated by a staff user or an
        external validation service.
    `approved` → `denied`
        This might happen if a staff member wants to override a decision
        made by an external service or another staff member, or just correct
        a mistake made during the approval process. In this case, the
        previous values of `reviewed_by_user_id` and `reviewed_by_service`
        will be changed to whoever is doing the denying. The only record
        that this record was ever approved would be in our logs. This should
        be a relatively rare occurence.
    `denied` → `denied`
        Update the error message and reviewing_user/reviewing_service. Just
        lets you amend the error message in case there were additional
        details to be made.
    """
    log.info(u"Verification for user '{user_id}' denied by '{reviewer}'.".format(
        user_id=self.user, reviewer=reviewing_user
    ))
    self.error_msg = error_msg
    self.error_code = error_code
    self.reviewing_user = reviewing_user
    self.reviewing_service = reviewing_service
    self.status = "denied"
    self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def system_error(self,
                 error_msg,
                 error_code="",
                 reviewing_user=None,
                 reviewing_service=""):
    """
    Mark that this attempt could not be completed because of a system error.
    Status should be moved to `must_retry`. For example, if Software Secure
    reported to us that they couldn't process our submission because they
    couldn't decrypt the image we sent.
    """
    if self.status in ["approved", "denied"]:
        return  # If we were already approved or denied, just leave it.

    self.error_msg = error_msg
    self.error_code = error_code
    self.reviewing_user = reviewing_user
    self.reviewing_service = reviewing_service
    self.status = "must_retry"
    self.save()
class SoftwareSecurePhotoVerification(PhotoVerification):
    """
    Model to verify identity using a service provided by Software Secure. Much
    of the logic is inherited from `PhotoVerification`, but this class
    encrypts the photos.

    Software Secure (http://www.softwaresecure.com/) is a remote proctoring
    service that also does identity verification. A student uses their webcam
    to upload two images: one of their face, one of a photo ID. Due to the
    sensitive nature of the data, the following security precautions are taken:

    1. The snapshot of their face is encrypted using AES-256 in CBC mode. All
       face photos are encypted with the same key, and this key is known to
       both Software Secure and edx-platform.
    2. The snapshot of a user's photo ID is also encrypted using AES-256, but
       the key is randomly generated using pycrypto's Random. Every verification
       attempt has a new key. The AES key is then encrypted using a public key
       provided by Software Secure. We store only the RSA-encryped AES key.
       Since edx-platform does not have Software Secure's private RSA key, it
       means that we can no longer even read photo ID.
    3. The encrypted photos are base64 encoded and stored in an S3 bucket that
       edx-platform does not have read access to.

    Note: this model handles *inital* verifications (which you must perform
    at the time you register for a verified cert).
    """
    # This is a base64.urlsafe_encode(rsa_encrypt(photo_id_aes_key), ss_pub_key)
    # So first we generate a random AES-256 key to encrypt our photo ID with.
    # Then we RSA encrypt it with Software Secure's public key. Then we base64
    # encode that. The result is saved here. Actual expected length is 344.
    photo_id_key = models.TextField(max_length=1024)

    # How long (in seconds) a generated S3 link to a photo remains valid.
    IMAGE_LINK_DURATION = 5 * 60 * 60 * 24  # 5 days in seconds
@classmethod
def original_verification(cls, user):
    """
    Returns the most current SoftwareSecurePhotoVerification object associated with the user.
    """
    # NOTE(review): raises IndexError when the user has no verifications at
    # all -- callers appear to assume one exists; confirm.
    query = cls.objects.filter(user=user).order_by('-updated_at')
    return query[0]
@classmethod
def get_initial_verification(cls, user):
    """Get initial verification for a user

    Arguments:
        user(User): user object

    Return:
        SoftwareSecurePhotoVerification (object) or None if the user has no
        submitted/approved attempts
    """
    init_verification = cls.objects.filter(user=user, status__in=["submitted", "approved"])
    return init_verification.latest('created_at') if init_verification.exists() else None
@status_before_must_be("created")
def upload_face_image(self, img_data):
    """
    Upload an image of the user's face to S3. `img_data` should be a raw
    bytestream of a PNG image. This method will take the data, encrypt it
    using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to S3.

    Yes, encoding it to base64 adds compute and disk usage without much real
    benefit, but that's what the other end of this API is expecting to get.
    """
    # Skip this whole thing if we're running acceptance tests or if we're
    # developing and aren't interested in working on student identity
    # verification functionality. If you do want to work on it, you have to
    # explicitly enable these in your private settings.
    if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
        return

    # The face key is shared with Software Secure; stored hex-encoded in settings.
    aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
    aes_key = aes_key_str.decode("hex")

    s3_key = self._generate_s3_key("face")
    s3_key.set_contents_from_string(encrypt_and_encode(img_data, aes_key))
@status_before_must_be("created")
def fetch_photo_id_image(self):
    """
    Find the user's photo ID image, which was submitted with their original verification.
    The image has already been encrypted and stored in s3, so we just need to find that
    location
    """
    if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
        return

    # Reuse the encrypted AES key from the user's original attempt.
    self.photo_id_key = self.original_verification(self.user).photo_id_key
    self.save()
@status_before_must_be("created")
def upload_photo_id_image(self, img_data):
    """
    Upload an the user's photo ID image to S3. `img_data` should be a raw
    bytestream of a PNG image. This method will take the data, encrypt it
    using a randomly generated AES key, encode it with base64 and save it to
    S3. The random key is also encrypted using Software Secure's public RSA
    key and stored in our `photo_id_key` field.

    Yes, encoding it to base64 adds compute and disk usage without much real
    benefit, but that's what the other end of this API is expecting to get.
    """
    # Skip this whole thing if we're running acceptance tests or if we're
    # developing and aren't interested in working on student identity
    # verification functionality. If you do want to work on it, you have to
    # explicitly enable these in your private settings.
    if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
        return

    # A fresh AES key per attempt; only its RSA-encrypted form is stored.
    aes_key = random_aes_key()
    rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
    rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)

    # Upload this to S3
    s3_key = self._generate_s3_key("photo_id")
    s3_key.set_contents_from_string(encrypt_and_encode(img_data, aes_key))

    # Update our record fields
    self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
    self.save()
@status_before_must_be("must_retry", "ready", "submitted")
def submit(self):
    """
    Submit our verification attempt to Software Secure for validation. This
    will set our status to "submitted" if the post is successful, and
    "must_retry" if the post fails.
    """
    try:
        response = self.send_request()
        if response.ok:
            self.submitted_at = datetime.now(pytz.UTC)
            self.status = "submitted"
            self.save()
        else:
            # Non-2xx from Software Secure: keep the response body for debugging.
            self.status = "must_retry"
            self.error_msg = response.text
            self.save()
    except Exception as error:
        # Broad catch is deliberate: any failure to reach Software Secure
        # should mark the attempt retryable, not crash the request.
        log.exception(error)
        self.status = "must_retry"
        self.save()
def parsed_error_msg(self):
    """
    Parse the error messages we receive from SoftwareSecure

    Error messages are written in the form:

        `[{"photoIdReasons": ["Not provided"]}]`

    Returns a comma-joined, human-readable string of the translated error
    messages (not a list), or a generic error string if parsing fails.
    """
    # Translates the category names and messages into something more human readable
    message_dict = {
        ("photoIdReasons", "Not provided"): _("No photo ID was provided."),
        ("photoIdReasons", "Text not clear"): _("We couldn't read your name from your photo ID image."),
        ("generalReasons", "Name mismatch"): _("The name associated with your account and the name on your ID do not match."),
        ("userPhotoReasons", "Image not clear"): _("The image of your face was not clear."),
        ("userPhotoReasons", "Face out of view"): _("Your face was not visible in your self-photo."),
    }

    try:
        msg_json = json.loads(self.error_msg)
        msg_dict = msg_json[0]

        msg = []
        for category in msg_dict:
            # find the messages associated with this category
            category_msgs = msg_dict[category]
            for category_msg in category_msgs:
                msg.append(message_dict[(category, category_msg)])
        return u", ".join(msg)
    except (ValueError, KeyError):
        # if we can't parse the message as JSON or the category doesn't
        # match one of our known categories, show a generic error
        log.error('PhotoVerification: Error parsing this error message: %s', self.error_msg)
        return _("There was an error verifying your ID photos.")
def image_url(self, name):
    """
    We dynamically generate this, since we want it the expiration clock to
    start when the message is created, not when the record is created.
    """
    # `name` is the S3 key prefix, e.g. "face" or "photo_id".
    s3_key = self._generate_s3_key(name)
    return s3_key.generate_url(self.IMAGE_LINK_DURATION)
def _generate_s3_key(self, prefix):
    """
    Generates a key for an s3 bucket location

    Example: face/4dd1add9-6719-42f7-bea0-115c008c4fca
    """
    conn = S3Connection(
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["AWS_ACCESS_KEY"],
        settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["AWS_SECRET_KEY"]
    )
    bucket = conn.get_bucket(settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["S3_BUCKET"])

    # Keys are namespaced by prefix and made unique by the attempt's receipt_id.
    key = Key(bucket)
    key.key = "{}/{}".format(prefix, self.receipt_id)

    return key
def _encrypted_user_photo_key_str(self):
    """
    Software Secure needs to have both UserPhoto and PhotoID decrypted in
    the same manner. So even though this is going to be the same for every
    request, we're also using RSA encryption to encrypt the AES key for
    faces.
    """
    face_aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
    face_aes_key = face_aes_key_str.decode("hex")
    rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
    rsa_encrypted_face_aes_key = rsa_encrypt(face_aes_key, rsa_key_str)

    return rsa_encrypted_face_aes_key.encode("base64")
    def create_request(self):
        """
        Build the headers and body for a Software Secure submission.

        :returns: ``(headers, body)`` -- both plain dicts; ``headers``
            includes the computed ``Authorization`` signature.
        """
        access_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]
        secret_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
        scheme = "https" if settings.HTTPS == "on" else "http"
        # URL Software Secure will POST its verification results back to.
        callback_url = "{}://{}{}".format(
            scheme, settings.SITE_NAME, reverse('verify_student_results_callback')
        )
        body = {
            "EdX-ID": str(self.receipt_id),
            "ExpectedName": self.name,
            "PhotoID": self.image_url("photo_id"),
            "PhotoIDKey": self.photo_id_key,
            "SendResponseTo": callback_url,
            "UserPhoto": self.image_url("face"),
            "UserPhotoKey": self._encrypted_user_photo_key_str(),
        }
        headers = {
            "Content-Type": "application/json",
            "Date": formatdate(timeval=None, localtime=False, usegmt=True)
        }
        # Sign the message with our API credentials; only the Authorization
        # header value is needed from the result.
        _message, _sig, authorization = generate_signed_message(
            "POST", headers, body, access_key, secret_key
        )
        headers['Authorization'] = authorization
        return headers, body
def request_message_txt(self):
"""
This is the body of the request we send across. This is never actually
used in the code, but exists for debugging purposes -- you can call
`print attempt.request_message_txt()` on the console and get a readable
rendering of the request that would be sent across, without actually
sending anything.
"""
headers, body = self.create_request()
header_txt = "\n".join(
"{}: {}".format(h, v) for h, v in sorted(headers.items())
)
body_txt = json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8')
return header_txt + "\n\n" + body_txt
def send_request(self):
"""
Assembles a submission to Software Secure and sends it via HTTPS.
Returns a request.Response() object with the reply we get from SS.
"""
# If AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING is True, we want to
# skip posting anything to Software Secure. We actually don't even
# create the message because that would require encryption and message
# signing that rely on settings.VERIFY_STUDENT values that aren't set
# in dev. So we just pretend like we successfully posted
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
fake_response = requests.Response()
fake_response.status_code = 200
return fake_response
headers, body = self.create_request()
response = requests.post(
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_URL"],
headers=headers,
data=json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8'),
verify=False
)
log.debug("Sent request to Software Secure for {}".format(self.receipt_id))
log.debug("Headers:\n{}\n\n".format(headers))
log.debug("Body:\n{}\n\n".format(body))
log.debug("Return code: {}".format(response.status_code))
log.debug("Return message:\n\n{}\n\n".format(response.text))
return response
    @classmethod
    def submit_faceimage(cls, user, face_image, photo_id_key):
        """Submit the face image to SoftwareSecurePhotoVerification.
        Arguments:
            user(User): user object
            face_image (bytestream): raw bytestream image data
            photo_id_key (str) : SoftwareSecurePhotoVerification attribute
        Returns:
            SoftwareSecurePhotoVerification Object
        """
        # Presumably a data URL ("data:...;base64,<payload>") -- keep only the
        # part after the first comma. TODO confirm against callers.
        b64_face_image = face_image.split(",")[1]
        attempt = SoftwareSecurePhotoVerification(user=user)
        # Python 2 str.decode('base64'): decode the payload to raw image bytes.
        attempt.upload_face_image(b64_face_image.decode('base64'))
        attempt.photo_id_key = photo_id_key
        # Move the attempt through its workflow: ready -> saved -> submitted.
        attempt.mark_ready()
        attempt.save()
        attempt.submit()
        return attempt
@classmethod
def verification_status_for_user(cls, user, course_id, user_enrollment_mode):
"""
Returns the verification status for use in grade report.
"""
if user_enrollment_mode not in CourseMode.VERIFIED_MODES:
return 'N/A'
user_is_verified = cls.user_is_verified(user)
if not user_is_verified:
return 'Not ID Verified'
else:
return 'ID Verified'
class VerificationCheckpoint(models.Model):
    """Represents a point at which a user is asked to re-verify his/her
    identity.
    Each checkpoint is uniquely identified by a
    (course_id, checkpoint_location) tuple.
    """
    # Course this checkpoint belongs to.
    course_id = CourseKeyField(max_length=255, db_index=True)
    # Serialized UsageKey of the reverification XBlock for this checkpoint.
    checkpoint_location = models.CharField(max_length=255)
    # Verification attempts associated with this checkpoint.
    photo_verification = models.ManyToManyField(SoftwareSecurePhotoVerification)
    class Meta: # pylint: disable=missing-docstring, old-style-class
        unique_together = ('course_id', 'checkpoint_location')
    def __unicode__(self):
        """
        Unicode representation of the checkpoint.
        """
        return u"{checkpoint} in {course}".format(
            checkpoint=self.checkpoint_name,
            course=self.course_id
        )
    @lazy
    def checkpoint_name(self):
        """Lazy method for getting checkpoint name of reverification block.
        Return location of the checkpoint if no related assessment found in
        database.
        """
        checkpoint_key = UsageKey.from_string(self.checkpoint_location)
        try:
            checkpoint_name = modulestore().get_item(checkpoint_key).related_assessment
        except ItemNotFoundError:
            # The block was deleted (or never existed): fall back to the raw
            # location string so callers still get something displayable.
            log.warning(
                u"Verification checkpoint block with location '%s' and course id '%s' "
                u"not found in database.", self.checkpoint_location, unicode(self.course_id)
            )
            checkpoint_name = self.checkpoint_location
        return checkpoint_name
    def add_verification_attempt(self, verification_attempt):
        """Add the verification attempt in M2M relation of photo_verification.
        Arguments:
            verification_attempt(object): SoftwareSecurePhotoVerification object
        Returns:
            None
        """
        self.photo_verification.add(verification_attempt)   # pylint: disable=no-member
    def get_user_latest_status(self, user_id):
        """Get the status of the latest checkpoint attempt of the given user.
        Args:
            user_id(str): Id of user
        Returns:
            VerificationStatus object if found any else None
        """
        try:
            # "latest" is resolved by VerificationStatus.Meta.get_latest_by.
            return self.checkpoint_status.filter(user_id=user_id).latest()  # pylint: disable=no-member
        except ObjectDoesNotExist:
            return None
    @classmethod
    def get_verification_checkpoint(cls, course_id, checkpoint_location):
        """Get the verification checkpoint for given 'course_id' and
        checkpoint name.
        Arguments:
            course_id(CourseKey): CourseKey
            checkpoint_location(str): Verification checkpoint location
        Returns:
            VerificationCheckpoint object if exists otherwise None
        """
        try:
            return cls.objects.get(course_id=course_id, checkpoint_location=checkpoint_location)
        except cls.DoesNotExist:
            return None
class VerificationStatus(models.Model):
    """This model is an append-only table that represents user status changes
    during the verification process.
    A verification status represents a user’s progress through the verification
    process for a particular checkpoint.
    """
    VERIFICATION_STATUS_CHOICES = (
        ("submitted", "submitted"),
        ("approved", "approved"),
        ("denied", "denied"),
        ("error", "error")
    )
    checkpoint = models.ForeignKey(VerificationCheckpoint, related_name="checkpoint_status")
    user = models.ForeignKey(User)
    status = models.CharField(choices=VERIFICATION_STATUS_CHOICES, db_index=True, max_length=32)
    # Set once at creation; combined with get_latest_by this makes .latest()
    # return the most recent status row.
    timestamp = models.DateTimeField(auto_now_add=True)
    response = models.TextField(null=True, blank=True)
    error = models.TextField(null=True, blank=True)
    class Meta(object):  # pylint: disable=missing-docstring
        get_latest_by = "timestamp"
        verbose_name = "Verification Status"
        verbose_name_plural = "Verification Statuses"
    @classmethod
    def add_verification_status(cls, checkpoint, user, status):
        """Create new verification status object.
        Arguments:
            checkpoint(VerificationCheckpoint): VerificationCheckpoint object
            user(User): user object
            status(str): Status from VERIFICATION_STATUS_CHOICES
        Returns:
            None
        """
        cls.objects.create(checkpoint=checkpoint, user=user, status=status)
    @classmethod
    def add_status_from_checkpoints(cls, checkpoints, user, status):
        """Create new verification status objects for a user against the given
        checkpoints.
        Arguments:
            checkpoints(list): list of VerificationCheckpoint objects
            user(User): user object
            status(str): Status from VERIFICATION_STATUS_CHOICES
        Returns:
            None
        """
        for checkpoint in checkpoints:
            cls.objects.create(checkpoint=checkpoint, user=user, status=status)
    @classmethod
    def get_user_attempts(cls, user_id, course_key, related_assessment_location):
        """
        Get re-verification attempts against a user for a given 'checkpoint'
        and 'course_id'.
        Arguments:
            user_id(str): User Id string
            course_key(str): A CourseKey of a course
            related_assessment_location(str): Verification checkpoint location
        Returns:
            Count of re-verification attempts
        """
        # Only "submitted" rows count as attempts.
        return cls.objects.filter(
            user_id=user_id,
            checkpoint__course_id=course_key,
            checkpoint__checkpoint_location=related_assessment_location,
            status="submitted"
        ).count()
    @classmethod
    def get_location_id(cls, photo_verification):
        """Get the location ID of reverification XBlock.
        Args:
            photo_verification(object): SoftwareSecurePhotoVerification object
        Return:
            Location Id of XBlock if any else empty string
        """
        try:
            verification_status = cls.objects.filter(checkpoint__photo_verification=photo_verification).latest()
            return verification_status.checkpoint.checkpoint_location
        except cls.DoesNotExist:
            return ""
class InCourseReverificationConfiguration(ConfigurationModel):
    """Configure in-course re-verification.
    Enable or disable in-course re-verification feature.
    When this flag is disabled, the "in-course re-verification" feature
    will be disabled.
    When the flag is enabled, the "in-course re-verification" feature
    will be enabled.
    """
    # No extra fields: the enabled/disabled flag comes from ConfigurationModel.
    pass
class SkippedReverification(models.Model):
    """Model for tracking skipped Reverification of a user against a specific
    course.
    If a user skipped a Reverification checkpoint for a specific course then in
    future that user cannot see the reverification link.
    """
    user = models.ForeignKey(User)
    course_id = CourseKeyField(max_length=255, db_index=True)
    checkpoint = models.ForeignKey(VerificationCheckpoint, related_name="skipped_checkpoint")
    created_at = models.DateTimeField(auto_now_add=True)
    class Meta: # pylint: disable=missing-docstring, old-style-class
        # A user can skip at most once per course (regardless of checkpoint).
        unique_together = (('user', 'course_id'),)
    @classmethod
    def add_skipped_reverification_attempt(cls, checkpoint, user_id, course_id):
        """Create skipped reverification object.
        Arguments:
            checkpoint(VerificationCheckpoint): VerificationCheckpoint object
            user_id(str): User Id of currently logged in user
            course_id(CourseKey): CourseKey
        Returns:
            None
        """
        cls.objects.create(checkpoint=checkpoint, user_id=user_id, course_id=course_id)
    @classmethod
    def check_user_skipped_reverification_exists(cls, user, course_id):
        """Check existence of a user's skipped re-verification attempt for a
        specific course.
        Arguments:
            user(User): user object
            course_id(CourseKey): CourseKey
        Returns:
            Boolean
        """
        return cls.objects.filter(user=user, course_id=course_id).exists()
| agpl-3.0 |
adamtiger/tensorflow | tensorflow/contrib/tensorboard/plugins/projector/projector_api_test.py | 94 | 2122 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""API tests for the projector plugin in TensorBoard."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from google.protobuf import text_format
from tensorflow.contrib.tensorboard.plugins import projector
from tensorflow.contrib.tensorboard.plugins.projector import projector_config_pb2
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.summary.writer import writer as writer_lib
class ProjectorApiTest(test.TestCase):
  """Tests for the projector plugin's public `visualize_embeddings` API."""

  def testVisualizeEmbeddings(self):
    """Round-trips a ProjectorConfig through visualize_embeddings."""
    # Create a dummy configuration.
    config = projector_config_pb2.ProjectorConfig()
    config.model_checkpoint_path = 'test'
    emb1 = config.embeddings.add()
    emb1.tensor_name = 'tensor1'
    emb1.metadata_path = 'metadata1'
    # Call the API method to save the configuration to a temporary dir.
    temp_dir = self.get_temp_dir()
    self.addCleanup(shutil.rmtree, temp_dir)
    writer = writer_lib.FileWriter(temp_dir)
    projector.visualize_embeddings(writer, config)
    # Read the configurations from disk and make sure it matches the original.
    with gfile.GFile(os.path.join(temp_dir, 'projector_config.pbtxt')) as f:
      config2 = projector_config_pb2.ProjectorConfig()
      text_format.Parse(f.read(), config2)
      self.assertEqual(config, config2)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
PierreBdR/point_tracker | point_tracker/tissue_plot/tracking_plot.py | 1 | 45873 | # coding=utf-8
from __future__ import print_function, division, absolute_import
"""
This module contains the bases classes needed for plotting.
"""
__author__ = "Pierre Barbier de Reuille <pierre@barbierdereuille.net>"
from PyQt4.QtGui import (QColor, QDialog, QFontDialog, QFont, QDoubleValidator, QPicture,
QPainter, QFileDialog)
from PyQt4.QtCore import QObject, Slot, QTimer, Signal
from ..transferfunction import TransferFunction
from ..transferfunctiondlg import TransferFunctionDlg
from ..scale_bar import ScaleBar as ScaleBarDrawer
from ..sys_utils import setColor, changeColor, getColor, createForm
from ..debug import log_debug
from math import hypot as norm
from ..path import path
from ..tracking_data import RetryTrackingDataException, TrackingData
from ..growth_computation_methods import Result
from ..sys_utils import toBool, cleanQObject
def make_cap_symetric(caps):
    """
    Return *caps* adjusted so that 0 is always inside the capping range.

    If the two bounds have opposite signs, the range is widened to be
    symmetric around zero; if both are of the same sign, the bound closest
    to zero is replaced by 0.
    """
    caps = list(caps)
    low, high = caps[0], caps[1]
    if low * high < 0:
        # Opposite signs: make the range symmetric around zero.
        high = max(abs(low), abs(high))
        low = -high
    elif high > 0:
        low = 0
    else:
        high = 0
    caps[0], caps[1] = low, high
    return tuple(caps)
# Registries of coloring classes. They are populated automatically by the
# metaclass returned from ColoringObjectType() and emptied by reset_classes().
cell_colorings_cls = []
"""
List of cell coloring classes.
:type: list of class
"""
wall_colorings_cls = []
"""
List of wall coloring classes.
:type: list of class
"""
point_colorings_cls = []
"""
List of point coloring classes.
:type: list of class
"""
class Struct(object):
    """Empty attribute container used as an ad-hoc namespace for parameters."""
    pass
def reset_classes():
    """Empty the three coloring-class registries in place."""
    # In-place deletion keeps any existing references to the lists valid.
    for registry in (cell_colorings_cls, wall_colorings_cls, point_colorings_cls):
        del registry[:]
def transfer_fct_dlg():
    """
    This function create a singleton of the transfer function dialog box.
    """
    if transfer_fct_dlg.instance is None:
        dlg = TransferFunctionDlg()
        dlg.use_histogram = False
        dlg.loadSettings("")
        transfer_fct_dlg.instance = dlg
    return transfer_fct_dlg.instance
# The singleton is stored as a function attribute, created on first call.
transfer_fct_dlg.instance = None
class NoParametersObject(QObject):
    """
    Class handling parameters when none are needed.

    It is also useful as a template to create a new parameter class.

    :Parameters:
        params : struct
            Structure holding the parameters for this class
    """
    # NOTE: in the original code this docstring was placed *after*
    # ``changed = Signal()`` and therefore was a no-op statement rather than
    # the class docstring; it has been moved so help()/__doc__ pick it up.

    # Emitted when a parameter changes; never emitted here since there are none.
    changed = Signal()

    def __init__(self, params, parent=None):
        QObject.__init__(self, parent)

    def widget(self, parent):
        """
        :returns: The widget used to get the values or None.
        :returntype: QWidget|None
        """
        return None

    @staticmethod
    def load(params, settings):
        """
        Load the parameters and save them in the `params` argument.

        :Parameters:
            params : struct
                Structure in which to place the parameters.
            settings : `QSettings`
                Settings object where the settings are read. No need to create a group ...
        """
        pass

    @staticmethod
    def save(params, settings):
        """
        Save the parameters contained in the `params` argument.

        :Parameters:
            params : struct
                Structure in which to place the parameters.
            settings : `QSettings`
                Settings object where the settings are read. No need to create a group ...
        """
        pass
class ColoringObject(QObject):
    """
    Base class for all coloring object.

    If the parameters of the object change, the ``changed`` signal is sent.

    :signal: ``changed``
    """
    # NOTE: this docstring used to sit after ``changed = Signal()`` as a
    # no-op string statement; it has been moved so it becomes the real
    # class docstring.

    # Emitted whenever one of the coloring parameters changes.
    changed = Signal()

    # Class used to store/edit the parameters of this coloring.
    parameter_class = NoParametersObject

    # Human-readable name of the coloring; None means "abstract, not registered".
    coloring_name = None

    # Name used to store the parameters in the settings.
    settings_name = None

    def __init__(self, result, parent=None):
        QObject.__init__(self, parent)
        self._result = result
        self._parameters = None
        self._config = None
        self._update_parameters()

    def __del__(self):
        cleanQObject(self)

    @property
    def result(self):
        '''Result object used for coloring'''
        return self._result

    @result.setter
    def result(self, value):
        if self._result != value:
            self._result = value
            self._update_parameters()

    @property
    def parameters(self):
        '''
        Parameter class as a singleton per instance
        '''
        if self._parameters is None:
            self._parameters = self.create_parameters()
            # Forward any parameter change as a change of the coloring itself.
            self._parameters.changed.connect(self.changed)
        return self._parameters

    #{ Main interface methods

    def init(self):
        """
        Initialise the object if needed.

        This function is called once after all the parameters of the object are set to allow for precomputing.
        """
        pass

    def startImage(self, painter, imageid):
        """
        This method is called once the image is placed but before any cell or wall is drawn.
        """
        pass

    def finalizeImage(self, painter, imageid, image_transform, size=None):
        """
        This method is called after all cells and walls are drawn.

        Useful to add elements global to the image (i.e. color scale, ...)
        """
        pass

    def __call__(self, imageid, uid):
        """
        :Parameters:
            imageid : int
                Id of the image in the result (i.e. its position in the images list)
            uid : int | (int,int)
                A cell id if integer, a wall id if tuple

        :returns: the color of the object according to the instanciated class.
        :returntype: `QColor`
        """
        # BUG FIX: previously ``raise NotImplemented(...)`` -- NotImplemented
        # is a constant, not an exception class, so raising it produced a
        # confusing TypeError instead of the intended abstract-method error.
        raise NotImplementedError("This is an abstract method.")

    def config_widget(self, parent):
        """
        Default implementation returns `_config` if it exists, otherwise,
        call `_config_widget` and store the result in `_config` for
        later calls.

        :returns: The configuration widget for the current method
        :returntype: `QWidget`
        """
        if self._config is None:
            log_debug("Creating config widget")
            self._config = self._config_widget(parent)
        self._update_parameters()
        return self._config

    @staticmethod
    def accept_result_type(result_type):
        """
        :returns: true if the result type is handled by the class, False otherwise.
        :returntype: bool

        Default implementation accept nothing.

        :Parameters:
            result_type: str
                For now, it is one of "Data" and "Growth" depending if the object is a growth
                result object or a data object.
        """
        return False

    #{ Private methods to implement in subclasses if needed

    def _update_parameters(self):
        """
        Update the parameters according to the current result object.

        Default implementation does nothing.
        """
        pass

    def _config_widget(self, parent):
        """
        Return a new config widget at each call.
        """
        return self.parameters.widget(parent)

    @classmethod
    def load_parameters(cls, settings):
        """
        Load the parameters from settings.

        Default implementation uses the class defined in the `parameter_class`
        class member with the name `settings_name`.
        """
        from ..parameters import instance
        params = instance.plotting
        name = cls.settings_name
        s = Struct()
        settings.beginGroup(name)
        cls.parameter_class.load(s, settings)
        setattr(params, name, s)
        settings.endGroup()

    @classmethod
    def save_parameters(cls, settings):
        """
        Save the parameters into a settings object.

        Default implementation uses the class defined in the `parameter_class`
        class member with the name `settings_name`.
        """
        from ..parameters import instance
        params = instance.plotting
        name = cls.settings_name
        if hasattr(params, name):
            s = getattr(params, name)
            settings.beginGroup(name)
            cls.parameter_class.save(s, settings)
            settings.endGroup()

    @classmethod
    def create_parameters(cls):
        """
        Create an instance of the parameter class `parameter_class`.
        """
        from ..parameters import instance
        params = instance.plotting
        name = cls.settings_name
        p = getattr(params, name)
        return cls.parameter_class(p)
    #}
# Cache of base classes built by ColoringClass(), keyed by object kinds + base.
coloring_baseclasses = {}
# Maps an object kind to the registry list its coloring classes are appended to.
coloring_classes = {'cell': cell_colorings_cls,
                    'wall': wall_colorings_cls,
                    'point': point_colorings_cls}
# Cache of metaclasses built by ColoringObjectType(), keyed by object kinds.
coloring_metaclasses = {}
def ColoringObjectType(*objects):
    """
    Return (and cache) a metaclass registering coloring classes.

    Any class created with the returned metaclass whose ``coloring_name`` is
    set gets appended to the registry list of each requested object kind
    ('cell', 'wall', 'point'). With no argument, all three kinds are used.
    """
    if not objects:
        objects = ('cell', 'wall', 'point')
    objects = frozenset(objects)
    global coloring_metaclasses
    if objects not in coloring_metaclasses:
        colorings_cls = tuple(coloring_classes[obj] for obj in objects)
        # Must derive from QObject's metaclass so PyQt classes can use it.
        class ObjectColoringObjectType(type(QObject)):
            def __init__(cls, name, bases, dct):
                #print("Adding coloring object {0} for objects {1}".format(cls, ", ".join(objects)))
                type(QObject).__init__(cls, name, bases, dct)
                if cls.coloring_name:
                    for ccls in colorings_cls:
                        ccls.append(cls)
        coloring_metaclasses[objects] = ObjectColoringObjectType
    return coloring_metaclasses[objects]
def ColoringClass(objects=None, base=ColoringObject):
    """
    Return (and cache) a base class for colorings of the given object kinds.

    The returned class is built with the registering metaclass from
    `ColoringObjectType`, so concrete subclasses that set ``coloring_name``
    are auto-registered in the matching registries.
    """
    if objects is None:
        objects = ('cell', 'wall', 'point')
    elif not isinstance(objects, tuple):
        objects = (objects,)
    ids = frozenset(objects + (base,))
    global coloring_baseclasses
    if ids not in coloring_baseclasses:
        name = "{0}ColoringBaseClass".format("".join(t.capitalize() for t in objects))
        ColoringBaseClass = ColoringObjectType(*objects)(name, (base,), {})
        coloring_baseclasses[ids] = ColoringBaseClass
    return coloring_baseclasses[ids]
class ScaleBar(QObject):
    changed = Signal()
    # NOTE(review): the string below is a no-op statement (it follows
    # ``changed``), so it is NOT picked up as the class docstring.
    """
    The scale bar has to be inherited.
    It assumes there is a `transfer_function` property defined when the scale bar might be drawn.
    If any parameter change, the ``changed`` signal is sent.
    :signal: ``changed``
    """
    def __init__(self, params, parent=None):
        QObject.__init__(self, parent)
        # Widgets are created lazily in addScaleBarWidget().
        self._scale_config = None
        self._scale_config_param = None
        # Cached copies of the parameter struct values; kept in sync with
        # ``params`` by the property setters below.
        self._scale_text = params.scale_text
        self._scale_line = params.scale_line
        self._scale_line_thickness = params.scale_line_thickness
        self._scale_position = params.scale_position
        self._scale_font = params.scale_font
        self._scale_show = params.scale_show
        self._scale_bar_outside_image = params.scale_bar_outside_image
        self._params = params
    def _showConfig(self):
        # Open the modal configuration dialog.
        self._scale_config_param.exec_()
    def addScaleBarWidget(self, parent):
        """Create the scale-bar config widgets and add them to *parent*'s layout."""
        config = createForm("plot_scale.ui", parent)
        config_params = createForm("plot_scale_config.ui", None)
        self._scale_config = config
        self._scale_config_param = config_params
        # Wire the UI controls to the corresponding properties/slots.
        config.configuration.clicked.connect(self._showConfig)
        config.scaleBar.toggled.connect(self.set_scale_show)
        config_params.selectTextColor.clicked.connect(self._changeScaleTextColor)
        config_params.selectLineColor.clicked.connect(self._changeScaleLineColor)
        config_params.selectPosition.highlighted['QString'].connect(self.set_scale_position)
        config_params.selectFont.clicked.connect(self._changeFont)
        config_params.lineThickness.valueChanged[int].connect(self._changeScaleLineThickness)
        config_params.outsideImage.toggled.connect(self._set_scaleBarOutsideImage)
        # Initialise the widgets from the current parameter values.
        config.scaleBar.setChecked(self.scale_show)
        # Show the chosen font family at the button's own point size.
        scaled_font = QFont(self.scale_font)
        scaled_font.setPointSizeF(config_params.selectFont.font().pointSizeF())
        config_params.selectFont.setFont(scaled_font)
        setColor(config_params.textColor, self.scale_text)
        setColor(config_params.lineColor, self.scale_line)
        config_params.outsideImage.setChecked(self.scale_bar_outside_image)
        # Select the current position in the combo box, or fall back to the
        # first entry if the stored value is not one of the choices.
        for i in range(config_params.selectPosition.count()):
            txt = config_params.selectPosition.itemText(i)
            if txt == self.scale_position:
                config_params.selectPosition.setCurrentIndex(i)
                break
        else:
            self.scale_position = config_params.selectPosition.itemText(0)
            config_params.selectPosition.setCurrentIndex(0)
        parent.layout().addWidget(config)
    @Slot()
    def _changeFont(self):
        fnt, ok = QFontDialog.getFont(self.scale_font, self._scale_config_param, "Font for the color scale bar")
        if ok:
            self.scale_font = fnt
            normal_size = self._scale_config_param.selectFont.font().pointSizeF()
            scaled_font = QFont(fnt)
            scaled_font.setPointSizeF(normal_size)
            self._scale_config_param.selectFont.setFont(scaled_font)
    @Slot()
    def _changeScaleLineColor(self):
        if changeColor(self._scale_config_param.lineColor):
            self.scale_line = getColor(self._scale_config_param.lineColor)
    @Slot(int)
    def _changeScaleLineThickness(self, value):
        self.scale_line_thickness = value
    @Slot(bool)
    def _set_scaleBarOutsideImage(self, value):
        self.scale_bar_outside_image = value
    @Slot()
    def _changeScaleTextColor(self):
        if changeColor(self._scale_config_param.textColor):
            self.scale_text = getColor(self._scale_config_param.textColor)
    @property
    def scale_text(self):
        """
        Color of the text on the scale bar
        :returntype: QColor
        """
        return self._scale_text
    @scale_text.setter
    def scale_text(self, value):
        value = QColor(value)
        if self._scale_text != value:
            self._scale_text = value
            self._params.scale_text = value
            self.changed.emit()
    @property
    def scale_line(self):
        """
        Color of the line around the scale bar and the ticks of the scale bar.
        :returntype: QColor
        """
        return self._scale_line
    @scale_line.setter
    def scale_line(self, value):
        value = QColor(value)
        if self._scale_line != value:
            self._scale_line = value
            self._params.scale_line = value
            self.changed.emit()
    @property
    def scale_line_thickness(self):
        """
        Thickness of the line around the scale bar and the ticks of the scale bar.
        :returntype: int
        """
        return self._scale_line_thickness
    @scale_line_thickness.setter
    def scale_line_thickness(self, value):
        value = int(value)
        if self._scale_line_thickness != value:
            self._scale_line_thickness = value
            self._params.scale_line_thickness = value
            self.changed.emit()
    @property
    def scale_position(self):
        """
        Position of the scale bar with respect to the image. Must be one of "Top", "Right", "Bottom" or "Left".
        :returntype: str
        """
        return self._scale_position
    @scale_position.setter
    def scale_position(self, value):
        value = str(value)
        if self._scale_position != value:
            self._scale_position = value
            self._params.scale_position = value
            self.changed.emit()
    @Slot(str)
    def set_scale_position(self, value):
        self.scale_position = value
    @property
    def scale_show(self):
        """
        Whether or not to show the scale bar
        :returntype: bool
        """
        return self._scale_show
    @scale_show.setter
    def scale_show(self, value):
        value = bool(value)
        if self._scale_show != value:
            self._scale_show = value
            self._params.scale_show = value
            self.changed.emit()
    @Slot(bool)
    def set_scale_show(self, value):
        self.scale_show = value
    @property
    def scale_bar_outside_image(self):
        """
        Whether the scale bar is rendered outside the image (into a QPicture
        returned by `drawScaleBar`) instead of painted directly over it.
        :returntype: bool
        """
        return self._scale_bar_outside_image
    @scale_bar_outside_image.setter
    def scale_bar_outside_image(self, value):
        value = bool(value)
        if self._scale_bar_outside_image != value:
            self._scale_bar_outside_image = value
            self._params.scale_bar_outside_image = value
            self.changed.emit()
    @property
    def scale_font(self):
        """
        Font used for the text of the scale bar.
        :returntype: QFont
        """
        return self._scale_font
    @scale_font.setter
    def scale_font(self, value):
        value = QFont(value)
        if self._scale_font != value:
            self._scale_font = value
            self._params.scale_font = value
            self.changed.emit()
    def drawScaleBar(self, painter, value_range, unit="", size=None):
        """Draw the scale bar; returns a QPicture when drawn outside the image."""
        if self.scale_show:
            sc = ScaleBarDrawer(position=self.scale_position,
                                transfer_function=self.transfer_function,
                                font=self.scale_font,
                                text_color=self.scale_text,
                                line_color=self.scale_line,
                                line_thickness=self.scale_line_thickness,
                                value_range=value_range,
                                unit=unit)
            log_debug("Drawing scale bar!")
            if not self.scale_bar_outside_image:
                sc.draw(painter, size)
            else:
                if size is None:
                    # Recover the drawing area in world coordinates.
                    viewport = painter.viewport()  # viewport rectangle
                    mat, ok = painter.worldMatrix().inverted()
                    if not ok:
                        raise ValueError("Transformation matrix of painter is singular.")
                    size = mat.mapRect(viewport)
                # Render into a QPicture so the caller can place it outside
                # the image area.
                pic = QPicture()
                new_painter = QPainter()
                new_painter.begin(pic)
                bounding_rect = sc.draw(new_painter, size)
                new_painter.end()
                pic.setBoundingRect(pic.boundingRect() | bounding_rect.toRect())
                log_debug("Returning picture %s" % (pic,))
                return pic
    @staticmethod
    def load(params, settings):
        # Fall back to black for invalid/missing stored colors.
        col = QColor(settings.value("ScaleText"))
        if not col.isValid():
            col = QColor(0, 0, 0)
        params.scale_text = col
        col = QColor(settings.value("ScaleLine"))
        if not col.isValid():
            col = QColor(0, 0, 0)
        params.scale_line = col
        try:
            params.scale_line_thickness = int(settings.value("ScaleLineThickness"))
        except (ValueError, TypeError):
            params.scale_line_thickness = 0
        params.scale_position = settings.value("ScalePosition", "Top")
        fnt = QFont(settings.value("ScaleFont", QFont()))
        params.scale_font = fnt
        params.scale_show = toBool(settings.value("ScaleShow", "True"))
        params.scale_bar_outside_image = toBool(settings.value("ScaleBarOutsideImage", "False"))
    @staticmethod
    def save(params, settings):
        settings.setValue("ScaleText", params.scale_text)
        settings.setValue("ScaleLine", params.scale_line)
        settings.setValue("ScaleLineThickness", params.scale_line_thickness)
        settings.setValue("ScalePosition", params.scale_position)
        settings.setValue("ScaleFont", params.scale_font)
        settings.setValue("ScaleShow", params.scale_show)
        settings.setValue("ScaleBarOutsideImage", params.scale_bar_outside_image)
def fixRangeParameters(m, M):
    """
    Return a ScaleBar-based parameter class whose value range is fixed
    to ``[m, M]`` (capping and symmetric coloring are disabled).
    """
    # NOTE(review): ``range`` shadows the builtin inside this closure.
    range = (m, M)
    class FixRangeParameters(ScaleBar):
        """
        Parameters for the theta object.
        """
        def __init__(self, params):
            ScaleBar.__init__(self, params)
            self.range = range
            self._transfer_function = params.transfer_function
            self._config = None
        @property
        def transfer_function(self):
            '''Transfer function used to convert values into colors
            :returntype: `TransferFunction`'''
            return self._transfer_function
        @transfer_function.setter
        def transfer_function(self, value):
            if self._transfer_function != value:
                self._transfer_function = TransferFunction(value)
                self._params.transfer_function = self._transfer_function
                self.changed.emit()
        @property
        def value_capping(self):
            # Capping is not supported for a fixed range.
            return None
        @value_capping.setter
        def value_capping(self, value):
            pass
        @property
        def symetric_coloring(self):
            return False
        def widget(self, parent):
            config = createForm("plot_param_theta.ui", parent)
            self._config = config
            config.changeColorMap.clicked.connect(self._changeColorMap)
            self.addScaleBarWidget(config)
            return self._config
        def drawScaleBar(self, painter, value_range, unit, size=None):
            # The *value_range* argument is ignored: the fixed range is used.
            return ScaleBar.drawScaleBar(self, painter, self.range, unit, size)
        @Slot()
        def _changeColorMap(self):
            dlg = transfer_fct_dlg()
            dlg.transfer_fct = self.transfer_function
            if dlg.exec_() == QDialog.Accepted:
                self.transfer_function = dlg.transfer_fct
            dlg.saveSettings("")
        @staticmethod
        def load(params, settings):
            ScaleBar.load(params, settings)
            tr = settings.value("TransferFunction", "")
            if tr:
                params.transfer_function = TransferFunction.loads(tr)
            else:
                # Default color map when nothing was stored.
                params.transfer_function = TransferFunction.hue_scale()
            params.symetric_coloring = False
            params.value_capping = None
        @staticmethod
        def save(params, settings):
            ScaleBar.save(params, settings)
            settings.setValue("TransferFunction", params.transfer_function.dumps())
    return FixRangeParameters
class TransferFunctionParameters(ScaleBar):
    """
    Parameters for continuous objects: maps scalar values onto colors via a
    transfer function, with optional symetric coloring and value capping.
    """
    def __init__(self, params):
        ScaleBar.__init__(self, params)
        self._transfer_function = params.transfer_function
        self._symetric_coloring = params.symetric_coloring
        self._value_capping = params.value_capping
        # Default range used until real data bounds are pushed in.
        self._minmax_values = (-100.0, 100.0)
        self._config = None

    @property
    def transfer_function(self):
        '''Transfer function used to convert values into colors

        :returntype: `TransferFunction`'''
        return self._transfer_function

    @transfer_function.setter
    def transfer_function(self, value):
        if self._transfer_function != value:
            # Copy the incoming function so later external edits don't leak in.
            self._transfer_function = TransferFunction(value)
            self._params.transfer_function = self._transfer_function
            self.changed.emit()

    @property
    def symetric_coloring(self):
        '''
        If true, the color scheme is forced to be symetric. i.e. If all
        values are of the same sign, then 0 is forced into the range.
        Otherwise, 0 is the middle color of the transfer function.

        :returntype: `bool`
        '''
        return self._symetric_coloring

    @Slot(bool)
    def _set_symetric_coloring(self, value):
        value = bool(value)
        if self._symetric_coloring != value:
            self._symetric_coloring = value
            self._params.symetric_coloring = value
            self.changed.emit()

    # BUG FIX: property.setter() returns a *new* property object; the original
    # code called it as a bare statement and discarded the result, so the
    # setter was never installed and `self.symetric_coloring = ...` raised
    # AttributeError. The result must be rebound to the attribute name.
    symetric_coloring = symetric_coloring.setter(_set_symetric_coloring)

    @Slot(bool)
    def set_symetric_coloring(self, value):
        self.symetric_coloring = value

    @property
    def value_capping(self):
        """
        If not None, value_capping gives the min and max of the color used.
        If symetric_coloring is True, the actual capping will be adjusted
        to a symetric one.

        :returntype: (float,float)|None
        """
        return self._value_capping

    @value_capping.setter
    def value_capping(self, value):
        if value is not None:
            value = (float(value[0]), float(value[1]))
        if self._value_capping != value:
            self._value_capping = value
            self._params.value_capping = value
            self.changed.emit()

    @property
    def minmax_values(self):
        '''
        Get the min and max of the values for the capping

        :returntype: (float,float)
        '''
        return self._minmax_values

    @minmax_values.setter
    def minmax_values(self, value):
        value = (float(value[0]), float(value[1]))
        if self._minmax_values != value:
            self._minmax_values = value
            self.changed.emit()
            # Keep the capping line edits in sync with the new bounds.
            if self._config is not None:
                self.resetMinMax(value)

    def resetMinMax(self, bounds):
        # Reflect the data bounds into the capping line edits.
        self._config.minCap.setText(u"{:.5g}".format(bounds[0]))
        self._config.maxCap.setText(u"{:.5g}".format(bounds[1]))

    def widget(self, parent):
        """Create, wire up and return the configuration widget."""
        config = createForm("plot_param_fct.ui", parent)
        self._config = config
        config.changeColorMap.clicked.connect(self._changeColorMap)
        config.symetricColoring.toggled[bool].connect(self.set_symetric_coloring)
        config.capping.toggled[bool].connect(self._cappingChanged)
        config.minCap.setValidator(QDoubleValidator(config.minCap))
        # NOTE(review): the validator parent is config.minCap for both line
        # edits — presumably only the parent widget matters here; confirm.
        config.maxCap.setValidator(QDoubleValidator(config.minCap))
        config.minCap.textChanged["const QString&"].connect(self._minCapStringChanged)
        config.maxCap.textChanged["const QString&"].connect(self._maxCapStringChanged)
        value = self.minmax_values
        self.resetMinMax(value)
        config.symetricColoring.setChecked(self._symetric_coloring)
        if self._value_capping is not None:
            config.capping.setChecked(True)
            config.minCap.setText(u"{:.5g}".format(self._value_capping[0]))
            config.maxCap.setText(u"{:.5g}".format(self._value_capping[1]))
        self.addScaleBarWidget(config)
        return self._config

    @Slot()
    def _changeColorMap(self):
        # Open the transfer function editor; a sticker at 0.5 marks the
        # middle of the range when symetric coloring is on.
        dlg = transfer_fct_dlg()
        if self._symetric_coloring:
            dlg.stickers = [0.5]
        dlg.transfer_fct = self.transfer_function
        if dlg.exec_() == QDialog.Accepted:
            self.transfer_function = dlg.transfer_fct
        dlg.stickers = []
        dlg.saveSettings("")

    @Slot(bool)
    def _cappingChanged(self, value):
        # Enabling capping takes the current line-edit values; disabling
        # clears the capping entirely.
        if value:
            self.value_capping = (float(self._config.minCap.text()), float(self._config.maxCap.text()))
        else:
            self.value_capping = None

    @Slot("const QString&")
    def _minCapStringChanged(self, value):
        try:
            value_double = float(value)
        except ValueError:
            # Ignore partial/invalid input while the user is typing.
            return
        cap = self.value_capping
        if cap is not None:
            if value_double != cap[0]:
                cap = (value_double, cap[1])
                self.value_capping = cap

    @Slot("const QString&")
    def _maxCapStringChanged(self, value):
        try:
            value_double = float(value)
        except ValueError:
            # Ignore partial/invalid input while the user is typing.
            return
        cap = self.value_capping
        if cap is not None:
            if value_double != cap[1]:
                cap = (cap[0], value_double)
                self.value_capping = cap

    @staticmethod
    def load(params, settings):
        """Restore the parameters from *settings*, with safe defaults."""
        ScaleBar.load(params, settings)
        tr = settings.value("TransferFunction", "")
        if tr:
            params.transfer_function = TransferFunction.loads(str(tr))
        else:
            params.transfer_function = TransferFunction.hue_scale()
        params.symetric_coloring = toBool(settings.value("SymetricColoring", "False"))
        isc = toBool(settings.value("IsCapping", "False"))
        if isc:
            vc = [0, 0]
            try:
                vc[0] = float(settings.value("ValueCappingMin"))
            except (ValueError, TypeError):
                vc[0] = 0
            try:
                vc[1] = float(settings.value("ValueCappingMax"))
            except (ValueError, TypeError):
                vc[1] = 1
            params.value_capping = vc
        else:
            params.value_capping = None

    @staticmethod
    def save(params, settings):
        """Persist the parameters into *settings*."""
        ScaleBar.save(params, settings)
        tf = params.transfer_function.dumps()
        settings.setValue("TransferFunction", tf)
        settings.setValue("SymetricColoring", params.symetric_coloring)
        if params.value_capping is not None:
            settings.setValue("IsCapping", True)
            settings.setValue("ValueCappingMin", params.value_capping[0])
            settings.setValue("ValueCappingMax", params.value_capping[1])
        else:
            settings.setValue("IsCapping", False)
class DirectionGrowthParameters(ScaleBar):
    """
    Parameters for growth along a direction. The direction is defined by two
    points taken from an external tracking data file.
    """
    def __init__(self, params):
        ScaleBar.__init__(self, params)
        self._transfer_function = params.transfer_function
        self._symetric_coloring = params.symetric_coloring
        self._value_capping = params.value_capping
        # Default range used until real data bounds are pushed in.
        self._minmax_values = (-100.0, 100.0)
        self._config = None
        self._data_file = ""
        self.data = None
        self._direction = None
        self._data_points = (0, 1)
        self._next_data_file = None
        self._orthogonal = params.orthogonal
        self._draw_line = params.draw_line
        self._line_width = params.line_width
        self._line_color = params.line_color
        # Debounce timer: the data file is (re)loaded 500 ms after the user
        # stops editing the file name (see _checkAndLoad/loadEdit).
        self.edit_timer = QTimer(self)
        self.edit_timer.setSingleShot(True)
        self.edit_timer.setInterval(500)
        self.edit_timer.timeout.connect(self.loadEdit)

    @property
    def data_file(self):
        """Data file holding the points for the direction"""
        return self._data_file

    @data_file.setter
    def data_file(self, value):
        value = path(value)
        if self._data_file != value:
            self._data_file = value
            self.load_data()
            self.changed.emit()

    def load_data(self, **loading_arguments):
        """Load the tracking data from ``self.data_file`` and refresh the
        point selection combo boxes if the config widget exists."""
        try:
            if self.data_file == self.result.current_filename:
                self.data = self.result.data
            else:
                # First, prepare the data by getting the images and computing how big they
                # should be
                f = open(self.data_file)
                first_line = f.readline()
                f.close()
                # A TRKR_VERSION header marks a full result file; anything
                # else is treated as raw tracking data.
                if first_line.startswith("TRKR_VERSION"):
                    result = Result(None)
                    result.load(self.data_file, **loading_arguments)
                    data = result.data
                else:
                    data = TrackingData()
                    data.load(self.data_file, **loading_arguments)
                data.copyAlignementAndScale(self.result.data)
                self.data = data
            self.points = list(self.data.cell_points)
            if self._config is not None:
                config = self._config
                config.point1.clear()
                config.point2.clear()
                for i in self.points:
                    log_debug("i = %s" % i)
                    config.point1.addItem(str(i))
                    config.point2.addItem(str(i))
                config.point1.setCurrentIndex(0)
                config.point2.setCurrentIndex(1)
        except RetryTrackingDataException as ex:
            # The loader asked for different arguments; retry with them.
            loading_arguments.update(ex.method_args)
            self.load_data(**loading_arguments)

    def direction(self, img_data):
        """Return the unit vector from the first to the second data point in
        *img_data*."""
        i1, i2 = self.data_points
        p1 = img_data[i1]
        p2 = img_data[i2]
        u = p2 - p1
        u /= norm(u.x(), u.y())
        return u

    @Slot("const QString&")
    def _changePoint1(self, value):
        try:
            value = int(value)
            if value != self.data_points[0]:
                self.data_points = (value, self.data_points[1])
        except ValueError as err:
            log_debug("Error while changing point1 = %s" % str(err))

    @Slot("const QString&")
    def _changePoint2(self, value):
        try:
            value = int(value)
            if value != self.data_points[1]:
                self.data_points = (self.data_points[0], value)
        except ValueError as err:
            # Fixed copy-paste: this handler is for point2, not point1.
            log_debug("Error while changing point2 = %s" % str(err))

    @property
    def data_points(self):
        """Ids of the data points defining the direction in the data file"""
        return self._data_points

    @data_points.setter
    def data_points(self, value):
        value = (int(value[0]), int(value[1]))
        if self._data_points != value:
            self._data_points = value
            self.changed.emit()

    @property
    def transfer_function(self):
        '''Transfer function used to convert values into colors

        :returntype: `TransferFunction`'''
        return self._transfer_function

    @transfer_function.setter
    def transfer_function(self, value):
        if self._transfer_function != value:
            self._transfer_function = TransferFunction(value)
            self._params.transfer_function = self._transfer_function
            self.changed.emit()

    @property
    def symetric_coloring(self):
        '''
        If true, the color scheme is forced to be symetric. i.e. If all
        values are of the same sign, then 0 is forced into the range.
        Otherwise, 0 is the middle color of the transfer function.

        :returntype: `bool`
        '''
        return self._symetric_coloring

    @Slot(bool)
    def _set_symetric_coloring(self, value):
        value = bool(value)
        if self._symetric_coloring != value:
            self._symetric_coloring = value
            self._params.symetric_coloring = value
            self.changed.emit()

    # BUG FIX: property.setter() returns a *new* property object; the original
    # code called it as a bare statement and discarded the result, so these
    # setters were never installed and `self.<prop> = ...` raised
    # AttributeError. The result must be rebound to the attribute name
    # (same fix applied to orthogonal, draw_line and line_width below).
    symetric_coloring = symetric_coloring.setter(_set_symetric_coloring)

    @property
    def orthogonal(self):
        """If true, the points mark the line orthogonal to the direction wanted"""
        return self._orthogonal

    @Slot(bool)
    def _set_orthogonal(self, value):
        value = bool(value)
        if self._orthogonal != value:
            self._orthogonal = value
            self._params.orthogonal = value
            self.changed.emit()

    orthogonal = orthogonal.setter(_set_orthogonal)

    @property
    def draw_line(self):
        """If true, draw the line defining the direction"""
        return self._draw_line

    @Slot(bool)
    def _set_draw_line(self, value):
        value = bool(value)
        if self._draw_line != value:
            self._draw_line = value
            self._params.draw_line = value
            self.changed.emit()

    draw_line = draw_line.setter(_set_draw_line)

    @property
    def line_color(self):
        """Color of the line defining the direction"""
        return self._line_color

    @line_color.setter
    def line_color(self, value):
        value = QColor(value)
        if self._line_color != value:
            self._line_color = value
            self._params.line_color = value
            self.changed.emit()

    @property
    def line_width(self):
        """Width of the line in pixels"""
        return self._line_width

    @Slot(int)
    def _set_line_width(self, value):
        value = int(value)
        if self._line_width != value:
            self._line_width = value
            self._params.line_width = value
            self.changed.emit()

    line_width = line_width.setter(_set_line_width)

    @property
    def value_capping(self):
        """
        If not None, value_capping gives the min and max of the color used.
        If symetric_coloring is True, the actual capping will be adjusted
        to a symetric one.

        :returntype: (float,float)|None
        """
        return self._value_capping

    @value_capping.setter
    def value_capping(self, value):
        if value is not None:
            value = (float(value[0]), float(value[1]))
        if self._value_capping != value:
            self._value_capping = value
            self._params.value_capping = value
            self.changed.emit()

    @property
    def minmax_values(self):
        '''
        Get the min and max of the values for the capping

        :returntype: (float,float)
        '''
        return self._minmax_values

    @minmax_values.setter
    def minmax_values(self, value):
        value = (float(value[0]), float(value[1]))
        if self._minmax_values != value:
            self._minmax_values = value
            self.changed.emit()
            if self._config is not None:
                self.resetMinMax(value)

    def resetMinMax(self, bounds):
        # Reflect the data bounds into the capping line edits.
        self._config.minCap.setText(u"{:.5g}".format(bounds[0]))
        self._config.maxCap.setText(u"{:.5g}".format(bounds[1]))

    @Slot()
    def _changeLineColor(self):
        if changeColor(self._config.lineColor):
            self.line_color = getColor(self._config.lineColor)

    def widget(self, parent):
        """Create, wire up and return the configuration widget."""
        config = createForm("plot_param_dir_fct.ui", parent)
        self._config = config
        config.selectDataFile.clicked.connect(self._selectDataFile)
        config.changeColorMap.clicked.connect(self._changeColorMap)
        config.dataFile.textChanged.connect(self._checkAndLoad)
        config.orthogonal.toggled.connect(self._set_orthogonal)
        config.symetricColoring.toggled[bool].connect(self._set_symetric_coloring)
        config.capping.toggled[bool].connect(self._cappingChanged)
        config.minCap.setValidator(QDoubleValidator(config.minCap))
        # NOTE(review): the validator parent is config.minCap for both line
        # edits — presumably only the parent widget matters here; confirm.
        config.maxCap.setValidator(QDoubleValidator(config.minCap))
        config.minCap.textChanged['QString'].connect(self._minCapStringChanged)
        config.maxCap.textChanged['QString'].connect(self._maxCapStringChanged)
        config.point1.currentIndexChanged['QString'].connect(self._changePoint1)
        config.point2.currentIndexChanged['QString'].connect(self._changePoint2)
        config.drawLine.toggled.connect(self._set_draw_line)
        config.lineWidth.valueChanged.connect(self._set_line_width)
        config.selectLineColor.clicked.connect(self._changeLineColor)
        config.dataFile.setText(self.data_file)
        value = self.minmax_values
        self.resetMinMax(value)
        config.orthogonal.setChecked(self.orthogonal)
        config.symetricColoring.setChecked(self._symetric_coloring)
        config.drawLine.setChecked(self.draw_line)
        config.lineWidth.setValue(self.line_width)
        setColor(config.lineColor, self.line_color)
        if self._value_capping is not None:
            config.capping.setChecked(True)
            config.minCap.setText(u"{:.5g}".format(self._value_capping[0]))
            config.maxCap.setText(u"{:.5g}".format(self._value_capping[1]))
        # If data was loaded before the widget existed, populate the point
        # selection combo boxes now.
        if self.data is not None:
            config = self._config
            config.point1.clear()
            config.point2.clear()
            for i in self.points:
                config.point1.addItem(str(i))
                config.point2.addItem(str(i))
            config.point1.setCurrentIndex(0)
            config.point2.setCurrentIndex(1)
        self.addScaleBarWidget(config)
        return self._config

    @Slot()
    def _selectDataFile(self):
        fn = QFileDialog.getOpenFileName(self._config, "Select the data file defining your line", self.data_file,
                                         "All data files (*.csv *.xls);;CSV Files (*.csv);;"
                                         "XLS files (*.xls);;"
                                         "All files (*.*)")
        if fn:
            self._config.dataFile.setText(fn)

    @Slot("const QString&")
    def _checkAndLoad(self, txt):
        # Only schedule a (debounced) load when the text names an existing file.
        pth = path(txt)
        if pth.exists() and pth.isfile():
            self._next_data_file = pth
            self.edit_timer.start()
        else:
            self._next_data_file = None
            self.edit_timer.stop()

    @Slot()
    def loadEdit(self):
        if self._next_data_file is not None:
            self.data_file = self._next_data_file

    @Slot()
    def _changeColorMap(self):
        dlg = transfer_fct_dlg()
        if self._symetric_coloring:
            dlg.stickers = [0.5]
        dlg.transfer_fct = self.transfer_function
        if dlg.exec_() == QDialog.Accepted:
            self.transfer_function = dlg.transfer_fct
        dlg.stickers = []
        dlg.saveSettings("")

    @Slot(bool)
    def _cappingChanged(self, value):
        if value:
            self.value_capping = (float(self._config.minCap.text()), float(self._config.maxCap.text()))
        else:
            self.value_capping = None

    @Slot("const QString&")
    def _minCapStringChanged(self, value):
        try:
            value_double = float(value)
        except ValueError:
            # Ignore partial/invalid input while the user is typing.
            return
        cap = self.value_capping
        if cap is not None:
            if value_double != cap[0]:
                cap = (value_double, cap[1])
                self.value_capping = cap

    @Slot("const QString&")
    def _maxCapStringChanged(self, value):
        try:
            value_double = float(value)
        except ValueError:
            # Ignore partial/invalid input while the user is typing.
            return
        cap = self.value_capping
        if cap is not None:
            if value_double != cap[1]:
                cap = (cap[0], value_double)
                self.value_capping = cap

    @staticmethod
    def load(params, settings):
        """Restore the parameters from *settings*, with safe defaults."""
        ScaleBar.load(params, settings)
        df = settings.value("DataFile", "")
        if df:
            params.data_file = path(df)
        else:
            params.data_file = None
        try:
            p0 = int(settings.value("DataPoint0"))
        except (ValueError, TypeError):
            p0 = 0
        try:
            p1 = int(settings.value("DataPoint1"))
        except (ValueError, TypeError):
            p1 = 1
        params.data_points = (p0, p1)
        tr = settings.value("TransferFunction", "")
        if tr:
            params.transfer_function = TransferFunction.loads(str(tr))
        else:
            params.transfer_function = TransferFunction.hue_scale()
        params.orthogonal = toBool(settings.value("Orthogonal"))
        params.symetric_coloring = toBool(settings.value("SymetricColoring", False))
        params.draw_line = toBool(settings.value("DrawLine", False))
        col = QColor(settings.value("LineColor"))
        if not col.isValid():
            col = QColor(0, 0, 0)
        params.line_color = col
        try:
            lw = int(settings.value("LineWidth", 0))
        except (ValueError, TypeError):
            lw = 0
        params.line_width = lw
        isc = toBool(settings.value("IsCapping"))
        if isc:
            vc = [0, 0]
            try:
                vc[0] = float(settings.value("ValueCappingMin"))
            except (ValueError, TypeError):
                vc[0] = 0
            try:
                vc[1] = float(settings.value("ValueCappingMax"))
            except (ValueError, TypeError):
                vc[1] = 1
            params.value_capping = vc
        else:
            params.value_capping = None

    @staticmethod
    def save(params, settings):
        """Persist the parameters into *settings*."""
        ScaleBar.save(params, settings)
        settings.setValue("DataFile", params.data_file)
        settings.setValue("DataPoint0", params.data_points[0])
        settings.setValue("DataPoint1", params.data_points[1])
        settings.setValue("Orthogonal", params.orthogonal)
        settings.setValue("DrawLine", params.draw_line)
        settings.setValue("LineWidth", params.line_width)
        settings.setValue("LineColor", params.line_color)
        tf = params.transfer_function.dumps()
        settings.setValue("TransferFunction", tf)
        settings.setValue("SymetricColoring", params.symetric_coloring)
        if params.value_capping is not None:
            settings.setValue("IsCapping", True)
            settings.setValue("ValueCappingMin", params.value_capping[0])
            settings.setValue("ValueCappingMax", params.value_capping[1])
        else:
            settings.setValue("IsCapping", False)
class ColorParameters(QObject):
    """
    Parameters for objects rendered with a single, fixed color.
    """
    # BUG FIX: the docstring above was originally placed *after* the
    # `changed` assignment, making it a dead string expression instead of
    # the class __doc__; it also wrongly said "continuous objects".
    changed = Signal()

    def __init__(self, params, parent=None):
        QObject.__init__(self, parent)
        log_debug("Parameter object: %s" % id(params))
        self._color = params.color
        self._params = params
        # Set by widget(); initialised here so the attribute always exists.
        self._config = None

    def widget(self, parent):
        """Create, wire up and return the configuration widget."""
        config = createForm("plot_param_color.ui", parent)
        setColor(config.color, self._color)
        config.selectColor.clicked.connect(self._changeColor)
        self._config = config
        return config

    @Slot()
    def _changeColor(self):
        if changeColor(self._config.color):
            self.color = getColor(self._config.color)

    @property
    def color(self):
        '''Color used for the rendering of the property.

        :returntype: `QColor`'''
        return self._color

    @color.setter
    def color(self, value):
        value = QColor(value)
        if self._color != value:
            self._color = value
            self._params.color = value
            self.changed.emit()

    @staticmethod
    def load(params, settings):
        """Restore the color from *settings*, defaulting to black."""
        log_debug("Loading with parameter object: %s" % id(params))
        color = QColor(settings.value("Color"))
        if not color.isValid():
            color = QColor(0, 0, 0)
        params.color = color

    @staticmethod
    def save(params, settings):
        """Persist the color into *settings*."""
        settings.setValue("Color", params.color)
| gpl-2.0 |
openhatch/new-mini-tasks | vendor/packages/south/south/introspection_plugins/django_audit_log.py | 154 | 1436 | """
South introspection rules for django-audit-log
"""
from django.contrib.auth.models import User
from django.conf import settings
from south.modelsinspector import add_introspection_rules
# Only register rules when the app is actually enabled for this project.
if "audit_log" in settings.INSTALLED_APPS:
    try:
        # Try and import the field so we can see if audit_log is available
        from audit_log.models import fields

        # Make sure the `to` and `null` parameters will be ignored
        rules = [(
            (fields.LastUserField,),
            [],
            {
                'to': ['rel.to', {'default': User}],
                'null': ['null', {'default': True}],
            },
        )]

        # Add the rules for the `LastUserField`.
        # The pattern is a regex: use a raw string so the `\.` escapes are
        # literal backslashes (non-raw '\.' triggers invalid-escape warnings
        # on modern Pythons).
        add_introspection_rules(
            rules,
            [r'^audit_log\.models\.fields\.LastUserField'],
        )
    except ImportError:
        # audit_log is listed in INSTALLED_APPS but not importable; skip.
        pass
| apache-2.0 |
jeanlinux/calibre | src/calibre/ebooks/oeb/polish/main.py | 11 | 10814 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import re, sys, os, time
from collections import namedtuple
from functools import partial
from calibre.ebooks.oeb.polish.container import get_container
from calibre.ebooks.oeb.polish.stats import StatsCollector
from calibre.ebooks.oeb.polish.subset import subset_all_fonts
from calibre.ebooks.oeb.polish.embed import embed_all_fonts
from calibre.ebooks.oeb.polish.cover import set_cover
from calibre.ebooks.oeb.polish.replace import smarten_punctuation
from calibre.ebooks.oeb.polish.jacket import (
replace_jacket, add_or_replace_jacket, find_existing_jacket, remove_jacket)
from calibre.ebooks.oeb.polish.css import remove_unused_css
from calibre.utils.logging import Log
# Default values for every action understood by polish()/polish_one().
# A False/None value means "do not perform this action".
ALL_OPTS = {
    'embed': False,
    'subset': False,
    'opf': None,
    'cover': None,
    'jacket': False,
    'remove_jacket':False,
    'smarten_punctuation':False,
    'remove_unused_css':False,
}

# Optional tweaks altering how individual actions behave (see polish_one()).
CUSTOMIZATION = {
    'remove_unused_classes': False,
}

# Book formats that can be polished in place.
SUPPORTED = {'EPUB', 'AZW3'}
# Help {{{
# HTML help strings shown in the GUI; CLI_HELP below derives the plain-text
# command line versions from these.
HELP = {'about': _(
'''\
<p><i>Polishing books</i> is all about putting the shine of perfection onto
your carefully crafted ebooks.</p>
<p>Polishing tries to minimize the changes to the internal code of your ebook.
Unlike conversion, it <i>does not</i> flatten CSS, rename files, change font
sizes, adjust margins, etc. Every action performs only the minimum set of
changes needed for the desired effect.</p>
<p>You should use this tool as the last step in your ebook creation process.</p>
{0}
<p>Note that polishing only works on files in the %s formats.</p>\
''')%_(' or ').join(sorted('<b>%s</b>'%x for x in SUPPORTED)),

'embed': _('''\
<p>Embed all fonts that are referenced in the document and are not already embedded.
This will scan your computer for the fonts, and if they are found, they will be
embedded into the document.</p>
<p>Please ensure that you have the proper license for embedding the fonts used in this document.</p>
'''),

'subset': _('''\
<p>Subsetting fonts means reducing an embedded font to contain
only the characters used from that font in the book. This
greatly reduces the size of the font files (halving the font
file sizes is common).</p>
<p>For example, if the book uses a specific font for headers,
then subsetting will reduce that font to contain only the
characters present in the actual headers in the book. Or if the
book embeds the bold and italic versions of a font, but bold
and italic text is relatively rare, or absent altogether, then
the bold and italic fonts can either be reduced to only a few
characters or completely removed.</p>
<p>The only downside to subsetting fonts is that if, at a later
date you decide to add more text to your books, the newly added
text might not be covered by the subset font.</p>
'''),

'jacket': _('''\
<p>Insert a "book jacket" page at the start of the book that contains
all the book metadata such as title, tags, authors, series, comments,
etc. Any previous book jacket will be replaced.</p>'''),

'remove_jacket': _('''\
<p>Remove a previous inserted book jacket page.</p>
'''),

'smarten_punctuation': _('''\
<p>Convert plain text dashes, ellipsis, quotes, multiple hyphens, etc. into their
typographically correct equivalents.</p>
<p>Note that the algorithm can sometimes generate incorrect results, especially
when single quotes at the start of contractions are involved.</p>
'''),

'remove_unused_css': _('''\
<p>Remove all unused CSS rules from stylesheets and <style> tags. Some books
created from production templates can have a large number of extra CSS rules
that dont match any actual content. These extra rules can slow down readers
that need to parse them all.</p>
'''),
}
def hfix(name, raw):
    """Convert the HTML help string *raw* into plain text for CLI usage.

    Paragraph breaks (blank lines) become single newlines, soft line breaks
    collapse to spaces, and HTML entities are turned back into characters.
    The 'about' entry is returned as-is with its {0} placeholder removed.
    """
    if name == 'about':
        return raw.format('')
    # Protect paragraph breaks with a sentinel, flatten soft breaks, then
    # restore the paragraph breaks as single newlines.
    raw = raw.replace('\n\n', '__XX__')
    raw = raw.replace('\n', ' ')
    raw = raw.replace('__XX__', '\n')
    # BUG FIX: unescape HTML entities so literal angle brackets (written as
    # entities to survive the tag-stripping regex applied before this call)
    # show up correctly on the command line. The original line replaced
    # '<' with '<' and '>' with '>' — a no-op produced by entity-decoding
    # corruption of the source.
    raw = raw.replace('&lt;', '<').replace('&gt;', '>')
    return raw
# Plain text (tag-stripped) versions of the help strings, for CLI output.
CLI_HELP = {x:hfix(x, re.sub('<.*?>', '', y)) for x, y in HELP.iteritems()}
# }}}
def update_metadata(ebook, new_opf):
    """Replace the metadata in *ebook*'s OPF with the metadata read from the
    OPF file at *new_opf*, rewriting the container's OPF in place."""
    from calibre.ebooks.metadata.opf2 import OPF
    from calibre.ebooks.metadata.epub import update_metadata

    opfpath = ebook.name_to_abspath(ebook.opf_name)
    with ebook.open(ebook.opf_name, 'r+b') as dest, open(new_opf, 'rb') as src:
        book_opf = OPF(dest, basedir=os.path.dirname(opfpath),
                       populate_spine=False, unquote_urls=False)
        incoming = OPF(src, unquote_urls=False, populate_spine=False).to_book_metadata()
        # Drop cover information from the incoming metadata; covers are
        # handled separately (see set_cover).
        incoming.cover, incoming.cover_data = None, (None, None)
        update_metadata(book_opf, incoming, apply_null=True, update_timestamp=True)
        dest.seek(0)
        dest.truncate()
        dest.write(book_opf.render())
def polish_one(ebook, opts, report, customization=None):
    """Apply the polishing actions enabled in *opts* to the open container
    *ebook*, sending human-readable progress messages to the *report*
    callable. Returns True if the book was modified.
    """
    # PEP 8 (E731): use a nested def instead of a lambda assignment.
    def rt(x):
        # Emit a section header in the report output.
        report('\n### ' + x)

    jacket = None
    changed = False
    customization = customization or CUSTOMIZATION.copy()

    if opts.subset or opts.embed:
        # Font statistics are needed by both embedding and subsetting.
        stats = StatsCollector(ebook, do_embed=opts.embed)

    if opts.opf:
        changed = True
        rt(_('Updating metadata'))
        update_metadata(ebook, opts.opf)
        jacket = find_existing_jacket(ebook)
        if jacket is not None:
            # Refresh any existing jacket so it reflects the new metadata.
            replace_jacket(ebook, jacket)
            report(_('Updated metadata jacket'))
        report(_('Metadata updated\n'))

    if opts.cover:
        changed = True
        rt(_('Setting cover'))
        set_cover(ebook, opts.cover, report)
        report('')

    if opts.jacket:
        changed = True
        rt(_('Inserting metadata jacket'))
        if jacket is None:
            if add_or_replace_jacket(ebook):
                report(_('Existing metadata jacket replaced'))
            else:
                report(_('Metadata jacket inserted'))
        else:
            report(_('Existing metadata jacket replaced'))
        report('')

    if opts.remove_jacket:
        rt(_('Removing metadata jacket'))
        if remove_jacket(ebook):
            report(_('Metadata jacket removed'))
            changed = True
        else:
            report(_('No metadata jacket found'))
        report('')

    if opts.smarten_punctuation:
        rt(_('Smartening punctuation'))
        if smarten_punctuation(ebook, report):
            changed = True
        report('')

    if opts.embed:
        rt(_('Embedding referenced fonts'))
        if embed_all_fonts(ebook, stats, report):
            changed = True
        report('')

    if opts.subset:
        rt(_('Subsetting embedded fonts'))
        if subset_all_fonts(ebook, stats.font_stats, report):
            changed = True
        report('')

    if opts.remove_unused_css:
        rt(_('Removing unused CSS rules'))
        if remove_unused_css(ebook, report, remove_unused_classes=customization['remove_unused_classes']):
            changed = True
        report('')

    return changed
def polish(file_map, opts, log, report):
    """Polish every book in *file_map* (a mapping of input path to output
    path) according to *opts*, emitting progress via the *report* callable."""
    started = time.time()
    for src, dest in file_map.iteritems():
        report(_('## Polishing: %s')%(src.rpartition('.')[-1].upper()))
        container = get_container(src, log)
        polish_one(container, opts, report)
        container.commit(dest)
        report('-'*70)
    report(_('Polishing took: %.1f seconds')%(time.time()-started))
# Banner used to separate the final report from the rest of the log output.
REPORT = '{0} REPORT {0}'.format('-'*30)
def gui_polish(data):
    """GUI entry point: *data* maps option names to values plus a 'files'
    list; each file is polished in place. Returns the textual report."""
    files = data.pop('files')
    # Drop actions the user did not enable in the dialog.
    if not data.pop('metadata'):
        data.pop('opf')
    if not data.pop('do_cover'):
        data.pop('cover', None)
    opt_values = ALL_OPTS.copy()
    opt_values.update(data)
    Options = namedtuple('Options', ' '.join(ALL_OPTS.iterkeys()))
    opts = Options(**opt_values)
    log = Log(level=Log.DEBUG)
    report = []
    # In-place polishing: every input file is its own output file.
    polish({f:f for f in files}, opts, log, report.append)
    log('')
    log(REPORT)
    for msg in report:
        log(msg)
    return '\n\n'.join(report)
def tweak_polish(container, actions, customization=None):
    """Run the polish *actions* on an already-open *container*.

    Returns a (report_lines, changed) tuple."""
    opt_values = ALL_OPTS.copy()
    opt_values.update(actions)
    Options = namedtuple('Options', ' '.join(ALL_OPTS.iterkeys()))
    report = []
    changed = polish_one(container, Options(**opt_values), report.append,
                         customization=customization)
    return report, changed
def option_parser():
    """Build and return the command line option parser for ebook-polish."""
    from calibre.utils.config import OptionParser
    USAGE = '%prog [options] input_file [output_file]\n\n' + re.sub(
        r'<.*?>', '', CLI_HELP['about'])
    parser = OptionParser(usage=USAGE)
    add = parser.add_option
    # Boolean switches all share the same defaults.
    flag = partial(add, default=False, action='store_true')
    flag('--embed-fonts', '-e', dest='embed', help=CLI_HELP['embed'])
    flag('--subset-fonts', '-f', dest='subset', help=CLI_HELP['subset'])
    add('--cover', '-c', help=_(
        'Path to a cover image. Changes the cover specified in the ebook. '
        'If no cover is present, or the cover is not properly identified, inserts a new cover.'))
    add('--opf', '-o', help=_(
        'Path to an OPF file. The metadata in the book is updated from the OPF file.'))
    flag('--jacket', '-j', help=CLI_HELP['jacket'])
    flag('--remove-jacket', help=CLI_HELP['remove_jacket'])
    flag('--smarten-punctuation', '-p', help=CLI_HELP['smarten_punctuation'])
    flag('--remove-unused-css', '-u', help=CLI_HELP['remove_unused_css'])
    flag('--verbose', help=_('Produce more verbose output, useful for debugging.'))
    return parser
def main(args=None):
    """Command line entry point: parse options, polish the input book and
    print the resulting report."""
    parser = option_parser()
    opts, args = parser.parse_args(args or sys.argv[1:])
    log = Log(level=Log.DEBUG if opts.verbose else Log.INFO)

    # Guard clauses for bad argument counts.
    if not args:
        parser.print_help()
        log.error(_('You must provide the input file to polish'))
        raise SystemExit(1)
    if len(args) > 2:
        parser.print_help()
        log.error(_('Unknown extra arguments'))
        raise SystemExit(1)

    if len(args) == 1:
        inbook = args[0]
        base, ext = inbook.rpartition('.')[0::2]
        outbook = base + '_polished.' + ext
    else:
        inbook, outbook = args

    # Collect the recognized option values into an immutable record.
    opt_values = {k: getattr(opts, k, None) for k in ALL_OPTS}
    Options = namedtuple('Options', ' '.join(opt_values.iterkeys()))
    popts = Options(**opt_values)

    if not any(getattr(popts, name) for name in ALL_OPTS):
        parser.print_help()
        log.error(_('You must specify at least one action to perform'))
        raise SystemExit(1)

    report = []
    polish({inbook:outbook}, popts, log, report.append)
    log('')
    log(REPORT)
    for msg in report:
        log(msg)
    log('Output written to:', outbook)
# Allow running this module directly as a script.
if __name__ == '__main__':
    main()
| gpl-3.0 |
MKTCloud/MKTCloud | horizon/test/tests/middleware.py | 12 | 1168 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from horizon import exceptions
from horizon import middleware
from horizon.test import helpers as test
class MiddlewareTests(test.TestCase):
    def test_redirect_login_fail_to_login(self):
        """An unauthorized error raised while posting to the login URL must
        redirect the user back to the login page."""
        login_url = settings.LOGIN_URL
        request = self.factory.post(login_url)
        mw = middleware.HorizonMiddleware()
        response = mw.process_exception(request, exceptions.NotAuthorized())
        response.client = self.client
        self.assertRedirects(response, login_url)
| apache-2.0 |
brokenjacobs/ansible | lib/ansible/modules/cloud/ovirt/ovirt_disks.py | 7 | 24625 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_disks
short_description: "Module to manage Virtual Machine and floating disks in oVirt/RHV"
version_added: "2.2"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage Virtual Machine and floating disks in oVirt/RHV."
options:
id:
description:
- "ID of the disk to manage. Either C(id) or C(name) is required."
name:
description:
- "Name of the disk to manage. Either C(id) or C(name)/C(alias) is required."
aliases: ['alias']
vm_name:
description:
- "Name of the Virtual Machine to manage. Either C(vm_id) or C(vm_name) is required if C(state) is I(attached) or I(detached)."
vm_id:
description:
- "ID of the Virtual Machine to manage. Either C(vm_id) or C(vm_name) is required if C(state) is I(attached) or I(detached)."
state:
description:
- "Should the Virtual Machine disk be present/absent/attached/detached."
choices: ['present', 'absent', 'attached', 'detached']
default: 'present'
download_image_path:
description:
- "Path on a file system where disk should be downloaded."
- "Note that you must have an valid oVirt/RHV engine CA in your system trust store
or you must provide it in C(ca_file) parameter."
- "Note that the disk is not downloaded when the file already exists,
but you can forcibly download the disk when using C(force) I (true)."
version_added: "2.3"
upload_image_path:
description:
- "Path to disk image, which should be uploaded."
- "Note that currently we support only compatibility version 0.10 of the qcow disk."
- "Note that you must have an valid oVirt/RHV engine CA in your system trust store
or you must provide it in C(ca_file) parameter."
- "Note that there is no reliable way to achieve idempotency, so
if you want to upload the disk even if the disk with C(id) or C(name) exists,
then please use C(force) I(true). If you will use C(force) I(false), which
is default, then the disk image won't be uploaded."
version_added: "2.3"
size:
description:
- "Size of the disk. Size should be specified using IEC standard units.
For example 10GiB, 1024MiB, etc."
- "Size can be only increased, not decreased."
interface:
description:
- "Driver of the storage interface."
choices: ['virtio', 'ide', 'virtio_scsi']
default: 'virtio'
format:
description:
- Specify format of the disk.
- If (cow) format is used, disk will by created as sparse, so space will be allocated for the volume as needed, also known as I(thin provision).
- If (raw) format is used, disk storage will be allocated right away, also known as I(preallocated).
- Note that this option isn't idempotent as it's not currently possible to change format of the disk via API.
choices: ['raw', 'cow']
storage_domain:
description:
- "Storage domain name where disk should be created. By default storage is chosen by oVirt/RHV engine."
storage_domains:
description:
- "Storage domain names where disk should be copied."
- "C(**IMPORTANT**)"
- "There is no reliable way to achieve idempotency, so every time
you specify this parameter the disks are copied, so please handle
your playbook accordingly to not copy the disks all the time. This
is valid only for VM and floating disks, template disks works
as expected."
version_added: "2.3"
force:
description:
- "Please take a look at C(image_path) documentation to see the correct
usage of this parameter."
version_added: "2.3"
profile:
description:
- "Disk profile name to be attached to disk. By default profile is chosen by oVirt/RHV engine."
bootable:
description:
- "I(True) if the disk should be bootable. By default when disk is created it isn't bootable."
shareable:
description:
- "I(True) if the disk should be shareable. By default when disk is created it isn't shareable."
logical_unit:
description:
- "Dictionary which describes LUN to be directly attached to VM:"
- "C(address) - Address of the storage server. Used by iSCSI."
- "C(port) - Port of the storage server. Used by iSCSI."
- "C(target) - iSCSI target."
- "C(lun_id) - LUN id."
- "C(username) - CHAP Username to be used to access storage server. Used by iSCSI."
- "C(password) - CHAP Password of the user to be used to access storage server. Used by iSCSI."
- "C(storage_type) - Storage type either I(fcp) or I(iscsi)."
sparsify:
description:
- "I(True) if the disk should be sparsified."
- "Sparsification frees space in the disk image that is not used by
its filesystem. As a result, the image will occupy less space on
the storage."
- "Note that this parameter isn't idempotent, as it's not possible
to check if the disk should be or should not be sparsified."
version_added: "2.4"
openstack_volume_type:
description:
- "Name of the openstack volume type. This is valid when working
with cinder."
version_added: "2.4"
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Create and attach new disk to VM
- ovirt_disks:
name: myvm_disk
vm_name: rhel7
size: 10GiB
format: cow
interface: virtio
# Attach logical unit to VM rhel7
- ovirt_disks:
vm_name: rhel7
logical_unit:
target: iqn.2016-08-09.brq.str-01:omachace
id: 1IET_000d0001
address: 10.34.63.204
interface: virtio
# Detach disk from VM
- ovirt_disks:
state: detached
name: myvm_disk
vm_name: rhel7
size: 10GiB
format: cow
interface: virtio
# Upload local image to disk and attach it to vm:
# Since Ansible 2.3
- ovirt_disks:
name: mydisk
vm_name: myvm
interface: virtio
size: 10GiB
format: cow
image_path: /path/to/mydisk.qcow2
storage_domain: data
# Download disk to local file system:
# Since Ansible 2.3
- ovirt_disks:
id: 7de90f31-222c-436c-a1ca-7e655bd5b60c
download_image_path: /home/user/mydisk.qcow2
'''
RETURN = '''
id:
description: "ID of the managed disk"
returned: "On success if disk is found."
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
disk:
description: "Dictionary of all the disk attributes. Disk attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/disk."
returned: "On success if disk is found and C(vm_id) or C(vm_name) wasn't passed."
type: dict
disk_attachment:
description: "Dictionary of all the disk attachment attributes. Disk attachment attributes can be found
on your oVirt/RHV instance at following url:
http://ovirt.github.io/ovirt-engine-api-model/master/#types/disk_attachment."
returned: "On success if disk is found and C(vm_id) or C(vm_name) was passed and VM was found."
type: dict
'''
import os
import time
import traceback
import ssl
from httplib import HTTPSConnection
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
check_params,
create_connection,
convert_to_bytes,
equal,
follow_link,
get_id_by_name,
ovirt_full_argument_spec,
search_by_name,
wait,
)
def _search_by_lun(disks_service, lun_id):
"""
Find disk by LUN ID.
"""
res = [
disk for disk in disks_service.list(search='disk_type=lun') if (
disk.lun_storage.id == lun_id
)
]
return res[0] if res else None
def transfer(connection, module, direction, transfer_func):
    """Run an image transfer (upload or download) for the disk in ``module.params['id']``.

    Creates an ImageTransfer via the engine API, waits until it leaves the
    INITIALIZING phase, opens an HTTPS connection to the image-IO proxy and
    hands control to ``transfer_func`` to move the actual bytes. The transfer
    is always finalized, and the final phase is checked for failure.

    :param connection: ovirtsdk4 Connection to the engine.
    :param module: AnsibleModule (provides params: id, auth, poll_interval, ...).
    :param direction: otypes.ImageTransferDirection.UPLOAD or .DOWNLOAD.
    :param transfer_func: callable(transfer_service, proxy_connection,
        proxy_url, signed_ticket) that performs the byte transfer.
    :returns: True on success.
    :raises Exception: if the transfer ends in a failed/cancelled phase.
    """
    transfers_service = connection.system_service().image_transfers_service()
    transfer = transfers_service.add(
        otypes.ImageTransfer(
            image=otypes.Image(
                id=module.params['id'],
            ),
            direction=direction,
        )
    )
    transfer_service = transfers_service.image_transfer_service(transfer.id)
    try:
        # After adding a new transfer for the disk, the transfer's status will be INITIALIZING.
        # Wait until the init phase is over. The actual transfer can start when its status is "Transferring".
        while transfer.phase == otypes.ImageTransferPhase.INITIALIZING:
            time.sleep(module.params['poll_interval'])
            transfer = transfer_service.get()
        # Talk to the image-IO proxy over TLS; honour the module's auth
        # settings for certificate verification.
        proxy_url = urlparse(transfer.proxy_url)
        context = ssl.create_default_context()
        auth = module.params['auth']
        if auth.get('insecure'):
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
        elif auth.get('ca_file'):
            context.load_verify_locations(cafile=auth.get('ca_file'))
        proxy_connection = HTTPSConnection(
            proxy_url.hostname,
            proxy_url.port,
            context=context,
        )
        transfer_func(
            transfer_service,
            proxy_connection,
            proxy_url,
            transfer.signed_ticket
        )
        return True
    finally:
        # Always finalize, then wait for the engine to settle before deciding
        # whether the transfer actually succeeded.
        transfer_service.finalize()
        while transfer.phase in [
            otypes.ImageTransferPhase.TRANSFERRING,
            otypes.ImageTransferPhase.FINALIZING_SUCCESS,
        ]:
            time.sleep(module.params['poll_interval'])
            transfer = transfer_service.get()
        if transfer.phase in [
            otypes.ImageTransferPhase.UNKNOWN,
            otypes.ImageTransferPhase.FINISHED_FAILURE,
            otypes.ImageTransferPhase.FINALIZING_FAILURE,
            otypes.ImageTransferPhase.CANCELLED,
        ]:
            # NOTE(review): this message says "uploading" even when the
            # direction was DOWNLOAD.
            raise Exception(
                "Error occurred while uploading image. The transfer is in %s" % transfer.phase
            )
        # For direct-LUN backed disks, additionally wait until the disk
        # returns to the OK status.
        if module.params.get('logical_unit'):
            disks_service = connection.system_service().disks_service()
            wait(
                service=disks_service.service(module.params['id']),
                condition=lambda d: d.status == otypes.DiskStatus.OK,
                wait=module.params['wait'],
                timeout=module.params['timeout'],
            )
def download_disk_image(connection, module):
    """Download the disk image identified by ``module.params['id']``.

    Writes the image to ``module.params['download_image_path']`` by issuing
    ranged GET requests (8 MiB per request) against the image-IO proxy.
    Returns the result of :func:`transfer` (True on success).
    """
    def _transfer(transfer_service, proxy_connection, proxy_url, transfer_ticket):
        disks_service = connection.system_service().disks_service()
        disk = disks_service.disk_service(module.params['id']).get()
        size = disk.provisioned_size
        transfer_headers = {
            'Authorization': transfer_ticket,
        }
        with open(module.params['download_image_path'], "wb") as mydisk:
            pos = 0
            MiB_per_request = 8
            chunk_size = 1024 * 1024 * MiB_per_request
            while pos < size:
                # Keep the transfer session alive during long downloads.
                transfer_service.extend()
                # Request the next chunk; the Range end index is inclusive.
                transfer_headers['Range'] = 'bytes=%d-%d' % (pos, min(size, pos + chunk_size) - 1)
                proxy_connection.request(
                    'GET',
                    proxy_url.path,
                    headers=transfer_headers,
                )
                r = proxy_connection.getresponse()
                if r.status >= 300:
                    raise Exception("Error: %s" % r.read())
                mydisk.write(r.read())
                pos += chunk_size
    return transfer(
        connection,
        module,
        otypes.ImageTransferDirection.DOWNLOAD,
        transfer_func=_transfer,
    )
def upload_disk_image(connection, module):
    """Upload a local disk image to the disk given by ``module.params['id']``.

    Streams the file at ``module.params['upload_image_path']`` through the
    oVirt image-IO proxy in 8 MiB chunks via ranged PUT requests.
    Returns the result of :func:`transfer` (True on success).
    """
    def _transfer(transfer_service, proxy_connection, proxy_url, transfer_ticket):
        path = module.params['upload_image_path']
        transfer_headers = {
            'Authorization': transfer_ticket,
        }
        with open(path, "rb") as disk:
            pos = 0
            MiB_per_request = 8
            size = os.path.getsize(path)
            chunk_size = 1024 * 1024 * MiB_per_request
            while pos < size:
                # Keep the transfer session alive during long uploads.
                transfer_service.extend()
                # Content-Range end index is inclusive.
                transfer_headers['Content-Range'] = "bytes %d-%d/%d" % (pos, min(pos + chunk_size, size) - 1, size)
                proxy_connection.request(
                    'PUT',
                    proxy_url.path,
                    disk.read(chunk_size),
                    headers=transfer_headers,
                )
                r = proxy_connection.getresponse()
                if r.status >= 400:
                    raise Exception("Failed to upload disk image.")
                pos += chunk_size
    return transfer(
        connection,
        module,
        otypes.ImageTransferDirection.UPLOAD,
        transfer_func=_transfer,
    )
class DisksModule(BaseModule):
    """BaseModule implementation managing plain (VM/floating) disks."""

    def build_entity(self):
        """Build the ``otypes.Disk`` entity from the module parameters."""
        logical_unit = self._module.params.get('logical_unit')
        disk = otypes.Disk(
            id=self._module.params.get('id'),
            name=self._module.params.get('name'),
            description=self._module.params.get('description'),
            format=otypes.DiskFormat(
                self._module.params.get('format')
            ) if self._module.params.get('format') else None,
            # 'cow' disks are thin provisioned (sparse), 'raw' preallocated.
            sparse=self._module.params.get('format') != 'raw',
            openstack_volume_type=otypes.OpenStackVolumeType(
                name=self.param('openstack_volume_type')
            ) if self.param('openstack_volume_type') else None,
            provisioned_size=convert_to_bytes(
                self._module.params.get('size')
            ),
            storage_domains=[
                otypes.StorageDomain(
                    name=self._module.params.get('storage_domain'),
                ),
            ],
            shareable=self._module.params.get('shareable'),
            # Direct-LUN attachment: only built when 'logical_unit' is given.
            lun_storage=otypes.HostStorage(
                type=otypes.StorageType(
                    logical_unit.get('storage_type', 'iscsi')
                ),
                logical_units=[
                    otypes.LogicalUnit(
                        address=logical_unit.get('address'),
                        port=logical_unit.get('port', 3260),
                        target=logical_unit.get('target'),
                        id=logical_unit.get('id'),
                        username=logical_unit.get('username'),
                        password=logical_unit.get('password'),
                    )
                ],
            ) if logical_unit else None,
        )
        # Older SDK versions may lack 'initial_size'; set it only if present.
        if hasattr(disk, 'initial_size'):
            disk.initial_size = convert_to_bytes(
                self._module.params.get('size')
            )
        return disk

    def update_storage_domains(self, disk_id):
        """Move and/or copy the disk between storage domains.

        :param disk_id: id of the disk to relocate.
        :returns: True if any move/copy action changed anything.
        """
        changed = False
        disk_service = self._service.service(disk_id)
        disk = disk_service.get()
        sds_service = self._connection.system_service().storage_domains_service()
        # We don't support move/copy for non file based storages:
        if disk.storage_type != otypes.DiskStorageType.IMAGE:
            return changed
        # Initiate move:
        if self._module.params['storage_domain']:
            new_disk_storage_id = get_id_by_name(sds_service, self._module.params['storage_domain'])
            changed = self.action(
                action='move',
                entity=disk,
                action_condition=lambda d: new_disk_storage_id != d.storage_domains[0].id,
                wait_condition=lambda d: d.status == otypes.DiskStatus.OK,
                storage_domain=otypes.StorageDomain(
                    id=new_disk_storage_id,
                ),
                post_action=lambda _: time.sleep(self._module.params['poll_interval']),
            )['changed']
        # Copy the disk to every listed storage domain it is not already on.
        if self._module.params['storage_domains']:
            for sd in self._module.params['storage_domains']:
                new_disk_storage = search_by_name(sds_service, sd)
                changed = changed or self.action(
                    action='copy',
                    entity=disk,
                    action_condition=(
                        lambda disk: new_disk_storage.id not in [sd.id for sd in disk.storage_domains]
                    ),
                    wait_condition=lambda disk: disk.status == otypes.DiskStatus.OK,
                    storage_domain=otypes.StorageDomain(
                        id=new_disk_storage.id,
                    ),
                )['changed']
        return changed

    def _update_check(self, entity):
        """Return True when the existing disk already matches the parameters."""
        return (
            equal(self._module.params.get('description'), entity.description) and
            equal(convert_to_bytes(self._module.params.get('size')), entity.provisioned_size) and
            equal(self._module.params.get('shareable'), entity.shareable)
        )
class DiskAttachmentsModule(DisksModule):
    """DisksModule variant managing disks as attachments of a specific VM."""

    def build_entity(self):
        """Wrap the disk entity in an ``otypes.DiskAttachment``."""
        return otypes.DiskAttachment(
            disk=super(DiskAttachmentsModule, self).build_entity(),
            interface=otypes.DiskInterface(
                self._module.params.get('interface')
            ) if self._module.params.get('interface') else None,
            bootable=self._module.params.get('bootable'),
            # Newly created attachments are always activated.
            active=True,
        )

    def update_check(self, entity):
        """Return True when the existing attachment already matches the parameters."""
        return (
            super(DiskAttachmentsModule, self)._update_check(follow_link(self._connection, entity.disk)) and
            equal(self._module.params.get('interface'), str(entity.interface)) and
            equal(self._module.params.get('bootable'), entity.bootable)
        )
def main():
    """Ansible entry point for the ovirt_disks module.

    Flow:
      1. build the argument spec and validate parameters,
      2. create/update the disk (or remove it for state=absent),
      3. optionally upload/download the disk image,
      4. optionally attach/detach the disk to/from a VM.
    """
    argument_spec = ovirt_full_argument_spec(
        state=dict(
            choices=['present', 'absent', 'attached', 'detached'],
            default='present'
        ),
        id=dict(default=None),
        name=dict(default=None, aliases=['alias']),
        vm_name=dict(default=None),
        vm_id=dict(default=None),
        size=dict(default=None),
        interface=dict(default=None,),
        storage_domain=dict(default=None),
        storage_domains=dict(default=None, type='list'),
        profile=dict(default=None),
        format=dict(default='cow', choices=['raw', 'cow']),
        bootable=dict(default=None, type='bool'),
        shareable=dict(default=None, type='bool'),
        logical_unit=dict(default=None, type='dict'),
        download_image_path=dict(default=None),
        upload_image_path=dict(default=None, aliases=['image_path']),
        force=dict(default=False, type='bool'),
        sparsify=dict(default=None, type='bool'),
        openstack_volume_type=dict(default=None),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    check_sdk(module)
    check_params(module)

    auth = module.params.get('auth')
    # Initialized before the try block so the finally clause can safely close
    # the connection even when create_connection() itself raises.
    connection = None
    try:
        disk = None
        state = module.params['state']
        connection = create_connection(auth)
        disks_service = connection.system_service().disks_service()
        disks_module = DisksModule(
            connection=connection,
            module=module,
            service=disks_service,
        )

        lun = module.params.get('logical_unit')
        if lun:
            # Direct-LUN disks cannot be found by name; look them up by LUN id.
            disk = _search_by_lun(disks_service, lun.get('id'))

        ret = None
        # First take care of creating the disk, if needed:
        if state in ('present', 'detached', 'attached'):
            ret = disks_module.create(
                entity=disk,
                result_state=otypes.DiskStatus.OK if lun is None else None,
            )
            is_new_disk = ret['changed']
            ret['changed'] = ret['changed'] or disks_module.update_storage_domains(ret['id'])
            # We need to pass ID to the module, so in case we want detach/attach disk
            # we have this ID specified to attach/detach method:
            module.params['id'] = ret['id'] if disk is None else disk.id

            # Upload disk image in case it's new disk or force parameter is passed:
            if module.params['upload_image_path'] and (is_new_disk or module.params['force']):
                uploaded = upload_disk_image(connection, module)
                ret['changed'] = ret['changed'] or uploaded

            # Download disk image in case the file doesn't exist or force is passed:
            if (
                module.params['download_image_path']
                and (not os.path.isfile(module.params['download_image_path']) or module.params['force'])
            ):
                downloaded = download_disk_image(connection, module)
                ret['changed'] = ret['changed'] or downloaded

            # Disk sparsify, only if disk is of image type:
            disk = disks_service.disk_service(module.params['id']).get()
            if disk.storage_type == otypes.DiskStorageType.IMAGE:
                ret = disks_module.action(
                    action='sparsify',
                    action_condition=lambda d: module.params['sparsify'],
                    wait_condition=lambda d: d.status == otypes.DiskStatus.OK,
                )
        elif state == 'absent':
            ret = disks_module.remove()

        # If VM was passed attach/detach disks to/from the VM.
        # BUGFIX: the original condition lacked parentheses --
        # ``vm_id is not None or vm_name is not None and state != 'absent'``
        # -- and since ``and`` binds tighter than ``or``, the attachment code
        # ran for state=absent whenever vm_id was supplied.
        if (
            module.params.get('vm_id') is not None
            or module.params.get('vm_name') is not None
        ) and state != 'absent':
            vms_service = connection.system_service().vms_service()

            # If `vm_id` isn't specified, find VM by name:
            vm_id = module.params['vm_id']
            if vm_id is None:
                vm_id = getattr(search_by_name(vms_service, module.params['vm_name']), 'id', None)

            if vm_id is None:
                module.fail_json(
                    msg="VM don't exists, please create it first."
                )

            disk_attachments_service = vms_service.vm_service(vm_id).disk_attachments_service()
            disk_attachments_module = DiskAttachmentsModule(
                connection=connection,
                module=module,
                service=disk_attachments_service,
                changed=ret['changed'] if ret else False,
            )

            if state == 'present' or state == 'attached':
                ret = disk_attachments_module.create()
                if lun is None:
                    wait(
                        service=disk_attachments_service.service(ret['id']),
                        condition=lambda d: follow_link(connection, d.disk).status == otypes.DiskStatus.OK,
                        wait=module.params['wait'],
                        timeout=module.params['timeout'],
                    )
            elif state == 'detached':
                ret = disk_attachments_module.remove()

        module.exit_json(**ret)
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Guard against create_connection() having failed above.
        if connection is not None:
            connection.close(logout=auth.get('token') is None)


if __name__ == "__main__":
    main()
| gpl-3.0 |
lakshayg/tensorflow | tensorflow/contrib/slim/python/slim/nets/inception_v1_test.py | 112 | 8960 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for nets.inception_v1."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.framework.python.ops import variables as variables_lib
from tensorflow.contrib.slim.python.slim import model_analyzer
from tensorflow.contrib.slim.python.slim.nets import inception_v1
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class InceptionV1Test(test.TestCase):
  """Graph-construction tests for slim's Inception V1 network.

  These tests build the graph and check op names, endpoint sets and static
  shapes; only a few of them actually run a session.
  """

  def testBuildClassificationNetwork(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000
    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    logits, end_points = inception_v1.inception_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [batch_size, num_classes])
    self.assertTrue('Predictions' in end_points)
    self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
                         [batch_size, num_classes])

  def testBuildBaseNetwork(self):
    batch_size = 5
    height, width = 224, 224
    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    # NOTE(review): the local is named mixed_6c but the final endpoint of
    # inception_v1_base is Mixed_5c (the name looks copied from the v2 test).
    mixed_6c, end_points = inception_v1.inception_v1_base(inputs)
    self.assertTrue(mixed_6c.op.name.startswith('InceptionV1/Mixed_5c'))
    self.assertListEqual(mixed_6c.get_shape().as_list(),
                         [batch_size, 7, 7, 1024])
    expected_endpoints = [
        'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
        'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
        'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
        'Mixed_5b', 'Mixed_5c'
    ]
    self.assertItemsEqual(end_points.keys(), expected_endpoints)

  def testBuildOnlyUptoFinalEndpoint(self):
    batch_size = 5
    height, width = 224, 224
    endpoints = [
        'Conv2d_1a_7x7', 'MaxPool_2a_3x3', 'Conv2d_2b_1x1', 'Conv2d_2c_3x3',
        'MaxPool_3a_3x3', 'Mixed_3b', 'Mixed_3c', 'MaxPool_4a_3x3', 'Mixed_4b',
        'Mixed_4c', 'Mixed_4d', 'Mixed_4e', 'Mixed_4f', 'MaxPool_5a_2x2',
        'Mixed_5b', 'Mixed_5c'
    ]
    # Rebuild the network once per endpoint and check it stops there.
    for index, endpoint in enumerate(endpoints):
      with ops.Graph().as_default():
        inputs = random_ops.random_uniform((batch_size, height, width, 3))
        out_tensor, end_points = inception_v1.inception_v1_base(
            inputs, final_endpoint=endpoint)
        self.assertTrue(
            out_tensor.op.name.startswith('InceptionV1/' + endpoint))
        self.assertItemsEqual(endpoints[:index + 1], end_points)

  def testBuildAndCheckAllEndPointsUptoMixed5c(self):
    batch_size = 5
    height, width = 224, 224
    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    _, end_points = inception_v1.inception_v1_base(
        inputs, final_endpoint='Mixed_5c')
    endpoints_shapes = {
        'Conv2d_1a_7x7': [5, 112, 112, 64],
        'MaxPool_2a_3x3': [5, 56, 56, 64],
        'Conv2d_2b_1x1': [5, 56, 56, 64],
        'Conv2d_2c_3x3': [5, 56, 56, 192],
        'MaxPool_3a_3x3': [5, 28, 28, 192],
        'Mixed_3b': [5, 28, 28, 256],
        'Mixed_3c': [5, 28, 28, 480],
        'MaxPool_4a_3x3': [5, 14, 14, 480],
        'Mixed_4b': [5, 14, 14, 512],
        'Mixed_4c': [5, 14, 14, 512],
        'Mixed_4d': [5, 14, 14, 512],
        'Mixed_4e': [5, 14, 14, 528],
        'Mixed_4f': [5, 14, 14, 832],
        'MaxPool_5a_2x2': [5, 7, 7, 832],
        'Mixed_5b': [5, 7, 7, 832],
        'Mixed_5c': [5, 7, 7, 1024]
    }
    self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
    for endpoint_name in endpoints_shapes:
      expected_shape = endpoints_shapes[endpoint_name]
      self.assertTrue(endpoint_name in end_points)
      self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                           expected_shape)

  def testModelHasExpectedNumberOfParameters(self):
    batch_size = 5
    height, width = 224, 224
    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    with arg_scope(inception_v1.inception_v1_arg_scope()):
      inception_v1.inception_v1_base(inputs)
    total_params, _ = model_analyzer.analyze_vars(
        variables_lib.get_model_variables())
    # Pins the exact parameter count of the base network.
    self.assertAlmostEqual(5607184, total_params)

  def testHalfSizeImages(self):
    batch_size = 5
    height, width = 112, 112
    inputs = random_ops.random_uniform((batch_size, height, width, 3))
    mixed_5c, _ = inception_v1.inception_v1_base(inputs)
    self.assertTrue(mixed_5c.op.name.startswith('InceptionV1/Mixed_5c'))
    self.assertListEqual(mixed_5c.get_shape().as_list(),
                         [batch_size, 4, 4, 1024])

  def testUnknownImageShape(self):
    ops.reset_default_graph()
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
    with self.test_session() as sess:
      # Spatial dimensions are unknown at graph-construction time.
      inputs = array_ops.placeholder(
          dtypes.float32, shape=(batch_size, None, None, 3))
      logits, end_points = inception_v1.inception_v1(inputs, num_classes)
      self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      pre_pool = end_points['Mixed_5c']
      feed_dict = {inputs: input_np}
      variables.global_variables_initializer().run()
      pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
      self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])

  def testUnknownBatchSize(self):
    batch_size = 1
    height, width = 224, 224
    num_classes = 1000
    inputs = array_ops.placeholder(dtypes.float32, (None, height, width, 3))
    logits, _ = inception_v1.inception_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('InceptionV1/Logits'))
    self.assertListEqual(logits.get_shape().as_list(), [None, num_classes])
    images = random_ops.random_uniform((batch_size, height, width, 3))
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      output = sess.run(logits, {inputs: images.eval()})
      self.assertEquals(output.shape, (batch_size, num_classes))

  def testEvaluation(self):
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    eval_inputs = random_ops.random_uniform((batch_size, height, width, 3))
    logits, _ = inception_v1.inception_v1(
        eval_inputs, num_classes, is_training=False)
    predictions = math_ops.argmax(logits, 1)
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      output = sess.run(predictions)
      self.assertEquals(output.shape, (batch_size,))

  def testTrainEvalWithReuse(self):
    train_batch_size = 5
    eval_batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    train_inputs = random_ops.random_uniform(
        (train_batch_size, height, width, 3))
    inception_v1.inception_v1(train_inputs, num_classes)
    # Second tower reuses the variables created by the training tower.
    eval_inputs = random_ops.random_uniform((eval_batch_size, height, width, 3))
    logits, _ = inception_v1.inception_v1(eval_inputs, num_classes, reuse=True)
    predictions = math_ops.argmax(logits, 1)
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      output = sess.run(predictions)
      self.assertEquals(output.shape, (eval_batch_size,))

  def testLogitsNotSqueezed(self):
    num_classes = 25
    images = random_ops.random_uniform([1, 224, 224, 3])
    logits, _ = inception_v1.inception_v1(
        images, num_classes=num_classes, spatial_squeeze=False)
    with self.test_session() as sess:
      variables.global_variables_initializer().run()
      logits_out = sess.run(logits)
      self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])


if __name__ == '__main__':
  test.main()
| apache-2.0 |
eayunstack/nova | nova/tests/compute/test_multiple_nodes.py | 7 | 7378 | # Copyright (c) 2012 NTT DOCOMO, INC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for compute service with multiple compute nodes."""
from oslo.config import cfg
from nova import context
from nova import db
from nova import objects
from nova.openstack.common import importutils
from nova import test
from nova.virt import fake
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
CONF.import_opt('compute_driver', 'nova.virt.driver')
class BaseTestCase(test.TestCase):
    """Common teardown for the fake-driver tests below."""

    def tearDown(self):
        # Undo any fake.set_nodes() calls made by the test.
        fake.restore_nodes()
        super(BaseTestCase, self).tearDown()
class FakeDriverSingleNodeTestCase(BaseTestCase):
    """FakeDriver behaviour when it exposes exactly one hypervisor node."""

    def setUp(self):
        super(FakeDriverSingleNodeTestCase, self).setUp()
        # NOTE(review): the driver is constructed before set_nodes(); this
        # assumes FakeDriver reads the node list lazily -- confirm in
        # nova.virt.fake if this ordering ever matters.
        self.driver = fake.FakeDriver(virtapi=None)
        fake.set_nodes(['xyz'])

    def test_get_host_stats(self):
        # With a single node, get_host_stats returns a plain dict.
        stats = self.driver.get_host_stats()
        self.assertIsInstance(stats, dict)
        self.assertEqual(stats['hypervisor_hostname'], 'xyz')

    def test_get_available_resource(self):
        res = self.driver.get_available_resource('xyz')
        self.assertEqual(res['hypervisor_hostname'], 'xyz')
class FakeDriverMultiNodeTestCase(BaseTestCase):
    """FakeDriver behaviour when it exposes multiple hypervisor nodes."""

    def setUp(self):
        super(FakeDriverMultiNodeTestCase, self).setUp()
        self.driver = fake.FakeDriver(virtapi=None)
        fake.set_nodes(['aaa', 'bbb'])

    def test_get_host_stats(self):
        # With multiple nodes, get_host_stats returns a list of dicts,
        # one per node, in node order.
        stats = self.driver.get_host_stats()
        self.assertIsInstance(stats, list)
        self.assertEqual(len(stats), 2)
        self.assertEqual(stats[0]['hypervisor_hostname'], 'aaa')
        self.assertEqual(stats[1]['hypervisor_hostname'], 'bbb')

    def test_get_available_resource(self):
        res_a = self.driver.get_available_resource('aaa')
        self.assertEqual(res_a['hypervisor_hostname'], 'aaa')

        res_b = self.driver.get_available_resource('bbb')
        self.assertEqual(res_b['hypervisor_hostname'], 'bbb')

        # An unknown nodename yields an empty resource dict.
        res_x = self.driver.get_available_resource('xxx')
        self.assertEqual(res_x, {})
class MultiNodeComputeTestCase(BaseTestCase):
    """Compute-manager resource tracking across multiple driver nodes."""

    def setUp(self):
        super(MultiNodeComputeTestCase, self).setUp()
        self.flags(compute_driver='nova.virt.fake.FakeDriver')
        self.compute = importutils.import_object(CONF.compute_manager)
        self.flags(use_local=True, group='conductor')
        self.conductor = self.start_service('conductor',
                                            manager=CONF.conductor.manager)

        # Stub the DB layer: one compute node record (id=2) is "in the DB".
        def fake_get_compute_nodes_in_db(context, use_slave=False):
            fake_compute_nodes = [{'local_gb': 259,
                                   'vcpus_used': 0,
                                   'deleted': 0,
                                   'hypervisor_type': 'powervm',
                                   'created_at': '2013-04-01T00:27:06.000000',
                                   'local_gb_used': 0,
                                   'updated_at': '2013-04-03T00:35:41.000000',
                                   'hypervisor_hostname': 'fake_phyp1',
                                   'memory_mb_used': 512,
                                   'memory_mb': 131072,
                                   'current_workload': 0,
                                   'vcpus': 16,
                                   'cpu_info': 'ppc64,powervm,3940',
                                   'running_vms': 0,
                                   'free_disk_gb': 259,
                                   'service_id': 7,
                                   'hypervisor_version': 7,
                                   'disk_available_least': 265856,
                                   'deleted_at': None,
                                   'free_ram_mb': 130560,
                                   'metrics': '',
                                   'numa_topology': '',
                                   'stats': '',
                                   'id': 2,
                                   'host_ip': '127.0.0.1'}]
            return [objects.ComputeNode._from_db_object(
                        context, objects.ComputeNode(), cn)
                    for cn in fake_compute_nodes]

        def fake_compute_node_delete(context, compute_node_id):
            # Only the stubbed node above (id=2) may ever be deleted.
            self.assertEqual(2, compute_node_id)

        self.stubs.Set(self.compute, '_get_compute_nodes_in_db',
                       fake_get_compute_nodes_in_db)
        self.stubs.Set(db, 'compute_node_delete',
                       fake_compute_node_delete)

    def test_update_available_resource_add_remove_node(self):
        # Resource trackers must follow the driver's reported node set
        # through grow/shrink/grow transitions.
        ctx = context.get_admin_context()
        fake.set_nodes(['A', 'B', 'C'])
        self.compute.update_available_resource(ctx)
        self.assertEqual(sorted(self.compute._resource_tracker_dict.keys()),
                         ['A', 'B', 'C'])

        fake.set_nodes(['A', 'B'])
        self.compute.update_available_resource(ctx)
        self.assertEqual(sorted(self.compute._resource_tracker_dict.keys()),
                         ['A', 'B'])

        fake.set_nodes(['A', 'B', 'C'])
        self.compute.update_available_resource(ctx)
        self.assertEqual(sorted(self.compute._resource_tracker_dict.keys()),
                         ['A', 'B', 'C'])

    def test_compute_manager_removes_deleted_node(self):
        ctx = context.get_admin_context()
        fake.set_nodes(['A', 'B'])
        fake_compute_nodes = [
            objects.ComputeNode(
                context=ctx, hypervisor_hostname='A', id=2),
            objects.ComputeNode(
                context=ctx, hypervisor_hostname='B', id=3),
        ]

        # Override the setUp() stubs with a mutable two-node DB view.
        def fake_get_compute_nodes_in_db(context, use_slave=False):
            return fake_compute_nodes

        def fake_compute_node_delete(context, compute_node_id):
            for cn in fake_compute_nodes:
                if compute_node_id == cn.id:
                    fake_compute_nodes.remove(cn)
                    return

        self.stubs.Set(self.compute, '_get_compute_nodes_in_db',
                       fake_get_compute_nodes_in_db)
        self.stubs.Set(db, 'compute_node_delete',
                       fake_compute_node_delete)

        self.compute.update_available_resource(ctx)

        # Verify nothing is deleted if driver and db compute nodes match
        self.assertEqual(len(fake_compute_nodes), 2)
        self.assertEqual(sorted(self.compute._resource_tracker_dict.keys()),
                         ['A', 'B'])

        fake.set_nodes(['A'])
        self.compute.update_available_resource(ctx)

        # Verify B gets deleted since now only A is reported by driver
        self.assertEqual(len(fake_compute_nodes), 1)
        self.assertEqual(fake_compute_nodes[0]['hypervisor_hostname'], 'A')
        self.assertEqual(sorted(self.compute._resource_tracker_dict.keys()),
                         ['A'])
| apache-2.0 |
ryfeus/lambda-packs | Pyrestest_wrk/source/pip/_vendor/cachecontrol/adapter.py | 87 | 3967 | import functools
from pip._vendor.requests.adapters import HTTPAdapter
from .controller import CacheController
from .cache import DictCache
from .filewrapper import CallbackFileWrapper
class CacheControlAdapter(HTTPAdapter):
    """A requests transport adapter that answers GETs from a local cache
    when possible, and stores fresh responses for later reuse.
    """

    # Methods whose success invalidates any cached entry for the URL.
    invalidating_methods = set(['PUT', 'DELETE'])

    def __init__(self, cache=None,
                 cache_etags=True,
                 controller_class=None,
                 serializer=None,
                 heuristic=None,
                 *args, **kw):
        """Set up the cache backend, optional freshness heuristic, and the
        controller that implements the HTTP caching rules.
        """
        super(CacheControlAdapter, self).__init__(*args, **kw)
        self.cache = cache or DictCache()
        self.heuristic = heuristic

        make_controller = controller_class or CacheController
        self.controller = make_controller(
            self.cache,
            cache_etags=cache_etags,
            serializer=serializer,
        )

    def send(self, request, **kw):
        """
        Send a request. Use the request information to see if it
        exists in the cache and cache the response if we need to and can.
        """
        if request.method == 'GET':
            hit = self.controller.cached_request(request)
            if hit:
                # Served entirely from cache; no network round trip.
                return self.build_response(request, hit, from_cache=True)

            # Miss (or stale entry): make the request conditional where an
            # etag / last-modified is available.
            request.headers.update(self.controller.conditional_headers(request))

        return super(CacheControlAdapter, self).send(request, **kw)

    def build_response(self, request, response, from_cache=False):
        """
        Build a response by making a request or using the cache.

        This will end up calling send and returning a potentially
        cached response
        """
        came_from_network = not from_cache
        if came_from_network and request.method == 'GET':
            if response.status == 304:
                # We must have sent an ETag request. This could mean
                # that we've been expired already or that we simply
                # have an etag. In either case, we want to try and
                # update the cache if that is the case.
                refreshed = self.controller.update_cached_response(
                    request, response
                )
                if refreshed is not response:
                    from_cache = True

                # We are done with the server response, read a
                # possible response body (compliant servers will
                # not return one, but we cannot be 100% sure) and
                # release the connection back to the pool.
                response.read(decode_content=False)
                response.release_conn()

                response = refreshed
            else:
                # Check for any heuristics that might update headers
                # before trying to cache.
                if self.heuristic:
                    response = self.heuristic.apply(response)

                # Wrap the response file with a wrapper that will cache the
                # response when the stream has been consumed.
                store_callback = functools.partial(
                    self.controller.cache_response,
                    request,
                    response,
                )
                response._fp = CallbackFileWrapper(response._fp, store_callback)

        built = super(CacheControlAdapter, self).build_response(
            request, response
        )

        # See if we should invalidate the cache.
        if request.method in self.invalidating_methods and built.ok:
            self.cache.delete(self.controller.cache_url(request.url))

        # Give the request a from_cache attr to let people use it
        built.from_cache = from_cache
        return built

    def close(self):
        """Release the cache's resources, then close the adapter itself."""
        self.cache.close()
        super(CacheControlAdapter, self).close()
| mit |
marcelovilaca/DIRAC | Resources/Computing/glexecComputingElement.py | 2 | 13887 | """ A computing element class that attempts to use glexec if available then
defaults to the standard InProcess Computing Element behaviour.
"""
__RCSID__ = "$Id$"
import os
import stat
import distutils.spawn
import DIRAC
from DIRAC import S_OK, S_ERROR
from DIRAC.Resources.Computing.ComputingElement import ComputingElement
from DIRAC.Core.Utilities.ThreadScheduler import gThreadScheduler
from DIRAC.Core.Utilities.Subprocess import shellCall
# Configuration parameters this CE requires beyond the ComputingElement base
# class; empty for the glexec CE.
MandatoryParameters = [ ]
class glexecComputingElement( ComputingElement ):
  """Computing element that wraps payload execution with glexec (identity
  switching to the payload owner) when glexec is available, and falls back
  to plain in-process execution otherwise.
  """

  # Required configuration parameters (none beyond the base class).
  mandatoryParameters = MandatoryParameters

  #############################################################################
  def __init__( self, ceUniqueID ):
    """ Standard constructor.

        :param ceUniqueID: unique identifier of this CE instance
    """
    ComputingElement.__init__( self, ceUniqueID )
    # Count of jobs accepted by this CE instance (incremented in submitJob;
    # NOTE(review): not reported by getCEStatus below — confirm intended).
    self.submittedJobs = 0

  #############################################################################
  def _addCEConfigDefaults( self ):
    """Method to make sure all necessary Configuration Parameters are defined
    """
    # First assure that any global parameters are loaded
    ComputingElement._addCEConfigDefaults( self )
    # Now glexec specific ones

  #############################################################################
  def submitJob( self, executableFile, proxy, dummy = None ):
    """ Method to submit job, should be overridden in sub-class.

        Writes the payload proxy to disk, locates and smoke-tests glexec,
        starts a periodic proxy-renewal task, and finally runs the payload
        either via glexec or directly in-process.

        :param executableFile: path of the payload wrapper script
        :param proxy: payload proxy string to write to file
        :param dummy: unused (kept for interface compatibility)
    """
    self.log.verbose( 'Setting up proxy for payload' )
    result = self.writeProxyToFile( proxy )
    if not result['OK']:
      return result

    payloadProxy = result['Value']
    if not os.environ.has_key( 'X509_USER_PROXY' ):
      self.log.error( 'X509_USER_PROXY variable for pilot proxy not found in local environment' )
      return S_ERROR( 'X509_USER_PROXY not found' )

    pilotProxy = os.environ['X509_USER_PROXY']
    self.log.info( 'Pilot proxy X509_USER_PROXY=%s' % pilotProxy )
    # glexec reads these variables to authenticate/map the payload identity.
    os.environ[ 'GLEXEC_CLIENT_CERT' ] = payloadProxy
    os.environ[ 'GLEXEC_SOURCE_PROXY' ] = payloadProxy
    self.log.info( '\n'.join( [ 'Set payload proxy variables:',
                                'GLEXEC_CLIENT_CERT=%s' % payloadProxy,
                                'GLEXEC_SOURCE_PROXY=%s' % payloadProxy ] ) )

    #Determine glexec location (default to standard InProcess behaviour if not found)
    glexecLocation = None
    result = self.glexecLocate()
    if result['OK']:
      glexecLocation = result['Value']
      self.log.info( 'glexec found for local site at %s' % glexecLocation )

    if glexecLocation:
      result = self.recursivelyChangePermissions()
      if not result['OK']:
        self.log.error( 'Permissions change failed, continuing regardless...' )
    else:
      self.log.info( 'glexec not found, no permissions to change' )

    #Test glexec with payload proxy prior to submitting the job
    result = self.glexecTest( glexecLocation )
    if not result['OK']:
      res = self.analyseExitCode( result['Value'] ) #take no action as we currently default to InProcess
      glexecLocation = None
      # Optionally bounce the payload back for rescheduling instead of
      # silently degrading to in-process execution.
      if 'RescheduleOnError' in self.ceParameters and self.ceParameters['RescheduleOnError']:
        result = S_ERROR( 'gLexec Test Failed: %s' % res['Value'] )
        result['ReschedulePayload'] = True
        return result
      self.log.info( 'glexec test failed, will submit payload regardless...' )

    #Revert to InProcess behaviour
    if not glexecLocation:
      self.log.info( 'glexec is not found, setting X509_USER_PROXY for payload proxy' )
      os.environ[ 'X509_USER_PROXY' ] = payloadProxy

    self.log.verbose( 'Starting process for monitoring payload proxy' )
    # executions=0 / elapsedTime=0 schedules an unbounded periodic task.
    gThreadScheduler.addPeriodicTask( self.proxyCheckPeriod, self.monitorProxy,
                                      taskArgs = ( glexecLocation, pilotProxy, payloadProxy ),
                                      executions = 0, elapsedTime = 0 )

    #Submit job
    self.log.info( 'Changing permissions of executable to 0755' )
    try:
      os.chmod( os.path.abspath( executableFile ), stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
    except Exception, x:
      self.log.error( 'Failed to change permissions of executable to 0755 with exception',
                      '\n%s' % ( x ) )

    result = self.glexecExecute( os.path.abspath( executableFile ), glexecLocation )
    if not result['OK']:
      self.analyseExitCode( result['Value'] ) #take no action as we currently default to InProcess
      self.log.error( 'Failed glexecExecute', result )
      return result

    self.log.debug( 'glexec CE result OK' )
    self.submittedJobs += 1
    return S_OK()

  #############################################################################
  def recursivelyChangePermissions( self ):
    """ Ensure that the current directory and all those beneath have the correct
        permissions (0755), so the glexec-mapped identity can traverse and
        read the pilot work area.

        Always returns S_OK (failures are logged and tolerated), except when
        the current user id cannot be determined.
    """
    currentDir = os.getcwd()
    try:
      self.log.info( 'Trying to explicitly change permissions for parent directory %s' % currentDir )
      os.chmod( currentDir, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
    except Exception, x:
      self.log.error( 'Problem changing directory permissions in parent directory', str( x ) )
      return S_OK()

    userID = None

    # Directory listings before/after are purely diagnostic logging.
    res = shellCall( 10, 'ls -al' )
    if res['OK'] and res['Value'][0] == 0:
      self.log.info( 'Contents of the working directory before permissions change:' )
      self.log.info( str( res['Value'][1] ) )
    else:
      self.log.error( 'Failed to list the log directory contents', str( res['Value'][2] ) )

    res = shellCall( 10, 'id -u' )
    if res['OK'] and res['Value'][0] == 0:
      userID = res['Value'][1]
      self.log.info( 'Current user ID is: %s' % ( userID ) )
    else:
      self.log.error( 'Failed to obtain current user ID', str( res['Value'][2] ) )
      return res

    res = shellCall( 10, 'ls -al %s/../' % currentDir )
    if res['OK'] and res['Value'][0] == 0:
      self.log.info( 'Contents of the parent directory before permissions change:' )
      self.log.info( str( res['Value'][1] ) )
    else:
      self.log.error( 'Failed to list the parent directory contents', str( res['Value'][2] ) )

    self.log.verbose( 'Changing permissions to 0755 in current directory %s' % currentDir )
    for dirName, _, files in os.walk( currentDir ):
      try:
        self.log.info( 'Changing file and directory permissions to 0755 for %s' % dirName )
        # os.stat(...)[4] is st_uid: only touch entries owned by us, and
        # never follow symlinks.
        if os.stat( dirName )[4] == userID and not os.path.islink( dirName ):
          os.chmod( dirName, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
        for toChange in files:
          toChange = os.path.join( dirName, toChange )
          if os.stat( toChange )[4] == userID and not os.path.islink( toChange ):
            os.chmod( toChange, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
      except Exception, x:
        self.log.error( 'Problem changing directory permissions', str( x ) )

    self.log.info( 'Permissions in current directory %s updated successfully' % ( currentDir ) )

    res = shellCall( 10, 'ls -al' )
    if res['OK'] and res['Value'][0] == 0:
      self.log.info( 'Contents of the working directory after changing permissions:' )
      self.log.info( str( res['Value'][1] ) )
    else:
      self.log.error( 'Failed to list the log directory contents', str( res['Value'][2] ) )

    res = shellCall( 10, 'ls -al %s/../' % currentDir )
    if res['OK'] and res['Value'][0] == 0:
      self.log.info( 'Contents of the parent directory after permissions change:' )
      self.log.info( str( res['Value'][1] ) )
    else:
      self.log.error( 'Failed to list the parent directory contents', str( res['Value'][2] ) )
    return S_OK()

  #############################################################################
  def analyseExitCode( self, resultTuple ):
    """ Analyses the exit codes in case of glexec failures. The convention for
        glexec exit codes is listed below:

          Shell exit codes:
          127 - command not found
          129 - command died due to signal 1 (SIGHUP)
          130 - command died due to signal 2 (SIGINT)

          glexec specific codes:
          201 - client error
          202 - internal error
          203 - authz error

        :param resultTuple: ( status, stdout, stderr ) from the glexec call
        :return: S_OK carrying the matched error message (or None if the
                 status is not in the known list)
    """
    if not resultTuple:
      return S_OK()

    # FIXME: the wrapper will return:
    #   > 0 if there are problems with the payload
    #   < 0 if there are problems with the wrapper itself
    #   0 if everything is OK
    codes = {}
    codes[127] = 'Shell exited, command not found'
    codes[129] = 'Shell interrupt signal 1 (SIGHUP)'
    codes[130] = 'Shell interrupt signal 2 (SIGINT)'
    codes[201] = 'glexec failed with client error'
    codes[202] = 'glexec failed with internal error'
    codes[203] = 'glexec failed with authorization error'

    status = resultTuple[0]
    stdOutput = resultTuple[1]
    stdError = resultTuple[2]
    self.log.info( 'glexec call failed with status %s' % ( status ) )
    self.log.info( 'glexec stdout:\n%s' % stdOutput )
    self.log.info( 'glexec stderr:\n%s' % stdError )

    error = None
    for code, msg in codes.items():
      self.log.verbose( 'Exit code %s => %s' % ( code, msg ) )
      if status == code:
        error = msg

    if not error:
      self.log.error( 'glexec exit code not in expected list', '%s' % status )
    else:
      self.log.error( 'Error in glexec return code', '%s = %s' % ( status, error ) )
    return S_OK( error )

  #############################################################################
  def glexecTest( self, glexecLocation ):
    """Ensure that the current DIRAC distribution is group readable e.g. dirac-proxy-info
       also check the status code of the glexec call.

       Writes a short shell script (id/hostname/date/dirac-proxy-info) and
       runs it through glexec to validate the setup before real submission.
    """
    if not glexecLocation:
      return S_OK( 'Nothing to test' )

    testFile = 'glexecTest.sh'
    cmds = ['#!/bin/sh']
    cmds.append( 'id' )
    cmds.append( 'hostname' )
    cmds.append( 'date' )
    cmds.append( '%s/scripts/dirac-proxy-info' % DIRAC.rootPath )
    fopen = open( testFile, 'w' )
    fopen.write( '\n'.join( cmds ) )
    fopen.close()

    self.log.info( 'Changing permissions of test script to 0755' )
    try:
      os.chmod( os.path.abspath( testFile ), stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH )
    except Exception, x:
      self.log.error( 'Failed to change permissions of test script to 0755 with exception',
                      '\n%s' % ( x ) )
      return S_ERROR( 'Could not change permissions of test script' )

    return self.glexecExecute( os.path.abspath( testFile ), glexecLocation )

  #############################################################################
  def glexecExecute( self, executableFile, glexecLocation ):
    """Run glexec with checking of the exit status code.

       :param executableFile: script to run (None re-runs bare glexec, used
                              for proxy renewal in monitorProxy)
       :param glexecLocation: path to the glexec binary, or None/'' to run
                              the executable directly in-process
    """
    cmd = executableFile
    if glexecLocation and executableFile:
      cmd = "%s /bin/bash -lc '%s'" % ( glexecLocation, executableFile )
    if glexecLocation and not executableFile:
      cmd = '%s' % ( glexecLocation )

    self.log.info( 'CE submission command is: %s' % cmd )
    result = shellCall( 0, cmd, callbackFunction = self.sendOutput )
    if not result['OK']:
      result['Value'] = ( 0, '', '' )
      return result

    resultTuple = result['Value']
    status = resultTuple[0]
    stdOutput = resultTuple[1]
    stdError = resultTuple[2]
    self.log.info( "Status after the glexec execution is %s" % str( status ) )
    # Status codes >= 127 indicate shell/glexec-level failures
    # (see analyseExitCode); surface them as S_ERROR with the raw tuple.
    if status >=127:
      error = S_ERROR( status )
      error['Value'] = ( status, stdOutput, stdError )
      return error

    return result

  #############################################################################
  def glexecLocate( self ):
    """Try to find glexec on the local system, if not found default to InProcess.

       Search order: OSG_GLEXEC_LOCATION, then GLITE_LOCATION/sbin/glexec,
       then the PATH.
    """
    glexecPath = ""
    if os.environ.has_key( 'OSG_GLEXEC_LOCATION' ):
      glexecPath = '%s' % ( os.environ['OSG_GLEXEC_LOCATION'] )
    elif os.environ.has_key( 'GLITE_LOCATION' ):
      glexecPath = '%s/sbin/glexec' % ( os.environ['GLITE_LOCATION'] )
    else: #try to locate the excutable in the PATH
      glexecPath = distutils.spawn.find_executable( "glexec" )
    if not glexecPath:
      self.log.info( 'Unable to locate glexec, site does not have GLITE_LOCATION nor OSG_GLEXEC_LOCATION defined' )
      return S_ERROR( 'glexec not found' )

    if not os.path.exists( glexecPath ):
      self.log.info( 'glexec not found at path %s' % ( glexecPath ) )
      return S_ERROR( 'glexec not found' )

    return S_OK( glexecPath )

  #############################################################################
  def getCEStatus( self ):
    """ Method to return information on running and pending jobs.

        Returns a static all-zero report; this CE does not track job counts
        here (NOTE(review): self.submittedJobs is not reflected — confirm).
    """
    result = S_OK()
    result['SubmittedJobs'] = 0
    result['RunningJobs'] = 0
    result['WaitingJobs'] = 0
    return result

  #############################################################################
  def monitorProxy( self, glexecLocation, pilotProxy, payloadProxy ):
    """ Monitor the payload proxy and renew as necessary.

        Invoked periodically by the task scheduled in submitJob. When a
        renewal happened and glexec is in use, re-runs glexec without
        arguments so the renewed proxy is propagated to the payload side.
    """
    retVal = self._monitorProxy( pilotProxy, payloadProxy )
    if not retVal['OK']:
      # Failed to renew the proxy, nothing else to be done
      return retVal

    if not retVal['Value']:
      # No need to renew the proxy, nothing else to be done
      return retVal

    if glexecLocation:
      self.log.info( 'Rerunning glexec without arguments to renew payload proxy' )
      result = self.glexecExecute( None, glexecLocation )
      if not result['OK']:
        self.log.error( 'Failed glexecExecute', result )
    else:
      self.log.info( 'Running without glexec, checking local proxy' )

    return S_OK( 'Proxy checked' )
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.