repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
epssy/hue | apps/pig/src/pig/migrations/0001_initial.py | 37 | 5331 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the Pig app.

    Creates the ``pig_document`` and ``pig_pigscript`` tables.  The
    ``models`` dict below is South's frozen snapshot of the ORM at the
    time this migration was written; it is consumed via the ``orm``
    argument and must not be edited by hand.
    """

    # Ensure the desktop app's document/tag migration runs after this one,
    # since it depends on the tables created here.
    needed_by = (
        ("desktop", "0007_auto__add_documentpermission__add_documenttag__add_document"),
    )

    def forwards(self, orm):
        """Apply the migration: create both tables and emit create signals."""
        # Adding model 'Document'
        db.create_table('pig_document', (
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('is_design', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True, blank=True)),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ))
        db.send_create_signal('pig', ['Document'])

        # Adding model 'PigScript' (multi-table inheritance child of Document,
        # hence the OneToOneField primary key back to pig_document).
        db.create_table('pig_pigscript', (
            ('document_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['pig.Document'], unique=True, primary_key=True)),
            ('data', self.gf('django.db.models.fields.TextField')(default='{"name": "", "parameters": [], "script": "", "properties": [], "resources": [], "job_id": null}')),
        ))
        db.send_create_signal('pig', ['PigScript'])

    def backwards(self, orm):
        """Revert the migration: drop both tables (data is lost)."""
        # Deleting model 'Document'
        db.delete_table('pig_document')

        # Deleting model 'PigScript'
        db.delete_table('pig_pigscript')

    # Frozen ORM definitions (auto-generated by South; do not edit).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'pig.document': {
            'Meta': {'object_name': 'Document'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_design': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'pig.pigscript': {
            'Meta': {'object_name': 'PigScript', '_ormbases': ['pig.Document']},
            'data': ('django.db.models.fields.TextField', [], {'default': '\'{"name": "", "parameters": [], "script": "", "properties": [], "resources": [], "job_id": null}\''}),
            'document_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['pig.Document']", 'unique': 'True', 'primary_key': 'True'})
        }
    }

    complete_apps = ['pig']
| apache-2.0 |
timesong/pycha | chavier/gui.py | 1 | 19596 | # Copyright(c) 2007-2010 by Lorenzo Gil Sanchez <lorenzo.gil.sanchez@gmail.com>
#
# This file is part of Chavier.
#
# Chavier is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Chavier is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Chavier. If not, see <http://www.gnu.org/licenses/>.
import pygtk
pygtk.require('2.0')
import gtk
from chavier.dialogs import (
TextInputDialog, PointDialog, OptionDialog, RandomGeneratorDialog,
AboutDialog, warning,
)
class GUI(object):
def __init__(self, app):
self.app = app
self.chart = None
self.surface = None
self.main_window = gtk.Window(gtk.WINDOW_TOPLEVEL)
self.main_window.connect('delete_event', self.delete_event)
self.main_window.connect('destroy', self.destroy)
self.main_window.set_default_size(640, 480)
self.main_window.set_title(u'Chavier')
vbox = gtk.VBox()
self.main_window.add(vbox)
vbox.show()
menubar, toolbar = self._create_ui_manager()
vbox.pack_start(menubar, False, False)
menubar.show()
vbox.pack_start(toolbar, False, False)
toolbar.show()
hpaned = gtk.HPaned()
vbox.pack_start(hpaned, True, True)
hpaned.show()
vpaned = gtk.VPaned()
hpaned.add1(vpaned)
vpaned.show()
block1 = self._create_sidebar_block(u'Data sets',
self._datasets_notebook_creator)
self._create_dataset("Dataset 1")
block1.set_size_request(-1, 200)
vpaned.add1(block1)
block1.show()
block2 = self._create_sidebar_block(u'Options',
self._options_treeview_creator)
vpaned.add2(block2)
block2.show()
self.drawing_area = gtk.DrawingArea()
self.drawing_area.connect('expose_event',
self.drawing_area_expose_event)
self.drawing_area.connect('size_allocate',
self.drawing_area_size_allocate_event)
hpaned.add2(self.drawing_area)
self.drawing_area.show()
self.main_window.show()
def _create_ui_manager(self):
self.uimanager = gtk.UIManager()
accel_group = self.uimanager.get_accel_group()
self.main_window.add_accel_group(accel_group)
action_group = gtk.ActionGroup('default')
action_group.add_actions([
('file', None, '_File', None, 'File', None),
('quit', gtk.STOCK_QUIT, None, None, 'Quit the program',
self.quit),
('edit', None, '_Edit', None, 'Edit', None),
('add_dataset', gtk.STOCK_ADD, '_Add dataset',
'<ctrl><alt>plus', 'Add another dataset', self.add_dataset),
('remove_dataset', gtk.STOCK_REMOVE, '_Remove dataset',
'<ctrl><alt>minus', 'Remove the current dataset',
self.remove_dataset),
('edit_dataset', gtk.STOCK_EDIT, '_Edit dataset name',
'<ctrl><alt>e', 'Edit the name of the current dataset',
self.edit_dataset),
('add_point', gtk.STOCK_ADD, 'Add _point', '<ctrl>plus',
'Add another point to the current dataset', self.add_point),
('remove_point', gtk.STOCK_REMOVE, 'Remove p_oint',
'<ctrl>minus',
'Remove the current point of the current dataset',
self.remove_point),
('edit_point', gtk.STOCK_EDIT, 'Edit po_int', '<ctrl>e',
'Edit the current point of the current dataset',
self.edit_point),
('edit_option', gtk.STOCK_EDIT, 'Edit op_tion', None,
'Edit the current option',
self.edit_option),
('view', None, '_View', None, 'View', None),
('refresh', gtk.STOCK_REFRESH, None, '<ctrl>r',
'Update the chart', self.refresh),
('tools', None, '_Tools', None, 'Tools', None),
('random-points', gtk.STOCK_EXECUTE, '_Generate random points',
'<ctrl>g', 'Generate random points',
self.generate_random_points),
('dump-chart-state', gtk.STOCK_CONVERT, '_Dump chart state',
'<ctrl>d', 'Dump internal chart variables',
self.dump_chart_state),
('help', None, '_Help', None, 'Help', None),
('about', gtk.STOCK_ABOUT, None, None, 'About this program',
self.about),
])
action_group.add_radio_actions([
('verticalbar', None, '_Vertical bars', None,
'Use vertical bars chart', self.app.VERTICAL_BAR_TYPE),
('horizontalbar', None, '_Horizontal bars', None,
'Use horizontal bars chart', self.app.HORIZONTAL_BAR_TYPE),
('line', None, '_Line', None,
'Use lines chart', self.app.LINE_TYPE),
('pie', None, '_Pie', None,
'Use pie chart', self.app.PIE_TYPE),
('scatter', None, '_Scatter', None,
'Use scatter chart', self.app.SCATTER_TYPE),
('stackedverticalbar', None, '_Stacked Vertical bars', None,
'Use stacked vertical bars chart',
self.app.STACKED_VERTICAL_BAR_TYPE),
('stackedhorizontalbar', None, '_Stacked Horizontal bars', None,
'Use stacked horizontal bars chart',
self.app.STACKED_HORIZONTAL_BAR_TYPE),
], self.app.VERTICAL_BAR_TYPE, self.on_chart_type_change)
self.uimanager.insert_action_group(action_group, -1)
ui = """<ui>
<menubar name="MenuBar">
<menu action="file">
<menuitem action="quit"/>
</menu>
<menu action="edit">
<menuitem action="add_dataset"/>
<menuitem action="remove_dataset"/>
<menuitem action="edit_dataset"/>
<separator />
<menuitem action="add_point"/>
<menuitem action="remove_point"/>
<menuitem action="edit_point"/>
<separator />
<menuitem action="edit_option"/>
</menu>
<menu action="view">
<menuitem action="refresh"/>
<separator />
<menuitem action="verticalbar"/>
<menuitem action="horizontalbar"/>
<menuitem action="stackedverticalbar"/>
<menuitem action="stackedhorizontalbar"/>
<menuitem action="line"/>
<menuitem action="pie"/>
<menuitem action="scatter"/>
</menu>
<menu action="tools">
<menuitem action="random-points"/>
<menuitem action="dump-chart-state"/>
</menu>
<menu action="help">
<menuitem action="about"/>
</menu>
</menubar>
<toolbar name="ToolBar">
<toolitem action="quit"/>
<separator />
<toolitem action="add_dataset"/>
<toolitem action="remove_dataset"/>
<separator />
<toolitem action="add_point"/>
<toolitem action="remove_point"/>
<separator />
<toolitem action="refresh"/>
</toolbar>
</ui>
"""
self.uimanager.add_ui_from_string(ui)
self.uimanager.ensure_update()
menubar = self.uimanager.get_widget('/MenuBar')
toolbar = self.uimanager.get_widget('/ToolBar')
return menubar, toolbar
def _create_sidebar_block(self, title, child_widget_creator):
box = gtk.VBox(spacing=6)
box.set_border_width(6)
label = gtk.Label()
label.set_markup(u'<span size="large" weight="bold">%s</span>' % title)
label.set_alignment(0.0, 0.5)
box.pack_start(label, False, False)
label.show()
child_widget = child_widget_creator()
box.pack_start(child_widget, True, True)
child_widget.show()
return box
def _datasets_notebook_creator(self):
self.datasets_notebook = gtk.Notebook()
self.datasets_notebook.set_scrollable(True)
return self.datasets_notebook
def _dataset_treeview_creator(self):
store = gtk.ListStore(float, float)
treeview = gtk.TreeView(store)
column1 = gtk.TreeViewColumn('x', gtk.CellRendererText(), text=0)
treeview.append_column(column1)
column2 = gtk.TreeViewColumn('y', gtk.CellRendererText(), text=1)
treeview.append_column(column2)
treeview.connect('row-activated', self.dataset_treeview_row_activated)
scrolled_window = gtk.ScrolledWindow()
scrolled_window.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
scrolled_window.add(treeview)
treeview.show()
return scrolled_window
def _options_treeview_creator(self):
self.options_store = gtk.TreeStore(str, str, object)
options = self.app.get_default_options()
self._fill_options_store(options, None, self.app.OPTIONS_TYPES)
self.options_treeview = gtk.TreeView(self.options_store)
column1 = gtk.TreeViewColumn('Name', gtk.CellRendererText(), text=0)
self.options_treeview.append_column(column1)
column2 = gtk.TreeViewColumn('Value', gtk.CellRendererText(), text=1)
self.options_treeview.append_column(column2)
self.options_treeview.expand_all()
self.options_treeview.connect('row-activated',
self.options_treeview_row_activated)
scrolled_window = gtk.ScrolledWindow()
scrolled_window.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
scrolled_window.add(self.options_treeview)
self.options_treeview.show()
return scrolled_window
def _fill_options_store(self, options, parent_node, types):
    """Recursively populate ``self.options_store`` from an options dict.

    ``options`` is a (possibly nested) dict of option name -> value;
    ``types`` mirrors its structure, mapping each name to the value's
    type (used later to convert the displayed string back).  Nested
    dicts become parent rows with (name, None, None), so a None type
    column marks a group rather than a leaf option.
    """
    for name, value in options.items():
        value_type = types[name]
        if isinstance(value, dict):
            # Group node: no value/type stored; recurse into the sub-dict.
            current_parent = self.options_store.append(parent_node,
                                                       (name, None, None))
            self._fill_options_store(value, current_parent, value_type)
        else:
            # Leaf option: store the value as a string (column 1 is str);
            # keep None as-is so "unset" remains distinguishable from "None".
            if value is not None:
                value = str(value)
            self.options_store.append(parent_node,
                                      (name, value, value_type))
def _get_current_dataset_tab(self):
current_tab = self.datasets_notebook.get_current_page()
if current_tab != -1:
return self.datasets_notebook.get_nth_page(current_tab)
def _create_dataset(self, name):
scrolled_window = self._dataset_treeview_creator()
scrolled_window.show()
label = gtk.Label(name)
self.datasets_notebook.append_page(scrolled_window, label)
def _get_datasets(self):
datasets = []
n_pages = self.datasets_notebook.get_n_pages()
for i in range(n_pages):
tab = self.datasets_notebook.get_nth_page(i)
label = self.datasets_notebook.get_tab_label(tab)
name = label.get_label()
treeview = tab.get_children()[0]
model = treeview.get_model()
points = [(x, y) for x, y in model]
if len(points) > 0:
datasets.append((name, points))
return datasets
def _get_chart_type(self):
action_group = self.uimanager.get_action_groups()[0]
action = action_group.get_action('verticalbar')
return action.get_current_value()
def _get_options(self, iter):
    """Rebuild a nested options dict from ``self.options_store``.

    Inverse of ``_fill_options_store``: walks the siblings starting at
    ``iter``; rows whose type column is None are groups and recurse
    into their children, leaf rows are converted from their string
    representation back to their native type via ``str_converters``.
    """
    options = {}
    while iter is not None:
        name, value, value_type = self.options_store.get(iter, 0, 1, 2)
        if value_type is None:
            # Group row: value lives in the child rows.
            child = self.options_store.iter_children(iter)
            options[name] = self._get_options(child)
        else:
            # Leaf row: convert the stored string back; None stays None.
            if value is not None:
                converter = str_converters[value_type]
                value = converter(value)
            options[name] = value
        iter = self.options_store.iter_next(iter)
    return options
def _edit_point_internal(self, model, iter):
x, y = model.get(iter, 0, 1)
dialog = PointDialog(self.main_window, x, y)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
x, y = dialog.get_point()
model.set(iter, 0, x, 1, y)
self.refresh()
dialog.destroy()
def _edit_option_internal(self, model, iter):
name, value, value_type = model.get(iter, 0, 1, 2)
parents = []
parent = model.iter_parent(iter)
while parent is not None:
parents.append(model.get_value(parent, 0))
parent = model.iter_parent(parent)
parents.reverse()
parents.append(name)
label = u'.'.join(parents)
dialog = OptionDialog(self.main_window, label, value, value_type)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
new_value = dialog.get_value()
if new_value == "":
new_value = None
model.set_value(iter, 1, new_value)
self.refresh()
dialog.destroy()
def delete_event(self, widget, event, data=None):
return False
def destroy(self, widget, data=None):
gtk.main_quit()
def drawing_area_expose_event(self, widget, event, data=None):
if self.chart is None:
return
cr = widget.window.cairo_create()
cr.rectangle(event.area.x, event.area.y,
event.area.width, event.area.height)
cr.clip()
cr.set_source_surface(self.chart.surface, 0, 0)
cr.paint()
def drawing_area_size_allocate_event(self, widget, event, data=None):
if self.chart is not None:
self.refresh()
def on_chart_type_change(self, action, current, data=None):
if self.chart is not None:
self.refresh()
def dataset_treeview_row_activated(self, treeview, path, view_column):
model = treeview.get_model()
iter = model.get_iter(path)
self._edit_point_internal(model, iter)
def options_treeview_row_activated(self, treeview, path, view_column):
model = treeview.get_model()
iter = model.get_iter(path)
self._edit_option_internal(model, iter)
def quit(self, action):
self.main_window.destroy()
def add_dataset(self, action):
n_pages = self.datasets_notebook.get_n_pages()
suggested_name = u'Dataset %d' % (n_pages + 1)
dialog = TextInputDialog(self.main_window, suggested_name)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
name = dialog.get_name()
self._create_dataset(name)
self.datasets_notebook.set_current_page(n_pages)
dialog.destroy()
def remove_dataset(self, action):
current_tab = self.datasets_notebook.get_current_page()
assert current_tab != -1
self.datasets_notebook.remove_page(current_tab)
def edit_dataset(self, action):
tab = self._get_current_dataset_tab()
assert tab is not None
label = self.datasets_notebook.get_tab_label(tab)
name = label.get_label()
dialog = TextInputDialog(self.main_window, name)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
name = dialog.get_name()
label.set_label(name)
dialog.destroy()
def add_point(self, action):
tab = self._get_current_dataset_tab()
assert tab is not None
treeview = tab.get_children()[0]
model = treeview.get_model()
dialog = PointDialog(self.main_window, len(model) * 1.0, 0.0)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
x, y = dialog.get_point()
model.append((x, y))
self.refresh()
dialog.destroy()
def remove_point(self, action):
tab = self._get_current_dataset_tab()
assert tab is not None
treeview = tab.get_children()[0]
selection = treeview.get_selection()
model, selected = selection.get_selected()
if selected is None:
warning(self.main_window, "You must select the point to remove")
return
model.remove(selected)
self.refresh()
def edit_point(self, action):
tab = self._get_current_dataset_tab()
assert tab is not None
treeview = tab.get_children()[0]
selection = treeview.get_selection()
model, selected = selection.get_selected()
if selected is None:
warning(self.main_window, "You must select the point to edit")
return
self._edit_point_internal(model, selected)
def edit_option(self, action):
selection = self.options_treeview.get_selection()
model, selected = selection.get_selected()
if selected is None:
warning(self.main_window, "You must select the option to edit")
return
self._edit_option_internal(model, selected)
def refresh(self, action=None):
    """Rebuild the chart from the current datasets/options and redraw.

    ``action`` is unused; it is only there so this method can double as
    a menu/toolbar action callback.
    """
    datasets = self._get_datasets()
    if datasets:
        root = self.options_store.get_iter_first()
        options = self._get_options(root)
        chart_type = self._get_chart_type()
        # Size the chart to the drawing area's current allocation.
        alloc = self.drawing_area.get_allocation()
        self.chart = self.app.get_chart(datasets, options, chart_type,
                                        alloc.width, alloc.height)
        self.drawing_area.queue_draw()
    else:
        # NOTE(review): the chart is cleared but queue_draw() is not
        # called here, so the last rendered chart may stay on screen
        # until the next expose event — confirm whether that is intended.
        self.chart = None
def generate_random_points(self, action=None):
tab = self._get_current_dataset_tab()
assert tab is not None
treeview = tab.get_children()[0]
model = treeview.get_model()
dialog = RandomGeneratorDialog(self.main_window)
response = dialog.run()
if response == gtk.RESPONSE_ACCEPT:
points = dialog.generate_points()
for point in points:
model.append(point)
self.refresh()
dialog.destroy()
def dump_chart_state(self, action=None):
if self.chart is None:
return
alloc = self.drawing_area.get_allocation()
print 'CHART STATE'
print '-' * 70
print 'surface: %d x %d' % (alloc.width, alloc.height)
print 'area :', self.chart.area
print
print 'minxval:', self.chart.minxval
print 'maxxval:', self.chart.maxxval
print 'xrange :', self.chart.xrange
print
print 'minyval:', self.chart.minyval
print 'maxyval:', self.chart.maxyval
print 'yrange :', self.chart.yrange
def about(self, action=None):
dialog = AboutDialog(self.main_window)
dialog.run()
dialog.destroy()
def run(self):
gtk.main()
def str2bool(text):
    """Parse an option value string into a bool.

    Only a case-insensitive "true" yields True; every other string
    (including "1" or "yes") yields False, matching how bool options
    are rendered into the tree store with ``str()``.

    Note: the original parameter was named ``str``, shadowing the
    builtin; renamed since this function is only ever called
    positionally (as a ``str_converters`` entry).
    """
    return text.lower() == "true"
# Maps an option's declared type to the callable used to convert the
# string stored in the options TreeStore back to a native value
# (see GUI._get_options).  ``unicode`` exists only on Python 2.
str_converters = {
    str: str,
    int: int,
    float: float,
    unicode: unicode,
    bool: str2bool,  # bool("False") would be True, hence the custom parser
}
| lgpl-3.0 |
collinjackson/mojo | third_party/cython/src/Cython/Compiler/ParseTreeTransforms.py | 86 | 115877 | import cython
cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object,
Options=object, UtilNodes=object, LetNode=object,
LetRefNode=object, TreeFragment=object, EncodedString=object,
error=object, warning=object, copy=object)
import PyrexTypes
import Naming
import ExprNodes
import Nodes
import Options
import Builtin
from Cython.Compiler.Visitor import VisitorTransform, TreeVisitor
from Cython.Compiler.Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform
from Cython.Compiler.UtilNodes import LetNode, LetRefNode, ResultRefNode
from Cython.Compiler.TreeFragment import TreeFragment
from Cython.Compiler.StringEncoding import EncodedString
from Cython.Compiler.Errors import error, warning, CompileError, InternalError
from Cython.Compiler.Code import UtilityCode
import copy
class NameNodeCollector(TreeVisitor):
    """Collect all NameNodes of a (sub-)tree in the ``name_nodes``
    attribute.
    """
    def __init__(self):
        super(NameNodeCollector, self).__init__()
        # Accumulates every NameNode encountered, in visit order.
        self.name_nodes = []

    def visit_NameNode(self, node):
        # Record the node; no recursion needed below a NameNode.
        self.name_nodes.append(node)

    def visit_Node(self, node):
        # Generic fallback for all other node types: just descend.
        self._visitchildren(node, None)
class SkipDeclarations(object):
    """
    Variable and function declarations can often have a deep tree structure,
    and yet most transformations don't need to descend to this depth.

    Declaration nodes are removed after AnalyseDeclarationsTransform, so there
    is no need to use this for transformations after that point.

    Mix this into a transform to make each declaration node a leaf:
    every handler returns the node unchanged without visiting children.
    """
    def visit_CTypeDefNode(self, node):
        return node

    def visit_CVarDefNode(self, node):
        return node

    def visit_CDeclaratorNode(self, node):
        return node

    def visit_CBaseTypeNode(self, node):
        return node

    def visit_CEnumDefNode(self, node):
        return node

    def visit_CStructOrUnionDefNode(self, node):
        return node
class NormalizeTree(CythonTransform):
    """
    This transform fixes up a few things after parsing
    in order to make the parse tree more suitable for
    transforms.

    a) After parsing, blocks with only one statement will
    be represented by that statement, not by a StatListNode.
    When doing transforms this is annoying and inconsistent,
    as one cannot in general remove a statement in a consistent
    way and so on. This transform wraps any single statements
    in a StatListNode containing a single statement.

    b) The PassStatNode is a noop and serves no purpose beyond
    plugging such one-statement blocks; i.e., once parsed a
    "pass" can just as well be represented using an empty
    StatListNode. This means less special cases to worry about
    in subsequent transforms (one always checks to see if a
    StatListNode has no children to see if the block is empty).
    """

    def __init__(self, context):
        super(NormalizeTree, self).__init__(context)
        # Track whether the current node sits directly inside a statement
        # list (no wrapping needed) or inside an expression (never wrap).
        self.is_in_statlist = False
        self.is_in_expr = False

    def visit_ExprNode(self, node):
        # Save/restore the flag manually since visits recurse.
        stacktmp = self.is_in_expr
        self.is_in_expr = True
        self.visitchildren(node)
        self.is_in_expr = stacktmp
        return node

    def visit_StatNode(self, node, is_listcontainer=False):
        stacktmp = self.is_in_statlist
        self.is_in_statlist = is_listcontainer
        self.visitchildren(node)
        self.is_in_statlist = stacktmp
        # A bare statement outside any StatListNode gets wrapped in one
        # so that later transforms can always splice into a list.
        if not self.is_in_statlist and not self.is_in_expr:
            return Nodes.StatListNode(pos=node.pos, stats=[node])
        else:
            return node

    def visit_StatListNode(self, node):
        self.is_in_statlist = True
        self.visitchildren(node)
        self.is_in_statlist = False
        return node

    def visit_ParallelAssignmentNode(self, node):
        # These nodes already contain a statement list; don't double-wrap.
        return self.visit_StatNode(node, True)

    def visit_CEnumDefNode(self, node):
        return self.visit_StatNode(node, True)

    def visit_CStructOrUnionDefNode(self, node):
        return self.visit_StatNode(node, True)

    def visit_PassStatNode(self, node):
        """Eliminate PassStatNode"""
        if not self.is_in_statlist:
            # Stand-alone "pass" becomes an empty statement list.
            return Nodes.StatListNode(pos=node.pos, stats=[])
        else:
            # Inside a list it can simply be removed.
            return []

    def visit_ExprStatNode(self, node):
        """Eliminate useless string literals"""
        if node.expr.is_string_literal:
            # A bare string statement (e.g. stray docstring) is a no-op.
            return self.visit_PassStatNode(node)
        else:
            return self.visit_StatNode(node)

    def visit_CDeclaratorNode(self, node):
        # Declarators are leaves for this transform.
        return node
class PostParseError(CompileError):
    """Raised by the PostParse transform for invalid post-parse constructs."""
    pass

# error strings checked by unit tests, so define them
ERR_CDEF_INCLASS = 'Cannot assign default value to fields in cdef classes, structs or unions'
ERR_BUF_DEFAULTS = 'Invalid buffer defaults specification (see docs)'
ERR_INVALID_SPECIALATTR_TYPE = 'Special attributes must not have a type declared'
class PostParse(ScopeTrackingTransform):
"""
Basic interpretation of the parse tree, as well as validity
checking that can be done on a very basic level on the parse
tree (while still not being a problem with the basic syntax,
as such).
Specifically:
- Default values to cdef assignments are turned into single
assignments following the declaration (everywhere but in class
bodies, where they raise a compile error)
- Interpret some node structures into Python runtime values.
Some nodes take compile-time arguments (currently:
TemplatedTypeNode[args] and __cythonbufferdefaults__ = {args}),
which should be interpreted. This happens in a general way
and other steps should be taken to ensure validity.
Type arguments cannot be interpreted in this way.
- For __cythonbufferdefaults__ the arguments are checked for
validity.
TemplatedTypeNode has its directives interpreted:
Any first positional argument goes into the "dtype" attribute,
any "ndim" keyword argument goes into the "ndim" attribute and
so on. Also it is checked that the directive combination is valid.
- __cythonbufferdefaults__ attributes are parsed and put into the
type information.
Note: Currently Parsing.py does a lot of interpretation and
reorganization that can be refactored into this transform
if a more pure Abstract Syntax Tree is wanted.
"""
def __init__(self, context):
super(PostParse, self).__init__(context)
self.specialattribute_handlers = {
'__cythonbufferdefaults__' : self.handle_bufferdefaults
}
def visit_ModuleNode(self, node):
self.lambda_counter = 1
self.genexpr_counter = 1
return super(PostParse, self).visit_ModuleNode(node)
def visit_LambdaNode(self, node):
# unpack a lambda expression into the corresponding DefNode
lambda_id = self.lambda_counter
self.lambda_counter += 1
node.lambda_name = EncodedString(u'lambda%d' % lambda_id)
collector = YieldNodeCollector()
collector.visitchildren(node.result_expr)
if collector.yields or isinstance(node.result_expr, ExprNodes.YieldExprNode):
body = Nodes.ExprStatNode(
node.result_expr.pos, expr=node.result_expr)
else:
body = Nodes.ReturnStatNode(
node.result_expr.pos, value=node.result_expr)
node.def_node = Nodes.DefNode(
node.pos, name=node.name, lambda_name=node.lambda_name,
args=node.args, star_arg=node.star_arg,
starstar_arg=node.starstar_arg,
body=body, doc=None)
self.visitchildren(node)
return node
def visit_GeneratorExpressionNode(self, node):
# unpack a generator expression into the corresponding DefNode
genexpr_id = self.genexpr_counter
self.genexpr_counter += 1
node.genexpr_name = EncodedString(u'genexpr%d' % genexpr_id)
node.def_node = Nodes.DefNode(node.pos, name=node.name,
doc=None,
args=[], star_arg=None,
starstar_arg=None,
body=node.loop)
self.visitchildren(node)
return node
# cdef variables
def handle_bufferdefaults(self, decl):
if not isinstance(decl.default, ExprNodes.DictNode):
raise PostParseError(decl.pos, ERR_BUF_DEFAULTS)
self.scope_node.buffer_defaults_node = decl.default
self.scope_node.buffer_defaults_pos = decl.pos
def visit_CVarDefNode(self, node):
# This assumes only plain names and pointers are assignable on
# declaration. Also, it makes use of the fact that a cdef decl
# must appear before the first use, so we don't have to deal with
# "i = 3; cdef int i = i" and can simply move the nodes around.
try:
self.visitchildren(node)
stats = [node]
newdecls = []
for decl in node.declarators:
declbase = decl
while isinstance(declbase, Nodes.CPtrDeclaratorNode):
declbase = declbase.base
if isinstance(declbase, Nodes.CNameDeclaratorNode):
if declbase.default is not None:
if self.scope_type in ('cclass', 'pyclass', 'struct'):
if isinstance(self.scope_node, Nodes.CClassDefNode):
handler = self.specialattribute_handlers.get(decl.name)
if handler:
if decl is not declbase:
raise PostParseError(decl.pos, ERR_INVALID_SPECIALATTR_TYPE)
handler(decl)
continue # Remove declaration
raise PostParseError(decl.pos, ERR_CDEF_INCLASS)
first_assignment = self.scope_type != 'module'
stats.append(Nodes.SingleAssignmentNode(node.pos,
lhs=ExprNodes.NameNode(node.pos, name=declbase.name),
rhs=declbase.default, first=first_assignment))
declbase.default = None
newdecls.append(decl)
node.declarators = newdecls
return stats
except PostParseError, e:
# An error in a cdef clause is ok, simply remove the declaration
# and try to move on to report more errors
self.context.nonfatal_error(e)
return None
# Split parallel assignments (a,b = b,a) into separate partial
# assignments that are executed rhs-first using temps. This
# restructuring must be applied before type analysis so that known
# types on rhs and lhs can be matched directly. It is required in
# the case that the types cannot be coerced to a Python type in
# order to assign from a tuple.
def visit_SingleAssignmentNode(self, node):
self.visitchildren(node)
return self._visit_assignment_node(node, [node.lhs, node.rhs])
def visit_CascadedAssignmentNode(self, node):
self.visitchildren(node)
return self._visit_assignment_node(node, node.lhs_list + [node.rhs])
def _visit_assignment_node(self, node, expr_list):
"""Flatten parallel assignments into separate single
assignments or cascaded assignments.
"""
if sum([ 1 for expr in expr_list
if expr.is_sequence_constructor or expr.is_string_literal ]) < 2:
# no parallel assignments => nothing to do
return node
expr_list_list = []
flatten_parallel_assignments(expr_list, expr_list_list)
temp_refs = []
eliminate_rhs_duplicates(expr_list_list, temp_refs)
nodes = []
for expr_list in expr_list_list:
lhs_list = expr_list[:-1]
rhs = expr_list[-1]
if len(lhs_list) == 1:
node = Nodes.SingleAssignmentNode(rhs.pos,
lhs = lhs_list[0], rhs = rhs)
else:
node = Nodes.CascadedAssignmentNode(rhs.pos,
lhs_list = lhs_list, rhs = rhs)
nodes.append(node)
if len(nodes) == 1:
assign_node = nodes[0]
else:
assign_node = Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes)
if temp_refs:
duplicates_and_temps = [ (temp.expression, temp)
for temp in temp_refs ]
sort_common_subsequences(duplicates_and_temps)
for _, temp_ref in duplicates_and_temps[::-1]:
assign_node = LetNode(temp_ref, assign_node)
return assign_node
def _flatten_sequence(self, seq, result):
for arg in seq.args:
if arg.is_sequence_constructor:
self._flatten_sequence(arg, result)
else:
result.append(arg)
return result
def visit_DelStatNode(self, node):
self.visitchildren(node)
node.args = self._flatten_sequence(node, [])
return node
def visit_ExceptClauseNode(self, node):
if node.is_except_as:
# except-as must delete NameNode target at the end
del_target = Nodes.DelStatNode(
node.pos,
args=[ExprNodes.NameNode(
node.target.pos, name=node.target.name)],
ignore_nonexisting=True)
node.body = Nodes.StatListNode(
node.pos,
stats=[Nodes.TryFinallyStatNode(
node.pos,
body=node.body,
finally_clause=Nodes.StatListNode(
node.pos,
stats=[del_target]))])
self.visitchildren(node)
return node
def eliminate_rhs_duplicates(expr_list_list, ref_node_sequence):
    """Replace rhs items by LetRefNodes if they appear more than once.
    Creates a sequence of LetRefNodes that set up the required temps
    and appends them to ref_node_sequence. The input list is modified
    in-place.

    Note: duplicate detection relies on node objects being used as
    set/dict keys, i.e. on whatever hashing/equality the node classes
    define — the same node object appearing twice is the primary case.
    """
    seen_nodes = set()
    ref_nodes = {}
    def find_duplicates(node):
        if node.is_literal or node.is_name:
            # no need to replace those; can't include attributes here
            # as their access is not necessarily side-effect free
            return
        if node in seen_nodes:
            # Second sighting: allocate one temp (LetRefNode) per node.
            if node not in ref_nodes:
                ref_node = LetRefNode(node)
                ref_nodes[node] = ref_node
                ref_node_sequence.append(ref_node)
        else:
            seen_nodes.add(node)
            # Only sequence constructors are recursed into; other
            # compound expressions are treated as opaque.
            if node.is_sequence_constructor:
                for item in node.args:
                    find_duplicates(item)

    for expr_list in expr_list_list:
        rhs = expr_list[-1]
        find_duplicates(rhs)
    if not ref_nodes:
        # No duplicates found; nothing to rewrite.
        return

    def substitute_nodes(node):
        if node in ref_nodes:
            return ref_nodes[node]
        elif node.is_sequence_constructor:
            node.args = list(map(substitute_nodes, node.args))
        return node

    # replace nodes inside of the common subexpressions
    for node in ref_nodes:
        if node.is_sequence_constructor:
            node.args = list(map(substitute_nodes, node.args))

    # replace common subexpressions on all rhs items
    for expr_list in expr_list_list:
        expr_list[-1] = substitute_nodes(expr_list[-1])
def sort_common_subsequences(items):
    """Sort items/subsequences so that all items and subsequences that
    an item contains appear before the item itself. This is needed
    because each rhs item must only be evaluated once, so its value
    must be evaluated first and then reused when packing sequences
    that contain it.
    This implies a partial order, and the sort must be stable to
    preserve the original order as much as possible, so we use a
    simple insertion sort (which is very fast for short sequences, the
    normal case in practice).

    Each item is a pair (expression, temp ref node); the pairs come
    from the caller's (temp.expression, temp) list.
    """
    def contains(seq, x):
        # True if x occurs (by identity) anywhere inside the possibly
        # nested sequence-constructor arguments 'seq'.
        for item in seq:
            if item is x:
                return True
            elif item.is_sequence_constructor and contains(item.args, x):
                return True
        return False
    def lower_than(a,b):
        # a must be evaluated before b if b is a sequence containing a.
        return b.is_sequence_constructor and contains(b.args, a)
    for pos, item in enumerate(items):
        key = item[1] # the ResultRefNode which has already been injected into the sequences
        new_pos = pos
        # scan left for the left-most already-placed item that must come
        # after 'key'; 'key' has to move in front of all of them
        for i in xrange(pos-1, -1, -1):
            if lower_than(key, items[i][0]):
                new_pos = i
        if new_pos != pos:
            # shift the skipped-over items one slot right, insert 'item'
            for i in xrange(pos, new_pos, -1):
                items[i] = items[i-1]
            items[new_pos] = item
def unpack_string_to_character_literals(literal):
    """Split a string literal node into a list of single-character
    literal nodes of the same node class, all sharing the original
    node's source position.  Value and constant_result of each new node
    are the same single-character object.
    """
    pos = literal.pos
    node_type = literal.__class__
    value_type = literal.value.__class__

    def make_char_node(ch):
        # keep value and constant_result identical, as the original did
        cval = value_type(ch)
        return node_type(pos, value=cval, constant_result=cval)

    return [make_char_node(ch) for ch in literal.value]
def flatten_parallel_assignments(input, output):
    # The input is a list of expression nodes, representing the LHSs
    # and RHS of one (possibly cascaded) assignment statement.  For
    # sequence constructors, rearranges the matching parts of both
    # sides into a list of equivalent assignments between the
    # individual elements.  This transformation is applied
    # recursively, so that nested structures get matched as well.
    rhs = input[-1]
    if (not (rhs.is_sequence_constructor or isinstance(rhs, ExprNodes.UnicodeNode))
        or not sum([lhs.is_sequence_constructor for lhs in input[:-1]])):
        # nothing to unpack: either the rhs is not unpackable, or no
        # lhs target is a sequence constructor
        output.append(input)
        return
    complete_assignments = []
    # rhs_args is always bound here: the guard above guarantees rhs is
    # either a sequence constructor or a (unicode) string literal
    if rhs.is_sequence_constructor:
        rhs_args = rhs.args
    elif rhs.is_string_literal:
        rhs_args = unpack_string_to_character_literals(rhs)
    rhs_size = len(rhs_args)
    # one target list per rhs element, to be filled from each lhs
    lhs_targets = [ [] for _ in xrange(rhs_size) ]
    starred_assignments = []
    for lhs in input[:-1]:
        if not lhs.is_sequence_constructor:
            # non-sequence target: gets the complete (re-packed) rhs
            if lhs.is_starred:
                error(lhs.pos, "starred assignment target must be in a list or tuple")
            complete_assignments.append(lhs)
            continue
        lhs_size = len(lhs.args)
        starred_targets = sum([1 for expr in lhs.args if expr.is_starred])
        if starred_targets > 1:
            error(lhs.pos, "more than 1 starred expression in assignment")
            output.append([lhs,rhs])
            continue
        elif lhs_size - starred_targets > rhs_size:
            error(lhs.pos, "need more than %d value%s to unpack"
                  % (rhs_size, (rhs_size != 1) and 's' or ''))
            output.append([lhs,rhs])
            continue
        elif starred_targets:
            map_starred_assignment(lhs_targets, starred_assignments,
                                   lhs.args, rhs_args)
        elif lhs_size < rhs_size:
            error(lhs.pos, "too many values to unpack (expected %d, got %d)"
                  % (lhs_size, rhs_size))
            output.append([lhs,rhs])
            continue
        else:
            # exact match: distribute the lhs elements position by position
            for targets, expr in zip(lhs_targets, lhs.args):
                targets.append(expr)
    if complete_assignments:
        complete_assignments.append(rhs)
        output.append(complete_assignments)
    # recursively flatten partial assignments
    for cascade, rhs in zip(lhs_targets, rhs_args):
        if cascade:
            cascade.append(rhs)
            flatten_parallel_assignments(cascade, output)
    # recursively flatten starred assignments
    for cascade in starred_assignments:
        if cascade[0].is_sequence_constructor:
            flatten_parallel_assignments(cascade, output)
        else:
            output.append(cascade)
def map_starred_assignment(lhs_targets, starred_assignments, lhs_args, rhs_args):
    # Appends the fixed-position LHS targets to the target list that
    # appear left and right of the starred argument.
    #
    # The starred_assignments list receives a new tuple
    # (lhs_target, rhs_values_list) that maps the remaining arguments
    # (those that match the starred target) to a list.
    # left side of the starred target
    for i, (targets, expr) in enumerate(zip(lhs_targets, lhs_args)):
        if expr.is_starred:
            starred = i
            # number of fixed targets to the right of the starred one
            lhs_remaining = len(lhs_args) - i - 1
            break
        targets.append(expr)
    else:
        # caller guarantees at least one starred target
        raise InternalError("no starred arg found when splitting starred assignment")
    # right side of the starred target
    # NOTE: when the starred target is last, lhs_remaining == 0 and
    # lhs_targets[-0:] is the *whole* list, but lhs_args[starred+1:] is
    # then empty, so zip() yields nothing and this stays correct.
    for i, (targets, expr) in enumerate(zip(lhs_targets[-lhs_remaining:],
                                            lhs_args[starred + 1:])):
        targets.append(expr)
    # the starred target itself, must be assigned a (potentially empty) list
    target = lhs_args[starred].target # unpack starred node
    starred_rhs = rhs_args[starred:]
    if lhs_remaining:
        # drop the rhs values claimed by the fixed targets on the right
        starred_rhs = starred_rhs[:-lhs_remaining]
    if starred_rhs:
        pos = starred_rhs[0].pos
    else:
        pos = target.pos
    starred_assignments.append([
        target, ExprNodes.ListNode(pos=pos, args=starred_rhs)])
class PxdPostParse(CythonTransform, SkipDeclarations):
    """
    Basic interpretation/validity checking that should only be
    done on pxd trees.
    A lot of this checking currently happens in the parser; but
    what is listed below happens here.
    - "def" functions are let through only if they fill the
    getbuffer/releasebuffer slots
    - cdef functions are let through only if they are on the
    top level and are declared "inline"
    """
    ERR_INLINE_ONLY = "function definition in pxd file must be declared 'cdef inline'"
    ERR_NOGO_WITH_INLINE = "inline function definition in pxd file cannot be '%s'"

    def __call__(self, node):
        # everything in a .pxd starts out in 'pxd' scope; we switch to
        # 'cclass' while walking through cdef class bodies below
        self.scope_type = 'pxd'
        return super(PxdPostParse, self).__call__(node)

    def visit_CClassDefNode(self, node):
        outer_scope = self.scope_type
        self.scope_type = 'cclass'
        self.visitchildren(node)
        self.scope_type = outer_scope
        return node

    def visit_FuncDefNode(self, node):
        # FuncDefNode always come with an implementation (without
        # an imp they are CVarDefNodes..)
        err = self.ERR_INLINE_ONLY

        # plain "def" functions pass only as buffer slots of a cdef class
        if (isinstance(node, Nodes.DefNode)
                and self.scope_type == 'cclass'
                and node.name in ('__getbuffer__', '__releasebuffer__')):
            err = None

        if isinstance(node, Nodes.CFuncDefNode):
            if (u'inline' in node.modifiers
                    and self.scope_type in ('pxd', 'cclass')):
                node.inline_in_pxd = True
                if node.visibility != 'private':
                    err = self.ERR_NOGO_WITH_INLINE % node.visibility
                elif node.api:
                    err = self.ERR_NOGO_WITH_INLINE % 'api'
                else:
                    err = None  # allow inline function
            else:
                err = self.ERR_INLINE_ONLY

        if err is None:
            return node
        # report the problem and drop the offending definition
        self.context.nonfatal_error(PostParseError(node.pos, err))
        return None
class InterpretCompilerDirectives(CythonTransform, SkipDeclarations):
    """
    After parsing, directives can be stored in a number of places:
    - #cython-comments at the top of the file (stored in ModuleNode)
    - Command-line arguments overriding these
    - @cython.directivename decorators
    - with cython.directivename: statements
    This transform is responsible for interpreting these various sources
    and store the directive in two ways:
    - Set the directives attribute of the ModuleNode for global directives.
    - Use a CompilerDirectivesNode to override directives for a subtree.
    (The first one is primarily to not have to modify with the tree
    structure, so that ModuleNode stay on top.)
    The directives are stored in dictionaries from name to value in effect.
    Each such dictionary is always filled in for all possible directives,
    using default values where no value is given by the user.
    The available directives are controlled in Options.py.
    Note that we have to run this prior to analysis, and so some minor
    duplication of functionality has to occur: We manually track cimports
    and which names the "cython" module may have been imported to.
    """
    # "cython.xyz" names that map directly to unary operator node classes
    unop_method_nodes = {
        'typeof': ExprNodes.TypeofNode,
        'operator.address': ExprNodes.AmpersandNode,
        'operator.dereference': ExprNodes.DereferenceNode,
        'operator.preincrement' : ExprNodes.inc_dec_constructor(True, '++'),
        'operator.predecrement' : ExprNodes.inc_dec_constructor(True, '--'),
        'operator.postincrement': ExprNodes.inc_dec_constructor(False, '++'),
        'operator.postdecrement': ExprNodes.inc_dec_constructor(False, '--'),
        # For backwards compatability.
        'address': ExprNodes.AmpersandNode,
    }
    binop_method_nodes = {
        'operator.comma' : ExprNodes.c_binop_constructor(','),
    }
    # names from the "cython" module that the compiler handles itself
    special_methods = set(['declare', 'union', 'struct', 'typedef',
                           'sizeof', 'cast', 'pointer', 'compiled',
                           'NULL', 'fused_type', 'parallel'])
    special_methods.update(unop_method_nodes.keys())
    # directive names allowed below "cython.parallel"
    valid_parallel_directives = set([
        "parallel",
        "prange",
        "threadid",
        # "threadsavailable",
    ])

    def __init__(self, context, compilation_directive_defaults):
        super(InterpretCompilerDirectives, self).__init__(context)
        # deep-copy the defaults so that later in-place changes to
        # (possibly mutable) directive values cannot leak back
        self.compilation_directive_defaults = {}
        for key, value in compilation_directive_defaults.items():
            self.compilation_directive_defaults[unicode(key)] = copy.deepcopy(value)
        self.cython_module_names = set()
        self.directive_names = {}
        self.parallel_directives = {}

    def check_directive_scope(self, pos, directive, scope):
        """Return True if *directive* may be used in *scope*; otherwise
        report a nonfatal error and return False.  Unknown directive
        names are also reported (but still return True).
        """
        legal_scopes = Options.directive_scopes.get(directive, None)
        if legal_scopes and scope not in legal_scopes:
            self.context.nonfatal_error(PostParseError(pos, 'The %s compiler directive '
                                        'is not allowed in %s scope' % (directive, scope)))
            return False
        else:
            if (directive not in Options.directive_defaults
                    and directive not in Options.directive_types):
                error(pos, "Invalid directive: '%s'." % (directive,))
            return True

    # Set up processing and handle the cython: comments.
    def visit_ModuleNode(self, node):
        # iterate over a copy: offending directives are deleted below
        for key in list(node.directive_comments):
            if not self.check_directive_scope(node.pos, key, 'module'):
                # check_directive_scope() already reported the error;
                # the previous call to self.wrong_scope_error() here was
                # undefined and would have raised an AttributeError
                del node.directive_comments[key]
        self.module_scope = node.scope

        # effective module directives: defaults, overridden by the
        # compilation options, overridden by the #cython: comments
        directives = copy.deepcopy(Options.directive_defaults)
        directives.update(copy.deepcopy(self.compilation_directive_defaults))
        directives.update(node.directive_comments)
        self.directives = directives
        node.directives = directives
        node.parallel_directives = self.parallel_directives
        self.visitchildren(node)
        node.cython_module_names = self.cython_module_names
        return node

    # The following four functions track imports and cimports that
    # begin with "cython"
    def is_cython_directive(self, name):
        return (name in Options.directive_types or
                name in self.special_methods or
                PyrexTypes.parse_basic_type(name))

    def is_parallel_directive(self, full_name, pos):
        """
        Checks to see if fullname (e.g. cython.parallel.prange) is a valid
        parallel directive. If it is a star import it also updates the
        parallel_directives.
        """
        result = (full_name + ".").startswith("cython.parallel.")
        if result:
            directive = full_name.split('.')
            if full_name == u"cython.parallel":
                self.parallel_directives[u"parallel"] = u"cython.parallel"
            elif full_name == u"cython.parallel.*":
                for name in self.valid_parallel_directives:
                    self.parallel_directives[name] = u"cython.parallel.%s" % name
            elif (len(directive) != 3 or
                  directive[-1] not in self.valid_parallel_directives):
                error(pos, "No such directive: %s" % full_name)
            # any use of cython.parallel requires thread initialisation
            self.module_scope.use_utility_code(
                UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c"))
        return result

    def visit_CImportStatNode(self, node):
        if node.module_name == u"cython":
            self.cython_module_names.add(node.as_name or u"cython")
        elif node.module_name.startswith(u"cython."):
            if node.module_name.startswith(u"cython.parallel."):
                error(node.pos, node.module_name + " is not a module")
            if node.module_name == u"cython.parallel":
                if node.as_name and node.as_name != u"cython":
                    self.parallel_directives[node.as_name] = node.module_name
                else:
                    self.cython_module_names.add(u"cython")
                    self.parallel_directives[
                        u"cython.parallel"] = node.module_name
                self.module_scope.use_utility_code(
                    UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c"))
            elif node.as_name:
                self.directive_names[node.as_name] = node.module_name[7:]
            else:
                self.cython_module_names.add(u"cython")
            # if this cimport was a compiler directive, we don't
            # want to leave the cimport node sitting in the tree
            return None
        return node

    def visit_FromCImportStatNode(self, node):
        if (node.module_name == u"cython") or \
               node.module_name.startswith(u"cython."):
            submodule = (node.module_name + u".")[7:]
            newimp = []
            for pos, name, as_name, kind in node.imported_names:
                full_name = submodule + name
                qualified_name = u"cython." + full_name
                if self.is_parallel_directive(qualified_name, node.pos):
                    # from cython cimport parallel, or
                    # from cython.parallel cimport parallel, prange, ...
                    self.parallel_directives[as_name or name] = qualified_name
                elif self.is_cython_directive(full_name):
                    if as_name is None:
                        as_name = full_name
                    self.directive_names[as_name] = full_name
                    if kind is not None:
                        self.context.nonfatal_error(PostParseError(pos,
                            "Compiler directive imports must be plain imports"))
                else:
                    # a regular import: keep it in the tree
                    newimp.append((pos, name, as_name, kind))
            if not newimp:
                return None
            node.imported_names = newimp
        return node

    def visit_FromImportStatNode(self, node):
        if (node.module.module_name.value == u"cython") or \
               node.module.module_name.value.startswith(u"cython."):
            submodule = (node.module.module_name.value + u".")[7:]
            newimp = []
            for name, name_node in node.items:
                full_name = submodule + name
                qualified_name = u"cython." + full_name
                if self.is_parallel_directive(qualified_name, node.pos):
                    self.parallel_directives[name_node.name] = qualified_name
                elif self.is_cython_directive(full_name):
                    self.directive_names[name_node.name] = full_name
                else:
                    newimp.append((name, name_node))
            if not newimp:
                return None
            node.items = newimp
        return node

    def visit_SingleAssignmentNode(self, node):
        # "x = __import__('cython...')" behaves like "cimport ... as x"
        if isinstance(node.rhs, ExprNodes.ImportNode):
            module_name = node.rhs.module_name.value
            is_parallel = (module_name + u".").startswith(u"cython.parallel.")
            if module_name != u"cython" and not is_parallel:
                return node
            node = Nodes.CImportStatNode(node.pos,
                                         module_name = module_name,
                                         as_name = node.lhs.name)
            node = self.visit_CImportStatNode(node)
        else:
            self.visitchildren(node)
        return node

    def visit_NameNode(self, node):
        if node.name in self.cython_module_names:
            node.is_cython_module = True
        else:
            node.cython_attribute = self.directive_names.get(node.name)
        return node

    def try_to_parse_directives(self, node):
        # If node is the contents of an directive (in a with statement or
        # decorator), returns a list of (directivename, value) pairs.
        # Otherwise, returns None
        if isinstance(node, ExprNodes.CallNode):
            self.visit(node.function)
            optname = node.function.as_cython_attribute()
            if optname:
                directivetype = Options.directive_types.get(optname)
                if directivetype:
                    args, kwds = node.explicit_args_kwds()
                    directives = []
                    key_value_pairs = []
                    # keyword arguments may be dotted sub-directives
                    # ("x.a" for "cython.x(a=...)"), unless the directive
                    # itself takes a dict
                    if kwds is not None and directivetype is not dict:
                        for keyvalue in kwds.key_value_pairs:
                            key, value = keyvalue
                            sub_optname = "%s.%s" % (optname, key.value)
                            if Options.directive_types.get(sub_optname):
                                directives.append(self.try_to_parse_directive(sub_optname, [value], None, keyvalue.pos))
                            else:
                                key_value_pairs.append(keyvalue)
                        if not key_value_pairs:
                            kwds = None
                        else:
                            kwds.key_value_pairs = key_value_pairs
                        if directives and not kwds and not args:
                            return directives
                    directives.append(self.try_to_parse_directive(optname, args, kwds, node.function.pos))
                    return directives
        elif isinstance(node, (ExprNodes.AttributeNode, ExprNodes.NameNode)):
            self.visit(node)
            optname = node.as_cython_attribute()
            if optname:
                directivetype = Options.directive_types.get(optname)
                if directivetype is bool:
                    # bare "cython.boundscheck" means "True"
                    return [(optname, True)]
                elif directivetype is None:
                    return [(optname, None)]
                else:
                    raise PostParseError(
                        node.pos, "The '%s' directive should be used as a function call." % optname)
        return None

    def try_to_parse_directive(self, optname, args, kwds, pos):
        """Convert one directive call into an (optname, value) pair,
        validating the arguments against the directive's declared type.
        Raises PostParseError for malformed arguments.
        """
        directivetype = Options.directive_types.get(optname)
        if len(args) == 1 and isinstance(args[0], ExprNodes.NoneNode):
            # "directive(None)" resets the directive to its default
            return optname, Options.directive_defaults[optname]
        elif directivetype is bool:
            if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.BoolNode):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time boolean argument' % optname)
            return (optname, args[0].value)
        elif directivetype is int:
            if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.IntNode):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time integer argument' % optname)
            return (optname, int(args[0].value))
        elif directivetype is str:
            if kwds is not None or len(args) != 1 or not isinstance(
                    args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time string argument' % optname)
            return (optname, str(args[0].value))
        elif directivetype is type:
            if kwds is not None or len(args) != 1:
                raise PostParseError(pos,
                    'The %s directive takes one type argument' % optname)
            return (optname, args[0])
        elif directivetype is dict:
            if len(args) != 0:
                raise PostParseError(pos,
                    'The %s directive takes no prepositional arguments' % optname)
            return optname, dict([(key.value, value) for key, value in kwds.key_value_pairs])
        elif directivetype is list:
            if kwds and len(kwds) != 0:
                raise PostParseError(pos,
                    'The %s directive takes no keyword arguments' % optname)
            return optname, [ str(arg.value) for arg in args ]
        elif callable(directivetype):
            # user-supplied parser function for the directive value
            if kwds is not None or len(args) != 1 or not isinstance(
                    args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)):
                raise PostParseError(pos,
                    'The %s directive takes one compile-time string argument' % optname)
            return (optname, directivetype(optname, str(args[0].value)))
        else:
            assert False

    def visit_with_directives(self, body, directives):
        """Wrap *body* in a CompilerDirectivesNode carrying the current
        directives overridden by *directives*.
        """
        olddirectives = self.directives
        newdirectives = copy.copy(olddirectives)
        newdirectives.update(directives)
        self.directives = newdirectives
        assert isinstance(body, Nodes.StatListNode), body
        retbody = self.visit_Node(body)
        directive = Nodes.CompilerDirectivesNode(pos=retbody.pos, body=retbody,
                                                 directives=newdirectives)
        self.directives = olddirectives
        return directive

    # Handle decorators
    def visit_FuncDefNode(self, node):
        directives = self._extract_directives(node, 'function')
        if not directives:
            return self.visit_Node(node)
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def visit_CVarDefNode(self, node):
        directives = self._extract_directives(node, 'function')
        if not directives:
            return node
        for name, value in directives.iteritems():
            if name == 'locals':
                node.directive_locals = value
            elif name != 'final':
                self.context.nonfatal_error(PostParseError(
                    node.pos,
                    "Cdef functions can only take cython.locals() "
                    "or final decorators, got %s." % name))
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def visit_CClassDefNode(self, node):
        directives = self._extract_directives(node, 'cclass')
        if not directives:
            return self.visit_Node(node)
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def visit_PyClassDefNode(self, node):
        directives = self._extract_directives(node, 'class')
        if not directives:
            return self.visit_Node(node)
        body = Nodes.StatListNode(node.pos, stats=[node])
        return self.visit_with_directives(body, directives)

    def _extract_directives(self, node, scope_name):
        """Split node.decorators into real decorators (left on the node)
        and compiler directives; return the directives as a dict.
        """
        if not node.decorators:
            return {}
        # Split the decorators into two lists -- real decorators and directives
        directives = []
        realdecs = []
        for dec in node.decorators:
            new_directives = self.try_to_parse_directives(dec.decorator)
            if new_directives is not None:
                for directive in new_directives:
                    if self.check_directive_scope(node.pos, directive[0], scope_name):
                        directives.append(directive)
            else:
                realdecs.append(dec)
        if realdecs and isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode)):
            raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.")
        else:
            node.decorators = realdecs
        # merge or override repeated directives
        optdict = {}
        directives.reverse() # Decorators coming first take precedence
        for directive in directives:
            name, value = directive
            if name in optdict:
                old_value = optdict[name]
                # keywords and arg lists can be merged, everything
                # else overrides completely
                if isinstance(old_value, dict):
                    old_value.update(value)
                elif isinstance(old_value, list):
                    old_value.extend(value)
                else:
                    optdict[name] = value
            else:
                optdict[name] = value
        return optdict

    # Handle with statements
    def visit_WithStatNode(self, node):
        directive_dict = {}
        for directive in self.try_to_parse_directives(node.manager) or []:
            if directive is not None:
                if node.target is not None:
                    self.context.nonfatal_error(
                        PostParseError(node.pos, "Compiler directive with statements cannot contain 'as'"))
                else:
                    name, value = directive
                    if name in ('nogil', 'gil'):
                        # special case: in pure mode, "with nogil" spells "with cython.nogil"
                        node = Nodes.GILStatNode(node.pos, state = name, body = node.body)
                        return self.visit_Node(node)
                    if self.check_directive_scope(node.pos, name, 'with statement'):
                        directive_dict[name] = value
        if directive_dict:
            return self.visit_with_directives(node.body, directive_dict)
        return self.visit_Node(node)
class ParallelRangeTransform(CythonTransform, SkipDeclarations):
    """
    Transform cython.parallel stuff. The parallel_directives come from the
    module node, set there by InterpretCompilerDirectives.
    x = cython.parallel.threadavailable() -> ParallelThreadAvailableNode
    with nogil, cython.parallel.parallel(): -> ParallelWithBlockNode
    print cython.parallel.threadid() -> ParallelThreadIdNode
    for i in cython.parallel.prange(...): -> ParallelRangeNode
        ...
    """
    # a list of names, maps 'cython.parallel.prange' in the code to
    # ['cython', 'parallel', 'prange']; built up incrementally by
    # visit_NameNode + visit_AttributeNode while walking an expression
    parallel_directive = None
    # Indicates whether a namenode in an expression is the cython module
    namenode_is_cython_module = False
    # Keep track of whether we are the context manager of a 'with' statement
    in_context_manager_section = False
    # One of 'prange' or 'with parallel'. This is used to disallow closely
    # nested 'with parallel:' blocks
    state = None
    # maps fully qualified directive names to the node class that
    # replaces the call/with block
    directive_to_node = {
        u"cython.parallel.parallel": Nodes.ParallelWithBlockNode,
        # u"cython.parallel.threadsavailable": ExprNodes.ParallelThreadsAvailableNode,
        u"cython.parallel.threadid": ExprNodes.ParallelThreadIdNode,
        u"cython.parallel.prange": Nodes.ParallelRangeNode,
    }
    def node_is_parallel_directive(self, node):
        # the name is either an alias imported from cython.parallel or
        # an access through the cython module itself
        return node.name in self.parallel_directives or node.is_cython_module
    def get_directive_class_node(self, node):
        """
        Figure out which parallel directive was used and return the associated
        Node class.
        E.g. for a cython.parallel.prange() call we return ParallelRangeNode
        """
        if self.namenode_is_cython_module:
            # e.g. "cython.parallel.prange" spelled out in full
            directive = '.'.join(self.parallel_directive)
        else:
            # first component is an alias; resolve it, then re-append
            # any trailing attribute path
            directive = self.parallel_directives[self.parallel_directive[0]]
            directive = '%s.%s' % (directive,
                                   '.'.join(self.parallel_directive[1:]))
            directive = directive.rstrip('.')
        cls = self.directive_to_node.get(directive)
        if cls is None and not (self.namenode_is_cython_module and
                                self.parallel_directive[0] != 'parallel'):
            error(node.pos, "Invalid directive: %s" % directive)
        # reset the per-expression tracking state
        self.namenode_is_cython_module = False
        self.parallel_directive = None
        return cls
    def visit_ModuleNode(self, node):
        """
        If any parallel directives were imported, copy them over and visit
        the AST
        """
        if node.parallel_directives:
            self.parallel_directives = node.parallel_directives
            return self.visit_Node(node)
        # No parallel directives were imported, so they can't be used :)
        return node
    def visit_NameNode(self, node):
        if self.node_is_parallel_directive(node):
            # start collecting the dotted directive path
            self.parallel_directive = [node.name]
            self.namenode_is_cython_module = node.is_cython_module
        return node
    def visit_AttributeNode(self, node):
        self.visitchildren(node)
        if self.parallel_directive:
            # extend the dotted path started in visit_NameNode
            self.parallel_directive.append(node.attribute)
        return node
    def visit_CallNode(self, node):
        self.visit(node.function)
        if not self.parallel_directive:
            return node
        # We are a parallel directive, replace this node with the
        # corresponding ParallelSomethingSomething node
        if isinstance(node, ExprNodes.GeneralCallNode):
            args = node.positional_args.args
            kwargs = node.keyword_args
        else:
            args = node.args
            kwargs = {}
        parallel_directive_class = self.get_directive_class_node(node)
        if parallel_directive_class:
            # Note: in case of a parallel() the body is set by
            # visit_WithStatNode
            node = parallel_directive_class(node.pos, args=args, kwargs=kwargs)
        return node
    def visit_WithStatNode(self, node):
        "Rewrite with cython.parallel.parallel() blocks"
        newnode = self.visit(node.manager)
        if isinstance(newnode, Nodes.ParallelWithBlockNode):
            if self.state == 'parallel with':
                error(node.manager.pos,
                      "Nested parallel with blocks are disallowed")
            self.state = 'parallel with'
            body = self.visit(node.body)
            self.state = None
            newnode.body = body
            return newnode
        elif self.parallel_directive:
            # a bare parallel name used as context manager without a call
            parallel_directive_class = self.get_directive_class_node(node)
            if not parallel_directive_class:
                # There was an error, stop here and now
                return None
            if parallel_directive_class is Nodes.ParallelWithBlockNode:
                error(node.pos, "The parallel directive must be called")
                return None
        node.body = self.visit(node.body)
        return node
    def visit_ForInStatNode(self, node):
        "Rewrite 'for i in cython.parallel.prange(...):'"
        self.visit(node.iterator)
        self.visit(node.target)
        in_prange = isinstance(node.iterator.sequence,
                               Nodes.ParallelRangeNode)
        previous_state = self.state
        if in_prange:
            # This will replace the entire ForInStatNode, so copy the
            # attributes
            parallel_range_node = node.iterator.sequence
            parallel_range_node.target = node.target
            parallel_range_node.body = node.body
            parallel_range_node.else_clause = node.else_clause
            node = parallel_range_node
            if not isinstance(node.target, ExprNodes.NameNode):
                error(node.target.pos,
                      "Can only iterate over an iteration variable")
            self.state = 'prange'
        self.visit(node.body)
        self.state = previous_state
        self.visit(node.else_clause)
        return node
    def visit(self, node):
        "Visit a node that may be None"
        if node is not None:
            return super(ParallelRangeTransform, self).visit(node)
class WithTransform(CythonTransform, SkipDeclarations):
    # Desugars "with" statements before analysis: the manager's
    # __enter__/__exit__ calls are made explicit, and the statement body
    # is wrapped in try/except/finally nodes that call __exit__ with the
    # exception info (or three Nones on normal exit).
    def visit_WithStatNode(self, node):
        self.visitchildren(node, 'body')
        pos = node.pos
        body, target, manager = node.body, node.target, node.manager
        # manager.__enter__() -- looked up as a special method
        node.enter_call = ExprNodes.SimpleCallNode(
            pos, function=ExprNodes.AttributeNode(
                pos, obj=ExprNodes.CloneNode(manager),
                attribute=EncodedString('__enter__'),
                is_special_lookup=True),
            args=[],
            is_temp=True)
        if target is not None:
            # 'with ... as target': assign the __enter__() result first
            body = Nodes.StatListNode(
                pos, stats = [
                    Nodes.WithTargetAssignmentStatNode(
                        pos, lhs = target,
                        rhs = ResultRefNode(node.enter_call),
                        orig_rhs = node.enter_call),
                    body])
        # placeholder for the three exception values passed to __exit__
        excinfo_target = ExprNodes.TupleNode(pos, slow=True, args=[
            ExprNodes.ExcValueNode(pos) for _ in range(3)])
        # on exception: re-raise unless __exit__(*exc_info) is true
        except_clause = Nodes.ExceptClauseNode(
            pos, body=Nodes.IfStatNode(
                pos, if_clauses=[
                    Nodes.IfClauseNode(
                        pos, condition=ExprNodes.NotNode(
                            pos, operand=ExprNodes.WithExitCallNode(
                                pos, with_stat=node,
                                test_if_run=False,
                                args=excinfo_target)),
                        body=Nodes.ReraiseStatNode(pos),
                        ),
                    ],
                else_clause=None),
            pattern=None,
            target=None,
            excinfo_target=excinfo_target,
            )
        # on normal exit: the finally clause calls __exit__(None, None, None)
        node.body = Nodes.TryFinallyStatNode(
            pos, body=Nodes.TryExceptStatNode(
                pos, body=body,
                except_clauses=[except_clause],
                else_clause=None,
                ),
            finally_clause=Nodes.ExprStatNode(
                pos, expr=ExprNodes.WithExitCallNode(
                    pos, with_stat=node,
                    test_if_run=True,
                    args=ExprNodes.TupleNode(
                        pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]
                        ))),
            handle_error_case=False,
            )
        return node
    def visit_ExprNode(self, node):
        # With statements are never inside expressions.
        return node
class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
    """Originally, this was the only place where decorators were
    transformed into the corresponding calling code. Now, this is
    done directly in DefNode and PyClassDefNode to avoid reassignments
    to the function/class name - except for cdef class methods. For
    those, the reassignment is required as methods are originally
    defined in the PyMethodDef struct.
    The IndirectionNode allows DefNode to override the decorator
    """

    def visit_DefNode(self, func_node):
        # only cdef class methods still need the explicit reassignment
        outer_scope_type = self.scope_type
        func_node = self.visit_FuncDefNode(func_node)
        if outer_scope_type != 'cclass' or not func_node.decorators:
            return func_node
        return self.handle_decorators(
            func_node, func_node.decorators, func_node.name)

    def handle_decorators(self, node, decorators, name):
        # build the nested decorator call chain, innermost (i.e. last
        # listed) decorator applied first ...
        call_chain = ExprNodes.NameNode(node.pos, name = name)
        for decorator in reversed(decorators):
            call_chain = ExprNodes.SimpleCallNode(
                decorator.pos,
                function = decorator.decorator,
                args = [call_chain])
        # ... then reassign the decorated result back to the name
        reassignment = Nodes.SingleAssignmentNode(
            node.pos,
            lhs = ExprNodes.NameNode(node.pos, name = name),
            rhs = call_chain)
        reassignment = Nodes.IndirectionNode([reassignment])
        node.decorator_indirection = reassignment
        return [node, reassignment]
class CnameDirectivesTransform(CythonTransform, SkipDeclarations):
    """
    Only part of the CythonUtilityCode pipeline. Must be run before
    DecoratorTransform in case this is a decorator for a cdef class.
    It filters out @cname('my_cname') decorators and rewrites them to
    CnameDecoratorNodes.
    """

    def handle_function(self, node):
        if not getattr(node, 'decorators', None):
            return self.visit_Node(node)

        for i, dec in enumerate(node.decorators):
            decorator = dec.decorator
            # only interested in calls of the form @cname(...)
            if not (isinstance(decorator, ExprNodes.CallNode) and
                    decorator.function.is_name and
                    decorator.function.name == 'cname'):
                continue

            args, kwargs = decorator.explicit_args_kwds()
            if kwargs:
                raise AssertionError(
                        "cname decorator does not take keyword arguments")
            if len(args) != 1:
                raise AssertionError(
                        "cname decorator takes exactly one argument")
            if not (args[0].is_literal and
                    args[0].type == Builtin.str_type):
                raise AssertionError(
                        "argument to cname decorator must be a string literal")

            cname = args[0].compile_time_value(None).decode('UTF-8')
            # remove the decorator and wrap the node instead; at most one
            # @cname decorator is handled per node
            del node.decorators[i]
            node = Nodes.CnameDecoratorNode(pos=node.pos, node=node,
                                            cname=cname)
            break

        return self.visit_Node(node)

    visit_FuncDefNode = handle_function
    visit_CClassDefNode = handle_function
    visit_CEnumDefNode = handle_function
    visit_CStructOrUnionDefNode = handle_function
class ForwardDeclareTypes(CythonTransform):
    """Walks the module and calls declare() on module-level type
    definitions (enums, structs/unions, cdef classes) so that their
    names exist in the module scope before the main analysis pass.
    """

    def visit_CompilerDirectivesNode(self, node):
        # temporarily install the node's directives on the module scope
        # while declaring its subtree
        env = self.module_scope
        outer_directives = env.directives
        env.directives = node.directives
        self.visitchildren(node)
        env.directives = outer_directives
        return node

    def visit_ModuleNode(self, node):
        self.module_scope = node.scope
        self.module_scope.directives = node.directives
        self.visitchildren(node)
        return node

    def visit_CDefExternNode(self, node):
        # mark everything inside "cdef extern" as coming from a C include
        saved_flag = self.module_scope.in_cinclude
        self.module_scope.in_cinclude = 1
        self.visitchildren(node)
        self.module_scope.in_cinclude = saved_flag
        return node

    def visit_CEnumDefNode(self, node):
        node.declare(self.module_scope)
        return node

    def visit_CStructOrUnionDefNode(self, node):
        # avoid re-declaring a name that already has an entry
        if node.name not in self.module_scope.entries:
            node.declare(self.module_scope)
        return node

    def visit_CClassDefNode(self, node):
        # avoid re-declaring a class that already has an entry
        if node.class_name not in self.module_scope.entries:
            node.declare(self.module_scope)
        return node
class AnalyseDeclarationsTransform(EnvTransform):
    """
    Declaration analysis phase: declares every name in its scope,
    synthesises property wrappers for attributes of extension types that
    need them, replaces functions with fused argument types by a
    FusedCFuncDefNode, and drops pure declaration nodes that are no
    longer needed once analysed.
    """
    # Template for a read/write property around a non-object attribute.
    basic_property = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    def __set__(self, value):
        ATTR = value
    """, level='c_class', pipeline=[NormalizeTree(None)])
    # Template for a read/write property around a Python object attribute;
    # deletion is implemented by assigning None.
    basic_pyobject_property = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    def __set__(self, value):
        ATTR = value
    def __del__(self):
        ATTR = None
    """, level='c_class', pipeline=[NormalizeTree(None)])
    # Template for a read-only property.
    basic_property_ro = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    """, level='c_class', pipeline=[NormalizeTree(None)])
    # Template for a Python-visible wrapper class around a cdef struct/union.
    struct_or_union_wrapper = TreeFragment(u"""
cdef class NAME:
    cdef TYPE value
    def __init__(self, MEMBER=None):
        cdef int count
        count = 0
        INIT_ASSIGNMENTS
        if IS_UNION and count > 1:
            raise ValueError, "At most one union member should be specified."
    def __str__(self):
        return STR_FORMAT % MEMBER_TUPLE
    def __repr__(self):
        return REPR_FORMAT % MEMBER_TUPLE
    """, pipeline=[NormalizeTree(None)])
    # Template for one member assignment inside the wrapper's __init__.
    init_assignment = TreeFragment(u"""
if VALUE is not None:
    ATTR = VALUE
    count += 1
    """, pipeline=[NormalizeTree(None)])
    # The FusedCFuncDefNode currently being processed, if any; used to
    # reject nested fused functions.
    fused_function = None
    # Lambda nesting depth; used to reject fused lambdas.
    in_lambda = 0
    def __call__(self, root):
        # needed to determine if a cdef var is declared after it's used.
        self.seen_vars_stack = []
        self.fused_error_funcs = set()
        # Bind the superclass visitor once so overrides below can delegate
        # to it without repeated super() lookups.
        super_class = super(AnalyseDeclarationsTransform, self)
        self._super_visit_FuncDefNode = super_class.visit_FuncDefNode
        return super_class.__call__(root)
    def visit_NameNode(self, node):
        # Record every name use in the current scope (see __call__).
        self.seen_vars_stack[-1].add(node.name)
        return node
    def visit_ModuleNode(self, node):
        self.seen_vars_stack.append(set())
        node.analyse_declarations(self.current_env())
        self.visitchildren(node)
        self.seen_vars_stack.pop()
        return node
    def visit_LambdaNode(self, node):
        self.in_lambda += 1
        node.analyse_declarations(self.current_env())
        self.visitchildren(node)
        self.in_lambda -= 1
        return node
    def visit_CClassDefNode(self, node):
        node = self.visit_ClassDefNode(node)
        if node.scope and node.scope.implemented:
            # Synthesise property wrappers for attributes that need them
            # (see create_Property()) and append them to the class body.
            stats = []
            for entry in node.scope.var_entries:
                if entry.needs_property:
                    property = self.create_Property(entry)
                    property.analyse_declarations(node.scope)
                    self.visit(property)
                    stats.append(property)
            if stats:
                node.body.stats += stats
        return node
    def _handle_fused_def_decorators(self, old_decorators, env, node):
        """
        Create function calls to the decorators and reassignments to
        the function.
        """
        # Delete staticmethod and classmethod decorators, this is
        # handled directly by the fused function object.
        decorators = []
        for decorator in old_decorators:
            func = decorator.decorator
            if (not func.is_name or
                func.name not in ('staticmethod', 'classmethod') or
                env.lookup_here(func.name)):
                # not a static or classmethod
                decorators.append(decorator)
        if decorators:
            transform = DecoratorTransform(self.context)
            def_node = node.node
            _, reassignments = transform.handle_decorators(
                def_node, decorators, def_node.name)
            reassignments.analyse_declarations(env)
            node = [node, reassignments]
        return node
    def _handle_def(self, decorators, env, node):
        "Handle def or cpdef fused functions"
        # Create PyCFunction nodes for each specialization
        node.stats.insert(0, node.py_func)
        node.py_func = self.visit(node.py_func)
        node.update_fused_defnode_entry(env)
        pycfunc = ExprNodes.PyCFunctionNode.from_defnode(node.py_func,
                                                         True)
        pycfunc = ExprNodes.ProxyNode(pycfunc.coerce_to_temp(env))
        node.resulting_fused_function = pycfunc
        # Create assignment node for our def function
        node.fused_func_assignment = self._create_assignment(
            node.py_func, ExprNodes.CloneNode(pycfunc), env)
        if decorators:
            node = self._handle_fused_def_decorators(decorators, env, node)
        return node
    def _create_fused_function(self, env, node):
        "Create a fused function for a DefNode with fused arguments"
        from Cython.Compiler import FusedNode
        if self.fused_function or self.in_lambda:
            # Fused functions cannot be nested inside fused functions or
            # lambdas; report once per function and neutralise the body.
            if self.fused_function not in self.fused_error_funcs:
                if self.in_lambda:
                    error(node.pos, "Fused lambdas not allowed")
                else:
                    error(node.pos, "Cannot nest fused functions")
            self.fused_error_funcs.add(self.fused_function)
            node.body = Nodes.PassStatNode(node.pos)
            # Pick an arbitrary specialization so analysis can continue.
            for arg in node.args:
                if arg.type.is_fused:
                    arg.type = arg.type.get_fused_types()[0]
            return node
        decorators = getattr(node, 'decorators', None)
        node = FusedNode.FusedCFuncDefNode(node, env)
        self.fused_function = node
        self.visitchildren(node)
        self.fused_function = None
        if node.py_func:
            node = self._handle_def(decorators, env, node)
        return node
    def _handle_nogil_cleanup(self, lenv, node):
        "Handle cleanup for 'with gil' blocks in nogil functions."
        if lenv.nogil and lenv.has_with_gil_block:
            # Acquire the GIL for cleanup in 'nogil' functions, by wrapping
            # the entire function body in try/finally.
            # The corresponding release will be taken care of by
            # Nodes.FuncDefNode.generate_function_definitions()
            node.body = Nodes.NogilTryFinallyStatNode(
                node.body.pos,
                body=node.body,
                finally_clause=Nodes.EnsureGILNode(node.body.pos))
    def _handle_fused(self, node):
        # Returns True if the node still needs fused-function handling.
        if node.is_generator and node.has_fused_arguments:
            node.has_fused_arguments = False
            error(node.pos, "Fused generators not supported")
            node.gbody = Nodes.StatListNode(node.pos,
                                            stats=[],
                                            body=Nodes.PassStatNode(node.pos))
        return node.has_fused_arguments
    def visit_FuncDefNode(self, node):
        """
        Analyse a function and its body, as that hasn't happend yet. Also
        analyse the directive_locals set by @cython.locals(). Then, if we are
        a function with fused arguments, replace the function (after it has
        declared itself in the symbol table!) with a FusedCFuncDefNode, and
        analyse its children (which are in turn normal functions). If we're a
        normal function, just analyse the body of the function.
        """
        env = self.current_env()
        self.seen_vars_stack.append(set())
        lenv = node.local_scope
        node.declare_arguments(lenv)
        # Declare the types requested via @cython.locals().
        for var, type_node in node.directive_locals.items():
            if not lenv.lookup_here(var):   # don't redeclare args
                type = type_node.analyse_as_type(lenv)
                if type:
                    lenv.declare_var(var, type, type_node.pos)
                else:
                    error(type_node.pos, "Not a type")
        if self._handle_fused(node):
            node = self._create_fused_function(env, node)
        else:
            node.body.analyse_declarations(lenv)
            self._handle_nogil_cleanup(lenv, node)
            self._super_visit_FuncDefNode(node)
        self.seen_vars_stack.pop()
        return node
    def visit_DefNode(self, node):
        node = self.visit_FuncDefNode(node)
        env = self.current_env()
        # Only plain DefNodes that need it get a synthesized assignment
        # of the function object to the function's name.
        if (not isinstance(node, Nodes.DefNode) or
            node.fused_py_func or node.is_generator_body or
            not node.needs_assignment_synthesis(env)):
            return node
        return [node, self._synthesize_assignment(node, env)]
    def visit_GeneratorBodyDefNode(self, node):
        return self.visit_FuncDefNode(node)
    def _synthesize_assignment(self, node, env):
        # Synthesize assignment node and put it right after defnode
        genv = env
        # Skip class scopes to find the scope the function object lives in.
        while genv.is_py_class_scope or genv.is_c_class_scope:
            genv = genv.outer_scope
        if genv.is_closure_scope:
            rhs = node.py_cfunc_node = ExprNodes.InnerFunctionNode(
                node.pos, def_node=node,
                pymethdef_cname=node.entry.pymethdef_cname,
                code_object=ExprNodes.CodeObjectNode(node))
        else:
            binding = self.current_directives.get('binding')
            rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding)
            if env.is_py_class_scope:
                rhs.binding = True
        node.is_cyfunction = rhs.binding
        return self._create_assignment(node, rhs, env)
    def _create_assignment(self, def_node, rhs, env):
        if def_node.decorators:
            # Apply decorators innermost-first by wrapping rhs in calls.
            for decorator in def_node.decorators[::-1]:
                rhs = ExprNodes.SimpleCallNode(
                    decorator.pos,
                    function = decorator.decorator,
                    args = [rhs])
            def_node.decorators = None
        assmt = Nodes.SingleAssignmentNode(
            def_node.pos,
            lhs=ExprNodes.NameNode(def_node.pos, name=def_node.name),
            rhs=rhs)
        assmt.analyse_declarations(env)
        return assmt
    def visit_ScopedExprNode(self, node):
        env = self.current_env()
        node.analyse_declarations(env)
        # the node may or may not have a local scope
        if node.has_local_scope:
            # Inner scope sees the outer names: start from a copy.
            self.seen_vars_stack.append(set(self.seen_vars_stack[-1]))
            self.enter_scope(node, node.expr_scope)
            node.analyse_scoped_declarations(node.expr_scope)
            self.visitchildren(node)
            self.exit_scope()
            self.seen_vars_stack.pop()
        else:
            node.analyse_scoped_declarations(env)
            self.visitchildren(node)
        return node
    def visit_TempResultFromStatNode(self, node):
        self.visitchildren(node)
        node.analyse_declarations(self.current_env())
        return node
    def visit_CppClassNode(self, node):
        if node.visibility == 'extern':
            # Extern C++ classes generate no Python-level code.
            return None
        else:
            return self.visit_ClassDefNode(node)
    def visit_CStructOrUnionDefNode(self, node):
        # Create a wrapper node if needed.
        # We want to use the struct type information (so it can't happen
        # before this phase) but also create new objects to be declared
        # (so it can't happen later).
        # Note that we don't return the original node, as it is
        # never used after this phase.
        # NOTE(review): wrapper generation is currently disabled by the
        # unconditional early return below; the code after it is kept for
        # when visibility-based branching is implemented.
        if True: # private (default)
            return None
        self_value = ExprNodes.AttributeNode(
            pos = node.pos,
            obj = ExprNodes.NameNode(pos=node.pos, name=u"self"),
            attribute = EncodedString(u"value"))
        var_entries = node.entry.type.scope.var_entries
        attributes = []
        for entry in var_entries:
            attributes.append(ExprNodes.AttributeNode(pos = entry.pos,
                                                      obj = self_value,
                                                      attribute = entry.name))
        # __init__ assignments
        init_assignments = []
        for entry, attr in zip(var_entries, attributes):
            # TODO: branch on visibility
            init_assignments.append(self.init_assignment.substitute({
                    u"VALUE": ExprNodes.NameNode(entry.pos, name = entry.name),
                    u"ATTR": attr,
                }, pos = entry.pos))
        # create the class
        str_format = u"%s(%s)" % (node.entry.type.name, ("%s, " * len(attributes))[:-2])
        wrapper_class = self.struct_or_union_wrapper.substitute({
            u"INIT_ASSIGNMENTS": Nodes.StatListNode(node.pos, stats = init_assignments),
            u"IS_UNION": ExprNodes.BoolNode(node.pos, value = not node.entry.type.is_struct),
            u"MEMBER_TUPLE": ExprNodes.TupleNode(node.pos, args=attributes),
            u"STR_FORMAT": ExprNodes.StringNode(node.pos, value = EncodedString(str_format)),
            u"REPR_FORMAT": ExprNodes.StringNode(node.pos, value = EncodedString(str_format.replace("%s", "%r"))),
        }, pos = node.pos).stats[0]
        wrapper_class.class_name = node.name
        wrapper_class.shadow = True
        class_body = wrapper_class.body.stats
        # fix value type
        assert isinstance(class_body[0].base_type, Nodes.CSimpleBaseTypeNode)
        class_body[0].base_type.name = node.name
        # fix __init__ arguments
        init_method = class_body[1]
        assert isinstance(init_method, Nodes.DefNode) and init_method.name == '__init__'
        arg_template = init_method.args[1]
        if not node.entry.type.is_struct:
            # union members are keyword-only in the wrapper's __init__
            arg_template.kw_only = True
        del init_method.args[1]
        for entry, attr in zip(var_entries, attributes):
            arg = copy.deepcopy(arg_template)
            arg.declarator.name = entry.name
            init_method.args.append(arg)
        # setters/getters
        for entry, attr in zip(var_entries, attributes):
            # TODO: branch on visibility
            if entry.type.is_pyobject:
                template = self.basic_pyobject_property
            else:
                template = self.basic_property
            property = template.substitute({
                    u"ATTR": attr,
                }, pos = entry.pos).stats[0]
            property.name = entry.name
            wrapper_class.body.stats.append(property)
        wrapper_class.analyse_declarations(self.current_env())
        return self.visit_CClassDefNode(wrapper_class)
    # Some nodes are no longer needed after declaration
    # analysis and can be dropped. The analysis was performed
    # on these nodes in a seperate recursive process from the
    # enclosing function or module, so we can simply drop them.
    def visit_CDeclaratorNode(self, node):
        # necessary to ensure that all CNameDeclaratorNodes are visited.
        self.visitchildren(node)
        return node
    def visit_CTypeDefNode(self, node):
        return node
    def visit_CBaseTypeNode(self, node):
        return None
    def visit_CEnumDefNode(self, node):
        if node.visibility == 'public':
            # public enums are kept (their values must stay visible)
            return node
        else:
            return None
    def visit_CNameDeclaratorNode(self, node):
        # Warn when a cdef variable was used before this declaration
        # (see seen_vars_stack maintenance in the visit methods above).
        if node.name in self.seen_vars_stack[-1]:
            entry = self.current_env().lookup(node.name)
            if (entry is None or entry.visibility != 'extern'
                and not entry.scope.is_c_class_scope):
                warning(node.pos, "cdef variable '%s' declared after it is used" % node.name, 2)
        self.visitchildren(node)
        return node
    def visit_CVarDefNode(self, node):
        # to ensure all CNameDeclaratorNodes are visited.
        self.visitchildren(node)
        return None
    def visit_CnameDecoratorNode(self, node):
        child_node = self.visit(node.node)
        if not child_node:
            return None
        if type(child_node) is list: # Assignment synthesized
            node.child_node = child_node[0]
            return [node] + child_node[1:]
        node.node = child_node
        return node
    def create_Property(self, entry):
        # Build a property wrapper for a public/readonly attribute.
        # NOTE(review): assumes entry.visibility is 'public' or 'readonly';
        # any other value would leave 'template' unbound.
        if entry.visibility == 'public':
            if entry.type.is_pyobject:
                template = self.basic_pyobject_property
            else:
                template = self.basic_property
        elif entry.visibility == 'readonly':
            template = self.basic_property_ro
        property = template.substitute({
            u"ATTR": ExprNodes.AttributeNode(pos=entry.pos,
                                             obj=ExprNodes.NameNode(pos=entry.pos, name="self"),
                                             attribute=entry.name),
        }, pos=entry.pos).stats[0]
        property.name = entry.name
        property.doc = entry.doc
        return property
class CalculateQualifiedNamesTransform(EnvTransform):
    """
    Calculate and store the '__qualname__' and the global
    module name on some nodes.
    """
    def visit_ModuleNode(self, node):
        self.module_name = self.global_scope().qualified_name
        # Stack of name components of the current lexical position.
        self.qualified_name = []
        # Bind the superclass visitors once for delegation below.
        _super = super(CalculateQualifiedNamesTransform, self)
        self._super_visit_FuncDefNode = _super.visit_FuncDefNode
        self._super_visit_ClassDefNode = _super.visit_ClassDefNode
        self.visitchildren(node)
        return node
    def _set_qualname(self, node, name=None):
        # Store the dotted qualname (optionally extended by 'name') and
        # the module name on the node, then recurse.
        if name:
            qualname = self.qualified_name[:]
            qualname.append(name)
        else:
            qualname = self.qualified_name
        node.qualname = EncodedString('.'.join(qualname))
        node.module_name = self.module_name
        self.visitchildren(node)
        return node
    def _append_entry(self, entry):
        if entry.is_pyglobal and not entry.is_pyclass_attr:
            # Global names restart the qualified name from scratch.
            self.qualified_name = [entry.name]
        else:
            self.qualified_name.append(entry.name)
    def visit_ClassNode(self, node):
        return self._set_qualname(node, node.name)
    def visit_PyClassNamespaceNode(self, node):
        # class name was already added by parent node
        return self._set_qualname(node)
    def visit_PyCFunctionNode(self, node):
        return self._set_qualname(node, node.def_node.name)
    def visit_FuncDefNode(self, node):
        # Save a copy so nested mutation/rebinding can be undone below.
        orig_qualified_name = self.qualified_name[:]
        if getattr(node, 'name', None) == '<lambda>':
            self.qualified_name.append('<lambda>')
        else:
            self._append_entry(node.entry)
        # Names defined inside the function live in its '<locals>' space.
        self.qualified_name.append('<locals>')
        self._super_visit_FuncDefNode(node)
        self.qualified_name = orig_qualified_name
        return node
    def visit_ClassDefNode(self, node):
        orig_qualified_name = self.qualified_name[:]
        entry = (getattr(node, 'entry', None) or             # PyClass
                 self.current_env().lookup_here(node.name))  # CClass
        self._append_entry(entry)
        self._super_visit_ClassDefNode(node)
        self.qualified_name = orig_qualified_name
        return node
class AnalyseExpressionsTransform(CythonTransform):
    """Run type inference and expression analysis on every scope."""

    def visit_ModuleNode(self, node):
        scope = node.scope
        scope.infer_types()
        node.body = node.body.analyse_expressions(scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        scope = node.local_scope
        scope.infer_types()
        node.body = node.body.analyse_expressions(scope)
        self.visitchildren(node)
        return node

    def visit_ScopedExprNode(self, node):
        if node.has_local_scope:
            expr_scope = node.expr_scope
            expr_scope.infer_types()
            node = node.analyse_scoped_expressions(expr_scope)
        self.visitchildren(node)
        return node

    def visit_IndexNode(self, node):
        """
        Replace an IndexNode used to specialize a cdef function with fused
        argument types by the Attribute- or NameNode referring to the
        function, carrying over the specialization properties; also drop
        no-op 'memoryviewslice[...]' indexing.  The node is analysed first
        because the indexing might be a Python indexing operation on a
        fused function, or (usually) a Cython indexing operation.
        """
        self.visit_Node(node)
        if node.is_fused_index and not node.type.is_error:
            return node.base
        if node.memslice_ellipsis_noop:
            # memoryviewslice[...] expression, drop the IndexNode
            return node.base
        return node
class FindInvalidUseOfFusedTypes(CythonTransform):
    """Report invalid uses of fused types outside of functions that take
    fused arguments."""

    def visit_FuncDefNode(self, node):
        # Errors inside functions with fused arguments have already been
        # detected, so only check the others.
        if node.has_fused_arguments:
            self.visitchildren(node)
        elif not node.is_generator_body and node.return_type.is_fused:
            error(node.pos, "Return type is not specified as argument type")
        else:
            self.visitchildren(node)
        return node

    def visit_ExprNode(self, node):
        if node.type and node.type.is_fused:
            error(node.pos, "Invalid use of fused types, type cannot be specialized")
        else:
            self.visitchildren(node)
        return node
class ExpandInplaceOperators(EnvTransform):
    """Rewrite in-place assignments (a += b) into plain assignments
    (a = a + b) with side-effect-free references to the target."""
    def visit_InPlaceAssignmentNode(self, node):
        lhs = node.lhs
        rhs = node.rhs
        if lhs.type.is_cpp_class:
            # No getting around this exact operator here.
            return node
        if isinstance(lhs, ExprNodes.IndexNode) and lhs.is_buffer_access:
            # There is code to handle this case.
            return node
        env = self.current_env()
        def side_effect_free_reference(node, setting=False):
            # Returns (node, temps): a reference to 'node' that can be
            # evaluated twice without duplicating side effects, plus the
            # LetRefNode temps that must be set up around the assignment.
            if isinstance(node, ExprNodes.NameNode):
                return node, []
            elif node.type.is_pyobject and not setting:
                node = LetRefNode(node)
                return node, [node]
            elif isinstance(node, ExprNodes.IndexNode):
                if node.is_buffer_access:
                    raise ValueError("Buffer access")
                base, temps = side_effect_free_reference(node.base)
                index = LetRefNode(node.index)
                return ExprNodes.IndexNode(node.pos, base=base, index=index), temps + [index]
            elif isinstance(node, ExprNodes.AttributeNode):
                obj, temps = side_effect_free_reference(node.obj)
                return ExprNodes.AttributeNode(node.pos, obj=obj, attribute=node.attribute), temps
            else:
                node = LetRefNode(node)
                return node, [node]
        try:
            lhs, let_ref_nodes = side_effect_free_reference(lhs, setting=True)
        except ValueError:
            # Buffer access encountered: leave the node unexpanded.
            return node
        # Shallow copy of the lhs to use as the read operand.
        dup = lhs.__class__(**lhs.__dict__)
        binop = ExprNodes.binop_node(node.pos,
                                     operator = node.operator,
                                     operand1 = dup,
                                     operand2 = rhs,
                                     inplace=True)
        # Manually analyse types for new node.
        lhs.analyse_target_types(env)
        dup.analyse_types(env)
        binop.analyse_operation(env)
        node = Nodes.SingleAssignmentNode(
            node.pos,
            lhs = lhs,
            rhs=binop.coerce_to(lhs.type, env))
        # Use LetRefNode to avoid side effects.
        let_ref_nodes.reverse()
        for t in let_ref_nodes:
            node = LetNode(t, node)
        return node
    def visit_ExprNode(self, node):
        # In-place assignments can't happen within an expression.
        return node
class AdjustDefByDirectives(CythonTransform, SkipDeclarations):
    """
    Adjust function and class definitions according to the decorator
    directives @cython.cfunc, @cython.cclass and @cython.ccall.
    """

    def visit_ModuleNode(self, node):
        self.directives = node.directives
        self.in_py_class = False
        self.visitchildren(node)
        return node

    def visit_CompilerDirectivesNode(self, node):
        outer_directives = self.directives
        self.directives = node.directives
        self.visitchildren(node)
        self.directives = outer_directives
        return node

    def visit_DefNode(self, node):
        directives = self.directives
        if 'ccall' in directives:
            converted = node.as_cfunction(
                overridable=True, returns=directives.get('returns'))
            return self.visit(converted)
        if 'cfunc' in directives:
            if self.in_py_class:
                error(node.pos, "cfunc directive is not allowed here")
            else:
                converted = node.as_cfunction(
                    overridable=False, returns=directives.get('returns'))
                return self.visit(converted)
        self.visitchildren(node)
        return node

    def visit_PyClassDefNode(self, node):
        if 'cclass' in self.directives:
            return self.visit(node.as_cclass())
        saved_in_py_class = self.in_py_class
        self.in_py_class = True
        self.visitchildren(node)
        self.in_py_class = saved_in_py_class
        return node

    def visit_CClassDefNode(self, node):
        saved_in_py_class = self.in_py_class
        self.in_py_class = False
        self.visitchildren(node)
        self.in_py_class = saved_in_py_class
        return node
class AlignFunctionDefinitions(CythonTransform):
    """
    This class takes the signatures from a .pxd file and applies them to
    the def methods in a .py file.
    """
    def visit_ModuleNode(self, node):
        self.scope = node.scope
        self.directives = node.directives
        self.imported_names = set() # hack, see visit_FromImportStatNode()
        self.visitchildren(node)
        return node
    def visit_PyClassDefNode(self, node):
        pxd_def = self.scope.lookup(node.name)
        if pxd_def:
            if pxd_def.is_cclass:
                # Declared as an extension type in the .pxd: convert.
                return self.visit_CClassDefNode(node.as_cclass(), pxd_def)
            elif not pxd_def.scope or not pxd_def.scope.is_builtin_scope:
                error(node.pos, "'%s' redeclared" % node.name)
                if pxd_def.pos:
                    error(pxd_def.pos, "previous declaration here")
                return None
        return node
    def visit_CClassDefNode(self, node, pxd_def=None):
        if pxd_def is None:
            pxd_def = self.scope.lookup(node.class_name)
        if pxd_def:
            # Visit the class body within the scope declared in the .pxd.
            outer_scope = self.scope
            self.scope = pxd_def.type.scope
        self.visitchildren(node)
        if pxd_def:
            self.scope = outer_scope
        return node
    def visit_DefNode(self, node):
        pxd_def = self.scope.lookup(node.name)
        if pxd_def and (not pxd_def.scope or not pxd_def.scope.is_builtin_scope):
            if not pxd_def.is_cfunction:
                error(node.pos, "'%s' redeclared" % node.name)
                if pxd_def.pos:
                    error(pxd_def.pos, "previous declaration here")
                return None
            # Align the def function with its .pxd signature.
            node = node.as_cfunction(pxd_def)
        elif (self.scope.is_module_scope and self.directives['auto_cpdef']
              and not node.name in self.imported_names
              and node.is_cdef_func_compatible()):
            # FIXME: cpdef-ing should be done in analyse_declarations()
            node = node.as_cfunction(scope=self.scope)
        # Enable this when nested cdef functions are allowed.
        # self.visitchildren(node)
        return node
    def visit_FromImportStatNode(self, node):
        # hack to prevent conditional import fallback functions from
        # being cpdef-ed (global Python variables currently conflict
        # with imports)
        if self.scope.is_module_scope:
            for name, _ in node.items:
                self.imported_names.add(name)
        return node
    def visit_ExprNode(self, node):
        # ignore lambdas and everything else that appears in expressions
        return node
class RemoveUnreachableCode(CythonTransform):
    """Remove statements that appear after a terminating statement in a
    statement list, and propagate the 'is_terminator' flag upwards."""

    def visit_StatListNode(self, node):
        if not self.current_directives['remove_unreachable']:
            return node
        self.visitchildren(node)
        for position, stat in enumerate(node.stats):
            if not stat.is_terminator:
                continue
            keep = position + 1
            if keep < len(node.stats):
                if self.current_directives['warn.unreachable']:
                    warning(node.stats[keep].pos, "Unreachable code", 2)
                node.stats = node.stats[:keep]
            node.is_terminator = True
            break
        return node

    def visit_IfClauseNode(self, node):
        self.visitchildren(node)
        if node.body.is_terminator:
            node.is_terminator = True
        return node

    def visit_IfStatNode(self, node):
        self.visitchildren(node)
        # The whole statement terminates when the else clause and every
        # conditional clause terminate.
        else_clause = node.else_clause
        if else_clause and else_clause.is_terminator:
            if all(clause.is_terminator for clause in node.if_clauses):
                node.is_terminator = True
        return node

    def visit_TryExceptStatNode(self, node):
        self.visitchildren(node)
        if node.else_clause and node.body.is_terminator:
            # The else clause can never run when the body always terminates.
            if self.current_directives['warn.unreachable']:
                warning(node.else_clause.pos, "Unreachable code", 2)
            node.else_clause = None
        return node
class YieldNodeCollector(TreeVisitor):
    """Collect the yield expressions and return statements of one function
    body, without descending into nested scopes."""

    def __init__(self):
        super(YieldNodeCollector, self).__init__()
        self.yields = []
        self.returns = []
        self.has_return_value = False

    def visit_Node(self, node):
        self.visitchildren(node)

    def visit_YieldExprNode(self, node):
        self.yields.append(node)
        self.visitchildren(node)

    def visit_ReturnStatNode(self, node):
        self.visitchildren(node)
        if node.value:
            self.has_return_value = True
        self.returns.append(node)

    # Nested scopes own their yields/returns - do not recurse into them.
    def visit_ClassDefNode(self, node):
        pass

    def visit_FuncDefNode(self, node):
        pass

    def visit_LambdaNode(self, node):
        pass

    def visit_GeneratorExpressionNode(self, node):
        pass
class MarkClosureVisitor(CythonTransform):
    """
    Mark functions that need a closure and replace generator functions
    (those containing yield expressions) by GeneratorDefNode wrappers.
    """
    def visit_ModuleNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        return node
    def visit_FuncDefNode(self, node):
        # 'self.needs_closure' is the child-to-parent signal: reset it,
        # visit the children (which may set it), record the result on the
        # node, then signal our own parent.
        self.needs_closure = False
        self.visitchildren(node)
        node.needs_closure = self.needs_closure
        self.needs_closure = True
        collector = YieldNodeCollector()
        collector.visitchildren(node)
        if collector.yields:
            if isinstance(node, Nodes.CFuncDefNode):
                # Will report error later
                return node
            for i, yield_expr in enumerate(collector.yields):
                yield_expr.label_num = i + 1 # no enumerate start arg in Py2.4
            for retnode in collector.returns:
                retnode.in_generator = True
            # Wrap the body and build the generator function node.
            gbody = Nodes.GeneratorBodyDefNode(
                pos=node.pos, name=node.name, body=node.body)
            generator = Nodes.GeneratorDefNode(
                pos=node.pos, name=node.name, args=node.args,
                star_arg=node.star_arg, starstar_arg=node.starstar_arg,
                doc=node.doc, decorators=node.decorators,
                gbody=gbody, lambda_name=node.lambda_name)
            return generator
        return node
    def visit_CFuncDefNode(self, node):
        self.visit_FuncDefNode(node)
        if node.needs_closure:
            error(node.pos, "closures inside cdef functions not yet supported")
        return node
    def visit_LambdaNode(self, node):
        # Same child-to-parent protocol as visit_FuncDefNode above.
        self.needs_closure = False
        self.visitchildren(node)
        node.needs_closure = self.needs_closure
        self.needs_closure = True
        return node
    def visit_ClassDefNode(self, node):
        self.visitchildren(node)
        # A class body always signals closure need to its enclosing scope.
        self.needs_closure = True
        return node
class CreateClosureClasses(CythonTransform):
    # Output closure classes in module scope for all functions
    # that really need it.
    def __init__(self, context):
        super(CreateClosureClasses, self).__init__(context)
        # Stack of enclosing FuncDefNodes currently being visited.
        self.path = []
        self.in_lambda = False
    def visit_ModuleNode(self, node):
        self.module_scope = node.scope
        self.visitchildren(node)
        return node
    def find_entries_used_in_closures(self, node):
        # Split the local entries into those read from an outer closure
        # and those captured by inner closures.
        from_closure = []
        in_closure = []
        for name, entry in node.local_scope.entries.items():
            if entry.from_closure:
                from_closure.append((name, entry))
            elif entry.in_closure:
                in_closure.append((name, entry))
        return from_closure, in_closure
    def create_class_from_scope(self, node, target_module_scope, inner_node=None):
        # move local variables into closure
        if node.is_generator:
            # Generators keep all their locals in the closure object.
            for entry in node.local_scope.entries.values():
                if not entry.from_closure:
                    entry.in_closure = True
        from_closure, in_closure = self.find_entries_used_in_closures(node)
        in_closure.sort()
        # Now from the begining
        node.needs_closure = False
        node.needs_outer_scope = False
        func_scope = node.local_scope
        cscope = node.entry.scope
        # Find the closest enclosing non-class scope.
        while cscope.is_py_class_scope or cscope.is_c_class_scope:
            cscope = cscope.outer_scope
        if not from_closure and (self.path or inner_node):
            if not inner_node:
                if not node.py_cfunc_node:
                    raise InternalError("DefNode does not have assignment node")
                inner_node = node.py_cfunc_node
            inner_node.needs_self_code = False
            node.needs_outer_scope = False
        if node.is_generator:
            pass
        elif not in_closure and not from_closure:
            # No closure interaction at all: nothing to generate.
            return
        elif not in_closure:
            # Only reads outer closure variables: pass the outer scope on.
            func_scope.is_passthrough = True
            func_scope.scope_class = cscope.scope_class
            node.needs_outer_scope = True
            return
        # Declare the closure class in the target module scope.
        as_name = '%s_%s' % (
            target_module_scope.next_id(Naming.closure_class_prefix),
            node.entry.cname)
        entry = target_module_scope.declare_c_class(
            name=as_name, pos=node.pos, defining=True,
            implementing=True)
        entry.type.is_final_type = True
        func_scope.scope_class = entry
        class_scope = entry.type.scope
        class_scope.is_internal = True
        if Options.closure_freelist_size:
            class_scope.directives['freelist'] = Options.closure_freelist_size
        if from_closure:
            # Link to the enclosing closure via an outer-scope attribute.
            assert cscope.is_closure_scope
            class_scope.declare_var(pos=node.pos,
                                    name=Naming.outer_scope_cname,
                                    cname=Naming.outer_scope_cname,
                                    type=cscope.scope_class.type,
                                    is_cdef=True)
            node.needs_outer_scope = True
        # One attribute per captured local variable.
        for name, entry in in_closure:
            closure_entry = class_scope.declare_var(pos=entry.pos,
                                                    name=entry.name,
                                                    cname=entry.cname,
                                                    type=entry.type,
                                                    is_cdef=True)
            if entry.is_declared_generic:
                closure_entry.is_declared_generic = 1
        node.needs_closure = True
        # Do it here because other classes are already checked
        target_module_scope.check_c_class(func_scope.scope_class)
    def visit_LambdaNode(self, node):
        if not isinstance(node.def_node, Nodes.DefNode):
            # fused function, an error has been previously issued
            return node
        was_in_lambda = self.in_lambda
        self.in_lambda = True
        self.create_class_from_scope(node.def_node, self.module_scope, node)
        self.visitchildren(node)
        self.in_lambda = was_in_lambda
        return node
    def visit_FuncDefNode(self, node):
        if self.in_lambda:
            self.visitchildren(node)
            return node
        # Functions that need a closure, and any nested function
        # (non-empty path), get a closure class.
        if node.needs_closure or self.path:
            self.create_class_from_scope(node, self.module_scope)
            self.path.append(node)
            self.visitchildren(node)
            self.path.pop()
        return node
    def visit_GeneratorBodyDefNode(self, node):
        self.visitchildren(node)
        return node
    def visit_CFuncDefNode(self, node):
        self.visitchildren(node)
        return node
class GilCheck(VisitorTransform):
    """
    Call `node.gil_check(env)` on each node to make sure we hold the
    GIL when we need it. Raise an error when on Python operations
    inside a `nogil` environment.
    Additionally, raise exceptions for closely nested with gil or with nogil
    statements. The latter would abort Python.
    """
    def __call__(self, root):
        self.env_stack = [root.scope]
        self.nogil = False
        # True for 'cdef func() nogil:' functions, as the GIL may be held while
        # calling this function (thus contained 'nogil' blocks may be valid).
        self.nogil_declarator_only = False
        return super(GilCheck, self).__call__(root)
    def visit_FuncDefNode(self, node):
        self.env_stack.append(node.local_scope)
        was_nogil = self.nogil
        self.nogil = node.local_scope.nogil
        if self.nogil:
            self.nogil_declarator_only = True
        if self.nogil and node.nogil_check:
            node.nogil_check(node.local_scope)
        self.visitchildren(node)
        # This cannot be nested, so it doesn't need backup/restore
        self.nogil_declarator_only = False
        self.env_stack.pop()
        self.nogil = was_nogil
        return node
    def visit_GILStatNode(self, node):
        if self.nogil and node.nogil_check:
            node.nogil_check()
        was_nogil = self.nogil
        self.nogil = (node.state == 'nogil')
        # Reject redundant 'with gil'/'with nogil' nesting, except inside
        # declared-nogil functions where the actual GIL state is unknown.
        if was_nogil == self.nogil and not self.nogil_declarator_only:
            if not was_nogil:
                error(node.pos, "Trying to acquire the GIL while it is "
                                "already held.")
            else:
                error(node.pos, "Trying to release the GIL while it was "
                                "previously released.")
        if isinstance(node.finally_clause, Nodes.StatListNode):
            # The finally clause of the GILStatNode is a GILExitNode,
            # which is wrapped in a StatListNode. Just unpack that.
            node.finally_clause, = node.finally_clause.stats
        self.visitchildren(node)
        self.nogil = was_nogil
        return node
    def visit_ParallelRangeNode(self, node):
        if node.nogil:
            # prange(..., nogil=True) is rewritten as a 'with nogil' block
            # around the prange.
            node.nogil = False
            node = Nodes.GILStatNode(node.pos, state='nogil', body=node)
            return self.visit_GILStatNode(node)
        if not self.nogil:
            error(node.pos, "prange() can only be used without the GIL")
            # Forget about any GIL-related errors that may occur in the body
            return None
        node.nogil_check(self.env_stack[-1])
        self.visitchildren(node)
        return node
    def visit_ParallelWithBlockNode(self, node):
        if not self.nogil:
            error(node.pos, "The parallel section may only be used without "
                            "the GIL")
            return None
        if node.nogil_check:
            # It does not currently implement this, but test for it anyway to
            # avoid potential future surprises
            node.nogil_check(self.env_stack[-1])
        self.visitchildren(node)
        return node
    def visit_TryFinallyStatNode(self, node):
        """
        Take care of try/finally statements in nogil code sections.
        """
        if not self.nogil or isinstance(node, Nodes.GILStatNode):
            return self.visit_Node(node)
        node.nogil_check = None
        node.is_try_finally_in_nogil = True
        self.visitchildren(node)
        return node
    def visit_Node(self, node):
        if self.env_stack and self.nogil and node.nogil_check:
            node.nogil_check(self.env_stack[-1])
        self.visitchildren(node)
        # Record the GIL state the node was seen in for later phases.
        node.in_nogil_context = self.nogil
        return node
class TransformBuiltinMethods(EnvTransform):
def visit_SingleAssignmentNode(self, node):
if node.declaration_only:
return None
else:
self.visitchildren(node)
return node
    def visit_AttributeNode(self, node):
        # Visit children first, then map "cython.*" attribute references
        # (see visit_cython_attribute()).
        self.visitchildren(node)
        return self.visit_cython_attribute(node)
    def visit_NameNode(self, node):
        # Bare names may also resolve to "cython.*" attributes
        # (via node.as_cython_attribute() in visit_cython_attribute()).
        return self.visit_cython_attribute(node)
    def visit_cython_attribute(self, node):
        # Replace references to "cython.*" compile-time attributes by the
        # corresponding constant or builtin nodes; leave type names and
        # known cython-scope names untouched, and report anything else.
        attribute = node.as_cython_attribute()
        if attribute:
            if attribute == u'compiled':
                # cython.compiled is True in compiled code.
                node = ExprNodes.BoolNode(node.pos, value=True)
            elif attribute == u'__version__':
                import Cython
                node = ExprNodes.StringNode(node.pos, value=EncodedString(Cython.__version__))
            elif attribute == u'NULL':
                node = ExprNodes.NullNode(node.pos)
            elif attribute in (u'set', u'frozenset'):
                # Refer directly to the builtin type's entry.
                node = ExprNodes.NameNode(node.pos, name=EncodedString(attribute),
                                          entry=self.current_env().builtin_scope().lookup_here(attribute))
            elif PyrexTypes.parse_basic_type(attribute):
                # Parses as a basic C type; leave the node unchanged.
                pass
            elif self.context.cython_scope.lookup_qualified_name(attribute):
                # A known name in the cython scope; leave it unchanged.
                pass
            else:
                error(node.pos, u"'%s' not a valid cython attribute or is being used incorrectly" % attribute)
        return node
def visit_ExecStatNode(self, node):
lenv = self.current_env()
self.visitchildren(node)
if len(node.args) == 1:
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def _inject_locals(self, node, func_name):
# locals()/dir()/vars() builtins
lenv = self.current_env()
entry = lenv.lookup_here(func_name)
if entry:
# not the builtin
return node
pos = node.pos
if func_name in ('locals', 'vars'):
if func_name == 'locals' and len(node.args) > 0:
error(self.pos, "Builtin 'locals()' called with wrong number of args, expected 0, got %d"
% len(node.args))
return node
elif func_name == 'vars':
if len(node.args) > 1:
error(self.pos, "Builtin 'vars()' called with wrong number of args, expected 0-1, got %d"
% len(node.args))
if len(node.args) > 0:
return node # nothing to do
return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
else: # dir()
if len(node.args) > 1:
error(self.pos, "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
% len(node.args))
if len(node.args) > 0:
# optimised in Builtin.py
return node
if lenv.is_py_class_scope or lenv.is_module_scope:
if lenv.is_py_class_scope:
pyclass = self.current_scope_node()
locals_dict = ExprNodes.CloneNode(pyclass.dict)
else:
locals_dict = ExprNodes.GlobalsExprNode(pos)
return ExprNodes.SortedDictKeysNode(locals_dict)
local_names = [ var.name for var in lenv.entries.values() if var.name ]
items = [ ExprNodes.IdentifierStringNode(pos, value=var)
for var in local_names ]
return ExprNodes.ListNode(pos, args=items)
def visit_PrimaryCmpNode(self, node):
# special case: for in/not-in test, we do not need to sort locals()
self.visitchildren(node)
if node.operator in 'not_in': # in/not_in
if isinstance(node.operand2, ExprNodes.SortedDictKeysNode):
arg = node.operand2.arg
if isinstance(arg, ExprNodes.NoneCheckNode):
arg = arg.arg
node.operand2 = arg
return node
def visit_CascadedCmpNode(self, node):
return self.visit_PrimaryCmpNode(node)
def _inject_eval(self, node, func_name):
lenv = self.current_env()
entry = lenv.lookup_here(func_name)
if entry or len(node.args) != 1:
return node
# Inject globals and locals
node.args.append(ExprNodes.GlobalsExprNode(node.pos))
if not lenv.is_module_scope:
node.args.append(
ExprNodes.LocalsExprNode(
node.pos, self.current_scope_node(), lenv))
return node
def _inject_super(self, node, func_name):
lenv = self.current_env()
entry = lenv.lookup_here(func_name)
if entry or node.args:
return node
# Inject no-args super
def_node = self.current_scope_node()
if (not isinstance(def_node, Nodes.DefNode) or not def_node.args or
len(self.env_stack) < 2):
return node
class_node, class_scope = self.env_stack[-2]
if class_scope.is_py_class_scope:
def_node.requires_classobj = True
class_node.class_cell.is_active = True
node.args = [
ExprNodes.ClassCellNode(
node.pos, is_generator=def_node.is_generator),
ExprNodes.NameNode(node.pos, name=def_node.args[0].name)
]
elif class_scope.is_c_class_scope:
node.args = [
ExprNodes.NameNode(
node.pos, name=class_node.scope.name,
entry=class_node.entry),
ExprNodes.NameNode(node.pos, name=def_node.args[0].name)
]
return node
def visit_SimpleCallNode(self, node):
# cython.foo
function = node.function.as_cython_attribute()
if function:
if function in InterpretCompilerDirectives.unop_method_nodes:
if len(node.args) != 1:
error(node.function.pos, u"%s() takes exactly one argument" % function)
else:
node = InterpretCompilerDirectives.unop_method_nodes[function](node.function.pos, operand=node.args[0])
elif function in InterpretCompilerDirectives.binop_method_nodes:
if len(node.args) != 2:
error(node.function.pos, u"%s() takes exactly two arguments" % function)
else:
node = InterpretCompilerDirectives.binop_method_nodes[function](node.function.pos, operand1=node.args[0], operand2=node.args[1])
elif function == u'cast':
if len(node.args) != 2:
error(node.function.pos, u"cast() takes exactly two arguments")
else:
type = node.args[0].analyse_as_type(self.current_env())
if type:
node = ExprNodes.TypecastNode(node.function.pos, type=type, operand=node.args[1])
else:
error(node.args[0].pos, "Not a type")
elif function == u'sizeof':
if len(node.args) != 1:
error(node.function.pos, u"sizeof() takes exactly one argument")
else:
type = node.args[0].analyse_as_type(self.current_env())
if type:
node = ExprNodes.SizeofTypeNode(node.function.pos, arg_type=type)
else:
node = ExprNodes.SizeofVarNode(node.function.pos, operand=node.args[0])
elif function == 'cmod':
if len(node.args) != 2:
error(node.function.pos, u"cmod() takes exactly two arguments")
else:
node = ExprNodes.binop_node(node.function.pos, '%', node.args[0], node.args[1])
node.cdivision = True
elif function == 'cdiv':
if len(node.args) != 2:
error(node.function.pos, u"cdiv() takes exactly two arguments")
else:
node = ExprNodes.binop_node(node.function.pos, '/', node.args[0], node.args[1])
node.cdivision = True
elif function == u'set':
node.function = ExprNodes.NameNode(node.pos, name=EncodedString('set'))
elif self.context.cython_scope.lookup_qualified_name(function):
pass
else:
error(node.function.pos,
u"'%s' not a valid cython language construct" % function)
self.visitchildren(node)
if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name:
func_name = node.function.name
if func_name in ('dir', 'locals', 'vars'):
return self._inject_locals(node, func_name)
if func_name == 'eval':
return self._inject_eval(node, func_name)
if func_name == 'super':
return self._inject_super(node, func_name)
return node
class ReplaceFusedTypeChecks(VisitorTransform):
"""
This is not a transform in the pipeline. It is invoked on the specific
versions of a cdef function with fused argument types. It filters out any
type branches that don't match. e.g.
if fused_t is mytype:
...
elif fused_t in other_fused_type:
...
"""
def __init__(self, local_scope):
super(ReplaceFusedTypeChecks, self).__init__()
self.local_scope = local_scope
# defer the import until now to avoid circular import time dependencies
from Cython.Compiler import Optimize
self.transform = Optimize.ConstantFolding(reevaluate=True)
def visit_IfStatNode(self, node):
"""
Filters out any if clauses with false compile time type check
expression.
"""
self.visitchildren(node)
return self.transform(node)
def visit_PrimaryCmpNode(self, node):
type1 = node.operand1.analyse_as_type(self.local_scope)
type2 = node.operand2.analyse_as_type(self.local_scope)
if type1 and type2:
false_node = ExprNodes.BoolNode(node.pos, value=False)
true_node = ExprNodes.BoolNode(node.pos, value=True)
type1 = self.specialize_type(type1, node.operand1.pos)
op = node.operator
if op in ('is', 'is_not', '==', '!='):
type2 = self.specialize_type(type2, node.operand2.pos)
is_same = type1.same_as(type2)
eq = op in ('is', '==')
if (is_same and eq) or (not is_same and not eq):
return true_node
elif op in ('in', 'not_in'):
# We have to do an instance check directly, as operand2
# needs to be a fused type and not a type with a subtype
# that is fused. First unpack the typedef
if isinstance(type2, PyrexTypes.CTypedefType):
type2 = type2.typedef_base_type
if type1.is_fused:
error(node.operand1.pos, "Type is fused")
elif not type2.is_fused:
error(node.operand2.pos,
"Can only use 'in' or 'not in' on a fused type")
else:
types = PyrexTypes.get_specialized_types(type2)
for specialized_type in types:
if type1.same_as(specialized_type):
if op == 'in':
return true_node
else:
return false_node
if op == 'not_in':
return true_node
return false_node
return node
def specialize_type(self, type, pos):
try:
return type.specialize(self.local_scope.fused_to_specific)
except KeyError:
error(pos, "Type is not specific")
return type
def visit_Node(self, node):
self.visitchildren(node)
return node
class DebugTransform(CythonTransform):
"""
Write debug information for this Cython module.
"""
def __init__(self, context, options, result):
super(DebugTransform, self).__init__(context)
self.visited = set()
# our treebuilder and debug output writer
# (see Cython.Debugger.debug_output.CythonDebugWriter)
self.tb = self.context.gdb_debug_outputwriter
#self.c_output_file = options.output_file
self.c_output_file = result.c_file
# Closure support, basically treat nested functions as if the AST were
# never nested
self.nested_funcdefs = []
# tells visit_NameNode whether it should register step-into functions
self.register_stepinto = False
def visit_ModuleNode(self, node):
self.tb.module_name = node.full_module_name
attrs = dict(
module_name=node.full_module_name,
filename=node.pos[0].filename,
c_filename=self.c_output_file)
self.tb.start('Module', attrs)
# serialize functions
self.tb.start('Functions')
# First, serialize functions normally...
self.visitchildren(node)
# ... then, serialize nested functions
for nested_funcdef in self.nested_funcdefs:
self.visit_FuncDefNode(nested_funcdef)
self.register_stepinto = True
self.serialize_modulenode_as_function(node)
self.register_stepinto = False
self.tb.end('Functions')
# 2.3 compatibility. Serialize global variables
self.tb.start('Globals')
entries = {}
for k, v in node.scope.entries.iteritems():
if (v.qualified_name not in self.visited and not
v.name.startswith('__pyx_') and not
v.type.is_cfunction and not
v.type.is_extension_type):
entries[k]= v
self.serialize_local_variables(entries)
self.tb.end('Globals')
# self.tb.end('Module') # end Module after the line number mapping in
# Cython.Compiler.ModuleNode.ModuleNode._serialize_lineno_map
return node
def visit_FuncDefNode(self, node):
self.visited.add(node.local_scope.qualified_name)
if getattr(node, 'is_wrapper', False):
return node
if self.register_stepinto:
self.nested_funcdefs.append(node)
return node
# node.entry.visibility = 'extern'
if node.py_func is None:
pf_cname = ''
else:
pf_cname = node.py_func.entry.func_cname
attrs = dict(
name=node.entry.name or getattr(node, 'name', '<unknown>'),
cname=node.entry.func_cname,
pf_cname=pf_cname,
qualified_name=node.local_scope.qualified_name,
lineno=str(node.pos[1]))
self.tb.start('Function', attrs=attrs)
self.tb.start('Locals')
self.serialize_local_variables(node.local_scope.entries)
self.tb.end('Locals')
self.tb.start('Arguments')
for arg in node.local_scope.arg_entries:
self.tb.start(arg.name)
self.tb.end(arg.name)
self.tb.end('Arguments')
self.tb.start('StepIntoFunctions')
self.register_stepinto = True
self.visitchildren(node)
self.register_stepinto = False
self.tb.end('StepIntoFunctions')
self.tb.end('Function')
return node
def visit_NameNode(self, node):
if (self.register_stepinto and
node.type.is_cfunction and
getattr(node, 'is_called', False) and
node.entry.func_cname is not None):
# don't check node.entry.in_cinclude, as 'cdef extern: ...'
# declared functions are not 'in_cinclude'.
# This means we will list called 'cdef' functions as
# "step into functions", but this is not an issue as they will be
# recognized as Cython functions anyway.
attrs = dict(name=node.entry.func_cname)
self.tb.start('StepIntoFunction', attrs=attrs)
self.tb.end('StepIntoFunction')
self.visitchildren(node)
return node
def serialize_modulenode_as_function(self, node):
"""
Serialize the module-level code as a function so the debugger will know
it's a "relevant frame" and it will know where to set the breakpoint
for 'break modulename'.
"""
name = node.full_module_name.rpartition('.')[-1]
cname_py2 = 'init' + name
cname_py3 = 'PyInit_' + name
py2_attrs = dict(
name=name,
cname=cname_py2,
pf_cname='',
# Ignore the qualified_name, breakpoints should be set using
# `cy break modulename:lineno` for module-level breakpoints.
qualified_name='',
lineno='1',
is_initmodule_function="True",
)
py3_attrs = dict(py2_attrs, cname=cname_py3)
self._serialize_modulenode_as_function(node, py2_attrs)
self._serialize_modulenode_as_function(node, py3_attrs)
def _serialize_modulenode_as_function(self, node, attrs):
self.tb.start('Function', attrs=attrs)
self.tb.start('Locals')
self.serialize_local_variables(node.scope.entries)
self.tb.end('Locals')
self.tb.start('Arguments')
self.tb.end('Arguments')
self.tb.start('StepIntoFunctions')
self.register_stepinto = True
self.visitchildren(node)
self.register_stepinto = False
self.tb.end('StepIntoFunctions')
self.tb.end('Function')
def serialize_local_variables(self, entries):
for entry in entries.values():
if not entry.cname:
# not a local variable
continue
if entry.type.is_pyobject:
vartype = 'PythonObject'
else:
vartype = 'CObject'
if entry.from_closure:
# We're dealing with a closure where a variable from an outer
# scope is accessed, get it from the scope object.
cname = '%s->%s' % (Naming.cur_scope_cname,
entry.outer_entry.cname)
qname = '%s.%s.%s' % (entry.scope.outer_scope.qualified_name,
entry.scope.name,
entry.name)
elif entry.in_closure:
cname = '%s->%s' % (Naming.cur_scope_cname,
entry.cname)
qname = entry.qualified_name
else:
cname = entry.cname
qname = entry.qualified_name
if not entry.pos:
# this happens for variables that are not in the user's code,
# e.g. for the global __builtins__, __doc__, etc. We can just
# set the lineno to 0 for those.
lineno = '0'
else:
lineno = str(entry.pos[1])
attrs = dict(
name=entry.name,
cname=cname,
qualified_name=qname,
type=vartype,
lineno=lineno)
self.tb.start('LocalVar', attrs)
self.tb.end('LocalVar')
| bsd-3-clause |
dhutty/ansible | v1/ansible/cache/base.py | 159 | 1241 | # (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import exceptions
class BaseCacheModule(object):
def get(self, key):
raise exceptions.NotImplementedError
def set(self, key, value):
raise exceptions.NotImplementedError
def keys(self):
raise exceptions.NotImplementedError
def contains(self, key):
raise exceptions.NotImplementedError
def delete(self, key):
raise exceptions.NotImplementedError
def flush(self):
raise exceptions.NotImplementedError
def copy(self):
raise exceptions.NotImplementedError
| gpl-3.0 |
gfrd/gfrd | samples/pushpull/pushpull.py | 1 | 4698 | #!/usr/bin/env python
from egfrd import *
from logger import *
import sys
import os
from fractionS import *
# Args:
# Keq
# koff_ratio
# N_K
# N_P
# V (liter)
# mode: 'normal' 'immobile' 'localized' 'single' 'clustered'
# T
Keq_str = sys.argv[1]
koff_ratio_str = sys.argv[2]
N_S_total = int( sys.argv[3] )
N_K = int( sys.argv[4] )
N_P = int( sys.argv[5] )
V_str = sys.argv[6]
mode = sys.argv[7]
T_str = sys.argv[8]
Keq = float( Keq_str )
koff_ratio = float( koff_ratio_str )
V = float( V_str )
T = float( T_str )
radius = 2.5e-9
sigma = radius * 2
D1 = 1.0e-12
if mode == 'normal':
D2 = D1
elif mode == 'immobile' or mode == 'localized' or mode == 'single':
D2 = 0
else:
raise 'invalid mode'
L = ( V * 1e-3 ) ** ( 1.0 / 3.0 )
s = EGFRDSimulator( L )
N = N_S_total * 1.1
matrixSize = min( max( 3, int( (3 * N) ** (1.0/3.0) ) ), 60 )
print 'matrixSize=', matrixSize
s.setMatrixSize( matrixSize )
#s.setDtFactor( 1e-5 )
print V, L
print C2N( 498e-9, V )
#sys.exit(0)
box1 = CuboidalRegion( [0,0,0],[L,L,L] )
plain1 = CuboidalRegion( [0,0,0],[0,L,L] )
plain2 = CuboidalRegion( [L/2,0,0],[L/2,L,L] )
# not supported yet
#s.addSurface( box1 )
S = Species( 'S', D1, radius )
s.addSpecies( S )
P = Species( 'P', D2, radius )
s.addSpecies( P )
K = Species( 'K', D2, radius )
s.addSpecies( K )
KS = Species( 'KS', D2, radius )
s.addSpecies( KS )
Sp = Species( 'Sp', D1, radius )
s.addSpecies( Sp )
PSp = Species( 'PSp', D2, radius )
s.addSpecies( PSp )
#fracS = fraction_S( N_K, N_P, Keq )
fracS = 1
S_conc = N_S_total / V * 1e3 # in #/m^3
N_S = N_S_total * fracS
N_Sp = N_S_total - N_S
Dtot = D1 + D2
#ka = k_a( kon, k_D( Dtot, sigma ) )
#ka = 9e9 / N_A / 1e3 # 1/M s -> m^3/s
kD = k_D( Dtot, sigma )
kon = Mtom3( 0.02e9 )
ka = k_a( kon, kD )
Keq_S = Keq * S_conc
kcatkoff = Keq_S * kon
koff = kcatkoff * koff_ratio
kcat = kcatkoff - koff
if mode == 'single':
kcat1 = kcat * float( N_K ) / float( N_P )
koff1 = kcatkoff - kcat1
kcat2 = kcat
koff2 = koff
else:
kcat1 = kcat2 = kcat
koff1 = koff2 = koff
kd1 = k_d( koff, kon, kD )
kd2 = k_d( koff2, kon, kD )
print 'ka', ka, 'kD', kD, 'kd1', kd1, 'kd2', kd2
print 'kon m^3/s', kon, '1/M s', kon * N_A * 1e3
print 'koff1 1/s ', koff1
print 'kcat1 1/s ', kcat1
print 'koff2 1/s ', koff2
print 'kcat2 1/s ', kcat2
assert koff2 >= 0
#sys.exit(0)
if mode == 'normal' or mode == 'immobile':
s.throwInParticles( K, N_K, box1 )
s.throwInParticles( P, N_P, box1 )
elif mode == 'localized':
s.throwInParticles( K, N_K, plain1 )
s.throwInParticles( P, N_P, plain2 )
elif mode == 'single':
x = L/2
yz = L/2
tl = L/4
s.placeParticle( K, [ tl, tl, tl ] )
s.placeParticle( K, [ tl, tl, yz+tl ] )
s.placeParticle( K, [ tl, yz+tl, tl ] )
s.placeParticle( K, [ tl, yz+tl, yz+tl ] )
s.placeParticle( P, [ x+tl, tl, tl ] )
s.placeParticle( P, [ x+tl, tl, yz+tl ] )
s.placeParticle( P, [ x+tl, yz+tl, tl ] )
s.placeParticle( P, [ x+tl, yz+tl, yz+tl ] )
else:
assert False
s.throwInParticles( Sp, N_Sp, box1 )
s.throwInParticles( S, N_S, box1 )
# Stir before actually start the sim.
stirTime = 1e-7
while 1:
s.step()
nextTime = s.scheduler.getTopTime()
if nextTime > stirTime:
s.stop( stirTime )
break
s.reset()
# 1 2 S + K <-> KS
# 3 KS -> K + Sp
# 4 5 Sp + P <-> PSp
# 6 PSp -> P + S
r1 = BindingReactionType( S, K, KS, ka )
s.addReactionType( r1 )
r2 = UnbindingReactionType( KS, S, K, kd1 )
s.addReactionType( r2 )
r3 = UnbindingReactionType( KS, K, Sp, kcat1 )
s.addReactionType( r3 )
r4 = BindingReactionType( Sp, P, PSp, ka )
s.addReactionType( r4 )
r5 = UnbindingReactionType( PSp, Sp, P, kd2 )
s.addReactionType( r5 )
r6 = UnbindingReactionType( PSp, P, S, kcat2 )
s.addReactionType( r6 )
model = 'pushpull'
# 'pushpull-Keq-koff_ratio-N_K-N_P-V-mode.dat'
l = Logger( s,
logname = model + '_' + '_'.join( sys.argv[1:8] ) + '_', #+\
# os.environ[ 'SGE_TASK_ID' ],
comment = '@ model=\'%s\'; Keq=%s; koff_ratio=%s\n' %
( model, Keq_str, koff_ratio_str ) +
'#@ V=%s; N_K=%s; N_P=%s; mode=\'%s\'; T=%s\n' %
( V_str, N_K, N_P, mode, T_str ) +
'#@ kon=%g; koff1=%g; koff2=%g; N_S_total=%s\n' %
( kon, koff1, koff2, N_S_total ) +
'#@ kcat1=%g; kcat2=%g\n' %
( kcat1, kcat2 ) +
'#@ ka=%g; kd1=%g; kd2=%g\n' %
( ka, kd1, kd2 ) )
#l.setParticleOutput( ('K','P') )
#l.setInterval( 1e-3 )
#l.writeParticles()
l.log()
while s.t < T:
s.step()
if s.lastReaction:
#log.info( s.dumpPopulation() )
l.log()
| gpl-2.0 |
giuliov/ansible | lib/ansible/module_utils/a10.py | 322 | 4194 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
AXAPI_PORT_PROTOCOLS = {
'tcp': 2,
'udp': 3,
}
AXAPI_VPORT_PROTOCOLS = {
'tcp': 2,
'udp': 3,
'fast-http': 9,
'http': 11,
'https': 12,
}
def a10_argument_spec():
return dict(
host=dict(type='str', required=True),
username=dict(type='str', aliases=['user', 'admin'], required=True),
password=dict(type='str', aliases=['pass', 'pwd'], required=True, no_log=True),
write_config=dict(type='bool', default=False)
)
def axapi_failure(result):
if 'response' in result and result['response'].get('status') == 'fail':
return True
return False
def axapi_call(module, url, post=None):
'''
Returns a datastructure based on the result of the API call
'''
rsp, info = fetch_url(module, url, data=post)
if not rsp or info['status'] >= 400:
module.fail_json(msg="failed to connect (status code %s), error was %s" % (info['status'], info.get('msg', 'no error given')))
try:
raw_data = rsp.read()
data = json.loads(raw_data)
except ValueError:
# at least one API call (system.action.write_config) returns
# XML even when JSON is requested, so do some minimal handling
# here to prevent failing even when the call succeeded
if 'status="ok"' in raw_data.lower():
data = {"response": {"status": "OK"}}
else:
data = {"response": {"status": "fail", "err": {"msg": raw_data}}}
except:
module.fail_json(msg="could not read the result from the host")
finally:
rsp.close()
return data
def axapi_authenticate(module, base_url, username, password):
url = '%s&method=authenticate&username=%s&password=%s' % (base_url, username, password)
result = axapi_call(module, url)
if axapi_failure(result):
return module.fail_json(msg=result['response']['err']['msg'])
sessid = result['session_id']
return base_url + '&session_id=' + sessid
def axapi_enabled_disabled(flag):
'''
The axapi uses 0/1 integer values for flags, rather than strings
or booleans, so convert the given flag to a 0 or 1. For now, params
are specified as strings only so thats what we check.
'''
if flag == 'enabled':
return 1
else:
return 0
def axapi_get_port_protocol(protocol):
return AXAPI_PORT_PROTOCOLS.get(protocol.lower(), None)
def axapi_get_vport_protocol(protocol):
return AXAPI_VPORT_PROTOCOLS.get(protocol.lower(), None)
| gpl-3.0 |
douglaskastle/mezzanine | mezzanine/bin/runtests.py | 34 | 2233 | from __future__ import unicode_literals
import atexit
import os
import shutil
import sys
import django
def main(package="mezzanine"):
"""
This is the main test function called via ``python setup.py test``.
It's responsible for hacking the ``project_template`` dir into
an actual project to test against.
"""
from mezzanine.utils.importing import path_for_import
package_path = path_for_import(package)
project_path = os.path.join(package_path, "project_template")
os.environ["DJANGO_SETTINGS_MODULE"] = "project_name.test_settings"
project_app_path = os.path.join(project_path, "project_name")
local_settings_path = os.path.join(project_app_path, "local_settings.py")
test_settings_path = os.path.join(project_app_path, "test_settings.py")
sys.path.insert(0, package_path)
sys.path.insert(0, project_path)
if not os.path.exists(test_settings_path):
shutil.copy(local_settings_path + ".template", test_settings_path)
with open(test_settings_path, "r") as f:
local_settings = f.read()
with open(test_settings_path, "w") as f:
test_settings = """
from . import settings
globals().update(i for i in settings.__dict__.items() if i[0].isupper())
# Require the mezzanine.accounts app. We use settings.INSTALLED_APPS here so
# the syntax test doesn't complain about an undefined name.
if "mezzanine.accounts" not in settings.INSTALLED_APPS:
INSTALLED_APPS = list(settings.INSTALLED_APPS) + ["mezzanine.accounts"]
# Use the MD5 password hasher by default for quicker test runs.
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
"""
f.write(test_settings + local_settings)
def cleanup_test_settings():
import os # Outer scope sometimes unavailable in atexit functions.
for fn in [test_settings_path, test_settings_path + 'c']:
try:
os.remove(fn)
except OSError:
pass
atexit.register(cleanup_test_settings)
django.setup()
from django.core.management.commands import test
sys.exit(test.Command().execute(verbosity=1))
if __name__ == "__main__":
main()
| bsd-2-clause |
KanoComputing/terminal-quest | linux_story/story/playground/terminals.py | 1 | 5414 | # Terminals.py
#
# Copyright (C) 2014-2016 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPL v2
#
# The main terminal class.
import os
import sys
terminal_path = os.path.abspath(__file__)
dir_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if __name__ == '__main__' and __package__ is None:
if dir_path != '/usr':
sys.path.insert(1, dir_path)
from ..commands_fake import cd
from ..commands_real import ls, sudo, grep, shell_command, launch_application
from terminal import Terminal
# If this is not imported, the escape characters used for the colour prompts
# show up as special characters. We don't use any functions from this module,
# simply importing this module fixes the bug
import readline
class Complete_Terminal(Terminal):
def __init__(self, start_dir, end_dir, validation, hints=[""]):
Terminal.__init__(self, start_dir, end_dir, validation, hints=[""])
#######################################################
# Custom commands
def do_ls(self, line):
ls(self.current_dir, self.filetree, line)
def complete_ls(self, text, line, begidx, endidx):
text = text.split(" ")[-1]
return self.autocomplete_desc(text, line, "both")
def do_cd(self, line):
dir = cd(self.current_dir, self.filetree, line)
if dir:
self.current_dir = dir
self.set_prompt()
def complete_cd(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "dirs")
# modified like ls to show colours
def do_grep(self, line):
grep(self.current_dir, self.filetree, line)
#######################################################
# Standard commands called in the shell
# Commands autocompleted on pressing TAB
def do_mv(self, line):
shell_command(self.current_dir, self.filetree, line, "mv")
self.update_tree()
def complete_mv(self, text, line, begidx, endidx):
completions = self.autocomplete_desc(text, line, "both")
return completions
def do_rm(self, line):
shell_command(self.current_dir, self.filetree, line, "rm")
self.update_tree()
def complete_rm(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
def do_cp(self, line):
shell_command(self.current_dir, self.filetree, line, "cp")
self.update_tree()
def complete_cp(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
def do_cat(self, line):
shell_command(self.current_dir, self.filetree, line, "cat")
def complete_cat(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
def do_wc(self, line):
shell_command(self.current_dir, self.filetree, line, "wc")
def complete_wc(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
def do_more(self, line):
launch_application(self.current_dir, self.filetree, line, "more")
def complete_more(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
def do_chmod(self, line):
shell_command(self.current_dir, self.filetree, line, "chmod")
def complete_chmod(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
# Commands not autocompleted on pressing TAB
def do_mkdir(self, line):
shell_command(self.current_dir, self.filetree, line, "mkdir")
self.update_tree()
def do_touch(self, line):
shell_command(self.current_dir, self.filetree, line, "touch")
self.update_tree()
def do_passwd(self, line):
shell_command(self.current_dir, self.filetree, line, "passwd")
self.update_tree()
def do_xargs(self, line):
shell_command(self.current_dir, self.filetree, line, "xargs")
def do_sudo(self, line):
sudo(self.current_dir, self.filetree, line)
def do_clear(self, line):
shell_command(self.current_dir, self.filetree, line, "clear")
def do_find(self, line):
shell_command(self.current_dir, self.filetree, line, "find")
def do_pwd(self, line):
shell_command(self.current_dir, self.filetree, line, "pwd")
def do_alias(self, line):
shell_command(self.current_dir, self.filetree, line, "alias")
def do_unalias(self, line):
shell_command(self.current_dir, self.filetree, line, "unalias")
#######################################################
# Commands that do not use piping when using subprocess
def do_nano(self, line):
launch_application(self.current_dir, self.filetree, line, "nano")
def complete_nano(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
def do_less(self, line):
launch_application(self.current_dir, self.filetree, line, "less")
def complete_less(self, text, line, begidx, endidx):
return self.autocomplete_desc(text, line, "both")
# Tis is listed with the other launched applications because
# the piping only works if no piping is used
def do_echo(self, line):
launch_application(self.current_dir, self.filetree, line, "echo")
def do_man(self, line):
launch_application(self.current_dir, self.filetree, line, "man")
| gpl-2.0 |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Python/Core/Lib/bsddb/dbrecio.py | 1 | 4354 | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: dbrecio.py
"""
File-like objects that read from or write to a bsddb record.
This implements (nearly) all stdio methods.
f = DBRecIO(db, key, txn=None)
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
f.truncate([size]) # truncate file at to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
Notes:
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- There's a simple test set (see end of this file) - not yet updated
for DBRecIO.
- readline() is not implemented yet.
From:
Itamar Shtull-Trauring <itamar@maxnm.com>
"""
import errno
import string
class DBRecIO:
    """File-like wrapper over a single Berkeley DB record.

    Reads and writes go through partial gets/puts (``dlen``/``doff``) so the
    whole record is never materialized in Python.

    NOTE(review): ``self.len`` is initialized to ``None`` but is used
    arithmetically by seek(mode=2), read(), truncate() and write() -- it is
    presumably set externally after construction; confirm against callers.
    NOTE(review): ``self.buflist`` (used in write() when extending past the
    end) is never initialized anywhere in this class, so sparse extension
    would raise AttributeError.
    """
    def __init__(self, db, key, txn=None):
        # db: open DB handle; key: record key; txn: optional transaction.
        self.db = db
        self.key = key
        self.txn = txn
        self.len = None     # record length; see class docstring
        self.pos = 0        # current position within the record
        self.closed = 0
        self.softspace = 0  # print-statement compatibility attribute (Python 2)
        return
    def close(self):
        # Idempotent; drops references so the DB handle / txn can be reclaimed.
        if not self.closed:
            self.closed = 1
            del self.db
            del self.txn
    def isatty(self):
        if self.closed:
            raise ValueError, 'I/O operation on closed file'
        return 0
    def seek(self, pos, mode=0):
        # mode 0: absolute; 1: relative to current position; 2: relative to end.
        if self.closed:
            raise ValueError, 'I/O operation on closed file'
        if mode == 1:
            pos = pos + self.pos
        elif mode == 2:
            pos = pos + self.len
        # Clamp at 0: seeking before the start is silently pinned, not an error.
        self.pos = max(0, pos)
    def tell(self):
        if self.closed:
            raise ValueError, 'I/O operation on closed file'
        return self.pos
    def read(self, n=-1):
        # Read up to n bytes (everything remaining if n < 0).
        if self.closed:
            raise ValueError, 'I/O operation on closed file'
        if n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos + n, self.len)
        dlen = newpos - self.pos
        # dlen/doff ask Berkeley DB for only this slice of the record.
        r = self.db.get(self.key, txn=self.txn, dlen=dlen, doff=self.pos)
        self.pos = newpos
        return r
    # Disabled draft of readline()/readlines(), parked in a string until
    # implemented (see module notes: "readline() is not implemented yet").
    __fixme = '\n    def readline(self, length=None):\n        if self.closed:\n            raise ValueError, "I/O operation on closed file"\n        if self.buflist:\n            self.buf = self.buf + string.joinfields(self.buflist, \'\')\n            self.buflist = []\n        i = string.find(self.buf, \'\n\', self.pos)\n        if i < 0:\n            newpos = self.len\n        else:\n            newpos = i+1\n        if length is not None:\n            if self.pos + length < newpos:\n                newpos = self.pos + length\n        r = self.buf[self.pos:newpos]\n        self.pos = newpos\n        return r\n\n    def readlines(self, sizehint = 0):\n        total = 0\n        lines = []\n        line = self.readline()\n        while line:\n            lines.append(line)\n            total += len(line)\n            if 0 < sizehint <= total:\n                break\n            line = self.readline()\n        return lines\n    '
    def truncate(self, size=None):
        # Truncate the record to at most ``size`` bytes (current pos by default).
        if self.closed:
            raise ValueError, 'I/O operation on closed file'
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(errno.EINVAL, 'Negative size not allowed')
        elif size < self.pos:
            self.pos = size
        # Partial put with empty data and dlen = bytes-to-drop removes the tail.
        self.db.put(self.key, '', txn=self.txn, dlen=self.len - size, doff=size)
        return
    def write(self, s):
        if self.closed:
            raise ValueError, 'I/O operation on closed file'
        if not s:
            return
        if self.pos > self.len:
            # Zero-fill the gap when writing past the current end.
            # NOTE(review): self.buflist is never initialized -- see class docstring.
            self.buflist.append('\x00' * (self.pos - self.len))
            self.len = self.pos
        newpos = self.pos + len(s)
        # Overwrite len(s) bytes of the record in place at the current offset.
        self.db.put(self.key, s, txn=self.txn, dlen=len(s), doff=self.pos)
        self.pos = newpos
    def writelines(self, list):
        # Equivalent to writing the concatenation of all lines.
        self.write(string.joinfields(list, ''))
    def flush(self):
        # No buffering is done, so flush only validates the open state.
        if self.closed:
            raise ValueError, 'I/O operation on closed file' | unlicense |
Havate/havate-openstack | proto-build/gui/horizon/Horizon_GUI/horizon/workflows/views.py | 1 | 8095 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
from django import http
from django import shortcuts
from django.views import generic
from horizon import exceptions
from horizon.forms.views import ADD_TO_FIELD_HEADER # noqa
from horizon import messages
class WorkflowView(generic.TemplateView):
    """A generic class-based view which handles the intricacies of workflow
    processing with minimal user configuration.
    .. attribute:: workflow_class
    The :class:`~horizon.workflows.Workflow` class which this view handles.
    Required.
    .. attribute:: template_name
    The template to use when rendering this view via standard HTTP
    requests. Required.
    .. attribute:: ajax_template_name
    The template to use when rendering the workflow for AJAX requests.
    In general the default common template should be used. Defaults to
    ``"horizon/common/_workflow.html"``.
    .. attribute:: context_object_name
    The key which should be used for the workflow object in the template
    context. Defaults to ``"workflow"``.
    """
    workflow_class = None
    template_name = 'horizon/common/_workflow_base.html'
    context_object_name = "workflow"
    ajax_template_name = 'horizon/common/_workflow.html'
    # NOTE(review): class-level mutable dict. add_error_to_step() mutates it,
    # so errors are shared by every instance (and request) served by this
    # view class; it likely should be initialized per-instance in __init__.
    step_errors = {}
    def __init__(self):
        # Fail fast at instantiation if a subclass forgot to set the
        # required workflow_class attribute.
        if not self.workflow_class:
            raise AttributeError("You must set the workflow_class attribute "
                                 "on %s." % self.__class__.__name__)
    def get_initial(self):
        """Returns initial data for the workflow. Defaults to using the GET
        parameters to allow pre-seeding of the workflow context values.
        """
        return copy.copy(self.request.GET)
    def get_workflow(self):
        """Returns the instantiated workflow class."""
        extra_context = self.get_initial()
        # "step" in the query string selects which step the workflow opens on.
        entry_point = self.request.GET.get("step", None)
        workflow = self.workflow_class(self.request,
                                       context_seed=extra_context,
                                       entry_point=entry_point)
        return workflow
    def get_context_data(self, **kwargs):
        """Returns the template context, including the workflow class.
        This method should be overridden in subclasses to provide additional
        context data to the template.
        """
        context = super(WorkflowView, self).get_context_data(**kwargs)
        workflow = self.get_workflow()
        context[self.context_object_name] = workflow
        # Post-completion redirect target, taken from GET or POST data.
        next = self.request.REQUEST.get(workflow.redirect_param_name, None)
        context['REDIRECT_URL'] = next
        context['layout'] = self.get_layout()
        # For consistency with Workflow class
        context['modal'] = 'modal' in context['layout']
        if ADD_TO_FIELD_HEADER in self.request.META:
            context['add_to_field'] = self.request.META[ADD_TO_FIELD_HEADER]
        return context
    def get_layout(self):
        """returns classes for the workflow element in template based on
        the workflow characteristics
        """
        # AJAX requests render inside a (initially hidden) modal; plain HTTP
        # requests render as a static page.
        if self.request.is_ajax():
            layout = ['modal', 'hide', ]
            if self.workflow_class.fullscreen:
                layout += ['fullscreen', ]
        else:
            layout = ['static_page', ]
        if self.workflow_class.wizard:
            layout += ['wizard', ]
        return layout
    def get_template_names(self):
        """Returns the template name to use for this request."""
        if self.request.is_ajax():
            template = self.ajax_template_name
        else:
            template = self.template_name
        return template
    def get_object_id(self, obj):
        # Hook for subclasses; defaults to the object's "id" attribute.
        return getattr(obj, "id", None)
    def get_object_display(self, obj):
        # Hook for subclasses; defaults to the object's "name" attribute.
        return getattr(obj, "name", None)
    def add_error_to_step(self, error_msg, step):
        # NOTE(review): writes into the class-level step_errors dict (shared
        # across instances) -- see the attribute definition above.
        self.step_errors[step] = error_msg
    def set_workflow_step_errors(self, context):
        # Replay any errors recorded via add_error_to_step() onto the
        # freshly-instantiated workflow in the context.
        workflow = context['workflow']
        for step in self.step_errors:
            error_msg = self.step_errors[step]
            workflow.add_error_to_step(error_msg, step)
    def get(self, request, *args, **kwargs):
        """Handler for HTTP GET requests."""
        context = self.get_context_data(**kwargs)
        self.set_workflow_step_errors(context)
        return self.render_to_response(context)
    def validate_steps(self, request, workflow, start, end):
        """Validates the workflow steps from ``start`` to ``end``, inclusive.
        Returns a dict describing the validation state of the workflow.
        """
        errors = {}
        for step in workflow.steps[start:end + 1]:
            if not step.action.is_valid():
                # NOTE: inside this genexp the loop target ``errors`` (the
                # per-field error list from iteritems) shadows the outer
                # ``errors`` dict; the inner comprehension iterates the
                # field's list, not the dict. Correct, but fragile reading.
                errors[step.slug] = dict(
                    (field, [unicode(error) for error in errors])
                    for (field, errors) in step.action.errors.iteritems())
        return {
            'has_errors': bool(errors),
            'workflow_slug': workflow.slug,
            'errors': errors,
        }
    def post(self, request, *args, **kwargs):
        """Handler for HTTP POST requests."""
        context = self.get_context_data(**kwargs)
        workflow = context[self.context_object_name]
        try:
            # Check for the VALIDATE_STEP* headers, if they are present
            # and valid integers, return validation results as JSON,
            # otherwise proceed normally.
            validate_step_start = int(self.request.META.get(
                'HTTP_X_HORIZON_VALIDATE_STEP_START', ''))
            validate_step_end = int(self.request.META.get(
                'HTTP_X_HORIZON_VALIDATE_STEP_END', ''))
        except ValueError:
            # No VALIDATE_STEP* headers, or invalid values. Just proceed
            # with normal workflow handling for POSTs.
            pass
        else:
            # There are valid VALIDATE_STEP* headers, so only do validation
            # for the specified steps and return results.
            data = self.validate_steps(request, workflow,
                                       validate_step_start,
                                       validate_step_end)
            return http.HttpResponse(json.dumps(data),
                                     content_type="application/json")
        if workflow.is_valid():
            try:
                success = workflow.finalize()
            except Exception:
                # Full error handling (logging / user messaging) is
                # delegated to horizon.exceptions.
                success = False
                exceptions.handle(request)
            next = self.request.REQUEST.get(workflow.redirect_param_name, None)
            if success:
                msg = workflow.format_status_message(workflow.success_message)
                messages.success(request, msg)
            else:
                msg = workflow.format_status_message(workflow.failure_message)
                messages.error(request, msg)
            if "HTTP_X_HORIZON_ADD_TO_FIELD" in self.request.META:
                # AJAX "add to field" flow: return the new object's id and
                # display name so the client can populate a form field.
                field_id = self.request.META["HTTP_X_HORIZON_ADD_TO_FIELD"]
                data = [self.get_object_id(workflow.object),
                        self.get_object_display(workflow.object)]
                response = http.HttpResponse(json.dumps(data))
                response["X-Horizon-Add-To-Field"] = field_id
                return response
            else:
                return shortcuts.redirect(next or workflow.get_success_url())
        else:
            # Invalid form data: re-render the workflow with its errors.
            return self.render_to_response(context)
| apache-2.0 |
jlowin/airflow | airflow/operators/s3_to_hive_operator.py | 21 | 10268 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import next
from builtins import zip
import logging
from tempfile import NamedTemporaryFile
from airflow.utils.file import TemporaryDirectory
import gzip
import bz2
import tempfile
import os
from airflow.exceptions import AirflowException
from airflow.hooks.S3_hook import S3Hook
from airflow.hooks.hive_hooks import HiveCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.compression import uncompress_file
class S3ToHiveTransfer(BaseOperator):
    """
    Moves data from S3 to Hive. The operator downloads a file from S3,
    stores the file locally before loading it into a Hive table.
    If the ``create`` or ``recreate`` arguments are set to ``True``,
    a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated.
    Hive data types are inferred from the cursor's metadata from.
    Note that the table generated in Hive uses ``STORED AS textfile``
    which isn't the most efficient serialization format. If a
    large amount of data is loaded and/or if the tables gets
    queried considerably, you may want to use this operator only to
    stage the data into a temporary table before loading it into its
    final destination using a ``HiveOperator``.
    :param s3_key: The key to be retrieved from S3
    :type s3_key: str
    :param field_dict: A dictionary of the fields name in the file
        as keys and their Hive types as values
    :type field_dict: dict
    :param hive_table: target Hive table, use dot notation to target a
        specific database
    :type hive_table: str
    :param create: whether to create the table if it doesn't exist
    :type create: bool
    :param recreate: whether to drop and recreate the table at every
        execution
    :type recreate: bool
    :param partition: target partition as a dict of partition columns
        and values
    :type partition: dict
    :param headers: whether the file contains column names on the first
        line
    :type headers: bool
    :param check_headers: whether the column names on the first line should be
        checked against the keys of field_dict
    :type check_headers: bool
    :param wildcard_match: whether the s3_key should be interpreted as a Unix
        wildcard pattern
    :type wildcard_match: bool
    :param delimiter: field delimiter in the file
    :type delimiter: str
    :param s3_conn_id: source s3 connection
    :type s3_conn_id: str
    :param hive_cli_conn_id: destination hive connection
    :type hive_cli_conn_id: str
    :param input_compressed: Boolean to determine if file decompression is
        required to process headers
    :type input_compressed: bool
    """
    template_fields = ('s3_key', 'partition', 'hive_table')
    template_ext = ()
    ui_color = '#a0e08c'
    @apply_defaults
    def __init__(
            self,
            s3_key,
            field_dict,
            hive_table,
            delimiter=',',
            create=True,
            recreate=False,
            partition=None,
            headers=False,
            check_headers=False,
            wildcard_match=False,
            s3_conn_id='s3_default',
            hive_cli_conn_id='hive_cli_default',
            input_compressed=False,
            *args, **kwargs):
        super(S3ToHiveTransfer, self).__init__(*args, **kwargs)
        self.s3_key = s3_key
        self.field_dict = field_dict
        self.hive_table = hive_table
        self.delimiter = delimiter
        self.create = create
        self.recreate = recreate
        self.partition = partition
        self.headers = headers
        self.check_headers = check_headers
        self.wildcard_match = wildcard_match
        self.hive_cli_conn_id = hive_cli_conn_id
        self.s3_conn_id = s3_conn_id
        self.input_compressed = input_compressed
        # Header validation only makes sense when the file has headers and
        # we know the expected column names; reject contradictory settings.
        if (self.check_headers and
                not (self.field_dict is not None and self.headers)):
            raise AirflowException("To check_headers provide " +
                                   "field_dict and headers")
    def execute(self, context):
        """Download the S3 key to a temp file and load it into Hive.

        Flow: resolve the key (wildcard or exact) -> download into a temp
        dir -> optionally decompress, validate and strip the header row ->
        hand the file to HiveCliHook.load_file().
        """
        # Downloading file from S3
        self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
        self.hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
        logging.info("Downloading S3 file")
        if self.wildcard_match:
            if not self.s3.check_for_wildcard_key(self.s3_key):
                raise AirflowException("No key matches {0}"
                                       .format(self.s3_key))
            s3_key_object = self.s3.get_wildcard_key(self.s3_key)
        else:
            if not self.s3.check_for_key(self.s3_key):
                raise AirflowException(
                    "The key {0} does not exists".format(self.s3_key))
            s3_key_object = self.s3.get_key(self.s3_key)
        # Keep the original extension so decompression can detect the codec.
        root, file_ext = os.path.splitext(s3_key_object.key)
        with TemporaryDirectory(prefix='tmps32hive_') as tmp_dir,\
                NamedTemporaryFile(mode="w",
                                   dir=tmp_dir,
                                   suffix=file_ext) as f:
            logging.info("Dumping S3 key {0} contents to local"
                         " file {1}".format(s3_key_object.key, f.name))
            s3_key_object.get_contents_to_file(f)
            f.flush()
            # The download is complete; release the S3 connection early.
            self.s3.connection.close()
            if not self.headers:
                # No header handling needed: load the raw file as-is.
                logging.info("Loading file {0} into Hive".format(f.name))
                self.hive.load_file(
                    f.name,
                    self.hive_table,
                    field_dict=self.field_dict,
                    create=self.create,
                    partition=self.partition,
                    delimiter=self.delimiter,
                    recreate=self.recreate)
            else:
                # Decompressing file
                if self.input_compressed:
                    logging.info("Uncompressing file {0}".format(f.name))
                    fn_uncompressed = uncompress_file(f.name,
                                                      file_ext,
                                                      tmp_dir)
                    logging.info("Uncompressed to {0}".format(fn_uncompressed))
                    # uncompressed file available now so deleting
                    # compressed file to save disk space
                    f.close()
                else:
                    fn_uncompressed = f.name
                # Testing if header matches field_dict
                if self.check_headers:
                    logging.info("Matching file header against field_dict")
                    header_list = self._get_top_row_as_list(fn_uncompressed)
                    if not self._match_headers(header_list):
                        raise AirflowException("Header check failed")
                # Deleting top header row
                logging.info("Removing header from file {0}".
                             format(fn_uncompressed))
                headless_file = (
                    self._delete_top_row_and_compress(fn_uncompressed,
                                                      file_ext,
                                                      tmp_dir))
                logging.info("Headless file {0}".format(headless_file))
                logging.info("Loading file {0} into Hive".format(headless_file))
                self.hive.load_file(headless_file,
                                    self.hive_table,
                                    field_dict=self.field_dict,
                                    create=self.create,
                                    partition=self.partition,
                                    delimiter=self.delimiter,
                                    recreate=self.recreate)
    def _get_top_row_as_list(self, file_name):
        # Return the first line of the file split on the configured delimiter.
        with open(file_name, 'rt') as f:
            header_line = f.readline().strip()
            header_list = header_line.split(self.delimiter)
            return header_list
    def _match_headers(self, header_list):
        """Case-insensitively compare the file's header row to field_dict keys.

        NOTE(review): comparison zips the headers against field_dict.keys()
        positionally, so correctness assumes field_dict preserves column
        order (e.g. an OrderedDict) -- confirm with callers.
        """
        if not header_list:
            raise AirflowException("Unable to retrieve header row from file")
        field_names = self.field_dict.keys()
        if len(field_names) != len(header_list):
            # NOTE(review): adjacent string literals here concatenate without
            # a separator, so the log reads "...mismatchFile headers:".
            logging.warning("Headers count mismatch"
                            "File headers:\n {header_list}\n"
                            "Field names: \n {field_names}\n"
                            "".format(**locals()))
            return False
        test_field_match = [h1.lower() == h2.lower()
                            for h1, h2 in zip(header_list, field_names)]
        if not all(test_field_match):
            logging.warning("Headers do not match field names"
                            "File headers:\n {header_list}\n"
                            "Field names: \n {field_names}\n"
                            "".format(**locals()))
            return False
        else:
            return True
    def _delete_top_row_and_compress(
            self,
            input_file_name,
            output_file_ext,
            dest_dir):
        """Copy the file minus its first line, re-compressing to match
        output_file_ext (.gz / .bz2); returns the new file's path."""
        # When output_file_ext is not defined, file is not compressed
        open_fn = open
        if output_file_ext.lower() == '.gz':
            open_fn = gzip.GzipFile
        elif output_file_ext.lower() == '.bz2':
            open_fn = bz2.BZ2File
        os_fh_output, fn_output = \
            tempfile.mkstemp(suffix=output_file_ext, dir=dest_dir)
        with open(input_file_name, 'rb') as f_in,\
                open_fn(fn_output, 'wb') as f_out:
            f_in.seek(0)
            # Consume exactly the first line, then stream the rest through.
            next(f_in)
            for line in f_in:
                f_out.write(line)
        return fn_output
| apache-2.0 |
godfather1103/WeiboRobot | python27/1.0/lib/email/test/test_email_codecs_renamed.py | 298 | 2842 | # Copyright (C) 2002-2006 Python Software Foundation
# Contact: email-sig@python.org
# email package unit tests for (optional) Asian codecs
import unittest
from test.test_support import run_unittest
from email.test.test_email import TestEmailBase
from email.charset import Charset
from email.header import Header, decode_header
from email.message import Message
# We're compatible with Python 2.3, but it doesn't have the built-in Asian
# codecs, so we have to skip all these tests.
# Probe for the optional Asian codecs: if the euc-jp codec is missing
# (LookupError), skip this whole test module at import time.
try:
    unicode('foo', 'euc-jp')
except LookupError:
    raise unittest.SkipTest
class TestEmailAsianCodecs(TestEmailBase):
    """Tests for RFC 2047 header encoding and payload handling with the
    optional Japanese (euc-jp) codec alongside a Latin-1 charset."""
    def test_japanese_codecs(self):
        eq = self.ndiffAssertEqual
        j = Charset("euc-jp")
        g = Charset("iso-8859-1")
        h = Header("Hello World!")
        # euc-jp bytes for a Japanese greeting and Latin-1 for a German one.
        jhello = '\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc\xa5\xeb\xa5\xc9\xa1\xaa'
        ghello = 'Gr\xfc\xdf Gott!'
        h.append(jhello, j)
        h.append(ghello, g)
        # BAW: This used to -- and maybe should -- fold the two iso-8859-1
        # chunks into a single encoded word.  However it doesn't violate the
        # standard to have them as two encoded chunks and maybe it's
        # reasonable <wink> for each .append() call to result in a separate
        # encoded word.
        eq(h.encode(), """\
Hello World! =?iso-2022-jp?b?GyRCJU8lbSE8JW8hPCVrJUkhKhsoQg==?=
 =?iso-8859-1?q?Gr=FC=DF?= =?iso-8859-1?q?_Gott!?=""")
        # Round-trip: decoding the folded header recovers each chunk with
        # its charset (euc-jp is emitted on the wire as iso-2022-jp).
        eq(decode_header(h.encode()),
           [('Hello World!', None),
            ('\x1b$B%O%m!<%o!<%k%I!*\x1b(B', 'iso-2022-jp'),
            ('Gr\xfc\xdf Gott!', 'iso-8859-1')])
        # NOTE: shadows the Python 2 builtin ``long`` (harmless in a test).
        long = 'test-ja \xa4\xd8\xc5\xea\xb9\xc6\xa4\xb5\xa4\xec\xa4\xbf\xa5\xe1\xa1\xbc\xa5\xeb\xa4\xcf\xbb\xca\xb2\xf1\xbc\xd4\xa4\xce\xbe\xb5\xc7\xa7\xa4\xf2\xc2\xd4\xa4\xc3\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9'
        h = Header(long, j, header_name="Subject")
        # test a very long header
        enc = h.encode()
        # TK: splitting point may differ by codec design and/or Header encoding
        eq(enc , """\
=?iso-2022-jp?b?dGVzdC1qYSAbJEIkWEVqOUYkNSRsJD8lYSE8JWskTztKGyhC?=
 =?iso-2022-jp?b?GyRCMnE8VCROPjVHJyRyQlQkQyRGJCQkXiQ5GyhC?=""")
        # TK: full decode comparison
        eq(h.__unicode__().encode('euc-jp'), long)
    def test_payload_encoding(self):
        # A payload set with an explicit charset must round-trip through
        # get_payload() + get_content_charset() back to the original bytes.
        jhello = '\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc\xa5\xeb\xa5\xc9\xa1\xaa'
        jcode  = 'euc-jp'
        msg = Message()
        msg.set_payload(jhello, jcode)
        ustr = unicode(msg.get_payload(), msg.get_content_charset())
        self.assertEqual(jhello, ustr.encode(jcode))
def suite():
    """Build and return a TestSuite holding the Asian-codec email tests."""
    # Use a distinct local name so the function's own name is not shadowed.
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(TestEmailAsianCodecs))
    return tests
def test_main():
    # Entry point used by CPython's regrtest-style test driver.
    run_unittest(TestEmailAsianCodecs)
# When run directly, collect tests via the module-level suite() factory.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| gpl-3.0 |
wndhydrnt/airflow | tests/sensors/test_http_sensor.py | 14 | 6591 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import requests
from mock import patch
from airflow import DAG, configuration
from airflow.exceptions import AirflowException, AirflowSensorTimeout
from airflow.operators.http_operator import SimpleHttpOperator
from airflow.sensors.http_sensor import HttpSensor
from airflow.utils.timezone import datetime
# Prefer the stdlib mock (Python 3); fall back to the standalone ``mock``
# package, and finally to None if neither is importable.
try:
    from unittest import mock
except ImportError:
    try:
        import mock
    except ImportError:
        mock = None
# Load Airflow's test configuration before any DAG/operator is built.
configuration.load_test_config()
DEFAULT_DATE = datetime(2015, 1, 1)  # fixed execution date for determinism
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
TEST_DAG_ID = 'unit_test_dag'
class HttpSensorTests(unittest.TestCase):
    """Unit tests for HttpSensor poke behavior, patching the underlying
    requests.Session.send so no network I/O occurs."""
    def setUp(self):
        configuration.load_test_config()
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE
        }
        self.dag = DAG(TEST_DAG_ID, default_args=args)
    @patch("airflow.hooks.http_hook.requests.Session.send")
    def test_poke_exception(self, mock_session_send):
        """
        Exception occurs in poke function should not be ignored.
        """
        response = requests.Response()
        response.status_code = 200
        mock_session_send.return_value = response
        def resp_check(resp):
            # Simulate a response_check callback that itself blows up.
            raise AirflowException('AirflowException raised here!')
        task = HttpSensor(
            task_id='http_sensor_poke_exception',
            http_conn_id='http_default',
            endpoint='',
            request_params={},
            response_check=resp_check,
            timeout=5,
            poke_interval=1)
        # The callback's exception must propagate, not be swallowed as a
        # failed poke.
        with self.assertRaisesRegexp(AirflowException, 'AirflowException raised here!'):
            task.execute(None)
    @patch("airflow.hooks.http_hook.requests.Session.send")
    def test_head_method(self, mock_session_send):
        def resp_check(resp):
            return True
        task = HttpSensor(
            dag=self.dag,
            task_id='http_sensor_head_method',
            http_conn_id='http_default',
            endpoint='',
            request_params={},
            method='HEAD',
            response_check=resp_check,
            timeout=5,
            poke_interval=1)
        task.execute(None)
        # Inspect the request actually handed to Session.send and compare
        # it to an independently prepared HEAD request.
        args, kwargs = mock_session_send.call_args
        received_request = args[0]
        prep_request = requests.Request(
            'HEAD',
            'https://www.google.com',
            {}).prepare()
        self.assertEqual(prep_request.url, received_request.url)
        self.assertTrue(prep_request.method, received_request.method)
    @patch("airflow.hooks.http_hook.requests.Session.send")
    def test_logging_head_error_request(
        self,
        mock_session_send
    ):
        def resp_check(resp):
            return True
        # Canned 404 so every poke fails until the sensor times out.
        response = requests.Response()
        response.status_code = 404
        response.reason = 'Not Found'
        mock_session_send.return_value = response
        task = HttpSensor(
            dag=self.dag,
            task_id='http_sensor_head_method',
            http_conn_id='http_default',
            endpoint='',
            request_params={},
            method='HEAD',
            response_check=resp_check,
            timeout=5,
            poke_interval=1
        )
        # The hook must log each HTTP error before the sensor finally
        # times out.
        with mock.patch.object(task.hook.log, 'error') as mock_errors:
            with self.assertRaises(AirflowSensorTimeout):
                task.execute(None)
                self.assertTrue(mock_errors.called)
                mock_errors.assert_called_with('HTTP error: %s', 'Not Found')
class FakeSession(object):
    """Minimal stand-in for ``requests.Session``.

    Always yields one canned 200 response whose body starts as
    ``airbnb/airflow``.  If the request being prepared carries a ``date``
    parameter, ``/<date>`` is appended to the body so tests can assert on
    templated values.
    """
    def __init__(self):
        canned = requests.Response()
        canned.status_code = 200
        canned._content = 'airbnb/airflow'.encode('ascii', 'ignore')
        self.response = canned
    def send(self, request, **kwargs):
        # Every send returns the same shared response object.
        return self.response
    def prepare_request(self, request):
        params = request.params
        if 'date' in params:
            # Mirror the templated date into the body for assertions.
            suffix = '/' + params['date']
            self.response._content += suffix.encode('ascii', 'ignore')
        return self.response
class HttpOpSensorTest(unittest.TestCase):
    """End-to-end-style tests for SimpleHttpOperator and HttpSensor using
    FakeSession in place of requests.Session (no network)."""
    def setUp(self):
        configuration.load_test_config()
        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE_ISO}
        dag = DAG(TEST_DAG_ID, default_args=args)
        self.dag = dag
    @mock.patch('requests.Session', FakeSession)
    def test_get(self):
        # A plain GET through the operator should simply succeed against
        # the fake session.
        t = SimpleHttpOperator(
            task_id='get_op',
            method='GET',
            endpoint='/search',
            data={"client": "ubuntu", "q": "airflow"},
            headers={},
            dag=self.dag)
        t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
    @mock.patch('requests.Session', FakeSession)
    def test_get_response_check(self):
        # response_check sees the canned body and must evaluate truthy.
        t = SimpleHttpOperator(
            task_id='get_op',
            method='GET',
            endpoint='/search',
            data={"client": "ubuntu", "q": "airflow"},
            response_check=lambda response: ("airbnb/airflow" in response.text),
            headers={},
            dag=self.dag)
        t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
    @mock.patch('requests.Session', FakeSession)
    def test_sensor(self):
        # The templated {{ds}} param is echoed into the body by FakeSession,
        # so the response_check verifies Jinja templating end to end.
        sensor = HttpSensor(
            task_id='http_sensor_check',
            http_conn_id='http_default',
            endpoint='/search',
            request_params={"client": "ubuntu", "q": "airflow", 'date': '{{ds}}'},
            headers={},
            response_check=lambda response: (
                "airbnb/airflow/" + DEFAULT_DATE.strftime('%Y-%m-%d')
                in response.text),
            poke_interval=5,
            timeout=15,
            dag=self.dag)
        sensor.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
| apache-2.0 |
2014c2g2/teamwork | exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/unittest/test/testmock/testhelpers.py | 737 | 25793 | import unittest
from unittest.mock import (
call, _Call, create_autospec, MagicMock,
Mock, ANY, _CallList, patch, PropertyMock
)
from datetime import datetime
class SomeClass(object):
    """Spec fixture: only the method *signatures* matter -- the tests feed
    this class to create_autospec() and assert that calls against the mock
    are signature-checked."""
    def one(self, a, b):
        pass
    def two(self):
        pass
    def three(self, a=None):
        pass
class AnyTest(unittest.TestCase):
    """Tests for mock.ANY: it compares equal to everything, so it can stand
    in for unpredictable arguments in assert_called_with and mock_calls."""
    def test_any(self):
        # ANY equals arbitrary objects and works for positional and
        # keyword argument matching alike.
        self.assertEqual(ANY, object())
        mock = Mock()
        mock(ANY)
        mock.assert_called_with(ANY)
        mock = Mock()
        mock(foo=ANY)
        mock.assert_called_with(foo=ANY)
    def test_repr(self):
        self.assertEqual(repr(ANY), '<ANY>')
        self.assertEqual(str(ANY), '<ANY>')
    def test_any_and_datetime(self):
        # Useful for arguments (timestamps) that cannot be reproduced.
        mock = Mock()
        mock(datetime.now(), foo=datetime.now())
        mock.assert_called_with(ANY, foo=ANY)
    def test_any_mock_calls_comparison_order(self):
        # Equality must hold in both directions even when the recorded
        # argument's own __eq__ always returns False (like Foo below).
        mock = Mock()
        d = datetime.now()
        class Foo(object):
            def __eq__(self, other):
                return False
            def __ne__(self, other):
                return True
        for d in datetime.now(), Foo():
            mock.reset_mock()
            mock(d, foo=d, bar=d)
            mock.method(d, zinga=d, alpha=d)
            mock().method(a1=d, z99=d)
            expected = [
                call(ANY, foo=ANY, bar=ANY),
                call.method(ANY, zinga=ANY, alpha=ANY),
                call(), call().method(a1=ANY, z99=ANY)
            ]
            # Compare in both orders: list == mock_calls and vice versa.
            self.assertEqual(expected, mock.mock_calls)
            self.assertEqual(mock.mock_calls, expected)
class CallTest(unittest.TestCase):
    """Tests for the _Call helper behind mock.call: its flexible equality
    rules, repr, attribute/extended chaining, and call_list expansion."""
    def test_call_with_call(self):
        # An empty _Call equals every "empty" spelling: no name, empty
        # args, empty kwargs, or any name with empty args/kwargs.
        kall = _Call()
        self.assertEqual(kall, _Call())
        self.assertEqual(kall, _Call(('',)))
        self.assertEqual(kall, _Call(((),)))
        self.assertEqual(kall, _Call(({},)))
        self.assertEqual(kall, _Call(('', ())))
        self.assertEqual(kall, _Call(('', {})))
        self.assertEqual(kall, _Call(('', (), {})))
        self.assertEqual(kall, _Call(('foo',)))
        self.assertEqual(kall, _Call(('bar', ())))
        self.assertEqual(kall, _Call(('baz', {})))
        self.assertEqual(kall, _Call(('spam', (), {})))
        # With positional args: equal iff the args match.
        kall = _Call(((1, 2, 3),))
        self.assertEqual(kall, _Call(((1, 2, 3),)))
        self.assertEqual(kall, _Call(('', (1, 2, 3))))
        self.assertEqual(kall, _Call(((1, 2, 3), {})))
        self.assertEqual(kall, _Call(('', (1, 2, 3), {})))
        kall = _Call(((1, 2, 4),))
        self.assertNotEqual(kall, _Call(('', (1, 2, 3))))
        self.assertNotEqual(kall, _Call(('', (1, 2, 3), {})))
        # A named call only equals calls with the same (or no) name.
        kall = _Call(('foo', (1, 2, 4),))
        self.assertNotEqual(kall, _Call(('', (1, 2, 4))))
        self.assertNotEqual(kall, _Call(('', (1, 2, 4), {})))
        self.assertNotEqual(kall, _Call(('bar', (1, 2, 4))))
        self.assertNotEqual(kall, _Call(('bar', (1, 2, 4), {})))
        # Keyword-only calls: all spellings with the same kwargs are equal.
        kall = _Call(({'a': 3},))
        self.assertEqual(kall, _Call(('', (), {'a': 3})))
        self.assertEqual(kall, _Call(('', {'a': 3})))
        self.assertEqual(kall, _Call(((), {'a': 3})))
        self.assertEqual(kall, _Call(({'a': 3},)))
    def test_empty__Call(self):
        # The default _Call equals every empty-call spelling.
        args = _Call()
        self.assertEqual(args, ())
        self.assertEqual(args, ('foo',))
        self.assertEqual(args, ((),))
        self.assertEqual(args, ('foo', ()))
        self.assertEqual(args, ('foo',(), {}))
        self.assertEqual(args, ('foo', {}))
        self.assertEqual(args, ({},))
    def test_named_empty_call(self):
        # Once a _Call carries a name, only same-named (or named) empty
        # spellings compare equal; anonymous ones do not.
        args = _Call(('foo', (), {}))
        self.assertEqual(args, ('foo',))
        self.assertEqual(args, ('foo', ()))
        self.assertEqual(args, ('foo',(), {}))
        self.assertEqual(args, ('foo', {}))
        self.assertNotEqual(args, ((),))
        self.assertNotEqual(args, ())
        self.assertNotEqual(args, ({},))
        self.assertNotEqual(args, ('bar',))
        self.assertNotEqual(args, ('bar', ()))
        self.assertNotEqual(args, ('bar', {}))
    def test_call_with_args(self):
        # Anonymous call with positional args matches any name.
        args = _Call(((1, 2, 3), {}))
        self.assertEqual(args, ((1, 2, 3),))
        self.assertEqual(args, ('foo', (1, 2, 3)))
        self.assertEqual(args, ('foo', (1, 2, 3), {}))
        self.assertEqual(args, ((1, 2, 3), {}))
    def test_named_call_with_args(self):
        # Named call with args does NOT match the anonymous spelling.
        args = _Call(('foo', (1, 2, 3), {}))
        self.assertEqual(args, ('foo', (1, 2, 3)))
        self.assertEqual(args, ('foo', (1, 2, 3), {}))
        self.assertNotEqual(args, ((1, 2, 3),))
        self.assertNotEqual(args, ((1, 2, 3), {}))
    def test_call_with_kwargs(self):
        args = _Call(((), dict(a=3, b=4)))
        self.assertEqual(args, (dict(a=3, b=4),))
        self.assertEqual(args, ('foo', dict(a=3, b=4)))
        self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
        self.assertEqual(args, ((), dict(a=3, b=4)))
    def test_named_call_with_kwargs(self):
        args = _Call(('foo', (), dict(a=3, b=4)))
        self.assertEqual(args, ('foo', dict(a=3, b=4)))
        self.assertEqual(args, ('foo', (), dict(a=3, b=4)))
        self.assertNotEqual(args, (dict(a=3, b=4),))
        self.assertNotEqual(args, ((), dict(a=3, b=4)))
    def test_call_with_args_call_empty_name(self):
        # _Call and the public `call` helper interoperate (both directions,
        # plus membership tests).
        args = _Call(((1, 2, 3), {}))
        self.assertEqual(args, call(1, 2, 3))
        self.assertEqual(call(1, 2, 3), args)
        self.assertTrue(call(1, 2, 3) in [args])
    def test_call_ne(self):
        # __ne__ must be consistent with __eq__.
        self.assertNotEqual(_Call(((1, 2, 3),)), call(1, 2))
        self.assertFalse(_Call(((1, 2, 3),)) != call(1, 2, 3))
        self.assertTrue(_Call(((1, 2), {})) != call(1, 2, 3))
    def test_call_non_tuples(self):
        # _Call never equals non-tuple objects.
        kall = _Call(((1, 2, 3),))
        for value in 1, None, self, int:
            self.assertNotEqual(kall, value)
            self.assertFalse(kall == value)
    def test_repr(self):
        self.assertEqual(repr(_Call()), 'call()')
        self.assertEqual(repr(_Call(('foo',))), 'call.foo()')
        self.assertEqual(repr(_Call(((1, 2, 3), {'a': 'b'}))),
                         "call(1, 2, 3, a='b')")
        self.assertEqual(repr(_Call(('bar', (1, 2, 3), {'a': 'b'}))),
                         "call.bar(1, 2, 3, a='b')")
        self.assertEqual(repr(call), 'call')
        self.assertEqual(str(call), 'call')
        self.assertEqual(repr(call()), 'call()')
        self.assertEqual(repr(call(1)), 'call(1)')
        self.assertEqual(repr(call(zz='thing')), "call(zz='thing')")
        self.assertEqual(repr(call().foo), 'call().foo')
        # Chained reprs drop intermediate argument detail by design.
        self.assertEqual(repr(call(1).foo.bar(a=3).bing),
                         'call().foo.bar().bing')
        self.assertEqual(
            repr(call().foo(1, 2, a=3)),
            "call().foo(1, 2, a=3)"
        )
        self.assertEqual(repr(call()()), "call()()")
        self.assertEqual(repr(call(1)(2)), "call()(2)")
        self.assertEqual(
            repr(call()().bar().baz.beep(1)),
            "call()().bar().baz.beep(1)"
        )
    def test_call(self):
        # `call` builds (name, args, kwargs) triples and matches what a
        # Mock records in call_args_list.
        self.assertEqual(call(), ('', (), {}))
        self.assertEqual(call('foo', 'bar', one=3, two=4),
                         ('', ('foo', 'bar'), {'one': 3, 'two': 4}))
        mock = Mock()
        mock(1, 2, 3)
        mock(a=3, b=6)
        self.assertEqual(mock.call_args_list,
                         [call(1, 2, 3), call(a=3, b=6)])
    def test_attribute_call(self):
        # Attribute access on `call` produces named method calls that match
        # Mock.method_calls entries.
        self.assertEqual(call.foo(1), ('foo', (1,), {}))
        self.assertEqual(call.bar.baz(fish='eggs'),
                         ('bar.baz', (), {'fish': 'eggs'}))
        mock = Mock()
        mock.foo(1, 2 ,3)
        mock.bar.baz(a=3, b=6)
        self.assertEqual(mock.method_calls,
                         [call.foo(1, 2, 3), call.bar.baz(a=3, b=6)])
    def test_extended_call(self):
        # Chained calls: only the final segment's args are kept in the tuple.
        result = call(1).foo(2).bar(3, a=4)
        self.assertEqual(result, ('().foo().bar', (3,), dict(a=4)))
        mock = MagicMock()
        mock(1, 2, a=3, b=4)
        self.assertEqual(mock.call_args, call(1, 2, a=3, b=4))
        self.assertNotEqual(mock.call_args, call(1, 2, 3))
        self.assertEqual(mock.call_args_list, [call(1, 2, a=3, b=4)])
        self.assertEqual(mock.mock_calls, [call(1, 2, a=3, b=4)])
        mock = MagicMock()
        mock.foo(1).bar()().baz.beep(a=6)
        last_call = call.foo(1).bar()().baz.beep(a=6)
        self.assertEqual(mock.mock_calls[-1], last_call)
        self.assertEqual(mock.mock_calls, last_call.call_list())
    def test_call_list(self):
        # call_list() expands a chained call into the full sequence of
        # intermediate calls, matching what the Mock recorded.
        mock = MagicMock()
        mock(1)
        self.assertEqual(call(1).call_list(), mock.mock_calls)
        mock = MagicMock()
        mock(1).method(2)
        self.assertEqual(call(1).method(2).call_list(),
                         mock.mock_calls)
        mock = MagicMock()
        mock(1).method(2)(3)
        self.assertEqual(call(1).method(2)(3).call_list(),
                         mock.mock_calls)
        mock = MagicMock()
        int(mock(1).method(2)(3).foo.bar.baz(4)(5))
        kall = call(1).method(2)(3).foo.bar.baz(4)(5).__int__()
        self.assertEqual(kall.call_list(), mock.mock_calls)
    def test_call_any(self):
        # `call` itself compares equal to ANY (and vice versa).
        self.assertEqual(call, ANY)
        m = MagicMock()
        int(m)
        self.assertEqual(m.mock_calls, [ANY])
        self.assertEqual([ANY], m.mock_calls)
    def test_two_args_call(self):
        # two=True builds a two-element (args, kwargs) _Call with no name.
        args = _Call(((1, 2), {'a': 3}), two=True)
        self.assertEqual(len(args), 2)
        self.assertEqual(args[0], (1, 2))
        self.assertEqual(args[1], {'a': 3})
        other_args = _Call(((1, 2), {'a': 3}))
        self.assertEqual(args, other_args)
class SpecSignatureTest(unittest.TestCase):
def _check_someclass_mock(self, mock):
self.assertRaises(AttributeError, getattr, mock, 'foo')
mock.one(1, 2)
mock.one.assert_called_with(1, 2)
self.assertRaises(AssertionError,
mock.one.assert_called_with, 3, 4)
self.assertRaises(TypeError, mock.one, 1)
mock.two()
mock.two.assert_called_with()
self.assertRaises(AssertionError,
mock.two.assert_called_with, 3)
self.assertRaises(TypeError, mock.two, 1)
mock.three()
mock.three.assert_called_with()
self.assertRaises(AssertionError,
mock.three.assert_called_with, 3)
self.assertRaises(TypeError, mock.three, 3, 2)
mock.three(1)
mock.three.assert_called_with(1)
mock.three(a=1)
mock.three.assert_called_with(a=1)
def test_basic(self):
for spec in (SomeClass, SomeClass()):
mock = create_autospec(spec)
self._check_someclass_mock(mock)
def test_create_autospec_return_value(self):
def f():
pass
mock = create_autospec(f, return_value='foo')
self.assertEqual(mock(), 'foo')
class Foo(object):
pass
mock = create_autospec(Foo, return_value='foo')
self.assertEqual(mock(), 'foo')
def test_autospec_reset_mock(self):
m = create_autospec(int)
int(m)
m.reset_mock()
self.assertEqual(m.__int__.call_count, 0)
def test_mocking_unbound_methods(self):
class Foo(object):
def foo(self, foo):
pass
p = patch.object(Foo, 'foo')
mock_foo = p.start()
Foo().foo(1)
mock_foo.assert_called_with(1)
def test_create_autospec_unbound_methods(self):
# see mock issue 128
# this is expected to fail until the issue is fixed
return
class Foo(object):
def foo(self):
pass
klass = create_autospec(Foo)
instance = klass()
self.assertRaises(TypeError, instance.foo, 1)
# Note: no type checking on the "self" parameter
klass.foo(1)
klass.foo.assert_called_with(1)
self.assertRaises(TypeError, klass.foo)
def test_create_autospec_keyword_arguments(self):
class Foo(object):
a = 3
m = create_autospec(Foo, a='3')
self.assertEqual(m.a, '3')
def test_create_autospec_keyword_only_arguments(self):
def foo(a, *, b=None):
pass
m = create_autospec(foo)
m(1)
m.assert_called_with(1)
self.assertRaises(TypeError, m, 1, 2)
m(2, b=3)
m.assert_called_with(2, b=3)
def test_function_as_instance_attribute(self):
obj = SomeClass()
def f(a):
pass
obj.f = f
mock = create_autospec(obj)
mock.f('bing')
mock.f.assert_called_with('bing')
def test_spec_as_list(self):
# because spec as a list of strings in the mock constructor means
# something very different we treat a list instance as the type.
mock = create_autospec([])
mock.append('foo')
mock.append.assert_called_with('foo')
self.assertRaises(AttributeError, getattr, mock, 'foo')
class Foo(object):
foo = []
mock = create_autospec(Foo)
mock.foo.append(3)
mock.foo.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.foo, 'foo')
def test_attributes(self):
class Sub(SomeClass):
attr = SomeClass()
sub_mock = create_autospec(Sub)
for mock in (sub_mock, sub_mock.attr):
self._check_someclass_mock(mock)
def test_builtin_functions_types(self):
# we could replace builtin functions / methods with a function
# with *args / **kwargs signature. Using the builtin method type
# as a spec seems to work fairly well though.
class BuiltinSubclass(list):
def bar(self, arg):
pass
sorted = sorted
attr = {}
mock = create_autospec(BuiltinSubclass)
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.append, 'foo')
mock.bar('foo')
mock.bar.assert_called_with('foo')
self.assertRaises(TypeError, mock.bar, 'foo', 'bar')
self.assertRaises(AttributeError, getattr, mock.bar, 'foo')
mock.sorted([1, 2])
mock.sorted.assert_called_with([1, 2])
self.assertRaises(AttributeError, getattr, mock.sorted, 'foo')
mock.attr.pop(3)
mock.attr.pop.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock.attr, 'foo')
def test_method_calls(self):
class Sub(SomeClass):
attr = SomeClass()
mock = create_autospec(Sub)
mock.one(1, 2)
mock.two()
mock.three(3)
expected = [call.one(1, 2), call.two(), call.three(3)]
self.assertEqual(mock.method_calls, expected)
mock.attr.one(1, 2)
mock.attr.two()
mock.attr.three(3)
expected.extend(
[call.attr.one(1, 2), call.attr.two(), call.attr.three(3)]
)
self.assertEqual(mock.method_calls, expected)
def test_magic_methods(self):
class BuiltinSubclass(list):
attr = {}
mock = create_autospec(BuiltinSubclass)
self.assertEqual(list(mock), [])
self.assertRaises(TypeError, int, mock)
self.assertRaises(TypeError, int, mock.attr)
self.assertEqual(list(mock), [])
self.assertIsInstance(mock['foo'], MagicMock)
self.assertIsInstance(mock.attr['foo'], MagicMock)
def test_spec_set(self):
class Sub(SomeClass):
attr = SomeClass()
for spec in (Sub, Sub()):
mock = create_autospec(spec, spec_set=True)
self._check_someclass_mock(mock)
self.assertRaises(AttributeError, setattr, mock, 'foo', 'bar')
self.assertRaises(AttributeError, setattr, mock.attr, 'foo', 'bar')
def test_descriptors(self):
class Foo(object):
@classmethod
def f(cls, a, b):
pass
@staticmethod
def g(a, b):
pass
class Bar(Foo):
pass
class Baz(SomeClass, Bar):
pass
for spec in (Foo, Foo(), Bar, Bar(), Baz, Baz()):
mock = create_autospec(spec)
mock.f(1, 2)
mock.f.assert_called_once_with(1, 2)
mock.g(3, 4)
mock.g.assert_called_once_with(3, 4)
def test_recursive(self):
class A(object):
def a(self):
pass
foo = 'foo bar baz'
bar = foo
A.B = A
mock = create_autospec(A)
mock()
self.assertFalse(mock.B.called)
mock.a()
mock.B.a()
self.assertEqual(mock.method_calls, [call.a(), call.B.a()])
self.assertIs(A.foo, A.bar)
self.assertIsNot(mock.foo, mock.bar)
mock.foo.lower()
self.assertRaises(AssertionError, mock.bar.lower.assert_called_with)
def test_spec_inheritance_for_classes(self):
class Foo(object):
def a(self):
pass
class Bar(object):
def f(self):
pass
class_mock = create_autospec(Foo)
self.assertIsNot(class_mock, class_mock())
for this_mock in class_mock, class_mock():
this_mock.a()
this_mock.a.assert_called_with()
self.assertRaises(TypeError, this_mock.a, 'foo')
self.assertRaises(AttributeError, getattr, this_mock, 'b')
instance_mock = create_autospec(Foo())
instance_mock.a()
instance_mock.a.assert_called_with()
self.assertRaises(TypeError, instance_mock.a, 'foo')
self.assertRaises(AttributeError, getattr, instance_mock, 'b')
# The return value isn't callable
self.assertRaises(TypeError, instance_mock)
instance_mock.Bar.f()
instance_mock.Bar.f.assert_called_with()
self.assertRaises(AttributeError, getattr, instance_mock.Bar, 'g')
instance_mock.Bar().f()
instance_mock.Bar().f.assert_called_with()
self.assertRaises(AttributeError, getattr, instance_mock.Bar(), 'g')
def test_inherit(self):
class Foo(object):
a = 3
Foo.Foo = Foo
# class
mock = create_autospec(Foo)
instance = mock()
self.assertRaises(AttributeError, getattr, instance, 'b')
attr_instance = mock.Foo()
self.assertRaises(AttributeError, getattr, attr_instance, 'b')
# instance
mock = create_autospec(Foo())
self.assertRaises(AttributeError, getattr, mock, 'b')
self.assertRaises(TypeError, mock)
# attribute instance
call_result = mock.Foo()
self.assertRaises(AttributeError, getattr, call_result, 'b')
def test_builtins(self):
# used to fail with infinite recursion
create_autospec(1)
create_autospec(int)
create_autospec('foo')
create_autospec(str)
create_autospec({})
create_autospec(dict)
create_autospec([])
create_autospec(list)
create_autospec(set())
create_autospec(set)
create_autospec(1.0)
create_autospec(float)
create_autospec(1j)
create_autospec(complex)
create_autospec(False)
create_autospec(True)
def test_function(self):
def f(a, b):
pass
mock = create_autospec(f)
self.assertRaises(TypeError, mock)
mock(1, 2)
mock.assert_called_with(1, 2)
f.f = f
mock = create_autospec(f)
self.assertRaises(TypeError, mock.f)
mock.f(3, 4)
mock.f.assert_called_with(3, 4)
def test_skip_attributeerrors(self):
class Raiser(object):
def __get__(self, obj, type=None):
if obj is None:
raise AttributeError('Can only be accessed via an instance')
class RaiserClass(object):
raiser = Raiser()
@staticmethod
def existing(a, b):
return a + b
s = create_autospec(RaiserClass)
self.assertRaises(TypeError, lambda x: s.existing(1, 2, 3))
s.existing(1, 2)
self.assertRaises(AttributeError, lambda: s.nonexisting)
# check we can fetch the raiser attribute and it has no spec
obj = s.raiser
obj.foo, obj.bar
def test_signature_class(self):
class Foo(object):
def __init__(self, a, b=3):
pass
mock = create_autospec(Foo)
self.assertRaises(TypeError, mock)
mock(1)
mock.assert_called_once_with(1)
mock(4, 5)
mock.assert_called_with(4, 5)
def test_class_with_no_init(self):
# this used to raise an exception
# due to trying to get a signature from object.__init__
class Foo(object):
pass
create_autospec(Foo)
def test_signature_callable(self):
class Callable(object):
def __init__(self):
pass
def __call__(self, a):
pass
mock = create_autospec(Callable)
mock()
mock.assert_called_once_with()
self.assertRaises(TypeError, mock, 'a')
instance = mock()
self.assertRaises(TypeError, instance)
instance(a='a')
instance.assert_called_once_with(a='a')
instance('a')
instance.assert_called_with('a')
mock = create_autospec(Callable())
mock(a='a')
mock.assert_called_once_with(a='a')
self.assertRaises(TypeError, mock)
mock('a')
mock.assert_called_with('a')
def test_signature_noncallable(self):
class NonCallable(object):
def __init__(self):
pass
mock = create_autospec(NonCallable)
instance = mock()
mock.assert_called_once_with()
self.assertRaises(TypeError, mock, 'a')
self.assertRaises(TypeError, instance)
self.assertRaises(TypeError, instance, 'a')
mock = create_autospec(NonCallable())
self.assertRaises(TypeError, mock)
self.assertRaises(TypeError, mock, 'a')
def test_create_autospec_none(self):
class Foo(object):
bar = None
mock = create_autospec(Foo)
none = mock.bar
self.assertNotIsInstance(none, type(None))
none.foo()
none.foo.assert_called_once_with()
def test_autospec_functions_with_self_in_odd_place(self):
class Foo(object):
def f(a, self):
pass
a = create_autospec(Foo)
a.f(self=10)
a.f.assert_called_with(self=10)
def test_autospec_property(self):
class Foo(object):
@property
def foo(self):
return 3
foo = create_autospec(Foo)
mock_property = foo.foo
# no spec on properties
self.assertTrue(isinstance(mock_property, MagicMock))
mock_property(1, 2, 3)
mock_property.abc(4, 5, 6)
mock_property.assert_called_once_with(1, 2, 3)
mock_property.abc.assert_called_once_with(4, 5, 6)
def test_autospec_slots(self):
class Foo(object):
__slots__ = ['a']
foo = create_autospec(Foo)
mock_slot = foo.a
# no spec on slots
mock_slot(1, 2, 3)
mock_slot.abc(4, 5, 6)
mock_slot.assert_called_once_with(1, 2, 3)
mock_slot.abc.assert_called_once_with(4, 5, 6)
class TestCallList(unittest.TestCase):
def test_args_list_contains_call_list(self):
mock = Mock()
self.assertIsInstance(mock.call_args_list, _CallList)
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
for kall in call(1, 2), call(a=3), call(3, 4), call(b=6):
self.assertTrue(kall in mock.call_args_list)
calls = [call(a=3), call(3, 4)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(1, 2), call(a=3)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(3, 4), call(b=6)]
self.assertTrue(calls in mock.call_args_list)
calls = [call(3, 4)]
self.assertTrue(calls in mock.call_args_list)
self.assertFalse(call('fish') in mock.call_args_list)
self.assertFalse([call('fish')] in mock.call_args_list)
def test_call_list_str(self):
mock = Mock()
mock(1, 2)
mock.foo(a=3)
mock.foo.bar().baz('fish', cat='dog')
expected = (
"[call(1, 2),\n"
" call.foo(a=3),\n"
" call.foo.bar(),\n"
" call.foo.bar().baz('fish', cat='dog')]"
)
self.assertEqual(str(mock.mock_calls), expected)
def test_propertymock(self):
p = patch('%s.SomeClass.one' % __name__, new_callable=PropertyMock)
mock = p.start()
try:
SomeClass.one
mock.assert_called_once_with()
s = SomeClass()
s.one
mock.assert_called_with()
self.assertEqual(mock.mock_calls, [call(), call()])
s.one = 3
self.assertEqual(mock.mock_calls, [call(), call(), call(3)])
finally:
p.stop()
def test_propertymock_returnvalue(self):
m = MagicMock()
p = PropertyMock()
type(m).foo = p
returned = m.foo
p.assert_called_once_with()
self.assertIsInstance(returned, MagicMock)
self.assertNotIsInstance(returned, PropertyMock)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
piffey/ansible | lib/ansible/modules/network/ironware/ironware_command.py | 15 | 5154 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ironware_command
version_added: "2.5"
author: "Paul Baker (@paulquack)"
short_description: Run arbitrary commands on Brocade IronWare devices
description:
- Sends arbitrary commands to a Brocade Ironware node and returns the
results read from the device. This module includes a I(wait_for)
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
extends_documentation_fragment: ironware
options:
commands:
description:
- List of commands to send to the remote device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of retries, the task fails.
See examples.
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. If the value
is set to C(all) then all conditionals in the I(wait_for) must be
satisfied. If the value is set to C(any) then only one of the
values must be satisfied.
default: all
choices: ['any', 'all']
retries:
description:
- Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
default: 1
"""
EXAMPLES = """
- ironware_command:
commands:
- show version
- ironware_command:
commands:
- show interfaces brief wide
- show mpls vll
"""
RETURN = """
stdout:
description: the set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: the conditionals that failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.ironware.ironware import ironware_argument_spec, check_args
from ansible.module_utils.network.ironware.ironware import run_commands
from ansible.module_utils.network.common.parsing import Conditional
from ansible.module_utils.six import string_types
def to_lines(stdout):
    """Yield each response; string responses become lists of lines."""
    for response in stdout:
        if not isinstance(response, string_types):
            yield response
        else:
            yield str(response).split('\n')
def main():
    """Module entry point: run commands on the device, waiting on conditions.

    Executes the `commands` list up to `retries` times, sleeping `interval`
    seconds between attempts, until every `wait_for` conditional is
    satisfied (or any one of them, with match=any). Fails the module with
    the unmet conditionals if retries are exhausted; otherwise returns
    stdout/stdout_lines from the last run.
    """
    spec = dict(
        # { command: <str>, prompt: <str>, response: <str> }
        commands=dict(type='list', required=True),
        wait_for=dict(type='list'),
        match=dict(default='all', choices=['all', 'any']),
        retries=dict(default=10, type='int'),
        interval=dict(default=1, type='int')
    )
    # Merge in the shared ironware provider/connection arguments.
    spec.update(ironware_argument_spec)
    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
    check_args(module)
    result = {'changed': False}
    wait_for = module.params['wait_for'] or list()
    conditionals = [Conditional(c) for c in wait_for]
    commands = module.params['commands']
    retries = module.params['retries']
    interval = module.params['interval']
    match = module.params['match']
    while retries > 0:
        responses = run_commands(module, commands)
        # Iterate over a copy so satisfied conditionals can be removed in-place.
        for item in list(conditionals):
            if item(responses):
                if match == 'any':
                    # In 'any' mode one satisfied conditional clears them all.
                    conditionals = list()
                    break
                conditionals.remove(item)
        if not conditionals:
            # All conditions met (or none were given): stop retrying.
            break
        time.sleep(interval)
        retries -= 1
    if conditionals:
        # Retries exhausted with unmet conditions: report which ones failed.
        failed_conditions = [item.raw for item in conditionals]
        msg = 'One or more conditional statements have not been satisfied'
        module.fail_json(msg=msg, failed_conditions=failed_conditions)
    result.update({
        'changed': False,
        'stdout': responses,
        'stdout_lines': list(to_lines(responses))
    })
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
davidbrazdil/nacl | src/trusted/validator_ragel/spec_val.py | 13 | 4965 | # Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import spec
class Validator(object):
  """Base NaCl validator: walks a chunk of decoded instructions and
  collects sandboxing violations as (offset, message) tuples.

  Subclasses supply the bitness-specific pieces: superinstruction
  matching and restricted-register condition tracking.
  """

  # Target bitness (32 or 64); set by subclasses.
  BITNESS = None
  # Longest superinstruction, in component instructions, to try to match.
  MAX_SUPERINSTRUCTION_LENGTH = None

  def ValidateSuperinstruction(self, superinstruction):
    """Validate a candidate superinstruction; raise spec.DoNotMatchError
    if it is not one, spec.SandboxingError if it is unsafe."""
    raise NotImplementedError()

  def FitsWithinBundle(self, insns):
    """Return True if insns lie entirely within one bundle."""
    offset = insns[0].address
    last_byte_offset = offset + sum(len(insn.bytes) for insn in insns) - 1
    # Same bundle iff first and last byte fall in the same BUNDLE_SIZE slot.
    return offset // spec.BUNDLE_SIZE == last_byte_offset // spec.BUNDLE_SIZE

  def CheckConditions(
      self, insns, precondition, postcondition):
    """Check/update the tracked condition across insns (subclass hook)."""
    raise NotImplementedError()

  def CheckFinalCondition(self, end_offset):
    """Check the condition left pending at the end of the chunk."""
    raise NotImplementedError()

  def Validate(self, insns):
    """Validate a list of decoded instructions.

    Returns self.messages, a list of (offset, message) tuples; an empty
    list means the chunk validated cleanly.
    """
    self.messages = []
    self.valid_jump_targets = set()
    self.jumps = {}
    self.condition = spec.Condition()
    i = 0
    while i < len(insns):
      offset = insns[i].address
      # Instruction starts are valid jump targets (may be revoked later
      # by a subclass's CheckConditions).
      self.valid_jump_targets.add(offset)
      try:
        # Greedy: try to match longest superinstructions first.
        for n in range(self.MAX_SUPERINSTRUCTION_LENGTH, 1, -1):
          if i + n > len(insns):
            continue
          try:
            self.ValidateSuperinstruction(insns[i:i+n])
            if not self.FitsWithinBundle(insns[i:i+n]):
              self.messages.append(
                  (offset, 'superinstruction crosses bundle boundary'))
            # Superinstructions neither require nor produce conditions.
            self.CheckConditions(
                insns[i:i+n],
                precondition=spec.Condition(),
                postcondition=spec.Condition())
            i += n
            break
          except spec.DoNotMatchError:
            continue
        else:
          # No superinstruction matched: validate a single instruction.
          try:
            jump_destination, precondition, postcondition = (
                spec.ValidateDirectJumpOrRegularInstruction(
                    insns[i],
                    self.BITNESS))
            if not self.FitsWithinBundle(insns[i:i+1]):
              self.messages.append(
                  (offset, 'instruction crosses bundle boundary'))
            self.CheckConditions(
                insns[i:i+1], precondition, postcondition)
            if jump_destination is not None:
              # Record direct jumps; targets are checked after the walk.
              self.jumps[insns[i].address] = jump_destination
            i += 1
          except spec.DoNotMatchError:
            self.messages.append(
                (offset, 'unrecognized instruction %r' % insns[i].disasm))
            i += 1
      except spec.SandboxingError as e:
        self.messages.append((offset, str(e)))
        i += 1
        # After an error, resume tracking from the default condition.
        self.condition = spec.Condition()
    assert i == len(insns)
    end_offset = insns[-1].address + len(insns[-1].bytes)
    # Falling off the end of the chunk is a legitimate target.
    self.valid_jump_targets.add(end_offset)
    self.CheckFinalCondition(end_offset)
    for source, destination in sorted(self.jumps.items()):
      # Bundle-aligned destinations are always allowed; others must be
      # recorded instruction starts.
      if (destination % spec.BUNDLE_SIZE != 0 and
          destination not in self.valid_jump_targets):
        self.messages.append(
            (source, 'jump into a middle of instruction (0x%x)' % destination))
    return self.messages
class Validator32(Validator):
  """x86-32 validator: conditions are trivial; superinstructions are pairs."""

  BITNESS = 32
  MAX_SUPERINSTRUCTION_LENGTH = 2

  def ValidateSuperinstruction(self, superinstruction):
    """Delegate to the 32-bit superinstruction checker."""
    spec.ValidateSuperinstruction32(superinstruction)

  def CheckConditions(
      self, insns, precondition, postcondition):
    # 32-bit mode never produces restricted-register conditions.
    default = spec.Condition()
    assert precondition == default and postcondition == default

  def CheckFinalCondition(self, end_offset):
    # Nothing can be pending at the end of the chunk in 32-bit mode.
    assert self.condition == spec.Condition()
class Validator64(Validator):
  """x86-64 validator: tracks restricted-register conditions across
  instructions and bundle boundaries."""

  BITNESS = 64
  MAX_SUPERINSTRUCTION_LENGTH = 5

  def ValidateSuperinstruction(self, superinstruction):
    """Delegate to the 64-bit superinstruction checker."""
    spec.ValidateSuperinstruction64(superinstruction)

  def CheckConditions(
      self, insns, precondition, postcondition):
    offset = insns[0].address
    # The condition produced so far must satisfy what insns require.
    if not self.condition.Implies(precondition):
      self.messages.append((offset, self.condition.WhyNotImplies(precondition)))
    # If the precondition is stronger than the default, a jump straight to
    # this instruction would be unsafe, so revoke it as a jump target.
    if not spec.Condition().Implies(precondition):
      self.valid_jump_targets.remove(offset)
    self.condition = postcondition
    end_offset = offset + sum(len(insn.bytes) for insn in insns)
    if end_offset % spec.BUNDLE_SIZE == 0:
      # At the end of bundle we reset condition to default value
      # (because anybody can jump to this point), so we have to check
      # that it's safe to do so.
      if not self.condition.Implies(spec.Condition()):
        self.messages.append((
            end_offset,
            '%s at the end of bundle'
            % self.condition.WhyNotImplies(spec.Condition())))
      self.condition = spec.Condition()

  def CheckFinalCondition(self, end_offset):
    # If chunk ends mid-bundle, we have to check final condition separately.
    if end_offset % spec.BUNDLE_SIZE != 0:
      if not self.condition.Implies(spec.Condition()):
        self.messages.append((
            end_offset,
            '%s at the end of chunk'
            % self.condition.WhyNotImplies(spec.Condition())))
| bsd-3-clause |
chhao91/QGIS | python/plugins/fTools/tools/doGeometry.py | 11 | 49678 | # -*- coding: utf-8 -*-
#-----------------------------------------------------------
#
# fTools
# Copyright (C) 2008-2011 Carson Farmer
# EMAIL: carson.farmer (at) gmail.com
# WEB : http://www.ftools.ca/fTools.html
#
# A collection of data management and analysis tools for vector data
#
#-----------------------------------------------------------
#
# licensed under the terms of GNU GPL 2
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#---------------------------------------------------------------------
from PyQt4.QtCore import QObject, SIGNAL, QFile, QThread, QSettings, QVariant
from PyQt4.QtGui import QDialog, QDialogButtonBox, QMessageBox
from qgis.core import QGis, QgsVectorFileWriter, QgsFeature, QgsGeometry, QgsCoordinateTransform, QgsFields, QgsField, QgsFeatureRequest, QgsPoint, QgsDistanceArea
from ui_frmGeometry import Ui_Dialog
import ftools_utils
import voronoi
from sets import Set
class GeometryDialog(QDialog, Ui_Dialog):
def __init__(self, iface, function):
QDialog.__init__(self, iface.mainWindow())
self.iface = iface
self.setupUi(self)
self.myFunction = function
self.buttonOk = self.buttonBox_2.button(QDialogButtonBox.Ok)
QObject.connect(self.toolOut, SIGNAL("clicked()"), self.outFile)
if self.myFunction == 1:
QObject.connect(self.inShape, SIGNAL("currentIndexChanged( QString )"), self.update)
elif self.myFunction == 5:
QObject.connect(self.chkWriteShapefile, SIGNAL("stateChanged( int )"), self.updateGui)
self.updateGui()
self.manageGui()
self.success = False
self.cancel_close = self.buttonBox_2.button(QDialogButtonBox.Close)
self.progressBar.setValue(0)
def update(self):
self.cmbField.clear()
inputLayer = unicode(self.inShape.currentText())
if inputLayer != "":
changedLayer = ftools_utils.getVectorLayerByName(inputLayer)
changedField = ftools_utils.getFieldList(changedLayer)
for f in changedField:
self.cmbField.addItem(unicode(f.name()))
self.cmbField.addItem("--- " + self.tr("Merge all") + " ---")
def accept(self):
if self.inShape.currentText() == "":
QMessageBox.information(self, self.tr("Geometry"),
self.tr("Please specify input vector layer"))
elif self.outShape.text() == "" and self.myFunction != 5:
QMessageBox.information(self, self.tr("Geometry"),
self.tr("Please specify output shapefile"))
elif self.lineEdit.isVisible() and self.lineEdit.value() < 0.00:
QMessageBox.information(self, self.tr("Geometry"),
self.tr("Please specify valid tolerance value"))
elif self.cmbField.isVisible() and self.cmbField.currentText() == "":
QMessageBox.information(self, self.tr("Geometry"),
self.tr("Please specify valid UID field"))
else:
self.outShape.clear()
self.geometry(self.inShape.currentText(), self.lineEdit.value(),
self.cmbField.currentText())
def outFile(self):
self.outShape.clear()
(self.shapefileName, self.encoding) = ftools_utils.saveDialog(self)
if self.shapefileName is None or self.encoding is None:
return
self.outShape.setText(self.shapefileName)
def manageGui(self):
self.lblField.setVisible(False)
self.cmbField.setVisible(False)
self.lblCalcType.setVisible(False)
self.cmbCalcType.setVisible(False)
self.chkUseSelection.setVisible(False)
self.chkByFeatures.setVisible(False)
self.chkWriteShapefile.setVisible(False)
if self.myFunction == 1: # Singleparts to multipart
self.setWindowTitle(self.tr("Singleparts to multipart"))
self.lineEdit.setVisible(False)
self.label.setVisible(False)
self.lblOutputShapefile.setText(self.tr("Output shapefile"))
self.cmbField.setVisible(True)
self.lblField.setVisible(True)
elif self.myFunction == 2: # Multipart to singleparts
self.setWindowTitle(self.tr("Multipart to singleparts"))
self.lineEdit.setVisible(False)
self.label.setVisible(False)
self.lblOutputShapefile.setText(self.tr("Output shapefile"))
elif self.myFunction == 3: # Extract nodes
self.setWindowTitle(self.tr("Extract nodes"))
self.lineEdit.setVisible(False)
self.label.setVisible(False)
elif self.myFunction == 4: # Polygons to lines
self.setWindowTitle(self.tr("Polygons to lines"))
self.lblOutputShapefile.setText(self.tr("Output shapefile"))
self.label_3.setText(self.tr("Input polygon vector layer"))
self.label.setVisible(False)
self.lineEdit.setVisible(False)
elif self.myFunction == 5: # Export/Add geometry columns
self.setWindowTitle(self.tr("Export/Add geometry columns"))
self.lblOutputShapefile.setText(self.tr("Output shapefile"))
self.label_3.setText(self.tr("Input vector layer"))
self.label.setVisible(False)
self.lineEdit.setVisible(False)
# populate calculation types
self.lblCalcType.setVisible(True)
self.cmbCalcType.setVisible(True)
self.cmbCalcType.addItem(self.tr("Layer CRS"))
self.cmbCalcType.addItem(self.tr("Project CRS"))
self.cmbCalcType.addItem(self.tr("Ellipsoid"))
self.chkWriteShapefile.setVisible(True)
self.chkWriteShapefile.setChecked(False)
self.lblOutputShapefile.setVisible(False)
elif self.myFunction == 7: # Polygon centroids
self.setWindowTitle(self.tr("Polygon centroids"))
self.lblOutputShapefile.setText(self.tr("Output point shapefile"))
self.label_3.setText(self.tr("Input polygon vector layer"))
self.label.setVisible(False)
self.lineEdit.setVisible(False)
else:
if self.myFunction == 8: # Delaunay triangulation
self.setWindowTitle(self.tr("Delaunay triangulation"))
self.label_3.setText(self.tr("Input point vector layer"))
self.label.setVisible(False)
self.lineEdit.setVisible(False)
elif self.myFunction == 10: # Voronoi Polygons
self.setWindowTitle(self.tr("Voronoi polygon"))
self.label_3.setText(self.tr("Input point vector layer"))
self.label.setText(self.tr("Buffer region"))
self.lineEdit.setSuffix(" %")
self.lineEdit.setRange(0, 100)
self.lineEdit.setSingleStep(5)
self.lineEdit.setValue(0)
elif self.myFunction == 11: # Lines to polygons
self.setWindowTitle(self.tr("Lines to polygons"))
self.lblOutputShapefile.setText(self.tr("Output shapefile"))
self.label_3.setText(self.tr("Input line vector layer"))
self.label.setVisible(False)
self.lineEdit.setVisible(False)
else: # Polygon from layer extent
self.setWindowTitle(self.tr("Polygon from layer extent"))
self.label_3.setText(self.tr("Input layer"))
self.label.setVisible(False)
self.lineEdit.setVisible(False)
self.chkByFeatures.setVisible(True)
self.chkUseSelection.setVisible(True)
self.lblOutputShapefile.setText(self.tr("Output polygon shapefile"))
self.resize(381, 100)
self.populateLayers()
def updateGui(self):
    """Enable the output-file widgets only when a shapefile will be written."""
    writing = self.chkWriteShapefile.isChecked()
    for widget in (self.lineEdit, self.toolOut, self.addToCanvasCheck):
        widget.setEnabled(writing)
def populateLayers(self):
self.inShape.clear()
if self.myFunction == 3 or self.myFunction == 6:
myList = ftools_utils.getLayerNames([QGis.Polygon, QGis.Line])
elif self.myFunction == 4 or self.myFunction == 7:
myList = ftools_utils.getLayerNames([QGis.Polygon])
elif self.myFunction == 8 or self.myFunction == 10:
myList = ftools_utils.getLayerNames([QGis.Point])
elif self.myFunction == 9:
myList = ftools_utils.getLayerNames("all")
elif self.myFunction == 11:
myList = ftools_utils.getLayerNames([QGis.Line])
else:
myList = ftools_utils.getLayerNames([QGis.Point, QGis.Line, QGis.Polygon])
self.inShape.addItems(myList)
#1: Singleparts to multipart
#2: Multipart to singleparts
#3: Extract nodes
#4: Polygons to lines
#5: Export/Add geometry columns
#6: Simplify geometries (disabled)
#7: Polygon centroids
#8: Delaunay triangulation
#9: Polygon from layer extent
#10: Voronoi polygons
#11: Lines to polygons
def geometry(self, myLayer, myParam, myField):
if self.myFunction == 9:
vlayer = ftools_utils.getMapLayerByName(myLayer)
else:
vlayer = ftools_utils.getVectorLayerByName(myLayer)
if (self.myFunction == 5 and self.chkWriteShapefile.isChecked()) or self.myFunction != 5:
check = QFile(self.shapefileName)
if check.exists():
if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
QMessageBox.warning(self, self.tr("Geometry"),
self.tr("Unable to delete existing shapefile."))
return
if self.myFunction == 5 and not self.chkWriteShapefile.isChecked():
self.shapefileName = None
self.encoding = None
res = QMessageBox.warning(self, self.tr("Geometry"),
self.tr("Currently QGIS doesn't allow simultaneous access from "
"different threads to the same datasource. Make sure your layer's "
"attribute tables are closed. Continue?"),
QMessageBox.Yes | QMessageBox.No)
if res == QMessageBox.No:
return
self.buttonOk.setEnabled(False)
self.testThread = geometryThread(self.iface.mainWindow(), self, self.myFunction,
vlayer, myParam, myField, self.shapefileName, self.encoding,
self.cmbCalcType.currentIndex(), self.chkWriteShapefile.isChecked(),
self.chkByFeatures.isChecked(), self.chkUseSelection.isChecked())
QObject.connect(self.testThread, SIGNAL("runFinished( PyQt_PyObject )"), self.runFinishedFromThread)
QObject.connect(self.testThread, SIGNAL("runStatus( PyQt_PyObject )"), self.runStatusFromThread)
QObject.connect(self.testThread, SIGNAL("runRange( PyQt_PyObject )"), self.runRangeFromThread)
self.cancel_close.setText(self.tr("Cancel"))
QObject.connect(self.cancel_close, SIGNAL("clicked()"), self.cancelThread)
self.testThread.start()
def cancelThread(self):
    # User pressed Cancel: stop the worker thread and re-enable OK.
    self.testThread.stop()
    self.buttonOk.setEnabled(True)
def runFinishedFromThread(self, success):
self.testThread.stop()
self.buttonOk.setEnabled(True)
extra = ""
if success == "math_error":
QMessageBox.warning(self, self.tr("Geometry"),
self.tr("Error processing specified tolerance!\nPlease choose larger tolerance..."))
if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
QMessageBox.warning(self, self.tr("Geometry"),
self.tr("Unable to delete incomplete shapefile."))
elif success == "attr_error":
QMessageBox.warning(self, self.tr("Geometry"),
self.tr("At least two features must have same attribute value!\nPlease choose another field..."))
if not QgsVectorFileWriter.deleteShapeFile(self.shapefileName):
QMessageBox.warning(self, self.tr("Geometry"),
self.tr("Unable to delete incomplete shapefile."))
else:
if success == "valid_error":
extra = self.tr("One or more features in the output layer may have invalid "
+ "geometry, please check using the check validity tool\n")
success = True
self.cancel_close.setText("Close")
QObject.disconnect(self.cancel_close, SIGNAL("clicked()"), self.cancelThread)
if success:
if (self.myFunction == 5 and self.chkWriteShapefile.isChecked()) or self.myFunction != 5:
if self.addToCanvasCheck.isChecked():
addCanvasCheck = ftools_utils.addShapeToCanvas(unicode(self.shapefileName))
if not addCanvasCheck:
QMessageBox.warning(self, self.tr("Geometry"), self.tr("Error loading output shapefile:\n%s") % (unicode(self.shapefileName)))
self.populateLayers()
else:
QMessageBox.information(self, self.tr("Geometry"), self.tr("Created output shapefile:\n%s\n%s") % (unicode(self.shapefileName), extra))
else:
QMessageBox.information(self, self.tr("Geometry"),
self.tr("Layer '{0}' updated").format(self.inShape.currentText()))
else:
QMessageBox.warning(self, self.tr("Geometry"), self.tr("Error writing output shapefile."))
def runStatusFromThread(self, status):
    # Progress value emitted by the worker thread's runStatus signal.
    self.progressBar.setValue(status)
def runRangeFromThread(self, range_vals):
    # (min, max) pair emitted by the worker thread's runRange signal.
    self.progressBar.setRange(range_vals[0], range_vals[1])
class geometryThread(QThread):
def __init__(self, parentThread, parentObject, function, vlayer, myParam,
             myField, myName, myEncoding, myCalcType, myNewShape, myByFeatures,
             myUseSelection):
    """Store all job parameters; the actual work happens in run().

    function: numeric id of the geometry operation (see run() dispatch).
    vlayer: input vector/map layer.
    myParam: numeric parameter (e.g. Voronoi buffer %); function-specific.
    myField: UID/merge field name (singleparts-to-multipart only).
    myName/myEncoding: output shapefile path and encoding (may be None
        when updating the layer in place).
    myCalcType: 0=layer CRS, 1=project CRS, 2=ellipsoid (geometry columns).
    myNewShape: write a new shapefile instead of updating the layer.
    myByFeatures/myUseSelection: options for the layer-extent function.
    """
    QThread.__init__(self, parentThread)
    self.parent = parentObject
    # Cooperative run flag; set False by stop().
    self.running = False
    self.myFunction = function
    self.vlayer = vlayer
    self.myParam = myParam
    self.myField = myField
    self.myName = myName
    self.myEncoding = myEncoding
    self.myCalcType = myCalcType
    self.writeShape = myNewShape
    self.byFeatures = myByFeatures
    self.useSelection = myUseSelection
def run(self):
self.running = True
if self.myFunction == 1: # Singleparts to multipart
success = self.single_to_multi()
elif self.myFunction == 2: # Multipart to singleparts
success = self.multi_to_single()
elif self.myFunction == 3: # Extract nodes
success = self.extract_nodes()
elif self.myFunction == 4: # Polygons to lines
success = self.polygons_to_lines()
elif self.myFunction == 5: # Export/Add geometry columns
success = self.export_geometry_info()
# note that 6 used to be associated with simplify_geometry
elif self.myFunction == 7: # Polygon centroids
success = self.polygon_centroids()
elif self.myFunction == 8: # Delaunay triangulation
success = self.delaunay_triangulation()
elif self.myFunction == 9: # Polygon from layer extent
if self.byFeatures:
success = self.feature_extent()
else:
success = self.layer_extent()
elif self.myFunction == 10: # Voronoi Polygons
success = self.voronoi_polygons()
elif self.myFunction == 11: # Lines to polygons
success = self.lines_to_polygons()
self.emit(SIGNAL("runFinished( PyQt_PyObject )"), success)
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
    def stop(self):
        # Cooperative cancellation: long-running routines may poll this flag.
        self.running = False
    def single_to_multi(self):
        """Merge single-part features into multi-part features.

        Features are grouped by the value of self.myField (or all merged
        when the "Merge all" pseudo-field is selected).  Returns True,
        "valid_error" when an output geometry is not GEOS-valid, or
        "attr_error" when the grouping field has no duplicate values.
        """
        vprovider = self.vlayer.dataProvider()
        allValid = True
        geomType = self.singleToMultiGeom(vprovider.geometryType())
        writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
                                     geomType, vprovider.crs())
        inFeat = QgsFeature()
        outFeat = QgsFeature()
        inGeom = QgsGeometry()
        outGeom = QgsGeometry()
        index = vprovider.fieldNameIndex(self.myField)
        if not index == -1:
            unique = ftools_utils.getUniqueValues(vprovider, int(index))
        else:
            # Field not found: a single pseudo-group covers every feature.
            unique = [""]
        nFeat = vprovider.featureCount() * len(unique)
        nElement = 0
        self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
        self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
        merge_all = self.myField == "--- " + self.tr("Merge all") + " ---"
        if not len(unique) == self.vlayer.featureCount() or merge_all:
            # One full provider scan per unique value: O(features * groups).
            for i in unique:
                # Strip spaces for strings, so " A " and "A" will be grouped
                # TODO: Make this optional (opt-out to keep it easy for beginners)
                if isinstance(i, basestring):
                    iMod = i.strip()
                else:
                    iMod = i
                multi_feature = []
                first = True
                fit = vprovider.getFeatures()
                while fit.nextFeature(inFeat):
                    atMap = inFeat.attributes()
                    if not merge_all:
                        idVar = atMap[index]
                        if isinstance(idVar, basestring):
                            idVarMod = idVar.strip()
                        else:
                            idVarMod = idVar
                    else:
                        idVarMod = ""
                    if idVarMod == iMod or merge_all:
                        if first:
                            # The first matching feature supplies the attributes.
                            atts = atMap
                            first = False
                        inGeom = QgsGeometry(inFeat.geometry())
                        vType = inGeom.type()
                        feature_list = self.extractAsMulti(inGeom)
                        multi_feature.extend(feature_list)
                    nElement += 1
                    self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
                if not first:
                    outFeat.setAttributes(atts)
                    outGeom = QgsGeometry(self.convertGeometry(multi_feature, vType))
                    if not outGeom.isGeosValid():
                        allValid = "valid_error"
                    outFeat.setGeometry(outGeom)
                    writer.addFeature(outFeat)
            del writer
        else:
            # Every value is unique: nothing to merge.
            return "attr_error"
        return allValid
def multi_to_single(self):
vprovider = self.vlayer.dataProvider()
geomType = self.multiToSingleGeom(vprovider.geometryType())
writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
geomType, vprovider.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nFeat = vprovider.featureCount()
nElement = 0
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
nElement += 1
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
inGeom = inFeat.geometry()
atMap = inFeat.attributes()
featList = self.extractAsSingle(inGeom)
outFeat.setAttributes(atMap)
for i in featList:
outFeat.setGeometry(i)
writer.addFeature(outFeat)
del writer
return True
def extract_nodes(self):
vprovider = self.vlayer.dataProvider()
writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
QGis.WKBPoint, vprovider.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
outGeom = QgsGeometry()
nFeat = vprovider.featureCount()
nElement = 0
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
nElement += 1
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
inGeom = inFeat.geometry()
atMap = inFeat.attributes()
pointList = ftools_utils.extractPoints(inGeom)
outFeat.setAttributes(atMap)
for i in pointList:
outFeat.setGeometry(outGeom.fromPoint(i))
writer.addFeature(outFeat)
del writer
return True
def polygons_to_lines(self):
vprovider = self.vlayer.dataProvider()
writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
QGis.WKBLineString, vprovider.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
outGeom = QgsGeometry()
nFeat = vprovider.featureCount()
nElement = 0
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
nElement += 1
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
inGeom = inFeat.geometry()
atMap = inFeat.attributes()
lineList = self.extractAsLine(inGeom)
outFeat.setAttributes(atMap)
for h in lineList:
outFeat.setGeometry(outGeom.fromPolyline(h))
writer.addFeature(outFeat)
del writer
return True
    def lines_to_polygons(self):
        """Convert (multi)line features to polygon features.

        Lines with fewer than three vertices are dropped by
        remove_bad_lines before the polygon is built.
        """
        vprovider = self.vlayer.dataProvider()
        writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
                                     QGis.WKBPolygon, vprovider.crs())
        inFeat = QgsFeature()
        outFeat = QgsFeature()
        nFeat = vprovider.featureCount()
        nElement = 0
        self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
        self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
        fit = vprovider.getFeatures()
        while fit.nextFeature(inFeat):
            outGeomList = []
            nElement += 1
            self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
            if inFeat.geometry().isMultipart():
                outGeomList = inFeat.geometry().asMultiPolyline()
            else:
                outGeomList.append(inFeat.geometry().asPolyline())
            polyGeom = self.remove_bad_lines(outGeomList)
            # NOTE(review): when polyGeom is empty, setGeometry is skipped but
            # the feature is still written below, reusing outFeat's geometry
            # from the previous iteration — confirm this is intended.
            if len(polyGeom) != 0:
                outFeat.setGeometry(QgsGeometry.fromPolygon(polyGeom))
                atMap = inFeat.attributes()
                outFeat.setAttributes(atMap)
            writer.addFeature(outFeat)
        del writer
        return True
def export_geometry_info(self):
ellips = None
crs = None
coordTransform = None
# calculate with:
# 0 - layer CRS
# 1 - project CRS
# 2 - ellipsoidal
if self.myCalcType == 2:
settings = QSettings()
ellips = settings.value("/qgis/measure/ellipsoid", "WGS84")
crs = self.vlayer.crs().srsid()
elif self.myCalcType == 1:
mapCRS = self.parent.iface.mapCanvas().mapRenderer().destinationCrs()
layCRS = self.vlayer.crs()
coordTransform = QgsCoordinateTransform(layCRS, mapCRS)
inFeat = QgsFeature()
outFeat = QgsFeature()
inGeom = QgsGeometry()
nElement = 0
vprovider = self.vlayer.dataProvider()
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, vprovider.featureCount()))
(fields, index1, index2) = self.checkMeasurementFields(self.vlayer, not self.writeShape)
if self.writeShape:
writer = QgsVectorFileWriter(self.myName, self.myEncoding, fields,
vprovider.geometryType(), vprovider.crs())
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
nElement += 1
inGeom = inFeat.geometry()
if self.myCalcType == 1:
inGeom.transform(coordTransform)
(attr1, attr2) = self.simpleMeasure(inGeom, self.myCalcType, ellips, crs)
if self.writeShape:
outFeat.setGeometry(inGeom)
atMap = inFeat.attributes()
maxIndex = index1 if index1 > index2 else index2
if maxIndex >= len(atMap):
atMap += [""] * (index2 + 1 - len(atMap))
atMap[index1] = attr1
if index1 != index2:
atMap[index2] = attr2
outFeat.setAttributes(atMap)
writer.addFeature(outFeat)
else:
changeMap = {}
changeMap[inFeat.id()] = {}
changeMap[inFeat.id()][index1] = attr1
if index1 != index2:
changeMap[inFeat.id()][index2] = attr2
vprovider.changeAttributeValues(changeMap)
self.vlayer.updateFields()
if self.writeShape:
del writer
return True
def polygon_centroids(self):
vprovider = self.vlayer.dataProvider()
writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
QGis.WKBPoint, vprovider.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
nFeat = vprovider.featureCount()
nElement = 0
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
nElement += 1
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
inGeom = inFeat.geometry()
atMap = inFeat.attributes()
outGeom = inGeom.centroid()
if outGeom is None:
return "math_error"
outFeat.setAttributes(atMap)
outFeat.setGeometry(QgsGeometry(outGeom))
writer.addFeature(outFeat)
del writer
return True
def delaunay_triangulation(self):
import voronoi
from sets import Set
vprovider = self.vlayer.dataProvider()
fields = QgsFields()
fields.append(QgsField("POINTA", QVariant.Double))
fields.append(QgsField("POINTB", QVariant.Double))
fields.append(QgsField("POINTC", QVariant.Double))
writer = QgsVectorFileWriter(self.myName, self.myEncoding, fields,
QGis.WKBPolygon, vprovider.crs())
inFeat = QgsFeature()
c = voronoi.Context()
pts = []
ptDict = {}
ptNdx = -1
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
geom = QgsGeometry(inFeat.geometry())
point = geom.asPoint()
x = point.x()
y = point.y()
pts.append((x, y))
ptNdx += 1
ptDict[ptNdx] = inFeat.id()
if len(pts) < 3:
return False
uniqueSet = Set(item for item in pts)
ids = [pts.index(item) for item in uniqueSet]
sl = voronoi.SiteList([voronoi.Site(*i) for i in uniqueSet])
c.triangulate = True
voronoi.voronoi(sl, c)
triangles = c.triangles
feat = QgsFeature()
nFeat = len(triangles)
nElement = 0
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
for triangle in triangles:
indicies = list(triangle)
indicies.append(indicies[0])
polygon = []
attrs = []
step = 0
for index in indicies:
vprovider.getFeatures(QgsFeatureRequest().setFilterFid(ptDict[ids[index]])).nextFeature(inFeat)
geom = QgsGeometry(inFeat.geometry())
point = QgsPoint(geom.asPoint())
polygon.append(point)
if step <= 3:
attrs.append(ids[index])
step += 1
feat.setAttributes(attrs)
geometry = QgsGeometry().fromPolygon([polygon])
feat.setGeometry(geometry)
writer.addFeature(feat)
nElement += 1
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
del writer
return True
    def voronoi_polygons(self):
        """Build Voronoi polygons around the input points.

        self.myParam is the buffer region expressed as a percentage of the
        layer extent.  Returns False for fewer than three points.

        NOTE(review): unlike delaunay_triangulation, this method does not
        import `voronoi`/`Set` locally — it assumes they are available at
        module scope; confirm against the file header.
        """
        vprovider = self.vlayer.dataProvider()
        writer = QgsVectorFileWriter(self.myName, self.myEncoding, vprovider.fields(),
                                     QGis.WKBPolygon, vprovider.crs())
        inFeat = QgsFeature()
        outFeat = QgsFeature()
        extent = self.vlayer.extent()
        # NOTE(review): extraX is derived from the extent *height* and extraY
        # from the *width* — possibly transposed; confirm intended.
        extraX = extent.height() * (self.myParam / 100.00)
        extraY = extent.width() * (self.myParam / 100.00)
        height = extent.height()
        width = extent.width()
        c = voronoi.Context()
        pts = []
        ptDict = {}     # position in pts -> feature id
        ptNdx = -1
        fit = vprovider.getFeatures()
        while fit.nextFeature(inFeat):
            geom = QgsGeometry(inFeat.geometry())
            point = geom.asPoint()
            # Points are shifted so the extent origin is (0, 0).
            x = point.x() - extent.xMinimum()
            y = point.y() - extent.yMinimum()
            pts.append((x, y))
            ptNdx += 1
            ptDict[ptNdx] = inFeat.id()
        # Release the layer reference; only the provider is used from here on.
        self.vlayer = None
        if len(pts) < 3:
            return False
        uniqueSet = Set(item for item in pts)
        ids = [pts.index(item) for item in uniqueSet]
        sl = voronoi.SiteList([voronoi.Site(i[0], i[1], sitenum=j) for j, i in enumerate(uniqueSet)])
        voronoi.voronoi(sl, c)
        inFeat = QgsFeature()
        nFeat = len(c.polygons)
        nElement = 0
        self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
        self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, nFeat))
        for site, edges in c.polygons.iteritems():
            vprovider.getFeatures(QgsFeatureRequest().setFilterFid(ptDict[ids[site]])).nextFeature(inFeat)
            # Each cell is the convex hull of its clipped edge endpoints.
            lines = self.clip_voronoi(edges, c, width, height, extent, extraX, extraY)
            geom = QgsGeometry.fromMultiPoint(lines)
            geom = QgsGeometry(geom.convexHull())
            outFeat.setGeometry(geom)
            outFeat.setAttributes(inFeat.attributes())
            writer.addFeature(outFeat)
            nElement += 1
            self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
        del writer
        return True
def clip_voronoi(self, edges, c, width, height, extent, exX, exY):
""" Clip voronoi function based on code written for Inkscape
Copyright (C) 2010 Alvin Penner, penner@vaxxine.com
"""
def clip_line(x1, y1, x2, y2, w, h, x, y):
if x1 < 0 - x and x2 < 0 - x:
return [0, 0, 0, 0]
if x1 > w + x and x2 > w + x:
return [0, 0, 0, 0]
if x1 < 0 - x:
y1 = (y1 * x2 - y2 * x1) / (x2 - x1)
x1 = 0 - x
if x2 < 0 - x:
y2 = (y1 * x2 - y2 * x1) / (x2 - x1)
x2 = 0 - x
if x1 > w + x:
y1 = y1 + (w + x - x1) * (y2 - y1) / (x2 - x1)
x1 = w + x
if x2 > w + x:
y2 = y1 + (w + x - x1) * (y2 - y1) / (x2 - x1)
x2 = w + x
if y1 < 0 - y and y2 < 0 - y:
return [0, 0, 0, 0]
if y1 > h + y and y2 > h + y:
return [0, 0, 0, 0]
if x1 == x2 and y1 == y2:
return [0, 0, 0, 0]
if y1 < 0 - y:
x1 = (x1 * y2 - x2 * y1) / (y2 - y1)
y1 = 0 - y
if y2 < 0 - y:
x2 = (x1 * y2 - x2 * y1) / (y2 - y1)
y2 = 0 - y
if y1 > h + y:
x1 = x1 + (h + y - y1) * (x2 - x1) / (y2 - y1)
y1 = h + y
if y2 > h + y:
x2 = x1 + (h + y - y1) * (x2 - x1) / (y2 - y1)
y2 = h + y
return [x1, y1, x2, y2]
lines = []
hasXMin = False
hasYMin = False
hasXMax = False
hasYMax = False
for edge in edges:
if edge[1] >= 0 and edge[2] >= 0: # two vertices
[x1, y1, x2, y2] = clip_line(c.vertices[edge[1]][0], c.vertices[edge[1]][1], c.vertices[edge[2]][0], c.vertices[edge[2]][1], width, height, exX, exY)
elif edge[1] >= 0: # only one vertex
if c.lines[edge[0]][1] == 0: # vertical line
xtemp = c.lines[edge[0]][2] / c.lines[edge[0]][0]
if c.vertices[edge[1]][1] > (height + exY) / 2:
ytemp = height + exY
else:
ytemp = 0 - exX
else:
xtemp = width + exX
ytemp = (c.lines[edge[0]][2] - (width + exX) * c.lines[edge[0]][0]) / c.lines[edge[0]][1]
[x1, y1, x2, y2] = clip_line(c.vertices[edge[1]][0], c.vertices[edge[1]][1], xtemp, ytemp, width, height, exX, exY)
elif edge[2] >= 0: # only one vertex
if c.lines[edge[0]][1] == 0: # vertical line
xtemp = c.lines[edge[0]][2] / c.lines[edge[0]][0]
if c.vertices[edge[2]][1] > (height + exY) / 2:
ytemp = height + exY
else:
ytemp = 0.0 - exY
else:
xtemp = 0.0 - exX
ytemp = c.lines[edge[0]][2] / c.lines[edge[0]][1]
[x1, y1, x2, y2] = clip_line(xtemp, ytemp, c.vertices[edge[2]][0], c.vertices[edge[2]][1], width, height, exX, exY)
if x1 or x2 or y1 or y2:
lines.append(QgsPoint(x1 + extent.xMinimum(), y1 + extent.yMinimum()))
lines.append(QgsPoint(x2 + extent.xMinimum(), y2 + extent.yMinimum()))
if 0 - exX in (x1, x2):
hasXMin = True
if 0 - exY in (y1, y2):
hasYMin = True
if height + exY in (y1, y2):
hasYMax = True
if width + exX in (x1, x2):
hasXMax = True
if hasXMin:
if hasYMax:
lines.append(QgsPoint(extent.xMinimum() - exX, height + extent.yMinimum() + exY))
if hasYMin:
lines.append(QgsPoint(extent.xMinimum() - exX, extent.yMinimum() - exY))
if hasXMax:
if hasYMax:
lines.append(QgsPoint(width + extent.xMinimum() + exX, height + extent.yMinimum() + exY))
if hasYMin:
lines.append(QgsPoint(width + extent.xMinimum() + exX, extent.yMinimum() - exY))
return lines
def layer_extent(self):
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, 0))
fields = QgsFields()
fields.append(QgsField("MINX", QVariant.Double))
fields.append(QgsField("MINY", QVariant.Double))
fields.append(QgsField("MAXX", QVariant.Double))
fields.append(QgsField("MAXY", QVariant.Double))
fields.append(QgsField("CNTX", QVariant.Double))
fields.append(QgsField("CNTY", QVariant.Double))
fields.append(QgsField("AREA", QVariant.Double))
fields.append(QgsField("PERIM", QVariant.Double))
fields.append(QgsField("HEIGHT", QVariant.Double))
fields.append(QgsField("WIDTH", QVariant.Double))
writer = QgsVectorFileWriter(self.myName, self.myEncoding, fields,
QGis.WKBPolygon, self.vlayer.crs())
rect = self.vlayer.extent()
minx = rect.xMinimum()
miny = rect.yMinimum()
maxx = rect.xMaximum()
maxy = rect.yMaximum()
height = rect.height()
width = rect.width()
cntx = minx + (width / 2.0)
cnty = miny + (height / 2.0)
area = width * height
perim = (2 * width) + (2 * height)
rect = [QgsPoint(minx, miny),
QgsPoint(minx, maxy),
QgsPoint(maxx, maxy),
QgsPoint(maxx, miny),
QgsPoint(minx, miny)]
geometry = QgsGeometry().fromPolygon([rect])
feat = QgsFeature()
feat.setGeometry(geometry)
feat.setAttributes([minx,
miny,
maxx,
maxy,
cntx,
cnty,
area,
perim,
height,
width])
writer.addFeature(feat)
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, 100))
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
del writer
return True
def feature_extent(self):
vprovider = self.vlayer.dataProvider()
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), 0)
fields = QgsFields()
fields.append(QgsField("MINX", QVariant.Double))
fields.append(QgsField("MINY", QVariant.Double))
fields.append(QgsField("MAXX", QVariant.Double))
fields.append(QgsField("MAXY", QVariant.Double))
fields.append(QgsField("CNTX", QVariant.Double))
fields.append(QgsField("CNTY", QVariant.Double))
fields.append(QgsField("AREA", QVariant.Double))
fields.append(QgsField("PERIM", QVariant.Double))
fields.append(QgsField("HEIGHT", QVariant.Double))
fields.append(QgsField("WIDTH", QVariant.Double))
writer = QgsVectorFileWriter(self.myName, self.myEncoding, fields,
QGis.WKBPolygon, self.vlayer.crs())
inFeat = QgsFeature()
outFeat = QgsFeature()
nElement = 0
if self.useSelection:
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, self.vlayer.selectedFeatureCount()))
for inFeat in self.vlayer.selectedFeatures():
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
nElement += 1
rect = inFeat.geometry().boundingBox()
minx = rect.xMinimum()
miny = rect.yMinimum()
maxx = rect.xMaximum()
maxy = rect.yMaximum()
height = rect.height()
width = rect.width()
cntx = minx + (width / 2.0)
cnty = miny + (height / 2.0)
area = width * height
perim = (2 * width) + (2 * height)
rect = [QgsPoint(minx, miny),
QgsPoint(minx, maxy),
QgsPoint(maxx, maxy),
QgsPoint(maxx, miny),
QgsPoint(minx, miny)]
geometry = QgsGeometry().fromPolygon([rect])
outFeat.setGeometry(geometry)
outFeat.setAttributes([minx,
miny,
maxx,
maxy,
cntx,
cnty,
area,
perim,
height,
width])
writer.addFeature(outFeat)
else:
self.emit(SIGNAL("runRange( PyQt_PyObject )"), (0, vprovider.featureCount()))
fit = vprovider.getFeatures()
while fit.nextFeature(inFeat):
self.emit(SIGNAL("runStatus( PyQt_PyObject )"), nElement)
nElement += 1
rect = inFeat.geometry().boundingBox()
minx = rect.xMinimum()
miny = rect.yMinimum()
maxx = rect.xMaximum()
maxy = rect.yMaximum()
height = rect.height()
width = rect.width()
cntx = minx + (width / 2.0)
cnty = miny + (height / 2.0)
area = width * height
perim = (2 * width) + (2 * height)
rect = [QgsPoint(minx, miny),
QgsPoint(minx, maxy),
QgsPoint(maxx, maxy),
QgsPoint(maxx, miny),
QgsPoint(minx, miny)]
geometry = QgsGeometry().fromPolygon([rect])
outFeat.setGeometry(geometry)
outFeat.setAttributes([minx,
miny,
maxx,
maxy,
cntx,
cnty,
area,
perim,
height,
width])
writer.addFeature(outFeat)
del writer
return True
    def simpleMeasure(self, inGeom, calcType, ellips, crs):
        """Return a pair of measurements for a geometry.

        Points: (x, y).  Multi-points: (x, y) of the first point.
        Polygons: (area, perimeter).  Other types: (measure, measure).
        calcType 2 switches QgsDistanceArea to ellipsoidal mode using the
        given ellipsoid and source CRS id.
        """
        if inGeom.wkbType() in (QGis.WKBPoint, QGis.WKBPoint25D):
            pt = inGeom.asPoint()
            attr1 = pt.x()
            attr2 = pt.y()
        elif inGeom.wkbType() in (QGis.WKBMultiPoint, QGis.WKBMultiPoint25D):
            # Only the first point of a multi-point is reported.
            pt = inGeom.asMultiPoint()
            attr1 = pt[0].x()
            attr2 = pt[0].y()
        else:
            measure = QgsDistanceArea()
            if calcType == 2:
                measure.setSourceCrs(crs)
                measure.setEllipsoid(ellips)
                measure.setEllipsoidalMode(True)
            attr1 = measure.measure(inGeom)
            if inGeom.type() == QGis.Polygon:
                attr2 = self.perimMeasure(inGeom, measure)
            else:
                # Lines: length is reported in both columns.
                attr2 = attr1
        return (attr1, attr2)
def perimMeasure(self, inGeom, measure):
value = 0.00
if inGeom.isMultipart():
poly = inGeom.asMultiPolygon()
for k in poly:
for j in k:
value = value + measure.measureLine(j)
else:
poly = inGeom.asPolygon()
for k in poly:
value = value + measure.measureLine(k)
return value
def doubleFieldIndex(self, name, desc, fieldList):
i = 0
for f in fieldList:
if name == f.name().upper():
return (i, fieldList)
i += 1
fieldList.append(QgsField(name, QVariant.Double, "double precision", 21, 6, desc))
return (len(fieldList) - 1, fieldList)
    def checkMeasurementFields(self, vlayer, add):
        """Ensure the measurement columns for the layer's geometry type exist.

        Returns (fieldList, index1, index2) where the indexes point at the
        two measurement columns (identical for line layers).  When `add` is
        True, newly created fields are also added to the data provider.
        """
        vprovider = vlayer.dataProvider()
        geomType = vlayer.geometryType()
        fieldList = vprovider.fields()
        idx = len(fieldList)  # remember where the pre-existing fields end
        if geomType == QGis.Polygon:
            (index1, fieldList) = self.doubleFieldIndex("AREA", self.tr("Polygon area"), fieldList)
            (index2, fieldList) = self.doubleFieldIndex("PERIMETER", self.tr("Polygon perimeter"), fieldList)
        elif geomType == QGis.Line:
            (index1, fieldList) = self.doubleFieldIndex("LENGTH", self.tr("Line length"), fieldList)
            index2 = index1
        else:
            (index1, fieldList) = self.doubleFieldIndex("XCOORD", self.tr("Point x ordinate"), fieldList)
            (index2, fieldList) = self.doubleFieldIndex("YCOORD", self.tr("Point y ordinate"), fieldList)
        if add and idx < len(fieldList):
            # Push only the fields appended above into the provider.
            newFields = []
            for i in range(idx, len(fieldList)):
                newFields.append(fieldList[i])
            vprovider.addAttributes(newFields)
        return (fieldList, index1, index2)
def extractAsLine(self, geom):
multi_geom = QgsGeometry()
temp_geom = []
if geom.type() == 2:
if geom.isMultipart():
multi_geom = geom.asMultiPolygon()
for i in multi_geom:
temp_geom.extend(i)
else:
multi_geom = geom.asPolygon()
temp_geom = multi_geom
return temp_geom
else:
return []
def remove_bad_lines(self, lines):
temp_geom = []
if len(lines) == 1:
if len(lines[0]) > 2:
temp_geom = lines
else:
temp_geom = []
else:
temp_geom = [elem for elem in lines if len(elem) > 2]
return temp_geom
    def singleToMultiGeom(self, wkbType):
        """Map any point/line/polygon WKB type to its multi-part variant.

        Unknown types map to QGis.WKBUnknown; on error prints the message
        and (implicitly) returns None.
        """
        try:
            if wkbType in (QGis.WKBPoint, QGis.WKBMultiPoint,
                           QGis.WKBPoint25D, QGis.WKBMultiPoint25D):
                return QGis.WKBMultiPoint
            elif wkbType in (QGis.WKBLineString, QGis.WKBMultiLineString,
                             QGis.WKBMultiLineString25D, QGis.WKBLineString25D):
                return QGis.WKBMultiLineString
            elif wkbType in (QGis.WKBPolygon, QGis.WKBMultiPolygon,
                             QGis.WKBMultiPolygon25D, QGis.WKBPolygon25D):
                return QGis.WKBMultiPolygon
            else:
                return QGis.WKBUnknown
        except Exception as err:
            print unicode(err)
    def multiToSingleGeom(self, wkbType):
        """Map any point/line/polygon WKB type to its single-part variant.

        Unknown types map to QGis.WKBUnknown; on error prints the message
        and (implicitly) returns None.
        """
        try:
            if wkbType in (QGis.WKBPoint, QGis.WKBMultiPoint,
                           QGis.WKBPoint25D, QGis.WKBMultiPoint25D):
                return QGis.WKBPoint
            elif wkbType in (QGis.WKBLineString, QGis.WKBMultiLineString,
                             QGis.WKBMultiLineString25D, QGis.WKBLineString25D):
                return QGis.WKBLineString
            elif wkbType in (QGis.WKBPolygon, QGis.WKBMultiPolygon,
                             QGis.WKBMultiPolygon25D, QGis.WKBPolygon25D):
                return QGis.WKBPolygon
            else:
                return QGis.WKBUnknown
        except Exception as err:
            print unicode(err)
def extractAsSingle(self, geom):
multi_geom = QgsGeometry()
temp_geom = []
if geom.type() == 0:
if geom.isMultipart():
multi_geom = geom.asMultiPoint()
for i in multi_geom:
temp_geom.append(QgsGeometry().fromPoint(i))
else:
temp_geom.append(geom)
elif geom.type() == 1:
if geom.isMultipart():
multi_geom = geom.asMultiPolyline()
for i in multi_geom:
temp_geom.append(QgsGeometry().fromPolyline(i))
else:
temp_geom.append(geom)
elif geom.type() == 2:
if geom.isMultipart():
multi_geom = geom.asMultiPolygon()
for i in multi_geom:
temp_geom.append(QgsGeometry().fromPolygon(i))
else:
temp_geom.append(geom)
return temp_geom
def extractAsMulti(self, geom):
if geom.type() == 0:
if geom.isMultipart():
return geom.asMultiPoint()
else:
return [geom.asPoint()]
elif geom.type() == 1:
if geom.isMultipart():
return geom.asMultiPolyline()
else:
return [geom.asPolyline()]
else:
if geom.isMultipart():
return geom.asMultiPolygon()
else:
return [geom.asPolygon()]
def convertGeometry(self, geom_list, vType):
if vType == 0:
return QgsGeometry().fromMultiPoint(geom_list)
elif vType == 1:
return QgsGeometry().fromMultiPolyline(geom_list)
else:
return QgsGeometry().fromMultiPolygon(geom_list)
| gpl-2.0 |
fabada/pootle | pootle/apps/pootle_store/urls.py | 4 | 1777 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
from django.conf.urls import patterns, url
# URL routes for pootle_store.  String view names below are resolved
# relative to the 'pootle_store.views' prefix given to patterns().
urlpatterns = patterns('pootle_store.views',
    # permalinks
    url(r'^unit/(?P<uid>[0-9]+)/?$',
        'permalink_redirect',
        name='pootle-unit-permalink'),

    # XHR
    url(r'^xhr/stats/checks/?$',
        'get_qualitycheck_stats',
        name='pootle-xhr-stats-checks'),
    url(r'^xhr/stats/?$',
        'get_stats',
        name='pootle-xhr-stats'),

    url(r'^xhr/units/?$',
        'get_units',
        name='pootle-xhr-units'),
    url(r'^xhr/units/(?P<uid>[0-9]+)/?$',
        'submit',
        name='pootle-xhr-units-submit'),
    url(r'^xhr/units/(?P<uid>[0-9]+)/comment/?$',
        'comment',
        name='pootle-xhr-units-comment'),
    url(r'^xhr/units/(?P<uid>[0-9]+)/context/?$',
        'get_more_context',
        name='pootle-xhr-units-context'),
    url(r'^xhr/units/(?P<uid>[0-9]+)/edit/?$',
        'get_edit_unit',
        name='pootle-xhr-units-edit'),
    url(r'^xhr/units/(?P<uid>[0-9]+)/timeline/?$',
        'timeline',
        name='pootle-xhr-units-timeline'),

    # Suggestion management nests under a unit id.
    url(r'^xhr/units/(?P<uid>[0-9]+)/suggestions/?$',
        'suggest',
        name='pootle-xhr-units-suggest'),
    url(r'^xhr/units/(?P<uid>[0-9]+)/suggestions/(?P<sugg_id>[0-9]+)/?$',
        'manage_suggestion',
        name='pootle-xhr-units-suggest-manage'),

    url(r'^xhr/units/(?P<uid>[0-9]+)/checks/(?P<check_id>[0-9]+)/toggle/?$',
        'toggle_qualitycheck',
        name='pootle-xhr-units-checks-toggle'),
)
| gpl-3.0 |
encbladexp/ansible | lib/ansible/modules/set_stats.py | 15 | 1940 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Ansible RedHat, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible module documentation block (parsed as YAML by ansible-doc).
DOCUMENTATION = r'''
---
module: set_stats
short_description: Define and display stats for the current ansible run
description:
     - This module allows setting/accumulating stats on the current ansible run, either per host or for all hosts in the run.
     - This module is also supported for Windows targets.
author: Brian Coca (@bcoca)
options:
  data:
    description:
      - A dictionary of which each key represents a stat (or variable) you want to keep track of.
    type: dict
    required: true
  per_host:
    description:
        - whether the stats are per host or for all hosts in the run.
    type: bool
    default: no
  aggregate:
    description:
        - Whether the provided value is aggregated to the existing stat C(yes) or will replace it C(no).
    type: bool
    default: yes
notes:
    - In order for custom stats to be displayed, you must set C(show_custom_stats) in section C([defaults]) in C(ansible.cfg)
      or by defining environment variable C(ANSIBLE_SHOW_CUSTOM_STATS) to C(yes).
    - This module is also supported for Windows targets.
version_added: "2.3"
'''

# Usage examples rendered by ansible-doc (YAML).
EXAMPLES = r'''
- name: Aggregating packages_installed stat per host
  ansible.builtin.set_stats:
    data:
      packages_installed: 31
    per_host: yes

- name: Aggregating random stats for all hosts using complex arguments
  ansible.builtin.set_stats:
    data:
      one_stat: 11
      other_stat: "{{ local_var * 2 }}"
      another_stat: "{{ some_registered_var.results | map(attribute='ansible_facts.some_fact') | list }}"
    per_host: no

- name: Setting stats (not aggregating)
  ansible.builtin.set_stats:
    data:
      the_answer: 42
    aggregate: no
'''
brenton/openshift-ansible | roles/lib_openshift/src/lib/deploymentconfig.py | 58 | 10726 | # pylint: skip-file
# flake8: noqa
# pylint: disable=too-many-public-methods
class DeploymentConfig(Yedit):
    ''' Class to model an openshift DeploymentConfig'''
    # Skeleton object used by __init__ when no content is supplied.
    default_deployment_config = '''
apiVersion: v1
kind: DeploymentConfig
metadata:
  name: default_dc
  namespace: default
spec:
  replicas: 0
  selector:
    default_dc: default_dc
  strategy:
    resources: {}
    rollingParams:
      intervalSeconds: 1
      maxSurge: 0
      maxUnavailable: 25%
      timeoutSeconds: 600
      updatePercent: -25
      updatePeriodSeconds: 1
    type: Rolling
  template:
    metadata:
    spec:
      containers:
      - env:
        - name: default
          value: default
        image: default
        imagePullPolicy: IfNotPresent
        name: default_dc
        ports:
        - containerPort: 8000
          hostPort: 8000
          protocol: TCP
          name: default_port
        resources: {}
        terminationMessagePath: /dev/termination-log
      dnsPolicy: ClusterFirst
      hostNetwork: true
      nodeSelector:
        type: compute
      restartPolicy: Always
      securityContext: {}
      serviceAccount: default
      serviceAccountName: default
      terminationGracePeriodSeconds: 30
  triggers:
  - type: ConfigChange
'''

    # Dot-paths into the underlying YAML, used by the accessors below.
    replicas_path = "spec.replicas"
    env_path = "spec.template.spec.containers[0].env"
    volumes_path = "spec.template.spec.volumes"
    container_path = "spec.template.spec.containers"
    volume_mounts_path = "spec.template.spec.containers[0].volumeMounts"
    def __init__(self, content=None):
        ''' Constructor for deploymentconfig '''
        if not content:
            # Fall back to the canned skeleton when no YAML is provided.
            content = DeploymentConfig.default_deployment_config

        super(DeploymentConfig, self).__init__(content=content)
def add_env_value(self, key, value):
''' add key, value pair to env array '''
rval = False
env = self.get_env_vars()
if env:
env.append({'name': key, 'value': value})
rval = True
else:
result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value})
rval = result[0]
return rval
def exists_env_value(self, key, value):
''' return whether a key, value pair exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key and result['value'] == value:
return True
return False
def exists_env_key(self, key):
''' return whether a key, value pair exists '''
results = self.get_env_vars()
if not results:
return False
for result in results:
if result['name'] == key:
return True
return False
def get_env_var(self, key):
'''return a environment variables '''
results = self.get(DeploymentConfig.env_path) or []
if not results:
return None
for env_var in results:
if env_var['name'] == key:
return env_var
return None
    def get_env_vars(self):
        '''return a environment variables '''
        # Always return a list, even when the env path is absent.
        return self.get(DeploymentConfig.env_path) or []
def delete_env_var(self, keys):
'''delete a list of keys '''
if not isinstance(keys, list):
keys = [keys]
env_vars_array = self.get_env_vars()
modified = False
idx = None
for key in keys:
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
if idx:
modified = True
del env_vars_array[idx]
if modified:
return True
return False
def update_env_var(self, key, value):
'''place an env in the env var list'''
env_vars_array = self.get_env_vars()
idx = None
for env_idx, env_var in enumerate(env_vars_array):
if env_var['name'] == key:
idx = env_idx
break
if idx:
env_vars_array[idx]['value'] = value
else:
self.add_env_value(key, value)
return True
def exists_volume_mount(self, volume_mount):
''' return whether a volume mount exists '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts:
return False
volume_mount_found = False
for exist_volume_mount in exist_volume_mounts:
if exist_volume_mount['name'] == volume_mount['name']:
volume_mount_found = True
break
return volume_mount_found
def exists_volume(self, volume):
''' return whether a volume exists '''
exist_volumes = self.get_volumes()
volume_found = False
for exist_volume in exist_volumes:
if exist_volume['name'] == volume['name']:
volume_found = True
break
return volume_found
def find_volume_by_name(self, volume, mounts=False):
''' return the index of a volume '''
volumes = []
if mounts:
volumes = self.get_volume_mounts()
else:
volumes = self.get_volumes()
for exist_volume in volumes:
if exist_volume['name'] == volume['name']:
return exist_volume
return None
    def get_replicas(self):
        ''' return replicas setting '''
        return self.get(DeploymentConfig.replicas_path)
    def get_volume_mounts(self):
        '''return volume mount information '''
        # Delegates to get_volumes with mounts=True.
        return self.get_volumes(mounts=True)
    def get_volumes(self, mounts=False):
        '''return volume mount information '''
        # mounts=True reads the container's volumeMounts; otherwise the
        # pod-level volumes.  Always returns a list.
        if mounts:
            return self.get(DeploymentConfig.volume_mounts_path) or []

        return self.get(DeploymentConfig.volumes_path) or []
def delete_volume_by_name(self, volume):
'''delete a volume '''
modified = False
exist_volume_mounts = self.get_volume_mounts()
exist_volumes = self.get_volumes()
del_idx = None
for idx, exist_volume in enumerate(exist_volumes):
if 'name' in exist_volume and exist_volume['name'] == volume['name']:
del_idx = idx
break
if del_idx != None:
del exist_volumes[del_idx]
modified = True
del_idx = None
for idx, exist_volume_mount in enumerate(exist_volume_mounts):
if 'name' in exist_volume_mount and exist_volume_mount['name'] == volume['name']:
del_idx = idx
break
if del_idx != None:
del exist_volume_mounts[idx]
modified = True
return modified
def add_volume_mount(self, volume_mount):
''' add a volume or volume mount to the proper location '''
exist_volume_mounts = self.get_volume_mounts()
if not exist_volume_mounts and volume_mount:
self.put(DeploymentConfig.volume_mounts_path, [volume_mount])
else:
exist_volume_mounts.append(volume_mount)
def add_volume(self, volume):
''' add a volume or volume mount to the proper location '''
exist_volumes = self.get_volumes()
if not volume:
return
if not exist_volumes:
self.put(DeploymentConfig.volumes_path, [volume])
else:
exist_volumes.append(volume)
    def update_replicas(self, replicas):
        '''Write *replicas* to DeploymentConfig.replicas_path (spec replica count).'''
        self.put(DeploymentConfig.replicas_path, replicas)
def update_volume(self, volume):
'''place an env in the env var list'''
exist_volumes = self.get_volumes()
if not volume:
return False
# update the volume
update_idx = None
for idx, exist_vol in enumerate(exist_volumes):
if exist_vol['name'] == volume['name']:
update_idx = idx
break
if update_idx != None:
exist_volumes[update_idx] = volume
else:
self.add_volume(volume)
return True
def update_volume_mount(self, volume_mount):
'''place an env in the env var list'''
modified = False
exist_volume_mounts = self.get_volume_mounts()
if not volume_mount:
return False
# update the volume mount
for exist_vol_mount in exist_volume_mounts:
if exist_vol_mount['name'] == volume_mount['name']:
if 'mountPath' in exist_vol_mount and \
str(exist_vol_mount['mountPath']) != str(volume_mount['mountPath']):
exist_vol_mount['mountPath'] = volume_mount['mountPath']
modified = True
break
if not modified:
self.add_volume_mount(volume_mount)
modified = True
return modified
    def needs_update_volume(self, volume, volume_mount):
        '''Return True when the existing volume/volume mount differ from the
        desired *volume* / *volume_mount* and an update is required.
        '''
        # NOTE(review): find_volume_by_name returns None when the volume is
        # absent, which would raise TypeError below -- callers apparently
        # only invoke this when the volume already exists; confirm.
        exist_volume = self.find_volume_by_name(volume)
        exist_volume_mount = self.find_volume_by_name(volume, mounts=True)
        # Collect per-field equality checks; any False means update needed.
        results = []
        results.append(exist_volume['name'] == volume['name'])
        if 'secret' in volume:
            results.append('secret' in exist_volume)
            results.append(exist_volume['secret']['secretName'] == volume['secret']['secretName'])
            results.append(exist_volume_mount['name'] == volume_mount['name'])
            results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
        elif 'emptyDir' in volume:
            results.append(exist_volume_mount['name'] == volume['name'])
            results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath'])
        elif 'persistentVolumeClaim' in volume:
            pvc = 'persistentVolumeClaim'
            results.append(pvc in exist_volume)
            # only compare claim fields when the existing volume has a PVC
            if results[-1]:
                results.append(exist_volume[pvc]['claimName'] == volume[pvc]['claimName'])
                if 'claimSize' in volume[pvc]:
                    results.append(exist_volume[pvc]['claimSize'] == volume[pvc]['claimSize'])
        elif 'hostpath' in volume:
            # NOTE(review): this branch tests the lowercase key 'hostpath' on
            # the desired volume but reads 'hostPath' from the existing one,
            # and compares the host path against the mount path.  Verify how
            # callers build these dicts -- this looks like a latent mismatch.
            results.append('hostPath' in exist_volume)
            results.append(exist_volume['hostPath']['path'] == volume_mount['mountPath'])
        return not all(results)
def needs_update_replicas(self, replicas):
''' verify whether a replica update is needed '''
current_reps = self.get(DeploymentConfig.replicas_path)
return not current_reps == replicas
| apache-2.0 |
henriquejensen/CheckIO | Home/Speech_Module.py | 1 | 1721 | FIRST_TEN = ["one", "two", "three", "four", "five", "six", "seven",
"eight", "nine"]
SECOND_TEN = ["ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen",
"sixteen", "seventeen", "eighteen", "nineteen"]
OTHER_TENS = ["twenty", "thirty", "forty", "fifty", "sixty", "seventy",
"eighty", "ninety"]
HUNDRED = "hundred"
def checkio(number):
    """Return *number* (1-999) spelled out in English words.

    Examples: 4 -> 'four', 40 -> 'forty', 143 -> 'one hundred forty three'.
    """
    # Local word tables keep the function self-contained; the module-level
    # FIRST_TEN / SECOND_TEN / OTHER_TENS / HUNDRED constants remain for
    # any other importer.
    under_twenty = [
        '', 'one', 'two', 'three', 'four', 'five', 'six', 'seven',
        'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen',
        'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen',
        'nineteen']
    tens_words = ['twenty', 'thirty', 'forty', 'fifty', 'sixty',
                  'seventy', 'eighty', 'ninety']
    if number < 20:
        return under_twenty[number]
    if number < 100:
        tens_digit, unit = divmod(number, 10)
        name = tens_words[tens_digit - 2]
        if unit:
            name += ' ' + under_twenty[unit]
        return name
    hundreds, remainder = divmod(number, 100)
    name = under_twenty[hundreds] + ' hundred'
    if remainder:
        # The original indexed the teens with int(str(number)[1]) + 11,
        # which evaluates to 12 for every remainder 10-19, rendering all of
        # them as 'twelve' (112/212 passed only by coincidence).  Recursing
        # on the remainder handles 1-99 correctly.
        name += ' ' + checkio(remainder)
    return name
if __name__ == '__main__':
    # These asserts are used only for self-checking and are not necessary
    # for auto-testing; they cover single words, teens, tens and hundreds.
    assert checkio(4) == 'four'
    assert checkio(133) == 'one hundred thirty three'
    assert checkio(12) == 'twelve'
    assert checkio(101) == 'one hundred one'
    assert checkio(212) == 'two hundred twelve'
    assert checkio(40) == 'forty'
    assert not checkio(212).endswith(' ')
| mit |
tsotetsi/django-seed | src/project/settings.py | 1 | 3192 | """
Django settings for project project.
Generated by 'django-admin startproject' using Django 1.8.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from django.contrib import admin
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '50d-f_v10t^1s*hdd(yje1jd=&ch6^b0!^ur(1h#a4tnb-kqb0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'project',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Email Configurations.
PRELAUNCH_EMAIL = 'prelaunch@example.com'
EMAIL_HOST = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_HOST_USER = ''
EMAIL_PORT = ''
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
# Use console backend during development.
if DEBUG:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = False
USE_L10N = False
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = 'static/'
STATIC_URL = '/static/'
MEDIA_ROOT = 'media/'
MEDIA_URL = '/media/'
# Django admin page headers.
admin.site.site_header = 'django-seed'
admin.site.site_title = 'django-seed'
| mit |
molobrakos/home-assistant | tests/components/config/test_zwave.py | 12 | 16965 | """Test Z-Wave config panel."""
import asyncio
import json
from unittest.mock import MagicMock, patch
import pytest
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from homeassistant.components.zwave import DATA_NETWORK, const
from tests.mock.zwave import MockNode, MockValue, MockEntityValues
VIEW_NAME = 'api:config:zwave:device_config'
@pytest.fixture
def client(loop, hass, hass_client):
    """Client to communicate with Z-Wave config views."""
    # Restrict the config component to the zwave panel so only its views are
    # registered, then return an authenticated aiohttp test client.
    with patch.object(config, 'SECTIONS', ['zwave']):
        loop.run_until_complete(async_setup_component(hass, 'config', {}))
    return loop.run_until_complete(hass_client())
async def test_get_device_config(client):
    """Test getting device config.

    Modernized from the deprecated @asyncio.coroutine / yield-from style to
    native async/await, consistent with the protection tests in this file.
    """
    def mock_read(path):
        """Mock reading data."""
        return {
            'hello.beer': {
                'free': 'beer',
            },
            'other.entity': {
                'do': 'something',
            },
        }

    with patch('homeassistant.components.config._read', mock_read):
        resp = await client.get(
            '/api/config/zwave/device_config/hello.beer')
    assert resp.status == 200
    result = await resp.json()
    assert result == {'free': 'beer'}
@asyncio.coroutine
def test_update_device_config(client):
"""Test updating device config."""
orig_data = {
'hello.beer': {
'ignored': True,
},
'other.entity': {
'polling_intensity': 2,
},
}
def mock_read(path):
"""Mock reading data."""
return orig_data
written = []
def mock_write(path, data):
"""Mock writing data."""
written.append(data)
with patch('homeassistant.components.config._read', mock_read), \
patch('homeassistant.components.config._write', mock_write):
resp = yield from client.post(
'/api/config/zwave/device_config/hello.beer', data=json.dumps({
'polling_intensity': 2
}))
assert resp.status == 200
result = yield from resp.json()
assert result == {'result': 'ok'}
orig_data['hello.beer']['polling_intensity'] = 2
assert written[0] == orig_data
@asyncio.coroutine
def test_update_device_config_invalid_key(client):
"""Test updating device config."""
resp = yield from client.post(
'/api/config/zwave/device_config/invalid_entity', data=json.dumps({
'polling_intensity': 2
}))
assert resp.status == 400
@asyncio.coroutine
def test_update_device_config_invalid_data(client):
"""Test updating device config."""
resp = yield from client.post(
'/api/config/zwave/device_config/hello.beer', data=json.dumps({
'invalid_option': 2
}))
assert resp.status == 400
@asyncio.coroutine
def test_update_device_config_invalid_json(client):
"""Test updating device config."""
resp = yield from client.post(
'/api/config/zwave/device_config/hello.beer', data='not json')
assert resp.status == 400
@asyncio.coroutine
def test_get_values(hass, client):
"""Test getting values on node."""
node = MockNode(node_id=1)
value = MockValue(value_id=123456, node=node, label='Test Label',
instance=1, index=2, poll_intensity=4)
values = MockEntityValues(primary=value)
node2 = MockNode(node_id=2)
value2 = MockValue(value_id=234567, node=node2, label='Test Label 2')
values2 = MockEntityValues(primary=value2)
hass.data[const.DATA_ENTITY_VALUES] = [values, values2]
resp = yield from client.get('/api/zwave/values/1')
assert resp.status == 200
result = yield from resp.json()
assert result == {
'123456': {
'label': 'Test Label',
'instance': 1,
'index': 2,
'poll_intensity': 4,
}
}
@asyncio.coroutine
def test_get_groups(hass, client):
"""Test getting groupdata on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
node.groups.associations = 'assoc'
node.groups.associations_instances = 'inst'
node.groups.label = 'the label'
node.groups.max_associations = 'max'
node.groups = {1: node.groups}
network.nodes = {2: node}
resp = yield from client.get('/api/zwave/groups/2')
assert resp.status == 200
result = yield from resp.json()
assert result == {
'1': {
'association_instances': 'inst',
'associations': 'assoc',
'label': 'the label',
'max_associations': 'max'
}
}
@asyncio.coroutine
def test_get_groups_nogroups(hass, client):
"""Test getting groupdata on node with no groups."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
network.nodes = {2: node}
resp = yield from client.get('/api/zwave/groups/2')
assert resp.status == 200
result = yield from resp.json()
assert result == {}
@asyncio.coroutine
def test_get_groups_nonode(hass, client):
"""Test getting groupdata on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
network.nodes = {1: 1, 5: 5}
resp = yield from client.get('/api/zwave/groups/2')
assert resp.status == 404
result = yield from resp.json()
assert result == {'message': 'Node not found'}
@asyncio.coroutine
def test_get_config(hass, client):
"""Test getting config on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
value = MockValue(
index=12,
command_class=const.COMMAND_CLASS_CONFIGURATION)
value.label = 'label'
value.help = 'help'
value.type = 'type'
value.data = 'data'
value.data_items = ['item1', 'item2']
value.max = 'max'
value.min = 'min'
node.values = {12: value}
network.nodes = {2: node}
node.get_values.return_value = node.values
resp = yield from client.get('/api/zwave/config/2')
assert resp.status == 200
result = yield from resp.json()
assert result == {'12': {'data': 'data',
'data_items': ['item1', 'item2'],
'help': 'help',
'label': 'label',
'max': 'max',
'min': 'min',
'type': 'type'}}
@asyncio.coroutine
def test_get_config_noconfig_node(hass, client):
"""Test getting config on node without config."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=2)
network.nodes = {2: node}
node.get_values.return_value = node.values
resp = yield from client.get('/api/zwave/config/2')
assert resp.status == 200
result = yield from resp.json()
assert result == {}
@asyncio.coroutine
def test_get_config_nonode(hass, client):
"""Test getting config on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
network.nodes = {1: 1, 5: 5}
resp = yield from client.get('/api/zwave/config/2')
assert resp.status == 404
result = yield from resp.json()
assert result == {'message': 'Node not found'}
@asyncio.coroutine
def test_get_usercodes_nonode(hass, client):
"""Test getting usercodes on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
network.nodes = {1: 1, 5: 5}
resp = yield from client.get('/api/zwave/usercodes/2')
assert resp.status == 404
result = yield from resp.json()
assert result == {'message': 'Node not found'}
@asyncio.coroutine
def test_get_usercodes(hass, client):
"""Test getting usercodes on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18,
command_classes=[const.COMMAND_CLASS_USER_CODE])
value = MockValue(
index=0,
command_class=const.COMMAND_CLASS_USER_CODE)
value.genre = const.GENRE_USER
value.label = 'label'
value.data = '1234'
node.values = {0: value}
network.nodes = {18: node}
node.get_values.return_value = node.values
resp = yield from client.get('/api/zwave/usercodes/18')
assert resp.status == 200
result = yield from resp.json()
assert result == {'0': {'code': '1234',
'label': 'label',
'length': 4}}
@asyncio.coroutine
def test_get_usercode_nousercode_node(hass, client):
"""Test getting usercodes on node without usercodes."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18)
network.nodes = {18: node}
node.get_values.return_value = node.values
resp = yield from client.get('/api/zwave/usercodes/18')
assert resp.status == 200
result = yield from resp.json()
assert result == {}
@asyncio.coroutine
def test_get_usercodes_no_genreuser(hass, client):
"""Test getting usercodes on node missing genre user."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18,
command_classes=[const.COMMAND_CLASS_USER_CODE])
value = MockValue(
index=0,
command_class=const.COMMAND_CLASS_USER_CODE)
value.genre = const.GENRE_SYSTEM
value.label = 'label'
value.data = '1234'
node.values = {0: value}
network.nodes = {18: node}
node.get_values.return_value = node.values
resp = yield from client.get('/api/zwave/usercodes/18')
assert resp.status == 200
result = yield from resp.json()
assert result == {}
async def test_save_config_no_network(hass, client):
    """Test saving configuration without network data.

    Modernized from the deprecated @asyncio.coroutine / yield-from style to
    native async/await, consistent with the protection tests in this file.
    """
    resp = await client.post('/api/zwave/saveconfig')
    assert resp.status == 404
    result = await resp.json()
    assert result == {'message': 'No Z-Wave network data found'}
@asyncio.coroutine
def test_save_config(hass, client):
"""Test saving configuration."""
network = hass.data[DATA_NETWORK] = MagicMock()
resp = yield from client.post('/api/zwave/saveconfig')
assert resp.status == 200
result = yield from resp.json()
assert network.write_config.called
assert result == {'message': 'Z-Wave configuration saved to file.'}
async def test_get_protection_values(hass, client):
"""Test getting protection values on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18,
command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION)
value.label = 'Protection Test'
value.data_items = ['Unprotected', 'Protection by Sequence',
'No Operation Possible']
value.data = 'Unprotected'
network.nodes = {18: node}
node.value = value
node.get_protection_item.return_value = "Unprotected"
node.get_protection_items.return_value = value.data_items
node.get_protections.return_value = {value.value_id: 'Object'}
resp = await client.get('/api/zwave/protection/18')
assert resp.status == 200
result = await resp.json()
assert node.get_protections.called
assert node.get_protection_item.called
assert node.get_protection_items.called
assert result == {
'value_id': '123456',
'selected': 'Unprotected',
'options': ['Unprotected', 'Protection by Sequence',
'No Operation Possible']
}
async def test_get_protection_values_nonexisting_node(hass, client):
"""Test getting protection values on node with wrong nodeid."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18,
command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION)
value.label = 'Protection Test'
value.data_items = ['Unprotected', 'Protection by Sequence',
'No Operation Possible']
value.data = 'Unprotected'
network.nodes = {17: node}
node.value = value
resp = await client.get('/api/zwave/protection/18')
assert resp.status == 404
result = await resp.json()
assert not node.get_protections.called
assert not node.get_protection_item.called
assert not node.get_protection_items.called
assert result == {'message': 'Node not found'}
async def test_get_protection_values_without_protectionclass(hass, client):
"""Test getting protection values on node without protectionclass."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18)
value = MockValue(
value_id=123456,
index=0,
instance=1)
network.nodes = {18: node}
node.value = value
resp = await client.get('/api/zwave/protection/18')
assert resp.status == 200
result = await resp.json()
assert not node.get_protections.called
assert not node.get_protection_item.called
assert not node.get_protection_items.called
assert result == {}
async def test_set_protection_value(hass, client):
"""Test setting protection value on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18,
command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION)
value.label = 'Protection Test'
value.data_items = ['Unprotected', 'Protection by Sequence',
'No Operation Possible']
value.data = 'Unprotected'
network.nodes = {18: node}
node.value = value
resp = await client.post(
'/api/zwave/protection/18', data=json.dumps({
'value_id': '123456', 'selection': 'Protection by Sequence'}))
assert resp.status == 200
result = await resp.json()
assert node.set_protection.called
assert result == {'message': 'Protection setting succsessfully set'}
async def test_set_protection_value_failed(hass, client):
"""Test setting protection value failed on node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=18,
command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION)
value.label = 'Protection Test'
value.data_items = ['Unprotected', 'Protection by Sequence',
'No Operation Possible']
value.data = 'Unprotected'
network.nodes = {18: node}
node.value = value
node.set_protection.return_value = False
resp = await client.post(
'/api/zwave/protection/18', data=json.dumps({
'value_id': '123456', 'selection': 'Protecton by Seuence'}))
assert resp.status == 202
result = await resp.json()
assert node.set_protection.called
assert result == {'message': 'Protection setting did not complete'}
async def test_set_protection_value_nonexisting_node(hass, client):
"""Test setting protection value on nonexisting node."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=17,
command_classes=[const.COMMAND_CLASS_PROTECTION])
value = MockValue(
value_id=123456,
index=0,
instance=1,
command_class=const.COMMAND_CLASS_PROTECTION)
value.label = 'Protection Test'
value.data_items = ['Unprotected', 'Protection by Sequence',
'No Operation Possible']
value.data = 'Unprotected'
network.nodes = {17: node}
node.value = value
node.set_protection.return_value = False
resp = await client.post(
'/api/zwave/protection/18', data=json.dumps({
'value_id': '123456', 'selection': 'Protecton by Seuence'}))
assert resp.status == 404
result = await resp.json()
assert not node.set_protection.called
assert result == {'message': 'Node not found'}
async def test_set_protection_value_missing_class(hass, client):
"""Test setting protection value on node without protectionclass."""
network = hass.data[DATA_NETWORK] = MagicMock()
node = MockNode(node_id=17)
value = MockValue(
value_id=123456,
index=0,
instance=1)
network.nodes = {17: node}
node.value = value
node.set_protection.return_value = False
resp = await client.post(
'/api/zwave/protection/17', data=json.dumps({
'value_id': '123456', 'selection': 'Protecton by Seuence'}))
assert resp.status == 404
result = await resp.json()
assert not node.set_protection.called
assert result == {'message': 'No protection commandclass on this node'}
| apache-2.0 |
zlfben/gem5 | util/decode_inst_trace.py | 28 | 5386 | #!/usr/bin/env python
# Copyright (c) 2013-2014 ARM Limited
# All rights reserved
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Andreas Hansson
# This script is used to dump protobuf instruction traces to ASCII
# format. It assumes that protoc has been executed and already
# generated the Python package for the inst messages. This can
# be done manually using:
# protoc --python_out=. inst.proto
# The ASCII trace format uses one line per request.
import protolib
import sys
# Import the packet proto definitions
try:
import inst_pb2
except:
print "Did not find protobuf inst definitions, attempting to generate"
from subprocess import call
error = call(['protoc', '--python_out=util', '--proto_path=src/proto',
'src/proto/inst.proto'])
if not error:
print "Generated inst proto definitions"
try:
import google.protobuf
except:
print "Please install Python protobuf module"
exit(-1)
import inst_pb2
else:
print "Failed to import inst proto definitions"
exit(-1)
def main():
    """Decode a gem5 protobuf instruction trace into an ASCII file.

    Usage: decode_inst_trace.py <protobuf input> <ASCII output>
    One output line per instruction: tick, (node/cpu) ids, raw instruction
    word, PC, optional instruction type and memory access ranges.
    """
    if len(sys.argv) != 3:
        print "Usage: ", sys.argv[0], " <protobuf input> <ASCII output>"
        exit(-1)
    # Open the file in read mode
    proto_in = protolib.openFileRd(sys.argv[1])
    try:
        ascii_out = open(sys.argv[2], 'w')
    except IOError:
        print "Failed to open ", sys.argv[2], " for writing"
        exit(-1)
    # Read the magic number in 4-byte Little Endian
    magic_number = proto_in.read(4)
    if magic_number != "gem5":
        print "Unrecognized file", sys.argv[1]
        exit(-1)
    print "Parsing instruction header"
    # Add the packet header
    header = inst_pb2.InstHeader()
    protolib.decodeMessage(proto_in, header)
    print "Object id:", header.obj_id
    print "Tick frequency:", header.tick_freq
    print "Memory addresses included:", header.has_mem
    if header.ver != 0:
        print "Warning: file version newer than decoder:", header.ver
        print "This decoder may not understand how to decode this file"
    print "Parsing instructions"
    num_insts = 0
    inst = inst_pb2.Inst()
    # Decode the inst messages until we hit the end of the file
    # NOTE(review): optional_fields is never referenced below; it appears to
    # be leftover documentation of the message's optional members.
    optional_fields = ('tick', 'type', 'inst_flags', 'addr', 'size', 'mem_flags')
    while protolib.decodeMessage(proto_in, inst):
        # If we have a tick use it, otherwise count instructions
        if inst.HasField('tick'):
            tick = inst.tick
        else:
            tick = num_insts
        if inst.HasField('nodeid'):
            node_id = inst.nodeid
        else:
            node_id = 0;
        if inst.HasField('cpuid'):
            cpu_id = inst.cpuid
        else:
            cpu_id = 0;
        ascii_out.write('%-20d: (%03d/%03d) %#010x @ %#016x ' % (tick, node_id, cpu_id,
                        inst.inst, inst.pc))
        if inst.HasField('type'):
            ascii_out.write(' : %10s' % inst_pb2._INST_INSTTYPE.values_by_number[inst.type].name)
        # append every recorded memory access as "start-end;"
        for mem_acc in inst.mem_access:
            ascii_out.write(" %#x-%#x;" % (mem_acc.addr, mem_acc.addr + mem_acc.size))
        ascii_out.write('\n')
        num_insts += 1
    print "Parsed instructions:", num_insts
    # We're done
    ascii_out.close()
    proto_in.close()
if __name__ == "__main__":
main()
| bsd-3-clause |
marqh/cartopy | lib/cartopy/gshhs.py | 1 | 6450 | # (C) British Crown Copyright 2011 - 2012, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
# PLEASE NOTE: DUE TO SOME MPL RELATED ISSUES, THE GSHHS SUPPORT HAS BEEN DISABLED.
# IT IS ANTICIPATED THAT BY 0.5 THERE WILL BE A CLEAN AND TIDY INTERFACE
# TO USE THIS USEFUL DATASET. - pelson
import matplotlib.patches as mpatches
import matplotlib.path as mpath
from matplotlib.collections import PatchCollection
import matplotlib.cm
import numpy
import os
from shapely.geometry import Polygon
# XXX Make the data dir configurable
project_dir = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
data_dir = os.path.join(project_dir, 'data')
gshhs_data_dir = os.path.join(data_dir, 'gshhs')
fnames = {
'coarse': os.path.join(gshhs_data_dir, 'gshhs_c.b'),
'low': os.path.join(gshhs_data_dir, 'gshhs_l.b'),
'intermediate': os.path.join(gshhs_data_dir, 'gshhs_i.b'),
'high': os.path.join(gshhs_data_dir, 'gshhs_h.b'),
'full': os.path.join(gshhs_data_dir, 'gshhs_f.b'),
}
def read_gshhc(filename, poly=True, domain=None, filter_predicate=None):
"""
Reads:
Global Self-consistent Hierarchical High-resolution Shorelines
version 2.0 July 15, 2009
.. seealso:: http://www.soest.hawaii.edu/pwessel/gshhs/README.TXT
XXX: Return internal polygons when appropriate
"""
DEBUG = False
fh = open(filename, 'rb')
#(0, 360, -90, 90)
if domain is None:
domain = Polygon([[0, -90], [360, -90], [360, 90], [0, 90], [0, -90]])
extent_w, extent_s, extent_e, extent_n = [v * 1e6 for v in domain.bounds]
# corners = [extent_w, extent_n], [extent_w, extent_s], [extent_e, extent_s], [extent_e, extent_n]
#
# poly_extent = Polygon(numpy.array(corners) / 1e6)
poly_extent = domain
i=-1
# XXX
while True:
i += 1
# for i in xrange(10000):
# if i % 10000 == 1: print i
header = numpy.fromfile(fh, dtype='>i4', count=11)
# If no header was received, we are at the end of the file
if len(header) == 0:
break
if DEBUG:
if i not in ([
# 0, # Europe & Asia
# 1, # Africa
# 2, # USA
# 3, # S.America
# 4, # Antarctic
14, # UK
# 25, # Ireland
]):
continue
flag = header[2]
crosses_greenwich = (flag >> 16) & 1
flag = header[2]
level = flag & 255
# ###########################
# Filter the shapes by extent
# ###########################
# get the maximum extent in microdegrees
w, e, south, north = header[3:7]
in_x_range = False
in_y_range = False
# handle the case where the uk has an extent of -6230861 1765806 and Ireland has an extent of 349515833 354569167
# XXX I'm sure this could be done more cleanly
for off in range(2):
west = w - 360 * 1e6 * off
east = e - 360 * 1e6 * off
in_x_range = in_x_range or (extent_w <= west <= extent_e or extent_w <= east <= extent_e or (east >= extent_e and west <= extent_w))
in_y_range = in_y_range or (extent_s <= south <= extent_n or extent_s <= north <= extent_n or (north >= extent_n and south <= extent_s))
if not (in_x_range and in_y_range):
if DEBUG: print in_x_range, in_y_range, w, e, south, north, extent_w, extent_e
fh.seek(header[1]*2 * 4, 1)
continue
else:
if DEBUG: print in_x_range, in_y_range, w, e, south, north, extent_w, extent_e
points = numpy.fromfile(fh, dtype='>i4', count=header[1]*2) * 1.0e-6
points = points.reshape(-1, 2)
intersects = False
for off in range(2):
## west = points - numpy.array([[360 * off, 0]])
# east = points - numpy.array([[360 * off, 0]])
poly_shape = Polygon(points - numpy.array([[360 * off, 0]]))
# print (points - numpy.array([[360 * off, 0]]))[:10, ...]
# print corners
# print 'intersect? ', i, off*360, poly_shape.intersects(poly_extent)
intersects = intersects or poly_shape.intersects(poly_extent)
if not intersects:
continue
lons, lats = points[:, 0:1], points[:, 1:2]
if poly:
if ( level == 1 and
points.shape[0] > 4
):
yield points
else:
yield points
# break
# yield header, lons, lats
# if points.shape[0] > 4:
# yield header, lons, lats
# yield points
# if crosses_greenwich:
# # If the greenwich has been crossed, then 360 is added to any number below 0 in this format.
# # To fix this, identify any points which are more than 180 degrees apart, using this information we can identify
# # polygon groups and shift them appropriately.
# delta = numpy.diff(lons)
# step = numpy.where(numpy.abs(delta) > 180)[0]
# step = [0] + list(step+1) + [None]
# for s1, s2 in zip(step[:-1] , step[1:]):
# if delta[s1-1] > 180:
# lons[s1:s2] -= 360
#
# if i == 4:
# # antarctic
# lons = numpy.array(list(lons) + [lons[-1], lons[0], lons[0]])
# lats = numpy.array(list(lats) + [-90, -90, lats[0]])
# yield header, lons, lats
| gpl-3.0 |
kapilt/cloud-custodian | tools/sandbox/c7n_index/setup.py | 5 | 1168 | # Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages

# Packaging metadata for the c7n-indexer sandbox tool; it installs the
# `c7n-indexer` console script backed by c7n_index.metrics:cli.
setup(
    name="c7n_indexer",
    version='0.0.2',
    description="Cloud Custodian - Metrics/Resource Indexer",
    classifiers=[
        "Topic :: System :: Systems Administration",
        "Topic :: System :: Distributed Computing"
    ],
    url="https://github.com/cloud-custodian/cloud-custodian",
    license="Apache-2.0",
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'c7n-indexer = c7n_index.metrics:cli']},
    install_requires=["c7n", "click", "influxdb", "elasticsearch"],
)
| apache-2.0 |
javierparis/mediawiki | maintenance/language/zhtable/Makefile.py | 37 | 14682 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author Philip
import tarfile as tf
import zipfile as zf
import os, re, shutil, sys, platform
pyversion = platform.python_version()
islinux = platform.system().lower() == 'linux'
# Python 2/3 compatibility layer: expose a common `urllib_request` module,
# a unicode-capable open(), and a unichr() that can produce astral
# (non-BMP) characters even on narrow Python 2 builds.
if pyversion[:3] in ['2.6', '2.7']:
    import urllib as urllib_request
    import codecs
    open = codecs.open
    _unichr = unichr
    if sys.maxunicode < 0x10000:
        # Narrow build: emit a UTF-16 surrogate pair for astral code points.
        def unichr(i):
            if i < 0x10000:
                return _unichr(i)
            else:
                return _unichr( 0xD7C0 + ( i>>10 ) ) + \
                    _unichr( 0xDC00 + ( i & 0x3FF ) )
elif pyversion[:2] == '3.':
    import urllib.request as urllib_request
    unichr = chr

def unichr2( *args ):
    """Decode Unihan-style fields like 'U+4E00<kXXX' to characters
    (hex code point after 'U+', anything from '<' on ignored)."""
    return [unichr( int( i.split('<')[0][2:], 16 ) ) for i in args]

def unichr3( *args ):
    """Decode 'U+XXXXX' prefixes (at most 5 hex digits) to characters,
    skipping fields too short to contain a code point."""
    return [unichr( int( i[2:7], 16  ) ) for i in args if i[2:7]]

# DEFINE — upstream data versions and the SourceForge mirror to use.
UNIHAN_VER = '6.3.0'
SF_MIRROR = 'dfn'
SCIM_TABLES_VER = '0.5.13'
SCIM_PINYIN_VER = '0.5.92'
LIBTABE_VER = '0.2.3'
# END OF DEFINE
def download(url, dest):
    """Fetch *url* into the local file *dest*, skipping files that exist.

    On Linux the external ``wget`` tool is spawned so the user gets a live
    progress display; on other platforms ``urlretrieve`` is used.
    """
    if os.path.isfile(dest):
        print('File %s is up to date.' % dest)
        return
    global islinux
    if not islinux:
        print('Downloading from [%s] ...' % url)
        urllib_request.urlretrieve(url, dest)
    else:
        # we use wget instead urlretrieve under Linux,
        # because wget could display details like download progress
        os.system('wget %s -O %s' % (url, dest))
    print('Download complete.\n')
    return
def uncompress( fp, member, encoding = 'U8' ):
    """Extract *member* from the open archive *fp* into the current
    directory and return a text file object reading it with *encoding*.

    The member is moved to the top level under its base name; the
    directory tree created by the extraction is removed afterwards.
    Decoding errors are ignored so stray bytes cannot abort parsing.
    """
    # Keep only the base name; the archive member path may have directories.
    name = member.rsplit( '/', 1 )[-1]
    print( 'Extracting %s ...' % name )
    fp.extract( member )
    shutil.move( member, name )
    if '/' in member:
        # Drop the now-useless directory tree left behind by extract().
        shutil.rmtree( member.split( '/', 1 )[0] )
    if pyversion[:1] in ['2']:
        # Python 2: module-level `open` is codecs.open(name, mode,
        # encoding, errors).
        fc = open( name, 'rb', encoding, 'ignore' )
    else:
        fc = open( name, 'r', encoding = encoding, errors = 'ignore' )
    return fc
# Convenience wrappers: open a .zip / .tar.gz archive at *path* and return
# a decoding file object for *member* via uncompress().
unzip = lambda path, member, encoding = 'U8': \
    uncompress( zf.ZipFile( path ), member, encoding )
untargz = lambda path, member, encoding = 'U8': \
    uncompress( tf.open( path, 'r:gz' ), member, encoding )
def parserCore(fp, pos, beginmark=None, endmark=None):
    """Collect multi-character words from column *pos* of a table file.

    Lines come from the iterable *fp*.  When both *beginmark* and
    *endmark* are given, only lines between those markers are considered.
    Comment lines (starting with '#'), short lines and single-character
    entries are skipped.  Returns the set of collected words.
    """
    inside = not (beginmark and endmark)
    words = set()
    for line in fp:
        if beginmark and line.startswith(beginmark):
            inside = True
            continue
        elif endmark and line.startswith(endmark):
            break
        if not inside or line.startswith('#'):
            continue
        fields = line.split()
        if len(fields) >= 2 and len(fields[0]) > 1 and len(fields[pos]) > 1:
            words.add(fields[pos])
    return words
def tablesParser(path, name):
    """Extract the word set of table *name* from a scim-tables tarball."""
    global SCIM_TABLES_VER
    member = 'scim-tables-%s/tables/zh/%s' % (SCIM_TABLES_VER, name)
    table_fp = untargz(path, member, 'U8')
    return parserCore(table_fp, 1, 'BEGIN_TABLE', 'END_TABLE')
# Per-table helpers: each pulls one input-method table out of the
# scim-tables tarball.
ezbigParser = lambda path: tablesParser( path, 'EZ-Big.txt.in' )
wubiParser = lambda path: tablesParser( path, 'Wubi.txt.in' )
zrmParser = lambda path: tablesParser( path, 'Ziranma.txt.in' )
def phraseParser(path):
    """Read phrase_lib.txt from the scim-pinyin tarball and parse it.

    Returns the set of multi-character phrases (column 0).
    """
    global SCIM_PINYIN_VER
    src = 'scim-pinyin-%s/data/phrase_lib.txt' % SCIM_PINYIN_VER
    # (removed unused local `dst`)
    fp = untargz(path, src, 'U8')
    return parserCore(fp, 0)
def tsiParser(path):
    """Read tsi.src (Big5-HKSCS encoded) from the libtabe tarball and
    parse it into a set of phrases."""
    src = 'libtabe/tsi-src/tsi.src'
    # (removed unused local `dst`)
    fp = untargz(path, src, 'big5hkscs')
    return parserCore(fp, 0)
def unihanParser(path):
    """Read Unihan_Variants.txt from the Unihan zip archive.

    Returns ``(t2s, s2t)``: dicts mapping a character to the list of its
    simplified / traditional variant characters respectively.
    """
    fp = unzip(path, 'Unihan_Variants.txt', 'U8')
    t2s = dict()
    s2t = dict()
    for line in fp:
        if line.startswith('#'):
            continue
        fields = line.split()
        if len(fields) < 3:
            continue
        # Field layout: <source char> <variant kind> <target chars...>
        # Renamed from `type`, which shadowed the builtin.
        variant_kind = fields.pop(1)
        chars = unichr2(*fields)
        if variant_kind == 'kTraditionalVariant':
            s2t[chars[0]] = chars[1:]
        elif variant_kind == 'kSimplifiedVariant':
            t2s[chars[0]] = chars[1:]
    fp.close()
    return (t2s, s2t)
def applyExcludes(mlist, path):
    """Remove from the set *mlist* every word matching an exclude rule.

    *path* holds whitespace-separated patterns ('#' starts a comment);
    the patterns are OR-ed into one regex.  The set is filtered in place
    and returned.
    """
    if pyversion[:1] in ['2']:
        raw = open(path, 'rb', 'U8').read().split()
    else:
        raw = open(path, 'r', encoding='U8').read().split()
    patterns = [entry.split('#')[0].strip() for entry in raw]
    excptn = re.compile('.*(?:%s).*' % '|'.join(patterns))
    matched = [word for word in mlist if excptn.search(word)]
    mlist.difference_update(matched)
    return mlist
def charManualTable(path):
    """Yield (char, variants) pairs from a manual conversion table.

    Each line holds '|'-separated 'U+XXXXX' fields; text after '#' is a
    comment.  Lines yielding fewer than two characters are skipped.
    """
    fp = open(path, 'r', encoding='U8')
    for line in fp:
        decoded = unichr3(*line.split('#')[0].split('|'))
        if len(decoded) > 1:
            yield decoded[0], decoded[1:]
def toManyRules(src_table):
    """Return the set of all secondary variant characters in *src_table*.

    For every value list, entries after the first are "one to many"
    conversion targets; collecting them lets later passes treat such
    conversions cautiously.
    """
    # dict.values()/items() exist on both Python 2 and 3, so the former
    # iteritems()/items() duplicate branches collapse into one loop.
    tomany = set()
    for variants in src_table.values():
        tomany.update(variants[1:])
    return tomany
def removeRules(path, table):
    """Prune *table* according to the remove rules in *path*.

    Each rule line is either ``"src"`` (drop that key) or
    ``"src" => "dst"`` (drop the key and any entry whose value equals the
    destination).  Returns the pruned table (also mutated in place).
    """
    fp = open(path, 'r', encoding='U8')
    banned_targets = list()
    for line in fp:
        elems = line.split('=>')
        f = t = elems[0].strip()
        if len(elems) == 2:
            t = elems[1].strip()
        f = f.strip('"').strip("'")
        t = t.strip('"').strip("'")
        if f:
            # The key may legitimately be absent; only swallow KeyError
            # (the old bare `except:` hid every failure).
            try:
                table.pop(f)
            except KeyError:
                pass
        if t:
            banned_targets.append(t)
    texcptn = re.compile('^(?:%s)$' % '|'.join(banned_targets))
    # dict.items() works on both Python 2 and 3; iterate over a copy
    # because the table is mutated while scanning.
    for (key, value) in table.copy().items():
        if texcptn.match(value):
            table.pop(key)
    return table
def customRules(path):
    """Load a tab-separated manual mapping file into a dict.

    Text after '#' on a line is a comment; lines without a tab are
    ignored.
    """
    mapping = dict()
    fp = open(path, 'r', encoding='U8')
    for line in fp:
        line = line.rstrip('\r\n')
        if '#' in line:
            line = line.split('#')[0].rstrip()
        cols = line.split('\t')
        if len(cols) > 1:
            mapping[cols[0]] = cols[1]
    return mapping
def dictToSortedList(src_table, pos):
    """Return the (key, value) pairs of *src_table*, sorted primarily by
    element *pos* of each pair and secondarily by the other element."""
    def sort_key(pair):
        return (pair[pos], pair[1 - pos])
    return sorted(src_table.items(), key=sort_key)
def translate(text, conv_table):
    """Convert *text* through *conv_table* using greedy longest-match.

    At each position the longest entry of *conv_table* matching the rest
    of the string is substituted; unmatched characters pass through.
    """
    pos = 0
    while pos < len(text):
        remainder = text[pos:]
        for length in range(len(remainder), 0, -1):
            candidate = remainder[:length]
            replacement = conv_table.get(candidate)
            if replacement:
                text = text[:pos] + replacement + remainder[length:]
                # Land on the last char of the replacement; the trailing
                # pos += 1 then moves past it.
                pos += len(replacement) - 1
                break
        pos += 1
    return text
def manualWordsTable( path, conv_table, reconv_table ):
    """Build a reverse-conversion word table from a manual word list.

    Each word in the file is converted through *conv_table*; both the
    original word and its converted form are mapped back to the original.

    NOTE(review): the *reconv_table* argument is immediately discarded
    and rebuilt from scratch below — presumably intentional, but confirm
    against the call sites in main() before relying on it.
    """
    fp = open( path, 'r', encoding = 'U8' )
    reconv_table = {}
    # One word per line, '#' starts a comment.  Deduplicate, then sort
    # longest-first so pop() hands back the shortest words first.
    wordlist = [line.split( '#' )[0].strip() for line in fp]
    wordlist = list( set( wordlist ) )
    wordlist.sort( key = lambda w: ( len(w), w ), reverse = True )
    while wordlist:
        word = wordlist.pop()
        new_word = translate( word, conv_table )
        rcv_word = translate( word, reconv_table )
        if word != rcv_word:
            # The reverse table built so far would mangle this word;
            # pin it to itself so it survives round-tripping.
            reconv_table[word] = word
        reconv_table[new_word] = word
    return reconv_table
def defaultWordsTable( src_wordlist, src_tomany, char_conv_table, char_reconv_table ):
    """Derive a word-to-word reverse-conversion table from a word list.

    A word is recorded only when per-character conversion does not
    round-trip (*test_word != word*) or when it contains a one-to-many
    character (*src_tomany*) whose conversion is ambiguous.

    NOTE(review): the local ``add = False`` flag below is never read —
    apparently dead code left from an earlier revision.
    """
    # Longest-first sort so pop() processes the shortest words first and
    # longer words can reuse rules already discovered.
    wordlist = list( src_wordlist )
    wordlist.sort( key = lambda w: ( len(w), w ), reverse = True )
    word_conv_table = {}
    word_reconv_table = {}
    conv_table = char_conv_table.copy()
    reconv_table = char_reconv_table.copy()
    tomanyptn = re.compile( '(?:%s)' % '|'.join( src_tomany ) )
    while wordlist:
        # Fold the rules found so far into the working tables; this is
        # done once per run of equal-length words (see inner loop).
        conv_table.update( word_conv_table )
        reconv_table.update( word_reconv_table )
        word = wordlist.pop()
        new_word_len = word_len = len( word )
        # Consume a run of words of identical length before refreshing
        # the working tables again.
        while new_word_len == word_len:
            add = False
            test_word = translate( word, reconv_table )
            new_word = translate( word, conv_table )
            if not reconv_table.get( new_word ) \
                    and ( test_word != word \
                    or ( tomanyptn.search( word ) \
                    and word != translate( new_word, reconv_table ) ) ):
                word_conv_table[word] = new_word
                word_reconv_table[new_word] = word
            try:
                word = wordlist.pop()
            except IndexError:
                break
            new_word_len = len(word)
    return word_reconv_table
def PHPArray(table):
    """Render (from, to) pairs as lines of a PHP array literal, skipping
    pairs with an empty side."""
    entries = ('\'%s\' => \'%s\',' % pair
               for pair in table if pair[0] and pair[1])
    return '\n'.join(entries)
def main():
    """Download the upstream data files (Unihan, scim-tables, scim-pinyin,
    libtabe), merge them with the local .manual rule files, and regenerate
    includes/ZhConversion.php."""
    #Get Unihan.zip:
    url = 'http://www.unicode.org/Public/%s/ucd/Unihan.zip' % UNIHAN_VER
    han_dest = 'Unihan-%s.zip' % UNIHAN_VER
    download( url, han_dest )

    # Get scim-tables-$(SCIM_TABLES_VER).tar.gz:
    url = 'http://%s.dl.sourceforge.net/sourceforge/scim/scim-tables-%s.tar.gz' % ( SF_MIRROR, SCIM_TABLES_VER )
    tbe_dest = 'scim-tables-%s.tar.gz' % SCIM_TABLES_VER
    download( url, tbe_dest )

    # Get scim-pinyin-$(SCIM_PINYIN_VER).tar.gz:
    url = 'http://%s.dl.sourceforge.net/sourceforge/scim/scim-pinyin-%s.tar.gz' % ( SF_MIRROR, SCIM_PINYIN_VER )
    pyn_dest = 'scim-pinyin-%s.tar.gz' % SCIM_PINYIN_VER
    download( url, pyn_dest )

    # Get libtabe-$(LIBTABE_VER).tgz:
    url = 'http://%s.dl.sourceforge.net/sourceforge/libtabe/libtabe-%s.tgz' % ( SF_MIRROR, LIBTABE_VER )
    lbt_dest = 'libtabe-%s.tgz' % LIBTABE_VER
    download( url, lbt_dest )

    # Unihan.txt
    # Character-level variant tables; the .manual files extend/override
    # them below.
    ( t2s_1tomany, s2t_1tomany ) = unihanParser( han_dest )

    t2s_1tomany.update( charManualTable( 'symme_supp.manual' ) )
    t2s_1tomany.update( charManualTable( 'trad2simp.manual' ) )
    s2t_1tomany.update( ( t[0], [f] ) for ( f, t ) in charManualTable( 'symme_supp.manual' ) )
    s2t_1tomany.update( charManualTable( 'simp2trad.manual' ) )

    # Collapse one-to-many tables to one-to-one (first variant wins).
    if pyversion[:1] in ['2']:
        t2s_1to1 = dict( [( f, t[0] ) for ( f, t ) in t2s_1tomany.iteritems()] )
        s2t_1to1 = dict( [( f, t[0] ) for ( f, t ) in s2t_1tomany.iteritems()] )
    else:
        t2s_1to1 = dict( [( f, t[0] ) for ( f, t ) in t2s_1tomany.items()] )
        s2t_1to1 = dict( [( f, t[0] ) for ( f, t ) in s2t_1tomany.items()] )
    s_tomany = toManyRules( t2s_1tomany )
    t_tomany = toManyRules( s2t_1tomany )

    # noconvert rules
    t2s_1to1 = removeRules( 'trad2simp_noconvert.manual', t2s_1to1 )
    s2t_1to1 = removeRules( 'simp2trad_noconvert.manual', s2t_1to1 )

    # the supper set for word to word conversion
    t2s_1to1_supp = t2s_1to1.copy()
    s2t_1to1_supp = s2t_1to1.copy()
    t2s_1to1_supp.update( customRules( 'trad2simp_supp_set.manual' ) )
    s2t_1to1_supp.update( customRules( 'simp2trad_supp_set.manual' ) )

    # word to word manual rules
    t2s_word2word_manual = manualWordsTable( 'simpphrases.manual', s2t_1to1_supp, t2s_1to1_supp )
    t2s_word2word_manual.update( customRules( 'toSimp.manual' ) )
    s2t_word2word_manual = manualWordsTable( 'tradphrases.manual', t2s_1to1_supp, s2t_1to1_supp )
    s2t_word2word_manual.update( customRules( 'toTrad.manual' ) )

    # word to word rules from input methods
    t_wordlist = set()
    s_wordlist = set()
    t_wordlist.update( ezbigParser( tbe_dest ),
                       tsiParser( lbt_dest ) )
    s_wordlist.update( wubiParser( tbe_dest ),
                       zrmParser( tbe_dest ),
                       phraseParser( pyn_dest ) )

    # exclude
    s_wordlist = applyExcludes( s_wordlist, 'simpphrases_exclude.manual' )
    t_wordlist = applyExcludes( t_wordlist, 'tradphrases_exclude.manual' )

    s2t_supp = s2t_1to1_supp.copy()
    s2t_supp.update( s2t_word2word_manual )
    t2s_supp = t2s_1to1_supp.copy()
    t2s_supp.update( t2s_word2word_manual )

    # parse list to dict
    t2s_word2word = defaultWordsTable( s_wordlist, s_tomany, s2t_1to1_supp, t2s_supp )
    t2s_word2word.update( t2s_word2word_manual )
    s2t_word2word = defaultWordsTable( t_wordlist, t_tomany, t2s_1to1_supp, s2t_supp )
    s2t_word2word.update( s2t_word2word_manual )

    # Final tables
    # sorted list toHans
    if pyversion[:1] in ['2']:
        t2s_1to1 = dict( [( f, t ) for ( f, t ) in t2s_1to1.iteritems() if f != t] )
    else:
        t2s_1to1 = dict( [( f, t ) for ( f, t ) in t2s_1to1.items() if f != t] )
    toHans = dictToSortedList( t2s_1to1, 0 ) + dictToSortedList( t2s_word2word, 1 )
    # sorted list toHant
    if pyversion[:1] in ['2']:
        s2t_1to1 = dict( [( f, t ) for ( f, t ) in s2t_1to1.iteritems() if f != t] )
    else:
        s2t_1to1 = dict( [( f, t ) for ( f, t ) in s2t_1to1.items() if f != t] )
    toHant = dictToSortedList( s2t_1to1, 0 ) + dictToSortedList( s2t_word2word, 1 )
    # sorted list toCN
    toCN = dictToSortedList( customRules( 'toCN.manual' ), 1 )
    # sorted list toHK
    toHK = dictToSortedList( customRules( 'toHK.manual' ), 1 )
    # sorted list toTW
    toTW = dictToSortedList( customRules( 'toTW.manual' ), 1 )

    # Get PHP Array
    php = '''<?php
/**
 * Simplified / Traditional Chinese conversion tables
 *
 * Automatically generated using code and data in maintenance/language/zhtable/
 * Do not modify directly!
 *
 * @file
 */

$zh2Hant = array(\n'''
    php += PHPArray( toHant ) \
        + '\n);\n\n$zh2Hans = array(\n' \
        + PHPArray( toHans ) \
        + '\n);\n\n$zh2TW = array(\n' \
        + PHPArray( toTW ) \
        + '\n);\n\n$zh2HK = array(\n' \
        + PHPArray( toHK ) \
        + '\n);\n\n$zh2CN = array(\n' \
        + PHPArray( toCN ) \
        + '\n);\n'

    if pyversion[:1] in ['2']:
        f = open( os.path.join( '..', '..', '..', 'includes', 'ZhConversion.php' ), 'wb', encoding = 'utf8' )
    else:
        f = open( os.path.join( '..', '..', '..', 'includes', 'ZhConversion.php' ), 'w', buffering = 4096, encoding = 'utf8' )
    print ('Writing ZhConversion.php ... ')
    f.write( php )
    f.close()

    # Remove temporary files
    print ('Deleting temporary files ... ')
    os.remove('EZ-Big.txt.in')
    os.remove('phrase_lib.txt')
    os.remove('tsi.src')
    os.remove('Unihan_Variants.txt')
    os.remove('Wubi.txt.in')
    os.remove('Ziranma.txt.in')


if __name__ == '__main__':
    main()
| gpl-2.0 |
wangjun/kivy | kivy/uix/vkeyboard.py | 4 | 28046 | '''
VKeyboard
=========
.. image:: images/vkeyboard.jpg
:align: right
.. versionadded:: 1.0.8
.. warning::
This is experimental and subject to change as long as this warning notice is
present.
VKeyboard is an onscreen keyboard for Kivy. Its operation is intended to be
transparent to the user. Using the widget directly is NOT recommended. Read the
section `Request keyboard`_ first.
Modes
-----
This virtual keyboard has a docked and free mode:
* docked mode (:data:`VKeyboard.docked` = True)
Generally used when only one person is using the computer, like tablet,
personal computer etc.
* free mode: (:data:`VKeyboard.docked` = False)
Mostly for multitouch table. This mode allows more than one virtual
keyboard on the screen.
If the docked mode changes, you need to manually call
:meth:`VKeyboard.setup_mode`. Otherwise the change will have no impact.
During that call, the VKeyboard, implemented on top of a scatter, will change
the behavior of the scatter and position the keyboard near the target (if a
target is set and docked mode is enabled).
Layouts
-------
The virtual keyboard is able to load a custom layout. If you create a new
layout, put the JSON in :file:`<kivy_data_dir>/keyboards/<layoutid>.json`.
Load it by setting :data:`VKeyboard.layout` to your layoutid.
The JSON must be structured like this::
{
"title": "Title of your layout",
"description": "Description of your layout",
"cols": 15,
"rows": 5,
...
}
Then, you need to describe keys in each row, for either a "normal" mode or a
"shift" mode. Keys for this row data must be named `normal_<row>` and
`shift_<row>`. Replace `row` with the row number.
Inside each row, you will describe the key. A key is a 4 element list in the
format::
[ <text displayed on the keyboard>, <text to put when the key is pressed>,
<text that represents the keycode>, <size of cols> ]
Here are example keys::
# f key
["f", "f", "f", 1]
# capslock
["\u21B9", "\t", "tab", 1.5]
Finally, complete the JSON::
{
...
"normal_1": [
["`", "`", "`", 1], ["1", "1", "1", 1], ["2", "2", "2", 1],
["3", "3", "3", 1], ["4", "4", "4", 1], ["5", "5", "5", 1],
["6", "6", "6", 1], ["7", "7", "7", 1], ["8", "8", "8", 1],
["9", "9", "9", 1], ["0", "0", "0", 1], ["+", "+", "+", 1],
["=", "=", "=", 1], ["\u232b", null, "backspace", 2]
],
"shift_1": [ ... ],
"normal_2": [ ... ],
...
}
Request Keyboard
----------------
The instantiation of the virtual keyboard is controlled by the configuration.
Check `keyboard_mode` and `keyboard_layout` in the :doc:`api-kivy.config`.
If you intend to create a widget that requires a keyboard, do not use the
virtual keyboard directly, but prefer to use the best method available on
the platform. Check the :meth:`~kivy.core.window.WindowBase.request_keyboard`
method in the :doc:`api-kivy.core.window`.
If you want a specific layout with your request keyboard, you must write
something like this (from 1.8.0, numeric.json is in the same directory as your
main.py)::
keyboard = Window.request_keyboard(
self._keyboard_close, self)
if keyboard.widget:
vkeyboard = self._keyboard.widget
vkeyboard.layout = 'numeric.json'
'''
__all__ = ('VKeyboard', )
from kivy import kivy_data_dir
from kivy.vector import Vector
from kivy.config import Config
from kivy.uix.scatter import Scatter
from kivy.uix.label import Label
from kivy.properties import ObjectProperty, NumericProperty, StringProperty, \
BooleanProperty, DictProperty, OptionProperty, ListProperty
from kivy.logger import Logger
from kivy.graphics import Color, BorderImage, Canvas
from kivy.core.image import Image
from kivy.resources import resource_find
from kivy.clock import Clock
from os.path import join, splitext, basename
from os import listdir
from json import loads
default_layout_path = join(kivy_data_dir, 'keyboards')
class VKeyboard(Scatter):
'''
VKeyboard is an onscreen keyboard with multitouch support.
Its layout is entirely customizable and you can switch between available
layouts using a button in the bottom right of the widget.
:Events:
`on_key_down`: keycode, internal, modifiers
Fired when the keyboard received a key down event (key press).
`on_key_up`: keycode, internal, modifiers
Fired when the keyboard received a key up event (key release).
'''
target = ObjectProperty(None, allownone=True)
'''Target widget associated to VKeyboard. If set, it will be used to send
keyboard events, and if the VKeyboard mode is "free", it will also be used
to set the initial position.
:data:`target` is a :class:`~kivy.properties.ObjectProperty` instance,
default to None.
'''
callback = ObjectProperty(None, allownone=True)
'''Callback can be set to a function that will be called if the VKeyboard is
closed by the user.
:data:`target` is a :class:`~kivy.properties.ObjectProperty` instance,
default to None.
'''
layout = StringProperty(None)
'''Layout to use for the VKeyboard. By default, it will be the layout set in
the configuration, according to the `keyboard_layout` in `[kivy]` section.
.. versionchanged:: 1.8.0
If layout is a .json filename, it will loaded and added to the
available_layouts.
:data:`layout` is a :class:`~kivy.properties.StringProperty`, default to
None.
'''
layout_path = StringProperty(default_layout_path)
'''Path from which layouts are read.
:data:`layout` is a :class:`~kivy.properties.StringProperty`, default to
:file:`<kivy_data_dir>/keyboards/`
'''
available_layouts = DictProperty({})
'''Dictionary of all available layouts. Keys are the layout ID, and the
value is the JSON (translated in Python object).
:data:`available_layouts` is a :class:`~kivy.properties.DictProperty`,
default to {}
'''
docked = BooleanProperty(False)
'''Indicate if the VKeyboard is docked on the screen or not. If you change
it, you must manually call :meth:`setup_mode`. Otherwise, it will have no
impact. If the VKeyboard is created by the Window, the docked mode will be
automatically set by the configuration, with `keyboard_mode` token in
`[kivy]` section.
:data:`docked` is a :class:`~kivy.properties.BooleanProperty`, default to
False.
'''
margin_hint = ListProperty([.05, .06, .05, .06])
'''Margin hint, used as spacing between keyboard background and keys
content. The margin is composed of four values, between 0 and 1::
margin_hint = [top, right, bottom, left]
The margin hints will be multiplied by width and height, according to their
position.
:data:`margin_hint` is a :class:`~kivy.properties.ListProperty`, default to
[.05, .06, .05, .06]
'''
key_margin = ListProperty([2, 2, 2, 2])
'''Key margin, used to create space between keys. The margin is composed of
four values, in pixels::
key_margin = [top, right, bottom, left]
:data:`key_margin` is a :class:`~kivy.properties.ListProperty`, default to
[2, 2, 2, 2]
'''
background_color = ListProperty([1, 1, 1, 1])
'''Background color, in the format (r, g, b, a). If a background is set, the
color will be combined with the background texture.
:data:`background_color` is a :class:`~kivy.properties.ListProperty`,
default to [1, 1, 1, 1].
'''
background = StringProperty(
'atlas://data/images/defaulttheme/vkeyboard_background')
'''Filename of the background image.
:data:`background` a :class:`~kivy.properties.StringProperty`, default to
:file:`atlas://data/images/defaulttheme/vkeyboard_background`.
'''
background_disabled = StringProperty(
'atlas://data/images/defaulttheme/vkeyboard_disabled_background')
'''Filename of the background image when vkeyboard is disabled.
.. versionadded:: 1.8.0
:data:`background_disabled` a
:class:`~kivy.properties.StringProperty`, default to
:file:`atlas://data/images/defaulttheme/vkeyboard__disabled_background`.
'''
key_background_color = ListProperty([1, 1, 1, 1])
'''Key background color, in the format (r, g, b, a). If a key background is
set, the color will be combined with the key background texture.
:data:`key_background_color` is a :class:`~kivy.properties.ListProperty`,
default to [1, 1, 1, 1].
'''
key_background_normal = StringProperty(
'atlas://data/images/defaulttheme/vkeyboard_key_normal')
'''Filename of the key background image for use when no touches are active
on the widget.
:data:`key_background_normal` a :class:`~kivy.properties.StringProperty`,
default to :file:`atlas://data/images/defaulttheme/vkeyboard_key_normal`.
'''
key_disabled_background_normal = StringProperty(
'atlas://data/images/defaulttheme/vkeyboard_key_normal')
'''Filename of the key background image for use when no touches are active
on the widget and vkeyboard is disabled.
..versionadded:: 1.8.0
:data:`key_disabled_background_normal` a
:class:`~kivy.properties.StringProperty`, default to
:file:`atlas://data/images/defaulttheme/vkeyboard_disabled_key_normal`.
'''
key_background_down = StringProperty(
'atlas://data/images/defaulttheme/vkeyboard_key_down')
'''Filename of the key background image for use when a touch is active
on the widget.
:data:`key_background_down` a :class:`~kivy.properties.StringProperty`,
default to :file:`atlas://data/images/defaulttheme/vkeyboard_key_down`.
'''
background_border = ListProperty([16, 16, 16, 16])
'''Background image border. Used for controlling the
:data:`~kivy.graphics.vertex_instructions.BorderImage.border` property of
the background.
:data:`background_border` is a :class:`~kivy.properties.ListProperty`,
default to [16, 16, 16, 16]
'''
key_border = ListProperty([8, 8, 8, 8])
'''Key image border. Used for controlling the
:data:`~kivy.graphics.vertex_instructions.BorderImage.border` property of
the key.
:data:`key_border` is a :class:`~kivy.properties.ListProperty`,
default to [16, 16, 16, 16]
'''
# XXX internal variables
layout_mode = OptionProperty('normal', options=('normal', 'shift'))
layout_geometry = DictProperty({})
have_capslock = BooleanProperty(False)
have_shift = BooleanProperty(False)
active_keys = DictProperty({})
font_size = NumericProperty('20dp')
font_name = StringProperty('data/fonts/DejaVuSans.ttf')
__events__ = ('on_key_down', 'on_key_up')
    def __init__(self, **kwargs):
        # XXX move to style.kv
        # Scatter defaults suited to an on-screen keyboard; only applied
        # when the caller did not pass an explicit value.
        kwargs.setdefault('size_hint', (None, None))
        kwargs.setdefault('scale_min', .4)
        kwargs.setdefault('scale_max', 1.6)
        kwargs.setdefault('size', (700, 200))
        kwargs.setdefault('docked', False)
        # Clock triggers coalesce repeated property changes into a single
        # callback on the next frame.
        self._trigger_update_layout_mode = Clock.create_trigger(
            self._update_layout_mode)
        self._trigger_load_layouts = Clock.create_trigger(
            self._load_layouts)
        self._trigger_load_layout = Clock.create_trigger(
            self._load_layout)
        self.bind(
            docked=self.setup_mode,
            have_shift=self._trigger_update_layout_mode,
            have_capslock=self._trigger_update_layout_mode,
            layout_path=self._trigger_load_layouts,
            layout=self._trigger_load_layout)
        super(VKeyboard, self).__init__(**kwargs)
        # load all the layouts found in the layout_path directory
        self._load_layouts()
        # ensure we have default layouts
        available_layouts = self.available_layouts
        if not available_layouts:
            Logger.critical('VKeyboard: unable to load default layouts')
        # load the default layout from configuration
        if self.layout is None:
            self.layout = Config.get('kivy', 'keyboard_layout')
        else:
            # ensure the current layout is found on the available layout
            self._trigger_load_layout()
        # update layout mode (shift or normal)
        self._trigger_update_layout_mode()
        # create a top layer to draw active keys on
        with self.canvas:
            self.background_key_layer = Canvas()
            self.active_keys_layer = Canvas()
        # prepare layout widget
        self.refresh_keys_hint()
        self.refresh_keys()
def on_disabled(self, intance, value):
self.refresh_keys()
def _update_layout_mode(self, *l):
# update mode according to capslock and shift key
mode = self.have_capslock != self.have_shift
mode = 'shift' if mode else 'normal'
if mode != self.layout_mode:
self.layout_mode = mode
self.refresh(False)
    def _load_layout(self, *largs):
        """(internal) Resolve :data:`layout` against the available
        layouts, loading it directly if it names a .json file, and fall
        back to 'qwerty' when it cannot be found."""
        # ensure new layouts are loaded first
        if self._trigger_load_layouts.is_triggered:
            self._load_layouts()
            self._trigger_load_layouts.cancel()
        value = self.layout
        available_layouts = self.available_layouts
        # it's a filename, try to load it directly
        if self.layout[-5:] == '.json':
            if value not in available_layouts:
                fn = resource_find(self.layout)
                self._load_layout_fn(fn, self.layout)
        if not available_layouts:
            return
        if value not in available_layouts and value != 'qwerty':
            Logger.error(
                'Vkeyboard: <%s> keyboard layout mentioned in '
                'conf file was not found, fallback on qwerty' %
                value)
            self.layout = 'qwerty'
        self.refresh(True)
def _load_layouts(self, *largs):
# first load available layouts from json files
# XXX fix to be able to reload layout when path is changing
value = self.layout_path
for fn in listdir(value):
self._load_layout_fn(join(value, fn),
basename(splitext(fn)[0]))
def _load_layout_fn(self, fn, name):
available_layouts = self.available_layouts
if fn[-5:] != '.json':
return
with open(fn, 'r') as fd:
json_content = fd.read()
layout = loads(json_content)
available_layouts[name] = layout
def setup_mode(self, *largs):
'''Call this method when you want to readjust the keyboard according to
options: :data:`docked` or not, with attached :data:`target` or not:
* If :data:`docked` is True, it will call :meth:`setup_mode_dock`
* If :data:`docked` is False, it will call :meth:`setup_mode_free`
Feel free to overload theses methods to create a new
positioning behavior.
'''
if self.docked:
self.setup_mode_dock()
else:
self.setup_mode_free()
def setup_mode_dock(self, *largs):
'''Setup the keyboard in docked mode.
Dock mode will reset the rotation, disable translation, rotation and
scale. Scale and position will be automatically adjusted to attach the
keyboard in the bottom of the screen.
.. note::
Don't call this method directly, use :meth:`setup_mode` instead.
'''
self.do_translation = False
self.do_rotation = False
self.do_scale = False
self.rotation = 0
win = self.get_parent_window()
scale = win.width / float(self.width)
self.scale = scale
self.pos = 0, 0
win.bind(on_resize=self._update_dock_mode)
def _update_dock_mode(self, win, *largs):
scale = win.width / float(self.width)
self.scale = scale
self.pos = 0, 0
    def setup_mode_free(self):
        '''Setup the keyboard in free mode.

        Free mode is designed to let the user control the position and
        orientation of the keyboard. The only real usage is for a
        multiuser environment, but you might find other ways to use it.
        If a :data:`target` is set, it will place the vkeyboard under the
        target.

        .. note::
            Don't call this method directly, use :meth:`setup_mode`
            instead.
        '''
        self.do_translation = True
        self.do_rotation = True
        self.do_scale = True
        target = self.target
        if not target:
            return
        # NOTE all math will be done in window point of view
        # determine rotation of the target
        a = Vector(1, 0)
        b = Vector(target.to_window(0, 0))
        c = Vector(target.to_window(1, 0)) - b
        self.rotation = -a.angle(c)
        # determine the position of center/top of the keyboard
        dpos = Vector(self.to_window(self.width / 2., self.height))
        # determine the position of center/bottom of the target
        cpos = Vector(target.to_window(target.center_x, target.y))
        # the goal now is to map both point, calculate the diff between
        # them
        diff = dpos - cpos
        # we still have an issue: self.pos represents the bounding box,
        # not the 0,0 coordinate of the scatter. we need to apply also the
        # diff between them (inside and outside coordinate matrix). It's
        # hard to explain, but do a scheme on a paper, write all the
        # vectors we're calculating, and you'll understand. :)
        diff2 = Vector(self.x + self.width / 2., self.y + self.height) - \
            Vector(self.to_parent(self.width / 2., self.height))
        diff -= diff2
        # now we have a good "diff", set it as a pos.
        self.pos = -diff
    def change_layout(self):
        # XXX implement popup with all available layouts
        # Placeholder: triggered by the special 'layout' key; currently a
        # no-op until the layout-chooser popup is implemented.
        pass
    def refresh(self, force=False):
        '''(internal) Recreate the entire widget and graphics according to
        the selected layout.

        When *force* is True the relative key geometry (hints) is also
        recomputed; otherwise only the pixel geometry and drawings are
        rebuilt.
        '''
        self.clear_widgets()
        if force:
            self.refresh_keys_hint()
        self.refresh_keys()
        self.refresh_active_keys_layer()
def refresh_active_keys_layer(self):
self.active_keys_layer.clear()
active_keys = self.active_keys
layout_geometry = self.layout_geometry
background = resource_find(self.key_background_down)
texture = Image(background, mipmap=True).texture
with self.active_keys_layer:
Color(1, 1, 1)
for line_nb, index in active_keys.values():
pos, size = layout_geometry['LINE_%d' % line_nb][index]
BorderImage(texture=texture, pos=pos, size=size,
border=self.key_border)
    def refresh_keys_hint(self):
        """(internal) Compute the RELATIVE (0..1) position and size of
        every key for the current layout and mode, stored in
        :data:`layout_geometry` under ``U_HINT`` (unit key size) and
        ``LINE_HINT_<n>`` (one list per row)."""
        layout = self.available_layouts[self.layout]
        layout_cols = layout['cols']
        layout_rows = layout['rows']
        layout_geometry = self.layout_geometry
        mtop, mright, mbottom, mleft = self.margin_hint
        # get relative EFFICIENT surface of the layout without external
        # margins
        el_hint = 1. - mleft - mright
        eh_hint = 1. - mtop - mbottom
        ex_hint = 0 + mleft
        ey_hint = 0 + mbottom
        # get relative unit surface
        uw_hint = (1. / layout_cols) * el_hint
        uh_hint = (1. / layout_rows) * eh_hint
        layout_geometry['U_HINT'] = (uw_hint, uh_hint)
        # calculate individual key RELATIVE surface and pos (without key
        # margin); rows are laid out top to bottom.
        current_y_hint = ey_hint + eh_hint
        for line_nb in range(1, layout_rows + 1):
            current_y_hint -= uh_hint
            # get line_name
            line_name = '%s_%d' % (self.layout_mode, line_nb)
            line_hint = 'LINE_HINT_%d' % line_nb
            layout_geometry[line_hint] = []
            current_x_hint = ex_hint
            # go through the list of keys (tuples of 4); key[3] is the
            # key's width in column units.
            for key in layout[line_name]:
                # calculate relative pos, size
                layout_geometry[line_hint].append([
                    (current_x_hint, current_y_hint),
                    (key[3] * uw_hint, uh_hint)])
                current_x_hint += key[3] * uw_hint
        self.layout_geometry = layout_geometry
    def refresh_keys(self):
        """(internal) Convert the relative key hints into absolute pixel
        rectangles stored under ``LINE_<n>`` (applying
        :data:`key_margin`), then redraw all keys."""
        layout = self.available_layouts[self.layout]
        layout_rows = layout['rows']
        layout_geometry = self.layout_geometry
        w, h = self.size
        kmtop, kmright, kmbottom, kmleft = self.key_margin
        uw_hint, uh_hint = layout_geometry['U_HINT']
        for line_nb in range(1, layout_rows + 1):
            llg = layout_geometry['LINE_%d' % line_nb] = []
            llg_append = llg.append
            for key in layout_geometry['LINE_HINT_%d' % line_nb]:
                x_hint, y_hint = key[0]
                w_hint, h_hint = key[1]
                kx = x_hint * w
                ky = y_hint * h
                kw = w_hint * w
                kh = h_hint * h
                # now adjust, considering the key margin
                kx = int(kx + kmleft)
                ky = int(ky + kmbottom)
                kw = int(kw - kmleft - kmright)
                kh = int(kh - kmbottom - kmtop)
                pos = (kx, ky)
                size = (kw, kh)
                llg_append((pos, size))
        self.layout_geometry = layout_geometry
        self.draw_keys()
def draw_keys(self):
layout = self.available_layouts[self.layout]
layout_rows = layout['rows']
layout_geometry = self.layout_geometry
layout_mode = self.layout_mode
# draw background
w, h = self.size
background = resource_find(self.background_disabled
if self.disabled else
self.background)
texture = Image(background, mipmap=True).texture
self.background_key_layer.clear()
with self.background_key_layer:
Color(*self.background_color)
BorderImage(texture=texture, size=self.size,
border=self.background_border)
# XXX seperate drawing the keys and the fonts to avoid
# XXX reloading the texture each time
# first draw keys without the font
key_normal = resource_find(self.key_background_disabled_normal
if self.disabled else
self.key_background_normal)
texture = Image(key_normal, mipmap=True).texture
with self.background_key_layer:
for line_nb in range(1, layout_rows + 1):
for pos, size in layout_geometry['LINE_%d' % line_nb]:
BorderImage(texture=texture, pos=pos, size=size,
border=self.key_border)
# then draw the text
# calculate font_size
font_size = int(w) / 46
# draw
for line_nb in range(1, layout_rows + 1):
key_nb = 0
for pos, size in layout_geometry['LINE_%d' % line_nb]:
# retrieve the relative text
text = layout[layout_mode + '_' + str(line_nb)][key_nb][0]
l = Label(text=text, font_size=font_size, pos=pos, size=size,
font_name=self.font_name)
self.add_widget(l)
key_nb += 1
    def on_key_down(self, *largs):
        """Default handler for the ``on_key_down`` event (no-op).

        Dispatched by :meth:`process_key_on` with
        (keycode, internal, modifiers).
        """
        pass
    def on_key_up(self, *largs):
        """Default handler for the ``on_key_up`` event (no-op).

        Dispatched by :meth:`process_key_up` with
        (keycode, internal, modifiers).
        """
        pass
def get_key_at_pos(self, x, y):
w, h = self.size
x_hint = x / w
# focus on the surface without margins
layout_geometry = self.layout_geometry
layout = self.available_layouts[self.layout]
layout_rows = layout['rows']
mtop, mright, mbottom, mleft = self.margin_hint
# get the line of the layout
e_height = h - (mbottom + mtop) * h # efficient height in pixels
line_height = e_height / layout_rows # line height in px
y = y - mbottom * h
line_nb = layout_rows - int(y / line_height)
if line_nb > layout_rows:
line_nb = layout_rows
if line_nb < 1:
line_nb = 1
# get the key within the line
key_index = ''
current_key_index = 0
for key in layout_geometry['LINE_HINT_%d' % line_nb]:
if x_hint >= key[0][0] and x_hint < key[0][0] + key[1][0]:
key_index = current_key_index
break
else:
current_key_index += 1
if key_index == '':
return None
# get the full character
key = layout['%s_%d' % (self.layout_mode, line_nb)][key_index]
return [key, (line_nb, key_index)]
def collide_margin(self, x, y):
'''Do a collision test, and return True if the (x, y) is inside the
vkeyboard margin.
'''
mtop, mright, mbottom, mleft = self.margin_hint
x_hint = x / self.width
y_hint = y / self.height
if x_hint > mleft and x_hint < 1. - mright \
and y_hint > mbottom and y_hint < 1. - mtop:
return False
return True
    def process_key_on(self, touch):
        """Handle a touch landing on a key.

        Updates modifier state (shift/capslock), switches layout for the
        'layout' key, dispatches ``on_key_down`` and highlights the key.

        :param touch: touch event; its local position selects the key.
        """
        x, y = self.to_local(*touch.pos)
        key = self.get_key_at_pos(x, y)
        if not key:
            return
        key_data = key[0]
        displayed_char, internal, special_char, size = key_data
        line_nb, key_index = key[1]
        # save pressed key on the touch
        ud = touch.ud[self.uid] = {}
        ud['key'] = key
        # for caps lock or shift only:
        uid = touch.uid
        if special_char is not None:
            if special_char == 'capslock':
                self.have_capslock = not self.have_capslock
                # -1 is a sentinel uid so the capslock highlight survives
                # the end of this touch (see process_key_up).
                uid = -1
            elif special_char == 'shift':
                self.have_shift = True
            elif special_char == 'layout':
                self.change_layout()
        # send info to the bus
        b_keycode = special_char
        b_modifiers = self._get_modifiers()
        self.dispatch('on_key_down', b_keycode, internal, b_modifiers)
        # save key as an active key for drawing
        self.active_keys[uid] = key[1]
        self.refresh_active_keys_layer()
    def process_key_up(self, touch):
        """Handle a touch release on a key.

        Dispatches ``on_key_up``, clears the pressed-key highlight and
        releases shift; the capslock highlight is kept while engaged.
        """
        uid = touch.uid
        if self.uid not in touch.ud:
            return
        # save pressed key on the touch
        # (stored by process_key_on as [key_data, (line_nb, key_index)])
        key_data, key = touch.ud[self.uid]['key']
        displayed_char, internal, special_char, size = key_data
        # send info to the bus
        b_keycode = special_char
        b_modifiers = self._get_modifiers()
        self.dispatch('on_key_up', b_keycode, internal, b_modifiers)
        if special_char == 'capslock':
            # Capslock presses were stored under the -1 sentinel uid.
            uid = -1
        if uid in self.active_keys:
            self.active_keys.pop(uid, None)
        if special_char == 'shift':
            self.have_shift = False
        if special_char == 'capslock' and self.have_capslock:
            # Keep the capslock key drawn as active while it is engaged.
            self.active_keys[-1] = key
        self.refresh_active_keys_layer()
def _get_modifiers(self):
ret = []
if self.have_shift:
ret.append('shift')
if self.have_capslock:
ret.append('capslock')
return ret
    def on_touch_down(self, touch):
        """Dispatch a touch: key presses are grabbed and handled here,
        touches on the margin fall through to the parent class handler.

        Returns True to consume the touch when it hits the keyboard.
        """
        x, y = touch.pos
        if not self.collide_point(x, y):
            return
        if self.disabled:
            return True
        x, y = self.to_local(x, y)
        if not self.collide_margin(x, y):
            self.process_key_on(touch)
            # Grab so the matching up event comes back to us.
            touch.grab(self, exclusive=True)
        else:
            super(VKeyboard, self).on_touch_down(touch)
        return True
    def on_touch_up(self, touch):
        """Release a grabbed key press, then defer to the parent handler."""
        if touch.grab_current is self:
            self.process_key_up(touch)
        return super(VKeyboard, self).on_touch_up(touch)
if __name__ == '__main__':
    # Manual test: show a standalone azerty keyboard in a Kivy window.
    from kivy.base import runTouchApp
    vk = VKeyboard(layout='azerty')
    runTouchApp(vk)
| mit |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import sys
from pants.base.build_environment import get_buildroot
from pants.subsystem.subsystem import Subsystem
from pants.util.contextutil import open_tar, temporary_file
from pants.util.dirutil import chmod_plus_x
logger = logging.getLogger(__name__)
class ReproError(Exception):
  """Error raised while capturing repro information."""
class Reproducer(Subsystem):
  """Subsystem that can capture the workspace state for debugging."""

  options_scope = 'repro'

  @classmethod
  def register_options(cls, register):
    register('--capture', metavar='<repro_path>', default=None,
             help='Capture information about this pants run (including the entire workspace) '
                  'into a tar.gz file that can be used to help debug build problems.')
    register('--ignore', type=list,
             help='Any paths specified here will not be included in repro tarballs.')

  def create_repro(self):
    """Return a Repro instance for capturing a repro of the current workspace state.

    :return: a Repro instance, or None if no repro was requested.
    :rtype: `pants.bin.repro.Repro`
    """
    capture_path = self.get_options().capture
    if capture_path is None:
      return None
    buildroot = get_buildroot()
    # Always skip git metadata and the dist dir. Note: If we support SCMs
    # other than git in the future, add their (top-level only) metadata
    # dirs here if relevant.
    ignored = ['.git', os.path.relpath(self.get_options().pants_distdir, buildroot)]
    extra_ignores = self.get_options().ignore
    if extra_ignores:
      ignored.extend(extra_ignores)
    return Repro(capture_path, buildroot, ignored)
class Repro(object):
  """Captures the buildroot plus run metadata into a tar.gz for debugging."""

  def __init__(self, path, buildroot, ignore):
    """Create a Repro instance.

    :param string path: Write the captured repro data to this path.
    :param string buildroot: Capture the workspace at this buildroot.
    :param ignore: Ignore these top-level files/dirs under buildroot.
    :raises ReproError: If path is inside the buildroot or already exists.
    """
    path = os.path.expanduser(path)
    if os.path.realpath(path).startswith(buildroot):
      raise ReproError('Repro capture file location must be outside the build root.')
    # Normalize the extension so the file is recognizably a gzipped tar.
    if not path.endswith('tar.gz') and not path.endswith('.tgz'):
      path += '.tar.gz'
    if os.path.exists(path):
      raise ReproError('Repro capture file already exists: {}'.format(path))
    self._path = path
    self._buildroot = buildroot
    self._ignore = ignore

  def capture(self, run_info_dict):
    """Write the workspace plus a generated repro.sh script into the tarball.

    :param run_info_dict: Run metadata, written as comments into repro.sh.
    """
    # Force the scm discovery logging messages to appear before ours, so the startup delay
    # is properly associated in the user's mind with us and not with scm.
    logger.info('Capturing repro information to {}'.format(self._path))
    with open_tar(self._path, 'w:gz', dereference=True, compresslevel=6) as tarout:
      for relpath in os.listdir(self._buildroot):
        if relpath not in self._ignore:
          tarout.add(os.path.join(self._buildroot, relpath), relpath)
      with temporary_file() as tmpfile:
        tmpfile.write('# Pants repro captured for the following build:\n')
        for k, v in sorted(run_info_dict.items()):
          tmpfile.write('# {}: {}\n'.format(k, v))
        cmd_line = list(sys.argv)
        # Use 'pants' instead of whatever the full executable path was on the user's system.
        cmd_line[0] = 'pants'
        # Remove any repro-related flags. The repro-ing user won't want to call those.
        cmd_line = [x for x in cmd_line if not x.startswith('--repro-')]
        # Single-quote each arg so the script replays them verbatim.
        tmpfile.write("'" +"' '".join(cmd_line) + "'\n")
        tmpfile.flush()
        chmod_plus_x(tmpfile.name)
        tarout.add(tmpfile.name, 'repro.sh')

  def log_location_of_repro_file(self):
    """Log where the repro tarball was written (no-op if none requested)."""
    if not self._path:
      return  # No repro requested.
    logger.info('Captured repro information to {}'.format(self._path))
| apache-2.0 |
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from perf_insights import value as value_module
from perf_insights.value import run_info as run_info_module
class ValueTests(unittest.TestCase):
  """Round-trip tests: Value.FromDict(...).AsDict() preserves the dict."""

  def testDict(self):
    run_info = run_info_module.RunInfo('file:///a.json', '/a.json',
                                       metadata={'m': 1})
    d = {
        'run_id': run_info.run_id,
        'type': 'dict',
        'name': 'MyDictValue',
        'important': False,
        'value': {'a': 1, 'b': 'b'}
    }
    v = value_module.Value.FromDict(run_info, d)
    self.assertTrue(isinstance(v, value_module.DictValue))
    d2 = v.AsDict()
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(d, d2)

  def testFailure(self):
    run_info = run_info_module.RunInfo('file:///a.json', '/a.json',
                                       metadata={'m': 1})
    d = {
        'run_id': run_info.run_id,
        'type': 'failure',
        'name': 'Error',
        'important': False,
        'description': 'Some error message',
        'stack_str': 'Some stack string'
    }
    v = value_module.Value.FromDict(run_info, d)
    self.assertTrue(isinstance(v, value_module.FailureValue))
    d2 = v.AsDict()
    self.assertEqual(d, d2)
from bitmovin.resources import AbstractIdResource
class EncodingStatus(AbstractIdResource):
    """Status snapshot of an encoding, as returned by the Bitmovin API."""

    def __init__(self, status, number_of_segments=None, id_=None, messages=None, subtasks=None,
                 created_at=None, queued_at=None, finished_at=None, error_at=None, progress=None):
        super().__init__(id_=id_)
        self.status = status
        # Attribute kept camelCase for backwards compatibility with callers.
        self.numberOfSegments = number_of_segments
        self.messages = messages
        self.subtasks = subtasks
        self.created_at = created_at
        self.queued_at = queued_at
        self.finished_at = finished_at
        self.error_at = error_at
        self.progress = progress

    @classmethod
    def parse_from_json_object(cls, json_object):
        """Build an EncodingStatus from a decoded JSON dict.

        'status' is the only required field; all others are optional.
        """
        return EncodingStatus(
            status=json_object['status'],
            number_of_segments=json_object.get('numberOfSegments'),
            id_=json_object.get('id'),
            messages=json_object.get('messages'),
            subtasks=json_object.get('subtasks'),
            created_at=json_object.get('createdAt'),
            queued_at=json_object.get('queuedAt'),
            finished_at=json_object.get('finishedAt'),
            error_at=json_object.get('errorAt'),
            progress=json_object.get('progress'))
| unlicense |
# Copyright 2016 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interface for interacting with the Dell Storage Center array."""
import json
import os.path
import eventlet
from oslo_log import log as logging
from oslo_utils import excutils
import requests
from simplejson import scanner
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import utils
LOG = logging.getLogger(__name__)
class PayloadFilter(object):
    """Builds filter payloads for the Dell Storage API 15.3 and later.

    The filter list is nested under a top-level 'filter' key.
    """

    def __init__(self, filtertype='AND'):
        self.payload = {'filter': {'filterType': filtertype,
                                   'filters': []}}

    def append(self, name, val, filtertype='Equals'):
        # Silently skip unset values so callers can append optional fields.
        if val is None:
            return
        self.payload['filter']['filters'].append(
            {'attributeName': name,
             'attributeValue': val,
             'filterType': filtertype})
class LegacyPayloadFilter(object):
    """Builds filter payloads for the Dell Storage API 15.1 and 15.2.

    Unlike PayloadFilter, the filter list lives at the payload top level.
    """

    def __init__(self, filter_type='AND'):
        self.payload = {'filterType': filter_type,
                        'filters': []}

    def append(self, name, val, filtertype='Equals'):
        # Silently skip unset values so callers can append optional fields.
        if val is None:
            return
        self.payload['filters'].append(
            {'attributeName': name,
             'attributeValue': val,
             'filterType': filtertype})
class HttpClient(object):
    """HttpClient

    Helper for making the REST calls.

    NOTE(review): several methods take a parameter literally named
    ``async``; that is a reserved word in Python 3.7+, so this class as
    written targets older interpreters.
    """

    def __init__(self, host, port, user, password, verify, apiversion):
        """HttpClient handles the REST requests.

        :param host: IP address of the Dell Data Collector.
        :param port: Port the Data Collector is listening on.
        :param user: User account to login with.
        :param password: Password.
        :param verify: Boolean indicating whether certificate verification
                       should be turned on or not.
        :param apiversion: Dell API version.
        """
        self.baseUrl = 'https://%s:%s/' % (host, port)
        self.session = requests.Session()
        self.session.auth = (user, password)
        self.header = {}
        self.header['Content-Type'] = 'application/json; charset=utf-8'
        self.header['Accept'] = 'application/json'
        self.header['x-dell-api-version'] = apiversion
        self.verify = verify
        # Verify is a configurable option. So if this is false do not
        # spam the c-vol log.
        if not verify:
            requests.packages.urllib3.disable_warnings()

    def __enter__(self):
        # Support use as a context manager.
        return self

    def __exit__(self, type, value, traceback):
        # Close the requests session on context exit.
        self.session.close()

    def __formatUrl(self, url):
        """Return the fully qualified REST url for a resource path."""
        baseurl = self.baseUrl
        # Some url sources have api/rest and some don't. Handle.
        if 'api/rest' not in url:
            baseurl += 'api/rest/'
        return '%s%s' % (baseurl, url if url[0] != '/' else url[1:])

    def _get_header(self, async):
        """Return request headers, adding the async marker when requested."""
        if async:
            header = self.header.copy()
            header['async'] = 'True'
            return header
        return self.header

    def _get_async_url(self, asyncTask):
        """Handle a bug in SC API that gives a full url."""
        try:
            # strip off the https.
            url = asyncTask.get('returnValue').split(
                'https://')[1].split('/', 1)[1]
        except IndexError:
            # returnValue was not a full https url; use it as-is.
            url = asyncTask.get('returnValue')
        # Check for incomplete url error case.
        if url.endswith('/'):
            # Try to fix.
            id = asyncTask.get('instanceId')
            if id:
                # We have an id so note the error and add the id.
                LOG.debug('_get_async_url: url format error. (%s)', asyncTask)
                url = url + id
            else:
                # No hope.
                LOG.error(_LE('_get_async_url: Bogus return url %s'), url)
                raise exception.VolumeBackendAPIException(
                    message=_('_get_async_url: Invalid URL.'))
        return url

    def _wait_for_async_complete(self, asyncTask):
        """Poll an async task's url until the task completes.

        :param asyncTask: The AsyncTask dict from an accepted (202) call.
        :returns: The final REST response, or None.
        :raises: VolumeBackendAPIException on a polling failure.
        """
        url = self._get_async_url(asyncTask)
        while True and url:
            try:
                r = self.get(url)
                # We can leave this loop for a variety of reasons.
                # Nothing returned.
                # r.content blanks.
                # Object returned switches to one without objectType or with
                # a different objectType.
                if not StorageCenterApi._check_result(r):
                    LOG.debug('Async error:\n'
                              '\tstatus_code: %(code)s\n'
                              '\ttext: %(text)s\n',
                              {'code': r.status_code,
                               'text': r.text})
                else:
                    # In theory we have a good run.
                    if r.content:
                        content = r.json()
                        if content.get('objectType') == 'AsyncTask':
                            # Still running; follow the task and poll again.
                            url = self._get_async_url(content)
                            eventlet.sleep(1)
                            continue
                    else:
                        LOG.debug('Async debug: r.content is None')
                return r
            except Exception:
                methodname = asyncTask.get('methodName')
                objectTypeName = asyncTask.get('objectTypeName')
                msg = (_('Async error: Unable to retrieve %(obj)s '
                         'method %(method)s result')
                       % {'obj': objectTypeName, 'method': methodname})
                raise exception.VolumeBackendAPIException(message=msg)
        return None

    def _rest_ret(self, rest_response, async):
        """Return the response, waiting out async tasks when needed."""
        # If we made an async call and it was accepted
        # we wait for our response.
        if async:
            if rest_response.status_code == 202:
                asyncTask = rest_response.json()
                return self._wait_for_async_complete(asyncTask)
            else:
                LOG.debug('REST Async error command not accepted:\n'
                          '\tUrl: %(url)s\n'
                          '\tCode: %(code)d\n'
                          '\tReason: %(reason)s\n',
                          {'url': rest_response.url,
                           'code': rest_response.status_code,
                           'reason': rest_response.reason})
                msg = _('REST Async Error: Command not accepted.')
                raise exception.VolumeBackendAPIException(message=msg)
        return rest_response

    @utils.retry(exceptions=(requests.ConnectionError,
                             exception.DellDriverRetryableException))
    def get(self, url):
        """Perform a REST GET, retrying on connection errors."""
        LOG.debug('get: %(url)s', {'url': url})
        rest_response = self.session.get(self.__formatUrl(url),
                                         headers=self.header,
                                         verify=self.verify)

        # Raise a retryable exception so the decorator retries transient
        # 400 'Unhandled Exception' responses from the backend.
        if rest_response and rest_response.status_code == 400 and (
                'Unhandled Exception' in rest_response.text):
            raise exception.DellDriverRetryableException()
        return rest_response

    @utils.retry(exceptions=(requests.ConnectionError,))
    def post(self, url, payload, async=False):
        """Perform a REST POST, retrying on connection errors."""
        LOG.debug('post: %(url)s data: %(payload)s',
                  {'url': url,
                   'payload': payload})
        return self._rest_ret(self.session.post(
            self.__formatUrl(url),
            data=json.dumps(payload,
                            ensure_ascii=False).encode('utf-8'),
            headers=self._get_header(async),
            verify=self.verify), async)

    @utils.retry(exceptions=(requests.ConnectionError,))
    def put(self, url, payload, async=False):
        """Perform a REST PUT, retrying on connection errors."""
        LOG.debug('put: %(url)s data: %(payload)s',
                  {'url': url,
                   'payload': payload})
        return self._rest_ret(self.session.put(
            self.__formatUrl(url),
            data=json.dumps(payload,
                            ensure_ascii=False).encode('utf-8'),
            headers=self._get_header(async),
            verify=self.verify), async)

    @utils.retry(exceptions=(requests.ConnectionError,))
    def delete(self, url, payload=None, async=False):
        """Perform a REST DELETE, retrying on connection errors.

        A body is only sent when a payload is supplied.
        """
        LOG.debug('delete: %(url)s data: %(payload)s',
                  {'url': url, 'payload': payload})
        if payload:
            return self._rest_ret(
                self.session.delete(self.__formatUrl(url),
                                    data=json.dumps(payload,
                                                    ensure_ascii=False
                                                    ).encode('utf-8'),
                                    headers=self._get_header(async),
                                    verify=self.verify), async)

        return self._rest_ret(
            self.session.delete(self.__formatUrl(url),
                                headers=self._get_header(async),
                                verify=self.verify), async)
class StorageCenterApiHelper(object):
    """StorageCenterApiHelper

    Helper class for API access. Handles opening and closing the
    connection to the Dell REST API.
    """

    def __init__(self, config, active_backend_id, storage_protocol):
        """Initialize the helper from driver configuration.

        :param config: Driver configuration (san_* and dell_sc_* options).
        :param active_backend_id: ssn of the backend after a failover, or
                                  None when running against the primary.
        :param storage_protocol: 'FC' for FibreChannel, otherwise iSCSI.
        """
        self.config = config
        # Now that active_backend_id is set on failover.
        # Use that if set. Mark the backend as failed over.
        self.active_backend_id = active_backend_id
        self.primaryssn = self.config.dell_sc_ssn
        self.storage_protocol = storage_protocol
        self.san_ip = self.config.san_ip
        self.san_login = self.config.san_login
        self.san_password = self.config.san_password
        self.san_port = self.config.dell_sc_api_port
        # Default; updated by open_connection from the actual login.
        self.apiversion = '2.0'

    def _swap_credentials(self):
        """Change out to our secondary credentials

        Or back to our primary creds.
        :return: True if swapped. False if no alt credentials supplied.
        """
        if self.san_ip == self.config.san_ip:
            # Do we have a secondary IP and credentials?
            if (self.config.secondary_san_ip and
                    self.config.secondary_san_login and
                    self.config.secondary_san_password):
                self.san_ip = self.config.secondary_san_ip
                self.san_login = self.config.secondary_san_login
                self.san_password = self.config.secondary_san_password
            else:
                # Cannot swap.
                return False
            # Odds on this hasn't changed so no need to make setting this a
            # requirement.
            if self.config.secondary_sc_api_port:
                self.san_port = self.config.secondary_sc_api_port
        else:
            # These have to be set.
            self.san_ip = self.config.san_ip
            self.san_login = self.config.san_login
            self.san_password = self.config.san_password
            self.san_port = self.config.dell_sc_api_port
        return True

    def _setup_connection(self):
        """Attempts to open a connection to the storage center."""
        connection = StorageCenterApi(self.san_ip,
                                      self.san_port,
                                      self.san_login,
                                      self.san_password,
                                      self.config.dell_sc_verify_cert,
                                      self.apiversion)
        # This instance is for a single backend. That backend has a
        # few items of information we should save rather than passing them
        # about.
        connection.vfname = self.config.dell_sc_volume_folder
        connection.sfname = self.config.dell_sc_server_folder
        connection.excluded_domain_ips = self.config.excluded_domain_ip
        if not connection.excluded_domain_ips:
            connection.excluded_domain_ips = []
        # Our primary SSN doesn't change
        connection.primaryssn = self.primaryssn
        if self.storage_protocol == 'FC':
            connection.protocol = 'FibreChannel'
        # Set appropriate ssn and failover state.
        if self.active_backend_id:
            # active_backend_id is a string. Convert to int.
            connection.ssn = int(self.active_backend_id)
        else:
            connection.ssn = self.primaryssn
        # Make the actual connection to the DSM.
        connection.open_connection()
        return connection

    def open_connection(self):
        """Creates the StorageCenterApi object.

        :return: StorageCenterApi object.
        :raises: VolumeBackendAPIException
        """
        connection = None
        LOG.info(_LI('open_connection to %(ssn)s at %(ip)s'),
                 {'ssn': self.primaryssn,
                  'ip': self.config.san_ip})
        if self.primaryssn:
            try:
                """Open connection to REST API."""
                connection = self._setup_connection()
            except Exception:
                # If we have credentials to swap to we try it here.
                if self._swap_credentials():
                    connection = self._setup_connection()
                else:
                    with excutils.save_and_reraise_exception():
                        LOG.error(_LE('Failed to connect to the API. '
                                      'No backup DSM provided.'))
            # Save our api version for next time.
            if self.apiversion != connection.apiversion:
                LOG.info(_LI('open_connection: Updating API version to %s'),
                         connection.apiversion)
                self.apiversion = connection.apiversion
        else:
            raise exception.VolumeBackendAPIException(
                data=_('Configuration error: dell_sc_ssn not set.'))
        return connection
class StorageCenterApi(object):
"""StorageCenterApi
Handles calls to Dell SC and EM via the REST API interface.
Version history:
1.0.0 - Initial driver
1.1.0 - Added extra spec support for Storage Profile selection
1.2.0 - Added consistency group support.
2.0.0 - Switched to inheriting functional objects rather than volume
driver.
2.1.0 - Added support for ManageableVD.
2.2.0 - Added API 2.2 support.
2.3.0 - Added Legacy Port Mode Support
2.3.1 - Updated error handling.
2.4.0 - Added Replication V2 support.
2.4.1 - Updated Replication support to V2.1.
2.5.0 - ManageableSnapshotsVD implemented.
3.0.0 - ProviderID utilized.
3.1.0 - Failback supported.
3.2.0 - Live Volume support.
3.3.0 - Support for a secondary DSM.
"""
APIDRIVERVERSION = '3.3.0'
    def __init__(self, host, port, user, password, verify, apiversion):
        """This creates a connection to Dell SC or EM.

        :param host: IP address of the REST interface..
        :param port: Port the REST interface is listening on.
        :param user: User account to login with.
        :param password: Password.
        :param verify: Boolean indicating whether certificate verification
                       should be turned on or not.
        :param apiversion: Version used on login.
        """
        self.notes = 'Created by Dell Cinder Driver'
        self.repl_prefix = 'Cinder repl of '
        # ssn of the backend currently in use; may differ from primaryssn
        # after a failover.
        self.ssn = None
        # primaryssn is the ssn of the SC we are configured to use. This
        # doesn't change in the case of a failover.
        self.primaryssn = None
        self.failed_over = False
        # Folder names; defaults overwritten from config by the helper.
        self.vfname = 'openstack'
        self.sfname = 'openstack'
        self.excluded_domain_ips = []
        # Capability flags; refined after login (see open_connection).
        self.legacypayloadfilters = False
        self.consisgroups = True
        self.protocol = 'Iscsi'
        self.apiversion = apiversion
        # Nothing other than Replication should care if we are direct connect
        # or not.
        self.is_direct_connect = False
        self.client = HttpClient(host, port, user, password,
                                 verify, apiversion)
    def __enter__(self):
        # Support use as a context manager.
        return self
    def __exit__(self, type, value, traceback):
        # Always log out of the API when leaving the context.
        self.close_connection()
@staticmethod
def _check_result(rest_response):
"""Checks and logs API responses.
:param rest_response: The result from a REST API call.
:returns: ``True`` if success, ``False`` otherwise.
"""
if rest_response is not None:
if 200 <= rest_response.status_code < 300:
# API call was a normal success
return True
# Some versions return this as a dict.
try:
response_json = rest_response.json()
response_text = response_json.text['result']
except Exception:
# We do not care why that failed. Just use the text.
response_text = rest_response.text
LOG.debug('REST call result:\n'
'\tUrl: %(url)s\n'
'\tCode: %(code)d\n'
'\tReason: %(reason)s\n'
'\tText: %(text)s',
{'url': rest_response.url,
'code': rest_response.status_code,
'reason': rest_response.reason,
'text': response_text})
else:
LOG.warning(_LW('Failed to get REST call result.'))
return False
@staticmethod
def _path_to_array(path):
"""Breaks a path into a reversed string array.
:param path: Path to a folder on the Storage Center.
:return: A reversed array of each path element.
"""
array = []
while True:
(path, tail) = os.path.split(path)
if tail == '':
array.reverse()
return array
array.append(tail)
    def _first_result(self, blob):
        """Get the first result from the JSON return value.

        :param blob: Full return from a REST call.
        :return: The JSON encoded dict or the first item in a JSON encoded
                 list.
        """
        # Passing None/None makes _get_result take the first match.
        return self._get_result(blob, None, None)
def _get_result(self, blob, attribute, value):
"""Find the result specified by attribute and value.
If the JSON blob is a list then it will be searched for the attribute
and value combination. If attribute and value are not specified then
the first item is returned. If the JSON blob is a dict then it
will be returned so long as the dict matches the attribute and value
combination or attribute is None.
:param blob: The REST call's JSON response. Can be a list or dict.
:param attribute: The attribute we are looking for. If it is None
the first item in the list, or the dict, is returned.
:param value: The attribute value we are looking for. If the attribute
is None this value is ignored.
:returns: The JSON content in blob, the dict specified by matching the
attribute and value or None.
"""
rsp = None
content = self._get_json(blob)
if content is not None:
# We can get a list or a dict or nothing
if isinstance(content, list):
for r in content:
if attribute is None or r.get(attribute) == value:
rsp = r
break
elif isinstance(content, dict):
if attribute is None or content.get(attribute) == value:
rsp = content
elif attribute is None:
rsp = content
if rsp is None:
LOG.debug('Unable to find result where %(attr)s is %(val)s',
{'attr': attribute,
'val': value})
LOG.debug('Blob was %(blob)s', {'blob': blob.text})
return rsp
    def _get_json(self, blob):
        """Returns a dict from the JSON of a REST response.

        :param blob: The response from a REST call.
        :returns: JSON or None on error.
        """
        try:
            return blob.json()
        except AttributeError:
            # blob has no json() method -- not a response object.
            LOG.error(_LE('Error invalid json: %s'),
                      blob)
        except TypeError as ex:
            LOG.error(_LE('Error TypeError. %s'), ex)
        except scanner.JSONDecodeError as ex:
            # Body was not valid JSON.
            LOG.error(_LE('Error JSONDecodeError. %s'), ex)
        # We are here so this went poorly. Log our blob.
        LOG.debug('_get_json blob %s', blob)
        return None
    def _get_id(self, blob):
        """Returns the instanceId from a Dell REST object.

        :param blob: A Dell SC REST call's response.
        :returns: The instanceId from the Dell SC object or None on error.
        """
        try:
            if isinstance(blob, dict):
                # A dict without 'instanceId' quietly yields None here.
                return blob.get('instanceId')
        except AttributeError:
            LOG.error(_LE('Invalid API object: %s'),
                      blob)
        except TypeError as ex:
            LOG.error(_LE('Error TypeError. %s'), ex)
        except scanner.JSONDecodeError as ex:
            LOG.error(_LE('Error JSONDecodeError. %s'), ex)
        # Not a dict (or lookup blew up); log and return None.
        LOG.debug('_get_id failed: blob %s', blob)
        return None
def _get_payload_filter(self, filterType='AND'):
# 2.1 or earlier and we are talking LegacyPayloadFilters.
if self.legacypayloadfilters:
return LegacyPayloadFilter(filterType)
return PayloadFilter(filterType)
    def _check_version_fail(self, payload, response):
        """Retry the login after adopting the API version the DSM demands.

        :param payload: The login payload to resend.
        :param response: The failed login response to inspect.
        :returns: The retried login response, or the original response when
                  the failure was not a version mismatch.
        """
        try:
            # Is it even our error?
            result = self._get_json(response).get('result')
            if result and result.startswith(
                    'Invalid API version specified, '
                    'the version must be in the range ['):
                # We're looking for something very specific. The except
                # will catch any errors.
                # Update our version and update our header.
                self.apiversion = response.text.split('[')[1].split(',')[0]
                self.client.header['x-dell-api-version'] = self.apiversion
                LOG.debug('API version updated to %s', self.apiversion)
                # Give login another go.
                r = self.client.post('ApiConnection/Login', payload)
                return r
        except Exception:
            # We don't care what failed. The clues are already in the logs.
            # Just log a parsing error and move on.
            LOG.error(_LE('_check_version_fail: Parsing error.'))
        # Just eat this if it isn't a version error.
        return response
    def open_connection(self):
        """Authenticate with Dell REST interface.

        Also records the failover state and sniffs API capabilities
        (consistency groups, legacy filters) out of the login response.

        :raises: VolumeBackendAPIException.
        """
        # Set our fo state.
        self.failed_over = (self.primaryssn != self.ssn)

        # Login
        payload = {}
        payload['Application'] = 'Cinder REST Driver'
        payload['ApplicationVersion'] = self.APIDRIVERVERSION
        r = self.client.post('ApiConnection/Login', payload)
        if not self._check_result(r):
            # SC requires a specific version. See if we can get it.
            r = self._check_version_fail(payload, r)
            # Either we tried to login and have a new result or we are
            # just checking the same result. Either way raise on fail.
            if not self._check_result(r):
                raise exception.VolumeBackendAPIException(
                    data=_('Failed to connect to Dell REST API'))

        # We should be logged in. Try to grab the api version out of the
        # response.
        try:
            apidict = self._get_json(r)
            version = apidict['apiVersion']
            self.is_direct_connect = apidict['provider'] == 'StorageCenter'
            splitver = version.split('.')
            if splitver[0] == '2':
                if splitver[1] == '0':
                    # API 2.0: no consistency groups, legacy filters.
                    self.consisgroups = False
                    self.legacypayloadfilters = True
                elif splitver[1] == '1':
                    # API 2.1: legacy filters only.
                    self.legacypayloadfilters = True
            return
        except Exception:
            # Good return but not the login response we were expecting.
            # Log it and error out.
            LOG.error(_LE('Unrecognized Login Response: %s'), r)
    def close_connection(self):
        """Logout of Dell REST API."""
        r = self.client.post('ApiConnection/Logout', {})
        # 204 expected.
        self._check_result(r)
        # Drop the client; this object is done being used.
        self.client = None
def _use_provider_id(self, provider_id):
"""See if our provider_id points at our current backend.
provider_id is instanceId. The instanceId contains the ssn of the
StorageCenter it is hosted on. This must equal our current ssn or
it isn't valid.
:param provider_id: Provider_id from an volume or snapshot object.
:returns: True/False
"""
ret = False
if provider_id:
try:
if provider_id.split('.')[0] == six.text_type(self.ssn):
ret = True
except Exception:
LOG.error(_LE('_use_provider_id: provider_id %s is invalid!'),
provider_id)
return ret
    def find_sc(self, ssn=-1):
        """Check that the SC is there and being managed by EM.

        :param ssn: SSN to look for. -1 uses the default.
        :returns: The SC SSN.
        :raises: VolumeBackendAPIException
        """
        # We might be looking for another ssn. If not then
        # look for our default.
        ssn = self._vet_ssn(ssn)

        r = self.client.get('StorageCenter/StorageCenter')
        result = self._get_result(r, 'scSerialNumber', ssn)
        if result is None:
            LOG.error(_LE('Failed to find %(s)s. Result %(r)s'),
                      {'s': ssn,
                       'r': r})
            raise exception.VolumeBackendAPIException(
                data=_('Failed to find Storage Center'))
        return self._get_id(result)
# Folder functions
    def _create_folder(self, url, parent, folder, ssn=-1):
        """Creates folder under parent.

        This can create both to server and volume folders. The REST url
        sent in defines the folder type being created on the Dell Storage
        Center backend.

        :param url: This is the Dell SC rest url for creating the specific
                    (server or volume) folder type.
        :param parent: The instance ID of this folder's parent folder.
        :param folder: The folder name to be created. This is one level
                       deep.
        :param ssn: SSN to create the folder on. -1 uses the default.
        :returns: The REST folder object.
        """
        ssn = self._vet_ssn(ssn)

        scfolder = None
        payload = {}
        payload['Name'] = folder
        payload['StorageCenter'] = ssn
        # An empty parent means a root-level folder.
        if parent != '':
            payload['Parent'] = parent
        payload['Notes'] = self.notes

        r = self.client.post(url, payload, True)
        if self._check_result(r):
            scfolder = self._first_result(r)
        return scfolder
    def _create_folder_path(self, url, foldername, ssn=-1):
        """Creates a folder path from a fully qualified name.

        The REST url sent in defines the folder type being created on the
        Dell Storage Center backend. Thus this is generic to server and
        volume folders.

        :param url: This is the Dell SC REST url for creating the specific
                    (server or volume) folder type.
        :param foldername: The full folder name with path.
        :param ssn: SSN to create the folder path on. -1 uses the default.
        :returns: The REST folder object.
        """
        ssn = self._vet_ssn(ssn)

        path = self._path_to_array(foldername)
        folderpath = ''
        instanceId = ''
        # Technically the first folder is the root so that is already created.
        found = True
        scfolder = None
        for folder in path:
            folderpath = folderpath + folder
            # If the last was found see if this part of the path exists too
            if found:
                listurl = url + '/GetList'
                scfolder = self._find_folder(listurl, folderpath, ssn)
                if scfolder is None:
                    found = False
            # We didn't find it so create it
            if found is False:
                scfolder = self._create_folder(url, instanceId, folder, ssn)
            # If we haven't found a folder or created it then leave
            if scfolder is None:
                LOG.error(_LE('Unable to create folder path %s'), folderpath)
                break
            # Next part of the path will need this
            instanceId = self._get_id(scfolder)
            folderpath = folderpath + '/'
        return scfolder
def _find_folder(self, url, foldername, ssn=-1):
"""Find a folder on the SC using the specified url.
Most of the time the folder will already have been created so
we look for the end folder and check that the rest of the path is
right.
The REST url sent in defines the folder type being created on the Dell
Storage Center backend. Thus this is generic to server and volume
folders.
:param url: The portion of the url after the base url (see http class)
to use for this operation. (Can be for Server or Volume
folders.)
:param foldername: Full path to the folder we are looking for.
:returns: Dell folder object.
"""
ssn = self._vet_ssn(ssn)
pf = self._get_payload_filter()
pf.append('scSerialNumber', ssn)
basename = os.path.basename(foldername)
pf.append('Name', basename)
# If we have any kind of path we throw it into the filters.
folderpath = os.path.dirname(foldername)
if folderpath != '':
# SC convention is to end with a '/' so make sure we do.
folderpath += '/'
pf.append('folderPath', folderpath)
folder = None
r = self.client.post(url, pf.payload)
if self._check_result(r):
folder = self._get_result(r, 'folderPath', folderpath)
return folder
def _find_volume_folder(self, create=False, ssn=-1):
"""Looks for the volume folder where backend volumes will be created.
Volume folder is specified in the cindef.conf. See __init.
:param create: If True will create the folder if not found.
:returns: Folder object.
"""
folder = self._find_folder('StorageCenter/ScVolumeFolder/GetList',
self.vfname, ssn)
# Doesn't exist? make it
if folder is None and create is True:
folder = self._create_folder_path('StorageCenter/ScVolumeFolder',
self.vfname, ssn)
return folder
    def _init_volume(self, scvolume):
        """Initializes the volume.

        Maps the volume to a random server and immediately unmaps
        it. This initializes the volume.

        Don't wig out if this fails.

        :param scvolume: Dell Volume object.
        :return: Nothing.  Failure is logged, never raised.
        """
        pf = self._get_payload_filter()
        pf.append('scSerialNumber', scvolume.get('scSerialNumber'))
        r = self.client.post('StorageCenter/ScServer/GetList', pf.payload)
        if self._check_result(r):
            scservers = self._get_json(r)
            # Sort through the servers looking for one with connectivity.
            for scserver in scservers:
                # This needs to be either a physical or virtual server.
                # Outside of tempest tests this should not matter as we only
                # "init" a volume to allow snapshotting of an empty volume.
                if scserver.get('status', '').lower() != 'down':
                    # Map to actually create the volume
                    self.map_volume(scvolume, scserver)
                    # We have changed the volume so grab a new copy of it.
                    scvolume = self.get_volume(self._get_id(scvolume))
                    self.unmap_volume(scvolume, scserver)
                    return
        # We didn't map/unmap the volume. So no initialization done.
        # Warn the user before we leave. Note that this is almost certainly
        # a tempest test failure we are trying to catch here. A snapshot
        # has likely been attempted before the volume has been instantiated
        # on the Storage Center. In the real world no one will snapshot
        # a volume without first putting some data in that volume.
        LOG.warning(_LW('Volume %s initialization failure.'), scvolume['name'])
def _find_storage_profile(self, storage_profile):
"""Looks for a Storage Profile on the array.
Storage Profiles determine tiering settings. If not specified a volume
will use the Default storage profile.
:param storage_profile: The Storage Profile name to find with any
spaces stripped.
:returns: The Storage Profile object or None.
"""
if not storage_profile:
return None
# Since we are stripping out spaces for convenience we are not
# able to just filter on name. Need to get all Storage Profiles
# and look through for the one we want. Never many profiles, so
# this doesn't cause as much overhead as it might seem.
storage_profile = storage_profile.replace(' ', '').lower()
pf = self._get_payload_filter()
pf.append('scSerialNumber', self.ssn)
r = self.client.post('StorageCenter/ScStorageProfile/GetList',
pf.payload)
if self._check_result(r):
profiles = self._get_json(r)
for profile in profiles:
# Look for the stripped, case insensitive match
name = profile.get('name', '').replace(' ', '').lower()
if name == storage_profile:
return profile
return None
def _find_user_replay_profiles(self):
"""Find user default profiles.
Note that this only deals with standard and not cg profiles.
:return: List of replay profiles.
"""
user_prefs = self._get_user_preferences()
if user_prefs:
profileids = [profile['instanceId'] for profile in
user_prefs['replayProfileList']]
return profileids
return []
def _find_daily_replay_profile(self):
"""Find the system replay profile named "Daily".
:return: Profile instanceId or None.
"""
pf = self._get_payload_filter()
pf.append('scSerialNumber', self.ssn)
pf.append('instanceName', 'Daily')
r = self.client.post('StorageCenter/ScReplayProfile/GetList',
pf.payload)
if self._check_result(r):
profiles = self._get_json(r)
if profiles:
return profiles[0]['instanceId']
return None
def _find_replay_profiles(self, replay_profile_string):
"""Find our replay profiles.
Note that if called on volume creation the removeids list can be safely
ignored.
:param replay_profile_string: Comma separated list of profile names.
:return: List replication profiles to use, List to remove.
:raises VolumeBackendAPIException: If we can't find our profiles.
"""
addids = []
removeids = []
replay_profiles = []
if replay_profile_string:
replay_profiles = replay_profile_string.split(',')
# Most of the time they will not specify this so don't call anything.
if replay_profiles:
pf = self._get_payload_filter()
pf.append('scSerialNumber', self.ssn)
pf.append('type', 'Standard')
r = self.client.post('StorageCenter/ScReplayProfile/GetList',
pf.payload)
if self._check_result(r):
profiles = self._get_json(r)
for profile in profiles:
if replay_profiles.count(profile['name']) > 0:
addids.append(profile['instanceId'])
else:
# in the volume.
removeids.append(profile['instanceId'])
# Check that we've found what we are looking for if anything
if len(addids) != len(replay_profiles):
msg = (_('Unable to locate specified replay profiles %s ') %
replay_profile_string)
raise exception.VolumeBackendAPIException(data=msg)
return addids, removeids
def update_replay_profiles(self, scvolume, replay_profile_string):
"""Update our replay profiles.
If the replay_profile_string is empty we look for the user's default
profiles. If those aren't found we look for the Daily profile.
Note that this is in addition to the CG profiles which we do not touch.
:param scvolume: SC Volume object.
:param replay_profile_string: Comma separated string of replay profile
names.
:return: True/False.
"""
# Find our replay_profiles.
addids, removeids = self._find_replay_profiles(replay_profile_string)
# We either found what we were looking for.
# If we are clearing out our ids then find a default.
if not addids:
# if no replay profiles specified we must be clearing out.
addids = self._find_user_replay_profiles()
if not addids:
addids = [self._find_daily_replay_profile()]
# Do any removals first.
for id in removeids:
# We might have added to the addids list after creating removeids.
# User preferences or the daily profile could have been added.
# If our id is in both lists just skip it and remove it from
# The add list.
if addids.count(id):
addids.remove(id)
elif not self._update_volume_profiles(
scvolume, addid=None, removeid=id):
return False
# Add anything new.
for id in addids:
if not self._update_volume_profiles(
scvolume, addid=id, removeid=None):
return False
return True
    def create_volume(self, name, size, storage_profile=None,
                      replay_profile_string=None):
        """Creates a new volume on the Storage Center.

        It will create it in a folder called self.vfname. If self.vfname
        does not exist it will create it. If it cannot create it
        the volume will be created in the root.

        :param name: Name of the volume to be created on the Dell SC backend.
                     This is the cinder volume ID.
        :param size: The size of the volume to be created in GB.
        :param storage_profile: Optional storage profile to set for the
                                volume.
        :param replay_profile_string: Optional replay profile to set for
                                      the volume.
        :returns: Dell Volume object or None.
        :raises VolumeBackendAPIException: If a requested storage or replay
                                           profile cannot be found.
        """
        # NOTE: the format string omits %(replay)s; the extra mapping key is
        # ignored by %-style formatting so this is harmless.
        LOG.debug('create_volume: %(name)s %(ssn)s %(folder)s %(profile)s',
                  {'name': name,
                   'ssn': self.ssn,
                   'folder': self.vfname,
                   'profile': storage_profile,
                   'replay': replay_profile_string
                   })
        # Find our folder
        folder = self._find_volume_folder(True)
        # If we actually have a place to put our volume create it
        if folder is None:
            LOG.warning(_LW('Unable to create folder %s'), self.vfname)
        # See if we need a storage profile
        profile = self._find_storage_profile(storage_profile)
        if storage_profile and profile is None:
            msg = _('Storage Profile %s not found.') % storage_profile
            raise exception.VolumeBackendAPIException(data=msg)
        # Find our replay_profiles.
        addids, removeids = self._find_replay_profiles(replay_profile_string)
        # Init our return.
        scvolume = None
        # Create the volume
        payload = {}
        payload['Name'] = name
        payload['Notes'] = self.notes
        payload['Size'] = '%d GB' % size
        payload['StorageCenter'] = self.ssn
        if folder is not None:
            payload['VolumeFolder'] = self._get_id(folder)
        if profile:
            payload['StorageProfile'] = self._get_id(profile)
        # This is a new volume so there is nothing to remove.
        if addids:
            payload['ReplayProfileList'] = addids
        r = self.client.post('StorageCenter/ScVolume', payload, True)
        if self._check_result(r):
            # Our volume should be in the return.
            scvolume = self._get_json(r)
            if scvolume:
                LOG.info(_LI('Created volume %(instanceId)s: %(name)s'),
                         {'instanceId': scvolume['instanceId'],
                          'name': scvolume['name']})
            else:
                LOG.error(_LE('ScVolume returned success with empty payload.'
                              ' Attempting to locate volume'))
                # In theory it is there since success was returned.
                # Try one last time to find it before returning.
                scvolume = self._search_for_volume(name)
        else:
            LOG.error(_LE('Unable to create volume on SC: %s'), name)
        return scvolume
def _get_volume_list(self, name, deviceid, filterbyvfname=True, ssn=-1):
"""Return the specified list of volumes.
:param name: Volume name.
:param deviceid: Volume device ID on the SC backend.
:param filterbyvfname: If set to true then this filters by the preset
folder name.
:param ssn: SSN to search on.
:return: Returns the scvolume list or None.
"""
ssn = self._vet_ssn(ssn)
result = None
# We need a name or a device ID to find a volume.
if name or deviceid:
pf = self._get_payload_filter()
pf.append('scSerialNumber', ssn)
if name is not None:
pf.append('Name', name)
if deviceid is not None:
pf.append('DeviceId', deviceid)
# set folderPath
if filterbyvfname:
vfname = (self.vfname if self.vfname.endswith('/')
else self.vfname + '/')
pf.append('volumeFolderPath', vfname)
r = self.client.post('StorageCenter/ScVolume/GetList', pf.payload)
if self._check_result(r):
result = self._get_json(r)
# We return None if there was an error and a list if the command
# succeeded. It might be an empty list.
return result
def _autofailback(self, lv):
# if we have a working replication state.
ret = False
LOG.debug('Attempting autofailback of %s', lv)
if (lv and lv['status'] == 'Up' and lv['replicationState'] == 'Up' and
lv['failoverState'] == 'Protected' and lv['secondaryStatus'] == 'Up'
and lv['primarySwapRoleState'] == 'NotSwapping'):
ret = self.swap_roles_live_volume(lv)
return ret
    def _find_volume_primary(self, provider_id, name):
        """Return the instance id of the primary volume.

        :param provider_id: instanceId of the volume.
        :param name: Volume name, used to look up the live volume.
        :return: instanceId of the primary volume (provider_id when there is
                 no live volume).
        """
        # if there is no live volume then we return our provider_id.
        primary_id = provider_id
        lv = self.get_live_volume(provider_id, name)
        # NOTE(review): primary_id still equals provider_id at this point;
        # the log reflects the pre-failback state.
        LOG.info(_LI('Volume %(provider)s at primary %(primary)s.'),
                 {'provider': provider_id, 'primary': primary_id})
        # If we have a live volume and are swapped and are not failed over
        # at least give failback a shot.
        if lv and self.is_swapped(provider_id, lv) and not self.failed_over:
            if self._autofailback(lv):
                # Refresh our view of the live volume after the swap.
                lv = self.get_live_volume(provider_id)
                LOG.info(_LI('After failback %s'), lv)
            if lv:
                primary_id = lv['primaryVolume']['instanceId']
        return primary_id
    def find_volume(self, name, provider_id, islivevol=False):
        """Find the volume by name or instanceId.

        We check if we can use provider_id before using it. If so then
        we expect to find it by provider_id.

        We also conclude our failover at this point. If we are failed over we
        run _import_one to rename the volume.

        :param name: Volume name.
        :param provider_id: instanceId of the volume if known.
        :param islivevol: Is this a live volume.
        :return: sc volume object or None.
        :raises VolumeBackendAPIException: if unable to import.
        """
        scvolume = None
        if islivevol:
            # Just get the primary from the sc live vol.
            primary_id = self._find_volume_primary(provider_id, name)
            scvolume = self.get_volume(primary_id)
        elif self._use_provider_id(provider_id):
            # just get our volume
            scvolume = self.get_volume(provider_id)
            # if we are failed over we need to check if we
            # need to import the failed over volume.
            if self.failed_over:
                if scvolume['name'] == self._repl_name(name):
                    # The volume still carries its replication name, so we
                    # finish failover by importing (renaming) it.
                    scvolume = self._import_one(scvolume, name)
                    if not scvolume:
                        msg = (_('Unable to complete failover of %s.')
                               % name)
                        raise exception.VolumeBackendAPIException(data=msg)
                    LOG.info(_LI('Imported %(fail)s to %(guid)s.'),
                             {'fail': self._repl_name(name),
                              'guid': name})
        else:
            # No? Then search for it.
            scvolume = self._search_for_volume(name)
        return scvolume
def _search_for_volume(self, name):
"""Search self.ssn for volume of name.
This searches the folder self.vfname (specified in the cinder.conf)
for the volume first. If not found it searches the entire array for
the volume.
:param name: Name of the volume to search for. This is the cinder
volume ID.
:returns: Dell Volume object or None if not found.
:raises VolumeBackendAPIException: If multiple copies are found.
"""
LOG.debug('Searching %(sn)s for %(name)s',
{'sn': self.ssn,
'name': name})
# Cannot find a volume without the name.
if name is None:
return None
# Look for our volume in our folder.
vollist = self._get_volume_list(name, None, True)
# If an empty list was returned they probably moved the volumes or
# changed the folder name so try again without the folder.
if not vollist:
LOG.debug('Cannot find volume %(n)s in %(v)s. Searching SC.',
{'n': name,
'v': self.vfname})
vollist = self._get_volume_list(name, None, False)
# If multiple volumes of the same name are found we need to error.
if len(vollist) > 1:
# blow up
msg = _('Multiple copies of volume %s found.') % name
raise exception.VolumeBackendAPIException(data=msg)
# We made it and should have a valid volume.
return None if not vollist else vollist[0]
def get_volume(self, provider_id):
"""Returns the scvolume associated with provider_id.
:param provider_id: This is the instanceId
:return: Dell SCVolume object.
"""
result = None
if provider_id:
r = self.client.get('StorageCenter/ScVolume/%s' % provider_id)
if self._check_result(r):
result = self._get_json(r)
return result
def delete_volume(self, name, provider_id=None):
"""Deletes the volume from the SC backend array.
If the volume cannot be found we claim success.
:param name: Name of the volume to search for. This is the cinder
volume ID.
:param provider_id: This is the instanceId
:returns: Boolean indicating success or failure.
"""
# No provider id? Then do a search.
if not provider_id:
vol = self._search_for_volume(name)
if vol:
provider_id = self._get_id(vol)
# If we have an id then delete the volume.
if provider_id:
r = self.client.delete('StorageCenter/ScVolume/%s' % provider_id,
async=True)
if not self._check_result(r):
msg = _('Error deleting volume %(ssn)s: %(volume)s') % {
'ssn': self.ssn,
'volume': provider_id}
raise exception.VolumeBackendAPIException(data=msg)
# json return should be true or false
return self._get_json(r)
# If we can't find the volume then it is effectively gone.
LOG.warning(_LW('delete_volume: unable to find volume '
'provider_id: %s'), provider_id)
return True
def _find_server_folder(self, create=False, ssn=-1):
"""Looks for the server folder on the Dell Storage Center.
This is the folder where a server objects for mapping volumes will be
created. Server folder is specified in cinder.conf. See __init.
:param create: If True will create the folder if not found.
:return: Folder object.
"""
ssn = self._vet_ssn(ssn)
folder = self._find_folder('StorageCenter/ScServerFolder/GetList',
self.sfname, ssn)
if folder is None and create is True:
folder = self._create_folder_path('StorageCenter/ScServerFolder',
self.sfname, ssn)
return folder
def _add_hba(self, scserver, wwnoriscsiname):
"""This adds a server HBA to the Dell server object.
The HBA is taken from the connector provided in initialize_connection.
The Dell server object is largely a container object for the list of
HBAs associated with a single server (or vm or cluster) for the
purposes of mapping volumes.
:param scserver: Dell server object.
:param wwnoriscsiname: The WWN or IQN to add to this server.
:returns: Boolean indicating success or failure.
"""
payload = {}
payload['HbaPortType'] = self.protocol
payload['WwnOrIscsiName'] = wwnoriscsiname
payload['AllowManual'] = True
r = self.client.post('StorageCenter/ScPhysicalServer/%s/AddHba'
% self._get_id(scserver), payload, True)
if not self._check_result(r):
LOG.error(_LE('_add_hba error: %(wwn)s to %(srvname)s'),
{'wwn': wwnoriscsiname,
'srvname': scserver['name']})
return False
return True
def _find_serveros(self, osname='Red Hat Linux 6.x', ssn=-1):
"""Returns the serveros instance id of the specified osname.
Required to create a Dell server object.
We do not know that we are Red Hat Linux 6.x but that works
best for Red Hat and Ubuntu. So we use that.
:param osname: The name of the OS to look for.
:param ssn: ssn of the backend SC to use. Default if -1.
:returns: InstanceId of the ScServerOperatingSystem object.
"""
ssn = self._vet_ssn(ssn)
pf = self._get_payload_filter()
pf.append('scSerialNumber', ssn)
r = self.client.post('StorageCenter/ScServerOperatingSystem/GetList',
pf.payload)
if self._check_result(r):
oslist = self._get_json(r)
for srvos in oslist:
name = srvos.get('name', 'nope')
if name.lower() == osname.lower():
# Found it return the id
return self._get_id(srvos)
LOG.warning(_LW('Unable to find appropriate OS %s'), osname)
return None
def create_server(self, wwnlist, serveros, ssn=-1):
"""Creates a server with multiple WWNS associated with it.
Same as create_server except it can take a list of HBAs.
:param wwnlist: A list of FC WWNs or iSCSI IQNs associated with this
server.
:param serveros: Name of server OS to use when creating the server.
:param ssn: ssn of the backend SC to use. Default if -1.
:returns: Dell server object.
"""
# Find our folder or make it
folder = self._find_server_folder(True, ssn)
# Create our server.
scserver = self._create_server('Server_' + wwnlist[0], folder,
serveros, ssn)
if not scserver:
return None
# Add our HBAs.
if scserver:
for wwn in wwnlist:
if not self._add_hba(scserver, wwn):
# We failed so log it. Delete our server and return None.
LOG.error(_LE('Error adding HBA %s to server'), wwn)
self._delete_server(scserver)
return None
return scserver
    def _create_server(self, servername, folder, serveros, ssn):
        """Creates a physical server object on the backend.

        :param servername: Name for the new server object.
        :param folder: Folder object to place the server in.  May be None,
                       in which case the server lands in the root.
        :param serveros: Name of the server OS to look up.
        :param ssn: ssn of the backend SC to use.
        :returns: Dell server object or None on failure.
        """
        ssn = self._vet_ssn(ssn)
        LOG.info(_LI('Creating server %s'), servername)
        payload = {}
        payload['Name'] = servername
        payload['StorageCenter'] = ssn
        payload['Notes'] = self.notes
        payload['AlertOnConnectivity'] = False
        # We pick Red Hat Linux 6.x because it supports multipath and
        # will attach luns to paths as they are found.
        scserveros = self._find_serveros(serveros, ssn)
        if not scserveros:
            # Requested OS not found; retry with the default OS name.
            scserveros = self._find_serveros(ssn=ssn)
        if scserveros is not None:
            payload['OperatingSystem'] = scserveros
        # At this point it doesn't matter if we have a folder or not.
        # Let it be in the root if the folder creation fails.
        if folder is not None:
            payload['ServerFolder'] = self._get_id(folder)
        # create our server
        r = self.client.post('StorageCenter/ScPhysicalServer', payload, True)
        if self._check_result(r):
            # Server was created
            scserver = self._first_result(r)
            LOG.info(_LI('SC server created %s'), scserver)
            return scserver
        LOG.error(_LE('Unable to create SC server %s'), servername)
        return None
def _vet_ssn(self, ssn):
"""Returns the default if a ssn was not set.
Added to support live volume as we aren't always on the primary ssn
anymore
:param ssn: ssn to check.
:return: Current ssn or the ssn sent down.
"""
if ssn == -1:
return self.ssn
return ssn
def find_server(self, instance_name, ssn=-1):
"""Hunts for a server on the Dell backend by instance_name.
The instance_name is the same as the server's HBA. This is the IQN or
WWN listed in the connector. If found, the server the HBA is attached
to, if any, is returned.
:param instance_name: instance_name is a FC WWN or iSCSI IQN from
the connector. In cinder a server is identified
by its HBA.
:param ssn: Storage center to search.
:returns: Dell server object or None.
"""
ssn = self._vet_ssn(ssn)
scserver = None
# We search for our server by first finding our HBA
hba = self._find_serverhba(instance_name, ssn)
# Once created hbas stay in the system. So it isn't enough
# that we found one it actually has to be attached to a
# server.
if hba is not None and hba.get('server') is not None:
pf = self._get_payload_filter()
pf.append('scSerialNumber', ssn)
pf.append('instanceId', self._get_id(hba['server']))
r = self.client.post('StorageCenter/ScServer/GetList', pf.payload)
if self._check_result(r):
scserver = self._first_result(r)
if scserver is None:
LOG.debug('Server (%s) not found.', instance_name)
return scserver
def _find_serverhba(self, instance_name, ssn):
"""Hunts for a server HBA on the Dell backend by instance_name.
Instance_name is the same as the IQN or WWN specified in the
connector.
:param instance_name: Instance_name is a FC WWN or iSCSI IQN from
the connector.
:param ssn: Storage center to search.
:returns: Dell server HBA object.
"""
scserverhba = None
# We search for our server by first finding our HBA
pf = self._get_payload_filter()
pf.append('scSerialNumber', ssn)
pf.append('instanceName', instance_name)
r = self.client.post('StorageCenter/ScServerHba/GetList', pf.payload)
if self._check_result(r):
scserverhba = self._first_result(r)
return scserverhba
def _find_domains(self, cportid):
"""Find the list of Dell domain objects associated with the cportid.
:param cportid: The Instance ID of the Dell controller port.
:returns: List of fault domains associated with this controller port.
"""
r = self.client.get('StorageCenter/ScControllerPort/%s/FaultDomainList'
% cportid)
if self._check_result(r):
domains = self._get_json(r)
return domains
LOG.error(_LE('Error getting FaultDomainList for %s'), cportid)
return None
def _find_initiators(self, scserver):
"""Returns a list of WWNs associated with the specified Dell server.
:param scserver: The Dell backend server object.
:returns: A list of WWNs associated with this server.
"""
initiators = []
r = self.client.get('StorageCenter/ScServer/%s/HbaList'
% self._get_id(scserver))
if self._check_result(r):
hbas = self._get_json(r)
for hba in hbas:
wwn = hba.get('instanceName')
if (hba.get('portType') == self.protocol and
wwn is not None):
initiators.append(wwn)
else:
LOG.error(_LE('Unable to find initiators'))
LOG.debug('_find_initiators: %s', initiators)
return initiators
def get_volume_count(self, scserver):
"""Returns the number of volumes attached to specified Dell server.
:param scserver: The Dell backend server object.
:returns: Mapping count. -1 if there was an error.
"""
r = self.client.get('StorageCenter/ScServer/%s/MappingList'
% self._get_id(scserver))
if self._check_result(r):
mappings = self._get_json(r)
return len(mappings)
# Panic mildly but do not return 0.
return -1
def _find_mappings(self, scvolume):
"""Find the Dell volume object mappings.
:param scvolume: Dell volume object.
:returns: A list of Dell mappings objects.
"""
mappings = []
if scvolume.get('active', False):
r = self.client.get('StorageCenter/ScVolume/%s/MappingList'
% self._get_id(scvolume))
if self._check_result(r):
mappings = self._get_json(r)
else:
LOG.error(_LE('_find_mappings: volume is not active'))
LOG.info(_LI('Volume mappings for %(name)s: %(mappings)s'),
{'name': scvolume.get('name'),
'mappings': mappings})
return mappings
def _find_mapping_profiles(self, scvolume):
"""Find the Dell volume object mapping profiles.
:param scvolume: Dell volume object.
:returns: A list of Dell mapping profile objects.
"""
mapping_profiles = []
r = self.client.get('StorageCenter/ScVolume/%s/MappingProfileList'
% self._get_id(scvolume))
if self._check_result(r):
mapping_profiles = self._get_json(r)
else:
LOG.error(_LE('Unable to find mapping profiles: %s'),
scvolume.get('name'))
LOG.debug(mapping_profiles)
return mapping_profiles
def _find_controller_port(self, cportid):
"""Finds the SC controller port object for the specified cportid.
:param cportid: The instanceID of the Dell backend controller port.
:returns: The controller port object.
"""
controllerport = None
r = self.client.get('StorageCenter/ScControllerPort/%s' % cportid)
if self._check_result(r):
controllerport = self._first_result(r)
LOG.debug('_find_controller_port: %s', controllerport)
return controllerport
    def find_wwns(self, scvolume, scserver):
        """Finds the lun and wwns of the mapped volume.

        :param scvolume: Storage Center volume object.
        :param scserver: Storage Center server object.
        :returns: Lun, wwns, initiator target map
        """
        lun = None  # our lun. We return the first lun.
        wwns = []  # list of targets
        itmap = {}  # dict of initiators and the associated targets
        # Make sure we know our server's initiators.  Only return
        # mappings that contain HBA for this server.
        initiators = self._find_initiators(scserver)
        # Get our volume mappings
        mappings = self._find_mappings(scvolume)
        # We check each of our mappings.  We want to return
        # the mapping we have been configured to use.
        for mapping in mappings:
            # Find the controller port for this mapping
            cport = mapping.get('controllerPort')
            controllerport = self._find_controller_port(self._get_id(cport))
            if controllerport is not None:
                # This changed case at one point or another.
                # Look for both keys.
                wwn = controllerport.get('wwn', controllerport.get('WWN'))
                if wwn:
                    serverhba = mapping.get('serverHba')
                    if serverhba:
                        hbaname = serverhba.get('instanceName')
                        if hbaname in initiators:
                            if itmap.get(hbaname) is None:
                                itmap[hbaname] = []
                            itmap[hbaname].append(wwn)
                            wwns.append(wwn)
                            mappinglun = mapping.get('lun')
                            if lun is None:
                                lun = mappinglun
                            elif lun != mappinglun:
                                # Mixed luns across mappings; we keep the
                                # first one seen and just warn.
                                LOG.warning(_LW('Inconsistent Luns.'))
                        else:
                            LOG.debug('%s not found in initiator list',
                                      hbaname)
                    else:
                        LOG.warning(_LW('_find_wwn: serverhba is None.'))
                else:
                    LOG.warning(_LW('_find_wwn: Unable to find port wwn.'))
            else:
                LOG.warning(_LW('_find_wwn: controllerport is None.'))
        LOG.info(_LI('_find_wwns-lun: %(lun)s wwns: %(wwn)s itmap: %(map)s'),
                 {'lun': lun,
                  'wwn': wwns,
                  'map': itmap})
        return lun, wwns, itmap
def _find_active_controller(self, scvolume):
"""Finds the controller on which the Dell volume is active.
There can be more than one Dell backend controller per Storage center
but a given volume can only be active on one of them at a time.
:param scvolume: Dell backend volume object.
:returns: Active controller ID.
"""
actvctrl = None
volconfig = self._get_volume_configuration(scvolume)
if volconfig:
controller = volconfig.get('controller')
actvctrl = self._get_id(controller)
else:
LOG.error(_LE('Unable to retrieve VolumeConfiguration: %s'),
self._get_id(scvolume))
LOG.debug('_find_active_controller: %s', actvctrl)
return actvctrl
def _get_controller_id(self, mapping):
# The mapping lists the associated controller.
return self._get_id(mapping.get('controller'))
def _get_domains(self, mapping):
# Return a list of domains associated with this controller port.
return self._find_domains(self._get_id(mapping.get('controllerPort')))
def _get_iqn(self, mapping):
# Get our iqn from the controller port listed in our our mapping.
iqn = None
cportid = self._get_id(mapping.get('controllerPort'))
controllerport = self._find_controller_port(cportid)
if controllerport:
iqn = controllerport.get('iscsiName')
LOG.debug('_get_iqn: %s', iqn)
return iqn
def _is_virtualport_mode(self):
isvpmode = False
r = self.client.get('StorageCenter/ScConfiguration/%s' % self.ssn)
if self._check_result(r):
scconfig = self._get_json(r)
if scconfig and scconfig['iscsiTransportMode'] == 'VirtualPort':
isvpmode = True
return isvpmode
def _find_controller_port_iscsi_config(self, cportid):
"""Finds the SC controller port object for the specified cportid.
:param cportid: The instanceID of the Dell backend controller port.
:returns: The controller port object.
"""
controllerport = None
r = self.client.get(
'StorageCenter/ScControllerPortIscsiConfiguration/%s' % cportid)
if self._check_result(r):
controllerport = self._first_result(r)
else:
LOG.error(_LE('_find_controller_port_iscsi_config: '
'Error finding configuration: %s'), cportid)
return controllerport
def find_iscsi_properties(self, scvolume):
"""Finds target information for a given Dell scvolume object mapping.
The data coming back is both the preferred path and all the paths.
:param scvolume: The dell sc volume object.
:returns: iSCSI property dictionary.
:raises: VolumeBackendAPIException
"""
LOG.debug('find_iscsi_properties: scvolume: %s', scvolume)
# Our mutable process object.
pdata = {'active': -1,
'up': -1}
# Our output lists.
portals = []
luns = []
iqns = []
# Process just looks for the best port to return.
def process(lun, iqn, address, port, status, active):
"""Process this mapping information.
:param lun: SCSI Lun.
:param iqn: iSCSI IQN address.
:param address: IP address.
:param port: IP Port number
:param readonly: Boolean indicating mapping is readonly.
:param status: String indicating mapping status. (Up is what we
are looking for.)
:param active: Boolean indicating whether this is on the active
controller or not.
:return: Nothing
"""
if self.excluded_domain_ips.count(address) == 0:
# Make sure this isn't a duplicate.
newportal = address + ':' + six.text_type(port)
for idx, portal in enumerate(portals):
if portal == newportal and iqns[idx] == iqn:
LOG.debug('Skipping duplicate portal %(ptrl)s and'
'iqn %(iqn)s.', {'ptrl': portal, 'iqn': iqn})
return
# It isn't in the list so process it.
portals.append(newportal)
iqns.append(iqn)
luns.append(lun)
# We need to point to the best link.
# So state active and status up is preferred
# but we don't actually need the state to be
# up at this point.
if pdata['up'] == -1:
if active:
pdata['active'] = len(iqns) - 1
if status == 'Up':
pdata['up'] = pdata['active']
# Start by getting our mappings.
mappings = self._find_mappings(scvolume)
# We should have mappings at the time of this call but do check.
if len(mappings) > 0:
# In multipath (per Liberty) we will return all paths. But
# if multipath is not set (ip and port are None) then we need
# to return a mapping from the controller on which the volume
# is active. So find that controller.
actvctrl = self._find_active_controller(scvolume)
# Two different methods are used to find our luns and portals
# depending on whether we are in virtual or legacy port mode.
isvpmode = self._is_virtualport_mode()
# Trundle through our mappings.
for mapping in mappings:
# Don't return remote sc links.
msrv = mapping.get('server')
if msrv and msrv.get('objectType') == 'ScRemoteStorageCenter':
continue
# The lun, ro mode and status are in the mapping.
LOG.debug('find_iscsi_properties: mapping: %s', mapping)
lun = mapping.get('lun')
status = mapping.get('status')
# Get our IQN from our mapping.
iqn = self._get_iqn(mapping)
# Check if our controller ID matches our active controller ID.
isactive = True if (self._get_controller_id(mapping) ==
actvctrl) else False
# If we have an IQN and are in virtual port mode.
if isvpmode and iqn:
domains = self._get_domains(mapping)
if domains:
for dom in domains:
LOG.debug('find_iscsi_properties: domain: %s', dom)
ipaddress = dom.get('targetIpv4Address',
dom.get('wellKnownIpAddress'))
portnumber = dom.get('portNumber')
# We have all our information. Process this portal.
process(lun, iqn, ipaddress, portnumber,
status, isactive)
# Else we are in legacy mode.
elif iqn:
# Need to get individual ports
cportid = self._get_id(mapping.get('controllerPort'))
# Legacy mode stuff is in the ISCSI configuration object.
cpconfig = self._find_controller_port_iscsi_config(cportid)
# This should really never fail. Things happen so if it
# does just keep moving. Return what we can.
if cpconfig:
ipaddress = cpconfig.get('ipAddress')
portnumber = cpconfig.get('portNumber')
# We have all our information. Process this portal.
process(lun, iqn, ipaddress, portnumber,
status, isactive)
# We've gone through all our mappings.
# Make sure we found something to return.
if len(luns) == 0:
# Since we just mapped this and can't find that mapping the world
# is wrong so we raise exception.
raise exception.VolumeBackendAPIException(
data=_('Unable to find iSCSI mappings.'))
# Make sure we point to the best portal we can. This means it is
# on the active controller and, preferably, up. If it isn't return
# what we have.
if pdata['up'] != -1:
# We found a connection that is already up. Return that.
pdata['active'] = pdata['up']
elif pdata['active'] == -1:
# This shouldn't be able to happen. Maybe a controller went
# down in the middle of this so just return the first one and
# hope the ports are up by the time the connection is attempted.
LOG.debug('find_iscsi_properties: '
'Volume is not yet active on any controller.')
pdata['active'] = 0
# Make sure we have a good item at the top of the list.
iqns.insert(0, iqns.pop(pdata['active']))
portals.insert(0, portals.pop(pdata['active']))
luns.insert(0, luns.pop(pdata['active']))
data = {'target_discovered': False,
'target_iqn': iqns[0],
'target_iqns': iqns,
'target_portal': portals[0],
'target_portals': portals,
'target_lun': luns[0],
'target_luns': luns
}
LOG.debug('find_iscsi_properties: %s', data)
return data
    def map_volume(self, scvolume, scserver):
        """Maps the Dell backend volume object to the Dell server object.
        The check for the Dell server object existence is elsewhere; does not
        create the Dell server object.
        :param scvolume: Storage Center volume object.
        :param scserver: Storage Center server object.
        :returns: SC mapping profile or None
        """
        # Make sure we have what we think we have
        serverid = self._get_id(scserver)
        volumeid = self._get_id(scvolume)
        if serverid is not None and volumeid is not None:
            # If we have a mapping to our server return it here.
            mprofiles = self._find_mapping_profiles(scvolume)
            for mprofile in mprofiles:
                if self._get_id(mprofile.get('server')) == serverid:
                    # Already mapped to this server; treat as success.
                    LOG.info(_LI('Volume %(vol)s already mapped to %(srv)s'),
                             {'vol': scvolume['name'],
                              'srv': scserver['name']})
                    return mprofile
            # No? Then map it up.
            payload = {}
            payload['server'] = serverid
            # Allow the map even when the server's HBAs are down.
            payload['Advanced'] = {'MapToDownServerHbas': True}
            r = self.client.post('StorageCenter/ScVolume/%s/MapToServer'
                                 % volumeid, payload, True)
            if self._check_result(r):
                # We just return our mapping
                LOG.info(_LI('Volume %(vol)s mapped to %(srv)s'),
                         {'vol': scvolume['name'],
                          'srv': scserver['name']})
                return self._first_result(r)
        # Error out
        LOG.error(_LE('Unable to map %(vol)s to %(srv)s'),
                  {'vol': scvolume['name'],
                   'srv': scserver['name']})
        return None
    def unmap_volume(self, scvolume, scserver):
        """Unmaps the Dell volume object from the Dell server object.
        Deletes all mappings to a Dell server object, not just the ones on
        the path defined in cinder.conf.
        :param scvolume: Storage Center volume object.
        :param scserver: Storage Center server object.
        :returns: True or False.
        """
        rtn = True
        serverid = self._get_id(scserver)
        volumeid = self._get_id(scvolume)
        if serverid is not None and volumeid is not None:
            profiles = self._find_mapping_profiles(scvolume)
            for profile in profiles:
                prosrv = profile.get('server')
                # Only delete mappings that point at our server.
                if prosrv is not None and self._get_id(prosrv) == serverid:
                    r = self.client.delete('StorageCenter/ScMappingProfile/%s'
                                           % self._get_id(profile),
                                           async=True)
                    if self._check_result(r):
                        # Check our result in the json.
                        result = self._get_json(r)
                        # EM 15.1 and 15.2 return a boolean directly.
                        # 15.3 on up return it in a dict under 'result'.
                        if result is True or (type(result) is dict and
                                              result.get('result')):
                            LOG.info(
                                _LI('Volume %(vol)s unmapped from %(srv)s'),
                                {'vol': scvolume['name'],
                                 'srv': scserver['name']})
                            # This mapping was removed cleanly; try the next.
                            continue
                    LOG.error(_LE('Unable to unmap %(vol)s from %(srv)s'),
                              {'vol': scvolume['name'],
                               'srv': scserver['name']})
                    # 1 failed unmap is as good as 100.
                    # Fail it and leave
                    rtn = False
                    break
        # return true/false.
        return rtn
def get_storage_usage(self):
"""Gets the storage usage object from the Dell backend.
This contains capacity and usage information for the SC.
:returns: The SC storageusage object.
"""
storageusage = None
if self.ssn is not None:
r = self.client.get(
'StorageCenter/StorageCenter/%s/StorageUsage' % self.ssn)
if self._check_result(r):
storageusage = self._get_json(r)
return storageusage
    def create_replay(self, scvolume, replayid, expire):
        """Takes a snapshot of a volume.
        One could snap a volume before it has been activated, so activate
        by mapping and unmapping to a random server and let them. This
        should be a fail but the Tempest tests require it.
        :param scvolume: Volume to snapshot.
        :param replayid: Name to use for the snapshot. This is a portion of
                         the snapshot ID as we do not have space for the
                         entire GUID in the replay description.
        :param expire: Time in minutes before the replay expires. For most
                       snapshots this will be 0 (never expire) but if we are
                       cloning a volume we will snap it right before creating
                       the clone.
        :returns: The Dell replay object or None.
        """
        replay = None
        if scvolume is not None:
            if (scvolume.get('active') is not True or
                    scvolume.get('replayAllowed') is not True):
                # Inactive volumes cannot take a replay; activate first.
                self._init_volume(scvolume)
            payload = {}
            payload['description'] = replayid
            payload['expireTime'] = expire
            r = self.client.post('StorageCenter/ScVolume/%s/CreateReplay'
                                 % self._get_id(scvolume), payload, True)
            if self._check_result(r):
                replay = self._first_result(r)
        # Quick double check.
        if replay is None:
            LOG.warning(_LW('Unable to create snapshot %s'), replayid)
        # Return replay or None.
        return replay
    def find_replay(self, scvolume, replayid):
        """Searches for the replay by replayid.
        replayid is stored in the replay's description attribute.
        :param scvolume: Dell volume object.
        :param replayid: Name to search for. This is a portion of the
                         snapshot ID as we do not have space for the entire
                         GUID in the replay description.
        :returns: Dell replay object or None.
        """
        r = self.client.get('StorageCenter/ScVolume/%s/ReplayList'
                            % self._get_id(scvolume))
        try:
            replays = self._get_json(r)
            # This will be a list. If it isn't bail
            if isinstance(replays, list):
                for replay in replays:
                    # The only place to save our information with the public
                    # api is the description field which isn't quite long
                    # enough. So we check that our description is pretty much
                    # the max length and we compare that to the start of
                    # the snapshot id.
                    description = replay.get('description')
                    if (len(description) >= 30 and
                            replayid.startswith(description) is True and
                            replay.get('markedForExpiration') is not True):
                        # We found our replay so return it.
                        return replay
        except Exception:
            # NOTE(review): a replay with a None description also lands here
            # via TypeError; the catch-all appears intentional.
            LOG.error(_LE('Invalid ReplayList return: %s'),
                      r)
        # If we are here then we didn't find the replay so warn and leave.
        LOG.warning(_LW('Unable to find snapshot %s'),
                    replayid)
        return None
def manage_replay(self, screplay, replayid):
"""Basically renames the screplay and sets it to never expire.
:param screplay: DellSC object.
:param replayid: New name for replay.
:return: True on success. False on fail.
"""
if screplay and replayid:
payload = {}
payload['description'] = replayid
payload['expireTime'] = 0
r = self.client.put('StorageCenter/ScReplay/%s' %
self._get_id(screplay), payload, True)
if self._check_result(r):
return True
LOG.error(_LE('Error managing replay %s'),
screplay.get('description'))
return False
def unmanage_replay(self, screplay):
"""Basically sets the expireTime
:param screplay: DellSC object.
:return: True on success. False on fail.
"""
if screplay:
payload = {}
payload['expireTime'] = 1440
r = self.client.put('StorageCenter/ScReplay/%s' %
self._get_id(screplay), payload, True)
if self._check_result(r):
return True
LOG.error(_LE('Error unmanaging replay %s'),
screplay.get('description'))
return False
def delete_replay(self, scvolume, replayid):
"""Finds a Dell replay by replayid string and expires it.
Once marked for expiration we do not return the replay as a snapshot
even though it might still exist. (Backend requirements.)
:param scvolume: Dell volume object.
:param replayid: Name to search for. This is a portion of the snapshot
ID as we do not have space for the entire GUID in the
replay description.
:returns: Boolean for success or failure.
"""
ret = True
LOG.debug('Expiring replay %s', replayid)
# if we do not have the instanceid then we have to find the replay.
replay = self.find_replay(scvolume, replayid)
if replay is not None:
# expire our replay.
r = self.client.post('StorageCenter/ScReplay/%s/Expire' %
self._get_id(replay), {}, True)
ret = self._check_result(r)
# If we couldn't find it we call that a success.
return ret
    def create_view_volume(self, volname, screplay, replay_profile_string):
        """Creates a new volume named volname from the screplay.
        :param volname: Name of new volume. This is the cinder volume ID.
        :param screplay: Dell replay object from which to make a new volume.
        :param replay_profile_string: Profiles to be applied to the volume
        :returns: Dell volume object or None.
        """
        folder = self._find_volume_folder(True)
        # Find our replay_profiles.
        addids, removeids = self._find_replay_profiles(replay_profile_string)
        # payload is just the volume name and folder if we have one.
        payload = {}
        payload['Name'] = volname
        payload['Notes'] = self.notes
        if folder is not None:
            payload['VolumeFolder'] = self._get_id(folder)
        if addids:
            payload['ReplayProfileList'] = addids
        # NOTE(review): removeids is unused here; a brand new view volume
        # has no existing profiles to strip off.
        r = self.client.post('StorageCenter/ScReplay/%s/CreateView'
                             % self._get_id(screplay), payload, True)
        volume = None
        if self._check_result(r):
            volume = self._first_result(r)
        if volume is None:
            LOG.error(_LE('Unable to create volume %s from replay'),
                      volname)
        return volume
def create_cloned_volume(self, volumename, scvolume, replay_profile_list):
"""Creates a volume named volumename from a copy of scvolume.
This is done by creating a replay and then a view volume from
that replay. The replay is set to expire after an hour. It is only
needed long enough to create the volume. (1 minute should be enough
but we set an hour in case the world has gone mad.)
:param volumename: Name of new volume. This is the cinder volume ID.
:param scvolume: Dell volume object.
:param replay_profile_list: List of snapshot profiles.
:returns: The new volume's Dell volume object.
"""
replay = self.create_replay(scvolume, 'Cinder Clone Replay', 60)
if replay is not None:
return self.create_view_volume(volumename, replay,
replay_profile_list)
LOG.error(_LE('Error: unable to snap replay'))
return None
def expand_volume(self, scvolume, newsize):
"""Expands scvolume to newsize GBs.
:param scvolume: Dell volume object to be expanded.
:param newsize: The new size of the volume object.
:returns: The updated Dell volume object on success or None on failure.
"""
vol = None
payload = {}
payload['NewSize'] = '%d GB' % newsize
r = self.client.post('StorageCenter/ScVolume/%s/ExpandToSize'
% self._get_id(scvolume), payload, True)
if self._check_result(r):
vol = self._get_json(r)
# More info might be good.
if vol is not None:
LOG.debug('Volume expanded: %(name)s %(size)s',
{'name': vol['name'],
'size': vol['configuredSize']})
else:
LOG.error(_LE('Error expanding volume %s.'), scvolume['name'])
return vol
def rename_volume(self, scvolume, name):
"""Rename scvolume to name.
This is mostly used by update_migrated_volume.
:param scvolume: The Dell volume object to be renamed.
:param name: The new volume name.
:returns: Boolean indicating success or failure.
"""
payload = {}
payload['Name'] = name
r = self.client.put('StorageCenter/ScVolume/%s'
% self._get_id(scvolume),
payload, True)
if self._check_result(r):
return True
LOG.error(_LE('Error renaming volume %(original)s to %(name)s'),
{'original': scvolume['name'],
'name': name})
return False
    def update_storage_profile(self, scvolume, storage_profile):
        """Update a volume's Storage Profile.
        Changes the volume setting to use a different Storage Profile. If
        storage_profile is None, will reset to the default profile for the
        cinder user account.
        :param scvolume: The Storage Center volume to be updated.
        :param storage_profile: The requested Storage Profile name.
        :returns: True if successful, False otherwise.
        """
        prefs = self._get_user_preferences()
        if not prefs:
            return False
        if not prefs.get('allowStorageProfileSelection'):
            LOG.error(_LE('User does not have permission to change '
                          'Storage Profile selection.'))
            return False
        profile = self._find_storage_profile(storage_profile)
        if storage_profile:
            if not profile:
                LOG.error(_LE('Storage Profile %s was not found.'),
                          storage_profile)
                return False
        else:
            # Going from specific profile to the user default
            profile = prefs.get('storageProfile')
            if not profile:
                LOG.error(_LE('Default Storage Profile was not found.'))
                return False
        LOG.info(_LI('Switching volume %(vol)s to profile %(prof)s.'),
                 {'vol': scvolume['name'],
                  'prof': profile.get('name')})
        # The profile lives on the volume configuration object, not on
        # the volume itself.
        payload = {}
        payload['StorageProfile'] = self._get_id(profile)
        r = self.client.put('StorageCenter/ScVolumeConfiguration/%s'
                            % self._get_id(scvolume), payload, True)
        if self._check_result(r):
            return True
        LOG.error(_LE('Error changing Storage Profile for volume '
                      '%(original)s to %(name)s'),
                  {'original': scvolume['name'],
                   'name': storage_profile})
        return False
def _get_user_preferences(self):
"""Gets the preferences and defaults for this user.
There are a set of preferences and defaults for each user on the
Storage Center. This retrieves all settings for the current account
used by Cinder.
"""
r = self.client.get('StorageCenter/StorageCenter/%s/UserPreferences' %
self.ssn)
if self._check_result(r):
return self._get_json(r)
return {}
    def _delete_server(self, scserver):
        """Deletes scserver from the backend.
        Just give it a shot. If it fails it doesn't matter to cinder. This
        is generally used when a create_server call fails in the middle of
        creation. Cinder knows nothing of the servers objects on Dell backends
        so success or failure is purely an internal thing.
        Note that we do not delete a server object in normal operation.
        :param scserver: Dell server object to delete.
        :returns: Nothing. Only logs messages.
        """
        LOG.debug('ScServer delete %s', self._get_id(scserver))
        # Only servers the backend marks as deletable may be removed.
        if scserver.get('deleteAllowed') is True:
            r = self.client.delete('StorageCenter/ScServer/%s'
                                   % self._get_id(scserver), async=True)
            if self._check_result(r):
                LOG.debug('ScServer deleted.')
        else:
            LOG.debug('_delete_server: deleteAllowed is False.')
def find_replay_profile(self, name):
"""Finds the Dell SC replay profile object name.
:param name: Name of the replay profile object. This is the
consistency group id.
:return: Dell SC replay profile or None.
:raises: VolumeBackendAPIException
"""
self.cg_except_on_no_support()
pf = self._get_payload_filter()
pf.append('ScSerialNumber', self.ssn)
pf.append('Name', name)
r = self.client.post('StorageCenter/ScReplayProfile/GetList',
pf.payload)
if self._check_result(r):
profilelist = self._get_json(r)
if profilelist:
if len(profilelist) > 1:
LOG.error(_LE('Multiple replay profiles under name %s'),
name)
raise exception.VolumeBackendAPIException(
data=_('Multiple profiles found.'))
return profilelist[0]
return None
def create_replay_profile(self, name):
"""Creates a replay profile on the Dell SC.
:param name: The ID of the consistency group. This will be matched to
the name on the Dell SC.
:return: SC profile or None.
"""
self.cg_except_on_no_support()
profile = self.find_replay_profile(name)
if not profile:
payload = {}
payload['StorageCenter'] = self.ssn
payload['Name'] = name
payload['Type'] = 'Consistent'
payload['Notes'] = self.notes
r = self.client.post('StorageCenter/ScReplayProfile',
payload, True)
# 201 expected.
if self._check_result(r):
profile = self._first_result(r)
return profile
    def delete_replay_profile(self, profile):
        """Delete the replay profile from the Dell SC.
        :param profile: SC replay profile.
        :return: Nothing.
        :raises: VolumeBackendAPIException
        """
        self.cg_except_on_no_support()
        # Asynchronous delete; _check_result covers the queued job response.
        r = self.client.delete('StorageCenter/ScReplayProfile/%s' %
                               self._get_id(profile), async=True)
        if self._check_result(r):
            LOG.info(_LI('Profile %s has been deleted.'),
                     profile.get('name'))
        else:
            # We failed due to a failure to delete an existing profile.
            # This is reason to raise an exception.
            LOG.error(_LE('Unable to delete profile %s.'), profile.get('name'))
            raise exception.VolumeBackendAPIException(
                data=_('Error deleting replay profile.'))
def _get_volume_configuration(self, scvolume):
"""Get the ScVolumeConfiguration object.
:param scvolume: The Dell SC volume object.
:return: The SCVolumeConfiguration object or None.
"""
r = self.client.get('StorageCenter/ScVolume/%s/VolumeConfiguration' %
self._get_id(scvolume))
if self._check_result(r):
return self._first_result(r)
return None
    def _update_volume_profiles(self, scvolume, addid=None, removeid=None):
        """Either Adds or removes the listed profile from the SC volume.
        :param scvolume: Dell SC volume object.
        :param addid: Profile ID to be added to the SC volume configuration.
        :param removeid: ID to be removed to the SC volume configuration.
        :return: True/False on success/failure.
        """
        if scvolume:
            scvolumecfg = self._get_volume_configuration(scvolume)
            if scvolumecfg:
                profilelist = scvolumecfg.get('replayProfileList', [])
                newprofilelist = []
                # Do we have one to add? Start the list with it.
                if addid:
                    newprofilelist = [addid]
                # Re-add our existing profiles.
                for profile in profilelist:
                    profileid = self._get_id(profile)
                    # Make sure it isn't one we want removed and that we
                    # haven't already added it. (IE it isn't the addid.)
                    if (profileid != removeid and
                            newprofilelist.count(profileid) == 0):
                        newprofilelist.append(profileid)
                # Update our volume configuration.
                # The PUT replaces the whole profile list on the config.
                payload = {}
                payload['ReplayProfileList'] = newprofilelist
                r = self.client.put('StorageCenter/ScVolumeConfiguration/%s' %
                                    self._get_id(scvolumecfg), payload, True)
                # check result
                LOG.debug('_update_volume_profiles %s : %s : %s',
                          self._get_id(scvolume),
                          profilelist,
                          r)
                # Good return?
                if self._check_result(r):
                    return True
        return False
def _add_cg_volumes(self, profileid, add_volumes):
"""Trundles through add_volumes and adds the replay profile to them.
:param profileid: The ID of the replay profile.
:param add_volumes: List of Dell SC volume objects that are getting
added to the consistency group.
:return: True/False on success/failure.
"""
for vol in add_volumes:
scvolume = self.find_volume(vol['id'], vol['provider_id'])
if (self._update_volume_profiles(scvolume,
addid=profileid,
removeid=None)):
LOG.info(_LI('Added %s to cg.'), vol['id'])
else:
LOG.error(_LE('Failed to add %s to cg.'), vol['id'])
return False
return True
def _remove_cg_volumes(self, profileid, remove_volumes):
"""Removes the replay profile from the remove_volumes list of vols.
:param profileid: The ID of the replay profile.
:param remove_volumes: List of Dell SC volume objects that are getting
removed from the consistency group.
:return: True/False on success/failure.
"""
for vol in remove_volumes:
scvolume = self.find_volume(vol['id'], vol['provider_id'])
if (self._update_volume_profiles(scvolume,
addid=None,
removeid=profileid)):
LOG.info(_LI('Removed %s from cg.'), vol['id'])
else:
LOG.error(_LE('Failed to remove %s from cg.'), vol['id'])
return False
return True
def update_cg_volumes(self, profile, add_volumes=None,
remove_volumes=None):
"""Adds or removes the profile from the specified volumes
:param profile: Dell SC replay profile object.
:param add_volumes: List of volumes we are adding to the consistency
group. (Which is to say we are adding the profile
to this list of volumes.)
:param remove_volumes: List of volumes we are removing from the
consistency group. (Which is to say we are
removing the profile from this list of volumes.)
:return: True/False on success/failure.
"""
self.cg_except_on_no_support()
ret = True
profileid = self._get_id(profile)
if add_volumes:
LOG.info(_LI('Adding volumes to cg %s.'), profile['name'])
ret = self._add_cg_volumes(profileid, add_volumes)
if ret and remove_volumes:
LOG.info(_LI('Removing volumes from cg %s.'), profile['name'])
ret = self._remove_cg_volumes(profileid, remove_volumes)
return ret
def _init_cg_volumes(self, profileid):
"""Gets the cg volume list and maps/unmaps the non active volumes.
:param profileid: Replay profile identifier.
:return: Nothing
"""
r = self.client.get('StorageCenter/ScReplayProfile/%s/VolumeList' %
profileid)
if self._check_result(r):
vols = self._get_json(r)
for vol in vols:
if (vol.get('active') is not True or
vol.get('replayAllowed') is not True):
self._init_volume(vol)
    def snap_cg_replay(self, profile, replayid, expire):
        """Snaps a replay of a consistency group.
        :param profile: The name of the consistency group profile.
        :param replayid: The name of the replay.
        :param expire: Time in minutes before a replay expires. 0 means no
                       expiration.
        :returns: True if the CreateReplay request succeeded, False
                  otherwise.
        """
        self.cg_except_on_no_support()
        if profile:
            # We have to make sure these are snappable.
            self._init_cg_volumes(self._get_id(profile))
            # Succeed or fail we soldier on.
            payload = {}
            payload['description'] = replayid
            payload['expireTime'] = expire
            r = self.client.post('StorageCenter/ScReplayProfile/%s/'
                                 'CreateReplay'
                                 % self._get_id(profile), payload, True)
            if self._check_result(r):
                LOG.info(_LI('CreateReplay success %s'), replayid)
                return True
        return False
def _find_sc_cg(self, profile, replayid):
"""Finds the sc consistency group that matches replayid
:param profile: Dell profile object.
:param replayid: Name to search for. This is a portion of the
snapshot ID as we do not have space for the entire
GUID in the replay description.
:return: Consistency group object or None.
"""
self.cg_except_on_no_support()
r = self.client.get(
'StorageCenter/ScReplayProfile/%s/ConsistencyGroupList'
% self._get_id(profile))
if self._check_result(r):
cglist = self._get_json(r)
if cglist and isinstance(cglist, list):
for cg in cglist:
desc = cg.get('description')
if (len(desc) >= 30 and
replayid.startswith(desc) is True):
# We found our cg so return it.
return cg
return None
def _find_cg_replays(self, profile, replayid):
"""Searches for the replays that match replayid for a given profile.
replayid is stored in the replay's description attribute.
:param profile: Dell profile object.
:param replayid: Name to search for. This is a portion of the
snapshot ID as we do not have space for the entire
GUID in the replay description.
:returns: Dell replay object array.
"""
self.cg_except_on_no_support()
replays = []
sccg = self._find_sc_cg(profile, replayid)
if sccg:
r = self.client.get(
'StorageCenter/ScReplayConsistencyGroup/%s/ReplayList'
% self._get_id(sccg))
replays = self._get_json(r)
else:
LOG.error(_LE('Unable to locate snapshot %s'), replayid)
return replays
def delete_cg_replay(self, profile, replayid):
"""Finds a Dell cg replay by replayid string and expires it.
Once marked for expiration we do not return the replay as a snapshot
even though it might still exist. (Backend requirements.)
:param cg_name: Consistency Group name. This is the ReplayProfileName.
:param replayid: Name to search for. This is a portion of the snapshot
ID as we do not have space for the entire GUID in the
replay description.
:returns: Boolean for success or failure.
"""
self.cg_except_on_no_support()
LOG.debug('Expiring consistency group replay %s', replayid)
replays = self._find_cg_replays(profile,
replayid)
for replay in replays:
instanceid = self._get_id(replay)
LOG.debug('Expiring replay %s', instanceid)
r = self.client.post('StorageCenter/ScReplay/%s/Expire'
% instanceid, {}, True)
if not self._check_result(r):
return False
# We either couldn't find it or expired it.
return True
def cg_except_on_no_support(self):
if not self.consisgroups:
msg = _('Dell API 2.1 or later required'
' for Consistency Group support')
raise NotImplementedError(data=msg)
@staticmethod
def size_to_gb(spacestring):
"""Splits a SC size string into GB and a remainder.
Space is returned in a string like ...
7.38197504E8 Bytes
Need to split that apart and convert to GB.
:param spacestring: SC size string.
:return: Size in GB and remainder in byte.
"""
try:
n = spacestring.split(' ', 1)
fgb = int(float(n[0]) // 1073741824)
frem = int(float(n[0]) % 1073741824)
return fgb, frem
except Exception:
# We received an invalid size string. Blow up.
raise exception.VolumeBackendAPIException(
data=_('Error retrieving volume size'))
def _import_one(self, scvolume, newname):
# Find our folder
folder = self._find_volume_folder(True)
# If we actually have a place to put our volume create it
if folder is None:
LOG.warning(_LW('Unable to create folder %s'), self.vfname)
# Rename and move our volume.
payload = {}
payload['Name'] = newname
if folder:
payload['VolumeFolder'] = self._get_id(folder)
r = self.client.put('StorageCenter/ScVolume/%s' %
self._get_id(scvolume), payload, True)
if self._check_result(r):
return self._get_json(r)
return None
def manage_existing(self, newname, existing):
"""Finds the volume named existing and renames it.
This checks a few things. The volume has to exist. There can
only be one volume by that name. Since cinder manages volumes
by the GB it has to be defined on a GB boundary.
This renames existing to newname. newname is the guid from
the cinder volume['id']. The volume is moved to the defined
cinder volume folder.
:param newname: Name to rename the volume to.
:param existing: The existing volume dict..
:return: scvolume.
:raises: VolumeBackendAPIException, ManageExistingInvalidReference
"""
vollist = self._get_volume_list(existing.get('source-name'),
existing.get('source-id'),
False)
count = len(vollist)
# If we found one volume with that name we can work with it.
if count == 1:
# First thing to check is if the size is something we can
# work with.
sz, rem = self.size_to_gb(vollist[0]['configuredSize'])
if rem > 0:
raise exception.VolumeBackendAPIException(
data=_('Volume size must multiple of 1 GB.'))
# We only want to grab detached volumes.
mappings = self._find_mappings(vollist[0])
if len(mappings) > 0:
msg = _('Volume is attached to a server. (%s)') % existing
raise exception.VolumeBackendAPIException(data=msg)
scvolume = self._import_one(vollist[0], newname)
if scvolume:
return scvolume
msg = _('Unable to manage volume %s') % existing
raise exception.VolumeBackendAPIException(data=msg)
elif count > 1:
raise exception.ManageExistingInvalidReference(
existing_ref=existing, reason=_('Volume not unique.'))
else:
raise exception.ManageExistingInvalidReference(
existing_ref=existing, reason=_('Volume not found.'))
def get_unmanaged_volume_size(self, existing):
"""Looks up the volume named existing and returns its size string.
:param existing: Existing volume dict.
:return: The SC configuredSize string.
:raises: ManageExistingInvalidReference
"""
vollist = self._get_volume_list(existing.get('source-name'),
existing.get('source-id'),
False)
count = len(vollist)
# If we found one volume with that name we can work with it.
if count == 1:
sz, rem = self.size_to_gb(vollist[0]['configuredSize'])
if rem > 0:
raise exception.VolumeBackendAPIException(
data=_('Volume size must multiple of 1 GB.'))
return sz
elif count > 1:
raise exception.ManageExistingInvalidReference(
existing_ref=existing, reason=_('Volume not unique.'))
else:
raise exception.ManageExistingInvalidReference(
existing_ref=existing, reason=_('Volume not found.'))
def unmanage(self, scvolume):
"""Unmanage our volume.
We simply rename with with a prefix of `Unmanaged_` That's it.
:param scvolume: The Dell SC volume object.
:return: Nothing.
:raises: VolumeBackendAPIException
"""
newname = 'Unmanaged_' + scvolume['name']
payload = {}
payload['Name'] = newname
r = self.client.put('StorageCenter/ScVolume/%s' %
self._get_id(scvolume), payload, True)
if self._check_result(r):
LOG.info(_LI('Volume %s unmanaged.'), scvolume['name'])
else:
msg = _('Unable to rename volume %(existing)s to %(newname)s') % {
'existing': scvolume['name'],
'newname': newname}
raise exception.VolumeBackendAPIException(data=msg)
    def _find_qos(self, qosnode, ssn=-1):
        """Find Dell SC QOS Node entry for replication.
        Creates the node with defaults (1 Gbps, not bandwidth limited)
        when it does not already exist.
        :param qosnode: Name of qosnode.
        :param ssn: SSN to search on.
        :return: scqos node object.
        """
        ssn = self._vet_ssn(ssn)
        pf = self._get_payload_filter()
        pf.append('scSerialNumber', ssn)
        pf.append('name', qosnode)
        r = self.client.post('StorageCenter/ScReplicationQosNode/GetList',
                             pf.payload)
        if self._check_result(r):
            nodes = self._get_json(r)
            if len(nodes) > 0:
                return nodes[0]
            else:
                # Not found; create it with default settings.
                payload = {}
                payload['LinkSpeed'] = '1 Gbps'
                payload['Name'] = qosnode
                payload['StorageCenter'] = ssn
                payload['BandwidthLimited'] = False
                r = self.client.post('StorageCenter/ScReplicationQosNode',
                                     payload, True)
                if self._check_result(r):
                    return self._get_json(r)
        LOG.error(_LE('Unable to find or create QoS Node named %s'), qosnode)
        raise exception.VolumeBackendAPIException(
            data=_('Failed to find QoSnode'))
def update_replicate_active_replay(self, scvolume, replactive):
"""Enables or disables replicating the active replay for given vol.
:param scvolume: SC Volume object.
:param replactive: True or False
:return: True or False
"""
r = self.client.get('StorageCenter/ScVolume/%s/ReplicationSourceList' %
self._get_id(scvolume))
if self._check_result(r):
replications = self._get_json(r)
for replication in replications:
if replication['replicateActiveReplay'] != replactive:
payload = {'ReplicateActiveReplay': replactive}
r = self.client.put('StorageCenter/ScReplication/%s' %
replication['instanceId'],
payload, True)
if not self._check_result(r):
return False
return True
def get_screplication(self, scvolume, destssn):
"""Find the screplication object for the volume on the dest backend.
:param scvolume:
:param destssn:
:return:
"""
LOG.debug('get_screplication')
r = self.client.get('StorageCenter/ScVolume/%s/ReplicationSourceList' %
self._get_id(scvolume))
if self._check_result(r):
replications = self._get_json(r)
for replication in replications:
# So we need to find the replication we are looking for.
LOG.debug(replication)
LOG.debug('looking for %s', destssn)
if replication.get('destinationScSerialNumber') == destssn:
return replication
# Unable to locate replication.
LOG.warning(_LW('Unable to locate replication %(vol)s to %(ssn)s'),
{'vol': scvolume.get('name'),
'ssn': destssn})
return None
    def delete_replication(self, scvolume, destssn, deletedestvolume=True):
        """Deletes the SC replication object from scvolume to the destssn.
        :param scvolume: Dell SC Volume object.
        :param destssn: SC the replication is replicating to.
        :param deletedestvolume: Delete or keep dest volume.
        :return: True on success. False on fail.
        """
        replication = self.get_screplication(scvolume, destssn)
        if replication:
            payload = {}
            payload['DeleteDestinationVolume'] = deletedestvolume
            payload['RecycleDestinationVolume'] = deletedestvolume
            payload['DeleteRestorePoint'] = True
            r = self.client.delete('StorageCenter/ScReplication/%s' %
                                   self._get_id(replication), payload=payload,
                                   async=True)
            if self._check_result(r):
                # check that we whacked the dest volume
                LOG.info(_LI('Replication %(vol)s to %(dest)s.'),
                         {'vol': scvolume.get('name'),
                          'dest': destssn})
                return True
        # NOTE(review): also reached when no replication object was found;
        # that case is reported as a failure too.
        LOG.error(_LE('Unable to delete replication for '
                      '%(vol)s to %(dest)s.'),
                  {'vol': scvolume.get('name'),
                   'dest': destssn})
        return False
def _repl_name(self, name):
return self.repl_prefix + name
def _get_disk_folder(self, ssn, foldername):
diskfolder = None
# If no folder name we just pass through this.
if foldername:
pf = self._get_payload_filter()
pf.append('scSerialNumber', ssn)
pf.append('name', foldername)
r = self.client.post('StorageCenter/ScDiskFolder/GetList',
pf.payload)
if self._check_result(r):
try:
# Go for broke.
diskfolder = self._get_json(r)[0]
except Exception:
# We just log this as an error and return nothing.
LOG.error(_LE('Unable to find '
'disk folder %(name)s on %(ssn)s'),
{'name': foldername,
'ssn': ssn})
return diskfolder
    def create_replication(self, scvolume, destssn, qosnode,
                           synchronous, diskfolder, replicate_active):
        """Create repl from scvol to destssn.
        :param scvolume: Dell SC volume object.
        :param destssn: Destination SSN string.
        :param qosnode: Name of Dell SC QOS Node for this replication.
        :param synchronous: Boolean.
        :param diskfolder: optional disk folder name.
        :param replicate_active: replicate active replay.
        :return: Dell SC replication object.
        """
        screpl = None
        # NOTE(review): find_sc appears to resolve/validate the SSN; the
        # resolved value goes into the payload. Confirm against find_sc.
        ssn = self.find_sc(int(destssn))
        payload = {}
        payload['DestinationStorageCenter'] = ssn
        payload['QosNode'] = self._get_id(self._find_qos(qosnode))
        payload['SourceVolume'] = self._get_id(scvolume)
        payload['StorageCenter'] = self.find_sc()
        # Have to replicate the active replay.
        payload['ReplicateActiveReplay'] = replicate_active or synchronous
        if synchronous:
            payload['Type'] = 'Synchronous'
            # If our type is synchronous we prefer high availability be set.
            payload['SyncMode'] = 'HighAvailability'
        else:
            payload['Type'] = 'Asynchronous'
        destinationvolumeattributes = {}
        destinationvolumeattributes['CreateSourceVolumeFolderPath'] = True
        destinationvolumeattributes['Notes'] = self.notes
        destinationvolumeattributes['Name'] = self._repl_name(scvolume['name'])
        # Find our disk folder. If they haven't specified one this will just
        # drop through. If they have specified one and it can't be found the
        # error will be logged but this will keep going.
        df = self._get_disk_folder(destssn, diskfolder)
        if df:
            destinationvolumeattributes['DiskFolder'] = self._get_id(df)
        payload['DestinationVolumeAttributes'] = destinationvolumeattributes
        r = self.client.post('StorageCenter/ScReplication', payload, True)
        # 201 expected.
        if self._check_result(r):
            LOG.info(_LI('Replication created for %(volname)s to %(destsc)s'),
                     {'volname': scvolume.get('name'),
                      'destsc': destssn})
            screpl = self._get_json(r)
        # Check we did something.
        if not screpl:
            # Failed to launch. Inform user. Throw.
            LOG.error(_LE('Unable to replicate %(volname)s to %(destsc)s'),
                      {'volname': scvolume.get('name'),
                       'destsc': destssn})
        return screpl
def find_repl_volume(self, name, destssn, instance_id=None,
                     source=False, destination=True):
    """Find our replication destination volume on the destssn.

    :param name: Name to search for.
    :param destssn: Where to look for the volume.
    :param instance_id: If we know our exact volume ID use that.
    :param source: Replication source boolean.
    :param destination: Replication destination boolean.
    :return: SC Volume object or None.
    """
    # Do a normal volume search.
    pf = self._get_payload_filter()
    pf.append('scSerialNumber', destssn)
    # Are we looking for a replication destination?
    pf.append('ReplicationDestination', destination)
    # Are we looking for a replication source?
    pf.append('ReplicationSource', source)
    # There is a chance we know the exact volume. If so then use that.
    if instance_id:
        pf.append('instanceId', instance_id)
    else:
        # Try the name.
        pf.append('Name', name)
    r = self.client.post('StorageCenter/ScVolume/GetList',
                         pf.payload)
    if self._check_result(r):
        volumes = self._get_json(r)
        # Only a single, unambiguous match counts; zero or multiple
        # matches fall through and are treated as not found.
        if len(volumes) == 1:
            return volumes[0]
    return None
def remove_mappings(self, scvol):
    """Peels all the mappings off of scvol.

    :param scvol: Storage Center volume object.
    :return: True/False on Success/Failure.
    """
    if not scvol:
        return False
    response = self.client.post(
        'StorageCenter/ScVolume/%s/Unmap' % self._get_id(scvol), {}, True)
    return self._check_result(response)
def find_replication_dest(self, instance_id, destssn):
    """Placeholder; not implemented.

    Always returns None (implicitly).

    :param instance_id: Storage Center volume instance id (unused).
    :param destssn: Destination SSN (unused).
    """
    pass
def break_replication(self, volumename, instance_id, destssn):
    """This just breaks the replication.

    If we find the source we just delete the replication. If the source
    is down then we find the destination and unmap it. Fail pretty much
    every time this goes south.

    :param volumename: Volume name is the guid from the cinder volume.
    :param instance_id: Storage Center volume object instance id.
    :param destssn: Destination ssn.
    :return: Replication SC volume object.
    """
    replinstanceid = None
    scvolume = self.find_volume(volumename, instance_id)
    if scvolume:
        screplication = self.get_screplication(scvolume, destssn)
        # if we got our replication volume we can do this nicely.
        if screplication:
            replinstanceid = (
                screplication['destinationVolume']['instanceId'])
    # Locate the destination-side volume (by exact id when known,
    # otherwise by the replication naming convention).
    screplvol = self.find_repl_volume(self._repl_name(volumename),
                                      destssn, replinstanceid)
    # delete_replication fails to delete replication without also
    # stuffing it into the recycle bin.
    # Instead we try to unmap the destination volume which will break
    # the replication but leave the replication object on the SC.
    if self.remove_mappings(screplvol):
        # Try to kill mappings on the source.
        # We don't care that this succeeded or failed. Just move on.
        self.remove_mappings(scvolume)
    return screplvol
def _get_replay_list(self, scvolume):
    """Fetch the replay (snapshot) list for a volume.

    :param scvolume: Dell SC volume object.
    :return: List of replays; empty list on error.
    """
    response = self.client.get('StorageCenter/ScVolume/%s/ReplayList'
                               % self._get_id(scvolume))
    if not self._check_result(response):
        return []
    return self._get_json(response)
def find_common_replay(self, svolume, dvolume):
    """Finds the common replay between two volumes.

    This assumes that one volume was replicated from the other. This
    should return the most recent replay.

    :param svolume: Source SC Volume.
    :param dvolume: Destination SC Volume.
    :return: Common replay or None.
    """
    if svolume and dvolume:
        # Build the source's replay-id set once so scanning the
        # destination replays is O(n + m) instead of O(n * m).
        source_indexes = {sreplay['globalIndex']
                          for sreplay in self._get_replay_list(svolume)}
        # Return the first destination replay whose globalIndex also
        # exists on the source — same match the old nested loop found.
        for dreplay in self._get_replay_list(dvolume):
            if dreplay['globalIndex'] in source_indexes:
                return dreplay
    return None
def start_replication(self, svolume, dvolume,
                      replicationtype, qosnode, activereplay):
    """Starts a replication between volumes.

    Requires the dvolume to be in an appropriate state to start this.

    :param svolume: Source SC Volume.
    :param dvolume: Destination SC Volume.
    :param replicationtype: Asynchronous or synchronous.
    :param qosnode: QOS node name.
    :param activereplay: Boolean to replicate the active replay or not.
    :return: ScReplication object or None.
    """
    if svolume and dvolume:
        # QOS node must exist on the source volume's Storage Center.
        qos = self._find_qos(qosnode, svolume['scSerialNumber'])
        if qos:
            payload = {}
            payload['QosNode'] = self._get_id(qos)
            payload['SourceVolume'] = self._get_id(svolume)
            payload['StorageCenter'] = svolume['scSerialNumber']
            # Have to replicate the active replay.
            payload['ReplicateActiveReplay'] = activereplay
            payload['Type'] = replicationtype
            payload['DestinationVolume'] = self._get_id(dvolume)
            payload['DestinationStorageCenter'] = dvolume['scSerialNumber']
            r = self.client.post('StorageCenter/ScReplication', payload,
                                 True)
            # 201 expected.
            if self._check_result(r):
                LOG.info(_LI('Replication created for '
                             '%(src)s to %(dest)s'),
                         {'src': svolume.get('name'),
                          'dest': dvolume.get('name')})
                screpl = self._get_json(r)
                return screpl
    # Missing volume, QOS node, or REST failure all land here.
    return None
def replicate_to_common(self, svolume, dvolume, qosnode):
    """Reverses a replication between two volumes.

    :param svolume: Failed over volume. (Current source.)
    :param dvolume: Original source volume (destination of the new
                    replication).
    :param qosnode: QOS node name to use to create the replay.
    :return: ScReplication object or None.
    """
    # find our common replay.
    creplay = self.find_common_replay(svolume, dvolume)
    # if we found one.
    if creplay:
        # create a view volume from the common replay.
        payload = {}
        # funky name.
        payload['Name'] = 'fback:' + dvolume['name']
        payload['Notes'] = self.notes
        payload['VolumeFolder'] = self._get_id(dvolume['volumeFolder'])
        r = self.client.post('StorageCenter/ScReplay/%s/CreateView'
                             % self._get_id(creplay), payload, True)
        if self._check_result(r):
            vvolume = self._get_json(r)
            if vvolume:
                # snap a replay and start replicating.
                if self.create_replay(svolume, 'failback', 600):
                    return self.start_replication(svolume, vvolume,
                                                  'Asynchronous', qosnode,
                                                  False)
    # No joy. Error the volume.
    return None
def flip_replication(self, svolume, dvolume, name,
                     replicationtype, qosnode, activereplay):
    """Enables replication from current destination volume to source.

    :param svolume: Current source. New destination.
    :param dvolume: Current destination. New source.
    :param name: Volume name.
    :param replicationtype: Sync or async.
    :param qosnode: qos node for the new source ssn.
    :param activereplay: replicate the active replay.
    :return: True/False.
    """
    # We are flipping a replication. That means there was a replication to
    # start with. Delete that.
    if self.delete_replication(svolume, dvolume['scSerialNumber'], False):
        # Kick off a replication going the other way.
        if self.start_replication(dvolume, svolume, replicationtype,
                                  qosnode, activereplay) is not None:
            # rename
            # New source takes the plain name; old source takes the
            # replication-convention name.
            if (self.rename_volume(svolume, self._repl_name(name)) and
                    self.rename_volume(dvolume, name)):
                return True
    # Any failed step above falls through to a warning and False.
    LOG.warning(_LW('flip_replication: Unable to replicate '
                    '%(name)s from %(src)s to %(dst)s'),
                {'name': name,
                 'src': dvolume['scSerialNumber'],
                 'dst': svolume['scSerialNumber']})
    return False
def replication_progress(self, screplid):
    """Get's the current progress of the replication.

    :param screplid: instanceId of the ScReplication object.
    :return: Boolean for synced, float of remaining bytes. (Or None, None.)
    """
    if screplid:
        r = self.client.get(
            'StorageCenter/ScReplication/%s/CurrentProgress' % screplid)
        if self._check_result(r):
            progress = self._get_json(r)
            try:
                # amountRemaining appears to be '<number> <unit>'; keep
                # only the numeric part.
                remaining = float(
                    progress['amountRemaining'].split(' ', 1)[0])
                return progress['synced'], remaining
            except Exception:
                # NOTE(review): broad catch looks deliberate best-effort —
                # malformed progress payloads are logged, never raised.
                LOG.warning(_LW('replication_progress: Invalid replication'
                                ' progress information returned: %s'),
                            progress)
    return None, None
def is_swapped(self, provider_id, sclivevolume):
    """Check whether the Live Volume roles have been swapped.

    :param provider_id: Instance id we expect as the primary volume.
    :param sclivevolume: Dell SC live volume object.
    :return: True when the reported primary is not provider_id.
    """
    primary = sclivevolume.get('primaryVolume')
    return bool(primary and primary['instanceId'] != provider_id)
def is_failed_over(self, provider_id, sclivevolume):
    """Check whether the live volume has failed over.

    True when either the secondary side is activated or the roles have
    swapped so the secondary is now our primary.

    :param provider_id: Instance id we expect as the primary volume.
    :param sclivevolume: Dell SC live volume object.
    :return: True/False.
    """
    if sclivevolume.get('secondaryRole') == 'Activated':
        return True
    return bool(self.is_swapped(provider_id, sclivevolume))
def _sc_live_volumes(self, ssn):
    """Return the live volume list for a single Storage Center.

    :param ssn: Storage Center serial number.
    :return: List of ScLiveVolume objects; [] on error or missing ssn.
    """
    if not ssn:
        return []
    response = self.client.get(
        'StorageCenter/StorageCenter/%s/LiveVolumeList' % ssn)
    if self._check_result(response):
        return self._get_json(response)
    return []
def _get_live_volumes(self):
    """Return live volumes across all connected Storage Centers.

    :return: List of ScLiveVolume objects (possibly empty).
    """
    # Work around for a FW bug. Instead of grabbing the entire list at
    # once we have to Trundle through each SC's list.
    lvs = []
    pf = self._get_payload_filter()
    pf.append('connected', True)
    r = self.client.post('StorageCenter/StorageCenter/GetList',
                         pf.payload)
    if self._check_result(r):
        # Should return [] if nothing there.
        # Just in case do the or.
        scs = self._get_json(r) or []
        for sc in scs:
            lvs += self._sc_live_volumes(self._get_id(sc))
    return lvs
def get_live_volume(self, primaryid, name=None):
    """Get's the live ScLiveVolume object for the vol with primaryid.

    :param primaryid: InstanceId of the primary volume.
    :param name: Volume name associated with this live volume.
    :return: ScLiveVolume object or None
    """
    sclivevol = None
    if primaryid:
        # Try from our primary SSN. This would be the authoritay on the
        # Live Volume in question.
        # (The SSN is the leading dot-separated component of the id.)
        lvs = self._sc_live_volumes(primaryid.split('.')[0])
        # No, grab them all and see if we are on the secondary.
        if not lvs:
            lvs = self._get_live_volumes()
        if lvs:
            # Look for our primaryid.
            for lv in lvs:
                if ((lv.get('primaryVolume') and
                     lv['primaryVolume']['instanceId'] == primaryid) or
                        (lv.get('secondaryVolume') and
                         lv['secondaryVolume']['instanceId'] == primaryid)):
                    sclivevol = lv
                    break
                # Sometimes the lv object returns without a secondary
                # volume. Make sure we find this by name if we have to.
                if (name and sclivevol is None and
                        lv['instanceName'].endswith(name)):
                    # Name match is a fallback only; keep scanning for an
                    # exact id match which would break out above.
                    sclivevol = lv
    return sclivevol
def _get_hbas(self, serverid):
    """Helper to get the HBAs of a given server.

    :param serverid: Instance id of the ScServer.
    :return: List of HBAs or None on error.
    """
    response = self.client.get('StorageCenter/ScServer/%s/HbaList' % serverid)
    return self._get_json(response) if self._check_result(response) else None
def map_secondary_volume(self, sclivevol, scdestsrv):
    """Map's the secondary volume or a LiveVolume to destsrv.

    :param sclivevol: ScLiveVolume object.
    :param scdestsrv: ScServer object for the destination.
    :return: ScMappingProfile object or None on failure.
    """
    payload = {
        'Server': self._get_id(scdestsrv),
        # Map even when the server's HBAs are currently down.
        'Advanced': {'MapToDownServerHbas': True},
    }
    response = self.client.post(
        'StorageCenter/ScLiveVolume/%s/MapSecondaryVolume'
        % self._get_id(sclivevol), payload, True)
    if not self._check_result(response):
        return None
    return self._get_json(response)
def create_live_volume(self, scvolume, remotessn, active=False, sync=False,
                       autofailover=False, primaryqos='CinderQOS',
                       secondaryqos='CinderQOS'):
    """This create's a live volume instead of a replication.

    Servers are not created at this point so we cannot map up a remote
    server immediately.

    :param scvolume: Source SC Volume
    :param remotessn: Destination SSN.
    :param active: Replicate the active replay boolean.
    :param sync: Sync replication boolean.
    :param autofailover: enable autofailover and failback boolean.
    :param primaryqos: QOS node name for the primary side.
    :param secondaryqos: QOS node name for the remote side.
    :return: ScLiveVolume object or None on failure.
    """
    destssn = self.find_sc(int(remotessn))
    pscqos = self._find_qos(primaryqos)
    sscqos = self._find_qos(secondaryqos, destssn)
    if not destssn:
        LOG.error(_LE('create_live_volume: Unable to find remote %s'),
                  remotessn)
    elif not pscqos:
        LOG.error(_LE('create_live_volume: Unable to find or create '
                      'qos node %s'), primaryqos)
    elif not sscqos:
        LOG.error(_LE('create_live_volume: Unable to find or create remote'
                      ' qos node %(qos)s on %(ssn)s'),
                  {'qos': secondaryqos, 'ssn': destssn})
    else:
        payload = {}
        payload['PrimaryVolume'] = self._get_id(scvolume)
        payload['PrimaryQosNode'] = self._get_id(pscqos)
        payload['SecondaryQosNode'] = self._get_id(sscqos)
        payload['SecondaryStorageCenter'] = destssn
        payload['StorageCenter'] = self.ssn
        # payload['Dedup'] = False
        payload['FailoverAutomaticallyEnabled'] = autofailover
        payload['RestoreAutomaticallyEnabled'] = autofailover
        payload['SwapRolesAutomaticallyEnabled'] = False
        # Autofailover (like sync) forces replication of the active replay.
        payload['ReplicateActiveReplay'] = (active or autofailover)
        if sync or autofailover:
            payload['Type'] = 'Synchronous'
            payload['SyncMode'] = 'HighAvailability'
        else:
            payload['Type'] = 'Asynchronous'
        secondaryvolumeattributes = {}
        secondaryvolumeattributes['CreateSourceVolumeFolderPath'] = True
        secondaryvolumeattributes['Notes'] = self.notes
        secondaryvolumeattributes['Name'] = scvolume['name']
        payload[
            'SecondaryVolumeAttributes'] = secondaryvolumeattributes

        r = self.client.post('StorageCenter/ScLiveVolume', payload, True)
        if self._check_result(r):
            LOG.info(_LI('create_live_volume: Live Volume created from'
                         '%(svol)s to %(ssn)s'),
                     {'svol': self._get_id(scvolume), 'ssn': remotessn})
            return self._get_json(r)
    LOG.error(_LE('create_live_volume: Failed to create Live Volume from'
                  '%(svol)s to %(ssn)s'),
              {'svol': self._get_id(scvolume), 'ssn': remotessn})
    return None
def delete_live_volume(self, sclivevolume, deletesecondaryvolume):
    """Deletes the live volume.

    :param sclivevolume: ScLiveVolume object to be whacked.
    :param deletesecondaryvolume: When True, also delete (and recycle)
                                  the secondary volume and its restore
                                  point.
    :return: Boolean on success/fail.
    """
    payload = {}
    payload['ConvertToReplication'] = False
    # One flag drives all three secondary-side cleanup options.
    payload['DeleteSecondaryVolume'] = deletesecondaryvolume
    payload['RecycleSecondaryVolume'] = deletesecondaryvolume
    payload['DeleteRestorePoint'] = deletesecondaryvolume
    r = self.client.delete('StorageCenter/ScLiveVolume/%s' %
                           self._get_id(sclivevolume), payload, True)
    # Collapse the old if/return True/return False into a direct bool.
    return bool(self._check_result(r))
def swap_roles_live_volume(self, sclivevolume):
    """Swap live volume roles.

    :param sclivevolume: Dell SC live volume object.
    :return: True/False on success/failure.
    """
    r = self.client.post('StorageCenter/ScLiveVolume/%s/SwapRoles' %
                         self._get_id(sclivevolume), {}, True)
    # Collapse the old if/return True/return False into a direct bool.
    return bool(self._check_result(r))
| apache-2.0 |
Dapid/numpy | numpy/core/tests/test_shape_base.py | 25 | 11503 | from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.compat import long
from numpy.core import (array, arange, atleast_1d, atleast_2d, atleast_3d,
vstack, hstack, newaxis, concatenate, stack)
from numpy.testing import (TestCase, assert_, assert_raises, assert_array_equal,
assert_equal, run_module_suite, assert_raises_regex)
class TestAtleast1d(TestCase):
def test_0D_array(self):
a = array(1)
b = array(2)
res = [atleast_1d(a), atleast_1d(b)]
desired = [array([1]), array([2])]
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1, 2])
b = array([2, 3])
res = [atleast_1d(a), atleast_1d(b)]
desired = [array([1, 2]), array([2, 3])]
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
res = [atleast_1d(a), atleast_1d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_3D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
a = array([a, a])
b = array([b, b])
res = [atleast_1d(a), atleast_1d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_r1array(self):
""" Test to make sure equivalent Travis O's r1array function
"""
assert_(atleast_1d(3).shape == (1,))
assert_(atleast_1d(3j).shape == (1,))
assert_(atleast_1d(long(3)).shape == (1,))
assert_(atleast_1d(3.0).shape == (1,))
assert_(atleast_1d([[2, 3], [4, 5]]).shape == (2, 2))
class TestAtleast2d(TestCase):
    """Tests for np.atleast_2d promotion of 0-3 dimensional inputs."""

    def test_0D_array(self):
        inputs = [array(1), array(2)]
        expected = [array([[1]]), array([[2]])]
        assert_array_equal([atleast_2d(x) for x in inputs], expected)

    def test_1D_array(self):
        inputs = [array([1, 2]), array([2, 3])]
        expected = [array([[1, 2]]), array([[2, 3]])]
        assert_array_equal([atleast_2d(x) for x in inputs], expected)

    def test_2D_array(self):
        inputs = [array([[1, 2], [1, 2]]), array([[2, 3], [2, 3]])]
        # Already 2-D: passes through unchanged.
        assert_array_equal([atleast_2d(x) for x in inputs], inputs)

    def test_3D_array(self):
        base_a = array([[1, 2], [1, 2]])
        base_b = array([[2, 3], [2, 3]])
        inputs = [array([base_a, base_a]), array([base_b, base_b])]
        # Higher than 2-D: passes through unchanged.
        assert_array_equal([atleast_2d(x) for x in inputs], inputs)

    def test_r2array(self):
        """Equivalent of Travis O's r2array function."""
        assert_(atleast_2d(3).shape == (1, 1))
        assert_(atleast_2d([3j, 1]).shape == (1, 2))
        assert_(atleast_2d([[[3, 1], [4, 5]], [[3, 5], [1, 2]]]).shape == (2, 2, 2))
class TestAtleast3d(TestCase):
    """Tests for np.atleast_3d promotion of 0-3 dimensional inputs."""

    def test_0D_array(self):
        inputs = [array(1), array(2)]
        expected = [array([[[1]]]), array([[[2]]])]
        assert_array_equal([atleast_3d(x) for x in inputs], expected)

    def test_1D_array(self):
        inputs = [array([1, 2]), array([2, 3])]
        # A 1-D input becomes shape (1, n, 1).
        expected = [array([[[1], [2]]]), array([[[2], [3]]])]
        assert_array_equal([atleast_3d(x) for x in inputs], expected)

    def test_2D_array(self):
        a = array([[1, 2], [1, 2]])
        b = array([[2, 3], [2, 3]])
        # A 2-D input gets a trailing length-1 axis appended.
        expected = [a[:, :, newaxis], b[:, :, newaxis]]
        assert_array_equal([atleast_3d(a), atleast_3d(b)], expected)

    def test_3D_array(self):
        base_a = array([[1, 2], [1, 2]])
        base_b = array([[2, 3], [2, 3]])
        inputs = [array([base_a, base_a]), array([base_b, base_b])]
        # Already 3-D: passes through unchanged.
        assert_array_equal([atleast_3d(x) for x in inputs], inputs)
class TestHstack(TestCase):
    """Tests for np.hstack on 0-2 dimensional inputs."""

    def test_0D_array(self):
        # 0-d inputs are stacked into a 1-d result.
        result = hstack([array(1), array(2)])
        assert_array_equal(result, array([1, 2]))

    def test_1D_array(self):
        result = hstack([array([1]), array([2])])
        assert_array_equal(result, array([1, 2]))

    def test_2D_array(self):
        # 2-d inputs are joined along the second axis (columns).
        result = hstack([array([[1], [2]]), array([[1], [2]])])
        assert_array_equal(result, array([[1, 1], [2, 2]]))
class TestVstack(TestCase):
    """Tests for np.vstack on 0-2 dimensional inputs."""

    def test_0D_array(self):
        # 0-d inputs each become a length-1 row.
        result = vstack([array(1), array(2)])
        assert_array_equal(result, array([[1], [2]]))

    def test_1D_array(self):
        result = vstack([array([1]), array([2])])
        assert_array_equal(result, array([[1], [2]]))

    def test_2D_array(self):
        # 2-d inputs are joined along the first axis (rows).
        result = vstack([array([[1], [2]]), array([[1], [2]])])
        assert_array_equal(result, array([[1], [2], [1], [2]]))

    def test_2D_array2(self):
        # 1-d inputs become rows of the 2-d result.
        result = vstack([array([1, 2]), array([1, 2])])
        assert_array_equal(result, array([[1, 2], [1, 2]]))
class TestConcatenate(TestCase):
    """Tests for np.concatenate: error handling, axis=None flattening,
    and axis-wise joining for 1-3 dimensional inputs."""

    def test_exceptions(self):
        # test axis must be in bounds
        for ndim in [1, 2, 3]:
            a = np.ones((1,)*ndim)
            np.concatenate((a, a), axis=0)  # OK
            assert_raises(IndexError, np.concatenate, (a, a), axis=ndim)
            assert_raises(IndexError, np.concatenate, (a, a), axis=-(ndim + 1))
        # Scalars cannot be concatenated
        assert_raises(ValueError, concatenate, (0,))
        assert_raises(ValueError, concatenate, (np.array(0),))
        # test shapes must match except for concatenation axis
        a = np.ones((1, 2, 3))
        b = np.ones((2, 2, 3))
        axis = list(range(3))
        for i in range(3):
            np.concatenate((a, b), axis=axis[0])  # OK
            assert_raises(ValueError, np.concatenate, (a, b), axis=axis[1])
            assert_raises(ValueError, np.concatenate, (a, b), axis=axis[2])
            # Rotate the arrays and the axis list so every dimension gets
            # a turn as the (only) mismatched, concatenatable axis.
            a = np.rollaxis(a, -1)
            b = np.rollaxis(b, -1)
            axis.append(axis.pop(0))
        # No arrays to concatenate raises ValueError
        assert_raises(ValueError, concatenate, ())

    def test_concatenate_axis_None(self):
        a = np.arange(4, dtype=np.float64).reshape((2, 2))
        b = list(range(3))
        c = ['x']
        # axis=None flattens all inputs; dtype follows the first array.
        r = np.concatenate((a, a), axis=None)
        assert_equal(r.dtype, a.dtype)
        assert_equal(r.ndim, 1)
        r = np.concatenate((a, b), axis=None)
        assert_equal(r.size, a.size + len(b))
        assert_equal(r.dtype, a.dtype)
        # Mixing in strings promotes the result to a string array.
        r = np.concatenate((a, b, c), axis=None)
        d = array(['0.0', '1.0', '2.0', '3.0',
                   '0', '1', '2', 'x'])
        assert_array_equal(r, d)

    def test_large_concatenate_axis_None(self):
        # When no axis is given, concatenate uses flattened versions.
        # This also had a bug with many arrays (see gh-5979).
        x = np.arange(1, 100)
        r = np.concatenate(x, None)
        assert_array_equal(x, r)
        # This should probably be deprecated:
        r = np.concatenate(x, 100)  # axis is >= MAXDIMS
        assert_array_equal(x, r)

    def test_concatenate(self):
        # Test concatenate function
        # One sequence returns unmodified (but as array)
        r4 = list(range(4))
        assert_array_equal(concatenate((r4,)), r4)
        # Any sequence
        assert_array_equal(concatenate((tuple(r4),)), r4)
        assert_array_equal(concatenate((array(r4),)), r4)
        # 1D default concatenation
        r3 = list(range(3))
        assert_array_equal(concatenate((r4, r3)), r4 + r3)
        # Mixed sequence types
        assert_array_equal(concatenate((tuple(r4), r3)), r4 + r3)
        assert_array_equal(concatenate((array(r4), r3)), r4 + r3)
        # Explicit axis specification
        assert_array_equal(concatenate((r4, r3), 0), r4 + r3)
        # Including negative
        assert_array_equal(concatenate((r4, r3), -1), r4 + r3)
        # 2D
        a23 = array([[10, 11, 12], [13, 14, 15]])
        a13 = array([[0, 1, 2]])
        res = array([[10, 11, 12], [13, 14, 15], [0, 1, 2]])
        assert_array_equal(concatenate((a23, a13)), res)
        assert_array_equal(concatenate((a23, a13), 0), res)
        assert_array_equal(concatenate((a23.T, a13.T), 1), res.T)
        assert_array_equal(concatenate((a23.T, a13.T), -1), res.T)
        # Arrays much match shape
        assert_raises(ValueError, concatenate, (a23.T, a13.T), 0)
        # 3D
        res = arange(2 * 3 * 7).reshape((2, 3, 7))
        a0 = res[..., :4]
        a1 = res[..., 4:6]
        a2 = res[..., 6:]
        assert_array_equal(concatenate((a0, a1, a2), 2), res)
        assert_array_equal(concatenate((a0, a1, a2), -1), res)
        assert_array_equal(concatenate((a0.T, a1.T, a2.T), 0), res.T)
def test_stack():
    """Smoke tests for np.stack across input types, axes and error cases."""
    # 0d input
    for input_ in [(1, 2, 3),
                   [np.int32(1), np.int32(2), np.int32(3)],
                   [np.array(1), np.array(2), np.array(3)]]:
        assert_array_equal(stack(input_), [1, 2, 3])
    # 1d input examples
    a = np.array([1, 2, 3])
    b = np.array([4, 5, 6])
    r1 = array([[1, 2, 3], [4, 5, 6]])
    assert_array_equal(np.stack((a, b)), r1)
    assert_array_equal(np.stack((a, b), axis=1), r1.T)
    # all input types
    assert_array_equal(np.stack(list([a, b])), r1)
    assert_array_equal(np.stack(array([a, b])), r1)
    # all shapes for 1d input
    arrays = [np.random.randn(3) for _ in range(10)]
    axes = [0, 1, -1, -2]
    expected_shapes = [(10, 3), (3, 10), (3, 10), (10, 3)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    assert_raises_regex(IndexError, 'out of bounds', stack, arrays, axis=2)
    assert_raises_regex(IndexError, 'out of bounds', stack, arrays, axis=-3)
    # all shapes for 2d input
    arrays = [np.random.randn(3, 4) for _ in range(10)]
    axes = [0, 1, 2, -1, -2, -3]
    expected_shapes = [(10, 3, 4), (3, 10, 4), (3, 4, 10),
                       (3, 4, 10), (3, 10, 4), (10, 3, 4)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    # empty arrays
    assert_(stack([[], [], []]).shape == (3, 0))
    assert_(stack([[], [], []], axis=1).shape == (0, 3))
    # edge cases
    assert_raises_regex(ValueError, 'need at least one array', stack, [])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [1, np.arange(3)])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(3), 1])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(3), 1], axis=1)
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.zeros((3, 3)), np.zeros(3)], axis=1)
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(2), np.arange(3)])
    # np.matrix
    m = np.matrix([[1, 2], [3, 4]])
    assert_raises_regex(ValueError, 'shape too large to be a matrix',
                        stack, [m, m])
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_module_suite()
| bsd-3-clause |
alaunay/bigtop | bigtop-packages/src/charm/hadoop/layer-hadoop-namenode/tests/01-basic-deployment.py | 12 | 1353 | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import amulet
class TestDeploy(unittest.TestCase):
    """
    Trivial deployment test for Apache Hadoop NameNode.

    This charm cannot do anything useful by itself, so integration testing
    is done in the bundle.
    """

    def test_deploy(self):
        # Deploy a single namenode unit on xenial; success is the
        # deployment reaching a steady state within the timeouts.
        self.d = amulet.Deployment(series='xenial')
        self.d.add('namenode', 'hadoop-namenode')
        self.d.setup(timeout=900)
        self.d.sentry.wait(timeout=1800)
        self.unit = self.d.sentry['namenode'][0]
# Allow running this deployment test directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
howels/pi-tank | node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py | 379 | 42639 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')


def QuoteForRspFile(arg):
    """Quote a command line argument so that it appears as one argument when
    processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
    Windows programs)."""
    # CommandLineToArgvW needs 2n+1 backslashes before a literal quote to
    # yield n backslashes + the quote, so double every backslash run that
    # precedes a '"' and escape the quote itself.  (This mirrors the
    # CommandLineToArgvW rules, not shell rules — cmd itself does nothing.)
    escaped = windows_quoter_regex.sub(
        lambda match: match.group(1) * 2 + '\\"', arg)
    # Double %'s so they are not taken as batch positional arguments and
    # survive expansion as literal percent signs.
    escaped = escaped.replace('%', '%%')
    # These strings go into rsp files, so no ^-escaping for the shell is
    # needed.  Wrap in quotes so embedded whitespace is not a word break.
    return '"%s"' % escaped
def EncodeRspFileList(args):
    """Process a list of arguments using QuoteCmdExeArgument."""
    # An empty command list encodes to an empty string.
    if not args:
        return ''
    # The first element is assumed to be the program; it is left unquoted
    # so shell built-ins like 'echo' keep working.  For 'call prog', only
    # the path part is normalized — normalizing the whole string would
    # incorrectly treat 'call ../x.bat' as one path.
    first = args[0]
    if first.startswith('call '):
        keyword, prog_path = first.split(' ', 1)
        program = keyword + ' ' + os.path.normpath(prog_path)
    else:
        program = os.path.normpath(first)
    quoted_rest = ' '.join(QuoteForRspFile(argument) for argument in args[1:])
    return program + ' ' + quoted_rest
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
    """Try to find an installation location for the DirectX SDK. Check for the
    standard environment variable, and if that doesn't exist, try to find
    via the registry. May return None if not found in either location."""
    # Return previously calculated value, if there is one
    # (memoized on the function object itself).
    if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
        return _FindDirectXInstallation.dxsdk_dir

    dxsdk_dir = os.environ.get('DXSDK_DIR')
    if not dxsdk_dir:
        # Setup params to pass to and attempt to launch reg.exe.
        cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # NOTE(review): Python 2 idiom — on Python 3 communicate() returns
        # bytes and these substring/split operations would need decoding.
        for line in p.communicate()[0].splitlines():
            if 'InstallPath' in line:
                dxsdk_dir = line.split(' ')[3] + "\\"

    # Cache return value
    _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
    return dxsdk_dir
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
    # The gyp target dict whose msvs_* fields this object interprets.
    self.spec = spec
    self.vs_version = GetVSVersion(generator_flags)
    self.dxsdk_dir = _FindDirectXInstallation()

    # Try to find an installation location for the Windows DDK by checking
    # the WDK_DIR environment variable, may be None.
    self.wdk_dir = os.environ.get('WDK_DIR')

    # For each supported msvs_* field, build a per-configuration dict
    # defaulted to an empty instance of the field's declared type.
    supported_fields = [
        ('msvs_configuration_attributes', dict),
        ('msvs_settings', dict),
        ('msvs_system_include_dirs', list),
        ('msvs_disabled_warnings', list),
        ('msvs_precompiled_header', str),
        ('msvs_precompiled_source', str),
        ('msvs_configuration_platform', str),
        ('msvs_target_platform', str),
    ]
    configs = spec['configurations']
    for field, default in supported_fields:
        setattr(self, field, {})
        # NOTE(review): iteritems() is Python 2 only.
        for configname, config in configs.iteritems():
            getattr(self, field)[configname] = config.get(field, default())

    self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
def GetVSMacroEnv(self, base_to_build=None, config=None):
    """Get a dict of variables mapping internal VS macro names to their gyp
    equivalents."""
    target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
    target_name = self.spec.get('product_prefix', '') + \
        self.spec.get('product_name', self.spec['target_name'])
    target_dir = base_to_build + '\\' if base_to_build else ''
    replacements = {
        '$(OutDir)\\': target_dir,
        '$(TargetDir)\\': target_dir,
        '$(IntDir)': '$!INTERMEDIATE_DIR',
        '$(InputPath)': '${source}',
        '$(InputName)': '${root}',
        '$(ProjectName)': self.spec['target_name'],
        '$(TargetName)': target_name,
        '$(PlatformName)': target_platform,
        '$(ProjectDir)\\': '',
    }
    # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
    # Visual Studio is actually installed.
    if self.vs_version.Path():
        replacements['$(VSInstallDir)'] = self.vs_version.Path()
        replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(),
                                                       'VC') + '\\'
    # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
    # set. This happens when the SDK is sync'd via src-internal, rather than
    # by typical end-user installation of the SDK. If it's not set, we don't
    # want to leave the unexpanded variable in the path, so simply strip it.
    replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
    replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
    return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
    """Convert from VS macro names to something equivalent."""
    # Build the macro-name -> replacement map and expand |s| through it.
    macro_env = self.GetVSMacroEnv(base_to_build, config=config)
    return ExpandMacros(s, macro_env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info:
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
  def GetCflags(self, config):
    """Returns the flags that need to be added to .c and .cc compilations.

    Maps the VCCLCompilerTool settings from the gyp file onto cl.exe
    command-line flags; flags are appended to |cflags| in the order the
    cl(...) calls appear below.
    """
    config = self._TargetConfig(config)
    cflags = []
    # Warnings disabled via the msvs_disabled_warnings gyp variable.
    cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
    cl = self._GetWrapper(self, self.msvs_settings[config],
                          'VCCLCompilerTool', append=cflags)
    # Each cl(...) call maps one VS project setting to its cl.exe flag.
    cl('Optimization',
       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
    cl('InlineFunctionExpansion', prefix='/Ob')
    cl('DisableSpecificWarnings', prefix='/wd')
    cl('StringPooling', map={'true': '/GF'})
    cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
    cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
    cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
    cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
    cl('WholeProgramOptimization', map={'true': '/GL'})
    cl('WarningLevel', prefix='/W')
    cl('WarnAsError', map={'true': '/WX'})
    cl('DebugInformationFormat',
        map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
    cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
    cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
    cl('MinimalRebuild', map={'true': '/Gm'})
    cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
    cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
    cl('RuntimeLibrary',
        map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
    cl('DefaultCharIsUnsigned', map={'true': '/J'})
    cl('TreatWChar_tAsBuiltInType',
        map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
    cl('EnablePREfast', map={'true': '/analyze'})
    cl('AdditionalOptions', prefix='')
    # Force-included files are passed through as /FI flags.
    cflags.extend(['/FI' + f for f in self._Setting(
        ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
    if self.vs_version.short_name in ('2013', '2013e'):
      # New flag required in 2013 to maintain previous PDB behavior.
      cflags.append('/FS')
    # ninja handles parallelism by itself, don't have the compiler do it too.
    # NOTE(review): filter() returns a list on Python 2; under Python 3 this
    # would need list(filter(...)) — confirm the intended interpreter.
    cflags = filter(lambda x: not x.startswith('/MP'), cflags)
    return cflags
def GetPrecompiledHeader(self, config, gyp_to_build_path):
"""Returns an object that handles the generation of precompiled header
build steps."""
config = self._TargetConfig(config)
return _PchHelper(self, config, gyp_to_build_path)
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
  def GetLdflags(self, config, gyp_to_build_path, expand_special,
                 manifest_base_name, output_name, is_executable, build_dir):
    """Returns the flags that need to be added to link commands, and the
    manifest files.

    Returns a 3-tuple (ldflags, intermediate_manifest, manifest_files);
    the latter two come from _GetLdManifestFlags. Flag order matters for
    the default-injection checks (DYNAMICBASE/NXCOMPAT) near the end.
    """
    config = self._TargetConfig(config)
    ldflags = []
    ld = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLinkerTool', append=ldflags)
    # A .def file listed in sources is implicitly passed to the linker.
    self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
    ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
    ldflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLinkerTool', config, gyp_to_build_path))
    ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
    ld('TreatLinkerWarningAsErrors', prefix='/WX',
       map={'true': '', 'false': ':NO'})
    out = self.GetOutputName(config, expand_special)
    if out:
      ldflags.append('/OUT:' + out)
    pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
    if pdb:
      ldflags.append('/PDB:' + pdb)
    pgd = self.GetPGDName(config, expand_special)
    if pgd:
      ldflags.append('/PGD:' + pgd)
    map_file = self.GetMapFileName(config, expand_special)
    ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
        else '/MAP'})
    ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
    ld('AdditionalOptions', prefix='')
    # The subsystem flag may carry a minimum OS version suffix, e.g.
    # /SUBSYSTEM:CONSOLE,5.02.
    minimum_required_version = self._Setting(
        ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
    if minimum_required_version:
      minimum_required_version = ',' + minimum_required_version
    ld('SubSystem',
       map={'1': 'CONSOLE%s' % minimum_required_version,
            '2': 'WINDOWS%s' % minimum_required_version},
       prefix='/SUBSYSTEM:')
    ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
    ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
    ld('BaseAddress', prefix='/BASE:')
    ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
    ld('RandomizedBaseAddress',
       map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
    ld('DataExecutionPrevention',
       map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
    ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
    ld('ForceSymbolReferences', prefix='/INCLUDE:')
    ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
    ld('LinkTimeCodeGeneration',
       map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
            '4': ':PGUPDATE'},
       prefix='/LTCG')
    ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
    ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
    ld('EntryPointSymbol', prefix='/ENTRY:')
    ld('Profile', map={'true': '/PROFILE'})
    ld('LargeAddressAware',
       map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
    # TODO(scottmg): This should sort of be somewhere else (not really a flag).
    ld('AdditionalDependencies', prefix='')
    # If the base address is not specifically controlled, DYNAMICBASE should
    # be on by default.
    # NOTE(review): filter() returns a list on Python 2; under Python 3 these
    # truthiness tests would need list(...) — confirm intended interpreter.
    base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
                        ldflags)
    if not base_flags:
      ldflags.append('/DYNAMICBASE')
    # If the NXCOMPAT flag has not been specified, default to on. Despite the
    # documentation that says this only defaults to on when the subsystem is
    # Vista or greater (which applies to the linker), the IDE defaults it on
    # unless it's explicitly off.
    if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
      ldflags.append('/NXCOMPAT')
    # A .def file fixes the exported symbols, so manifest isolation is only
    # allowed for executables without one.
    have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
    manifest_flags, intermediate_manifest, manifest_files = \
        self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
                                 is_executable and not have_def_file, build_dir)
    ldflags.extend(manifest_flags)
    return ldflags, intermediate_manifest, manifest_files
  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
                          allow_isolation, build_dir):
    """Returns a 3-tuple:
    - the set of flags that need to be added to the link to generate
      a default manifest
    - the intermediate manifest that the linker will generate that should be
      used to assert it doesn't add anything to the merged one.
    - the list of all the manifest files to be merged by the manifest tool and
      included into the link."""
    # NOTE(review): this lookup uses the caller-supplied config; the
    # _TargetConfig remap only happens further down — confirm intentional.
    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
                                      config,
                                      default='true')
    if generate_manifest != 'true':
      # This means not only that the linker should not generate the intermediate
      # manifest but also that the manifest tool should do nothing even when
      # additional manifests are specified.
      # (The intermediate-manifest slot is an empty list here rather than a
      # file name.)
      return ['/MANIFEST:NO'], [], []
    output_name = name + '.intermediate.manifest'
    flags = [
      '/MANIFEST',
      '/ManifestFile:' + output_name,
    ]
    # Instead of using the MANIFESTUAC flags, we generate a .manifest to
    # include into the list of manifests. This allows us to avoid the need to
    # do two passes during linking. The /MANIFEST flag and /ManifestFile are
    # still used, and the intermediate manifest is used to assert that the
    # final manifest we get from merging all the additional manifest files
    # (plus the one we generate here) isn't modified by merging the
    # intermediate into it.
    # Always NO, because we generate a manifest file that has what we want.
    flags.append('/MANIFESTUAC:NO')
    config = self._TargetConfig(config)
    enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
                               default='true')
    manifest_files = []
    generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
    if enable_uac == 'true':
      execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
                                      config, default='0')
      execution_level_map = {
        '0': 'asInvoker',
        '1': 'highestAvailable',
        '2': 'requireAdministrator'
      }
      ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
                                default='false')
      # The trustInfo fragment is spliced into the outer assembly element.
      inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
    <requestedPrivileges>
      <requestedExecutionLevel level='%s' uiAccess='%s' />
    </requestedPrivileges>
  </security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
    else:
      inner = ''
    generated_manifest_contents = generated_manifest_outer % inner
    generated_name = name + '.generated.manifest'
    # Need to join with the build_dir here as we're writing it during
    # generation time, but we return the un-joined version because the build
    # will occur in that directory. We only write the file if the contents
    # have changed so that simply regenerating the project files doesn't
    # cause a relink.
    build_dir_generated_name = os.path.join(build_dir, generated_name)
    gyp.common.EnsureDirExists(build_dir_generated_name)
    f = gyp.common.WriteOnDiff(build_dir_generated_name)
    f.write(generated_manifest_contents)
    f.close()
    manifest_files = [generated_name]
    if allow_isolation:
      flags.append('/ALLOWISOLATION')
    manifest_files += self._GetAdditionalManifestFiles(config,
                                                      gyp_to_build_path)
    return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def HasExplicitIdlRules(self, spec):
"""Determine if there's an explicit rule for idl files. When there isn't we
need to generate implicit rules to build MIDL .idl files."""
return self._HasExplicitRuleForExtension(spec, 'idl')
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
class PrecompiledHeader(object):
  """Helper to generate dependencies and build rules to handle generation of
  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
  """
  def __init__(
      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
    self.settings = settings
    self.config = config
    pch_source = self.settings.msvs_precompiled_source[self.config]
    self.pch_source = gyp_to_build_path(pch_source)
    filename, _ = os.path.splitext(pch_source)
    # Object file the PCH compile step produces; lowercased to match how the
    # ninja generator names objects on Windows.
    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
  def _PchHeader(self):
    """Get the header that will appear in an #include line for all source
    files."""
    return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
  def GetObjDependencies(self, sources, objs, arch):
    """Given a list of sources files and the corresponding object files,
    returns a list of the pch files that should be depended upon. The
    additional wrapping in the return value is for interface compatability
    with make.py on Mac, and xcode_emulation.py."""
    assert arch is None
    if not self._PchHeader():
      return []
    # Only depend on the PCH object if at least one source shares the PCH
    # source's language (C vs. C++).
    pch_ext = os.path.splitext(self.pch_source)[1]
    for source in sources:
      if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
        return [(None, None, self.output_obj)]
    return []
  def GetPchBuildCommands(self, arch):
    """Not used on Windows as there are no additional build steps required
    (instead, existing steps are modified in GetFlagsModifications below)."""
    return []
  def GetFlagsModifications(self, input, output, implicit, command,
                            cflags_c, cflags_cc, expand_special):
    """Get the modified cflags and implicit dependencies that should be used
    for the pch compilation step.

    When |input| is the PCH source itself, /Yc is added so this compile
    creates the PCH, and the output is redirected to self.output_obj.
    NOTE(review): map() returns a list on Python 2 but an iterator on
    Python 3 — confirm the intended interpreter for consumers of this value.
    """
    if input == self.pch_source:
      pch_output = ['/Yc' + self._PchHeader()]
      if command == 'cxx':
        return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
                self.output_obj, [])
      elif command == 'cc':
        return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
                self.output_obj, [])
    return [], output, implicit
# Process-wide cache for the detected Visual Studio version; detection is
# expensive and the answer cannot change within one gyp run.
vs_version = None
def GetVSVersion(generator_flags):
  # Lazily detect and memoize the Visual Studio version to use, honoring an
  # explicit 'msvs_version' generator flag and defaulting to auto-detection.
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'))
  return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
  """Returns the command line that invokes the VS environment setup script.
  (|arch| is currently unused here; SetupScript is called without it.)"""
  return GetVSVersion(generator_flags).SetupScript()
def ExpandMacros(string, expansions):
  """Expand $(Variable) references in |string| per the |expansions| dict.

  See MsvsSettings.GetVSMacroEnv for the canonical way to retrieve a
  suitable dict. Replacement values must themselves be fully expanded
  (asserted below, since a '$(' in a value would silently survive).
  """
  if '$' in string:
    # items() (rather than Python 2's iteritems()) behaves identically on
    # Python 2 and keeps this module importable under Python 3.
    for old, new in expansions.items():
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirement (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to the gyp to suppress file
  generation and use custom environment files prepared by yourself.

  Returns a dict mapping arch ('x86'/'x64') to the cl.exe path to use.
  Side effect: writes 'environment.<arch>' files into |toplevel_build_dir|
  via |open_out| unless custom environment files were requested.
  """
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    # User supplies their own environment files; just use a bare 'cl.exe'.
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    # NOTE(review): communicate() returns bytes on Python 3; the parsing in
    # _ExtractImportantEnvironment assumes str — confirm Python 2 only.
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    env_block = _FormatAsEnvironmentBlock(env)
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()
    # Find cl.exe location for this architecture, using cmd's for/%~$PATH:i
    # expansion to search the PATH the setup script established.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
  generator: Check that all regular source files, i.e. not created at run
  time, exist on disk. Missing files cause needless recompilation when
  building via VS, and we want this check to match for people/bots that build
  using ninja, so they're not surprised when the VS build fails.

  Raises an Exception listing the missing files when the
  'msvs_error_on_missing_sources' generator flag is set and any regular
  source is absent; otherwise does nothing.
  """
  if not int(generator_flags.get('msvs_error_on_missing_sources', 0)):
    return
  # Sources containing '$' are generated at build time and can't be checked.
  no_specials = [s for s in sources if '$' not in s]
  relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
  # List comprehensions (rather than Python 2's list-returning filter())
  # keep the truthiness test below correct under Python 3 as well.
  missing = [p for p in relative if not os.path.exists(p)]
  if missing:
    # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
    # path for a slightly less crazy looking output.
    cleaned_up = [os.path.normpath(x) for x in missing]
    raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
  """Populate MSVS_VERSION and MSVS_OS_BITS in |default_variables|."""
  generator_flags = params.get('generator_flags', {})
  # Set a variable so conditions can be based on msvs_version.
  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running thru WOW64).
  arch_values = (os.environ.get('PROCESSOR_ARCHITECTURE', ''),
                 os.environ.get('PROCESSOR_ARCHITEW6432', ''))
  is_64bit = any('64' in value for value in arch_values)
  default_variables['MSVS_OS_BITS'] = 64 if is_64bit else 32
| gpl-3.0 |
epiphone/tiea207_c2264 | web/form.py | 53 | 13440 | """
HTML forms
(part of web.py)
"""
import copy, re
import webapi as web
import utils, net
def attrget(obj, attr, value=None):
    """Look up |attr| on |obj| first as a mapping key (via has_key, when the
    object provides it) and then as an attribute; returns |value| when both
    lookups fail."""
    # Mapping-style lookup first (dict-likes on Python 2 / web.py storage).
    try:
        if hasattr(obj, 'has_key') and obj.has_key(attr):
            return obj[attr]
    except TypeError:
        # has_key may take a different number of arguments; this is the case
        # with Model objects on appengine. See #134.
        pass
    # Fall back to attribute access.
    if hasattr(obj, attr):
        return getattr(obj, attr)
    return value
class Form(object):
    r"""
    HTML form.

    >>> f = Form(Textbox("x"))
    >>> f.render()
    u'<table>\n <tr><th><label for="x">x</label></th><td><input type="text" id="x" name="x"/></td></tr>\n</table>'
    """
    def __init__(self, *inputs, **kw):
        self.inputs = inputs
        self.valid = True
        self.note = None
        self.validators = kw.pop('validators', [])
    def __call__(self, x=None):
        # Calling a form instance produces an independent copy, optionally
        # validated against the given input mapping.
        o = copy.deepcopy(self)
        if x: o.validates(x)
        return o
    def render(self):
        """Render the form as an HTML table."""
        out = ''
        out += self.rendernote(self.note)
        out += '<table>\n'
        for i in self.inputs:
            html = utils.safeunicode(i.pre) + i.render() + self.rendernote(i.note) + utils.safeunicode(i.post)
            if i.is_hidden():
                out += ' <tr style="display: none;"><th></th><td>%s</td></tr>\n' % (html)
            else:
                out += ' <tr><th><label for="%s">%s</label></th><td>%s</td></tr>\n' % (i.id, net.websafe(i.description), html)
        out += "</table>"
        return out
    def render_css(self):
        """Render the form as label/input pairs (for CSS-based layout)."""
        out = []
        out.append(self.rendernote(self.note))
        for i in self.inputs:
            if not i.is_hidden():
                out.append('<label for="%s">%s</label>' % (i.id, net.websafe(i.description)))
            out.append(i.pre)
            out.append(i.render())
            out.append(self.rendernote(i.note))
            out.append(i.post)
            out.append('\n')
        return ''.join(out)
    def rendernote(self, note):
        """Render a validation note, or '' when there is none."""
        if note: return '<strong class="wrong">%s</strong>' % net.websafe(note)
        else: return ""
    def validates(self, source=None, _validate=True, **kw):
        """Fill the form from |source| (or keyword args, or web.input()) and,
        unless _validate is False, run all input and form validators.
        Returns the overall validity and records it in self.valid."""
        source = source or kw or web.input()
        out = True
        for i in self.inputs:
            v = attrget(source, i.name)
            if _validate:
                out = i.validate(v) and out
            else:
                i.set_value(v)
        if _validate:
            out = out and self._validate(source)
        self.valid = out
        return out
    def _validate(self, value):
        # Run the form-level validators; the first failure sets self.note.
        self.value = value
        for v in self.validators:
            if not v.valid(value):
                self.note = v.msg
                return False
        return True
    def fill(self, source=None, **kw):
        """Set input values from |source| without running any validators."""
        return self.validates(source, _validate=False, **kw)
    def __getitem__(self, i):
        for x in self.inputs:
            if x.name == i: return x
        # raise Exc(arg) form is valid on both Python 2 and 3 (the
        # 'raise KeyError, i' statement form is Python-2-only syntax).
        raise KeyError(i)
    def __getattr__(self, name):
        # don't interfere with deepcopy
        inputs = self.__dict__.get('inputs') or []
        for x in inputs:
            if x.name == name: return x
        raise AttributeError(name)
    def get(self, i, default=None):
        """Like __getitem__ but returns |default| instead of raising."""
        try:
            return self[i]
        except KeyError:
            return default
    def _get_d(self): #@@ should really be form.attr, no?
        return utils.storage([(i.name, i.get_value()) for i in self.inputs])
    d = property(_get_d)
class Input(object):
    """Base class for a single form field.

    Keyword attributes become HTML attributes; the special keys
    ``description``, ``value``, ``pre``, ``post``, ``id`` and ``class_``
    are extracted and handled here.
    """
    def __init__(self, name, *validators, **attrs):
        self.name = name
        self.validators = validators
        # Remaining attrs render verbatim as HTML attributes.
        self.attrs = attrs = AttributeList(attrs)
        
        self.description = attrs.pop('description', name)
        self.value = attrs.pop('value', None)
        self.pre = attrs.pop('pre', "")
        self.post = attrs.pop('post', "")
        self.note = None
        
        # NOTE: the id default may depend on self.value (see Checkbox),
        # so it is computed after the pops above.
        self.id = attrs.setdefault('id', self.get_default_id())
        
        # `class` is a reserved word in Python, so callers pass `class_`.
        if 'class_' in attrs:
            attrs['class'] = attrs['class_']
            del attrs['class_']
        
    def is_hidden(self):
        # Hidden inputs get no visible label row in Form.render().
        return False
        
    def get_type(self):
        # Subclasses supply the type= attribute for <input> rendering.
        raise NotImplementedError
        
    def get_default_id(self):
        return self.name

    def validate(self, value):
        """Store *value*, run this input's validators; return overall success."""
        self.set_value(value)
        for v in self.validators:
            if not v.valid(value):
                self.note = v.msg
                return False
        return True

    def set_value(self, value):
        self.value = value

    def get_value(self):
        return self.value
        
    def render(self):
        """Render as <input .../>; a None value omits the value attribute."""
        attrs = self.attrs.copy()
        attrs['type'] = self.get_type()
        if self.value is not None:
            attrs['value'] = self.value
        attrs['name'] = self.name
        return '<input %s/>' % attrs

    def rendernote(self, note):
        if note: return '<strong class="wrong">%s</strong>' % net.websafe(note)
        else: return ""
        
    def addatts(self):
        # add leading space for backward-compatibility
        return " " + str(self.attrs)
class AttributeList(dict):
    """Dictionary of HTML attributes that knows how to render itself.

    >>> a = AttributeList(type='text', name='x', value=20)
    >>> a
    <attrs: 'type="text" name="x" value="20"'>
    """
    def copy(self):
        # Preserve the subclass (a plain dict.copy() would return dict).
        return AttributeList(self)
        
    def __str__(self):
        rendered = []
        for key, val in self.items():
            rendered.append('%s="%s"' % (key, net.websafe(val)))
        return " ".join(rendered)
        
    def __repr__(self):
        return '<attrs: %s>' % repr(str(self))
class Textbox(Input):
    """Single-line text input.
    
    >>> Textbox(name='foo', value='bar').render()
    u'<input type="text" id="foo" value="bar" name="foo"/>'
    >>> Textbox(name='foo', value=0).render()
    u'<input type="text" id="foo" value="0" name="foo"/>'
    """
    def get_type(self):
        # Rendered by Input.render() as <input type="text" .../>.
        return 'text'
class Password(Input):
    """Masked password input.
    
    >>> Password(name='password', value='secret').render()
    u'<input type="password" id="password" value="secret" name="password"/>'
    """
    def get_type(self):
        # Rendered by Input.render() as <input type="password" .../>.
        return 'password'
class Textarea(Input):
    """Multi-line text input rendered as a <textarea> element.
    
    >>> Textarea(name='foo', value='bar').render()
    u'<textarea id="foo" name="foo">bar</textarea>'
    """
    def render(self):
        rendered_attrs = self.attrs.copy()
        rendered_attrs['name'] = self.name
        # The value goes in the element body, escaped; None renders empty.
        content = net.websafe(self.value or '')
        return '<textarea %s>%s</textarea>' % (rendered_attrs, content)
class Dropdown(Input):
    r"""Dropdown/select input.
    
    >>> Dropdown(name='foo', args=['a', 'b', 'c'], value='b').render()
    u'<select id="foo" name="foo">\n <option value="a">a</option>\n <option selected="selected" value="b">b</option>\n <option value="c">c</option>\n</select>\n'
    >>> Dropdown(name='foo', args=[('a', 'aa'), ('b', 'bb'), ('c', 'cc')], value='b').render()
    u'<select id="foo" name="foo">\n <option value="a">aa</option>\n <option selected="selected" value="b">bb</option>\n <option value="c">cc</option>\n</select>\n'
    """
    def __init__(self, name, args, *validators, **attrs):
        # args: iterable of values, or of (value, description) pairs.
        self.args = args
        super(Dropdown, self).__init__(name, *validators, **attrs)

    def render(self):
        attrs = self.attrs.copy()
        attrs['name'] = self.name
        
        x = '<select %s>\n' % attrs
        
        for arg in self.args:
            x += self._render_option(arg)

        x += '</select>\n'

        return x

    def _render_option(self, arg, indent=' '):
        """Render one <option>; marks it selected when it matches self.value.

        A list-valued self.value selects every matching option (multi-select).
        """
        if isinstance(arg, (tuple, list)):
            value, desc= arg
        else:
            value, desc = arg, arg 

        if self.value == value or (isinstance(self.value, list) and value in self.value):
            select_p = ' selected="selected"'
        else:
            select_p = ''
        return indent + '<option%s value="%s">%s</option>\n' % (select_p, net.websafe(value), net.websafe(desc))
class GroupedDropdown(Dropdown):
    r"""Grouped Dropdown/select input.
    
    >>> GroupedDropdown(name='car_type', args=(('Swedish Cars', ('Volvo', 'Saab')), ('German Cars', ('Mercedes', 'Audi'))), value='Audi').render()
    u'<select id="car_type" name="car_type">\n <optgroup label="Swedish Cars">\n <option value="Volvo">Volvo</option>\n <option value="Saab">Saab</option>\n </optgroup>\n <optgroup label="German Cars">\n <option value="Mercedes">Mercedes</option>\n <option selected="selected" value="Audi">Audi</option>\n </optgroup>\n</select>\n'
    >>> GroupedDropdown(name='car_type', args=(('Swedish Cars', (('v', 'Volvo'), ('s', 'Saab'))), ('German Cars', (('m', 'Mercedes'), ('a', 'Audi')))), value='a').render()
    u'<select id="car_type" name="car_type">\n <optgroup label="Swedish Cars">\n <option value="v">Volvo</option>\n <option value="s">Saab</option>\n </optgroup>\n <optgroup label="German Cars">\n <option value="m">Mercedes</option>\n <option selected="selected" value="a">Audi</option>\n </optgroup>\n</select>\n'
    """
    def __init__(self, name, args, *validators, **attrs):
        # args: iterable of (group_label, options) pairs.
        self.args = args
        # NOTE(review): this calls Input.__init__ directly via
        # super(Dropdown, self), skipping Dropdown.__init__ (which would
        # only re-assign self.args).  Behaviorally equivalent, but confirm
        # it is intentional rather than a copy-paste of Dropdown's super call.
        super(Dropdown, self).__init__(name, *validators, **attrs)

    def render(self):
        attrs = self.attrs.copy()
        attrs['name'] = self.name
        
        x = '<select %s>\n' % attrs
        
        for label, options in self.args:
            x += ' <optgroup label="%s">\n' % net.websafe(label)
            for arg in options:
                x += self._render_option(arg, indent = ' ')
            x +=  ' </optgroup>\n'
            
        x += '</select>\n'

        return x
class Radio(Input):
    """Radio-button group: renders one <input type="radio"> per choice.

    Each entry of *args* is either a value, or a (value, description) pair;
    the entry matching self.value is rendered checked.
    """
    def __init__(self, name, args, *validators, **attrs):
        self.args = args
        super(Radio, self).__init__(name, *validators, **attrs)

    def render(self):
        pieces = ['<span>']
        for choice in self.args:
            if isinstance(choice, (tuple, list)):
                value, desc = choice
            else:
                value, desc = choice, choice
            button_attrs = self.attrs.copy()
            button_attrs['name'] = self.name
            button_attrs['type'] = 'radio'
            button_attrs['value'] = value
            if self.value == value:
                button_attrs['checked'] = 'checked'
            pieces.append('<input %s/> %s' % (button_attrs, net.websafe(desc)))
        pieces.append('</span>')
        return ''.join(pieces)
class Checkbox(Input):
    """Checkbox input.
    
    >>> Checkbox('foo', value='bar', checked=True).render()
    u'<input checked="checked" type="checkbox" id="foo_bar" value="bar" name="foo"/>'
    >>> Checkbox('foo', value='bar').render()
    u'<input type="checkbox" id="foo_bar" value="bar" name="foo"/>'
    >>> c = Checkbox('foo', value='bar')
    >>> c.validate('on')
    True
    >>> c.render()
    u'<input checked="checked" type="checkbox" id="foo_bar" value="bar" name="foo"/>'
    """
    def __init__(self, name, *validators, **attrs):
        # `checked` controls the initial state; the submitted value is
        # self.value (set by Input.__init__ from the `value` attr).
        self.checked = attrs.pop('checked', False)
        Input.__init__(self, name, *validators, **attrs)
        
    def get_default_id(self):
        # The default id embeds the construction-time value so several
        # checkboxes sharing one name still get distinct ids.
        value = utils.safestr(self.value or "")
        return self.name + '_' + value.replace(' ', '_')

    def render(self):
        attrs = self.attrs.copy()
        attrs['type'] = 'checkbox'
        attrs['name'] = self.name
        attrs['value'] = self.value

        if self.checked:
            attrs['checked'] = 'checked'
        return '<input %s/>' % attrs

    def set_value(self, value):
        # Any truthy submitted value (e.g. 'on') marks the box as checked.
        self.checked = bool(value)

    def get_value(self):
        # A checkbox reports its checked state, not the value attribute.
        return self.checked
class Button(Input):
    """HTML <button> element.
    
    >>> Button("save").render()
    u'<button id="save" name="save">save</button>'
    >>> Button("action", value="save", html="<b>Save Changes</b>").render()
    u'<button id="action" value="save" name="action"><b>Save Changes</b></button>'
    """
    def __init__(self, name, *validators, **attrs):
        super(Button, self).__init__(name, *validators, **attrs)
        # A button carries its label in the element body, not in a <label>.
        self.description = ""

    def render(self):
        rendered = self.attrs.copy()
        rendered['name'] = self.name
        if self.value is not None:
            rendered['value'] = self.value
        # An explicit `html` attr is used verbatim as the body; otherwise
        # the (escaped) name is shown.
        label = rendered.pop('html', None) or net.websafe(self.name)
        return '<button %s>%s</button>' % (rendered, label)
class Hidden(Input):
    """Hidden form field.
    
    >>> Hidden(name='foo', value='bar').render()
    u'<input type="hidden" id="foo" value="bar" name="foo"/>'
    """
    def is_hidden(self):
        # Form.render() places hidden fields in an invisible table row.
        return True
        
    def get_type(self):
        return 'hidden'
class File(Input):
    """File-upload input.
    
    >>> File(name='f').render()
    u'<input type="file" id="f" name="f"/>'
    """
    def get_type(self):
        # Rendered by Input.render() as <input type="file" .../>.
        return 'file'
class Validator:
    """Wraps a predicate *test* with an error message *msg*.

    *jstest* may carry a JavaScript equivalent of the test for client-side
    validation.
    """
    def __deepcopy__(self, memo):
        # Validators are treated as immutable; share the instance on deepcopy.
        return copy.copy(self)

    def __init__(self, msg, test, jstest=None):
        utils.autoassign(self, locals())

    def valid(self, value):
        """Return the test result; a test that raises counts as invalid."""
        try:
            return self.test(value)
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.  A failing test should only mean "invalid".
            return False
notnull = Validator("Required", bool)
class regexp(Validator):
    """Validator accepting values that match the regular expression *rexp*."""
    def __init__(self, rexp, msg):
        # Compile once; valid() may be called repeatedly per submission.
        self.rexp = re.compile(rexp)
        self.msg = msg
    
    def valid(self, value):
        # re.match anchors at the start of the string only.
        return self.rexp.match(value) is not None
# Run the embedded doctests when executed directly: python form.py
if __name__ == "__main__":
    import doctest
    doctest.testmod()
| agpl-3.0 |
UltrosBot/Ultros | system/metrics/public.py | 1 | 12528 | # coding=utf-8
import json
import platform
import psutil
import sys
import traceback
import urllib
import urllib2
from twisted.internet.task import LoopingCall
from system.constants import version_info
from system.decorators.threads import run_async_threadpool
from system.events.manager import EventManager
from system.logging.logger import getLogger
from system.singleton import Singleton
from system.storage.formats import JSON
from system.storage.manager import StorageManager
from system.translations import Translations
from utils.packages.packages import Packages
__author__ = 'Gareth Coles'
_ = Translations().get()
warning = """
.i;;;;i.
iYcviii;vXY:
.YXi .i1c.
.YC. . in7.
.vc. ...... ;1c.
i7, .. .;1;
i7, .. ... .Y1i
,7v .6MMM@; .YX,
.7;. ..IMMMMMM1 :t7.
.;Y. ;$MMMMMM9. :tc.
vY. .. .nMMM@MMU. ;1v.
i7i ... .#MM@M@C. .....:71i
it: .... $MMM@9;.,i;;;i,;tti
:t7. ..... 0MMMWv.,iii:::,,;St.
.nC. ..... IMMMQ..,::::::,.,czX.
.ct: ....... .ZMMMI..,:::::::,,:76Y.
c2: ......,i..Y$M@t..:::::::,,..inZY
vov ......:ii..c$MBc..,,,,,,,,,,..iI9i
i9Y ......iii:..7@MA,..,,,,,,,,,....;AA:
iIS. ......:ii::..;@MI....,............;Ez.
.I9. ......:i::::...8M1..................C0z.
.z9; ......:i::::,.. .i:...................zWX.
vbv ......,i::::,,. ................. :AQY
c6Y. .,...,::::,,..:t0@@QY. ................ :8bi
:6S. ..,,...,:::,,,..EMMMMMMI. ............... .;bZ,
:6o, .,,,,..:::,,,..i#MMMMMM#v................. YW2.
.n8i ..,,,,,,,::,,,,.. tMMMMM@C:.................. .1Wn
7Uc. .:::,,,,,::,,,,.. i1t;,..................... .UEi
7C...::::::::::::,,,,.. .................... vSi.
;1;...,,::::::,......... .................. Yz:
v97,......... .voC.
izAotX7777777777777777777777777777777777777777Y7n92:
.;CoIIIIIUAA666666699999ZZZZZZZZZZZZZZZZZZZZ6ov.
"""
class Metrics(object):
    """
    Configurable metrics handler.

    This sends some basic stats to the site over at http://ultros.io/metrics
    when configured.  `status` may be True (report), False (send a one-off
    disable message) or "destroy" (ask the server to delete our data).
    """

    __metaclass__ = Singleton

    storage = None
    events = None
    packages = None

    status = True
    send_exceptions = True

    config = {}
    log = None
    task = None
    manager = None

    interval = 300  # Every 5 minutes

    domain = "https://ultros.io"

    submit_url = domain + "/api/metrics/post/%s"
    exception_url = domain + "/api/metrics/post/exception/%s"
    uuid_url = domain + "/api/metrics/get/uuid"
    destroy_url = domain + "/api/metrics/destroy/%s"

    uuid = ""

    def __init__(self, config=None, manager=None):
        """Read the metrics settings, obtain a UUID if needed, start the task."""
        if config is None or manager is None:
            raise ValueError("Config and manager must not be None!")

        self.config = config
        self.manager = manager
        self.log = getLogger("Metrics")
        self.storage = StorageManager()
        self.events = EventManager()
        self.packages = Packages(get=False)

        self.data = self.storage.get_file(self, "data", JSON, "metrics.json")

        self.task = LoopingCall(self.submit_metrics)

        if "metrics" in config:
            # "on"/"off" normalise to booleans; any other string (e.g.
            # "destroy") is kept as-is and handled below.
            self.status = config["metrics"]
            if self.status == "on":
                self.status = True
            elif self.status == "off":
                self.status = False
        else:
            self.log.warn("\n%s\n" % warning)
            self.log.warn(_(
                "We couldn't find a \"metrics\" option in your settings.yml"
                " file!"
            ))
            self.log.warn(_(
                "Metrics will default to being turned on. If this is not what"
                " you want, please create a \"metrics\" option in your "
                "settings and set it to \"off\"."
            ))
            self.log.warn(_(
                "If you want to keep metrics enabled, set the option to"
                " \"on\"."
            ))
            self.log.warn(_(
                "This warning will be shown on every startup until the option"
                " has been set."
            ))
            self.status = True

        if "send-exceptions" not in config:
            self.log.warn(_(
                "We couldn't find a \"send-exceptions\" option in your "
                "settings.yml file!"
            ))
            self.log.warn(_(
                "Exception sending will default to being turned on. If this "
                "is not what you want, please create a \"send-exceptions\" "
                "option in your settings and set it to \"off\"."
            ))
            self.log.warn(_(
                "If you want to keep exception sending enabled, set the "
                "option to \"on\"."
            ))
            self.log.warn(_(
                "This warning will be shown on every startup until the option"
                " has been set."
            ))

        self.send_exceptions = config.get("send-exceptions", True)

        with self.data:
            if self.status is True:
                if "uuid" not in self.data:
                    # First enabled run: ask the server for our identifier.
                    try:
                        uuid = self.get(self.uuid_url)
                    except Exception:
                        self.log.exception(_("Error getting UUID"))
                        return
                    self.data["uuid"] = uuid
                    self.data["status"] = "enabled"
            elif "uuid" not in self.data:
                self.data["status"] = "disabled"

        if self.status is False:
            if self.data["status"] == "disabled":
                # Already told the server; nothing more to send.
                self.log.info(_("Metrics are disabled."))
                return
        elif self.status == "destroy":
            # BUG FIX: was `self.status is "destroy"` -- an identity
            # comparison against a config-sourced string, which is not
            # guaranteed to hold and let destroy-with-no-uuid fall through
            # to task.start() and later KeyError on self.data["uuid"].
            if "uuid" not in self.data:
                self.log.info(_("Metrics are disabled."))
                return

        self.task.start(self.interval)

    @run_async_threadpool
    def submit_metrics(self):
        """Periodic task body: send metrics, a disable notice, or a destroy request."""
        self.log.trace(_("Firing task."))
        compiled = {"plugins": [], "packages": [], "protocols": []}
        if self.status is True:
            self.log.debug(_("Submitting metrics."))
            compiled["plugins"] = [
                obj.info.name for obj in
                self.manager.plugman.plugin_objects.values()
            ]
            compiled["packages"] = self.packages.get_installed_packages()

            for name in self.manager.factories.keys():
                proto = self.manager.get_protocol(name)
                compiled["protocols"].append(proto.TYPE)

            try:
                compiled["enabled"] = True

                is_64bits = sys.maxsize > 2 ** 32
                cpu = platform.processor().strip() or "Unknown"
                _os = platform.system()

                if _os.lower() == "linux":
                    # Pretty-print the distro, e.g. "Linux: Ubuntu 14.04 (trusty)"
                    nix = list(platform.linux_distribution())
                    if nix[2]:
                        nix[2] = "({})".format(nix[2])
                    nix = filter(None, nix)
                    if nix:
                        _os = "{}: {}".format(_os, " ".join(nix))
                    else:
                        _os = "{}: Unknown".format(_os)
                else:
                    release = platform.release()
                    if release:
                        _os = "{} {}".format(_os, release)

                ram = psutil.virtual_memory().total / 1048576.0  # MiB
                python = "%s %s %s" % (
                    platform.python_implementation(),
                    platform.python_version(),
                    "x64" if is_64bits else "x86"
                )
                release = version_info["release"]
                _hash = version_info["hash"] or "Zipball (%s)" % release

                compiled["system"] = {
                    "cpu": cpu,
                    "os": _os,
                    "python": python,
                    "ram": ram,
                    "release": release,
                    "hash": _hash
                }

                r = self.post(self.submit_url % self.data["uuid"], compiled)
                r = json.loads(r)
                self.log.trace(_("Submitted. Result: %s") % r)
                if r["result"] == "error":
                    self.log.error(_("Error submitting metrics: %s")
                                   % r["error"])
            except Exception:
                self.log.exception(_("Error submitting metrics"))
        elif self.status is False:
            # One final message so the server marks us disabled, then stop.
            self.log.debug(_("Submitting disable message."))
            try:
                compiled["enabled"] = False
                r = self.post(self.submit_url % self.data["uuid"], compiled)
                r = json.loads(r)
                self.log.trace(_("Submitted. Result: %s") % r)
                if r["result"] == "error":
                    self.log.error(_("Error submitting disable message: %s")
                                   % r["error"])
            except Exception:
                self.log.exception(_("Error submitting disable message"))
            else:
                with self.data:
                    self.data["status"] = "disabled"
            finally:
                self.task.stop()
        elif self.status == "destroy":
            # Ask the server to delete our data, then forget our UUID.
            self.log.debug(_("Submitting destruction message."))
            try:
                r = self.get(self.destroy_url % self.data["uuid"])
                r = json.loads(r)
                self.log.trace("Submitted. Result: %s" % r)
                if r["result"] == "success":
                    self.log.info(_("Metrics data has been removed from the "
                                    "server."))
                else:
                    self.log.warn(_("Unknown UUID, data was already removed "
                                    "from the server."))
            except Exception:
                self.log.exception(_("Error submitting destruction message"))
            else:
                with self.data:
                    del self.data["uuid"]
                    self.data["status"] = "disabled"
            finally:
                self.task.stop()
        else:
            self.log.warn(_("Unknown status: %s") % self.status)
            self.task.stop()

    def submit_exception(self, exc_info):
        """Send a formatted traceback plus the innermost frame's locals."""
        t = None
        if self.status is True and self.send_exceptions:
            try:
                t = traceback.format_exception(*exc_info)
                tb = exc_info[2]
                # Walk to the innermost frame of the traceback.
                while 1:
                    if not tb.tb_next:
                        break
                    tb = tb.tb_next
                f = tb.tb_frame
                scope = {}
                for key, value in sorted(f.f_locals.items()):
                    if key == "__doc__":
                        v = "[DOCSTRING]"
                    else:
                        # Locals may not be safely stringifiable.
                        try:
                            v = str(value)
                        except Exception:
                            try:
                                v = repr(value)
                            except Exception:
                                v = "[UNKNOWN]"
                    scope[key] = v
                self.post(
                    self.exception_url % self.data["uuid"],
                    {
                        "traceback": "\n".join(t),
                        "type": str(exc_info[0]),
                        "value": str(exc_info[1]),
                        "scope": scope
                    }
                )
            finally:
                # Break the traceback reference cycle (Python 2 idiom).
                del exc_info, t

    def post(self, url, data):
        """POST *data* (JSON-encoded, then form-encoded under "data") to *url*."""
        data = json.dumps(data)
        self.log.debug("Posting data: %s" % data)
        # NOTE(review): the body is form-encoded but the header claims
        # application/json -- presumably what the server expects; confirm
        # before changing either side.
        data = urllib.urlencode({"data": data})
        req = urllib2.Request(
            url, data, {'Content-Type': 'application/json'}
        )
        result = urllib2.urlopen(req).read()
        self.log.debug("Result: %s" % result)
        return result

    def get(self, url):
        """Simple GET helper returning the raw response body."""
        return urllib2.urlopen(url).read()
| artistic-2.0 |
mcc-petrinets/formulas | spot/python/buddy.py | 1 | 23863 | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.10
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# --- SWIG bootstrap: locate and import the compiled _buddy extension. ---
# On 2.7+/3.x, try a package-relative import first, then a top-level one;
# on 2.6 fall back to the deprecated `imp` machinery.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info >= (2, 7, 0):
    def swig_import_helper():
        import importlib
        # Name of the package this module lives in ('' at top level).
        pkg = __name__.rpartition('.')[0]
        mname = '.'.join((pkg, '_buddy')).lstrip('.')
        try:
            return importlib.import_module(mname)
        except ImportError:
            return importlib.import_module('_buddy')
    _buddy = swig_import_helper()
    del swig_import_helper
elif _swig_python_version_info >= (2, 6, 0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            # Look for the extension next to this file first.
            fp, pathname, description = imp.find_module('_buddy', [dirname(__file__)])
        except ImportError:
            import _buddy
            return _buddy
        if fp is not None:
            try:
                _mod = imp.load_module('_buddy', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _buddy = swig_import_helper()
    del swig_import_helper
else:
    import _buddy
del _swig_python_version_info
# Compatibility shims emitted by SWIG.
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.

# Alias the builtins module under its Python 2 name for the helpers below.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """SWIG attribute setter; with static=1 it refuses to create new attributes."""
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        # Only accept genuine SWIG pointer objects for the `this` slot.
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    # Delegate to a generated C-level setter when one exists.
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        object.__setattr__(self, name, value)
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    # Non-static variant: also permits adding new Python-side attributes.
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr(self, class_type, name):
    """SWIG attribute getter backed by the class's __swig_getmethods__ table."""
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    raise AttributeError("'%s' object has no attribute '%s'" % (class_type.__name__, name))
def _swig_repr(self):
    """Default repr for SWIG proxies: <module.Class; proxy of ... >."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        # No valid underlying C object to describe.
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_method(set):
    """Wrap *set* so only existing attributes (plus 'this'/'thisown') may be assigned."""
    def set_attr(self, name, value):
        if (name == "thisown"):
            return self.this.own(value)
        if hasattr(self, name) or (name == "this"):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
class const_int_ptr(object):
    """SWIG proxy for a C ``const int *`` pointer with indexed read access."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, ptr: 'int const *'):
        _buddy.const_int_ptr_swiginit(self, _buddy.new_const_int_ptr(ptr))
    # Raw pointer value, exposed through the generated C accessors.
    ptr = _swig_property(_buddy.const_int_ptr_ptr_get, _buddy.const_int_ptr_ptr_set)

    def __getitem__(self, i: 'int') -> "int":
        return _buddy.const_int_ptr___getitem__(self, i)
    __swig_destroy__ = _buddy.delete_const_int_ptr
const_int_ptr_swigregister = _buddy.const_int_ptr_swigregister
const_int_ptr_swigregister(const_int_ptr)
class bdd(object):
    """SWIG proxy for the BuDDy ``bdd`` class (a binary decision diagram).

    All comparisons, hashing and boolean-algebra operators are forwarded to
    the underlying C library via the ``_buddy`` extension module.
    """
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def id(self) -> "int":
        # Internal node identifier of this BDD.
        return _buddy.bdd_id(self)

    # --- Rich comparisons (delegated to the C library) ---
    def __cmp__(self, b: 'bdd') -> "int":
        return _buddy.bdd___cmp__(self, b)

    def __le__(self, b: 'bdd') -> "bool":
        return _buddy.bdd___le__(self, b)

    def __lt__(self, b: 'bdd') -> "bool":
        return _buddy.bdd___lt__(self, b)

    def __eq__(self, b: 'bdd') -> "bool":
        return _buddy.bdd___eq__(self, b)

    def __ne__(self, b: 'bdd') -> "bool":
        return _buddy.bdd___ne__(self, b)

    def __ge__(self, b: 'bdd') -> "bool":
        return _buddy.bdd___ge__(self, b)

    def __gt__(self, b: 'bdd') -> "bool":
        return _buddy.bdd___gt__(self, b)

    def __hash__(self) -> "size_t":
        return _buddy.bdd___hash__(self)

    def __str__(self) -> "std::string":
        return _buddy.bdd___str__(self)

    # --- Boolean-algebra operators on BDDs ---
    def __and__(self, other: 'bdd') -> "bdd":
        return _buddy.bdd___and__(self, other)

    def __xor__(self, other: 'bdd') -> "bdd":
        return _buddy.bdd___xor__(self, other)

    def __or__(self, other: 'bdd') -> "bdd":
        return _buddy.bdd___or__(self, other)

    def __rshift__(self, other: 'bdd') -> "bdd":
        return _buddy.bdd___rshift__(self, other)

    def __lshift__(self, other: 'bdd') -> "bdd":
        return _buddy.bdd___lshift__(self, other)

    def __sub__(self, other: 'bdd') -> "bdd":
        return _buddy.bdd___sub__(self, other)

    def __neg__(self) -> "bdd":
        return _buddy.bdd___neg__(self)

    def __init__(self):
        _buddy.bdd_swiginit(self, _buddy.new_bdd())
    __swig_destroy__ = _buddy.delete_bdd
bdd_swigregister = _buddy.bdd_swigregister
bdd_swigregister(bdd)
def bdd_init(arg1: 'int', arg2: 'int') -> "int":
return _buddy.bdd_init(arg1, arg2)
bdd_init = _buddy.bdd_init
def bdd_done() -> "void":
return _buddy.bdd_done()
bdd_done = _buddy.bdd_done
def bdd_setvarnum(arg1: 'int') -> "int":
return _buddy.bdd_setvarnum(arg1)
bdd_setvarnum = _buddy.bdd_setvarnum
def bdd_extvarnum(arg1: 'int') -> "int":
return _buddy.bdd_extvarnum(arg1)
bdd_extvarnum = _buddy.bdd_extvarnum
def bdd_isrunning() -> "int":
return _buddy.bdd_isrunning()
bdd_isrunning = _buddy.bdd_isrunning
def bdd_setmaxnodenum(arg1: 'int') -> "int":
return _buddy.bdd_setmaxnodenum(arg1)
bdd_setmaxnodenum = _buddy.bdd_setmaxnodenum
def bdd_setmaxincrease(arg1: 'int') -> "int":
return _buddy.bdd_setmaxincrease(arg1)
bdd_setmaxincrease = _buddy.bdd_setmaxincrease
def bdd_setminfreenodes(arg1: 'int') -> "int":
return _buddy.bdd_setminfreenodes(arg1)
bdd_setminfreenodes = _buddy.bdd_setminfreenodes
def bdd_getnodenum() -> "int":
return _buddy.bdd_getnodenum()
bdd_getnodenum = _buddy.bdd_getnodenum
def bdd_getallocnum() -> "int":
return _buddy.bdd_getallocnum()
bdd_getallocnum = _buddy.bdd_getallocnum
def bdd_versionstr() -> "char *":
return _buddy.bdd_versionstr()
bdd_versionstr = _buddy.bdd_versionstr
def bdd_versionnum() -> "int":
return _buddy.bdd_versionnum()
bdd_versionnum = _buddy.bdd_versionnum
def bdd_fprintstat(arg1: 'FILE *') -> "void":
return _buddy.bdd_fprintstat(arg1)
bdd_fprintstat = _buddy.bdd_fprintstat
def bdd_printstat() -> "void":
return _buddy.bdd_printstat()
bdd_printstat = _buddy.bdd_printstat
def bdd_errstring(arg1: 'int') -> "char const *":
return _buddy.bdd_errstring(arg1)
bdd_errstring = _buddy.bdd_errstring
def bdd_clear_error() -> "void":
return _buddy.bdd_clear_error()
bdd_clear_error = _buddy.bdd_clear_error
def bdd_ithvar(v: 'int') -> "bdd":
return _buddy.bdd_ithvar(v)
bdd_ithvar = _buddy.bdd_ithvar
def bdd_nithvar(v: 'int') -> "bdd":
return _buddy.bdd_nithvar(v)
bdd_nithvar = _buddy.bdd_nithvar
def bdd_var(r: 'bdd') -> "int":
return _buddy.bdd_var(r)
bdd_var = _buddy.bdd_var
def bdd_low(r: 'bdd') -> "bdd":
return _buddy.bdd_low(r)
bdd_low = _buddy.bdd_low
def bdd_high(r: 'bdd') -> "bdd":
return _buddy.bdd_high(r)
bdd_high = _buddy.bdd_high
def bdd_scanset(r: 'bdd', v: 'int *&', n: 'int &') -> "int":
return _buddy.bdd_scanset(r, v, n)
bdd_scanset = _buddy.bdd_scanset
def bdd_makeset(v: 'int *', n: 'int') -> "bdd":
return _buddy.bdd_makeset(v, n)
bdd_makeset = _buddy.bdd_makeset
def bdd_setbddpair(p: 'bddPair *', ov: 'int', nv: 'bdd') -> "int":
return _buddy.bdd_setbddpair(p, ov, nv)
bdd_setbddpair = _buddy.bdd_setbddpair
def bdd_replace(r: 'bdd', p: 'bddPair *') -> "bdd":
return _buddy.bdd_replace(r, p)
bdd_replace = _buddy.bdd_replace
def bdd_compose(f: 'bdd', g: 'bdd', v: 'int') -> "bdd":
return _buddy.bdd_compose(f, g, v)
bdd_compose = _buddy.bdd_compose
def bdd_veccompose(f: 'bdd', p: 'bddPair *') -> "bdd":
return _buddy.bdd_veccompose(f, p)
bdd_veccompose = _buddy.bdd_veccompose
def bdd_restrict(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_restrict(r, var)
bdd_restrict = _buddy.bdd_restrict
def bdd_constrain(f: 'bdd', c: 'bdd') -> "bdd":
return _buddy.bdd_constrain(f, c)
bdd_constrain = _buddy.bdd_constrain
def bdd_simplify(d: 'bdd', b: 'bdd') -> "bdd":
return _buddy.bdd_simplify(d, b)
bdd_simplify = _buddy.bdd_simplify
def bdd_ibuildcube(v: 'int', w: 'int', a: 'int *') -> "bdd":
return _buddy.bdd_ibuildcube(v, w, a)
bdd_ibuildcube = _buddy.bdd_ibuildcube
def bdd_not(r: 'bdd') -> "bdd":
return _buddy.bdd_not(r)
bdd_not = _buddy.bdd_not
def bdd_apply(l: 'bdd', r: 'bdd', op: 'int') -> "bdd":
return _buddy.bdd_apply(l, r, op)
bdd_apply = _buddy.bdd_apply
def bdd_and(l: 'bdd', r: 'bdd') -> "bdd":
return _buddy.bdd_and(l, r)
bdd_and = _buddy.bdd_and
def bdd_or(l: 'bdd', r: 'bdd') -> "bdd":
return _buddy.bdd_or(l, r)
bdd_or = _buddy.bdd_or
def bdd_xor(l: 'bdd', r: 'bdd') -> "bdd":
return _buddy.bdd_xor(l, r)
bdd_xor = _buddy.bdd_xor
def bdd_imp(l: 'bdd', r: 'bdd') -> "bdd":
return _buddy.bdd_imp(l, r)
bdd_imp = _buddy.bdd_imp
def bdd_biimp(l: 'bdd', r: 'bdd') -> "bdd":
return _buddy.bdd_biimp(l, r)
bdd_biimp = _buddy.bdd_biimp
def bdd_setxor(l: 'bdd', r: 'bdd') -> "bdd":
return _buddy.bdd_setxor(l, r)
bdd_setxor = _buddy.bdd_setxor
def bdd_implies(l: 'bdd', r: 'bdd') -> "int":
return _buddy.bdd_implies(l, r)
bdd_implies = _buddy.bdd_implies
def bdd_ite(f: 'bdd', g: 'bdd', h: 'bdd') -> "bdd":
return _buddy.bdd_ite(f, g, h)
bdd_ite = _buddy.bdd_ite
def bdd_exist(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_exist(r, var)
bdd_exist = _buddy.bdd_exist
def bdd_existcomp(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_existcomp(r, var)
bdd_existcomp = _buddy.bdd_existcomp
def bdd_forall(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_forall(r, var)
bdd_forall = _buddy.bdd_forall
def bdd_forallcomp(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_forallcomp(r, var)
bdd_forallcomp = _buddy.bdd_forallcomp
def bdd_unique(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_unique(r, var)
bdd_unique = _buddy.bdd_unique
def bdd_uniquecomp(r: 'bdd', var: 'bdd') -> "bdd":
return _buddy.bdd_uniquecomp(r, var)
bdd_uniquecomp = _buddy.bdd_uniquecomp
def bdd_appex(l: 'bdd', r: 'bdd', op: 'int', var: 'bdd') -> "bdd":
return _buddy.bdd_appex(l, r, op, var)
bdd_appex = _buddy.bdd_appex
def bdd_appexcomp(l: 'bdd', r: 'bdd', op: 'int', var: 'bdd') -> "bdd":
return _buddy.bdd_appexcomp(l, r, op, var)
bdd_appexcomp = _buddy.bdd_appexcomp
def bdd_appall(l: 'bdd', r: 'bdd', op: 'int', var: 'bdd') -> "bdd":
return _buddy.bdd_appall(l, r, op, var)
bdd_appall = _buddy.bdd_appall
def bdd_appallcomp(l: 'bdd', r: 'bdd', op: 'int', var: 'bdd') -> "bdd":
return _buddy.bdd_appallcomp(l, r, op, var)
bdd_appallcomp = _buddy.bdd_appallcomp
def bdd_appuni(l: 'bdd', r: 'bdd', op: 'int', var: 'bdd') -> "bdd":
return _buddy.bdd_appuni(l, r, op, var)
bdd_appuni = _buddy.bdd_appuni
def bdd_appunicomp(l: 'bdd', r: 'bdd', op: 'int', var: 'bdd') -> "bdd":
return _buddy.bdd_appunicomp(l, r, op, var)
bdd_appunicomp = _buddy.bdd_appunicomp
def bdd_support(r: 'bdd') -> "bdd":
return _buddy.bdd_support(r)
bdd_support = _buddy.bdd_support
def bdd_satone(r: 'bdd') -> "bdd":
return _buddy.bdd_satone(r)
bdd_satone = _buddy.bdd_satone
def bdd_satoneset(r: 'bdd', var: 'bdd', pol: 'bdd') -> "bdd":
return _buddy.bdd_satoneset(r, var, pol)
bdd_satoneset = _buddy.bdd_satoneset
def bdd_fullsatone(r: 'bdd') -> "bdd":
return _buddy.bdd_fullsatone(r)
bdd_fullsatone = _buddy.bdd_fullsatone
def bdd_allsat(r: 'bdd', handler: 'bddallsathandler') -> "void":
return _buddy.bdd_allsat(r, handler)
bdd_allsat = _buddy.bdd_allsat
def bdd_satcount(r: 'bdd') -> "double":
return _buddy.bdd_satcount(r)
bdd_satcount = _buddy.bdd_satcount
def bdd_satcountset(r: 'bdd', varset: 'bdd') -> "double":
return _buddy.bdd_satcountset(r, varset)
bdd_satcountset = _buddy.bdd_satcountset
def bdd_satcountln(r: 'bdd') -> "double":
return _buddy.bdd_satcountln(r)
bdd_satcountln = _buddy.bdd_satcountln
def bdd_satcountlnset(r: 'bdd', varset: 'bdd') -> "double":
return _buddy.bdd_satcountlnset(r, varset)
bdd_satcountlnset = _buddy.bdd_satcountlnset
def bdd_nodecount(r: 'bdd') -> "int":
return _buddy.bdd_nodecount(r)
bdd_nodecount = _buddy.bdd_nodecount
def bdd_varprofile(r: 'bdd') -> "int *":
return _buddy.bdd_varprofile(r)
bdd_varprofile = _buddy.bdd_varprofile
def bdd_pathcount(r: 'bdd') -> "double":
return _buddy.bdd_pathcount(r)
bdd_pathcount = _buddy.bdd_pathcount
def bdd_fprinttable(file: 'FILE *', r: 'bdd') -> "void":
return _buddy.bdd_fprinttable(file, r)
# ---------------------------------------------------------------------------
# Auto-generated SWIG wrapper layer for the BuDDy BDD C library.
# Pattern: each ``def`` stub documents the C signature (the annotations carry
# the original C types), and is then immediately shadowed by the raw C entry
# point via ``name = _buddy.name`` so calls go straight into the extension
# module.  Do not edit by hand; regenerate with SWIG instead.
# ---------------------------------------------------------------------------
bdd_fprinttable = _buddy.bdd_fprinttable
def bdd_printtable(r: 'bdd') -> "void":
    return _buddy.bdd_printtable(r)
bdd_printtable = _buddy.bdd_printtable
def bdd_fprintset(file: 'FILE *', r: 'bdd') -> "void":
    return _buddy.bdd_fprintset(file, r)
bdd_fprintset = _buddy.bdd_fprintset
def bdd_printset(r: 'bdd') -> "void":
    return _buddy.bdd_printset(r)
bdd_printset = _buddy.bdd_printset
def bdd_printdot(r: 'bdd') -> "void":
    return _buddy.bdd_printdot(r)
bdd_printdot = _buddy.bdd_printdot
def bdd_fprintdot(ofile: 'FILE *', r: 'bdd') -> "void":
    return _buddy.bdd_fprintdot(ofile, r)
bdd_fprintdot = _buddy.bdd_fprintdot
def bdd_fnprintdot(fname: 'char *', r: 'bdd') -> "int":
    return _buddy.bdd_fnprintdot(fname, r)
bdd_fnprintdot = _buddy.bdd_fnprintdot
def bdd_fnsave(fname: 'char *', r: 'bdd') -> "int":
    return _buddy.bdd_fnsave(fname, r)
bdd_fnsave = _buddy.bdd_fnsave
def bdd_save(ofile: 'FILE *', r: 'bdd') -> "int":
    return _buddy.bdd_save(ofile, r)
bdd_save = _buddy.bdd_save
def bdd_fnload(fname: 'char *', r: 'bdd') -> "int":
    return _buddy.bdd_fnload(fname, r)
bdd_fnload = _buddy.bdd_fnload
def bdd_load(ifile: 'FILE *', r: 'bdd') -> "int":
    return _buddy.bdd_load(ifile, r)
bdd_load = _buddy.bdd_load
def bdd_addvarblock(v: 'bdd', f: 'int') -> "int":
    return _buddy.bdd_addvarblock(v, f)
bdd_addvarblock = _buddy.bdd_addvarblock
# Binary BDD operator codes exported from the C library (used as the ``op``
# argument of apply-style routines).
bddop_and = _buddy.bddop_and
bddop_xor = _buddy.bddop_xor
bddop_or = _buddy.bddop_or
bddop_nand = _buddy.bddop_nand
bddop_nor = _buddy.bddop_nor
bddop_imp = _buddy.bddop_imp
bddop_biimp = _buddy.bddop_biimp
bddop_diff = _buddy.bddop_diff
bddop_less = _buddy.bddop_less
bddop_invimp = _buddy.bddop_invimp
# Variable-reordering strategy constants (window permutation / sifting /
# random, with or without iteration).
BDD_REORDER_NONE = _buddy.BDD_REORDER_NONE
BDD_REORDER_WIN2 = _buddy.BDD_REORDER_WIN2
BDD_REORDER_WIN2ITE = _buddy.BDD_REORDER_WIN2ITE
BDD_REORDER_SIFT = _buddy.BDD_REORDER_SIFT
BDD_REORDER_SIFTITE = _buddy.BDD_REORDER_SIFTITE
BDD_REORDER_WIN3 = _buddy.BDD_REORDER_WIN3
BDD_REORDER_WIN3ITE = _buddy.BDD_REORDER_WIN3ITE
BDD_REORDER_RANDOM = _buddy.BDD_REORDER_RANDOM
# --- Finite-domain variable (fdd) interface ---
def fdd_extdomain(input_buf: 'int *') -> "int":
    return _buddy.fdd_extdomain(input_buf)
fdd_extdomain = _buddy.fdd_extdomain
def fdd_overlapdomain(arg1: 'int', arg2: 'int') -> "int":
    return _buddy.fdd_overlapdomain(arg1, arg2)
fdd_overlapdomain = _buddy.fdd_overlapdomain
def fdd_clearall() -> "void":
    return _buddy.fdd_clearall()
fdd_clearall = _buddy.fdd_clearall
def fdd_domainnum() -> "int":
    return _buddy.fdd_domainnum()
fdd_domainnum = _buddy.fdd_domainnum
def fdd_domainsize(arg1: 'int') -> "int":
    return _buddy.fdd_domainsize(arg1)
fdd_domainsize = _buddy.fdd_domainsize
def fdd_varnum(arg1: 'int') -> "int":
    return _buddy.fdd_varnum(arg1)
fdd_varnum = _buddy.fdd_varnum
def fdd_vars(arg1: 'int') -> "const_int_ptr":
    return _buddy.fdd_vars(arg1)
fdd_vars = _buddy.fdd_vars
def fdd_ithvar(arg1: 'int', arg2: 'int') -> "bdd":
    return _buddy.fdd_ithvar(arg1, arg2)
fdd_ithvar = _buddy.fdd_ithvar
def fdd_scanvar(arg1: 'bdd', arg2: 'int') -> "int":
    return _buddy.fdd_scanvar(arg1, arg2)
fdd_scanvar = _buddy.fdd_scanvar
def fdd_scanallvar(arg1: 'bdd') -> "int *":
    return _buddy.fdd_scanallvar(arg1)
fdd_scanallvar = _buddy.fdd_scanallvar
def fdd_ithset(arg1: 'int') -> "bdd":
    return _buddy.fdd_ithset(arg1)
fdd_ithset = _buddy.fdd_ithset
def fdd_domain(arg1: 'int') -> "bdd":
    return _buddy.fdd_domain(arg1)
fdd_domain = _buddy.fdd_domain
def fdd_equals(arg1: 'int', arg2: 'int') -> "bdd":
    return _buddy.fdd_equals(arg1, arg2)
fdd_equals = _buddy.fdd_equals
def fdd_printset(arg1: 'bdd') -> "void":
    return _buddy.fdd_printset(arg1)
fdd_printset = _buddy.fdd_printset
def fdd_fprintset(arg1: 'FILE *', arg2: 'bdd') -> "void":
    return _buddy.fdd_fprintset(arg1, arg2)
fdd_fprintset = _buddy.fdd_fprintset
def fdd_scanset(arg1: 'bdd', arg2: 'int *&', arg3: 'int &') -> "int":
    return _buddy.fdd_scanset(arg1, arg2, arg3)
fdd_scanset = _buddy.fdd_scanset
def fdd_makeset(arg1: 'int *', arg2: 'int') -> "bdd":
    return _buddy.fdd_makeset(arg1, arg2)
fdd_makeset = _buddy.fdd_makeset
def fdd_intaddvarblock(arg1: 'int', arg2: 'int', arg3: 'int') -> "int":
    return _buddy.fdd_intaddvarblock(arg1, arg2, arg3)
fdd_intaddvarblock = _buddy.fdd_intaddvarblock
def fdd_setpair(arg1: 'bddPair *', arg2: 'int', arg3: 'int') -> "int":
    return _buddy.fdd_setpair(arg1, arg2, arg3)
fdd_setpair = _buddy.fdd_setpair
def fdd_setpairs(arg1: 'bddPair *', arg2: 'int *', arg3: 'int *', arg4: 'int') -> "int":
    return _buddy.fdd_setpairs(arg1, arg2, arg3, arg4)
fdd_setpairs = _buddy.fdd_setpairs
# --- Boolean vector (bvec) arithmetic interface ---
def bvec_copy(v: 'bvec') -> "bvec":
    return _buddy.bvec_copy(v)
bvec_copy = _buddy.bvec_copy
def bvec_true(bitnum: 'int') -> "bvec":
    return _buddy.bvec_true(bitnum)
bvec_true = _buddy.bvec_true
def bvec_false(bitnum: 'int') -> "bvec":
    return _buddy.bvec_false(bitnum)
bvec_false = _buddy.bvec_false
def bvec_con(bitnum: 'int', val: 'int') -> "bvec":
    return _buddy.bvec_con(bitnum, val)
bvec_con = _buddy.bvec_con
def bvec_var(bitnum: 'int', offset: 'int', step: 'int') -> "bvec":
    return _buddy.bvec_var(bitnum, offset, step)
bvec_var = _buddy.bvec_var
def bvec_varfdd(var: 'int') -> "bvec":
    return _buddy.bvec_varfdd(var)
bvec_varfdd = _buddy.bvec_varfdd
def bvec_varvec(bitnum: 'int', var: 'int *') -> "bvec":
    return _buddy.bvec_varvec(bitnum, var)
bvec_varvec = _buddy.bvec_varvec
def bvec_coerce(bitnum: 'int', v: 'bvec') -> "bvec":
    return _buddy.bvec_coerce(bitnum, v)
bvec_coerce = _buddy.bvec_coerce
def bvec_isconst(e: 'bvec') -> "int":
    return _buddy.bvec_isconst(e)
bvec_isconst = _buddy.bvec_isconst
def bvec_val(e: 'bvec') -> "int":
    return _buddy.bvec_val(e)
bvec_val = _buddy.bvec_val
def bvec_map1(arg1: 'bvec', fun: 'bdd (*)(bdd const &)') -> "bvec":
    return _buddy.bvec_map1(arg1, fun)
bvec_map1 = _buddy.bvec_map1
def bvec_map2(arg1: 'bvec', arg2: 'bvec', fun: 'bdd (*)(bdd const &,bdd const &)') -> "bvec":
    return _buddy.bvec_map2(arg1, arg2, fun)
bvec_map2 = _buddy.bvec_map2
def bvec_map3(arg1: 'bvec', arg2: 'bvec', arg3: 'bvec', fun: 'bdd (*)(bdd const &,bdd const &,bdd const &)') -> "bvec":
    return _buddy.bvec_map3(arg1, arg2, arg3, fun)
bvec_map3 = _buddy.bvec_map3
def bvec_add(left: 'bvec', right: 'bvec') -> "bvec":
    return _buddy.bvec_add(left, right)
bvec_add = _buddy.bvec_add
def bvec_sub(left: 'bvec', right: 'bvec') -> "bvec":
    return _buddy.bvec_sub(left, right)
bvec_sub = _buddy.bvec_sub
def bvec_mulfixed(e: 'bvec', c: 'int') -> "bvec":
    return _buddy.bvec_mulfixed(e, c)
bvec_mulfixed = _buddy.bvec_mulfixed
def bvec_mul(left: 'bvec', right: 'bvec') -> "bvec":
    return _buddy.bvec_mul(left, right)
bvec_mul = _buddy.bvec_mul
def bvec_divfixed(arg1: 'bvec', c: 'int', arg3: 'bvec', arg4: 'bvec') -> "int":
    return _buddy.bvec_divfixed(arg1, c, arg3, arg4)
bvec_divfixed = _buddy.bvec_divfixed
def bvec_div(arg1: 'bvec', arg2: 'bvec', arg3: 'bvec', arg4: 'bvec') -> "int":
    return _buddy.bvec_div(arg1, arg2, arg3, arg4)
bvec_div = _buddy.bvec_div
def bvec_ite(a: 'bdd', b: 'bvec', c: 'bvec') -> "bvec":
    return _buddy.bvec_ite(a, b, c)
bvec_ite = _buddy.bvec_ite
def bvec_shlfixed(e: 'bvec', pos: 'int', c: 'bdd') -> "bvec":
    return _buddy.bvec_shlfixed(e, pos, c)
bvec_shlfixed = _buddy.bvec_shlfixed
def bvec_shl(l: 'bvec', r: 'bvec', c: 'bdd') -> "bvec":
    return _buddy.bvec_shl(l, r, c)
bvec_shl = _buddy.bvec_shl
def bvec_shrfixed(e: 'bvec', pos: 'int', c: 'bdd') -> "bvec":
    return _buddy.bvec_shrfixed(e, pos, c)
bvec_shrfixed = _buddy.bvec_shrfixed
def bvec_shr(l: 'bvec', r: 'bvec', c: 'bdd') -> "bvec":
    return _buddy.bvec_shr(l, r, c)
bvec_shr = _buddy.bvec_shr
# Relational operators below return a *bdd* (a symbolic truth value), not a
# Python bool.
def bvec_lth(left: 'bvec', right: 'bvec') -> "bdd":
    return _buddy.bvec_lth(left, right)
bvec_lth = _buddy.bvec_lth
def bvec_lte(left: 'bvec', right: 'bvec') -> "bdd":
    return _buddy.bvec_lte(left, right)
bvec_lte = _buddy.bvec_lte
def bvec_gth(left: 'bvec', right: 'bvec') -> "bdd":
    return _buddy.bvec_gth(left, right)
bvec_gth = _buddy.bvec_gth
def bvec_gte(left: 'bvec', right: 'bvec') -> "bdd":
    return _buddy.bvec_gte(left, right)
bvec_gte = _buddy.bvec_gte
def bvec_equ(left: 'bvec', right: 'bvec') -> "bdd":
    return _buddy.bvec_equ(left, right)
bvec_equ = _buddy.bvec_equ
def bvec_neq(left: 'bvec', right: 'bvec') -> "bdd":
    return _buddy.bvec_neq(left, right)
bvec_neq = _buddy.bvec_neq
class bvec(object):
    """SWIG proxy for BuDDy's ``bvec`` class.

    A bvec is a fixed-width vector of BDDs interpreted as a binary number;
    the operators below delegate to the symbolic ``bvec_*`` C routines, and
    the comparison dunders return a ``bdd`` (symbolic truth value), not a
    Python bool.  Auto-generated by SWIG; do not edit by hand.
    """
    # Standard SWIG ownership flag: True when Python owns the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _buddy.bvec_swiginit(self, _buddy.new_bvec(*args))
    __swig_destroy__ = _buddy.delete_bvec
    def set(self, i: 'int', b: 'bdd') -> "void":
        return _buddy.bvec_set(self, i, b)
    def bitnum(self) -> "int":
        return _buddy.bvec_bitnum(self)
    def empty(self) -> "int":
        return _buddy.bvec_empty(self)
    # Bitwise operators apply element-wise over the BDD bits.
    def __and__(self, a: 'bvec') -> "bvec":
        return _buddy.bvec___and__(self, a)
    def __xor__(self, a: 'bvec') -> "bvec":
        return _buddy.bvec___xor__(self, a)
    def __or__(self, a: 'bvec') -> "bvec":
        return _buddy.bvec___or__(self, a)
    def __lshift__(self, *args) -> "bvec":
        return _buddy.bvec___lshift__(self, *args)
    def __rshift__(self, *args) -> "bvec":
        return _buddy.bvec___rshift__(self, *args)
    def __add__(self, a: 'bvec') -> "bvec":
        return _buddy.bvec___add__(self, a)
    def __sub__(self, a: 'bvec') -> "bvec":
        return _buddy.bvec___sub__(self, a)
    def __mul__(self, *args) -> "bvec":
        return _buddy.bvec___mul__(self, *args)
    def __lt__(self, a: 'bvec') -> "bdd":
        return _buddy.bvec___lt__(self, a)
    def __le__(self, a: 'bvec') -> "bdd":
        return _buddy.bvec___le__(self, a)
    def __gt__(self, a: 'bvec') -> "bdd":
        return _buddy.bvec___gt__(self, a)
    def __ge__(self, a: 'bvec') -> "bdd":
        return _buddy.bvec___ge__(self, a)
    # NOTE: defining __eq__ without __hash__ makes bvec instances unhashable
    # under Python 3 (SWIG-generated behaviour).
    def __eq__(self, a: 'bvec') -> "bdd":
        return _buddy.bvec___eq__(self, a)
    def __ne__(self, a: 'bvec') -> "bdd":
        return _buddy.bvec___ne__(self, a)
    def __str__(self) -> "std::string":
        return _buddy.bvec___str__(self)
    def __getitem__(self, i: 'int') -> "bdd":
        return _buddy.bvec___getitem__(self, i)
# Register the proxy class with the C extension, then re-export the
# module-level constant BDDs exposed through SWIG's global-variable container.
bvec_swigregister = _buddy.bvec_swigregister
bvec_swigregister(bvec)
cvar = _buddy.cvar
bddfalse = cvar.bddfalse
bddtrue = cvar.bddtrue
| mit |
mrquim/mrquimrepo | repo/plugin.program.indigo/libs/requests/packages/urllib3/poolmanager.py | 86 | 13053 | from __future__ import absolute_import
import collections
import functools
import logging
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
log = logging.getLogger(__name__)
# Pool-level keyword arguments that only make sense for HTTPS connections;
# PoolManager._new_pool strips them before building plain HTTP pools.
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version', 'ca_cert_dir', 'ssl_context')
# The base fields to use when determining what pool to get a connection from;
# these do not rely on the ``connection_pool_kw`` and can be determined by the
# URL and potentially the ``urllib3.connection.port_by_scheme`` dictionary.
#
# All custom key schemes should include the fields in this key at a minimum.
BasePoolKey = collections.namedtuple('BasePoolKey', ('scheme', 'host', 'port'))
# The fields to use when determining what pool to get a HTTP and HTTPS
# connection from. All additional fields must be present in the PoolManager's
# ``connection_pool_kw`` instance variable.
HTTPPoolKey = collections.namedtuple(
    'HTTPPoolKey', BasePoolKey._fields + ('timeout', 'retries', 'strict',
                                          'block', 'source_address')
)
HTTPSPoolKey = collections.namedtuple(
    'HTTPSPoolKey', HTTPPoolKey._fields + SSL_KEYWORDS
)
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key of type ``key_class`` for a request.
According to RFC 3986, both the scheme and host are case-insensitive.
Therefore, this function normalizes both before constructing the pool
key for an HTTPS request. If you wish to change this behaviour, provide
alternate callables to ``key_fn_by_scheme``.
:param key_class:
The class to use when constructing the key. This should be a namedtuple
with the ``scheme`` and ``host`` keys at a minimum.
:param request_context:
A dictionary-like object that contain the context for a request.
It should contain a key for each field in the :class:`HTTPPoolKey`
"""
context = {}
for key in key_class._fields:
context[key] = request_context.get(key)
context['scheme'] = context['scheme'].lower()
context['host'] = context['host'].lower()
return key_class(**context)
# A dictionary that maps a scheme to a callable that creates a pool key.
# This can be used to alter the way pool keys are constructed, if desired.
# Each PoolManager makes a copy of this dictionary so they can be configured
# globally here, or individually on the instance.
key_fn_by_scheme = {
    'http': functools.partial(_default_key_normalizer, HTTPPoolKey),
    'https': functools.partial(_default_key_normalizer, HTTPSPoolKey),
}
# Scheme -> ConnectionPool class instantiated by PoolManager._new_pool.
pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}
class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.
    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.
    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    :param \\**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.
    Example::
        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2
    """
    # Set by ProxyManager; when non-None, urlopen() sends plain-HTTP requests
    # with the absolute URL so the proxy can route them.
    proxy = None
    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # LRU container: pools evicted from the cache are closed via
        # dispose_func.
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())
        # Locally set the pool classes and keys so other PoolManagers can
        # override them.
        self.pool_classes_by_scheme = pool_classes_by_scheme
        self.key_fn_by_scheme = key_fn_by_scheme.copy()
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clear()
        # Return False to re-raise any potential exceptions
        return False
    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.
        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = self.pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            # SSL-only kwargs are meaningless for plain HTTP pools; strip them
            # from a copy so the shared kwargs dict stays untouched.
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)
        return pool_cls(host, port, **kwargs)
    def clear(self):
        """
        Empty our store of pools and direct them all to close.
        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()
    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.
        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """
        if not host:
            raise LocationValueError("No host specified.")
        request_context = self.connection_pool_kw.copy()
        request_context['scheme'] = scheme or 'http'
        if not port:
            port = port_by_scheme.get(request_context['scheme'].lower(), 80)
        request_context['port'] = port
        request_context['host'] = host
        return self.connection_from_context(request_context)
    def connection_from_context(self, request_context):
        """
        Get a :class:`ConnectionPool` based on the request context.
        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        """
        scheme = request_context['scheme'].lower()
        pool_key_constructor = self.key_fn_by_scheme[scheme]
        pool_key = pool_key_constructor(request_context)
        return self.connection_from_pool_key(pool_key)
    def connection_from_pool_key(self, pool_key):
        """
        Get a :class:`ConnectionPool` based on the provided pool key.
        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        """
        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool
            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(pool_key.scheme, pool_key.host, pool_key.port)
            self.pools[pool_key] = pool
        return pool
    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.
        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.
        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
        # Redirects are followed here (possibly across hosts), so disable the
        # pool-level redirect handling and same-host assertion.
        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers
        if self.proxy is not None and u.scheme == "http":
            # Proxied plain-HTTP requests need the absolute URL in the
            # request line.
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)
        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response
        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)
        # RFC 7231, Section 6.4.4
        if response.status == 303:
            method = 'GET'
        retries = kw.get('retries')
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)
        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                raise
            return response
        kw['retries'] = retries
        kw['redirect'] = redirect
        log.info("Redirecting %s -> %s", url, redirect_location)
        # Recurse so the pool is re-selected for the redirect target's host.
        return self.urlopen(method, redirect_location, **kw)
class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.
    :param proxy_url:
        The URL of the proxy to be used.
    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In
        case of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.
    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3
    """
    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):
        # Accept an existing ConnectionPool instance as a convenience.
        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            # Default the proxy port from the proxy URL's scheme.
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)
        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)
        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        # Every pool created by this manager is configured to route via the
        # proxy (consumed by the ConnectionPool constructors).
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers
        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)
    def connection_from_host(self, host, port=None, scheme='http'):
        # HTTPS tunnels via CONNECT, so it pools per target host; plain HTTP
        # pools on the proxy endpoint itself.
        if scheme == "https":
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme)
        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme)
    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}
        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc
        if headers:
            headers_.update(headers)
        return headers_
    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)
        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            headers = kw.get('headers', self.headers)
            kw['headers'] = self._set_proxy_headers(url, headers)
        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
def proxy_from_url(url, **kw):
    """Shortcut: build a :class:`ProxyManager` for the proxy at *url*.

    Any extra keyword arguments are forwarded to the manager constructor.
    """
    manager = ProxyManager(proxy_url=url, **kw)
    return manager
| gpl-2.0 |
openpeer/webrtc-gyp | test/win/gyptest-cl-warning-level.py | 344 | 1394 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure warning level is extracted properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
  # Exercise both Windows generators so msvs_settings warning-level
  # translation is checked for each.
  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
  CHDIR = 'compiler-flags'
  test.run_gyp('warning-level.gyp', chdir=CHDIR)
  # A separate target for each warning level: one pass (compiling a file
  # containing a warning that's above the specified level); and one fail
  # (compiling a file at the specified level). No pass for 4 of course,
  # because it would have to have no warnings. The default warning level is
  # equivalent to level 1.
  test.build('warning-level.gyp', 'test_wl1_fail', chdir=CHDIR, status=1)
  test.build('warning-level.gyp', 'test_wl1_pass', chdir=CHDIR)
  test.build('warning-level.gyp', 'test_wl2_fail', chdir=CHDIR, status=1)
  test.build('warning-level.gyp', 'test_wl2_pass', chdir=CHDIR)
  test.build('warning-level.gyp', 'test_wl3_fail', chdir=CHDIR, status=1)
  test.build('warning-level.gyp', 'test_wl3_pass', chdir=CHDIR)
  test.build('warning-level.gyp', 'test_wl4_fail', chdir=CHDIR, status=1)
  # 'def' targets compile at the default level, expected to match level 1.
  test.build('warning-level.gyp', 'test_def_fail', chdir=CHDIR, status=1)
  test.build('warning-level.gyp', 'test_def_pass', chdir=CHDIR)
  test.pass_test()
| bsd-3-clause |
klattimer/rtl_433 | examples/rtl_433_gps.py | 3 | 3467 | #!/usr/bin/env python
"""Read events from rtl_433 and gpsd and print out."""
# Needs gpsd (and the Python support from gpsd)
# Start gpsd and rtl_433 (rtl_433 -F syslog::1433), then this script
from __future__ import print_function
import socket
import json
import gps
import threading
# rtl_433 syslog address
UDP_IP = "127.0.0.1"
UDP_PORT = 1433
class GpsPoller(threading.Thread):
    """Background thread that keeps the gpsd client state current.

    The gps session object updates itself in place on each ``next()`` call;
    the properties below simply expose its latest values to the main thread.
    """
    def __init__(self):
        threading.Thread.__init__(self)
        self.gps = gps.gps(mode=gps.WATCH_ENABLE)
        # Cleared by the main thread to stop the polling loop.
        self.running = True
    def run(self):
        # Drain gpsd reports for as long as we are asked to run.
        # NOTE(review): next() can block waiting for the next report, so the
        # loop may not notice running=False immediately.
        while self.running:
            self.gps.next()
    @property
    def utc(self):
        """UTC timestamp of the most recent gpsd report."""
        return self.gps.utc
    @property
    def fix(self):
        """Latest position fix object from gpsd."""
        return self.gps.fix
    @property
    def satellites(self):
        """Latest satellite list from gpsd."""
        return self.gps.satellites
def parse_syslog(line):
    """Try to extract the payload from a syslog line.

    rtl_433 sends RFC 5424-style syslog datagrams whose message text is the
    last of the space-separated header fields.  Lines that do not look like
    syslog (no leading ``<PRI>``) are returned unchanged after decoding.

    :param line: raw datagram as ``bytes``.
    :return: the payload as ``str``.
    """
    # Decode as UTF-8 (a superset of ASCII): rtl_433 payloads may contain
    # non-ASCII characters, which a plain ascii decode would reject with
    # UnicodeDecodeError.
    line = line.decode("utf-8")
    if line.startswith("<"):
        # fields should be "<PRI>VER", timestamp, hostname, command, pid, mid, sdata, payload
        fields = line.split(None, 7)
        line = fields[-1]
    return line
def prife(label, data, key):
    """Print *label* and ``data[key]`` only when *key* exists ("print if exists")."""
    try:
        print(label, data[key])
    except KeyError:
        pass
def report_event(data, gpsp):
    """Print out an rtl_433 TPMS event together with the current GPS fix.

    :param data: decoded rtl_433 JSON event (dict).
    :param gpsp: GpsPoller instance providing the latest gpsd state.
    """
    # don't process if it isn't sensor data
    if "model" not in data:
        return
    # don't process if it isn't TPMS data
    if "type" not in data:
        return
    if data["type"] != "TPMS":
        return
    # now = int(time.time())
    print("----------------------------------------")
    print("Model ", data["model"])
    prife("ID ", data, "id")
    prife("Status ", data, "status")
    prife("State ", data, "state")
    prife("Flags ", data, "flags")
    prife("Code ", data, "code")
    prife("Pressure (kPa) ", data, "pressure_kPa")
    prife("Pressure (PSI) ", data, "pressure_PSI")
    prife("Temperature (C)", data, "temperature_C")
    prife("Temperature (F)", data, "temperature_F")
    print()
    # Position block reflects the background GpsPoller's latest fix.
    print("latitude ", gpsp.fix.latitude)
    print("longitude ", gpsp.fix.longitude)
    print("time utc ", gpsp.utc, " + ", gpsp.fix.time)
    print("altitude (m) ", gpsp.fix.altitude)
    # ep* fields are gpsd's estimated errors for speed/longitude/vertical/time.
    print("eps ", gpsp.fix.eps)
    print("epx ", gpsp.fix.epx)
    print("epv ", gpsp.fix.epv)
    print("ept ", gpsp.fix.ept)
    print("speed (m/s) ", gpsp.fix.speed)
    print("climb ", gpsp.fix.climb)
    print("track ", gpsp.fix.track)
    print("mode ", gpsp.fix.mode)
    # print("sats ", gpsp.satellites)
if __name__ == '__main__':
    gpsp = GpsPoller()
    # Listen for rtl_433's syslog datagrams (rtl_433 -F syslog::1433).
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.bind((UDP_IP, UDP_PORT))
    try:
        gpsp.start()
        while True:
            line, addr = sock.recvfrom(1024)
            try:
                line = parse_syslog(line)
                data = json.loads(line)
                report_event(data, gpsp)
            except KeyError:
                # Event lacked an expected field; skip it.
                pass
            except ValueError:
                # Datagram was not valid JSON; skip it.
                pass
    except (KeyboardInterrupt, SystemExit): #when you press ctrl+c
        print("\nAborted. Exiting...")
        # NOTE(review): cleanup runs only on Ctrl-C/SystemExit; any other
        # exception leaves the socket and poller thread open — consider a
        # finally block.
        sock.close()
        gpsp.running = False
        gpsp.join() # wait for the thread to finish (may block until gpsd emits)
        print("Done.\n")
| gpl-2.0 |
Zanzibar82/plugin.video.pelisalacarta | servers/documentary.py | 40 | 2839 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para documentary.es
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
    """Resolve the playable video URL(s) for a documentary.es page.

    The page embeds a third-party player (e.g. Dailymotion); its address is
    fetched through the site's admin-ajax endpoint and then delegated to the
    matching server connector found by servertools.

    :return: list of [label, url] pairs (possibly empty).
    """
    logger.info("documentary get_video_url(page_url='%s')" % page_url)
    video_urls = []
    data = scrapertools.cache_page(page_url)
    try:
        # var videoVars = {"videoNonceVar":"94767795ce","post_id":"2835"};
        videoNonceVar = scrapertools.get_match(data,'var\s*videoVars\s*\=\s*\{"videoNonceVar"\:"([^"]+)","post_id"\:"\d+"')
        post_id = scrapertools.get_match(data,'var\s*videoVars\s*\=\s*\{"videoNonceVar"\:"[^"]+","post_id"\:"(\d+)"')
        # http://documentary.es/wp-admin/admin-ajax.php?postId=2835&videoNonce=94767795ce&action=getVideo&_=1385893877929
        import random
        url = "http://documentary.es/wp-admin/admin-ajax.php?postId="+post_id+"&videoNonce="+videoNonceVar+"&action=getVideo&_="+str(random.randint(10000000000,9999999999999))
        data = scrapertools.cache_page(url)
        # {"videoUrl":"http:\/\/www.dailymotion.com\/embed\/video\/xioggh?autoplay=1&defaultSubtitle=es"}
        data = data.replace("\\","")
    except Exception:
        # Best effort: if the AJAX lookup fails, fall back to scanning the
        # original page content for embedded players (narrowed from a bare
        # ``except:`` so SystemExit/KeyboardInterrupt are not swallowed).
        logger.info("documentary get_video_url: ajax lookup failed, scanning page content")
    import servertools
    real_urls = servertools.find_video_items(data=data)
    if len(real_urls)>0:
        item = real_urls[-1]
        # Import the matching server connector by module name instead of
        # exec'ing dynamically-built source text (same effect, no dynamic
        # code evaluation).
        servermodule = __import__(item.server)
        video_urls = servermodule.get_video_url(item.url)
    for video_url in video_urls:
        logger.info("documentary %s - %s" % (video_url[0],video_url[1]))
    return video_urls
# Find this server's videos in the given text
def find_videos(data):
    """Scan *data* for documentary.es page links.

    Returns a list of ``[title, embed_url, server_id]`` entries; duplicate
    URLs are logged but reported only once.
    """
    # <iframe src="http://documentary.es/2321-mundos-invisibles-1x02-mas-alla-de-nuestra-vision-720p?embed"
    patronvideos = 'http://documentary.es/(\d+[a-z0-9\-]+)'
    logger.info("documentary find_videos #"+patronvideos+"#")
    seen = set()
    results = []
    for slug in re.compile(patronvideos,re.DOTALL).findall(data):
        url = "http://documentary.es/"+slug+"?embed"
        if url in seen:
            logger.info(" url duplicada="+url)
            continue
        logger.info(" url="+url)
        results.append( [ "[documentary.es]" , url , 'documentary' ] )
        seen.add(url)
    return results
def test():
    """Smoke test: the sample page must resolve to at least one video URL."""
    resolved = get_video_url("http://documentary.es/2321-mundos-invisibles-1x02-mas-alla-de-nuestra-vision-720p?embed")
    return len(resolved) > 0
KaranToor/MA450 | google-cloud-sdk/.install/.backup/lib/surface/debug/snapshots/describe.py | 6 | 1949 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""List command for gcloud debug snapshots command group."""
from googlecloudsdk.api_lib.debug import debug
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.debug import flags
from googlecloudsdk.core import properties
class Describe(base.DescribeCommand):
  """Describe debug snapshots."""
  detailed_help = {
      'DESCRIPTION': """\
          This command describes debug snapshots for a Cloud Debugger debug
          target. If the snapshot has been completed, the output will include
          details on the stack trace and local variables, stored in a compact
          form which is primarily intended to be machine-readable rather than
          human-readable.
      """
  }
  @staticmethod
  def Args(parser):
    # Register the snapshot ID/location selection flags on the parser.
    flags.AddIdOptions(parser, 'snapshot', 'snapshots', 'displayed')
  def Run(self, args):
    """Run the describe command."""
    project_id = properties.VALUES.core.project.Get(required=True)
    # NOTE(review): user_email is stored but not read in this class —
    # presumably consumed elsewhere; confirm before removing.
    self.user_email = properties.VALUES.core.account.Get(required=True)
    debugger = debug.Debugger(project_id)
    debuggee = debugger.FindDebuggee(args.target)
    # Only snapshot-type breakpoints are listed for this command.
    return debuggee.ListBreakpoints(args.location,
                                    resource_ids=args.ids,
                                    restrict_to_type=debugger.SNAPSHOT_TYPE)
  def Collection(self):
    """Return the resource collection name for this command's output."""
    return 'debug.snapshots'
| apache-2.0 |
michiz05000/jna-symfony | vendor/imagine/imagine/docs/conf.py | 218 | 7135 | # -*- coding: utf-8 -*-
#
# Imagine documentation build configuration file, created by
# sphinx-quickstart on Wed Apr 6 00:20:22 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Imagine'
copyright = u'2011, Bulat Shakirzyanov'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# NOTE: keep these two in sync with the library's release tags.
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = '0.2.1-alpha'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
# pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# Custom project theme, shipped in _themes (see html_theme_path below).
html_theme = 'imagine'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
    'github_fork': 'avalanche123/Imagine'
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'Imagine, image manipulations reloaded'
# A shorter title for the navigation bar. Default is the same as html_title.
html_short_title = 'Imagine for PHP 5.3'
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static', 'API']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Imaginedoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Imagine.tex', u'Imagine Documentation',
u'Bulat Shakirzyanov', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'imagine', u'Imagine Documentation',
[u'Bulat Shakirzyanov'], 1)
]
| mit |
oppo-source/Neo5-kernel-source | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Exactly one argument (the object file whose unwind info is checked) is
# required.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)
# Allow the readelf binary to be overridden, e.g. with a cross-toolchain one.
readelf = os.getenv("READELF", "readelf")
# Matches "readelf -u" function headers like "<name>: [0x4000-0x4040]";
# groups: (1) symbol name, (2) start address, (3) end address (hex).
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches unwind-region lines carrying "rlen=<n>" (slots covered by region).
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    # Verify one function's unwind info: the slot count derived from its
    # address range must equal the sum of its region lengths.  Mismatches
    # are reported and counted, but do not abort the scan.
    #
    # func     -- symbol name from readelf, or False before the first
    #             function header has been seen
    # slots    -- expected instruction slots (3 per 16-byte ia64 bundle)
    # rlen_sum -- accumulated rlen= values for this function's regions
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # Fall back to the raw address range when readelf gave no symbol.
        # NOTE(review): relies on module-level 'start'/'end' set by the
        # caller's scan loop -- confirm callers always set them first.
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Running totals for the whole input file.
num_funcs = 0
num_errors = 0
# State for the function currently being scanned; func stays False until
# the first function header has been seen.
func = False
slots = 0
rlen_sum = 0
# Stream "readelf -u" output and accumulate region lengths per function.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: first validate the previous function.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # ia64 instruction bundles are 16 bytes and hold 3 slots each.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# The last function has no following header, so validate it explicitly.
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    # Pluralize the summary and exit non-zero so callers detect failure.
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
shakamunyi/nova | nova/virt/xenapi/firewall.py | 10 | 4668 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.serialization import jsonutils
from nova import context
from nova.openstack.common import log as logging
from nova.virt import firewall
from nova.virt import netutils
LOG = logging.getLogger(__name__)
class Dom0IptablesFirewallDriver(firewall.IptablesFirewallDriver):
    """Firewall driver that enforces iptables rules in dom0.

    Provides an implementation of nova.virt.Firewall using iptables.
    Meant for the xenapi backend: instead of running iptables locally,
    every iptables invocation is shipped to dom0 through the 'xenhost'
    xenapi plugin.
    """
    def _plugin_execute(self, *cmd, **kwargs):
        # Executor handed to IptablesManager: serialize the iptables
        # command and its keyword options, run them in dom0 via the
        # 'iptables_config' plugin call, and hand back (stdout, stderr).
        # Prepare arguments for plugin call
        args = {}
        args.update(map(lambda x: (x, str(kwargs[x])), kwargs))
        args['cmd_args'] = jsonutils.dumps(cmd)
        ret = self._session.call_plugin('xenhost', 'iptables_config', args)
        json_ret = jsonutils.loads(ret)
        return (json_ret['out'], json_ret['err'])
    def __init__(self, virtapi, xenapi_session=None, **kwargs):
        # Imported here (not module level) to avoid a circular import
        # between nova.network and nova.virt at module load time.
        from nova.network import linux_net
        super(Dom0IptablesFirewallDriver, self).__init__(virtapi, **kwargs)
        self._session = xenapi_session
        # Create IpTablesManager with executor through plugin, so all rule
        # manipulation below happens in dom0 rather than in this domU.
        self.iptables = linux_net.IptablesManager(self._plugin_execute)
        # Default-deny fallback chain for security groups (IPv4 and IPv6).
        self.iptables.ipv4['filter'].add_chain('sg-fallback')
        self.iptables.ipv4['filter'].add_rule('sg-fallback', '-j DROP')
        self.iptables.ipv6['filter'].add_chain('sg-fallback')
        self.iptables.ipv6['filter'].add_rule('sg-fallback', '-j DROP')
    def _build_tcp_udp_rule(self, rule, version):
        # Build the --dport argument for a tcp/udp security-group rule:
        # single port when from == to, otherwise an iptables port range.
        if rule['from_port'] == rule['to_port']:
            return ['--dport', '%s' % (rule['from_port'],)]
        else:
            # No multiport needed for XS!
            return ['--dport', '%s:%s' % (rule['from_port'],
                                          rule['to_port'])]
    def _provider_rules(self):
        """Generate a list of rules from provider for IP4 & IP6.

        Note: We could not use the common code from virt.firewall because
        XS doesn't accept the '-m multiport' option.
        """
        ctxt = context.get_admin_context()
        ipv4_rules = []
        ipv6_rules = []
        rules = self._virtapi.provider_fw_rule_get_all(ctxt)
        for rule in rules:
            LOG.debug('Adding provider rule: %s', rule['cidr'])
            version = netutils.get_ip_version(rule['cidr'])
            if version == 4:
                fw_rules = ipv4_rules
            else:
                fw_rules = ipv6_rules
            protocol = rule['protocol']
            # IPv6 uses the icmpv6 protocol name in iptables.
            if version == 6 and protocol == 'icmp':
                protocol = 'icmpv6'
            args = ['-p', protocol, '-s', rule['cidr']]
            if protocol in ['udp', 'tcp']:
                # Same single-port / port-range logic as
                # _build_tcp_udp_rule, inlined here.
                if rule['from_port'] == rule['to_port']:
                    args += ['--dport', '%s' % (rule['from_port'],)]
                else:
                    args += ['--dport', '%s:%s' % (rule['from_port'],
                                                   rule['to_port'])]
            elif protocol == 'icmp':
                # For ICMP, from_port/to_port carry type/code; -1 means
                # "any" and suppresses the --icmp-type argument entirely.
                icmp_type = rule['from_port']
                icmp_code = rule['to_port']
                if icmp_type == -1:
                    icmp_type_arg = None
                else:
                    icmp_type_arg = '%s' % icmp_type
                    if not icmp_code == -1:
                        icmp_type_arg += '/%s' % icmp_code
                if icmp_type_arg:
                    if version == 4:
                        args += ['-m', 'icmp', '--icmp-type',
                                 icmp_type_arg]
                    elif version == 6:
                        args += ['-m', 'icmp6', '--icmpv6-type',
                                 icmp_type_arg]
            args += ['-j DROP']
            fw_rules += [' '.join(args)]
        return ipv4_rules, ipv6_rules
| apache-2.0 |
caveman-dick/ansible | lib/ansible/modules/cloud/profitbricks/profitbricks_datacenter.py | 26 | 7540 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: profitbricks_datacenter
short_description: Create or destroy a ProfitBricks Virtual Datacenter.
description:
- This is a simple module that supports creating or removing vDCs. A vDC is required before you can create servers. This module has a dependency
on profitbricks >= 1.0.0
version_added: "2.0"
options:
name:
description:
- The name of the virtual datacenter.
required: true
description:
description:
- The description of the virtual datacenter.
required: false
location:
description:
- The datacenter location.
required: false
default: us/las
choices: [ "us/las", "de/fra", "de/fkb" ]
subscription_user:
description:
- The ProfitBricks username. Overrides the PB_SUBSCRIPTION_ID environment variable.
required: false
subscription_password:
description:
      - The ProfitBricks password. Overrides the PB_PASSWORD environment variable.
required: false
wait:
description:
- wait for the datacenter to be created before returning
required: false
default: "yes"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 600
state:
description:
- create or terminate datacenters
required: false
default: 'present'
choices: [ "present", "absent" ]
requirements: [ "profitbricks" ]
author: Matt Baldwin (baldwin@stackpointcloud.com)
'''
EXAMPLES = '''
# Create a Datacenter
- profitbricks_datacenter:
datacenter: Tardis One
wait_timeout: 500
# Destroy a Datacenter. This will remove all servers, volumes, and other objects in the datacenter.
- profitbricks_datacenter:
datacenter: Tardis One
wait_timeout: 500
state: absent
'''
import re
import time
# Track SDK availability so main() can emit a helpful error message instead
# of an ImportError traceback when the profitbricks package is missing.
HAS_PB_SDK = True
try:
    from profitbricks.client import ProfitBricksService, Datacenter
except ImportError:
    HAS_PB_SDK = False
from ansible.module_utils.basic import AnsibleModule
# Datacenter locations accepted by the ProfitBricks API.
LOCATIONS = ['us/las',
             'de/fra',
             'de/fkb']
# Loose UUID matcher used to decide whether the 'name' parameter is a
# datacenter id (delete directly) or a display name (look it up first).
uuid_match = re.compile(
    '[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12}', re.I)
def _wait_for_completion(profitbricks, promise, wait_timeout, msg):
if not promise:
return
wait_timeout = time.time() + wait_timeout
while wait_timeout > time.time():
time.sleep(5)
operation_result = profitbricks.get_request(
request_id=promise['requestId'],
status=True)
if operation_result['metadata']['status'] == "DONE":
return
elif operation_result['metadata']['status'] == "FAILED":
raise Exception(
'Request failed to complete ' + msg + ' "' + str(
promise['requestId']) + '" to complete.')
raise Exception(
'Timed out waiting for async operation ' + msg + ' "' + str(
promise['requestId']
) + '" to complete.')
def _remove_datacenter(module, profitbricks, datacenter):
try:
profitbricks.delete_datacenter(datacenter)
except Exception as e:
module.fail_json(msg="failed to remove the datacenter: %s" % str(e))
def create_datacenter(module, profitbricks):
    """Create a new virtual datacenter from the module parameters.

    module : AnsibleModule object
    profitbricks: authenticated profitbricks object.

    Returns:
        dict carrying the new 'datacenter_id' on success; on failure the
        module exits via fail_json.
    """
    params = module.params
    wait = params.get('wait')
    wait_timeout = int(params.get('wait_timeout'))
    # Assemble the datacenter description sent to the API.
    datacenter = Datacenter(
        name=params.get('name'),
        location=params.get('location'),
        description=params.get('description')
    )
    try:
        response = profitbricks.create_datacenter(datacenter=datacenter)
        if wait:
            # Block until provisioning finishes (or times out).
            _wait_for_completion(profitbricks, response,
                                 wait_timeout, "_create_datacenter")
        return {'datacenter_id': response['id']}
    except Exception as e:
        module.fail_json(msg="failed to create the new datacenter: %s" % str(e))
def remove_datacenter(module, profitbricks):
    """Remove a datacenter identified either by UUID or by display name.

    module : AnsibleModule object
    profitbricks: authenticated profitbricks object.

    Returns:
        True if a datacenter was deleted, False otherwise
    """
    name = module.params.get('name')
    changed = False
    if uuid_match.match(name):
        # Caller supplied a UUID; delete it directly.
        _remove_datacenter(module, profitbricks, name)
        changed = True
    else:
        # Caller supplied a display name; scan for a matching datacenter.
        listing = profitbricks.list_datacenters()
        for entry in listing['items']:
            vdc = profitbricks.get_datacenter(entry['id'])
            if name == vdc['properties']['name']:
                # NOTE: 'name' is rebound to the id here, so at most the
                # first name match is deleted (original behavior kept).
                name = entry['id']
                _remove_datacenter(module, profitbricks, name)
                changed = True
    return changed
def main():
    """Ansible entry point: validate inputs, build the API client, and
    dispatch to create_datacenter or remove_datacenter based on 'state'."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(),
            description=dict(),
            location=dict(choices=LOCATIONS, default='us/las'),
            subscription_user=dict(),
            subscription_password=dict(no_log=True),
            wait=dict(type='bool', default=True),
            wait_timeout=dict(default=600),
            state=dict(default='present'),
        )
    )
    # Fail fast on a missing SDK or credentials before touching the API.
    if not HAS_PB_SDK:
        module.fail_json(msg='profitbricks required for this module')
    if not module.params.get('subscription_user'):
        module.fail_json(msg='subscription_user parameter is required')
    if not module.params.get('subscription_password'):
        module.fail_json(msg='subscription_password parameter is required')
    subscription_user = module.params.get('subscription_user')
    subscription_password = module.params.get('subscription_password')
    profitbricks = ProfitBricksService(
        username=subscription_user,
        password=subscription_password)
    state = module.params.get('state')
    if state == 'absent':
        if not module.params.get('name'):
            module.fail_json(msg='name parameter is required deleting a virtual datacenter.')
        try:
            # remove_datacenter returns a bare bool; the parentheses are
            # redundant but harmless.
            (changed) = remove_datacenter(module, profitbricks)
            module.exit_json(
                changed=changed)
        except Exception as e:
            module.fail_json(msg='failed to set datacenter state: %s' % str(e))
    elif state == 'present':
        if not module.params.get('name'):
            module.fail_json(msg='name parameter is required for a new datacenter')
        if not module.params.get('location'):
            module.fail_json(msg='location parameter is required for a new datacenter')
        try:
            # create_datacenter returns {'datacenter_id': ...} which is
            # splatted directly into the module result.
            (datacenter_dict_array) = create_datacenter(module, profitbricks)
            module.exit_json(**datacenter_dict_array)
        except Exception as e:
            module.fail_json(msg='failed to set datacenter state: %s' % str(e))
if __name__ == '__main__':
    main()
| gpl-3.0 |
mancoast/CPythonPyc_test | fail/332_test_modulefinder.py | 31 | 7869 | import os
import errno
import shutil
import unittest
import tempfile
from test import support
import modulefinder
# Scratch directory where the test packages are materialized, plus a module
# search path that also exposes the stdlib directory (so 'sys',
# '__future__', etc. can be found by modulefinder).
TEST_DIR = tempfile.mkdtemp()
TEST_PATH = [TEST_DIR, os.path.dirname(tempfile.__file__)]
# Each test description is a list of 5 items:
#
# 1. a module name that will be imported by modulefinder
# 2. a list of module names that modulefinder is required to find
# 3. a list of module names that modulefinder should complain
# about because they are not found
# 4. a list of module names that modulefinder should complain
# about because they MAY be not found
# 5. a string specifying packages to create; the format is obvious imo.
#
# Each package will be created in TEST_DIR, and TEST_DIR will be
# removed after the tests again.
# Modulefinder searches in a path that contains TEST_DIR, plus
# the standard Lib directory.
maybe_test = [
"a.module",
["a", "a.module", "sys",
"b"],
["c"], ["b.something"],
"""\
a/__init__.py
a/module.py
from b import something
from c import something
b/__init__.py
from sys import *
"""]
maybe_test_new = [
"a.module",
["a", "a.module", "sys",
"b", "__future__"],
["c"], ["b.something"],
"""\
a/__init__.py
a/module.py
from b import something
from c import something
b/__init__.py
from __future__ import absolute_import
from sys import *
"""]
package_test = [
"a.module",
["a", "a.b", "a.c", "a.module", "mymodule", "sys"],
["blahblah", "c"], [],
"""\
mymodule.py
a/__init__.py
import blahblah
from a import b
import c
a/module.py
import sys
from a import b as x
from a.c import sillyname
a/b.py
a/c.py
from a.module import x
import mymodule as sillyname
from sys import version_info
"""]
absolute_import_test = [
"a.module",
["a", "a.module",
"b", "b.x", "b.y", "b.z",
"__future__", "sys", "gc"],
["blahblah", "z"], [],
"""\
mymodule.py
a/__init__.py
a/module.py
from __future__ import absolute_import
import sys # sys
import blahblah # fails
import gc # gc
import b.x # b.x
from b import y # b.y
from b.z import * # b.z.*
a/gc.py
a/sys.py
import mymodule
a/b/__init__.py
a/b/x.py
a/b/y.py
a/b/z.py
b/__init__.py
import z
b/unused.py
b/x.py
b/y.py
b/z.py
"""]
relative_import_test = [
"a.module",
["__future__",
"a", "a.module",
"a.b", "a.b.y", "a.b.z",
"a.b.c", "a.b.c.moduleC",
"a.b.c.d", "a.b.c.e",
"a.b.x",
"gc"],
[], [],
"""\
mymodule.py
a/__init__.py
from .b import y, z # a.b.y, a.b.z
a/module.py
from __future__ import absolute_import # __future__
import gc # gc
a/gc.py
a/sys.py
a/b/__init__.py
from ..b import x # a.b.x
#from a.b.c import moduleC
from .c import moduleC # a.b.moduleC
a/b/x.py
a/b/y.py
a/b/z.py
a/b/g.py
a/b/c/__init__.py
from ..c import e # a.b.c.e
a/b/c/moduleC.py
from ..c import d # a.b.c.d
a/b/c/d.py
a/b/c/e.py
a/b/c/x.py
"""]
relative_import_test_2 = [
"a.module",
["a", "a.module",
"a.sys",
"a.b", "a.b.y", "a.b.z",
"a.b.c", "a.b.c.d",
"a.b.c.e",
"a.b.c.moduleC",
"a.b.c.f",
"a.b.x",
"a.another"],
[], [],
"""\
mymodule.py
a/__init__.py
from . import sys # a.sys
a/another.py
a/module.py
from .b import y, z # a.b.y, a.b.z
a/gc.py
a/sys.py
a/b/__init__.py
from .c import moduleC # a.b.c.moduleC
from .c import d # a.b.c.d
a/b/x.py
a/b/y.py
a/b/z.py
a/b/c/__init__.py
from . import e # a.b.c.e
a/b/c/moduleC.py
#
from . import f # a.b.c.f
from .. import x # a.b.x
from ... import another # a.another
a/b/c/d.py
a/b/c/e.py
a/b/c/f.py
"""]
relative_import_test_3 = [
"a.module",
["a", "a.module"],
["a.bar"],
[],
"""\
a/__init__.py
def foo(): pass
a/module.py
from . import foo
from . import bar
"""]
relative_import_test_4 = [
"a.module",
["a", "a.module"],
[],
[],
"""\
a/__init__.py
def foo(): pass
a/module.py
from . import *
"""]
def open_file(path):
    """Open *path* for writing, creating any missing parent directories.

    Returns the open file object; the caller is responsible for closing it.
    """
    dirname = os.path.dirname(path)
    # Guard against a bare filename: os.makedirs('') would raise.  For real
    # directories, exist_ok makes "already created by an earlier call" a
    # no-op without the racy errno.EEXIST dance (Python 3.2+).
    if dirname:
        os.makedirs(dirname, exist_ok=True)
    return open(path, "w")
def create_package(source):
    """Materialize a package description under TEST_DIR.

    Unindented lines in *source* name files to create (relative to
    TEST_DIR); indented lines are written, stripped, as the contents of
    the most recently named file.
    """
    current = None
    try:
        for raw in source.splitlines():
            if raw[:1] in (" ", "\t"):
                # Content line: append to the file opened last.
                current.write(raw.strip() + "\n")
                continue
            # New file name: close the previous file and start the next.
            if current:
                current.close()
            current = open_file(os.path.join(TEST_DIR, raw.strip()))
    finally:
        if current:
            current.close()
class ModuleFinderTest(unittest.TestCase):
    """Drive modulefinder over the generated test packages and compare the
    found/missing/maybe-missing module sets against the expectations."""
    def _do_test(self, info, report=False):
        # info is a 5-tuple: module to import, expected found modules,
        # expected missing, expected maybe-missing, and the package source
        # description consumed by create_package().
        import_this, modules, missing, maybe_missing, source = info
        create_package(source)
        try:
            mf = modulefinder.ModuleFinder(path=TEST_PATH)
            mf.import_hook(import_this)
            if report:
                mf.report()
## # This wouldn't work in general when executed several times:
##            opath = sys.path[:]
##            sys.path = TEST_PATH
##            try:
##                __import__(import_this)
##            except:
##                import traceback; traceback.print_exc()
##            sys.path = opath
##            return
            modules = sorted(set(modules))
            found = sorted(mf.modules)
            # check if we found what we expected, not more, not less
            self.assertEqual(found, modules)
            # check for missing and maybe missing modules
            bad, maybe = mf.any_missing_maybe()
            self.assertEqual(bad, missing)
            self.assertEqual(maybe, maybe_missing)
        finally:
            # Remove the generated package tree so each test starts clean.
            shutil.rmtree(TEST_DIR)
    def test_package(self):
        self._do_test(package_test)
    def test_maybe(self):
        self._do_test(maybe_test)
    def test_maybe_new(self):
        self._do_test(maybe_test_new)
    def test_absolute_imports(self):
        self._do_test(absolute_import_test)
    def test_relative_imports(self):
        self._do_test(relative_import_test)
    def test_relative_imports_2(self):
        self._do_test(relative_import_test_2)
    def test_relative_imports_3(self):
        self._do_test(relative_import_test_3)
    def test_relative_imports_4(self):
        self._do_test(relative_import_test_4)
def test_main():
    # Hook used by CPython's regrtest machinery.
    support.run_unittest(ModuleFinderTest)
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
KontorConsulting/odoo | addons/product_margin/wizard/product_margin.py | 338 | 3457 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class product_margin(osv.osv_memory):
    # Transient wizard model: collects a date range and an invoice-state
    # filter, then opens the product list with that filter in the context.
    _name = 'product.margin'
    _description = 'Product Margin'
    # Wizard fields shown to the user.
    _columns = {
        'from_date': fields.date('From'),
        'to_date': fields.date('To'),
        'invoice_state': fields.selection([
            ('paid', 'Paid'),
            ('open_paid', 'Open and Paid'),
            ('draft_open_paid', 'Draft, Open and Paid'),
        ], 'Invoice State', select=True, required=True),
    }
    # Defaults: the current calendar year, counting open and paid invoices.
    _defaults = {
        'from_date': time.strftime('%Y-01-01'),
        'to_date': time.strftime('%Y-12-31'),
        'invoice_state': "open_paid",
    }
    def action_open_window(self, cr, uid, ids, context=None):
        """Open the product margins view filtered by this wizard's values.

        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: the ID or list of IDs if we want more than one
        @return: an ir.actions.act_window dict opening product.product
                 with the wizard's date range and invoice state in context
        """
        context = dict(context or {})
        def ref(module, xml_id):
            # Resolve an XML id to (model, database id).
            proxy = self.pool.get('ir.model.data')
            return proxy.get_object_reference(cr, uid, module, xml_id)
        model, search_view_id = ref('product', 'product_search_form_view')
        model, graph_view_id = ref('product_margin', 'view_product_margin_graph')
        model, form_view_id = ref('product_margin', 'view_product_margin_form')
        model, tree_view_id = ref('product_margin', 'view_product_margin_tree')
        #get the current product.margin object to obtain the values from it
        records = self.browse(cr, uid, ids, context=context)
        record = records[0]
        # The margin computation reads these keys from the context.
        context.update(invoice_state=record.invoice_state)
        if record.from_date:
            context.update(date_from=record.from_date)
        if record.to_date:
            context.update(date_to=record.to_date)
        views = [
            (tree_view_id, 'tree'),
            (form_view_id, 'form'),
            (graph_view_id, 'graph')
        ]
        return {
            'name': _('Product Margins'),
            'context': context,
            'view_type': 'form',
            "view_mode": 'tree,form,graph',
            'res_model': 'product.product',
            'type': 'ir.actions.act_window',
            'views': views,
            'view_id': False,
            'search_view_id': search_view_id,
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ryfeus/lambda-packs | Tensorflow_OpenCV_Nightly/source/tensorflow/contrib/keras/api/keras/preprocessing/sequence/__init__.py | 57 | 1172 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras data preprocessing utils for sequence data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.keras.python.keras.preprocessing.sequence import make_sampling_table
from tensorflow.contrib.keras.python.keras.preprocessing.sequence import pad_sequences
from tensorflow.contrib.keras.python.keras.preprocessing.sequence import skipgrams
# Scrub the __future__ helpers imported above from this module's namespace
# so they are not re-exported as part of the public preprocessing API.
del absolute_import
del division
del print_function
| mit |
hsnr-gamera/gamera | gamera/pstat.py | 1 | 37622 | # Copyright (c) 1999-2007 Gary Strangman; All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Comments and/or additions are welcome (send e-mail to:
# strang@nmr.mgh.harvard.edu).
#
"""
pstat.py module
#################################################
####### Written by: Gary Strangman ###########
####### Last modified: Dec 18, 2007 ###########
#################################################
This module provides some useful list and array manipulation routines
modeled after those found in the |Stat package by Gary Perlman, plus a
number of other useful list/file manipulation functions. The list-based
functions include:
abut (source,*args)
simpleabut (source, addon)
colex (listoflists,cnums)
collapse (listoflists,keepcols,collapsecols,fcn1=None,fcn2=None,cfcn=None)
dm (listoflists,criterion)
flat (l)
linexand (listoflists,columnlist,valuelist)
linexor (listoflists,columnlist,valuelist)
linedelimited (inlist,delimiter)
lineincols (inlist,colsize)
lineincustcols (inlist,colsizes)
list2string (inlist)
makelol(inlist)
makestr(x)
printcc (lst,extra=2)
printincols (listoflists,colsize)
pl (listoflists)
printl(listoflists)
replace (lst,oldval,newval)
recode (inlist,listmap,cols='all')
remap (listoflists,criterion)
roundlist (inlist,num_digits_to_round_floats_to)
sortby(listoflists,sortcols)
unique (inlist)
duplicates(inlist)
writedelimited (listoflists, delimiter, file, writetype='w')
Some of these functions have alternate versions which are defined only if
Numeric (NumPy) can be imported. These functions are generally named as
above, with an 'a' prefix.
aabut (source, *args)
acolex (a,indices,axis=1)
acollapse (a,keepcols,collapsecols,sterr=0,ns=0)
adm (a,criterion)
alinexand (a,columnlist,valuelist)
alinexor (a,columnlist,valuelist)
areplace (a,oldval,newval)
arecode (a,listmap,col='all')
arowcompare (row1, row2)
arowsame (row1, row2)
asortrows(a,axis=0)
aunique(inarray)
aduplicates(inarray)
Currently, the code is all but completely un-optimized. In many cases, the
array versions of functions amount simply to aliases to built-in array
functions/methods. Their inclusion here is for function name consistency.
"""
## CHANGE LOG:
## ==========
## 07-11-26 ... edited to work with numpy
## 01-11-15 ... changed list2string() to accept a delimiter
## 01-06-29 ... converted exec()'s to eval()'s to make compatible with Py2.1
## 01-05-31 ... added duplicates() and aduplicates() functions
## 00-12-28 ... license made GPL, docstring and import requirements
## 99-11-01 ... changed version to 0.3
## 99-08-30 ... removed get, getstrings, put, aget, aput (into io.py)
## 03/27/99 ... added areplace function, made replace fcn recursive
## 12/31/98 ... added writefc function for ouput to fixed column sizes
## 12/07/98 ... fixed import problem (failed on collapse() fcn)
## added __version__ variable (now 0.2)
## 12/05/98 ... updated doc-strings
## added features to collapse() function
## added flat() function for lists
## fixed a broken asortrows()
## 11/16/98 ... fixed minor bug in aput for 1D arrays
##
## 11/08/98 ... fixed aput to output large arrays correctly
import stats # required 3rd party module
import string, copy
from types import *
__version__ = 0.4
###=========================== LIST FUNCTIONS ==========================
###
### Here are the list functions, DEFINED FOR ALL SYSTEMS.
### Array functions (for NumPy-enabled computers) appear below.
###
def abut (source,*args):
    """
    Like the |Stat abut command.  It concatenates two lists side-by-side
    and returns the result.  '2D' lists are also accommodated for either
    argument (source or addon).  CAUTION: If one list is shorter, it will
    be repeated until it is as long as the longest list.  If this behavior
    is not desired, use pstat.simpleabut().

    NOTE: Python 2 code -- relies on integer '/' division and on the
    'types' module constants (ListType, TupleType).

    Usage:   abut(source, args)   where args=any # of lists
    Returns: a list of lists as long as the LONGEST list passed, source on
             the 'left', lists in <args> attached consecutively on the
             'right'
    """
    # Promote a scalar source to a one-element list so len() etc. work.
    if type(source) not in [ListType,TupleType]:
        source = [source]
    for addon in args:
        if type(addon) not in [ListType,TupleType]:
            addon = [addon]
        if len(addon) < len(source):                # is source list longer?
            if len(source) % len(addon) == 0:       # are they integer multiples?
                repeats = len(source)/len(addon)    # repeat addon n times
                origadd = copy.deepcopy(addon)
                for i in range(repeats-1):
                    addon = addon + origadd
            else:
                repeats = len(source)/len(addon)+1  # repeat addon x times,
                origadd = copy.deepcopy(addon)      # x is NOT an integer
                for i in range(repeats-1):
                    addon = addon + origadd
                # Trim the over-shoot back to the source length.
                addon = addon[0:len(source)]
        elif len(source) < len(addon):              # is addon list longer?
            if len(addon) % len(source) == 0:       # are they integer multiples?
                repeats = len(addon)/len(source)    # repeat source n times
                origsour = copy.deepcopy(source)
                for i in range(repeats-1):
                    source = source + origsour
            else:
                repeats = len(addon)/len(source)+1  # repeat source x times,
                origsour = copy.deepcopy(source)    # x is NOT an integer
                for i in range(repeats-1):
                    source = source + origsour
                # Trim the over-shoot back to the addon length.
                source = source[0:len(addon)]
        # Equal-length lists are zipped column-wise by simpleabut.
        source = simpleabut(source,addon)
    return source
def simpleabut (source, addon):
    """
    Concatenates two lists as columns and returns the result.  '2D' lists
    are also accommodated for either argument (source or addon).  This DOES
    NOT repeat either list to make the 2 lists of equal length.  Beware of
    list pairs with different lengths ... the resulting list will be the
    length of the FIRST list passed.

    Usage:   simpleabut(source,addon)  where source, addon=list (or
             list-of-lists)
    Returns: a list of lists as long as source, with source on the 'left'
             and addon on the 'right'
    """
    # Promote scalars to one-element lists.
    if type(source) not in [ListType,TupleType]:
        source = [source]
    if type(addon) not in [ListType,TupleType]:
        addon = [addon]
    minlen = min(len(source),len(addon))
    # NOTE(review): 'list' shadows the builtin here; kept as-is since this
    # is a documentation-only pass.  Deep copy so source rows are not
    # mutated in place.
    list = copy.deepcopy(source)        # start abut process
    # Four cases depending on whether each side is 1D or a list-of-lists;
    # scalars get wrapped so each output row is itself a list.
    if type(source[0]) not in [ListType,TupleType]:
        if type(addon[0]) not in [ListType,TupleType]:
            for i in range(minlen):
                list[i] = [source[i]] + [addon[i]]  # source/addon = column
        else:
            for i in range(minlen):
                list[i] = [source[i]] + addon[i]    # addon=list-of-lists
    else:
        if type(addon[0]) not in [ListType,TupleType]:
            for i in range(minlen):
                list[i] = source[i] + [addon[i]]    # source=list-of-lists
        else:
            for i in range(minlen):
                list[i] = source[i] + addon[i]      # source/addon = list-of-lists
    source = list
    return source
def colex (listoflists,cnums):
    """
    Extracts from listoflists the columns specified in the list 'cnums'
    (cnums can be an integer, a sequence of integers, or a string-expression that
    corresponds to a slice operation on the variable x ... e.g., 'x[3:]' will colex
    columns 3 onward from the listoflists).

    Usage:   colex (listoflists,cnums)
    Returns: a list-of-lists corresponding to the columns from listoflists
             specified by cnums, in the order the column numbers appear in cnums
    """
    # 'index' is deliberately global: a legacy Python 2 idiom so that the
    # lambdas below see each updated value of the column index.
    global index
    column = 0
    if type(cnums) in [ListType,TupleType]:   # if multiple columns to get
        index = cnums[0]
        column = map(lambda x: x[index], listoflists)
        for col in cnums[1:]:
            index = col
            column = abut(column,map(lambda x: x[index], listoflists))
    elif type(cnums) == StringType:          # if an 'x[3:]' type expr.
        # WARNING: the string is spliced into an eval'd expression verbatim --
        # never pass untrusted input here.
        evalstring = 'map(lambda x: x'+cnums+', listoflists)'
        column = eval(evalstring)
    else:                                     # else it's just 1 col to get
        index = cnums
        column = map(lambda x: x[index], listoflists)
    return column
def collapse (listoflists,keepcols,collapsecols,fcn1=None,fcn2=None,cfcn=None):
    """
    Averages data in collapsecol, keeping all unique items in keepcols
    (using unique, which keeps unique LISTS of column numbers), retaining the
    unique sets of values in keepcols, the mean for each.  Setting fcn1
    and/or fcn2 to point to a function rather than None (e.g., stats.sterr, len)
    will append those results (e.g., the sterr, N) after each calculated mean.
    cfcn is the collapse function to apply (defaults to mean, defined here in the
    pstat module to avoid circular imports with stats.py, but harmonicmean or
    others could be passed).

    Usage:    collapse (listoflists,keepcols,collapsecols,fcn1=None,fcn2=None,cfcn=None)
    Returns: a list of lists with all unique permutations of entries appearing in
             columns ("conditions") specified by keepcols, abutted with the result of
             cfcn (if cfcn=None, defaults to the mean) of each column specified by
             collapsecols.
    """
    def collmean (inlist):
        # Default collapse function: arithmetic mean of a 1D list.
        s = 0
        for item in inlist:
            s = s + item
        return s/float(len(inlist))

    # Normalize scalar column specs to 1-element lists.
    if type(keepcols) not in [ListType,TupleType]:
        keepcols = [keepcols]
    if type(collapsecols) not in [ListType,TupleType]:
        collapsecols = [collapsecols]
    if cfcn == None:
        cfcn = collmean
    if keepcols == []:
        # No grouping columns: collapse each requested column over all rows.
        means = [0]*len(collapsecols)
        for i in range(len(collapsecols)):
            avgcol = colex(listoflists,collapsecols[i])
            means[i] = cfcn(avgcol)
            if fcn1:
                try:
                    test = fcn1(avgcol)
                except Exception:
                    test = 'N/A'   # fcn1 not applicable to this column
                means[i] = [means[i], test]
            if fcn2:
                try:
                    test = fcn2(avgcol)
                except Exception:
                    test = 'N/A'
                # NOTE(review): 'test' (the fcn2 result) is computed but the
                # code appends len(avgcol) instead -- looks like a bug; confirm
                # against the keepcols!=[] branch which appends 'test'.
                try:
                    means[i] = means[i] + [len(avgcol)]
                except TypeError:
                    means[i] = [means[i],len(avgcol)]
        return means
    else:
        # Group rows by the unique value-combinations found in keepcols.
        values = colex(listoflists,keepcols)
        uniques = unique(values)
        uniques.sort()
        newlist = []
        if type(keepcols) not in [ListType,TupleType]: keepcols = [keepcols]
        for item in uniques:
            if type(item) not in [ListType,TupleType]: item =[item]
            tmprows = linexand(listoflists,keepcols,item)
            for col in collapsecols:
                avgcol = colex(tmprows,col)
                item.append(cfcn(avgcol))
                if fcn1 <> None:
                    try:
                        test = fcn1(avgcol)
                    except Exception:
                        test = 'N/A'
                    item.append(test)
                if fcn2 <> None:
                    try:
                        test = fcn2(avgcol)
                    except Exception:
                        test = 'N/A'
                    item.append(test)
            newlist.append(item)
        return newlist
def dm (listoflists,criterion):
    """
    Returns rows from the passed list of lists that meet the criteria in
    the passed criterion expression (a string as a function of x; e.g., 'x[3]>=9'
    will return all rows where the 4th column>=9 and "x[2]=='N'" will return rows
    with column 2 equal to the string 'N').

    Usage:   dm (listoflists, criterion)
    Returns: rows from listoflists that meet the specified criterion.
    """
    # WARNING: 'criterion' is spliced into an eval'd expression verbatim --
    # never pass untrusted input here.
    function = 'filter(lambda x: '+criterion+',listoflists)'
    lines = eval(function)
    return lines
def flat(l):
    """
    Returns the flattened version of a '2D' list.  List-correlate to the
    a.ravel() method of NumPy arrays.

    Usage:   flat(l)
    """
    flattened = []
    for row in l:
        flattened.extend(row)
    return flattened
def linexand (listoflists,columnlist,valuelist):
    """
    Returns the rows of a list of lists where col (from columnlist) = val
    (from valuelist) for EVERY pair of values (columnlist[i],valuelists[i]).
    len(columnlist) must equal len(valuelist).

    Usage:   linexand (listoflists,columnlist,valuelist)
    Returns: the rows of listoflists where columnlist[i]=valuelist[i] for ALL i
    """
    if type(columnlist) not in [ListType,TupleType]:
        columnlist = [columnlist]
    if type(valuelist) not in [ListType,TupleType]:
        valuelist = [valuelist]
    # Build an 'x[c]==v and x[c]==v and ...' expression string, then eval it.
    # WARNING: values are interpolated verbatim -- never pass untrusted input.
    criterion = ''
    for i in range(len(columnlist)):
        if type(valuelist[i])==StringType:
            critval = '\'' + valuelist[i] + '\''   # quote string values
        else:
            critval = str(valuelist[i])
        criterion = criterion + ' x['+str(columnlist[i])+']=='+critval+' and'
    criterion = criterion[0:-3]         # remove the "and" after the last crit
    function = 'filter(lambda x: '+criterion+',listoflists)'
    lines = eval(function)
    return lines
def linexor (listoflists,columnlist,valuelist):
    """
    Returns the rows of a list of lists where col (from columnlist) = val
    (from valuelist) for ANY pair of values (colunmlist[i],valuelist[i[).
    One value is required for each column in columnlist.  If only one value
    exists for columnlist but multiple values appear in valuelist, the
    valuelist values are all assumed to pertain to the same column.

    Usage:   linexor (listoflists,columnlist,valuelist)
    Returns: the rows of listoflists where columnlist[i]=valuelist[i] for ANY i
    """
    if type(columnlist) not in [ListType,TupleType]:
        columnlist = [columnlist]
    if type(valuelist) not in [ListType,TupleType]:
        valuelist = [valuelist]
    criterion = ''
    if len(columnlist) == 1 and len(valuelist) > 1:
        # One column, many values: test that column against every value.
        columnlist = columnlist*len(valuelist)
    # Build an 'x[c]==v or x[c]==v or ...' expression string, then eval it.
    # WARNING: values are interpolated verbatim -- never pass untrusted input.
    for i in range(len(columnlist)):   # build an exec string
        if type(valuelist[i])==StringType:
            critval = '\'' + valuelist[i] + '\''
        else:
            critval = str(valuelist[i])
        criterion = criterion + ' x['+str(columnlist[i])+']=='+critval+' or'
    criterion = criterion[0:-2]         # remove the "or" after the last crit
    function = 'filter(lambda x: '+criterion+',listoflists)'
    lines = eval(function)
    return lines
def linedelimited (inlist,delimiter):
    """
    Returns a string composed of elements in inlist, with each element
    separated by 'delimiter.'  Used by function writedelimited.  Use '\t'
    for tab-delimiting.

    Usage:   linedelimited (inlist,delimiter)
    """
    outstr = ''
    for item in inlist:
        if type(item) <> StringType:   # coerce non-strings (Python 2 check)
            item = str(item)
        outstr = outstr + item + delimiter
    outstr = outstr[0:-1]              # drop the trailing delimiter
    return outstr
def lineincols (inlist,colsize):
    """
    Returns a string composed of elements in inlist, with each element
    right-aligned in columns of (fixed) colsize.

    Usage:   lineincols (inlist,colsize)   where colsize is an integer
    """
    outstr = ''
    for item in inlist:
        if type(item) <> StringType:
            item = str(item)
        size = len(item)
        if size <= colsize:
            for i in range(colsize-size):   # left-pad to right-align
                outstr = outstr + ' '
            outstr = outstr + item
        else:
            # NOTE(review): over-long items keep colsize+1 characters (one
            # past the column width) -- confirm this off-by-one is intended.
            outstr = outstr + item[0:colsize+1]
    return outstr
def lineincustcols (inlist,colsizes):
    """
    Returns a string composed of elements in inlist, with each element
    right-aligned in a column of width specified by a sequence colsizes.  The
    length of colsizes must be greater than or equal to the number of columns
    in inlist.

    Usage:   lineincustcols (inlist,colsizes)
    Returns: formatted string created from inlist
    """
    outstr = ''
    for i in range(len(inlist)):
        if type(inlist[i]) <> StringType:   # coerce non-strings (Python 2)
            item = str(inlist[i])
        else:
            item = inlist[i]
        size = len(item)
        if size <= colsizes[i]:
            for j in range(colsizes[i]-size):   # left-pad to right-align
                outstr = outstr + ' '
            outstr = outstr + item
        else:
            # NOTE(review): keeps colsizes[i]+1 characters -- same off-by-one
            # quirk as lineincols; confirm intent.
            outstr = outstr + item[0:colsizes[i]+1]
    return outstr
def list2string (inlist,delimit=' '):
    """
    Converts a 1D list to a single long string for file output, using
    the string.join function.

    Usage:   list2string (inlist,delimit=' ')
    Returns: the string created from inlist
    """
    stringlist = map(makestr,inlist)          # coerce every element to str
    return string.join(stringlist,delimit)    # Python 2 'string' module join
def makelol(inlist):
    """
    Converts a 1D list to a 2D list (i.e., a list-of-lists).  Useful when you
    want to use put() to write a 1D list one item per line in the file.

    Usage:   makelol(inlist)
    Returns: if l = [1,2,'hi'] then returns [[1],[2],['hi']] etc.
    """
    # Wrap each element in its own single-item row.
    return [[element] for element in inlist]
def makestr (x):
    # Coerce any non-string value to its str() representation (identity for
    # strings); tiny helper shared by the formatting/printing routines.
    if type(x) <> StringType:
        x = str(x)
    return x
def printcc (lst,extra=2):
    """
    Prints a list of lists in columns, customized by the max size of items
    within the columns (max size of items in col, plus 'extra' number of spaces).
    Use 'dashes' or '\\n' in the list-of-lists to print dashes or blank lines,
    respectively.

    Usage:   printcc (lst,extra=2)
    Returns: None
    """
    if type(lst[0]) not in [ListType,TupleType]:   # promote a 1D list to 2D
        lst = [lst]
    rowstokill = []
    list2print = copy.deepcopy(lst)
    # Drop separator rows ('\n', 'dashes', '') before measuring column widths.
    for i in range(len(lst)):
        if lst[i] == ['\n'] or lst[i]=='\n' or lst[i]=='dashes' or lst[i]=='' or lst[i]==['']:
            rowstokill = rowstokill + [i]
    rowstokill.reverse()   # delete blank rows from the end
    for row in rowstokill:
        del list2print[row]
    # Column width = longest stringified item in that column, plus 'extra'.
    maxsize = [0]*len(list2print[0])
    for col in range(len(list2print[0])):
        items = colex(list2print,col)
        items = map(makestr,items)
        maxsize[col] = max(map(len,items)) + extra
    # Second pass over the ORIGINAL lst so separators print in place.
    for row in lst:
        if row == ['\n'] or row == '\n' or row == '' or row == ['']:
            print
        elif row == ['dashes'] or row == 'dashes':
            dashes = [0]*len(maxsize)
            for j in range(len(maxsize)):
                dashes[j] = '-'*(maxsize[j]-2)
            print lineincustcols(dashes,maxsize)
        else:
            print lineincustcols(row,maxsize)
    return None
def printincols (listoflists,colsize):
    """
    Prints a list of lists in columns of (fixed) colsize width, where
    colsize is an integer.

    Usage:   printincols (listoflists,colsize)
    Returns: None
    """
    # Delegates per-row formatting to lineincols (right-aligned fixed columns).
    for row in listoflists:
        print lineincols(row,colsize)
    return None
def pl (listoflists):
    """
    Prints a list of lists, 1 list (row) at a time.

    Usage:   pl(listoflists)
    Returns: None
    """
    for row in listoflists:
        if row[-1] == '\n':   # row already ends with a newline: the trailing
            print row,        # comma suppresses print's own newline (Python 2)
        else:
            print row
    return None
def printl(listoflists):
    """Alias for pl."""
    # Backwards-compatible alias; see pl() for behavior.
    pl(listoflists)
    return
def replace (inlst,oldval,newval):
    """
    Replaces all occurrences of 'oldval' with 'newval', recursively.

    Usage:   replace (inlst,oldval,newval)
    """
    lst = inlst*1   # shallow copy of the top level only; nested lists are
                    # rebuilt by the recursive calls below
    for i in range(len(lst)):
        if type(lst[i]) not in [ListType,TupleType]:
            if lst[i]==oldval: lst[i]=newval
        else:
            lst[i] = replace(lst[i],oldval,newval)   # recurse into sublists
    return lst
def recode (inlist,listmap,cols=None):
    """
    Changes the values in a list to a new set of values (useful when
    you need to recode data from (e.g.) strings to numbers.  cols defaults
    to None (meaning all columns are recoded).

    Usage:   recode (inlist,listmap,cols=None)  cols=recode cols, listmap=2D list
    Returns: inlist with the appropriate values replaced with new ones
    """
    lst = copy.deepcopy(inlist)
    if cols != None:
        if type(cols) not in [ListType,TupleType]:
            cols = [cols]
        for col in cols:
            for row in range(len(lst)):
                try:
                    # listmap rows are [oldvalue, newvalue] pairs.
                    idx = colex(listmap,0).index(lst[row][col])
                    lst[row][col] = listmap[idx][1]
                except ValueError:
                    pass   # value not in the map: leave it unchanged
    else:
        for row in range(len(lst)):
            # BUGFIX: the inner loop previously ranged over len(lst) (the
            # number of ROWS), which mis-iterated columns for any non-square
            # list-of-lists; iterate this row's own columns instead.
            for col in range(len(lst[row])):
                try:
                    idx = colex(listmap,0).index(lst[row][col])
                    lst[row][col] = listmap[idx][1]
                except ValueError:
                    pass
    return lst
def remap (listoflists,criterion):
    """
    Remaps values in a given column of a 2D list (listoflists).  This requires
    a criterion as a function of 'x' so that the result of the following is
    returned ... map(lambda x: 'criterion',listoflists).

    Usage:    remap(listoflists,criterion)    criterion=string
    Returns: remapped version of listoflists
    """
    # WARNING: 'criterion' is spliced into an eval'd expression verbatim --
    # never pass untrusted input here.
    function = 'map(lambda x: '+criterion+',listoflists)'
    lines = eval(function)
    return lines
def roundlist (inlist,digits):
    """
    Goes through each element in a 1D or 2D inlist, and applies the following
    function to all elements of FloatType ... round(element,digits).

    Usage:   roundlist(inlist,digits)
    Returns: list with rounded floats
    """
    if type(inlist[0]) in [IntType, FloatType]:   # promote a 1D list to 2D
        inlist = [inlist]
    l = inlist*1   # shallow top-level copy; rows are modified in place
    for i in range(len(l)):
        for j in range(len(l[i])):
            if type(l[i][j])==FloatType:   # ints/strings pass through untouched
                l[i][j] = round(l[i][j],digits)
    return l
def sortby(listoflists,sortcols):
    """
    Sorts a list of lists on the column(s) specified in the sequence
    sortcols.

    Usage:   sortby(listoflists,sortcols)
    Returns: sorted list, unchanged column ordering
    """
    # Prepend the sort-key column(s), sort on them, then strip them back off.
    newlist = abut(colex(listoflists,sortcols),listoflists)
    newlist.sort()
    try:
        numcols = len(sortcols)
    except TypeError:        # sortcols was a single integer, not a sequence
        numcols = 1
    crit = '[' + str(numcols) + ':]'
    newlist = colex(newlist,crit)   # slice-expression form of colex
    return newlist
def unique (inlist):
    """
    Returns all unique items in the passed list.  If the a list-of-lists
    is passed, unique LISTS are found (i.e., items in the first dimension are
    compared).

    Usage:   unique (inlist)
    Returns: the unique elements (or rows) in inlist
    """
    # Order-preserving de-duplication; membership test (not a set) because
    # elements may be unhashable lists.
    seen = []
    for candidate in inlist:
        if candidate in seen:
            continue
        seen.append(candidate)
    return seen
def duplicates(inlist):
    """
    Returns duplicate items in the FIRST dimension of the passed list.

    Usage:   duplicates (inlist)
    """
    # An item occurring k times is reported k-1 times (each occurrence that
    # still has a later twin), matching the historical behavior.
    dups = []
    for pos, item in enumerate(inlist):
        if item in inlist[pos + 1:]:
            dups.append(item)
    return dups
def nonrepeats(inlist):
    """
    Returns items that are NOT duplicated in the first dim of the passed list.

    Usage:   nonrepeats (inlist)
    """
    # Keep only the elements that occur exactly once, in original order.
    return [item for item in inlist if inlist.count(item) == 1]
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
#=================== PSTAT ARRAY FUNCTIONS =====================
try: # DEFINE THESE *ONLY* IF numpy IS AVAILABLE
import numpy as N
def aabut (source, *args):
    """
    Like the |Stat abut command.  It concatenates two arrays column-wise
    and returns the result.  CAUTION:  If one array is shorter, it will be
    repeated until it is as long as the other.

    Usage:   aabut (source, args)    where args=any # of arrays
    Returns: an array as long as the LONGEST array past, source appearing on the
             'left', arrays in <args> attached on the 'right'.
    """
    if len(source.shape)==1:            # promote a 1D source to one column
        width = 1
        source = N.resize(source,[source.shape[0],width])
    else:
        width = source.shape[1]
    for addon in args:
        if len(addon.shape)==1:         # promote a 1D addon to one column
            width = 1
            addon = N.resize(addon,[source.shape[0],width])
        else:
            width = source.shape[1]
        # Recycle the shorter of the two via N.resize so lengths match.
        if len(addon) < len(source):
            addon = N.resize(addon,[source.shape[0],addon.shape[1]])
        elif len(source) < len(addon):
            source = N.resize(source,[addon.shape[0],source.shape[1]])
        source = N.concatenate((source,addon),1)   # abut column-wise
    return source
def acolex (a,indices,axis=1):
    """
    Extracts specified indices (a list) from passed array, along passed
    axis (column extraction is default).  BEWARE: A 1D array is presumed to be a
    column-array (and that the whole array will be returned as a column).

    Usage:   acolex (a,indices,axis=1)
    Returns: the columns of a specified by indices
    """
    if type(indices) not in [ListType,TupleType,N.ndarray]:
        indices = [indices]
    if len(N.shape(a)) == 1:
        # NOTE(review): for 1D input the whole array is returned as a single
        # column and 'indices' is ignored -- confirm callers expect this.
        cols = N.resize(a,[a.shape[0],1])
    else:
        cols = N.take(a,indices,axis)
    return cols
def acollapse (a,keepcols,collapsecols,fcn1=None,fcn2=None,cfcn=None):
"""
Averages data in collapsecol, keeping all unique items in keepcols
(using unique, which keeps unique LISTS of column numbers), retaining
the unique sets of values in keepcols, the mean for each. If stderror or
N of the mean are desired, set either or both parameters to 1.
Usage: acollapse (a,keepcols,collapsecols,fcn1=None,fcn2=None,cfcn=None)
Returns: unique 'conditions' specified by the contents of columns specified
by keepcols, abutted with the mean(s) of column(s) specified by
collapsecols
"""
def acollmean (inarray):
return N.sum(N.ravel(inarray))
if type(keepcols) not in [ListType,TupleType,N.ndarray]:
keepcols = [keepcols]
if type(collapsecols) not in [ListType,TupleType,N.ndarray]:
collapsecols = [collapsecols]
if cfcn == None:
cfcn = acollmean
if keepcols == []:
avgcol = acolex(a,collapsecols)
means = N.sum(avgcol)/float(len(avgcol))
if fcn1<>None:
try:
test = fcn1(avgcol)
except Exception:
test = N.array(['N/A']*len(means))
means = aabut(means,test)
if fcn2<>None:
try:
test = fcn2(avgcol)
except Exception:
test = N.array(['N/A']*len(means))
means = aabut(means,test)
return means
else:
if type(keepcols) not in [ListType,TupleType,N.ndarray]:
keepcols = [keepcols]
values = colex(a,keepcols) # so that "item" can be appended (below)
uniques = unique(values) # get a LIST, so .sort keeps rows intact
uniques.sort()
newlist = []
for item in uniques:
if type(item) not in [ListType,TupleType,N.ndarray]:
item =[item]
tmprows = alinexand(a,keepcols,item)
for col in collapsecols:
avgcol = acolex(tmprows,col)
item.append(acollmean(avgcol))
if fcn1<>None:
try:
test = fcn1(avgcol)
except Exception:
test = 'N/A'
item.append(test)
if fcn2<>None:
try:
test = fcn2(avgcol)
except Exception:
test = 'N/A'
item.append(test)
newlist.append(item)
try:
new_a = N.array(newlist)
except TypeError:
new_a = N.array(newlist,'O')
return new_a
def adm (a,criterion):
    """
    Returns rows from the passed list of lists that meet the criteria in
    the passed criterion expression (a string as a function of x).

    Usage:   adm (a,criterion)   where criterion is like 'x[2]==37'
    """
    # WARNING: 'criterion' is spliced into an eval'd expression verbatim --
    # never pass untrusted input here.
    function = 'filter(lambda x: '+criterion+',a)'
    lines = eval(function)
    try:
        lines = N.array(lines)
    except Exception:
        lines = N.array(lines,dtype='O')   # mixed types -> object array
    return lines
def isstring(x):
    # 1 if x is a (byte) string, else 0 -- Python 2 StringType check, used as
    # an integer predicate by callers.
    if type(x)==StringType:
        return 1
    else:
        return 0
def alinexand (a,columnlist,valuelist):
    """
    Returns the rows of an array where col (from columnlist) = val
    (from valuelist).  One value is required for each column in columnlist.

    Usage:   alinexand (a,columnlist,valuelist)
    Returns: the rows of a where columnlist[i]=valuelist[i] for ALL i
    """
    if type(columnlist) not in [ListType,TupleType,N.ndarray]:
        columnlist = [columnlist]
    if type(valuelist) not in [ListType,TupleType,N.ndarray]:
        valuelist = [valuelist]
    # Build an 'x[c]==v and ...' expression and delegate the eval to adm().
    # WARNING: values are interpolated verbatim -- never pass untrusted input.
    criterion = ''
    for i in range(len(columnlist)):
        if type(valuelist[i])==StringType:
            critval = '\'' + valuelist[i] + '\''   # quote string values
        else:
            critval = str(valuelist[i])
        criterion = criterion + ' x['+str(columnlist[i])+']=='+critval+' and'
    criterion = criterion[0:-3]         # remove the "and" after the last crit
    return adm(a,criterion)
def alinexor (a,columnlist,valuelist):
    """
    Returns the rows of an array where col (from columnlist) = val (from
    valuelist).  One value is required for each column in columnlist.
    The exception is if either columnlist or valuelist has only 1 value,
    in which case that item will be expanded to match the length of the
    other list.

    Usage:   alinexor (a,columnlist,valuelist)
    Returns: the rows of a where columnlist[i]=valuelist[i] for ANY i
    """
    if type(columnlist) not in [ListType,TupleType,N.ndarray]:
        columnlist = [columnlist]
    if type(valuelist) not in [ListType,TupleType,N.ndarray]:
        valuelist = [valuelist]
    criterion = ''
    # Broadcast the 1-element side to match the other, as documented above.
    if len(columnlist) == 1 and len(valuelist) > 1:
        columnlist = columnlist*len(valuelist)
    elif len(valuelist) == 1 and len(columnlist) > 1:
        valuelist = valuelist*len(columnlist)
    # Build an 'x[c]==v or ...' expression and delegate the eval to adm().
    # WARNING: values are interpolated verbatim -- never pass untrusted input.
    for i in range(len(columnlist)):
        if type(valuelist[i])==StringType:
            critval = '\'' + valuelist[i] + '\''
        else:
            critval = str(valuelist[i])
        criterion = criterion + ' x['+str(columnlist[i])+']=='+critval+' or'
    criterion = criterion[0:-2]         # remove the "or" after the last crit
    return adm(a,criterion)
def areplace (a,oldval,newval):
    """
    Replaces all occurrences of oldval with newval in array a.

    Usage:   areplace(a,oldval,newval)
    """
    # Element-wise select: newval where a matches oldval, original value
    # elsewhere.  Returns a new array; 'a' is not modified.
    match_mask = N.equal(a, oldval)
    return N.where(match_mask, newval, a)
def arecode (a,listmap,col='all'):
    """
    Remaps the values in an array to a new set of values (useful when
    you need to recode data from (e.g.) strings to numbers as most stats
    packages require.  Can work on SINGLE columns, or 'all' columns at once.
    @@@BROKEN 2007-11-26

    Usage:   arecode (a,listmap,col='all')
    Returns: a version of array a where listmap[i][0] = (instead) listmap[i][1]
    """
    ashape = a.shape
    if col == 'all':
        work = a.ravel()
    else:
        work = acolex(a,col)
        work = work.ravel()
    for pair in listmap:   # pair = [oldvalue, newvalue]
        if type(pair[1]) == StringType or work.dtype.char=='O' or a.dtype.char=='O':
            # String/object replacements force an object-dtype element loop.
            work = N.array(work,dtype='O')
            a = N.array(a,dtype='O')
            for i in range(len(work)):
                if work[i]==pair[0]:
                    work[i] = pair[1]
            # NOTE(review): these returns fire on the FIRST string/object pair,
            # so later pairs in listmap are never applied (hence @@@BROKEN).
            if col == 'all':
                return N.reshape(work,ashape)
            else:
                return N.concatenate([a[:,0:col],work[:,N.newaxis],a[:,col+1:]],1)
        else:   # must be a non-Object type array and replacement
            work = N.where(work==pair[0],pair[1],work)
    # NOTE(review): this final return indexes a[:,0:col] and therefore breaks
    # when col=='all' (string slice index) -- part of the @@@BROKEN status.
    return N.concatenate([a[:,0:col],work[:,N.newaxis],a[:,col+1:]],1)
def arowcompare(row1, row2):
    """
    Compares two rows from an array, regardless of whether it is an
    array of numbers or of python objects (which requires the cmp function).
    @@@PURPOSE? 2007-11-26

    Usage:   arowcompare(row1,row2)
    Returns: an array of equal length containing 1s where the two rows had
             identical elements and 0 otherwise
    """
    # BUGFIX: removed a stray bare 'return' that made this function always
    # return None before reaching the comparison below (so arowsame() never
    # saw a real comparison vector).
    # BUGFIX: the object-dtype test read 'row2.dtype==\'O\'' (comparing the
    # dtype object itself to a string), which never matched; use .char.
    if row1.dtype.char=='O' or row2.dtype.char=='O':
        cmpvect = N.logical_not(abs(N.array(map(cmp,row1,row2)))) # cmp fcn gives -1,0,1
    else:
        cmpvect = N.equal(row1,row2)
    return cmpvect
def arowsame(row1, row2):
    """
    Compares two rows from an array, regardless of whether it is an
    array of numbers or of python objects (which requires the cmp function).

    Usage:   arowsame(row1,row2)
    Returns: 1 if the two rows are identical, 0 otherwise.
    """
    # Reduce the element-wise comparison from arowcompare() to a single
    # truth value.
    cmpval = N.alltrue(arowcompare(row1,row2))
    return cmpval
def asortrows(a,axis=0):
    """
    Sorts an array "by rows".  This differs from the Numeric.sort() function,
    which sorts elements WITHIN the given axis.  Instead, this function keeps
    the elements along the given axis intact, but shifts them 'up or down'
    relative to one another.

    Usage:   asortrows(a,axis=0)
    Returns: sorted version of a
    """
    # NOTE(review): N.sort sorts each lane along 'axis' independently, which
    # does NOT keep whole rows intact as the docstring claims -- confirm the
    # intended semantics (a lexsort/argsort would be needed for true row sort).
    # 'mergesort' is the stable sort kind offered by N.sort.
    return N.sort(a,axis=axis,kind='mergesort')
def aunique(inarray):
    """
    Returns unique items in the FIRST dimension of the passed array. Only
    works on arrays NOT including string items.

    Usage:   aunique (inarray)
    """
    uniques = N.array([inarray[0]])
    if len(uniques.shape) == 1:            # IF IT'S A 1D ARRAY
        for item in inarray[1:]:
            if N.add.reduce(N.equal(uniques,item).ravel()) == 0:
                # BUGFIX: the old code first tried
                # 'N.concatenate([uniques,N.array[N.newaxis,:]])', which
                # subscripts the N.array *function* and always raised
                # TypeError, only succeeding via its except fallback.
                # Append the scalar item directly instead.
                uniques = N.concatenate([uniques,N.array([item])])
    else:                                  # IT MUST BE A 2+D ARRAY
        if inarray.dtype.char != 'O':  # not an Object array
            for item in inarray[1:]:
                if not N.sum(N.alltrue(N.equal(uniques,item),1)):
                    try:
                        uniques = N.concatenate( [uniques,item[N.newaxis,:]] )
                    except TypeError:    # the item to add isn't a list
                        uniques = N.concatenate([uniques,N.array([item])])
                else:
                    pass  # this item is already in the uniques array
        else:   # must be an Object array, alltrue/equal functions don't work
            for item in inarray[1:]:
                newflag = 1
                for unq in uniques:  # NOTE: cmp --> 0=same, -1=<, 1=>
                    test = N.sum(abs(N.array(map(cmp,item,unq))))
                    if test == 0:   # if item identical to any 1 row in uniques
                        newflag = 0 # then not a novel item to add
                        break
                if newflag == 1:
                    try:
                        uniques = N.concatenate( [uniques,item[N.newaxis,:]] )
                    except TypeError:    # the item to add isn't a list
                        uniques = N.concatenate([uniques,N.array([item])])
    return uniques
def aduplicates(inarray):
    """
    Returns duplicate items in the FIRST dimension of the passed array. Only
    works on arrays NOT including string items.

    Usage:   aduplicates (inarray)
    """
    inarray = N.array(inarray)
    if len(inarray.shape) == 1:            # IF IT'S A 1D ARRAY
        dups = []
        inarray = inarray.tolist()
        for i in range(len(inarray)):
            if inarray[i] in inarray[i+1:]:   # has a later twin -> duplicate
                dups.append(inarray[i])
        dups = aunique(dups)                  # de-duplicate the report itself
    else:                                  # IT MUST BE A 2+D ARRAY
        dups = []
        aslist = inarray.tolist()          # compare whole rows as lists
        for i in range(len(aslist)):
            if aslist[i] in aslist[i+1:]:
                dups.append(aslist[i])
        dups = unique(dups)
        dups = N.array(dups)
    return dups
except ImportError: # IF NUMERIC ISN'T AVAILABLE, SKIP ALL arrayfuncs
pass
| gpl-2.0 |
mancoast/CPythonPyc_test | cpython/277_test_filecmp.py | 136 | 5396 |
import os, filecmp, shutil, tempfile
import unittest
from test import test_support
class FileCompareTestCase(unittest.TestCase):
    """Exercise filecmp.cmp() on two identical files and one longer variant."""
    def setUp(self):
        # Create the baseline file, an identical copy, and a longer variant.
        self.name = test_support.TESTFN
        self.name_same = test_support.TESTFN + '-same'
        self.name_diff = test_support.TESTFN + '-diff'
        data = 'Contents of file go here.\n'
        for name in [self.name, self.name_same, self.name_diff]:
            output = open(name, 'w')
            output.write(data)
            output.close()
        output = open(self.name_diff, 'a+')
        output.write('An extra line.\n')
        output.close()
        self.dir = tempfile.gettempdir()
    def tearDown(self):
        # Remove the three temp files created in setUp.
        os.unlink(self.name)
        os.unlink(self.name_same)
        os.unlink(self.name_diff)
    def test_matching(self):
        # Identical content must compare equal in both shallow and deep mode.
        self.assertTrue(filecmp.cmp(self.name, self.name_same),
                        "Comparing file to itself fails")
        self.assertTrue(filecmp.cmp(self.name, self.name_same, shallow=False),
                        "Comparing file to itself fails")
        self.assertTrue(filecmp.cmp(self.name, self.name, shallow=False),
                        "Comparing file to identical file fails")
        self.assertTrue(filecmp.cmp(self.name, self.name),
                        "Comparing file to identical file fails")
    def test_different(self):
        # Differing content, and file-vs-directory, must compare unequal.
        self.assertFalse(filecmp.cmp(self.name, self.name_diff),
                         "Mismatched files compare as equal")
        self.assertFalse(filecmp.cmp(self.name, self.dir),
                         "File and directory compare as equal")
class DirCompareTestCase(unittest.TestCase):
    """Exercise filecmp.cmpfiles() and filecmp.dircmp on temp directories."""
    def setUp(self):
        # Build three dirs: 'dir' and 'dir-same' hold one identical file
        # (with a case-twisted name on case-insensitive filesystems, to
        # verify case-insensitive comparison); 'dir-diff' adds a file2.
        tmpdir = tempfile.gettempdir()
        self.dir = os.path.join(tmpdir, 'dir')
        self.dir_same = os.path.join(tmpdir, 'dir-same')
        self.dir_diff = os.path.join(tmpdir, 'dir-diff')
        self.caseinsensitive = os.path.normcase('A') == os.path.normcase('a')
        data = 'Contents of file go here.\n'
        for dir in [self.dir, self.dir_same, self.dir_diff]:
            shutil.rmtree(dir, True)   # clear leftovers from earlier runs
            os.mkdir(dir)
            if self.caseinsensitive and dir is self.dir_same:
                fn = 'FiLe'     # Verify case-insensitive comparison
            else:
                fn = 'file'
            output = open(os.path.join(dir, fn), 'w')
            output.write(data)
            output.close()
        output = open(os.path.join(self.dir_diff, 'file2'), 'w')
        output.write('An extra file.\n')
        output.close()
    def tearDown(self):
        shutil.rmtree(self.dir)
        shutil.rmtree(self.dir_same)
        shutil.rmtree(self.dir_diff)
    def test_cmpfiles(self):
        # cmpfiles returns a (matches, mismatches, errors) triple.
        self.assertTrue(filecmp.cmpfiles(self.dir, self.dir, ['file']) ==
                        (['file'], [], []),
                        "Comparing directory to itself fails")
        self.assertTrue(filecmp.cmpfiles(self.dir, self.dir_same, ['file']) ==
                        (['file'], [], []),
                        "Comparing directory to same fails")
        # Try it with shallow=False
        self.assertTrue(filecmp.cmpfiles(self.dir, self.dir, ['file'],
                                         shallow=False) ==
                        (['file'], [], []),
                        "Comparing directory to itself fails")
        self.assertTrue(filecmp.cmpfiles(self.dir, self.dir_same, ['file'],
                                         shallow=False),
                        "Comparing directory to same fails")
        # Add different file2
        output = open(os.path.join(self.dir, 'file2'), 'w')
        output.write('Different contents.\n')
        output.close()
        self.assertFalse(filecmp.cmpfiles(self.dir, self.dir_same,
                                          ['file', 'file2']) ==
                         (['file'], ['file2'], []),
                         "Comparing mismatched directories fails")
    def test_dircmp(self):
        # Check attributes for comparison of two identical directories
        d = filecmp.dircmp(self.dir, self.dir_same)
        if self.caseinsensitive:
            self.assertEqual([d.left_list, d.right_list],[['file'], ['FiLe']])
        else:
            self.assertEqual([d.left_list, d.right_list],[['file'], ['file']])
        self.assertTrue(d.common == ['file'])
        self.assertTrue(d.left_only == d.right_only == [])
        self.assertTrue(d.same_files == ['file'])
        self.assertTrue(d.diff_files == [])
        # Check attributes for comparison of two different directories
        d = filecmp.dircmp(self.dir, self.dir_diff)
        self.assertTrue(d.left_list == ['file'])
        self.assertTrue(d.right_list == ['file', 'file2'])
        self.assertTrue(d.common == ['file'])
        self.assertTrue(d.left_only == [])
        self.assertTrue(d.right_only == ['file2'])
        self.assertTrue(d.same_files == ['file'])
        self.assertTrue(d.diff_files == [])
        # Add different file2
        output = open(os.path.join(self.dir, 'file2'), 'w')
        output.write('Different contents.\n')
        output.close()
        d = filecmp.dircmp(self.dir, self.dir_diff)
        self.assertTrue(d.same_files == ['file'])
        self.assertTrue(d.diff_files == ['file2'])
def test_main():
    # regrtest entry point: run both test cases via the test_support helper.
    test_support.run_unittest(FileCompareTestCase, DirCompareTestCase)
if __name__ == "__main__":
test_main()
| gpl-3.0 |
pihao/shadowsocks | utils/autoban.py | 1033 | 2156 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, \
with_statement
import argparse
import os
import subprocess
import sys
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='See README')
    parser.add_argument('-c', '--count', default=3, type=int,
                        help='with how many failure times it should be '
                             'considered as an attack')
    config = parser.parse_args()
    ips = {}        # failure count per source IP seen on stdin
    banned = set()  # IPs already blocked via iptables
    for line in sys.stdin:
        if 'can not parse header when' in line:
            # Log format: the offending "ip:port" is the last token.
            ip = line.split()[-1].split(':')[0]
            if ip not in ips:
                ips[ip] = 1
                print(ip)
                sys.stdout.flush()
            else:
                ips[ip] += 1
            if ip not in banned and ips[ip] >= config.count:
                banned.add(ip)
                # SECURITY FIX: the 'ip' token comes from an untrusted log
                # line, so never interpolate it into a shell command string
                # (os.system) -- use the argument-list form of subprocess,
                # which bypasses the shell entirely.
                cmd = ['iptables', '-A', 'INPUT', '-s', ip, '-j', 'DROP']
                print(' '.join(cmd), file=sys.stderr)
                sys.stderr.flush()
                subprocess.call(cmd)
sonuyos/couchpotato | libs/requests/exceptions.py | 895 | 2517 | # -*- coding: utf-8 -*-
"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~
This module contains the set of Requests' exceptions.
"""
from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
class RequestException(IOError):
    """There was an ambiguous exception that occurred while handling your
    request."""
    def __init__(self, *args, **kwargs):
        """
        Initialize RequestException with `request` and `response` objects.

        `response` and `request` are consumed from kwargs (defaulting to
        None) and stored as attributes; remaining args/kwargs go to IOError.
        """
        response = kwargs.pop('response', None)
        self.response = response
        self.request = kwargs.pop('request', None)
        # Fall back to the request recorded on the response object when no
        # explicit `request` keyword was supplied.
        if (response is not None and not self.request and
                hasattr(response, 'request')):
            self.request = self.response.request
        super(RequestException, self).__init__(*args, **kwargs)
# HTTP-level failure; subclasses RequestException.
class HTTPError(RequestException):
    """An HTTP error occurred."""
# Base class for network-connection failures (see ProxyError, SSLError).
class ConnectionError(RequestException):
    """A Connection error occurred."""
# Proxy-specific connection failure; callers catching ConnectionError
# also catch this.
class ProxyError(ConnectionError):
    """A proxy error occurred."""
# TLS/SSL-specific connection failure; a subclass of ConnectionError.
class SSLError(ConnectionError):
    """An SSL error occurred."""
class Timeout(RequestException):
    """The request timed out.

    Catching this error will catch both
    :exc:`~requests.exceptions.ConnectTimeout` and
    :exc:`~requests.exceptions.ReadTimeout` errors.
    """
# Inherits from both ConnectionError and Timeout so either catch works.
class ConnectTimeout(ConnectionError, Timeout):
    """The request timed out while trying to connect to the remote server.

    Requests that produced this error are safe to retry.
    """
# Timed out waiting for response data after the connection was made.
class ReadTimeout(Timeout):
    """The server did not send any data in the allotted amount of time."""
class URLRequired(RequestException):
    """A valid URL is required to make a request."""
class TooManyRedirects(RequestException):
    """Too many redirects."""
class MissingSchema(RequestException, ValueError):
"""The URL schema (e.g. http or https) is missing."""
class InvalidSchema(RequestException, ValueError):
"""See defaults.py for valid schemas."""
class InvalidURL(RequestException, ValueError):
""" The URL provided was somehow invalid. """
class ChunkedEncodingError(RequestException):
"""The server declared chunked encoding but sent an invalid chunk."""
class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content"""
class StreamConsumedError(RequestException, TypeError):
"""The content for this response was already consumed"""
class RetryError(RequestException):
"""Custom retries logic failed"""
| gpl-3.0 |
scripnichenko/nova | nova/api/openstack/compute/legacy_v2/contrib/os_networks.py | 43 | 9162 | # Copyright 2011 Grid Dynamics
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import webob
from webob import exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import context as nova_context
from nova import exception
from nova.i18n import _
from nova import network
from nova.objects import base as base_obj
from nova.objects import fields as obj_fields
# Policy checks: `authorize` guards mutating operations while
# `authorize_view` guards the read-only index/show views.
authorize = extensions.extension_authorizer('compute', 'networks')
authorize_view = extensions.extension_authorizer('compute',
                                                 'networks:view')
# Extra fields that are only serialized when the os-extended-networks
# extension is loaded (see NetworkController.extended).
extended_fields = ('mtu', 'dhcp_server', 'enable_dhcp', 'share_address')
def network_dict(context, network, extended):
    """Serialize a network (dict or NovaObject) into an API response dict.

    :param context: request context; admin callers additionally receive
        the system-only fields listed in ``admin_fields``.
    :param network: network dict or NovaObject; a falsy value yields ``{}``.
    :param extended: when True, also include the os-extended-networks
        fields (module-level ``extended_fields``).
    :returns: mapping of field name to value; ``id`` is replaced by the
        network's uuid when one is present.
    """
    fields = ('id', 'cidr', 'netmask', 'gateway', 'broadcast', 'dns1', 'dns2',
              'cidr_v6', 'gateway_v6', 'label', 'netmask_v6')
    admin_fields = ('created_at', 'updated_at', 'deleted_at', 'deleted',
                    'injected', 'bridge', 'vlan', 'vpn_public_address',
                    'vpn_public_port', 'vpn_private_address', 'dhcp_start',
                    'project_id', 'host', 'bridge_interface', 'multi_host',
                    'priority', 'rxtx_base')
    if network:
        # NOTE(mnaser): We display a limited set of fields so users can know
        # what networks are available, extra system-only fields
        # are only visible if they are an admin.
        if context.is_admin:
            fields += admin_fields
        if extended:
            fields += extended_fields
        # TODO(mriedem): Remove the NovaObject type check once the
        # network.create API is returning objects.
        is_obj = isinstance(network, base_obj.NovaObject)
        result = {}
        for field in fields:
            # NOTE(mriedem): If network is an object, IPAddress fields need to
            # be cast to a string so they look the same in the response as
            # before the objects conversion.
            if is_obj and isinstance(network.fields[field].AUTO_TYPE,
                                     obj_fields.IPAddress):
                # NOTE(danms): Here, network should be an object, which could
                # have come from neutron and thus be missing most of the
                # attributes. Providing a default to get() avoids trying to
                # lazy-load missing attributes.
                val = network.get(field, None)
                if val is not None:
                    result[field] = str(val)
                else:
                    result[field] = val
            else:
                # It's either not an object or it's not an IPAddress field.
                result[field] = network.get(field, None)
        # Prefer the uuid as the externally visible id when available.
        uuid = network.get('uuid')
        if uuid:
            result['id'] = uuid
        return result
    else:
        return {}
class NetworkController(wsgi.Controller):
    """Legacy v2 API controller for the os-networks extension.

    Exposes CRUD plus associate/disassociate operations on nova-network
    networks.  Mutating calls require an admin context; read-only calls
    only need the networks:view policy.
    """

    def __init__(self, network_api=None, ext_mgr=None):
        self.network_api = network_api or network.API()
        # The os-extended-networks extension unlocks the extra fields in
        # the module-level `extended_fields`.
        if ext_mgr:
            self.extended = ext_mgr.is_loaded('os-extended-networks')
        else:
            self.extended = False

    def index(self, req):
        """List all networks visible to the caller."""
        context = req.environ['nova.context']
        authorize_view(context)
        networks = self.network_api.get_all(context)
        result = [network_dict(context, net_ref, self.extended)
                  for net_ref in networks]
        return {'networks': result}

    @wsgi.action("disassociate")
    def _disassociate_host_and_project(self, req, id, body):
        """Detach a network from both its host and its project (202)."""
        context = req.environ['nova.context']
        authorize(context)
        # NOTE(shaohe-feng): back-compatible with db layer hard-code
        # admin permission checks. call db API objects.Network.associate
        nova_context.require_admin_context(context)
        try:
            self.network_api.associate(context, id, host=None, project=None)
        except exception.NetworkNotFound:
            msg = _("Network not found")
            raise exc.HTTPNotFound(explanation=msg)
        except NotImplementedError:
            msg = _('Disassociate network is not implemented by the '
                    'configured Network API')
            raise exc.HTTPNotImplemented(explanation=msg)
        return webob.Response(status_int=202)

    def show(self, req, id):
        """Return details for a single network; 404 when unknown."""
        context = req.environ['nova.context']
        authorize_view(context)
        try:
            network = self.network_api.get(context, id)
        except exception.NetworkNotFound:
            msg = _("Network not found")
            raise exc.HTTPNotFound(explanation=msg)
        return {'network': network_dict(context, network, self.extended)}

    def delete(self, req, id):
        """Delete a network; 409 when still in use, 404 when unknown."""
        context = req.environ['nova.context']
        authorize(context)
        try:
            self.network_api.delete(context, id)
        except exception.NetworkInUse as e:
            raise exc.HTTPConflict(explanation=e.format_message())
        except exception.NetworkNotFound:
            msg = _("Network not found")
            raise exc.HTTPNotFound(explanation=msg)
        return webob.Response(status_int=202)

    def create(self, req, body):
        """Create a network from the request body and return it."""
        context = req.environ['nova.context']
        authorize(context)
        # NOTE(shaohe-feng): back-compatible with db layer hard-code
        # admin permission checks. call db API objects.Network.create
        nova_context.require_admin_context(context)

        def bad(e):
            # Shorthand for a 400 with the given explanation.
            return exc.HTTPBadRequest(explanation=e)

        if not (body and body.get("network")):
            raise bad(_("Missing network in body"))
        params = body["network"]
        if not params.get("label"):
            raise bad(_("Network label is required"))
        cidr = params.get("cidr") or params.get("cidr_v6")
        if not cidr:
            raise bad(_("Network cidr or cidr_v6 is required"))
        if params.get("project_id") == "":
            params["project_id"] = None
        params["num_networks"] = 1
        try:
            params["network_size"] = netaddr.IPNetwork(cidr).size
        except netaddr.AddrFormatError:
            msg = _('%s is not a valid ip network') % cidr
            raise exc.HTTPBadRequest(explanation=msg)
        if not self.extended:
            # Strip extension-only parameters when os-extended-networks
            # is not loaded so they never reach the network API.
            create_params = ('allowed_start', 'allowed_end')
            for field in extended_fields + create_params:
                if field in params:
                    del params[field]
        try:
            network = self.network_api.create(context, **params)[0]
        except (exception.InvalidCidr,
                exception.InvalidIntValue,
                exception.InvalidAddress,
                exception.NetworkNotCreated) as ex:
            # BUG FIX: format_message must be *called*; previously the
            # bound method object itself was passed as the explanation.
            raise exc.HTTPBadRequest(explanation=ex.format_message())
        except exception.CidrConflict as ex:
            raise exc.HTTPConflict(explanation=ex.format_message())
        return {"network": network_dict(context, network, self.extended)}

    def add(self, req, body):
        """Associate an existing network with the caller's project (202)."""
        context = req.environ['nova.context']
        authorize(context)
        # NOTE(shaohe-feng): back-compatible with db layer hard-code
        # admin permission checks. call db API objects.Network.associate
        nova_context.require_admin_context(context)
        if not body:
            raise exc.HTTPUnprocessableEntity()
        network_id = body.get('id', None)
        project_id = context.project_id
        try:
            self.network_api.add_network_to_project(
                context, project_id, network_id)
        except NotImplementedError:
            msg = (_("VLAN support must be enabled"))
            raise exc.HTTPNotImplemented(explanation=msg)
        except (exception.NoMoreNetworks,
                exception.NetworkNotFoundForUUID) as e:
            raise exc.HTTPBadRequest(explanation=e.format_message())
        return webob.Response(status_int=202)
class Os_networks(extensions.ExtensionDescriptor):
    """Admin-only Network Management Extension."""
    name = "Networks"
    alias = "os-networks"
    namespace = ("http://docs.openstack.org/compute/"
                 "ext/os-networks/api/v1.1")
    updated = "2011-12-23T00:00:00Z"

    def get_resources(self):
        """Register the os-networks resource with its extra routes."""
        # "action" enables POST /os-networks/<id>/action (used by the
        # disassociate action); "add" maps POST /os-networks/add.
        member_actions = {'action': 'POST'}
        collection_actions = {'add': 'POST'}
        res = extensions.ResourceExtension(
            'os-networks',
            NetworkController(ext_mgr=self.ext_mgr),
            member_actions=member_actions,
            collection_actions=collection_actions)
        return [res]
| apache-2.0 |
hlin117/statsmodels | statsmodels/sandbox/nonparametric/kernel_extras.py | 34 | 14055 | """
Multivariate Conditional and Unconditional Kernel Density Estimation
with Mixed Data Types
References
----------
[1] Racine, J., Li, Q. Nonparametric econometrics: theory and practice.
Princeton University Press. (2007)
[2] Racine, Jeff. "Nonparametric Econometrics: A Primer," Foundation
and Trends in Econometrics: Vol 3: No 1, pp1-88. (2008)
http://dx.doi.org/10.1561/0800000009
[3] Racine, J., Li, Q. "Nonparametric Estimation of Distributions
with Categorical and Continuous Data." Working Paper. (2000)
[4] Racine, J. Li, Q. "Kernel Estimation of Multivariate Conditional
Distributions Annals of Economics and Finance 5, 211-235 (2004)
[5] Liu, R., Yang, L. "Kernel estimation of multivariate
cumulative distribution function."
Journal of Nonparametric Statistics (2008)
[6] Li, R., Ju, G. "Nonparametric Estimation of Multivariate CDF
with Categorical and Continuous Data." Working Paper
[7] Li, Q., Racine, J. "Cross-validated local linear nonparametric
regression" Statistica Sinica 14(2004), pp. 485-512
[8] Racine, J.: "Consistent Significance Testing for Nonparametric
Regression" Journal of Business & Economics Statistics
[9] Racine, J., Hart, J., Li, Q., "Testing the Significance of
Categorical Predictor Variables in Nonparametric Regression
Models", 2006, Econometric Reviews 25, 523-544
"""
# TODO: make default behavior efficient=True above a certain n_obs
from statsmodels.compat.python import range, next
import numpy as np
from scipy import optimize
from scipy.stats.mstats import mquantiles
from statsmodels.nonparametric.api import KDEMultivariate, KernelReg
from statsmodels.nonparametric._kernel_base import \
gpke, LeaveOneOut, _get_type_pos, _adjust_shape
__all__ = ['SingleIndexModel', 'SemiLinear', 'TestFForm']
class TestFForm(object):
    """
    Nonparametric test for functional form.

    Parameters
    ----------
    endog: list
        Dependent variable (training set)
    exog: list of array_like objects
        The independent (right-hand-side) variables
    bw: array_like, str
        Bandwidths for exog or specify method for bandwidth selection
    fform: function
        The functional form ``y = g(b, x)`` to be tested. Takes as inputs
        the RHS variables `exog` and the coefficients ``b`` (betas)
        and returns a fitted ``y_hat``.
    var_type: str
        The type of the independent `exog` variables:

        - c: continuous
        - o: ordered
        - u: unordered

    estimator: function
        Must return the estimated coefficients b (betas). Takes as inputs
        ``(endog, exog)``. E.g. least square estimator::

            lambda x, y: np.dot(np.linalg.pinv(np.dot(x.T, x)),
                                np.dot(x.T, y))

    References
    ----------
    See Racine, J.: "Consistent Significance Testing for Nonparametric
    Regression" Journal of Business & Economics Statistics.
    See chapter 12 in [1] pp. 355-357.
    """
    def __init__(self, endog, exog, bw, var_type, fform, estimator, nboot=100):
        self.endog = endog
        self.exog = exog
        self.var_type = var_type
        self.fform = fform
        self.estimator = estimator
        self.nboot = nboot
        self.bw = KDEMultivariate(exog, bw=bw, var_type=var_type).bw
        # The test runs at construction time; the verdict is stored in
        # `self.sig` ("Not Significant", "*", "**" or "***").
        self.sig = self._compute_sig()

    def _compute_sig(self):
        """Bootstrap the distribution of the test statistic and classify
        the observed statistic by its 90/95/99% quantiles."""
        Y = self.endog
        X = self.exog
        b = self.estimator(Y, X)
        m = self.fform(X, b)
        n = np.shape(X)[0]
        resid = Y - m
        resid = resid - np.mean(resid)  # center residuals
        self.test_stat = self._compute_test_stat(resid)
        sqrt5 = np.sqrt(5.)
        # Two-point (golden-section) wild-bootstrap weights.
        fct1 = (1 - sqrt5) / 2.
        fct2 = (1 + sqrt5) / 2.
        u1 = fct1 * resid
        u2 = fct2 * resid
        r = fct2 / sqrt5
        I_dist = np.empty((self.nboot,1))
        for j in range(self.nboot):
            # Mix u1/u2 with probability r to form the bootstrap errors.
            u_boot = u2.copy()
            prob = np.random.uniform(0,1, size = (n,))
            ind = prob < r
            u_boot[ind] = u1[ind]
            Y_boot = m + u_boot
            b_hat = self.estimator(Y_boot, X)
            m_hat = self.fform(X, b_hat)
            u_boot_hat = Y_boot - m_hat
            I_dist[j] = self._compute_test_stat(u_boot_hat)
        self.boots_results = I_dist
        sig = "Not Significant"
        if self.test_stat > mquantiles(I_dist, 0.9):
            sig = "*"
        if self.test_stat > mquantiles(I_dist, 0.95):
            sig = "**"
        if self.test_stat > mquantiles(I_dist, 0.99):
            sig = "***"
        return sig

    def _compute_test_stat(self, u):
        """Compute the leave-one-out kernel statistic T for residuals u."""
        n = np.shape(u)[0]
        XLOO = LeaveOneOut(self.exog)
        uLOO = LeaveOneOut(u[:,None]).__iter__()
        I = 0
        S2 = 0
        for i, X_not_i in enumerate(XLOO):
            u_j = next(uLOO)
            u_j = np.squeeze(u_j)
            # See Bootstrapping procedure on p. 357 in [1]
            K = gpke(self.bw, data=-X_not_i, data_predict=-self.exog[i, :],
                     var_type=self.var_type, tosum=False)
            f_i = (u[i] * u_j * K)
            assert u_j.shape == K.shape
            I += f_i.sum()  # See eq. 12.7 on p. 355 in [1]
            S2 += (f_i**2).sum()  # See Theorem 12.1 on p.356 in [1]
        assert np.size(I) == 1
        assert np.size(S2) == 1
        I *= 1. / (n * (n - 1))
        ix_cont = _get_type_pos(self.var_type)[0]
        # hp: product of the continuous-variable bandwidths.
        hp = self.bw[ix_cont].prod()
        S2 *= 2 * hp / (n * (n - 1))
        T = n * I * np.sqrt(hp / S2)
        return T
class SingleIndexModel(KernelReg):
    """
    Single index semiparametric model ``y = g(X * b) + e``.

    Parameters
    ----------
    endog: array_like
        The dependent variable
    exog: array_like
        The independent variable(s)
    var_type: str
        The type of variables in X:

        - c: continuous
        - o: ordered
        - u: unordered

    Attributes
    ----------
    b: array_like
        The linear coefficients b (betas)
    bw: array_like
        Bandwidths

    Methods
    -------
    fit(): Computes the fitted values ``E[Y|X] = g(X * b)``
        and the marginal effects ``dY/dX``.

    References
    ----------
    See chapter on semiparametric models in [1]

    Notes
    -----
    This model resembles the binary choice models. The user knows
    that X and b interact linearly, but ``g(X * b)`` is unknown.
    In the parametric binary choice models the user usually assumes
    some distribution of g() such as normal or logistic.
    """
    def __init__(self, endog, exog, var_type):
        self.var_type = var_type
        self.K = len(var_type)
        # NOTE(review): only the first character of var_type is kept here,
        # which assumes every regressor shares one type -- confirm intended.
        self.var_type = self.var_type[0]
        self.endog = _adjust_shape(endog, 1)
        self.exog = _adjust_shape(exog, self.K)
        self.nobs = np.shape(self.exog)[0]
        self.data_type = self.var_type
        self.func = self._est_loc_linear
        # Coefficients and bandwidth are estimated jointly at construction.
        self.b, self.bw = self._est_b_bw()

    def _est_b_bw(self):
        """Jointly estimate betas and bandwidth by minimizing cv_loo."""
        params0 = np.random.uniform(size=(self.K + 1, ))
        b_bw = optimize.fmin(self.cv_loo, params0, disp=0)
        b = b_bw[0:self.K]
        bw = b_bw[self.K:]
        bw = self._set_bw_bounds(bw)
        return b, bw

    def cv_loo(self, params):
        """Leave-one-out cross-validation objective over (b, bw)."""
        # See p. 254 in Textbook
        params = np.asarray(params)
        b = params[0 : self.K]
        bw = params[self.K:]
        LOO_X = LeaveOneOut(self.exog)
        LOO_Y = LeaveOneOut(self.endog).__iter__()
        L = 0
        for i, X_not_i in enumerate(LOO_X):
            Y = next(LOO_Y)
            #print b.shape, np.dot(self.exog[i:i+1, :], b).shape, bw,
            G = self.func(bw, endog=Y, exog=-np.dot(X_not_i, b)[:,None],
                          #data_predict=-b*self.exog[i, :])[0]
                          data_predict=-np.dot(self.exog[i:i+1, :], b))[0]
            #print G.shape
            L += (self.endog[i] - G) ** 2
        # Note: There might be a way to vectorize this. See p.72 in [1]
        return L / self.nobs

    def fit(self, data_predict=None):
        """Return fitted conditional mean and marginal effects at
        data_predict (defaults to the training exog)."""
        if data_predict is None:
            data_predict = self.exog
        else:
            data_predict = _adjust_shape(data_predict, self.K)
        N_data_predict = np.shape(data_predict)[0]
        mean = np.empty((N_data_predict,))
        mfx = np.empty((N_data_predict, self.K))
        for i in range(N_data_predict):
            # Local-linear smooth of y on the single index X*b.
            mean_mfx = self.func(self.bw, self.endog,
                                 np.dot(self.exog, self.b)[:,None],
                                 data_predict=np.dot(data_predict[i:i+1, :],self.b))
            mean[i] = mean_mfx[0]
            mfx_c = np.squeeze(mean_mfx[1])
            mfx[i, :] = mfx_c
        return mean, mfx

    def __repr__(self):
        """Provide something sane to print."""
        repr = "Single Index Model \n"
        repr += "Number of variables: K = " + str(self.K) + "\n"
        repr += "Number of samples: nobs = " + str(self.nobs) + "\n"
        repr += "Variable types: " + self.var_type + "\n"
        repr += "BW selection method: cv_ls" + "\n"
        repr += "Estimator type: local constant" + "\n"
        return repr
class SemiLinear(KernelReg):
    """
    Semiparametric partially linear model, ``Y = Xb + g(Z) + e``.

    Parameters
    ----------
    endog: array_like
        The dependent variable
    exog: array_like
        The linear component in the regression
    exog_nonparametric: array_like
        The nonparametric component in the regression
    var_type: str
        The type of the variables in the nonparametric component;

        - c: continuous
        - o: ordered
        - u: unordered

    k_linear : int
        The number of variables that comprise the linear component.

    Attributes
    ----------
    bw: array_like
        Bandwidths for the nonparametric component exog_nonparametric
    b: array_like
        Coefficients in the linear component
    nobs : int
        The number of observations.
    k_linear : int
        The number of variables that comprise the linear component.

    Methods
    -------
    fit(): Returns the fitted mean and marginal effects dy/dz

    Notes
    -----
    This model uses only the local constant regression estimator

    References
    ----------
    See chapter on Semiparametric Models in [1]
    """
    def __init__(self, endog, exog, exog_nonparametric, var_type, k_linear):
        self.endog = _adjust_shape(endog, 1)
        self.exog = _adjust_shape(exog, k_linear)
        self.K = len(var_type)
        self.exog_nonparametric = _adjust_shape(exog_nonparametric, self.K)
        self.k_linear = k_linear
        self.nobs = np.shape(self.exog)[0]
        self.var_type = var_type
        self.data_type = self.var_type
        self.func = self._est_loc_linear
        # Coefficients and bandwidths are estimated jointly up front.
        self.b, self.bw = self._est_b_bw()

    def _est_b_bw(self):
        """
        Computes the (beta) coefficients and the bandwidths.
        Minimizes ``cv_loo`` with respect to ``b`` and ``bw``.
        """
        params0 = np.random.uniform(size=(self.k_linear + self.K, ))
        b_bw = optimize.fmin(self.cv_loo, params0, disp=0)
        b = b_bw[0 : self.k_linear]
        bw = b_bw[self.k_linear:]
        #bw = self._set_bw_bounds(np.asarray(bw))
        return b, bw

    def cv_loo(self, params):
        """
        Similar to the cross validation leave-one-out estimator.
        Modified to reflect the linear components.

        Parameters
        ----------
        params: array_like
            Vector consisting of the coefficients (b) and the bandwidths (bw).
            The first ``k_linear`` elements are the coefficients.

        Returns
        -------
        L: float
            The value of the objective function

        References
        ----------
        See p.254 in [1]
        """
        params = np.asarray(params)
        b = params[0 : self.k_linear]
        bw = params[self.k_linear:]
        LOO_X = LeaveOneOut(self.exog)
        LOO_Y = LeaveOneOut(self.endog).__iter__()
        LOO_Z = LeaveOneOut(self.exog_nonparametric).__iter__()
        Xb = np.dot(self.exog, b)[:,None]
        L = 0
        for ii, X_not_i in enumerate(LOO_X):
            Y = next(LOO_Y)
            Z = next(LOO_Z)
            # Residualize the held-out sample on the linear part, then
            # smooth the remainder on the nonparametric regressors.
            Xb_j = np.dot(X_not_i, b)[:,None]
            Yx = Y - Xb_j
            G = self.func(bw, endog=Yx, exog=-Z,
                          data_predict=-self.exog_nonparametric[ii, :])[0]
            lt = Xb[ii, :] #.sum()  # linear term
            L += (self.endog[ii] - lt - G) ** 2
        return L

    def fit(self, exog_predict=None, exog_nonparametric_predict=None):
        """Computes fitted values and marginal effects"""
        if exog_predict is None:
            exog_predict = self.exog
        else:
            exog_predict = _adjust_shape(exog_predict, self.k_linear)
        if exog_nonparametric_predict is None:
            exog_nonparametric_predict = self.exog_nonparametric
        else:
            exog_nonparametric_predict = _adjust_shape(exog_nonparametric_predict, self.K)
        N_data_predict = np.shape(exog_nonparametric_predict)[0]
        mean = np.empty((N_data_predict,))
        mfx = np.empty((N_data_predict, self.K))
        # Remove the linear contribution, then smooth the remainder on Z.
        Y = self.endog - np.dot(exog_predict, self.b)[:,None]
        for i in range(N_data_predict):
            mean_mfx = self.func(self.bw, Y, self.exog_nonparametric,
                                 data_predict=exog_nonparametric_predict[i, :])
            mean[i] = mean_mfx[0]
            mfx_c = np.squeeze(mean_mfx[1])
            mfx[i, :] = mfx_c
        return mean, mfx

    def __repr__(self):
        """Provide something sane to print."""
        # NOTE(review): "Semiparamatric" below is a typo in user-visible
        # output; left unchanged because this edit only adds comments.
        repr = "Semiparamatric Partially Linear Model \n"
        repr += "Number of variables: K = " + str(self.K) + "\n"
        repr += "Number of samples: N = " + str(self.nobs) + "\n"
        repr += "Variable types: " + self.var_type + "\n"
        repr += "BW selection method: cv_ls" + "\n"
        repr += "Estimator type: local constant" + "\n"
        return repr
| bsd-3-clause |
EricMuller/mywebmarks-backend | requirements/twisted/Twisted-17.1.0/docs/names/examples/gethostbyname.py | 2 | 1793 | #!/usr/bin/env python
# -*- test-case-name: twisted.names.test.test_examples -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Print the IP address for a given hostname. eg
python gethostbyname.py www.google.com
This script does a host lookup using the default Twisted Names
resolver, a chained resolver, which attempts to lookup a name from:
* local hosts file
* memory cache of previous lookup results
* system recursive DNS servers
"""
import sys
from twisted.names import client, error
from twisted.internet.task import react
from twisted.python import usage
class Options(usage.Options):
    """Command-line options: a single positional HOSTNAME argument."""
    synopsis = 'Usage: gethostbyname.py HOSTNAME'

    def parseArgs(self, hostname):
        # Called by twisted.python.usage with the positional arguments.
        self['hostname'] = hostname
def printResult(address, hostname):
    """
    Print the IP address or an error message if an IP address was not
    found.
    """
    if not address:
        # Lookup produced no usable address: report on stderr.
        sys.stderr.write(
            'ERROR: No IP addresses found for name %r\n' % (hostname,))
    else:
        sys.stdout.write(address + '\n')
def printError(failure, hostname):
    """
    Print a friendly error message if the hostname could not be
    resolved.
    """
    # trap() re-raises anything other than DNSNameError, so unexpected
    # failures still propagate down the errback chain.
    failure.trap(error.DNSNameError)
    sys.stderr.write('ERROR: hostname not found %r\n' % (hostname,))
def main(reactor, *argv):
    """`react` entry point: parse argv, resolve the hostname, print it.

    Returns the lookup Deferred so `react` keeps the reactor running
    until the resolution completes (exit code 1 on usage errors).
    """
    options = Options()
    try:
        options.parseOptions(argv)
    except usage.UsageError as errortext:
        sys.stderr.write(str(options) + '\n')
        sys.stderr.write('ERROR: %s\n' % (errortext,))
        raise SystemExit(1)
    hostname = options['hostname']
    # getHostByName uses the default chained resolver: hosts file,
    # in-memory cache, then the system's recursive DNS servers.
    d = client.getHostByName(hostname)
    d.addCallback(printResult, hostname)
    d.addErrback(printError, hostname)
    return d
if __name__ == '__main__':
react(main, sys.argv[1:])
| mit |
jsqihui/reimplement-paper | facenet/tensorflow/prepare_data.py | 1 | 2456 | """download mscelebrities dataset"""
import os
import utils
import requests
import urllib
from tqdm import tqdm
def download_imdb(face_only=True, data_dir="./data"):
    """ https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/ """
    # NOTE(review): full_urls and full_meta are currently unused; kept for
    # the pending "download all imdb data" branch below.
    full_urls = ["https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_0.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_1.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_2.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_3.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_4.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_5.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_6.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_7.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_8.tar",
                 "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_9.tar",
                 ]
    full_meta = "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_crop.tar"
    face_url = "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/imdb_crop.tar"
    imdb_face = os.path.join(data_dir, "imdb_face.tar")
    if face_only:
        # Refuse to overwrite an already-downloaded archive.
        if os.path.isfile(imdb_face):
            raise Exception("file %s exists" % imdb_face)
        utils.download_file(face_url, imdb_face)
    else:
        # TODO download all imdb data
        pass
def download_wiki(data_dir="./data"):
    """ https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/ """
    face_url = "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/wiki_crop.tar"
    face_meta_url = "https://data.vision.ee.ethz.ch/cvl/rrothe/imdb-wiki/static/wiki.tar.gz"
    wiki_face = os.path.join(data_dir, "wiki_face.tar")
    wiki_meta = os.path.join(data_dir, "wiki_meta.tar.gz")
    # download faces
    # NOTE(review): raising when the faces archive exists also skips the
    # metadata download below -- confirm that is the intended behavior.
    if os.path.isfile(wiki_face):
        raise Exception("file %s exists" % wiki_face)
    else:
        utils.download_file(face_url, wiki_face)
    if os.path.isfile(wiki_meta):
        raise Exception("file %s exists" % wiki_meta)
    else:
        utils.download_file(face_meta_url, wiki_meta)
def main():
    """Entry point: fetch the wiki faces + metadata (imdb fetch disabled)."""
    # download_imdb()
    download_wiki()
if __name__ == "__main__":
main()
| apache-2.0 |
AnhellO/DAS_Sistemas | Ago-Dic-2018/Daniel Enriquez/ExamenExtraordinario/facade.py | 1 | 3071 |
#Clases donde cada clase es un test diferente
class TestUnitario:
    """Unit-test suite; prints its banner and canned results to stdout."""

    _REPORTE = (
        "--- Test Unitario ---",
        "Realizando Test Unitario...",
        "Resultados: -53,15\n",
    )

    def run(self):
        """Emit the unit-test report."""
        for linea in self._REPORTE:
            print(linea)
class TestRegresion:
    """Regression-test suite; prints its banner and canned results."""

    _REPORTE = (
        "---- Test De Regresion ---",
        "Realizando Test De Regresion...",
        "Resultados: 20,20\n",
    )

    def run(self):
        """Emit the regression-test report."""
        for linea in self._REPORTE:
            print(linea)
class TestIntegracion:
    """Integration-test suite; prints its banner and canned results."""

    _REPORTE = (
        "--- Test De Integracion ---",
        "Realizando Test De Integracion...",
        "Resultados: 0,-1\n",
    )

    def run(self):
        """Emit the integration-test report."""
        for linea in self._REPORTE:
            print(linea)
# Facade: runs the test suites in the requested combinations so the client
# never has to drive each suite one by one.
class TestEjecutar:
    def __init__(self):
        self.testUnitario = TestUnitario()
        self.testRegresion = TestRegresion()
        self.testIntegracion = TestIntegracion()
        self.tests = [self.testUnitario, self.testRegresion, self.testIntegracion]

    # Runs every registered test suite.
    def AllTest(self):
        print("--- Todos Los Test ---\n")
        [i.run() for i in self.tests]

    # Runs only the regression and integration suites.
    def IntegracionAndRegresion(self):
        print("--- Test De Integracion y Regresion ---\n")
        print("Realizando Test De Integracion y Regresion...")
        self.testRegresion.run()
        self.testIntegracion.run()

    # Runs only the unit and regression suites.
    def UnitarioAndRegresion(self):
        print("--- Test Unitario y de Regresion ---\n")
        print("Realizando Test Unitario y de Regresion...")
        self.testUnitario.run()
        self.testRegresion.run()

    # Runs only the unit and integration suites.
    def UnitarioAndIntegracion(self):
        print("--- Test Unitario y de Integracion ---\n")
        print("Realizando Test Unitario y de Integracion...")
        self.testUnitario.run()
        self.testIntegracion.run()
# Client code: it can only run the tests -- all at once or in the fixed
# combinations -- through the facade; it has no access to modify the test
# classes themselves.
if __name__ == '__main__':
    while True:
        # NOTE(review): int(input(...)) raises ValueError on non-numeric
        # input; there is no guard here.
        seleccionado = int(input("Seleccione Test a realizar:\n"+"1.- Todos los test\n"+"2.Test De Integracion y Regresion\n"+"3.Test Unitario y Regresion\n"+"4.Test Unitario y De Integracion\n"+"5.-Salir\n"))
        if seleccionado==1:
            testEjecutados = TestEjecutar()
            testEjecutados.AllTest()
            pass
        if seleccionado==2:
            testJuntos=TestEjecutar()
            testJuntos.IntegracionAndRegresion()
        if seleccionado==3:
            testJuntos2=TestEjecutar()
            testJuntos2.UnitarioAndRegresion()
        if seleccionado==4:
            testJuntos3=TestEjecutar()
            testJuntos3.UnitarioAndIntegracion()
        if seleccionado==5:
            print("--- Gracias! ---\n")
            break
| mit |
apc-io/apc_8950_external_wpa_supplicant_8 | wpa_supplicant/examples/wpas-test.py | 189 | 2571 | #!/usr/bin/python
import dbus
import sys, os
import time
WPAS_DBUS_SERVICE = "fi.epitest.hostap.WPASupplicant"
WPAS_DBUS_INTERFACE = "fi.epitest.hostap.WPASupplicant"
WPAS_DBUS_OPATH = "/fi/epitest/hostap/WPASupplicant"
WPAS_DBUS_INTERFACES_INTERFACE = "fi.epitest.hostap.WPASupplicant.Interface"
WPAS_DBUS_INTERFACES_OPATH = "/fi/epitest/hostap/WPASupplicant/Interfaces"
WPAS_DBUS_BSSID_INTERFACE = "fi.epitest.hostap.WPASupplicant.BSSID"
def byte_array_to_string(s):
    # Render a D-Bus byte array as text: printable ASCII passes through,
    # everything else is percent-escaped (Python 2: urllib.quote).
    import urllib
    r = ""
    for c in s:
        if c >= 32 and c < 127:
            r += "%c" % c
        else:
            r += urllib.quote(chr(c))
    return r
def main():
    # Python 2 script: attach to (or register) the given interface with
    # wpa_supplicant over D-Bus, trigger a scan and print the results.
    if len(sys.argv) != 2:
        print "Usage: wpas-test.py <interface>"
        os._exit(1)
    ifname = sys.argv[1]
    bus = dbus.SystemBus()
    wpas_obj = bus.get_object(WPAS_DBUS_SERVICE, WPAS_DBUS_OPATH)
    wpas = dbus.Interface(wpas_obj, WPAS_DBUS_INTERFACE)
    # See if wpa_supplicant already knows about this interface
    path = None
    try:
        path = wpas.getInterface(ifname)
    except dbus.dbus_bindings.DBusException, exc:
        if str(exc) != "wpa_supplicant knows nothing about this interface.":
            raise exc
        # Not registered yet: add it with the wext driver.
        try:
            path = wpas.addInterface(ifname, {'driver': dbus.Variant('wext')})
        except dbus.dbus_bindings.DBusException, exc:
            if str(exc) != "wpa_supplicant already controls this interface.":
                raise exc
    if_obj = bus.get_object(WPAS_DBUS_SERVICE, path)
    iface = dbus.Interface(if_obj, WPAS_DBUS_INTERFACES_INTERFACE)
    iface.scan()
    # Should really wait for the "scanResults" signal instead of sleeping
    time.sleep(5)
    res = iface.scanResults()
    print "Scanned wireless networks:"
    for opath in res:
        net_obj = bus.get_object(WPAS_DBUS_SERVICE, opath)
        net = dbus.Interface(net_obj, WPAS_DBUS_BSSID_INTERFACE)
        props = net.properties()
        # Convert the byte-array for SSID and BSSID to printable strings
        bssid = ""
        for item in props["bssid"]:
            bssid = bssid + ":%02x" % item
        bssid = bssid[1:]
        ssid = byte_array_to_string(props["ssid"])
        wpa = "no"
        if props.has_key("wpaie"):
            wpa = "yes"
        wpa2 = "no"
        if props.has_key("rsnie"):
            wpa2 = "yes"
        freq = 0
        if props.has_key("frequency"):
            freq = props["frequency"]
        caps = props["capabilities"]
        qual = props["quality"]
        level = props["level"]
        noise = props["noise"]
        # maxrate arrives in bits/s; report Mb/s.
        maxrate = props["maxrate"] / 1000000
        print " %s :: ssid='%s' wpa=%s wpa2=%s quality=%d%% rate=%d freq=%d" % (bssid, ssid, wpa, wpa2, qual, maxrate, freq)
    wpas.removeInterface(dbus.ObjectPath(path))
    # Should fail here with unknown interface error
    iface.scan()
if __name__ == "__main__":
main()
| gpl-2.0 |
st135yle/django-site | dbenv/lib/python3.4/site-packages/django/conf/locale/pt/formats.py | 504 | 1717 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \d\e F \d\e Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0  # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d', '%d/%m/%Y', '%d/%m/%y',  # '2006-10-25', '25/10/2006', '25/10/06'
    # '%d de %b de %Y', '%d de %b, %Y',  # '25 de Out de 2006', '25 Out, 2006'
    # '%d de %B de %Y', '%d de %B, %Y',  # '25 de Outubro de 2006', '25 de Outubro, 2006'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',  # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',  # '2006-10-25 14:30'
    '%Y-%m-%d',  # '2006-10-25'
    '%d/%m/%Y %H:%M:%S',  # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M:%S.%f',  # '25/10/2006 14:30:59.000200'
    '%d/%m/%Y %H:%M',  # '25/10/2006 14:30'
    '%d/%m/%Y',  # '25/10/2006'
    '%d/%m/%y %H:%M:%S',  # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M:%S.%f',  # '25/10/06 14:30:59.000200'
    '%d/%m/%y %H:%M',  # '25/10/06 14:30'
    '%d/%m/%y',  # '25/10/06'
]
# Portuguese number formatting: comma decimal mark, dot thousands
# separator, digits grouped in threes.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| mit |
pyconjp/pyconjp-website | localeurlcustom/templatetags/localeurl_tags.py | 1 | 2320 | from django import template
from django.template import Node, Token, TemplateSyntaxError
from django.template import resolve_variable
from django.template.defaultfilters import stringfilter
from django.templatetags import future
from localeurlcustom import utils
register = template.Library()
def chlocale(url, locale):
    """
    Changes the URL's locale prefix if the path is not locale-independent.
    Otherwise removes locale prefix.
    """
    # Strip the script prefix and any existing locale first;
    # utils.locale_url decides whether the path takes a locale at all.
    _, path = utils.strip_script_prefix(url)
    _, path = utils.strip_path(path)
    return utils.locale_url(path, locale)
# Register as a string-coercing template filter: {{ url|chlocale:"de" }}
chlocale = stringfilter(chlocale)
register.filter('chlocale', chlocale)
def rmlocale(url):
    """Removes the locale prefix from the URL."""
    # Unlike chlocale, the script prefix is preserved in the result.
    script_prefix, path = utils.strip_script_prefix(url)
    _, path = utils.strip_path(path)
    return ''.join([script_prefix, path])
# Register as a string-coercing template filter: {{ url|rmlocale }}
rmlocale = stringfilter(rmlocale)
register.filter('rmlocale', rmlocale)
def locale_url(parser, token):
    """
    Renders the url for the view with another locale prefix. The syntax is
    like the 'url' tag, only with a locale before the view.

    Examples:
      {% locale_url "de" cal.views.day day %}
      {% locale_url "nl" cal.views.home %}
      {% locale_url "en-gb" cal.views.month month as month_url %}
    """
    bits = token.split_contents()
    if len(bits) < 3:
        raise TemplateSyntaxError("'%s' takes at least two arguments:"
                                  " the locale and a view" % bits[0])
    # Re-tokenize without the locale argument and delegate parsing to the
    # standard `url` tag; wrap its node so render() can swap the locale.
    urltoken = Token(token.token_type, bits[0] + ' ' + ' '.join(bits[2:]))
    urlnode = future.url(parser, urltoken)
    return LocaleURLNode(bits[1], urlnode)
class LocaleURLNode(Node):
    """Node that renders a URL (via the wrapped `url`-tag node) with its
    locale prefix swapped to the requested locale.

    Raises ValueError at render time when the locale is not one of
    settings.LANGUAGES.
    """

    def __init__(self, locale, urlnode):
        self.locale = locale
        self.urlnode = urlnode

    def render(self, context):
        locale = resolve_variable(self.locale, context)
        if utils.supported_language(locale) is None:
            raise ValueError("locale not in settings.LANGUAGES: %s" % locale)
        # The wrapped node either returns the path directly, or (with the
        # "as var" form) stores it in the context and returns ''.
        path = self.urlnode.render(context)
        if self.urlnode.asvar:
            # BUG FIX: the wrapped node was previously rendered a second
            # time here; reuse the value it already stored in the context.
            context[self.urlnode.asvar] = chlocale(
                context[self.urlnode.asvar], locale)
            return ''
        else:
            return chlocale(path, locale)
register.tag('locale_url', locale_url)
| bsd-3-clause |
peterlauri/django | tests/generic_views/test_edit.py | 43 | 18189 | from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.test import SimpleTestCase, TestCase, override_settings
from django.test.client import RequestFactory
from django.urls import reverse
from django.views.generic.base import View
from django.views.generic.edit import CreateView, FormMixin, ModelFormMixin
from . import views
from .forms import AuthorForm
from .models import Artist, Author
class FormMixinTests(SimpleTestCase):
    """Unit tests for FormMixin, exercised directly without URL routing."""
    def test_initial_data(self):
        """ Test instance independence of initial data dict (see #16138) """
        initial_1 = FormMixin().get_initial()
        initial_1['foo'] = 'bar'
        initial_2 = FormMixin().get_initial()
        self.assertNotEqual(initial_1, initial_2)
    def test_get_prefix(self):
        """ Test prefix can be set (see #18872) """
        test_string = 'test'
        rf = RequestFactory()
        get_request = rf.get('/')
        class TestFormMixin(FormMixin):
            request = get_request
        # Default: no prefix in the form kwargs.
        default_kwargs = TestFormMixin().get_form_kwargs()
        self.assertIsNone(default_kwargs.get('prefix'))
        # Setting .prefix on the instance propagates into get_form_kwargs().
        set_mixin = TestFormMixin()
        set_mixin.prefix = test_string
        set_kwargs = set_mixin.get_form_kwargs()
        self.assertEqual(test_string, set_kwargs.get('prefix'))
    def test_get_form(self):
        """get_form() uses the explicit class or falls back to get_form_class()."""
        class TestFormMixin(FormMixin):
            request = RequestFactory().get('/')
        self.assertIsInstance(
            TestFormMixin().get_form(forms.Form), forms.Form,
            'get_form() should use provided form class.'
        )
        class FormClassTestFormMixin(TestFormMixin):
            form_class = forms.Form
        self.assertIsInstance(
            FormClassTestFormMixin().get_form(), forms.Form,
            'get_form() should fallback to get_form_class() if none is provided.'
        )
    def test_get_context_data(self):
        """get_context_data() exposes the bound form under the 'form' key."""
        class FormContext(FormMixin):
            request = RequestFactory().get('/')
            form_class = forms.Form
        self.assertIsInstance(FormContext().get_context_data()['form'], forms.Form)
@override_settings(ROOT_URLCONF='generic_views.urls')
class BasicFormTests(TestCase):
    """End-to-end FormView tests driven through the test client."""
    def test_post_data(self):
        # A valid POST redirects to the configured success URL.
        res = self.client.post('/contact/', {'name': "Me", 'message': "Hello"})
        self.assertRedirects(res, '/list/authors/')
    def test_late_form_validation(self):
        """
        A form can be marked invalid in the form_valid() method (#25548).
        """
        res = self.client.post('/late-validation/', {'name': "Me", 'message': "Hello"})
        self.assertFalse(res.context['form'].is_valid())
class ModelFormMixinTests(SimpleTestCase):
    """Unit tests for ModelFormMixin form-class and form-kwargs resolution."""
    def test_get_form(self):
        # get_form_class() derives the model from the view's queryset when no
        # explicit model attribute is set.
        form_class = views.AuthorGetQuerySetFormView().get_form_class()
        self.assertEqual(form_class._meta.model, Author)
    def test_get_form_checks_for_object(self):
        # Without self.object set, get_form_kwargs() omits the instance.
        mixin = ModelFormMixin()
        mixin.request = RequestFactory().get('/')
        self.assertEqual({'initial': {}, 'prefix': None},
                         mixin.get_form_kwargs())
@override_settings(ROOT_URLCONF='generic_views.urls')
class CreateViewTests(TestCase):
    """Tests for CreateView via the test client (URLs in generic_views.urls)."""
    def test_create(self):
        """GET renders an unbound ModelForm; valid POST creates and redirects."""
        res = self.client.get('/edit/authors/create/')
        self.assertEqual(res.status_code, 200)
        self.assertIsInstance(res.context['form'], forms.ModelForm)
        self.assertIsInstance(res.context['view'], View)
        self.assertNotIn('object', res.context)
        self.assertNotIn('author', res.context)
        self.assertTemplateUsed(res, 'generic_views/author_form.html')
        res = self.client.post('/edit/authors/create/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/list/authors/')
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe>'])
    def test_create_invalid(self):
        """An invalid POST re-renders the form with errors and saves nothing."""
        res = self.client.post('/edit/authors/create/', {'name': 'A' * 101, 'slug': 'randall-munroe'})
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/author_form.html')
        self.assertEqual(len(res.context['form'].errors), 1)
        self.assertEqual(Author.objects.count(), 0)
    def test_create_with_object_url(self):
        """Without success_url, the view redirects to get_absolute_url()."""
        res = self.client.post('/edit/artists/create/', {'name': 'Rene Magritte'})
        self.assertEqual(res.status_code, 302)
        artist = Artist.objects.get(name='Rene Magritte')
        self.assertRedirects(res, '/detail/artist/%d/' % artist.pk)
        self.assertQuerysetEqual(Artist.objects.all(), ['<Artist: Rene Magritte>'])
    def test_create_with_redirect(self):
        """An explicit success_url wins over the object's URL."""
        res = self.client.post('/edit/authors/create/redirect/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/edit/authors/create/')
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe>'])
    def test_create_with_interpolated_redirect(self):
        """success_url may interpolate the new object's fields (e.g. {id})."""
        res = self.client.post(
            '/edit/authors/create/interpolate_redirect/',
            {'name': 'Randall Munroe', 'slug': 'randall-munroe'}
        )
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe>'])
        self.assertEqual(res.status_code, 302)
        pk = Author.objects.first().pk
        self.assertRedirects(res, '/edit/author/%d/update/' % pk)
        # Also test with escaped chars in URL
        res = self.client.post(
            '/edit/authors/create/interpolate_redirect_nonascii/',
            {'name': 'John Doe', 'slug': 'john-doe'}
        )
        self.assertEqual(res.status_code, 302)
        pk = Author.objects.get(name='John Doe').pk
        self.assertRedirects(res, '/%C3%A9dit/author/{}/update/'.format(pk))
    def test_create_with_special_properties(self):
        """Custom form_class/template/context overrides are honored."""
        res = self.client.get('/edit/authors/create/special/')
        self.assertEqual(res.status_code, 200)
        self.assertIsInstance(res.context['form'], views.AuthorForm)
        self.assertNotIn('object', res.context)
        self.assertNotIn('author', res.context)
        self.assertTemplateUsed(res, 'generic_views/form.html')
        res = self.client.post('/edit/authors/create/special/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        self.assertEqual(res.status_code, 302)
        obj = Author.objects.get(slug='randall-munroe')
        self.assertRedirects(res, reverse('author_detail', kwargs={'pk': obj.pk}))
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe>'])
    def test_create_without_redirect(self):
        # No success_url and the model has no get_absolute_url -> error.
        with self.assertRaises(ImproperlyConfigured):
            self.client.post('/edit/authors/create/naive/', {'name': 'Randall Munroe', 'slug': 'randall-munroe'})
    def test_create_restricted(self):
        # login_required-decorated view bounces anonymous users to login.
        res = self.client.post(
            '/edit/authors/create/restricted/',
            {'name': 'Randall Munroe', 'slug': 'randall-munroe'}
        )
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/accounts/login/?next=/edit/authors/create/restricted/')
    def test_create_view_with_restricted_fields(self):
        """fields=[...] limits the generated ModelForm's fields."""
        class MyCreateView(CreateView):
            model = Author
            fields = ['name']
        self.assertEqual(list(MyCreateView().get_form_class().base_fields), ['name'])
    def test_create_view_all_fields(self):
        """fields='__all__' exposes every model field."""
        class MyCreateView(CreateView):
            model = Author
            fields = '__all__'
        self.assertEqual(list(MyCreateView().get_form_class().base_fields), ['name', 'slug'])
    def test_create_view_without_explicit_fields(self):
        """Omitting 'fields' entirely is an explicit configuration error."""
        class MyCreateView(CreateView):
            model = Author
        message = (
            "Using ModelFormMixin (base class of MyCreateView) without the "
            "'fields' attribute is prohibited."
        )
        with self.assertRaisesMessage(ImproperlyConfigured, message):
            MyCreateView().get_form_class()
    def test_define_both_fields_and_form_class(self):
        """Supplying both 'fields' and 'form_class' is rejected."""
        class MyCreateView(CreateView):
            model = Author
            form_class = AuthorForm
            fields = ['name']
        message = "Specifying both 'fields' and 'form_class' is not permitted."
        with self.assertRaisesMessage(ImproperlyConfigured, message):
            MyCreateView().get_form_class()
@override_settings(ROOT_URLCONF='generic_views.urls')
class UpdateViewTests(TestCase):
    """Tests for UpdateView via the test client."""
    def test_update_post(self):
        """GET renders a form bound to the object; valid POST saves and redirects."""
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        res = self.client.get('/edit/author/%d/update/' % a.pk)
        self.assertEqual(res.status_code, 200)
        self.assertIsInstance(res.context['form'], forms.ModelForm)
        self.assertEqual(res.context['object'], Author.objects.get(pk=a.pk))
        self.assertEqual(res.context['author'], Author.objects.get(pk=a.pk))
        self.assertTemplateUsed(res, 'generic_views/author_form.html')
        # get_form() must only be called once per request (regression guard).
        self.assertEqual(res.context['view'].get_form_called_count, 1)
        # Modification with both POST and PUT (browser compatible)
        res = self.client.post(
            '/edit/author/%d/update/' % a.pk,
            {'name': 'Randall Munroe (xkcd)', 'slug': 'randall-munroe'}
        )
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/list/authors/')
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe (xkcd)>'])
    def test_update_invalid(self):
        """An invalid POST re-renders with errors and leaves the object unchanged."""
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        res = self.client.post('/edit/author/%d/update/' % a.pk, {'name': 'A' * 101, 'slug': 'randall-munroe'})
        self.assertEqual(res.status_code, 200)
        self.assertTemplateUsed(res, 'generic_views/author_form.html')
        self.assertEqual(len(res.context['form'].errors), 1)
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe>'])
        self.assertEqual(res.context['view'].get_form_called_count, 1)
    def test_update_with_object_url(self):
        """Without success_url the view redirects to get_absolute_url()."""
        a = Artist.objects.create(name='Rene Magritte')
        res = self.client.post('/edit/artists/%d/update/' % a.pk, {'name': 'Rene Magritte'})
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/detail/artist/%d/' % a.pk)
        self.assertQuerysetEqual(Artist.objects.all(), ['<Artist: Rene Magritte>'])
    def test_update_with_redirect(self):
        """An explicit success_url wins over the object's URL."""
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        res = self.client.post(
            '/edit/author/%d/update/redirect/' % a.pk,
            {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
        )
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/edit/authors/create/')
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe (author of xkcd)>'])
    def test_update_with_interpolated_redirect(self):
        """success_url may interpolate the updated object's fields."""
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        res = self.client.post(
            '/edit/author/%d/update/interpolate_redirect/' % a.pk,
            {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
        )
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe (author of xkcd)>'])
        self.assertEqual(res.status_code, 302)
        pk = Author.objects.first().pk
        self.assertRedirects(res, '/edit/author/%d/update/' % pk)
        # Also test with escaped chars in URL
        res = self.client.post(
            '/edit/author/%d/update/interpolate_redirect_nonascii/' % a.pk,
            {'name': 'John Doe', 'slug': 'john-doe'}
        )
        self.assertEqual(res.status_code, 302)
        pk = Author.objects.get(name='John Doe').pk
        self.assertRedirects(res, '/%C3%A9dit/author/{}/update/'.format(pk))
    def test_update_with_special_properties(self):
        """Custom form_class/template/context_object_name overrides are honored."""
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        res = self.client.get('/edit/author/%d/update/special/' % a.pk)
        self.assertEqual(res.status_code, 200)
        self.assertIsInstance(res.context['form'], views.AuthorForm)
        self.assertEqual(res.context['object'], Author.objects.get(pk=a.pk))
        self.assertEqual(res.context['thingy'], Author.objects.get(pk=a.pk))
        self.assertNotIn('author', res.context)
        self.assertTemplateUsed(res, 'generic_views/form.html')
        res = self.client.post(
            '/edit/author/%d/update/special/' % a.pk,
            {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
        )
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/detail/author/%d/' % a.pk)
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe (author of xkcd)>'])
    def test_update_without_redirect(self):
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        # Should raise exception -- No redirect URL provided, and no
        # get_absolute_url provided
        with self.assertRaises(ImproperlyConfigured):
            self.client.post(
                '/edit/author/%d/update/naive/' % a.pk,
                {'name': 'Randall Munroe (author of xkcd)', 'slug': 'randall-munroe'}
            )
    def test_update_get_object(self):
        """A view overriding get_object() works without a pk in the URL."""
        a = Author.objects.create(
            pk=1,
            name='Randall Munroe',
            slug='randall-munroe',
        )
        res = self.client.get('/edit/author/update/')
        self.assertEqual(res.status_code, 200)
        self.assertIsInstance(res.context['form'], forms.ModelForm)
        self.assertIsInstance(res.context['view'], View)
        self.assertEqual(res.context['object'], Author.objects.get(pk=a.pk))
        self.assertEqual(res.context['author'], Author.objects.get(pk=a.pk))
        self.assertTemplateUsed(res, 'generic_views/author_form.html')
        # Modification with both POST and PUT (browser compatible)
        res = self.client.post('/edit/author/update/', {'name': 'Randall Munroe (xkcd)', 'slug': 'randall-munroe'})
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/list/authors/')
        self.assertQuerysetEqual(Author.objects.all(), ['<Author: Randall Munroe (xkcd)>'])
@override_settings(ROOT_URLCONF='generic_views.urls')
class DeleteViewTests(TestCase):
    """Tests for DeleteView via the test client."""
    def test_delete_by_post(self):
        """GET shows the confirmation page; POST performs the deletion."""
        a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        res = self.client.get('/edit/author/%d/delete/' % a.pk)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.context['object'], Author.objects.get(pk=a.pk))
        self.assertEqual(res.context['author'], Author.objects.get(pk=a.pk))
        self.assertTemplateUsed(res, 'generic_views/author_confirm_delete.html')
        # Deletion with POST
        res = self.client.post('/edit/author/%d/delete/' % a.pk)
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/list/authors/')
        self.assertQuerysetEqual(Author.objects.all(), [])
    def test_delete_by_delete(self):
        # Deletion with browser compatible DELETE method
        a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        res = self.client.delete('/edit/author/%d/delete/' % a.pk)
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/list/authors/')
        self.assertQuerysetEqual(Author.objects.all(), [])
    def test_delete_with_redirect(self):
        """An explicit success_url is used after deletion."""
        a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        res = self.client.post('/edit/author/%d/delete/redirect/' % a.pk)
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/edit/authors/create/')
        self.assertQuerysetEqual(Author.objects.all(), [])
    def test_delete_with_interpolated_redirect(self):
        """success_url may interpolate fields of the just-deleted object."""
        a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        res = self.client.post('/edit/author/%d/delete/interpolate_redirect/' % a.pk)
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/edit/authors/create/?deleted=%d' % a.pk)
        self.assertQuerysetEqual(Author.objects.all(), [])
        # Also test with escaped chars in URL
        a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        res = self.client.post('/edit/author/{}/delete/interpolate_redirect_nonascii/'.format(a.pk))
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/%C3%A9dit/authors/create/?deleted={}'.format(a.pk))
    def test_delete_with_special_properties(self):
        """Custom template/context_object_name overrides are honored."""
        a = Author.objects.create(**{'name': 'Randall Munroe', 'slug': 'randall-munroe'})
        res = self.client.get('/edit/author/%d/delete/special/' % a.pk)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.context['object'], Author.objects.get(pk=a.pk))
        self.assertEqual(res.context['thingy'], Author.objects.get(pk=a.pk))
        self.assertNotIn('author', res.context)
        self.assertTemplateUsed(res, 'generic_views/confirm_delete.html')
        res = self.client.post('/edit/author/%d/delete/special/' % a.pk)
        self.assertEqual(res.status_code, 302)
        self.assertRedirects(res, '/list/authors/')
        self.assertQuerysetEqual(Author.objects.all(), [])
    def test_delete_without_redirect(self):
        a = Author.objects.create(
            name='Randall Munroe',
            slug='randall-munroe',
        )
        # Should raise exception -- No redirect URL provided, and no
        # get_absolute_url provided
        with self.assertRaises(ImproperlyConfigured):
            self.client.post('/edit/author/%d/delete/naive/' % a.pk)
| bsd-3-clause |
mikkylok/mikky.lu | venv/lib/python2.7/site-packages/pkg_resources/_vendor/packaging/specifiers.py | 1107 | 28025 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse
class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.

    Raised when a specifier string does not match the class's parsing regex.
    """
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
    """Abstract interface implemented by individual specifiers and sets of
    specifiers alike."""
    @abc.abstractmethod
    def __str__(self):
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """
    @abc.abstractmethod
    def __hash__(self):
        """
        Returns a hash value for this Specifier like object.
        """
    @abc.abstractmethod
    def __eq__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """
    @abc.abstractmethod
    def __ne__(self, other):
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are not equal.
        """
    @abc.abstractproperty
    def prereleases(self):
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """
    @prereleases.setter
    def prereleases(self, value):
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """
    @abc.abstractmethod
    def contains(self, item, prereleases=None):
        """
        Determines if the given item is contained within this specifier.
        """
    @abc.abstractmethod
    def filter(self, iterable, prereleases=None):
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
class _IndividualSpecifier(BaseSpecifier):
    """Shared implementation for a single "<operator><version>" clause.

    Subclasses supply ``_regex`` (the parser) and ``_operators`` (operator
    token -> suffix of the ``_compare_*`` method implementing it).
    """
    _operators = {}
    def __init__(self, spec="", prereleases=None):
        # Parse "<op><version>"; anything the subclass regex rejects is invalid.
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
        self._spec = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )
        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases
    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )
        return "<{0}({1!r}{2})>".format(
            self.__class__.__name__,
            str(self),
            pre,
        )
    def __str__(self):
        return "{0}{1}".format(*self._spec)
    def __hash__(self):
        return hash(self._spec)
    def __eq__(self, other):
        # A string is parsed into a specifier of the same class before comparing.
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented
        return self._spec == other._spec
    def __ne__(self, other):
        if isinstance(other, string_types):
            try:
                other = self.__class__(other)
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented
        return self._spec != other._spec
    def _get_operator(self, op):
        # Resolve e.g. "==" to the bound self._compare_equal method.
        return getattr(self, "_compare_{0}".format(self._operators[op]))
    def _coerce_version(self, version):
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version
    @property
    def operator(self):
        # The operator token, e.g. ">=".
        return self._spec[0]
    @property
    def version(self):
        # The version text, e.g. "1.0".
        return self._spec[1]
    @property
    def prereleases(self):
        return self._prereleases
    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value
    def __contains__(self, item):
        return self.contains(item)
    def contains(self, item, prereleases=None):
        """Return True if *item* (a version object or string) satisfies this
        specifier; *prereleases* overrides the instance-level setting."""
        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases
        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        item = self._coerce_version(item)
        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if item.is_prerelease and not prereleases:
            return False
        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        return self._get_operator(self.operator)(item, self.version)
    def filter(self, iterable, prereleases=None):
        """Yield the items of *iterable* that satisfy this specifier, falling
        back to matching pre-releases when nothing else matched."""
        yielded = False
        found_prereleases = []
        kw = {"prereleases": prereleases if prereleases is not None else True}
        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)
            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later incase nothing
                # else matches this specifier.
                if (parsed_version.is_prerelease and not
                        (prereleases or self.prereleases)):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the begining.
                else:
                    yielded = True
                    yield version
        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
class LegacySpecifier(_IndividualSpecifier):
    """Specifier for non-PEP 440 version strings: both sides of every
    comparison are coerced to LegacyVersion first."""
    _regex_str = (
        r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """
    )
    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }
    def _coerce_version(self, version):
        # Unlike the base class, always end up with a LegacyVersion.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version
    def _compare_equal(self, prospective, spec):
        return prospective == self._coerce_version(spec)
    def _compare_not_equal(self, prospective, spec):
        return prospective != self._coerce_version(spec)
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= self._coerce_version(spec)
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= self._coerce_version(spec)
    def _compare_less_than(self, prospective, spec):
        return prospective < self._coerce_version(spec)
    def _compare_greater_than(self, prospective, spec):
        return prospective > self._coerce_version(spec)
def _require_version_compare(fn):
    """Decorator: make a ``_compare_*`` method return False outright for
    anything that is not a PEP 440 ``Version`` instance."""
    @functools.wraps(fn)
    def wrapped(self, prospective, spec):
        return isinstance(prospective, Version) and fn(self, prospective, spec)
    return wrapped
class Specifier(_IndividualSpecifier):
    """A single PEP 440 specifier clause supporting the operators
    ~=, ==, !=, <=, >=, <, > and ===."""
    _regex_str = (
        r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)?  # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.
                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """
    )
    _regex = re.compile(
        r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }
    @_require_version_compare
    def _compare_compatible(self, prospective, spec):
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.
        # We want everything but the last item in the version, but we want to
        # ignore post and dev releases and we want to treat the pre-release as
        # it's own separate segment.
        prefix = ".".join(
            list(
                itertools.takewhile(
                    lambda x: (not x.startswith("post") and not
                               x.startswith("dev")),
                    _version_split(spec),
                )
            )[:-1]
        )
        # Add the prefix notation to the end of our string
        prefix += ".*"
        return (self._get_operator(">=")(prospective, spec) and
                self._get_operator("==")(prospective, prefix))
    @_require_version_compare
    def _compare_equal(self, prospective, spec):
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            spec = _version_split(spec[:-2])  # Remove the trailing .*
            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            prospective = _version_split(str(prospective))
            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            prospective = prospective[:len(spec)]
            # Pad out our two sides with zeros so that they both equal the same
            # length.
            spec, prospective = _pad_version(spec, prospective)
        else:
            # Convert our spec string into a Version
            spec = Version(spec)
            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec.local:
                prospective = Version(prospective.public)
        return prospective == spec
    @_require_version_compare
    def _compare_not_equal(self, prospective, spec):
        return not self._compare_equal(prospective, spec)
    @_require_version_compare
    def _compare_less_than_equal(self, prospective, spec):
        return prospective <= Version(spec)
    @_require_version_compare
    def _compare_greater_than_equal(self, prospective, spec):
        return prospective >= Version(spec)
    @_require_version_compare
    def _compare_less_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)
        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False
        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False
        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True
    @_require_version_compare
    def _compare_greater_than(self, prospective, spec):
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec)
        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False
        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False
        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is techincally greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False
        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True
    def _compare_arbitrary(self, prospective, spec):
        # === does a case-insensitive plain string comparison.
        return str(prospective).lower() == str(spec).lower()
    @property
    def prereleases(self):
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases
        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]
            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True
        return False
    @prereleases.setter
    def prereleases(self, value):
        self._prereleases = value
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
result = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
else:
result.append(item)
return result
def _pad_version(left, right):
left_split, right_split = [], []
# Get the release segment of our versions
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
left_split.append(left[len(left_split[0]):])
right_split.append(right[len(right_split[0]):])
# Insert our padding
left_split.insert(
1,
["0"] * max(0, len(right_split[0]) - len(left_split[0])),
)
right_split.insert(
1,
["0"] * max(0, len(left_split[0]) - len(right_split[0])),
)
return (
list(itertools.chain(*left_split)),
list(itertools.chain(*right_split)),
)
class SpecifierSet(BaseSpecifier):
    """A set of individual version specifiers combined with a logical AND.

    A version is "contained" only when every member specifier accepts it.
    Members are parsed as :class:`Specifier` when possible, falling back
    to :class:`LegacySpecifier` for non PEP 440 forms.
    """

    def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed = set()
        for specifier in specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self):
        pre = (
            ", prereleases={0!r}".format(self.prereleases)
            if self._prereleases is not None
            else ""
        )
        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)

    def __str__(self):
        # Sorted so the string form is deterministic regardless of set order.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self):
        return hash(self._specs)

    def __and__(self, other):
        # "&" combines two sets into the union of their specifiers (a
        # stricter AND of the constraints). Strings are coerced first.
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented
        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)
        # A one-sided prerelease override carries over; two conflicting
        # explicit overrides cannot be reconciled.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )
        return specifier

    def __eq__(self, other):
        # Equality is based purely on the parsed specifiers; strings and
        # single specifiers are coerced to SpecifierSet before comparing.
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented
        return self._specs == other._specs

    def __ne__(self, other):
        # Mirror of __eq__ (this code predates automatic __ne__ derivation).
        if isinstance(other, string_types):
            other = SpecifierSet(other)
        elif isinstance(other, _IndividualSpecifier):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented
        return self._specs != other._specs

    def __len__(self):
        return len(self._specs)

    def __iter__(self):
        return iter(self._specs)

    @property
    def prereleases(self):
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases
        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None
        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value):
        # Explicit override: True/False forces behavior, None means "derive".
        self._prereleases = value

    def __contains__(self, item):
        return self.contains(item)

    def contains(self, item, prereleases=None):
        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases
        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False
        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(
            s.contains(item, prereleases=prereleases)
            for s in self._specs
        )

    def filter(self, iterable, prereleases=None):
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases
        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered = []
            found_prereleases = []
            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item
                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue
                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)
            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases
            return filtered
| mit |
ircwaves/gips | gips/version.py | 1 | 1039 | #!/usr/bin/env python
################################################################################
# GIPS: Geospatial Image Processing System
#
# AUTHOR: Matthew Hanson
# EMAIL: matt.a.hanson@gmail.com
#
# Copyright (C) 2014-2018 Applied Geosolutions
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
################################################################################
# GIPS release version; the "-dev" suffix marks an unreleased development build.
__version__ = '0.14.6-dev'
| gpl-2.0 |
usersource/tasks | tasks_phonegap/Tasks/plugins/io.usersource.anno/tools/copytool2/httplib2/socks.py | 811 | 18459 | """SocksiPy - Python SOCKS module.
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
"""
"""
Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
for use in PyLoris (http://pyloris.sourceforge.net/)
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
mainly to merge bug fixes found in Sourceforge
"""
import base64
import socket
import struct
import sys
# Bail out early on platforms where the socket module exists but provides no
# socket class; nothing in this module can work without it.
if getattr(socket, 'socket', None) is None:
    raise ImportError('socket.socket missing, proxy support unusable')
# Proxy protocol identifiers accepted by setproxy()/setdefaultproxy().
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
PROXY_TYPE_HTTP_NO_TUNNEL = 4

# Module-wide default proxy settings, installed via setdefaultproxy().
_defaultproxy = None
# The untouched socket class, used to reach the real connect()/getpeername().
_orgsocket = socket.socket
# Exception hierarchy: ProxyError is the common base; subclasses map to the
# failure domains of the supported proxy protocols.
class ProxyError(Exception): pass
class GeneralProxyError(ProxyError): pass   # bad input / unexpected proxy data
class Socks5AuthError(ProxyError): pass     # SOCKS5 authentication failed
class Socks5Error(ProxyError): pass         # SOCKS5 request rejected
class Socks4Error(ProxyError): pass         # SOCKS4 request rejected
class HTTPError(ProxyError): pass           # HTTP CONNECT returned non-200
# Human-readable error messages, indexed by the numeric code carried in the
# corresponding exception's args tuple.
_generalerrors = ("success",
    "invalid data",
    "not connected",
    "not available",
    "bad proxy type",
    "bad input")

_socks5errors = ("succeeded",
    "general SOCKS server failure",
    "connection not allowed by ruleset",
    "Network unreachable",
    "Host unreachable",
    "Connection refused",
    "TTL expired",
    "Command not supported",
    "Address type not supported",
    "Unknown error")

_socks5autherrors = ("succeeded",
    "authentication is required",
    "all offered authentication methods were rejected",
    "unknown username or invalid password",
    "unknown error")

# SOCKS4 reply codes start at 90; see the "- 90" offset where these are used.
_socks4errors = ("request granted",
    "request rejected or failed",
    "request rejected because SOCKS server cannot connect to identd on the client",
    "request rejected because the client program and identd report different user-ids",
    "unknown error")
def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
    """Install module-wide default proxy settings.

    Every socksocket created afterwards starts with these settings,
    unless explicitly changed via setproxy().
    """
    global _defaultproxy
    settings = (proxytype, addr, port, rdns, username, password)
    _defaultproxy = settings
def wrapmodule(module):
    """wrapmodule(module)

    Attempts to replace a module's socket library with a SOCKS socket.
    Must set a default proxy using setdefaultproxy(...) first.

    This will only work on modules that import socket directly into the
    namespace; most of the Python Standard Library falls into this category.

    Raises GeneralProxyError if no default proxy has been configured.
    """
    # Idiomatic identity check (was "!= None"). Refuse to monkey-patch when no
    # default proxy is configured, otherwise every wrapped socket would
    # silently connect directly.
    if _defaultproxy is not None:
        module.socket.socket = socksocket
    else:
        raise GeneralProxyError((4, "no proxy specified"))
class socksocket(socket.socket):
    """socksocket([family[, type[, proto]]]) -> socket object

    Open a SOCKS enabled socket. The parameters are the same as
    those of the standard socket init. In order for SOCKS to work,
    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
    """

    def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
        _orgsocket.__init__(self, family, type, proto, _sock)
        # Per-instance proxy config; falls back to the module-level default.
        if _defaultproxy != None:
            self.__proxy = _defaultproxy
        else:
            self.__proxy = (None, None, None, None, None, None)
        self.__proxysockname = None
        self.__proxypeername = None
        # Set to False only for PROXY_TYPE_HTTP_NO_TUNNEL plain-HTTP traffic,
        # in which case sendall() rewrites request headers on the fly.
        self.__httptunnel = True

    def __recvall(self, count):
        """__recvall(count) -> data
        Receive EXACTLY the number of bytes requested from the socket.
        Blocks until the required number of bytes have been received.
        """
        data = self.recv(count)
        while len(data) < count:
            d = self.recv(count-len(data))
            # An empty read means the peer closed before sending everything.
            if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
            data = data + d
        return data

    def sendall(self, content, *args):
        """ override socket.socket.sendall method to rewrite the header
        for non-tunneling proxies if needed
        """
        if not self.__httptunnel:
            content = self.__rewriteproxy(content)
        return super(socksocket, self).sendall(content, *args)

    def __rewriteproxy(self, header):
        """ rewrite HTTP request headers to support non-tunneling proxies
        (i.e. those which do not support the CONNECT method).
        This only works for HTTP (not HTTPS) since HTTPS requires tunneling.
        """
        host, endpt = None, None
        hdrs = header.split("\r\n")
        for hdr in hdrs:
            if hdr.lower().startswith("host:"):
                host = hdr
            elif hdr.lower().startswith("get") or hdr.lower().startswith("post"):
                endpt = hdr
        if host and endpt:
            hdrs.remove(host)
            hdrs.remove(endpt)
            host = host.split(" ")[1]
            endpt = endpt.split(" ")
            if (self.__proxy[4] != None and self.__proxy[5] != None):
                hdrs.insert(0, self.__getauthheader())
            hdrs.insert(0, "Host: %s" % host)
            # Rewrite the origin-form request line into absolute-form, as a
            # plain (non-CONNECT) HTTP proxy expects.
            hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2]))
        return "\r\n".join(hdrs)

    def __getauthheader(self):
        # HTTP Basic proxy authorization built from stored username/password.
        auth = self.__proxy[4] + ":" + self.__proxy[5]
        return "Proxy-Authorization: Basic " + base64.b64encode(auth)

    def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
        Sets the proxy to be used.
        proxytype - The type of the proxy to be used. Three types
                are supported: PROXY_TYPE_SOCKS4 (including socks4a),
                PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
        addr - The address of the server (IP or DNS).
        port - The port of the server. Defaults to 1080 for SOCKS
                servers and 8080 for HTTP proxy servers.
        rdns - Should DNS queries be performed on the remote side
                (rather than the local side). The default is True.
                Note: This has no effect with SOCKS4 servers.
        username - Username to authenticate with to the server.
                The default is no authentication.
        password - Password to authenticate with to the server.
                Only relevant when username is also provided.
        """
        self.__proxy = (proxytype, addr, port, rdns, username, password)

    def __negotiatesocks5(self, destaddr, destport):
        """__negotiatesocks5(self,destaddr,destport)
        Negotiates a connection through a SOCKS5 server.
        """
        # First we'll send the authentication packages we support.
        if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
            # The username/password details were supplied to the
            # setproxy method so we support the USERNAME/PASSWORD
            # authentication (in addition to the standard none).
            self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
        else:
            # No username/password were entered, therefore we
            # only support connections with no authentication.
            self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
        # We'll receive the server's response to determine which
        # method was selected
        chosenauth = self.__recvall(2)
        if chosenauth[0:1] != chr(0x05).encode():
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        # Check the chosen authentication method
        if chosenauth[1:2] == chr(0x00).encode():
            # No authentication is required
            pass
        elif chosenauth[1:2] == chr(0x02).encode():
            # Okay, we need to perform a basic username/password
            # authentication.
            self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
            authstat = self.__recvall(2)
            if authstat[0:1] != chr(0x01).encode():
                # Bad response
                self.close()
                raise GeneralProxyError((1, _generalerrors[1]))
            if authstat[1:2] != chr(0x00).encode():
                # Authentication failed
                self.close()
                raise Socks5AuthError((3, _socks5autherrors[3]))
            # Authentication succeeded
        else:
            # Reaching here is always bad
            self.close()
            if chosenauth[1] == chr(0xFF).encode():
                raise Socks5AuthError((2, _socks5autherrors[2]))
            else:
                raise GeneralProxyError((1, _generalerrors[1]))
        # Now we can request the actual connection
        req = struct.pack('BBB', 0x05, 0x01, 0x00)
        # If the given destination address is an IP address, we'll
        # use the IPv4 address request even if remote resolving was specified.
        try:
            ipaddr = socket.inet_aton(destaddr)
            req = req + chr(0x01).encode() + ipaddr
        except socket.error:
            # Well it's not an IP number, so it's probably a DNS name.
            if self.__proxy[3]:
                # Resolve remotely
                ipaddr = None
                req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
            else:
                # Resolve locally
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
                req = req + chr(0x01).encode() + ipaddr
        req = req + struct.pack(">H", destport)
        self.sendall(req)
        # Get the response
        resp = self.__recvall(4)
        if resp[0:1] != chr(0x05).encode():
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        elif resp[1:2] != chr(0x00).encode():
            # Connection failed
            self.close()
            if ord(resp[1:2])<=8:
                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
            else:
                raise Socks5Error((9, _socks5errors[9]))
        # Get the bound address/port
        elif resp[3:4] == chr(0x01).encode():
            boundaddr = self.__recvall(4)
        elif resp[3:4] == chr(0x03).encode():
            # Domain-name bound address: one length byte, then the name.
            resp = resp + self.recv(1)
            boundaddr = self.__recvall(ord(resp[4:5]))
        else:
            self.close()
            raise GeneralProxyError((1,_generalerrors[1]))
        boundport = struct.unpack(">H", self.__recvall(2))[0]
        self.__proxysockname = (boundaddr, boundport)
        if ipaddr != None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

    def getproxysockname(self):
        """getsockname() -> address info
        Returns the bound IP address and port number at the proxy.
        """
        return self.__proxysockname

    def getproxypeername(self):
        """getproxypeername() -> address info
        Returns the IP and port number of the proxy.
        """
        return _orgsocket.getpeername(self)

    def getpeername(self):
        """getpeername() -> address info
        Returns the IP address and port number of the destination
        machine (note: getproxypeername returns the proxy)
        """
        return self.__proxypeername

    def __negotiatesocks4(self,destaddr,destport):
        """__negotiatesocks4(self,destaddr,destport)
        Negotiates a connection through a SOCKS4 server.
        """
        # Check if the destination address provided is an IP address
        rmtrslv = False
        try:
            ipaddr = socket.inet_aton(destaddr)
        except socket.error:
            # It's a DNS name. Check where it should be resolved.
            if self.__proxy[3]:
                # SOCKS4A placeholder address 0.0.0.x signals remote resolve.
                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
                rmtrslv = True
            else:
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
        # Construct the request packet
        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
        # The username parameter is considered userid for SOCKS4
        if self.__proxy[4] != None:
            req = req + self.__proxy[4]
        req = req + chr(0x00).encode()
        # DNS name if remote resolving is required
        # NOTE: This is actually an extension to the SOCKS4 protocol
        # called SOCKS4A and may not be supported in all cases.
        if rmtrslv:
            req = req + destaddr + chr(0x00).encode()
        self.sendall(req)
        # Get the response from the server
        resp = self.__recvall(8)
        if resp[0:1] != chr(0x00).encode():
            # Bad data
            self.close()
            raise GeneralProxyError((1,_generalerrors[1]))
        if resp[1:2] != chr(0x5A).encode():
            # Server returned an error
            # NOTE(review): close() is called twice on the 91-93 path below;
            # harmless but redundant.
            self.close()
            if ord(resp[1:2]) in (91, 92, 93):
                self.close()
                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
            else:
                raise Socks4Error((94, _socks4errors[4]))
        # Get the bound address/port
        self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
        # NOTE(review): rmtrslv is always a bool here, so "rmtrslv != None" is
        # always true and the placeholder 0.0.0.1 is reported as the peer when
        # remote-resolving; "if not rmtrslv:" was likely intended (compare the
        # "ipaddr != None" check at the end of __negotiatesocks5).
        if rmtrslv != None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

    def __negotiatehttp(self, destaddr, destport):
        """__negotiatehttp(self,destaddr,destport)
        Negotiates a connection through an HTTP server.
        """
        # If we need to resolve locally, we do this now
        if not self.__proxy[3]:
            addr = socket.gethostbyname(destaddr)
        else:
            addr = destaddr
        headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"]
        headers += ["Host: ", destaddr, "\r\n"]
        if (self.__proxy[4] != None and self.__proxy[5] != None):
            headers += [self.__getauthheader(), "\r\n"]
        headers.append("\r\n")
        self.sendall("".join(headers).encode())
        # We read the response until we get the string "\r\n\r\n"
        resp = self.recv(1)
        while resp.find("\r\n\r\n".encode()) == -1:
            resp = resp + self.recv(1)
        # We just need the first line to check if the connection
        # was successful
        statusline = resp.splitlines()[0].split(" ".encode(), 2)
        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        try:
            statuscode = int(statusline[1])
        except ValueError:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if statuscode != 200:
            self.close()
            raise HTTPError((statuscode, statusline[2]))
        self.__proxysockname = ("0.0.0.0", 0)
        self.__proxypeername = (addr, destport)

    def connect(self, destpair):
        """connect(self, destpair)
        Connects to the specified destination through a proxy.
        destpair - A tuple of the IP/DNS address and the port number.
        (identical to socket's connect).
        To select the proxy server use setproxy().
        """
        # Do a minimal input check first
        # NOTE: the basestring check makes this module Python 2 only.
        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int):
            raise GeneralProxyError((5, _generalerrors[5]))
        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatesocks5(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self,(self.__proxy[1], portnum))
            self.__negotiatesocks4(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_HTTP:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 8080
            _orgsocket.connect(self,(self.__proxy[1], portnum))
            self.__negotiatehttp(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 8080
            _orgsocket.connect(self,(self.__proxy[1],portnum))
            # HTTPS must still tunnel via CONNECT; plain HTTP is rewritten
            # per-request by sendall()/__rewriteproxy instead.
            if destpair[1] == 443:
                self.__negotiatehttp(destpair[0],destpair[1])
            else:
                self.__httptunnel = False
        elif self.__proxy[0] == None:
            # No proxy configured: behave like a plain socket.
            _orgsocket.connect(self, (destpair[0], destpair[1]))
        else:
            raise GeneralProxyError((4, _generalerrors[4]))
| mpl-2.0 |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/test/test_dict.py | 8 | 30334 | import unittest
from test import support
import collections, random, string
import collections.abc
import gc, weakref
import pickle
class DictTest(unittest.TestCase):
def test_invalid_keyword_arguments(self):
    """dict(**m) and d.update(**m) reject mappings with non-string keys."""
    class Custom(dict):
        pass
    for invalid in {1 : 2}, Custom({1 : 2}):
        with self.assertRaises(TypeError):
            dict(**invalid)
        with self.assertRaises(TypeError):
            {}.update(**invalid)
def test_constructor(self):
    """dict() with no arguments yields a fresh empty dict each call."""
    self.assertEqual(dict(), {})
    # Each call must produce a distinct object, not a shared singleton.
    self.assertIsNot(dict(), {})
def test_literal_constructor(self):
    """Dict literals of many sizes evaluate to the expected mapping."""
    # check literal constructor for different sized dicts
    # (to exercise the BUILD_MAP oparg).
    for n in (0, 1, 6, 256, 400):
        items = [(''.join(random.sample(string.ascii_letters, 8)), i)
                 for i in range(n)]
        random.shuffle(items)
        formatted_items = ('{!r}: {:d}'.format(k, v) for k, v in items)
        dictliteral = '{' + ', '.join(formatted_items) + '}'
        self.assertEqual(eval(dictliteral), dict(items))
def test_bool(self):
self.assertIs(not {}, True)
self.assertTrue({1: 2})
self.assertIs(bool({}), False)
self.assertIs(bool({1: 2}), True)
def test_keys(self):
d = {}
self.assertEqual(set(d.keys()), set())
d = {'a': 1, 'b': 2}
k = d.keys()
self.assertEqual(set(k), {'a', 'b'})
self.assertIn('a', k)
self.assertIn('b', k)
self.assertIn('a', d)
self.assertIn('b', d)
self.assertRaises(TypeError, d.keys, None)
self.assertEqual(repr(dict(a=1).keys()), "dict_keys(['a'])")
def test_values(self):
    """values() reflects the dict's contents and rejects extra arguments."""
    self.assertEqual(set({}.values()), set())
    filled = {1: 2}
    self.assertEqual(set(filled.values()), {2})
    self.assertRaises(TypeError, filled.values, None)
    self.assertEqual(repr(dict(a=1).values()), "dict_values([1])")
def test_items(self):
    """items() yields (key, value) pairs and rejects extra arguments."""
    self.assertEqual(set({}.items()), set())
    filled = {1: 2}
    self.assertEqual(set(filled.items()), {(1, 2)})
    self.assertRaises(TypeError, filled.items, None)
    self.assertEqual(repr(dict(a=1).items()), "dict_items([('a', 1)])")
def test_contains(self):
    """`in`/`not in` test key membership; __contains__ needs an argument."""
    d = {}
    self.assertNotIn('a', d)
    self.assertFalse('a' in d)
    self.assertTrue('a' not in d)
    d = {'a': 1, 'b': 2}
    self.assertIn('a', d)
    self.assertIn('b', d)
    self.assertNotIn('c', d)
    # Calling __contains__ with no key raises TypeError.
    self.assertRaises(TypeError, d.__contains__)
def test_len(self):
    """len() counts the number of keys."""
    self.assertEqual(len({}), 0)
    self.assertEqual(len({'a': 1, 'b': 2}), 2)
def test_getitem(self):
    """Indexing gets/sets/deletes items; __hash__/__eq__ errors propagate."""
    d = {'a': 1, 'b': 2}
    self.assertEqual(d['a'], 1)
    self.assertEqual(d['b'], 2)
    d['c'] = 3
    d['a'] = 4
    self.assertEqual(d['c'], 3)
    self.assertEqual(d['a'], 4)
    del d['b']
    self.assertEqual(d, {'a': 4, 'c': 3})
    self.assertRaises(TypeError, d.__getitem__)

    class BadEq(object):
        def __eq__(self, other):
            raise Exc()
        def __hash__(self):
            return 24

    d = {}
    d[BadEq()] = 42
    self.assertRaises(KeyError, d.__getitem__, 23)

    class Exc(Exception): pass

    class BadHash(object):
        fail = False
        def __hash__(self):
            if self.fail:
                raise Exc()
            else:
                return 42

    x = BadHash()
    d[x] = 42
    x.fail = True
    # The Exc raised by __hash__ must surface through the lookup.
    self.assertRaises(Exc, d.__getitem__, x)
def test_clear(self):
    """clear() empties the dict in place and takes no arguments."""
    victim = {1: 1, 2: 2, 3: 3}
    victim.clear()
    self.assertEqual(victim, {})
    self.assertRaises(TypeError, victim.clear, None)
def test_update(self):
    """update() accepts dicts, keys()-style objects; errors propagate."""
    d = {}
    d.update({1:100})
    d.update({2:20})
    d.update({1:1, 2:2, 3:3})
    self.assertEqual(d, {1:1, 2:2, 3:3})
    d.update()
    self.assertEqual(d, {1:1, 2:2, 3:3})
    self.assertRaises((TypeError, AttributeError), d.update, None)

    # An object exposing keys()/__getitem__ works like a mapping.
    class SimpleUserDict:
        def __init__(self):
            self.d = {1:1, 2:2, 3:3}
        def keys(self):
            return self.d.keys()
        def __getitem__(self, i):
            return self.d[i]
    d.clear()
    d.update(SimpleUserDict())
    self.assertEqual(d, {1:1, 2:2, 3:3})

    class Exc(Exception): pass

    d.clear()
    # keys() itself raising must propagate.
    class FailingUserDict:
        def keys(self):
            raise Exc
    self.assertRaises(Exc, d.update, FailingUserDict())

    # An iterator over keys raising mid-iteration must propagate.
    class FailingUserDict:
        def keys(self):
            class BogonIter:
                def __init__(self):
                    self.i = 1
                def __iter__(self):
                    return self
                def __next__(self):
                    if self.i:
                        self.i = 0
                        return 'a'
                    raise Exc
            return BogonIter()
        def __getitem__(self, key):
            return key
    self.assertRaises(Exc, d.update, FailingUserDict())

    # __getitem__ raising during update must propagate.
    class FailingUserDict:
        def keys(self):
            class BogonIter:
                def __init__(self):
                    self.i = ord('a')
                def __iter__(self):
                    return self
                def __next__(self):
                    if self.i <= ord('z'):
                        rtn = chr(self.i)
                        self.i += 1
                        return rtn
                    raise StopIteration
            return BogonIter()
        def __getitem__(self, key):
            raise Exc
    self.assertRaises(Exc, d.update, FailingUserDict())

    class badseq(object):
        def __iter__(self):
            return self
        def __next__(self):
            raise Exc()

    self.assertRaises(Exc, {}.update, badseq())

    # Items that are not 2-element pairs are rejected.
    self.assertRaises(ValueError, {}.update, [(1, 2, 3)])
def test_fromkeys(self):
    """fromkeys() builds new dicts from iterables; subclass behavior."""
    self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None})
    d = {}
    # fromkeys() on an instance still returns a NEW dict.
    self.assertIsNot(d.fromkeys('abc'), d)
    self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None})
    self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0})
    self.assertEqual(d.fromkeys([]), {})
    def g():
        yield 1
    self.assertEqual(d.fromkeys(g()), {1:None})
    self.assertRaises(TypeError, {}.fromkeys, 3)

    # Subclasses get instances of themselves back.
    class dictlike(dict): pass
    self.assertEqual(dictlike.fromkeys('a'), {'a':None})
    self.assertEqual(dictlike().fromkeys('a'), {'a':None})
    self.assertIsInstance(dictlike.fromkeys('a'), dictlike)
    self.assertIsInstance(dictlike().fromkeys('a'), dictlike)

    # __new__ returning a non-dict is honored.
    class mydict(dict):
        def __new__(cls):
            return collections.UserDict()
    ud = mydict.fromkeys('ab')
    self.assertEqual(ud, {'a':None, 'b':None})
    self.assertIsInstance(ud, collections.UserDict)
    self.assertRaises(TypeError, dict.fromkeys)

    class Exc(Exception): pass

    # Errors from __init__, iteration and __setitem__ all propagate.
    class baddict1(dict):
        def __init__(self):
            raise Exc()

    self.assertRaises(Exc, baddict1.fromkeys, [1])

    class BadSeq(object):
        def __iter__(self):
            return self
        def __next__(self):
            raise Exc()

    self.assertRaises(Exc, dict.fromkeys, BadSeq())

    class baddict2(dict):
        def __setitem__(self, key, value):
            raise Exc()

    self.assertRaises(Exc, baddict2.fromkeys, [1])

    # test fast path for dictionary inputs
    d = dict(zip(range(6), range(6)))
    self.assertEqual(dict.fromkeys(d, 0), dict(zip(range(6), [0]*6)))

    class baddict3(dict):
        def __new__(cls):
            return d
    d = {i : i for i in range(10)}
    res = d.copy()
    res.update(a=None, b=None, c=None)
    self.assertEqual(baddict3.fromkeys({"a", "b", "c"}), res)
def test_copy(self):
    """copy() returns an equal shallow copy and takes no arguments."""
    src = {1: 1, 2: 2, 3: 3}
    self.assertEqual(src.copy(), {1: 1, 2: 2, 3: 3})
    self.assertEqual({}.copy(), {})
    self.assertRaises(TypeError, src.copy, None)
def test_get(self):
    """get() returns the stored value, the supplied default, or None."""
    empty = {}
    self.assertIs(empty.get('c'), None)
    self.assertEqual(empty.get('c', 3), 3)
    full = {'a': 1, 'b': 2}
    self.assertIs(full.get('c'), None)
    self.assertEqual(full.get('c', 3), 3)
    self.assertEqual(full.get('a'), 1)
    self.assertEqual(full.get('a', 3), 1)
    # get() takes one or two arguments, never zero or three.
    self.assertRaises(TypeError, full.get)
    self.assertRaises(TypeError, full.get, None, None, None)
def test_setdefault(self):
    """setdefault() inserts only missing keys; __hash__ errors propagate."""
    # dict.setdefault()
    d = {}
    self.assertIs(d.setdefault('key0'), None)
    d.setdefault('key0', [])
    # The earlier None is kept; the [] default is ignored.
    self.assertIs(d.setdefault('key0'), None)
    d.setdefault('key', []).append(3)
    self.assertEqual(d['key'][0], 3)
    d.setdefault('key', []).append(4)
    self.assertEqual(len(d['key']), 2)
    self.assertRaises(TypeError, d.setdefault)

    class Exc(Exception): pass

    class BadHash(object):
        fail = False
        def __hash__(self):
            if self.fail:
                raise Exc()
            else:
                return 42

    x = BadHash()
    d[x] = 42
    x.fail = True
    self.assertRaises(Exc, d.setdefault, x, [])
def test_setdefault_atomic(self):
    """setdefault() must call __hash__ and __eq__ only once per key."""
    # Issue #13521: setdefault() calls __hash__ and __eq__ only once.
    class Hashed(object):
        def __init__(self):
            self.hash_count = 0
            self.eq_count = 0
        def __hash__(self):
            self.hash_count += 1
            return 42
        def __eq__(self, other):
            self.eq_count += 1
            return id(self) == id(other)
    hashed1 = Hashed()
    y = {hashed1: 5}
    hashed2 = Hashed()
    # hashed2 collides with hashed1 (same hash), forcing one __eq__ call.
    y.setdefault(hashed2, [])
    self.assertEqual(hashed1.hash_count, 1)
    self.assertEqual(hashed2.hash_count, 1)
    self.assertEqual(hashed1.eq_count + hashed2.eq_count, 1)
def test_setitem_atomic_at_resize(self):
    """__setitem__ calls __hash__/__eq__ once even when a resize occurs."""
    class Hashed(object):
        def __init__(self):
            self.hash_count = 0
            self.eq_count = 0
        def __hash__(self):
            self.hash_count += 1
            return 42
        def __eq__(self, other):
            self.eq_count += 1
            return id(self) == id(other)
    hashed1 = Hashed()
    # 5 items
    y = {hashed1: 5, 0: 0, 1: 1, 2: 2, 3: 3}
    hashed2 = Hashed()
    # 6th item forces a resize
    y[hashed2] = []
    self.assertEqual(hashed1.hash_count, 1)
    self.assertEqual(hashed2.hash_count, 1)
    self.assertEqual(hashed1.eq_count + hashed2.eq_count, 1)
def test_popitem(self):
    """popitem() drains dicts of many sizes; empty dict raises KeyError."""
    # dict.popitem()
    for copymode in -1, +1:
        # -1: b has same structure as a
        # +1: b is a.copy()
        for log2size in range(12):
            size = 2**log2size
            a = {}
            b = {}
            for i in range(size):
                a[repr(i)] = i
                if copymode < 0:
                    b[repr(i)] = i
            if copymode > 0:
                b = a.copy()
            for i in range(size):
                ka, va = ta = a.popitem()
                self.assertEqual(va, int(ka))
                kb, vb = tb = b.popitem()
                self.assertEqual(vb, int(kb))
                # Structurally-identical dicts must pop in the same order.
                self.assertFalse(copymode < 0 and ta != tb)
            self.assertFalse(a)
            self.assertFalse(b)

    d = {}
    self.assertRaises(KeyError, d.popitem)
def test_pop(self):
    """pop() removes and returns; missing keys raise unless a default given."""
    # Tests for pop with specified key
    d = {}
    k, v = 'abc', 'def'
    d[k] = v
    self.assertRaises(KeyError, d.pop, 'ghi')
    self.assertEqual(d.pop(k), v)
    self.assertEqual(len(d), 0)
    self.assertRaises(KeyError, d.pop, k)
    self.assertEqual(d.pop(k, v), v)
    d[k] = v
    self.assertEqual(d.pop(k, 1), v)
    self.assertRaises(TypeError, d.pop)

    class Exc(Exception): pass

    class BadHash(object):
        fail = False
        def __hash__(self):
            if self.fail:
                raise Exc()
            else:
                return 42

    x = BadHash()
    d[x] = 42
    x.fail = True
    # An Exc raised by __hash__ must surface through pop().
    self.assertRaises(Exc, d.pop, x)
def test_mutating_iteration(self):
    """Growing a dict while iterating over it raises RuntimeError."""
    # changing dict size during iteration
    d = {}
    d[1] = 1
    with self.assertRaises(RuntimeError):
        for i in d:
            d[i+1] = 1
def test_mutating_lookup(self):
    """A __eq__ that mutates the dict mid-lookup must not corrupt it."""
    # changing dict during a lookup (issue #14417)
    class NastyKey:
        mutate_dict = None

        def __init__(self, value):
            self.value = value

        def __hash__(self):
            # hash collision!
            return 1

        def __eq__(self, other):
            # First comparison deletes an entry from the dict under test.
            if NastyKey.mutate_dict:
                mydict, key = NastyKey.mutate_dict
                NastyKey.mutate_dict = None
                del mydict[key]
            return self.value == other.value

    key1 = NastyKey(1)
    key2 = NastyKey(2)
    d = {key1: 1}
    NastyKey.mutate_dict = (d, key1)
    d[key2] = 2
    self.assertEqual(d, {key2: 2})
def test_repr(self):
d = {}
self.assertEqual(repr(d), '{}')
d[1] = 2
self.assertEqual(repr(d), '{1: 2}')
d = {}
d[1] = d
self.assertEqual(repr(d), '{1: {...}}')
class Exc(Exception): pass
class BadRepr(object):
def __repr__(self):
raise Exc()
d = {1: BadRepr()}
self.assertRaises(Exc, repr, d)
def test_eq(self):
self.assertEqual({}, {})
self.assertEqual({1: 2}, {1: 2})
class Exc(Exception): pass
class BadCmp(object):
def __eq__(self, other):
raise Exc()
def __hash__(self):
return 1
d1 = {BadCmp(): 1}
d2 = {1: 1}
with self.assertRaises(Exc):
d1 == d2
def test_keys_contained(self):
self.helper_keys_contained(lambda x: x.keys())
self.helper_keys_contained(lambda x: x.items())
def helper_keys_contained(self, fn):
# Test rich comparisons against dict key views, which should behave the
# same as sets.
empty = fn(dict())
empty2 = fn(dict())
smaller = fn({1:1, 2:2})
larger = fn({1:1, 2:2, 3:3})
larger2 = fn({1:1, 2:2, 3:3})
larger3 = fn({4:1, 2:2, 3:3})
self.assertTrue(smaller < larger)
self.assertTrue(smaller <= larger)
self.assertTrue(larger > smaller)
self.assertTrue(larger >= smaller)
self.assertFalse(smaller >= larger)
self.assertFalse(smaller > larger)
self.assertFalse(larger <= smaller)
self.assertFalse(larger < smaller)
self.assertFalse(smaller < larger3)
self.assertFalse(smaller <= larger3)
self.assertFalse(larger3 > smaller)
self.assertFalse(larger3 >= smaller)
# Inequality strictness
self.assertTrue(larger2 >= larger)
self.assertTrue(larger2 <= larger)
self.assertFalse(larger2 > larger)
self.assertFalse(larger2 < larger)
self.assertTrue(larger == larger2)
self.assertTrue(smaller != larger)
# There is an optimization on the zero-element case.
self.assertTrue(empty == empty2)
self.assertFalse(empty != empty2)
self.assertFalse(empty == smaller)
self.assertTrue(empty != smaller)
# With the same size, an elementwise compare happens
self.assertTrue(larger != larger3)
self.assertFalse(larger == larger3)
def test_errors_in_view_containment_check(self):
class C:
def __eq__(self, other):
raise RuntimeError
d1 = {1: C()}
d2 = {1: C()}
with self.assertRaises(RuntimeError):
d1.items() == d2.items()
with self.assertRaises(RuntimeError):
d1.items() != d2.items()
with self.assertRaises(RuntimeError):
d1.items() <= d2.items()
with self.assertRaises(RuntimeError):
d1.items() >= d2.items()
d3 = {1: C(), 2: C()}
with self.assertRaises(RuntimeError):
d2.items() < d3.items()
with self.assertRaises(RuntimeError):
d3.items() > d2.items()
def test_dictview_set_operations_on_keys(self):
k1 = {1:1, 2:2}.keys()
k2 = {1:1, 2:2, 3:3}.keys()
k3 = {4:4}.keys()
self.assertEqual(k1 - k2, set())
self.assertEqual(k1 - k3, {1,2})
self.assertEqual(k2 - k1, {3})
self.assertEqual(k3 - k1, {4})
self.assertEqual(k1 & k2, {1,2})
self.assertEqual(k1 & k3, set())
self.assertEqual(k1 | k2, {1,2,3})
self.assertEqual(k1 ^ k2, {3})
self.assertEqual(k1 ^ k3, {1,2,4})
def test_dictview_set_operations_on_items(self):
k1 = {1:1, 2:2}.items()
k2 = {1:1, 2:2, 3:3}.items()
k3 = {4:4}.items()
self.assertEqual(k1 - k2, set())
self.assertEqual(k1 - k3, {(1,1), (2,2)})
self.assertEqual(k2 - k1, {(3,3)})
self.assertEqual(k3 - k1, {(4,4)})
self.assertEqual(k1 & k2, {(1,1), (2,2)})
self.assertEqual(k1 & k3, set())
self.assertEqual(k1 | k2, {(1,1), (2,2), (3,3)})
self.assertEqual(k1 ^ k2, {(3,3)})
self.assertEqual(k1 ^ k3, {(1,1), (2,2), (4,4)})
def test_dictview_mixed_set_operations(self):
# Just a few for .keys()
self.assertTrue({1:1}.keys() == {1})
self.assertTrue({1} == {1:1}.keys())
self.assertEqual({1:1}.keys() | {2}, {1, 2})
self.assertEqual({2} | {1:1}.keys(), {1, 2})
# And a few for .items()
self.assertTrue({1:1}.items() == {(1,1)})
self.assertTrue({(1,1)} == {1:1}.items())
self.assertEqual({1:1}.items() | {2}, {(1,1), 2})
self.assertEqual({2} | {1:1}.items(), {(1,1), 2})
def test_missing(self):
# Make sure dict doesn't have a __missing__ method
self.assertFalse(hasattr(dict, "__missing__"))
self.assertFalse(hasattr({}, "__missing__"))
# Test several cases:
# (D) subclass defines __missing__ method returning a value
# (E) subclass defines __missing__ method raising RuntimeError
# (F) subclass sets __missing__ instance variable (no effect)
# (G) subclass doesn't define __missing__ at a all
class D(dict):
def __missing__(self, key):
return 42
d = D({1: 2, 3: 4})
self.assertEqual(d[1], 2)
self.assertEqual(d[3], 4)
self.assertNotIn(2, d)
self.assertNotIn(2, d.keys())
self.assertEqual(d[2], 42)
class E(dict):
def __missing__(self, key):
raise RuntimeError(key)
e = E()
with self.assertRaises(RuntimeError) as c:
e[42]
self.assertEqual(c.exception.args, (42,))
class F(dict):
def __init__(self):
# An instance variable __missing__ should have no effect
self.__missing__ = lambda key: None
f = F()
with self.assertRaises(KeyError) as c:
f[42]
self.assertEqual(c.exception.args, (42,))
class G(dict):
pass
g = G()
with self.assertRaises(KeyError) as c:
g[42]
self.assertEqual(c.exception.args, (42,))
def test_tuple_keyerror(self):
# SF #1576657
d = {}
with self.assertRaises(KeyError) as c:
d[(1,)]
self.assertEqual(c.exception.args, ((1,),))
def test_bad_key(self):
# Dictionary lookups should fail if __eq__() raises an exception.
class CustomException(Exception):
pass
class BadDictKey:
def __hash__(self):
return hash(self.__class__)
def __eq__(self, other):
if isinstance(other, self.__class__):
raise CustomException
return other
d = {}
x1 = BadDictKey()
x2 = BadDictKey()
d[x1] = 1
for stmt in ['d[x2] = 2',
'z = d[x2]',
'x2 in d',
'd.get(x2)',
'd.setdefault(x2, 42)',
'd.pop(x2)',
'd.update({x2: 2})']:
with self.assertRaises(CustomException):
exec(stmt, locals())
def test_resize1(self):
# Dict resizing bug, found by Jack Jansen in 2.2 CVS development.
# This version got an assert failure in debug build, infinite loop in
# release build. Unfortunately, provoking this kind of stuff requires
# a mix of inserts and deletes hitting exactly the right hash codes in
# exactly the right order, and I can't think of a randomized approach
# that would be *likely* to hit a failing case in reasonable time.
d = {}
for i in range(5):
d[i] = i
for i in range(5):
del d[i]
for i in range(5, 9): # i==8 was the problem
d[i] = i
def test_resize2(self):
# Another dict resizing bug (SF bug #1456209).
# This caused Segmentation faults or Illegal instructions.
class X(object):
def __hash__(self):
return 5
def __eq__(self, other):
if resizing:
d.clear()
return False
d = {}
resizing = False
d[X()] = 1
d[X()] = 2
d[X()] = 3
d[X()] = 4
d[X()] = 5
# now trigger a resize
resizing = True
d[9] = 6
def test_empty_presized_dict_in_freelist(self):
# Bug #3537: if an empty but presized dict with a size larger
# than 7 was in the freelist, it triggered an assertion failure
with self.assertRaises(ZeroDivisionError):
d = {'a': 1 // 0, 'b': None, 'c': None, 'd': None, 'e': None,
'f': None, 'g': None, 'h': None}
d = {}
def test_container_iterator(self):
# Bug #3680: tp_traverse was not implemented for dictiter and
# dictview objects.
class C(object):
pass
views = (dict.items, dict.values, dict.keys)
for v in views:
obj = C()
ref = weakref.ref(obj)
container = {obj: 1}
obj.v = v(container)
obj.x = iter(obj.v)
del obj, container
gc.collect()
self.assertIs(ref(), None, "Cycle was not collected")
def _not_tracked(self, t):
# Nested containers can take several collections to untrack
gc.collect()
gc.collect()
self.assertFalse(gc.is_tracked(t), t)
def _tracked(self, t):
self.assertTrue(gc.is_tracked(t), t)
gc.collect()
gc.collect()
self.assertTrue(gc.is_tracked(t), t)
@support.cpython_only
def test_track_literals(self):
# Test GC-optimization of dict literals
x, y, z, w = 1.5, "a", (1, None), []
self._not_tracked({})
self._not_tracked({x:(), y:x, z:1})
self._not_tracked({1: "a", "b": 2})
self._not_tracked({1: 2, (None, True, False, ()): int})
self._not_tracked({1: object()})
# Dicts with mutable elements are always tracked, even if those
# elements are not tracked right now.
self._tracked({1: []})
self._tracked({1: ([],)})
self._tracked({1: {}})
self._tracked({1: set()})
@support.cpython_only
def test_track_dynamic(self):
# Test GC-optimization of dynamically-created dicts
class MyObject(object):
pass
x, y, z, w, o = 1.5, "a", (1, object()), [], MyObject()
d = dict()
self._not_tracked(d)
d[1] = "a"
self._not_tracked(d)
d[y] = 2
self._not_tracked(d)
d[z] = 3
self._not_tracked(d)
self._not_tracked(d.copy())
d[4] = w
self._tracked(d)
self._tracked(d.copy())
d[4] = None
self._not_tracked(d)
self._not_tracked(d.copy())
# dd isn't tracked right now, but it may mutate and therefore d
# which contains it must be tracked.
d = dict()
dd = dict()
d[1] = dd
self._not_tracked(dd)
self._tracked(d)
dd[1] = d
self._tracked(dd)
d = dict.fromkeys([x, y, z])
self._not_tracked(d)
dd = dict()
dd.update(d)
self._not_tracked(dd)
d = dict.fromkeys([x, y, z, o])
self._tracked(d)
dd = dict()
dd.update(d)
self._tracked(dd)
d = dict(x=x, y=y, z=z)
self._not_tracked(d)
d = dict(x=x, y=y, z=z, w=w)
self._tracked(d)
d = dict()
d.update(x=x, y=y, z=z)
self._not_tracked(d)
d.update(w=w)
self._tracked(d)
d = dict([(x, y), (z, 1)])
self._not_tracked(d)
d = dict([(x, y), (z, w)])
self._tracked(d)
d = dict()
d.update([(x, y), (z, 1)])
self._not_tracked(d)
d.update([(x, y), (z, w)])
self._tracked(d)
@support.cpython_only
def test_track_subtypes(self):
# Dict subtypes are always tracked
class MyDict(dict):
pass
self._tracked(MyDict())
def test_iterator_pickling(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
data = {1:"a", 2:"b", 3:"c"}
it = iter(data)
d = pickle.dumps(it, proto)
it = pickle.loads(d)
self.assertEqual(sorted(it), sorted(data))
it = pickle.loads(d)
try:
drop = next(it)
except StopIteration:
continue
d = pickle.dumps(it, proto)
it = pickle.loads(d)
del data[drop]
self.assertEqual(sorted(it), sorted(data))
def test_itemiterator_pickling(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
data = {1:"a", 2:"b", 3:"c"}
# dictviews aren't picklable, only their iterators
itorg = iter(data.items())
d = pickle.dumps(itorg, proto)
it = pickle.loads(d)
# note that the type of type of the unpickled iterator
# is not necessarily the same as the original. It is
# merely an object supporting the iterator protocol, yielding
# the same objects as the original one.
# self.assertEqual(type(itorg), type(it))
self.assertIsInstance(it, collections.abc.Iterator)
self.assertEqual(dict(it), data)
it = pickle.loads(d)
drop = next(it)
d = pickle.dumps(it, proto)
it = pickle.loads(d)
del data[drop[0]]
self.assertEqual(dict(it), data)
def test_valuesiterator_pickling(self):
for proto in range(pickle.HIGHEST_PROTOCOL):
data = {1:"a", 2:"b", 3:"c"}
# data.values() isn't picklable, only its iterator
it = iter(data.values())
d = pickle.dumps(it, proto)
it = pickle.loads(d)
self.assertEqual(sorted(list(it)), sorted(list(data.values())))
it = pickle.loads(d)
drop = next(it)
d = pickle.dumps(it, proto)
it = pickle.loads(d)
values = list(it) + [drop]
self.assertEqual(sorted(values), sorted(list(data.values())))
def test_instance_dict_getattr_str_subclass(self):
class Foo:
def __init__(self, msg):
self.msg = msg
f = Foo('123')
class _str(str):
pass
self.assertEqual(f.msg, getattr(f, _str('msg')))
self.assertEqual(f.msg, f.__dict__[_str('msg')])
def test_object_set_item_single_instance_non_str_key(self):
class Foo: pass
f = Foo()
f.__dict__[1] = 1
f.a = 'a'
self.assertEqual(f.__dict__, {1:1, 'a':'a'})
def check_reentrant_insertion(self, mutate):
# This object will trigger mutation of the dict when replaced
# by another value. Note this relies on refcounting: the test
# won't achieve its purpose on fully-GCed Python implementations.
class Mutating:
def __del__(self):
mutate(d)
d = {k: Mutating() for k in 'abcdefghijklmnopqr'}
for k in list(d):
d[k] = k
def test_reentrant_insertion(self):
# Reentrant insertion shouldn't crash (see issue #22653)
def mutate(d):
d['b'] = 5
self.check_reentrant_insertion(mutate)
def mutate(d):
d.update(self.__dict__)
d.clear()
self.check_reentrant_insertion(mutate)
def mutate(d):
while d:
d.popitem()
self.check_reentrant_insertion(mutate)
from test import mapping_tests
class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol):
type2test = dict
class Dict(dict):
pass
class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol):
type2test = Dict
def test_main():
support.run_unittest(
DictTest,
GeneralMappingTests,
SubclassMappingTests,
)
if __name__ == "__main__":
test_main()
| gpl-2.0 |
switowski/invenio | invenio/modules/indexer/tokenizers/BibIndexFulltextTokenizer.py | 12 | 9806 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2010, 2011, 2012, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibIndexFulltextTokenizer: extracts words form a given document.
Document is given by its URL.
"""
import os
import sys
import logging
import urllib2
import re
from six import iteritems
from invenio.config import \
CFG_SOLR_URL, \
CFG_XAPIAN_ENABLED, \
CFG_BIBINDEX_FULLTEXT_INDEX_LOCAL_FILES_ONLY, \
CFG_BIBINDEX_SPLASH_PAGES
from invenio.utils.html import get_links_in_html_page
from invenio.legacy.websubmit.file_converter import convert_file, get_file_converter_logger
from invenio.legacy.miscutil.solrutils_bibindex_indexer import solr_add_fulltext
from invenio.legacy.miscutil.xapianutils_bibindex_indexer import xapian_add
from invenio.legacy.bibdocfile.api import bibdocfile_url_p, \
bibdocfile_url_to_bibdoc, download_url, \
BibRecDocs, InvenioBibDocFileError
from invenio.legacy.bibindex.engine_utils import get_idx_indexer
from invenio.legacy.bibsched.bibtask import write_message
from invenio.ext.logging import register_exception
from intbitset import intbitset
from invenio.modules.indexer.tokenizers.BibIndexDefaultTokenizer import BibIndexDefaultTokenizer
fulltext_added = intbitset() # stores ids of records whose fulltexts have been added
class BibIndexFulltextTokenizer(BibIndexDefaultTokenizer):
"""
Exctracts all the words contained in document specified by url.
"""
def __init__(self, stemming_language = None, remove_stopwords = False, remove_html_markup = False, remove_latex_markup = False):
self.verbose = 3
BibIndexDefaultTokenizer.__init__(self, stemming_language,
remove_stopwords,
remove_html_markup,
remove_latex_markup)
def set_verbose(self, verbose):
"""Allows to change verbosity level during indexing"""
self.verbose = verbose
def tokenize_for_words_default(self, phrase):
"""Default tokenize_for_words inherited from default tokenizer"""
return super(BibIndexFulltextTokenizer, self).tokenize_for_words(phrase)
def get_words_from_fulltext(self, url_direct_or_indirect):
"""Returns all the words contained in the document specified by
URL_DIRECT_OR_INDIRECT with the words being split by various
SRE_SEPARATORS regexp set earlier. If FORCE_FILE_EXTENSION is
set (e.g. to "pdf", then treat URL_DIRECT_OR_INDIRECT as a PDF
file. (This is interesting to index Indico for example.) Note
also that URL_DIRECT_OR_INDIRECT may be either a direct URL to
the fulltext file or an URL to a setlink-like page body that
presents the links to be indexed. In the latter case the
URL_DIRECT_OR_INDIRECT is parsed to extract actual direct URLs
to fulltext documents, for all knows file extensions as
specified by global CONV_PROGRAMS config variable.
"""
write_message("... reading fulltext files from %s started" % url_direct_or_indirect, verbose=2)
try:
if bibdocfile_url_p(url_direct_or_indirect):
write_message("... %s is an internal document" % url_direct_or_indirect, verbose=2)
try:
bibdoc = bibdocfile_url_to_bibdoc(url_direct_or_indirect)
except InvenioBibDocFileError:
# Outdated 8564 tag
return []
indexer = get_idx_indexer('fulltext')
if indexer != 'native':
# A document might belong to multiple records
for rec_link in bibdoc.bibrec_links:
recid = rec_link["recid"]
# Adds fulltexts of all files once per records
if not recid in fulltext_added:
bibrecdocs = BibRecDocs(recid)
try:
text = bibrecdocs.get_text()
except InvenioBibDocFileError:
# Invalid PDF
continue
if indexer == 'SOLR' and CFG_SOLR_URL:
solr_add_fulltext(recid, text)
elif indexer == 'XAPIAN' and CFG_XAPIAN_ENABLED:
xapian_add(recid, 'fulltext', text)
fulltext_added.add(recid)
# we are relying on an external information retrieval system
# to provide full-text indexing, so dispatch text to it and
# return nothing here:
return []
else:
text = ""
if hasattr(bibdoc, "get_text"):
text = bibdoc.get_text()
return self.tokenize_for_words_default(text)
else:
if CFG_BIBINDEX_FULLTEXT_INDEX_LOCAL_FILES_ONLY:
write_message("... %s is external URL but indexing only local files" % url_direct_or_indirect, verbose=2)
return []
write_message("... %s is an external URL" % url_direct_or_indirect, verbose=2)
urls_to_index = set()
for splash_re, url_re in iteritems(CFG_BIBINDEX_SPLASH_PAGES):
if re.match(splash_re, url_direct_or_indirect):
write_message("... %s is a splash page (%s)" % (url_direct_or_indirect, splash_re), verbose=2)
html = urllib2.urlopen(url_direct_or_indirect).read()
urls = get_links_in_html_page(html)
write_message("... found these URLs in %s splash page: %s" % (url_direct_or_indirect, ", ".join(urls)), verbose=3)
for url in urls:
if re.match(url_re, url):
write_message("... will index %s (matched by %s)" % (url, url_re), verbose=2)
urls_to_index.add(url)
if not urls_to_index:
urls_to_index.add(url_direct_or_indirect)
write_message("... will extract words from %s" % ', '.join(urls_to_index), verbose=2)
words = {}
for url in urls_to_index:
tmpdoc = download_url(url)
file_converter_logger = get_file_converter_logger()
old_logging_level = file_converter_logger.getEffectiveLevel()
if self.verbose > 3:
file_converter_logger.setLevel(logging.DEBUG)
try:
try:
tmptext = convert_file(tmpdoc, output_format='.txt')
text = open(tmptext).read()
os.remove(tmptext)
indexer = get_idx_indexer('fulltext')
if indexer != 'native':
if indexer == 'SOLR' and CFG_SOLR_URL:
solr_add_fulltext(None, text) # FIXME: use real record ID
if indexer == 'XAPIAN' and CFG_XAPIAN_ENABLED:
#xapian_add(None, 'fulltext', text) # FIXME: use real record ID
pass
# we are relying on an external information retrieval system
# to provide full-text indexing, so dispatch text to it and
# return nothing here:
tmpwords = []
else:
tmpwords = self.tokenize_for_words_default(text)
words.update(dict(map(lambda x: (x, 1), tmpwords)))
except Exception as e:
message = 'ERROR: it\'s impossible to correctly extract words from %s referenced by %s: %s' % (url, url_direct_or_indirect, e)
register_exception(prefix=message, alert_admin=True)
write_message(message, stream=sys.stderr)
finally:
os.remove(tmpdoc)
if self.verbose > 3:
file_converter_logger.setLevel(old_logging_level)
return words.keys()
except Exception as e:
message = 'ERROR: it\'s impossible to correctly extract words from %s: %s' % (url_direct_or_indirect, e)
register_exception(prefix=message, alert_admin=True)
write_message(message, stream=sys.stderr)
return []
def tokenize_for_words(self, phrase):
return self.get_words_from_fulltext(phrase)
def tokenize_for_pairs(self, phrase):
return []
def tokenize_for_phrases(self, phrase):
return []
| gpl-2.0 |
whitkirkchurch/baltimore | venv/lib/python2.7/site-packages/pip/wheel.py | 187 | 30186 | """
Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import
import compileall
import csv
import errno
import functools
import hashlib
import logging
import os
import os.path
import re
import shutil
import stat
import sys
import tempfile
import warnings
from base64 import urlsafe_b64encode
from email.parser import Parser
from pip._vendor.six import StringIO
import pip
from pip.download import path_to_url, unpack_url
from pip.exceptions import InvalidWheelFilename, UnsupportedWheel
from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME
from pip import pep425tags
from pip.utils import (
call_subprocess, ensure_dir, make_path_relative, captured_stdout,
rmtree)
from pip.utils.logging import indent_log
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor import pkg_resources
from pip._vendor.six.moves import configparser
wheel_ext = '.whl'
VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
class WheelCache(object):
"""A cache of wheels for future installs."""
def __init__(self, cache_dir, format_control):
"""Create a wheel cache.
:param cache_dir: The root of the cache.
:param format_control: A pip.index.FormatControl object to limit
binaries being read from the cache.
"""
self._cache_dir = os.path.expanduser(cache_dir) if cache_dir else None
self._format_control = format_control
def cached_wheel(self, link, package_name):
return cached_wheel(
self._cache_dir, link, self._format_control, package_name)
def _cache_for_link(cache_dir, link):
"""
Return a directory to store cached wheels in for link.
Because there are M wheels for any one sdist, we provide a directory
to cache them in, and then consult that directory when looking up
cache hits.
We only insert things into the cache if they have plausible version
numbers, so that we don't contaminate the cache with things that were not
unique. E.g. ./package might have dozens of installs done for it and build
a version of 0.0...and if we built and cached a wheel, we'd end up using
the same wheel even if the source has been edited.
:param cache_dir: The cache_dir being used by pip.
:param link: The link of the sdist for which this will cache wheels.
"""
# We want to generate an url to use as our cache key, we don't want to just
# re-use the URL because it might have other items in the fragment and we
# don't care about those.
key_parts = [link.url_without_fragment]
if link.hash_name is not None and link.hash is not None:
key_parts.append("=".join([link.hash_name, link.hash]))
key_url = "#".join(key_parts)
# Encode our key url with sha224, we'll use this because it has similar
# security properties to sha256, but with a shorter total output (and thus
# less secure). However the differences don't make a lot of difference for
# our use case here.
hashed = hashlib.sha224(key_url.encode()).hexdigest()
# We want to nest the directories some to prevent having a ton of top level
# directories where we might run out of sub directories on some FS.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
# Inside of the base location for cached wheels, expand our parts and join
# them all together.
return os.path.join(cache_dir, "wheels", *parts)
def cached_wheel(cache_dir, link, format_control, package_name):
if not cache_dir:
return link
if not link:
return link
if link.is_wheel:
return link
if not link.is_artifact:
return link
if not package_name:
return link
canonical_name = pkg_resources.safe_name(package_name).lower()
formats = pip.index.fmt_ctl_formats(format_control, canonical_name)
if "binary" not in formats:
return link
root = _cache_for_link(cache_dir, link)
try:
wheel_names = os.listdir(root)
except OSError as e:
if e.errno in (errno.ENOENT, errno.ENOTDIR):
return link
raise
candidates = []
for wheel_name in wheel_names:
try:
wheel = Wheel(wheel_name)
except InvalidWheelFilename:
continue
if not wheel.supported():
# Built for a different python/arch/etc
continue
candidates.append((wheel.support_index_min(), wheel_name))
if not candidates:
return link
candidates.sort()
path = os.path.join(root, candidates[0][1])
return pip.index.Link(path_to_url(path), trusted=True)
def rehash(path, algo='sha256', blocksize=1 << 20):
"""Return (hash, length) for path using hashlib.new(algo)"""
h = hashlib.new(algo)
length = 0
with open(path, 'rb') as f:
block = f.read(blocksize)
while block:
length += len(block)
h.update(block)
block = f.read(blocksize)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
return (digest, length)
def open_for_csv(name, mode):
if sys.version_info[0] < 3:
nl = {}
bin = 'b'
else:
nl = {'newline': ''}
bin = ''
return open(name, mode + bin, **nl)
def fix_script(path):
"""Replace #!python with #!/path/to/python
Return True if file was changed."""
# XXX RECORD hashes will need to be updated
if os.path.isfile(path):
with open(path, 'rb') as script:
firstline = script.readline()
if not firstline.startswith(b'#!python'):
return False
exename = sys.executable.encode(sys.getfilesystemencoding())
firstline = b'#!' + exename + os.linesep.encode("ascii")
rest = script.read()
with open(path, 'wb') as script:
script.write(firstline)
script.write(rest)
return True
dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?)
\.dist-info$""", re.VERBOSE)
def root_is_purelib(name, wheeldir):
"""
Return True if the extracted wheel in wheeldir should go into purelib.
"""
name_folded = name.replace("-", "_")
for item in os.listdir(wheeldir):
match = dist_info_re.match(item)
if match and match.group('name') == name_folded:
with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
for line in wheel:
line = line.lower().rstrip()
if line == "root-is-purelib: true":
return True
return False
def get_entrypoints(filename):
if not os.path.exists(filename):
return {}, {}
# This is done because you can pass a string to entry_points wrappers which
# means that they may or may not be valid INI files. The attempt here is to
# strip leading and trailing whitespace in order to make them valid INI
# files.
with open(filename) as fp:
data = StringIO()
for line in fp:
data.write(line.strip())
data.write("\n")
data.seek(0)
cp = configparser.RawConfigParser()
cp.readfp(data)
console = {}
gui = {}
if cp.has_section('console_scripts'):
console = dict(cp.items('console_scripts'))
if cp.has_section('gui_scripts'):
gui = dict(cp.items('gui_scripts'))
return console, gui
def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None,
pycompile=True, scheme=None, isolated=False):
"""Install a wheel"""
if not scheme:
scheme = distutils_scheme(
name, user=user, home=home, root=root, isolated=isolated
)
if root_is_purelib(name, wheeldir):
lib_dir = scheme['purelib']
else:
lib_dir = scheme['platlib']
info_dir = []
data_dirs = []
source = wheeldir.rstrip(os.path.sep) + os.path.sep
# Record details of the files moved
# installed = files copied from the wheel to the destination
# changed = files changed while installing (scripts #! line typically)
# generated = files newly generated during the install (script wrappers)
installed = {}
changed = set()
generated = []
# Compile all of the pyc files that we're going to be installing
if pycompile:
with captured_stdout() as stdout:
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
compileall.compile_dir(source, force=True, quiet=True)
logger.debug(stdout.getvalue())
def normpath(src, p):
return make_path_relative(src, p).replace(os.path.sep, '/')
def record_installed(srcfile, destfile, modified=False):
"""Map archive RECORD paths to installation RECORD paths."""
oldpath = normpath(srcfile, wheeldir)
newpath = normpath(destfile, lib_dir)
installed[oldpath] = newpath
if modified:
changed.add(destfile)
def clobber(source, dest, is_base, fixer=None, filter=None):
ensure_dir(dest) # common for the 'include' path
for dir, subdirs, files in os.walk(source):
basedir = dir[len(source):].lstrip(os.path.sep)
destdir = os.path.join(dest, basedir)
if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
continue
for s in subdirs:
destsubdir = os.path.join(dest, basedir, s)
if is_base and basedir == '' and destsubdir.endswith('.data'):
data_dirs.append(s)
continue
elif (is_base and
s.endswith('.dist-info') and
# is self.req.project_name case preserving?
s.lower().startswith(
req.project_name.replace('-', '_').lower())):
assert not info_dir, 'Multiple .dist-info directories'
info_dir.append(destsubdir)
for f in files:
# Skip unwanted files
if filter and filter(f):
continue
srcfile = os.path.join(dir, f)
destfile = os.path.join(dest, basedir, f)
# directory creation is lazy and after the file filtering above
# to ensure we don't install empty dirs; empty dirs can't be
# uninstalled.
ensure_dir(destdir)
# We use copyfile (not move, copy, or copy2) to be extra sure
# that we are not moving directories over (copyfile fails for
# directories) as well as to ensure that we are not copying
# over any metadata because we want more control over what
# metadata we actually copy over.
shutil.copyfile(srcfile, destfile)
# Copy over the metadata for the file, currently this only
# includes the atime and mtime.
st = os.stat(srcfile)
if hasattr(os, "utime"):
os.utime(destfile, (st.st_atime, st.st_mtime))
# If our file is executable, then make our destination file
# executable.
if os.access(srcfile, os.X_OK):
st = os.stat(srcfile)
permissions = (
st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
)
os.chmod(destfile, permissions)
changed = False
if fixer:
changed = fixer(destfile)
record_installed(srcfile, destfile, changed)
clobber(source, lib_dir, True)
assert info_dir, "%s .dist-info directory not found" % req
# Get the defined entry points
ep_file = os.path.join(info_dir[0], 'entry_points.txt')
console, gui = get_entrypoints(ep_file)
def is_entrypoint_wrapper(name):
# EP, EP.exe and EP-script.py are scripts generated for
# entry point EP by setuptools
if name.lower().endswith('.exe'):
matchname = name[:-4]
elif name.lower().endswith('-script.py'):
matchname = name[:-10]
elif name.lower().endswith(".pya"):
matchname = name[:-4]
else:
matchname = name
# Ignore setuptools-generated scripts
return (matchname in console or matchname in gui)
for datadir in data_dirs:
fixer = None
filter = None
for subdir in os.listdir(os.path.join(wheeldir, datadir)):
fixer = None
if subdir == 'scripts':
fixer = fix_script
filter = is_entrypoint_wrapper
source = os.path.join(wheeldir, datadir, subdir)
dest = scheme[subdir]
clobber(source, dest, False, fixer=fixer, filter=filter)
maker = ScriptMaker(None, scheme['scripts'])
# Ensure old scripts are overwritten.
# See https://github.com/pypa/pip/issues/1800
maker.clobber = True
# Ensure we don't generate any variants for scripts because this is almost
# never what somebody wants.
# See https://bitbucket.org/pypa/distlib/issue/35/
maker.variants = set(('', ))
# This is required because otherwise distlib creates scripts that are not
# executable.
# See https://bitbucket.org/pypa/distlib/issue/32/
maker.set_mode = True
# Simplify the script and fix the fact that the default script swallows
# every single stack trace.
# See https://bitbucket.org/pypa/distlib/issue/34/
# See https://bitbucket.org/pypa/distlib/issue/33/
def _get_script_text(entry):
return maker.script_template % {
"module": entry.prefix,
"import_name": entry.suffix.split(".")[0],
"func": entry.suffix,
}
maker._get_script_text = _get_script_text
maker.script_template = """# -*- coding: utf-8 -*-
import re
import sys
from %(module)s import %(import_name)s
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(%(func)s())
"""
# Special case pip and setuptools to generate versioned wrappers
#
# The issue is that some projects (specifically, pip and setuptools) use
# code in setup.py to create "versioned" entry points - pip2.7 on Python
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
# the wheel metadata at build time, and so if the wheel is installed with
# a *different* version of Python the entry points will be wrong. The
# correct fix for this is to enhance the metadata to be able to describe
# such versioned entry points, but that won't happen till Metadata 2.0 is
# available.
# In the meantime, projects using versioned entry points will either have
# incorrect versioned entry points, or they will not be able to distribute
# "universal" wheels (i.e., they will need a wheel per Python version).
#
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
# override the versioned entry points in the wheel and generate the
# correct ones. This code is purely a short-term measure until Metadat 2.0
# is available.
#
# To add the level of hack in this section of code, in order to support
# ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
# variable which will control which version scripts get installed.
#
# ENSUREPIP_OPTIONS=altinstall
# - Only pipX.Y and easy_install-X.Y will be generated and installed
# ENSUREPIP_OPTIONS=install
# - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
# that this option is technically if ENSUREPIP_OPTIONS is set and is
# not altinstall
# DEFAULT
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
# and easy_install-X.Y.
pip_script = console.pop('pip', None)
if pip_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
spec = 'pip = ' + pip_script
generated.extend(maker.make(spec))
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
spec = 'pip%s = %s' % (sys.version[:1], pip_script)
generated.extend(maker.make(spec))
spec = 'pip%s = %s' % (sys.version[:3], pip_script)
generated.extend(maker.make(spec))
# Delete any other versioned pip entry points
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
for k in pip_ep:
del console[k]
easy_install_script = console.pop('easy_install', None)
if easy_install_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
spec = 'easy_install = ' + easy_install_script
generated.extend(maker.make(spec))
spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script)
generated.extend(maker.make(spec))
# Delete any other versioned easy_install entry points
easy_install_ep = [
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
]
for k in easy_install_ep:
del console[k]
# Generate the console and GUI entry points specified in the wheel
if len(console) > 0:
generated.extend(
maker.make_multiple(['%s = %s' % kv for kv in console.items()])
)
if len(gui) > 0:
generated.extend(
maker.make_multiple(
['%s = %s' % kv for kv in gui.items()],
{'gui': True}
)
)
record = os.path.join(info_dir[0], 'RECORD')
temp_record = os.path.join(info_dir[0], 'RECORD.pip')
with open_for_csv(record, 'r') as record_in:
with open_for_csv(temp_record, 'w+') as record_out:
reader = csv.reader(record_in)
writer = csv.writer(record_out)
for row in reader:
row[0] = installed.pop(row[0], row[0])
if row[0] in changed:
row[1], row[2] = rehash(row[0])
writer.writerow(row)
for f in generated:
h, l = rehash(f)
writer.writerow((f, h, l))
for f in installed:
writer.writerow((installed[f], '', ''))
shutil.move(temp_record, record)
def _unique(fn):
@functools.wraps(fn)
def unique(*args, **kw):
seen = set()
for item in fn(*args, **kw):
if item not in seen:
seen.add(item)
yield item
return unique
# TODO: this goes somewhere besides the wheel module
@_unique
def uninstallation_paths(dist):
    """
    Yield every path that uninstalling *dist* must remove, based on its
    RECORD metadata (which never lists ``.pyc`` files).

    For each ``.py`` entry in RECORD, the sibling ``.pyc`` in the same
    directory is yielded as well; UninstallPathSet.add() takes care of the
    ``__pycache__`` variant.
    """
    from pip.utils import FakeFile  # circular import
    record_rows = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in record_rows:
        full_path = os.path.join(dist.location, row[0])
        yield full_path
        if not full_path.endswith('.py'):
            continue
        directory, filename = os.path.split(full_path)
        yield os.path.join(directory, filename[:-3] + '.pyc')
def wheel_version(source_dir):
    """
    Return the Wheel-Version of an extracted wheel, if possible.

    Otherwise, return False if we couldn't parse / extract it.

    :param source_dir: directory containing the extracted wheel's
        ``.dist-info`` metadata.
    :return: the version as a tuple of ints, e.g. ``(1, 0)``, or False.
    """
    try:
        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
        wheel_data = dist.get_metadata('WHEEL')
        wheel_data = Parser().parsestr(wheel_data)
        version = wheel_data['Wheel-Version'].strip()
        return tuple(map(int, version.split('.')))
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.  Any metadata problem (no dist found,
        # missing WHEEL file, absent header, unparsable version number)
        # still results in False as before.
        return False
def check_compatibility(version, name):
    """
    Raises errors or warns if called with an incompatible Wheel-Version.

    Pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if not version:
        raise UnsupportedWheel(
            "%s is in an unsupported or invalid wheel" % name
        )

    # A newer major series than we support: refuse outright.
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "%s's Wheel-Version (%s) is not compatible with this version "
            "of pip" % (name, '.'.join(map(str, version)))
        )
    # Same major series but a newer minor: proceed, with a warning.
    if version > VERSION_COMPATIBLE:
        logger.warning(
            'Installing from a newer Wheel-Version (%s)',
            '.'.join(map(str, version)),
        )
class Wheel(object):
    """A wheel file"""
    # TODO: maybe move the install code into this class

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        """
        Parse *filename* into its wheel-naming components.

        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        match = self.wheel_file_re.match(filename)
        if not match:
            raise InvalidWheelFilename(
                "%s is not a valid wheel filename." % filename
            )
        self.filename = filename
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.name = match.group('name').replace('_', '-')
        self.version = match.group('ver').replace('_', '-')
        self.pyversions = match.group('pyver').split('.')
        self.abis = match.group('abi').split('.')
        self.plats = match.group('plat').split('.')
        # Every (python, abi, platform) tag combination this file claims.
        self.file_tags = {
            (py, abi, plat)
            for py in self.pyversions
            for abi in self.abis
            for plat in self.plats
        }

    def support_index_min(self, tags=None):
        """
        Return the lowest index that one of the wheel's file_tag combinations
        achieves in the supported_tags list e.g. if there are 8 supported tags,
        and one of the file tags is first in the list, then return 0. Returns
        None if the wheel is not supported.
        """
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        positions = sorted(
            tags.index(tag) for tag in self.file_tags if tag in tags
        )
        return positions[0] if positions else None

    def supported(self, tags=None):
        """Is this wheel supported on this system?"""
        if tags is None:  # for mock
            tags = pep425tags.supported_tags
        return not set(tags).isdisjoint(self.file_tags)
class WheelBuilder(object):
    """Build wheels from a RequirementSet."""

    def __init__(self, requirement_set, finder, build_options=None,
                 global_options=None):
        """
        :param requirement_set: the RequirementSet whose members to build.
        :param finder: PackageFinder used while preparing requirements.
        :param build_options: extra args appended to ``setup.py bdist_wheel``.
        :param global_options: extra args inserted before the setup command.
        """
        self.requirement_set = requirement_set
        self.finder = finder
        self._cache_root = requirement_set._wheel_cache._cache_dir
        self._wheel_dir = requirement_set.wheel_download_dir
        self.build_options = build_options or []
        self.global_options = global_options or []

    def _build_one(self, req, output_dir):
        """Build one wheel.

        :return: The filename of the built wheel, or None if the build failed.
        """
        tempd = tempfile.mkdtemp('pip-wheel-')
        try:
            if self.__build_one(req, tempd):
                try:
                    wheel_name = os.listdir(tempd)[0]
                    wheel_path = os.path.join(output_dir, wheel_name)
                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
                    logger.info('Stored in directory: %s', output_dir)
                    return wheel_path
                except Exception:
                    # Narrowed from a bare ``except:`` so interrupts are not
                    # swallowed; any failure to relocate the built wheel is
                    # still reported as a build failure (None), as before.
                    return None
            return None
        finally:
            rmtree(tempd)

    def __build_one(self, req, tempd):
        # Invoke ``setup.py bdist_wheel`` in a subprocess, targeting tempd.
        # Returns True on success, False on any failure.
        base_args = [
            sys.executable, '-c',
            "import setuptools;__file__=%r;"
            "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), "
            "__file__, 'exec'))" % req.setup_py
        ] + list(self.global_options)

        logger.info('Running setup.py bdist_wheel for %s', req.name)
        logger.debug('Destination directory: %s', tempd)
        wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
            + self.build_options
        try:
            call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False)
            return True
        except Exception:
            # Narrowed from a bare ``except:``; the subprocess helper raises
            # on a non-zero exit, which we log and report as failure.
            logger.error('Failed building wheel for %s', req.name)
            return False

    def build(self, autobuilding=False):
        """Build wheels.

        :param autobuilding: when True, build into the wheel cache (skipping
            requirements that cannot be sanely cached) and swap each built
            wheel in for the sdist it was built from, in preparation for
            installation; when False, build everything into self._wheel_dir.
        :return: True if all the wheels built correctly.
        """
        assert self._wheel_dir or (autobuilding and self._cache_root)
        # unpack sdists and constructs req set
        self.requirement_set.prepare_files(self.finder)

        reqset = self.requirement_set.requirements.values()

        # Decide which requirements actually need a wheel built.
        buildset = []
        for req in reqset:
            if req.constraint:
                continue
            if req.is_wheel:
                if not autobuilding:
                    logger.info(
                        'Skipping %s, due to already being wheel.', req.name)
            elif req.editable:
                if not autobuilding:
                    logger.info(
                        'Skipping bdist_wheel for %s, due to being editable',
                        req.name)
            elif autobuilding and req.link and not req.link.is_artifact:
                # VCS checkout: nothing stable to cache against.
                pass
            elif autobuilding and not req.source_dir:
                pass
            else:
                if autobuilding:
                    link = req.link
                    base, ext = link.splitext()
                    if pip.index.egg_info_matches(base, None, link) is None:
                        # Doesn't look like a package - don't autobuild a wheel
                        # because we'll have no way to lookup the result sanely
                        continue
                    if "binary" not in pip.index.fmt_ctl_formats(
                            self.finder.format_control,
                            pkg_resources.safe_name(req.name).lower()):
                        logger.info(
                            "Skipping bdist_wheel for %s, due to binaries "
                            "being disabled for it.", req.name)
                        continue
                buildset.append(req)

        if not buildset:
            return True

        # Build the wheels.
        logger.info(
            'Building wheels for collected packages: %s',
            ', '.join([req.name for req in buildset]),
        )
        with indent_log():
            build_success, build_failure = [], []
            for req in buildset:
                if autobuilding:
                    output_dir = _cache_for_link(self._cache_root, req.link)
                    try:
                        ensure_dir(output_dir)
                    except OSError as e:
                        # warning() for consistency with the rest of the
                        # module (warn() is a deprecated logging alias).
                        logger.warning("Building wheel for %s failed: %s",
                                       req.name, e)
                        build_failure.append(req)
                        continue
                else:
                    output_dir = self._wheel_dir
                wheel_file = self._build_one(req, output_dir)
                if wheel_file:
                    build_success.append(req)
                    if autobuilding:
                        # XXX: This is mildly duplicative with prepare_files,
                        # but not close enough to pull out to a single common
                        # method.
                        # The code below assumes temporary source dirs -
                        # prevent it doing bad things.
                        if req.source_dir and not os.path.exists(os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                            raise AssertionError(
                                "bad source dir - missing marker")
                        # Delete the source we built the wheel from
                        req.remove_temporary_source()
                        # set the build directory again - name is known from
                        # the work prepare_files did.
                        req.source_dir = req.build_location(
                            self.requirement_set.build_dir)
                        # Update the link for this.
                        req.link = pip.index.Link(
                            path_to_url(wheel_file), trusted=True)
                        assert req.link.is_wheel
                        # extract the wheel into the dir
                        unpack_url(
                            req.link, req.source_dir, None, False,
                            session=self.requirement_set.session)
                else:
                    build_failure.append(req)

        # notify success/failure
        if build_success:
            logger.info(
                'Successfully built %s',
                ' '.join([req.name for req in build_success]),
            )
        if build_failure:
            logger.info(
                'Failed to build %s',
                ' '.join([req.name for req in build_failure]),
            )
        # Return True if all builds were successful
        return len(build_failure) == 0
| mit |
0x7678/youtube-dl | youtube_dl/extractor/gorillavid.py | 29 | 3921 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
int_or_none,
)
class GorillaVidIE(InfoExtractor):
    # One extractor covers a family of file hosters that share the same
    # frontend: the page layout, hidden-form handshake, and the jwplayer
    # config holding ``file:``/``image:`` are identical across the hosts
    # listed in _VALID_URL.
    IE_DESC = 'GorillaVid.in, daclips.in, movpod.in and fastvideo.in'
    _VALID_URL = r'''(?x)
        https?://(?P<host>(?:www\.)?
            (?:daclips\.in|gorillavid\.in|movpod\.in|fastvideo\.in))/
        (?:embed-)?(?P<id>[0-9a-zA-Z]+)(?:-[0-9]+x[0-9]+\.html)?
    '''
    # Matched against the downloaded page to detect removed/missing videos.
    _FILE_NOT_FOUND_REGEX = r'>(?:404 - )?File Not Found<'
    _TESTS = [{
        'url': 'http://gorillavid.in/06y9juieqpmi',
        'md5': '5ae4a3580620380619678ee4875893ba',
        'info_dict': {
            'id': '06y9juieqpmi',
            'ext': 'flv',
            'title': 'Rebecca Black My Moment Official Music Video Reaction-6GK87Rc8bzQ',
            'thumbnail': 're:http://.*\.jpg',
        },
    }, {
        'url': 'http://gorillavid.in/embed-z08zf8le23c6-960x480.html',
        'md5': 'c9e293ca74d46cad638e199c3f3fe604',
        'info_dict': {
            'id': 'z08zf8le23c6',
            'ext': 'mp4',
            'title': 'Say something nice',
            'thumbnail': 're:http://.*\.jpg',
        },
    }, {
        'url': 'http://daclips.in/3rso4kdn6f9m',
        'md5': '1ad8fd39bb976eeb66004d3a4895f106',
        'info_dict': {
            'id': '3rso4kdn6f9m',
            'ext': 'mp4',
            'title': 'Micro Pig piglets ready on 16th July 2009-bG0PdrCdxUc',
            'thumbnail': 're:http://.*\.jpg',
        }
    }, {
        # video with countdown timeout
        'url': 'http://fastvideo.in/1qmdn1lmsmbw',
        'md5': '8b87ec3f6564a3108a0e8e66594842ba',
        'info_dict': {
            'id': '1qmdn1lmsmbw',
            'ext': 'mp4',
            'title': 'Man of Steel - Trailer',
            'thumbnail': 're:http://.*\.jpg',
        },
    }, {
        'url': 'http://movpod.in/0wguyyxi1yca',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        # Always fetch the canonical (non-embed) page: it carries the hidden
        # form fields needed to request the page that embeds the media URL.
        webpage = self._download_webpage('http://%s/%s' % (mobj.group('host'), video_id), video_id)

        if re.search(self._FILE_NOT_FOUND_REGEX, webpage) is not None:
            raise ExtractorError('Video %s does not exist' % video_id, expected=True)

        # Collect all <input type="hidden" name=... value=...> pairs; they
        # form the POST body of the "continue to video" form.
        fields = dict(re.findall(r'''(?x)<input\s+
            type="hidden"\s+
            name="([^"]+)"\s+
            (?:id="[^"]+"\s+)?
            value="([^"]*)"
            ''', webpage))

        # NOTE(review): assumes the page always carries an 'op' hidden field;
        # a page without it would raise KeyError here — confirm intended.
        if fields['op'] == 'download1':
            # Some hosts impose a wait timer before the form may be posted;
            # honor it so the server accepts the request.
            countdown = int_or_none(self._search_regex(
                r'<span id="countdown_str">(?:[Ww]ait)?\s*<span id="cxc">(\d+)</span>\s*(?:seconds?)?</span>',
                webpage, 'countdown', default=None))
            if countdown:
                self._sleep(countdown, video_id)

            # Re-submit the hidden form to obtain the actual video page.
            post = compat_urllib_parse.urlencode(fields)

            req = compat_urllib_request.Request(url, post)
            req.add_header('Content-type', 'application/x-www-form-urlencoded')

            webpage = self._download_webpage(req, video_id, 'Downloading video page')

        # Title lives in an overlay span; fall back to Open Graph metadata.
        title = self._search_regex(
            r'style="z-index: [0-9]+;">([^<]+)</span>',
            webpage, 'title', default=None) or self._og_search_title(webpage)
        # Media URL and thumbnail come from the jwplayer setup object.
        video_url = self._search_regex(
            r'file\s*:\s*["\'](http[^"\']+)["\'],', webpage, 'file url')
        thumbnail = self._search_regex(
            r'image\s*:\s*["\'](http[^"\']+)["\'],', webpage, 'thumbnail', fatal=False)

        # These hosts expose a single quality only.
        formats = [{
            'format_id': 'sd',
            'url': video_url,
            'quality': 1,
        }]

        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail,
            'formats': formats,
        }
| unlicense |
jdemel/gnuradio | gr-blocks/python/blocks/qa_regenerate.py | 2 | 1902 | #!/usr/bin/env python
#
# Copyright 2007,2010,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, gr_unittest, blocks
class test_regenerate(gr_unittest.TestCase):
    """QA for blocks.regenerate_bb: every input '1' must be regenerated as
    extra pulses at a fixed sample period."""

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def _run_flowgraph(self, data, period, count):
        # Push `data` through regenerate_bb(period, count) and return the
        # sink's captured output.
        src = blocks.vector_source_b(data, False)
        regen = blocks.regenerate_bb(period, count)
        dst = blocks.vector_sink_b()
        self.tb.connect(src, regen)
        self.tb.connect(regen, dst)
        self.tb.run()
        return dst.data()

    def test_regen1(self):
        # Two triggers; period 5, two regenerated pulses per trigger.
        data = [0, 0, 0,
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        expected_result = [0, 0, 0,
                           1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0,
                           1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
        dst_data = self._run_flowgraph(data, 5, 2)
        self.assertEqual(expected_result, dst_data)

    def test_regen2(self):
        # Triggers at 9 and 99; period 10, three regenerated pulses each,
        # so pulses land at trigger, trigger+10, +20, +30.
        period, count = 10, 3
        data = 200 * [0]
        expected_result = 200 * [0]
        for trigger in (9, 99):
            data[trigger] = 1
            for k in range(count + 1):
                expected_result[trigger + k * period] = 1
        dst_data = self._run_flowgraph(data, period, count)
        self.assertEqual(expected_result, dst_data)
if __name__ == '__main__':
gr_unittest.run(test_regenerate, "test_regenerate.xml")
| gpl-3.0 |
mindnervestech/mnrp | addons/base_report_designer/plugin/openerp_report_designer/bin/script/AddAttachment.py | 384 | 11148 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import os
import uno
import unohelper
import xmlrpclib
import base64
from com.sun.star.task import XJobExecutor
if __name__<>"package":
from lib.gui import *
from lib.error import ErrorDialog
from lib.tools import *
from LoginTest import *
from lib.rpc import *
database="test"
uid = 3
class AddAttachment(unohelper.Base, XJobExecutor ):
    """OpenOffice job that uploads the current document (as OpenOffice .sxw
    or converted PDF) to the OpenERP server as an ``ir.attachment`` record.

    Python 2 / UNO code: relies on module-level globals (``database``,
    ``uid``, ``passwd``, ``url``, ``loginstatus``) set up by LoginTest and
    the lib.* helpers.
    """

    # Maps the label offered in the dialog to the file extension produced.
    Kind = {
        'PDF' : 'pdf',
        'OpenOffice': 'sxw',
    }

    def __init__(self, ctx):
        self.ctx = ctx
        self.module = "openerp_report"
        self.version = "0.1"
        LoginTest()
        if not loginstatus and __name__=="package":
            exit(1)
        self.aSearchResult = []
        desktop=getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo=oDoc2.getDocumentInfo()
        global passwd
        self.password = passwd
        global url
        self.sock=RPCSession(url)
        # User fields 2 and 3 hold the resource id and model of the record
        # this document was opened from; when both are set, the attachment
        # target is already known and only the type must be chosen.
        if docinfo.getUserFieldValue(2) <> "" and docinfo.getUserFieldValue(3) <> "":
            self.win = DBModalDialog(60, 50, 180, 70, "Add Attachment to Server")
            self.win.addFixedText("lblResourceType", 2 , 5, 100, 10, "Select Appropriate Resource Type:")
            self.win.addComboListBox("lstResourceType", -2, 25, 176, 15,True)
            self.win.addButton('btnOkWithoutInformation', -2 , -5, 25 , 15,'OK' ,actionListenerProc = self.btnOkWithoutInformation_clicked )
        else:
            # No record context: show the full dialog with model selection
            # and record search.
            self.win = DBModalDialog(60, 50, 180, 190, "Add Attachment to Server")
            self.win.addFixedText("lblModuleName",2 , 9, 42, 20, "Select Module:")
            self.win.addComboListBox("lstmodel", -2, 5, 134, 15,True)
            self.lstModel = self.win.getControl( "lstmodel" )
            # dModel maps a display label to an OpenERP model name.
            self.dModel = {}
            # Open a new connexion to the server
            ids = self.sock.execute(database, uid, self.password, 'ir.module.module', 'search', [('name','=','base_report_model'),('state', '=', 'installed')])
            if not len(ids):
                # If the module 'base_report_model' is not installed, use the default model
                self.dModel = {
                    "Partner":'res.partner',
                }
            else:
                # Otherwise populate the choices from base.report.model,
                # resolving each entry's model_id to its technical name.
                ids =self.sock.execute(database, uid, self.password, 'base.report.model' , 'search', [])
                res = self.sock.execute(database, uid, self.password, 'base.report.model' , 'read', ids, ['name','model_id'])
                models = self.sock.execute(database, uid, self.password, 'ir.model' , 'read', map(lambda x:x['model_id'][0], res), ['model'])
                models = dict(map(lambda x:(x['id'],x['model']), models))
                self.dModel = dict(map(lambda x: (x['name'],models[x['model_id'][0]]), res))
            for item in self.dModel.keys():
                self.lstModel.addItem(item, self.lstModel.getItemCount())
            self.win.addFixedText("lblSearchName",2 , 25, 60, 10, "Enter Search String:")
            self.win.addEdit("txtSearchName", 2, 35, 149, 15,)
            self.win.addButton('btnSearch', -2 , 35, 25 , 15,'Search' ,actionListenerProc = self.btnSearch_clicked )
            self.win.addFixedText("lblSearchRecord", 2 , 55, 60, 10, "Search Result:")
            self.win.addComboListBox("lstResource", -2, 65, 176, 70, False )
            self.lstResource = self.win.getControl( "lstResource" )
            self.win.addFixedText("lblResourceType", 2 , 137, 100, 20, "Select Appropriate Resource Type:")
            self.win.addComboListBox("lstResourceType", -2, 147, 176, 15,True )
            self.win.addButton('btnOkWithInformation', -2 , -5, 25 , 15,'OK' ,actionListenerProc = self.btnOkWithInformation_clicked )
        # Common to both dialog variants: attachment-type list and Cancel.
        self.lstResourceType = self.win.getControl( "lstResourceType" )
        for kind in self.Kind.keys():
            self.lstResourceType.addItem( kind, self.lstResourceType.getItemCount() )
        self.win.addButton('btnCancel', -2 - 27 , -5 , 30 , 15, 'Cancel' ,actionListenerProc = self.btnCancel_clicked )
        self.win.doModalDialog("lstResourceType", self.Kind.keys()[0])

    def btnSearch_clicked(self, oActionEvent):
        """Run name_search on the selected model and refill the result list."""
        modelSelectedItem = self.win.getListBoxSelectedItem("lstmodel")
        if modelSelectedItem == "":
            return
        desktop=getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo=oDoc2.getDocumentInfo()
        self.aSearchResult =self.sock.execute( database, uid, self.password, self.dModel[modelSelectedItem], 'name_search', self.win.getEditText("txtSearchName"))
        self.win.removeListBoxItems("lstResource", 0, self.win.getListBoxItemCount("lstResource"))
        if self.aSearchResult == []:
            ErrorDialog("No search result found.", "", "Search Error.")
            return
        for result in self.aSearchResult:
            # result is a (id, display_name) pair from name_search;
            # the id is stored as the list item's data.
            self.lstResource.addItem(result[1],result[0])

    def _send_attachment(self, name, data, res_model, res_id):
        """Create the ir.attachment record on the server and return its id."""
        desktop = getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo = oDoc2.getDocumentInfo()
        params = {
            'name': name,
            'datas': base64.encodestring( data ),
            'datas_fname': name,
            'res_model' : res_model,
            'res_id' : int(res_id),
        }
        return self.sock.execute( database, uid, self.password, 'ir.attachment', 'create', params )

    def send_attachment(self, model, resource_id):
        """Read the current document (converting to PDF if requested) and
        attach it to record *resource_id* of *model*.

        Returns the new attachment id, or None on failure."""
        desktop = getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo = oDoc2.getDocumentInfo()
        if oDoc2.getURL() == "":
            ErrorDialog("You should save your file.", "", "Saving Error.")
            return None
        url = oDoc2.getURL()
        if self.Kind[self.win.getListBoxSelectedItem("lstResourceType")] == "pdf":
            # doc2pdf takes/returns file:// URLs; strip the scheme prefix.
            url = self.doc2pdf(url[7:])
            if url == None:
                ErrorDialog( "Problem in creating PDF.", "", "PDF Error.")
                return None
        url = url[7:]
        data = read_data_from_file( get_absolute_file_path( url ) )
        return self._send_attachment( os.path.basename( url ), data, model, resource_id )

    def btnOkWithoutInformation_clicked(self, oActionEvent):
        """OK handler for the short dialog: target record comes from the
        document's user fields (model in field 3, id in field 2)."""
        desktop = getDesktop()
        oDoc2 = desktop.getCurrentComponent()
        docinfo = oDoc2.getDocumentInfo()
        if self.win.getListBoxSelectedItem("lstResourceType") == "":
            ErrorDialog("You have to select a resource type.", "", "Selection Error." )
            return
        res = self.send_attachment( docinfo.getUserFieldValue(3), docinfo.getUserFieldValue(2) )
        self.win.endExecute()

    def btnOkWithInformation_clicked(self, oActionEvent):
        """OK handler for the full dialog: target record comes from the
        model list and the selected search result."""
        if self.win.getListBoxSelectedItem("lstResourceType") == "":
            ErrorDialog( "You have to select a resource type.", "", "Selection Error." )
            return
        if self.win.getListBoxSelectedItem("lstResource") == "" or self.win.getListBoxSelectedItem("lstmodel") == "":
            ErrorDialog("You have to select Model and Resource.", "", "Selection Error.")
            return
        # Map the selected display name back to its record id.
        resourceid = None
        for s in self.aSearchResult:
            if s[1] == self.win.getListBoxSelectedItem("lstResource"):
                resourceid = s[0]
                break
        if resourceid == None:
            ErrorDialog("No resource is selected.", "", "Resource Error." )
            return
        res = self.send_attachment( self.dModel[self.win.getListBoxSelectedItem('lstmodel')], resourceid )
        self.win.endExecute()

    def btnCancel_clicked(self, oActionEvent):
        self.win.endExecute()

    def doc2pdf(self, strFile):
        """Export *strFile* to PDF next to the original and return the
        resulting file:// URL, or None if no suitable export filter exists."""
        oDoc = None
        strFilterSubName = ''
        strUrl = convertToURL( strFile )
        desktop = getDesktop()
        # Load the document invisibly so export does not flash a window.
        oDoc = desktop.loadComponentFromURL( strUrl, "_blank", 0, Array(self._MakePropertyValue("Hidden",True)))
        if oDoc:
            strFilterSubName = ""
            # select appropriate filter
            if oDoc.supportsService("com.sun.star.presentation.PresentationDocument"):
                strFilterSubName = "impress_pdf_Export"
            elif oDoc.supportsService("com.sun.star.sheet.SpreadsheetDocument"):
                strFilterSubName = "calc_pdf_Export"
            elif oDoc.supportsService("com.sun.star.text.WebDocument"):
                strFilterSubName = "writer_web_pdf_Export"
            elif oDoc.supportsService("com.sun.star.text.GlobalDocument"):
                strFilterSubName = "writer_globaldocument_pdf_Export"
            elif oDoc.supportsService("com.sun.star.text.TextDocument"):
                strFilterSubName = "writer_pdf_Export"
            elif oDoc.supportsService("com.sun.star.drawing.DrawingDocument"):
                strFilterSubName = "draw_pdf_Export"
            elif oDoc.supportsService("com.sun.star.formula.FormulaProperties"):
                strFilterSubName = "math_pdf_Export"
            elif oDoc.supportsService("com.sun.star.chart.ChartDocument"):
                strFilterSubName = "chart_pdf_Export"
            else:
                pass
            filename = len(strFilterSubName) > 0 and convertToURL( os.path.splitext( strFile )[0] + ".pdf" ) or None
            if len(strFilterSubName) > 0:
                oDoc.storeToURL( filename, Array(self._MakePropertyValue("FilterName", strFilterSubName ),self._MakePropertyValue("CompressMode", "1" )))
            oDoc.close(True)
        # Can be None if len(strFilterSubName) <= 0
        # NOTE(review): if the document failed to load (oDoc falsy),
        # ``filename`` is never assigned and this raises NameError —
        # looks like a latent bug; confirm whether load can fail here.
        return filename

    def _MakePropertyValue(self, cName="", uValue=u""):
        """Build a com.sun.star.beans.PropertyValue struct for UNO calls."""
        oPropertyValue = createUnoStruct( "com.sun.star.beans.PropertyValue" )
        if cName:
            oPropertyValue.Name = cName
        if uValue:
            oPropertyValue.Value = uValue
        return oPropertyValue
if __name__<>"package" and __name__=="__main__":
AddAttachment(None)
elif __name__=="package":
g_ImplementationHelper.addImplementation( AddAttachment, "org.openoffice.openerp.report.addattachment", ("com.sun.star.task.Job",),)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
alexus37/AugmentedRealityChess | pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/pip/commands/install.py | 342 | 12694 | import os
import sys
import tempfile
import shutil
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.log import logger
from pip.locations import (src_prefix, virtualenv_no_global, distutils_scheme,
build_prefix)
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.exceptions import InstallationError, CommandError, PreviousBuildDirError
from pip import cmdoptions
class InstallCommand(Command):
"""
Install packages from:
- PyPI (and other indexes) using requirement specifiers.
- VCS project urls.
- Local project directories.
- Local or remote source archives.
pip also supports installing from "requirements files", which provide
an easy way to specify a whole environment to be installed.
"""
name = 'install'
usage = """
%prog [options] <requirement specifier> ...
%prog [options] -r <requirements file> ...
%prog [options] [-e] <vcs project url> ...
%prog [options] [-e] <local project path> ...
%prog [options] <archive url/path> ..."""
summary = 'Install packages.'
bundle = False
    def __init__(self, *args, **kw):
        """Register the ``pip install`` command-line options.

        Command-specific options go into ``self.cmd_opts``; the shared
        index options (``--index-url`` etc.) are appended as a group from
        ``cmdoptions`` at the end.
        """
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        # --- requirement sources -----------------------------------------
        cmd_opts.add_option(
            '-e', '--editable',
            dest='editables',
            action='append',
            default=[],
            metavar='path/url',
            help='Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.')

        cmd_opts.add_option(cmdoptions.requirements.make())
        cmd_opts.add_option(cmdoptions.build_dir.make())

        # --- destination / download control ------------------------------
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>.')

        cmd_opts.add_option(
            '-d', '--download', '--download-dir', '--download-directory',
            dest='download_dir',
            metavar='dir',
            default=None,
            help="Download packages into <dir> instead of installing them, regardless of what's already installed.")

        cmd_opts.add_option(cmdoptions.download_cache.make())

        cmd_opts.add_option(
            '--src', '--source', '--source-dir', '--source-directory',
            dest='src_dir',
            metavar='dir',
            default=src_prefix,
            help='Directory to check out editable projects into. '
            'The default in a virtualenv is "<venv path>/src". '
            'The default for global installs is "<current dir>/src".')

        # --- upgrade / reinstall behavior --------------------------------
        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all packages to the newest available version. '
            'This process is recursive regardless of whether a dependency is already satisfied.')

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='When upgrading, reinstall all packages even if they are '
            'already up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.no_deps.make())

        # --- deprecated two-phase install flags --------------------------
        cmd_opts.add_option(
            '--no-install',
            dest='no_install',
            action='store_true',
            help="DEPRECATED. Download and unpack all packages, but don't actually install them.")

        cmd_opts.add_option(
            '--no-download',
            dest='no_download',
            action="store_true",
            help="DEPRECATED. Don't download any packages, just install the ones already downloaded "
            "(completes an install run with --no-install).")

        cmd_opts.add_option(cmdoptions.install_options.make())
        cmd_opts.add_option(cmdoptions.global_options.make())

        # --- install scheme / layout -------------------------------------
        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help='Install using the user scheme.')

        cmd_opts.add_option(
            '--egg',
            dest='as_egg',
            action='store_true',
            help="Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)")

        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root directory.")

        # --compile defaults to True; --no-compile flips the same dest.
        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile py files to pyc",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile py files to pyc",
        )

        cmd_opts.add_option(cmdoptions.use_wheel.make())
        cmd_opts.add_option(cmdoptions.no_use_wheel.make())

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help="Include pre-release and development versions. By default, pip only finds stable versions.")

        cmd_opts.add_option(cmdoptions.no_clean.make())

        # Shared index options are inserted ahead of the command options.
        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)
def _build_package_finder(self, options, index_urls, session):
"""
Create a package finder appropriate to this install command.
This method is meant to be overridden by subclasses, not
called directly.
"""
return PackageFinder(find_links=options.find_links,
index_urls=index_urls,
use_wheel=options.use_wheel,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=
options.process_dependency_links,
session=session,
)
def run(self, options, args):
    """Resolve, download and install the requirements given on the command line.

    Returns the populated RequirementSet on success; returns None early
    when no requirements were supplied.
    """
    # --no-install/--no-download/--build/--no-clean exist only for
    # backwards compatibility (see pip issue #906).
    if (
        options.no_install or
        options.no_download or
        (options.build_dir != build_prefix) or
        options.no_clean
    ):
        logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
            'and --no-clean are deprecated. See https://github.com/pypa/pip/issues/906.')
    # "pip install --download DIR" means download-only, ignoring anything
    # already installed.
    if options.download_dir:
        options.no_install = True
        options.ignore_installed = True
    options.build_dir = os.path.abspath(options.build_dir)
    options.src_dir = os.path.abspath(options.src_dir)
    install_options = options.install_options or []
    if options.use_user_site:
        if virtualenv_no_global():
            raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
        install_options.append('--user')
    temp_target_dir = None
    if options.target_dir:
        # --target installs into a scratch home dir first; the payload is
        # moved into the real target directory at the end of this method.
        options.ignore_installed = True
        temp_target_dir = tempfile.mkdtemp()
        options.target_dir = os.path.abspath(options.target_dir)
        if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
            raise CommandError("Target path exists but is not a directory, will not continue.")
        install_options.append('--home=' + temp_target_dir)
    global_options = options.global_options or []
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
        index_urls = []
    if options.use_mirrors:
        logger.deprecated("1.7",
                    "--use-mirrors has been deprecated and will be removed"
                    " in the future. Explicit uses of --index-url and/or "
                    "--extra-index-url is suggested.")
    if options.mirrors:
        logger.deprecated("1.7",
                    "--mirrors has been deprecated and will be removed in "
                    " the future. Explicit uses of --index-url and/or "
                    "--extra-index-url is suggested.")
        # Deprecated mirrors are simply treated as extra index URLs.
        index_urls += options.mirrors
    session = self._build_session(options)
    finder = self._build_package_finder(options, index_urls, session)
    requirement_set = RequirementSet(
        build_dir=options.build_dir,
        src_dir=options.src_dir,
        download_dir=options.download_dir,
        download_cache=options.download_cache,
        upgrade=options.upgrade,
        as_egg=options.as_egg,
        ignore_installed=options.ignore_installed,
        ignore_dependencies=options.ignore_dependencies,
        force_reinstall=options.force_reinstall,
        use_user_site=options.use_user_site,
        target_dir=temp_target_dir,
        session=session,
        pycompile=options.compile,
    )
    # Collect requirements from positional args, -e/--editable flags and
    # -r/--requirement files, in that order.
    for name in args:
        requirement_set.add_requirement(
            InstallRequirement.from_line(name, None))
    for name in options.editables:
        requirement_set.add_requirement(
            InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
    for filename in options.requirements:
        for req in parse_requirements(filename, finder=finder, options=options, session=session):
            requirement_set.add_requirement(req)
    if not requirement_set.has_requirements:
        # Nothing to do: print a hint (mentioning --find-links when given)
        # and bail out without error.
        opts = {'name': self.name}
        if options.find_links:
            msg = ('You must give at least one requirement to %(name)s '
                   '(maybe you meant "pip %(name)s %(links)s"?)' %
                   dict(opts, links=' '.join(options.find_links)))
        else:
            msg = ('You must give at least one requirement '
                   'to %(name)s (see "pip help %(name)s")' % opts)
        logger.warn(msg)
        return
    try:
        if not options.no_download:
            requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
        else:
            requirement_set.locate_files()
        if not options.no_install and not self.bundle:
            requirement_set.install(install_options, global_options, root=options.root_path)
            installed = ' '.join([req.name for req in
                                  requirement_set.successfully_installed])
            if installed:
                logger.notify('Successfully installed %s' % installed)
        elif not self.bundle:
            downloaded = ' '.join([req.name for req in
                                   requirement_set.successfully_downloaded])
            if downloaded:
                logger.notify('Successfully downloaded %s' % downloaded)
        elif self.bundle:
            requirement_set.create_bundle(self.bundle_filename)
            logger.notify('Created bundle in %s' % self.bundle_filename)
    except PreviousBuildDirError:
        # Keep the partially-built directory around for inspection.
        options.no_clean = True
        raise
    finally:
        # Clean up
        if (not options.no_clean) and ((not options.no_install) or options.download_dir):
            requirement_set.cleanup_files(bundle=self.bundle)
    if options.target_dir:
        # Move the staged --target install from the temp home dir into the
        # requested directory, then drop the scratch dir.
        if not os.path.exists(options.target_dir):
            os.makedirs(options.target_dir)
        lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
        for item in os.listdir(lib_dir):
            shutil.move(
                os.path.join(lib_dir, item),
                os.path.join(options.target_dir, item)
            )
        shutil.rmtree(temp_target_dir)
    return requirement_set
| mit |
JRock007/boxxy | dist/Boxxy server.app/Contents/Resources/lib/python2.7/numpy/polynomial/tests/test_printing.py | 208 | 2004 | from __future__ import division, absolute_import, print_function
import numpy.polynomial as poly
from numpy.testing import TestCase, run_module_suite, assert_
class test_str(TestCase):
    """String-conversion tests for the polynomial convenience classes.

    Fixed: the original assertions called ``assert_(res, tgt)``, which only
    checks the truthiness of ``res`` (``tgt`` becomes the failure message),
    so the expected strings were never actually compared.  Use
    ``self.assertEqual`` so a mismatch fails the test.  The no-op check
    also hid a copy/paste slip: the Chebyshev test expected the Legendre
    prefix 'leg'; corrected to 'cheb'.

    NOTE(review): the expected strings assume this numpy version's
    '<prefix>([c0, c1])' formatting -- confirm against the numpy release
    in use before relying on the exact output.
    """
    def test_polynomial_str(self):
        res = str(poly.Polynomial([0, 1]))
        tgt = 'poly([0., 1.])'
        self.assertEqual(res, tgt)

    def test_chebyshev_str(self):
        res = str(poly.Chebyshev([0, 1]))
        # was 'leg([0., 1.])' -- copied from the Legendre test
        tgt = 'cheb([0., 1.])'
        self.assertEqual(res, tgt)

    def test_legendre_str(self):
        res = str(poly.Legendre([0, 1]))
        tgt = 'leg([0., 1.])'
        self.assertEqual(res, tgt)

    def test_hermite_str(self):
        res = str(poly.Hermite([0, 1]))
        tgt = 'herm([0., 1.])'
        self.assertEqual(res, tgt)

    def test_hermiteE_str(self):
        res = str(poly.HermiteE([0, 1]))
        tgt = 'herme([0., 1.])'
        self.assertEqual(res, tgt)

    def test_laguerre_str(self):
        res = str(poly.Laguerre([0, 1]))
        tgt = 'lag([0., 1.])'
        self.assertEqual(res, tgt)
class test_repr(TestCase):
    """repr() tests for the polynomial convenience classes.

    Fixed: ``assert_(res, tgt)`` never compared ``res`` to ``tgt`` (it is
    a truthiness check with ``tgt`` used as the message); replaced with
    ``self.assertEqual``.  The first two methods were also renamed from
    ``*_str`` to ``*_repr`` to match their siblings and reflect what they
    actually test.

    NOTE(review): the expected strings assume this numpy version's
    'Name([coef], [domain], [window])' formatting -- confirm against the
    numpy release in use.
    """
    def test_polynomial_repr(self):
        res = repr(poly.Polynomial([0, 1]))
        tgt = 'Polynomial([0., 1.])'
        self.assertEqual(res, tgt)

    def test_chebyshev_repr(self):
        res = repr(poly.Chebyshev([0, 1]))
        tgt = 'Chebyshev([0., 1.], [-1., 1.], [-1., 1.])'
        self.assertEqual(res, tgt)

    def test_legendre_repr(self):
        res = repr(poly.Legendre([0, 1]))
        tgt = 'Legendre([0., 1.], [-1., 1.], [-1., 1.])'
        self.assertEqual(res, tgt)

    def test_hermite_repr(self):
        res = repr(poly.Hermite([0, 1]))
        tgt = 'Hermite([0., 1.], [-1., 1.], [-1., 1.])'
        self.assertEqual(res, tgt)

    def test_hermiteE_repr(self):
        res = repr(poly.HermiteE([0, 1]))
        tgt = 'HermiteE([0., 1.], [-1., 1.], [-1., 1.])'
        self.assertEqual(res, tgt)

    def test_laguerre_repr(self):
        res = repr(poly.Laguerre([0, 1]))
        tgt = 'Laguerre([0., 1.], [0., 1.], [0., 1.])'
        self.assertEqual(res, tgt)
# Allow the module to be executed directly as a test script.
if __name__ == "__main__":
    run_module_suite()
| mit |
ionelmc/python-cogen | tests/test_socket.py | 4 | 8908 | __doc_all__ = []
import unittest
import random
import threading
import socket
import time
import sys
import errno
import exceptions
import datetime
import traceback
import thread
from cStringIO import StringIO
from cogen.common import *
from base import priorities, proactors_available
from cogen.core.coroutines import debug_coroutine
class SocketTest_MixIn:
    """Socket-layer integration tests for cogen proactors (Python 2).

    Mixed into concrete TestCase classes generated at module bottom, which
    supply ``self.prio``, ``self.poller`` and ``self.run_first``.  Each test
    runs a cogen Scheduler in a background thread while the test's main
    thread drives plain blocking sockets against it; the many ``sleep``
    calls give the scheduler thread time to make progress.
    """
    # Sockets opened during a test; closed again in tearDown.
    sockets = []

    def setUp(self):
        # Any exception raised inside the scheduler thread is recorded here
        # so the main thread can fail the test afterwards.
        self.thread_exception = None
        # Random high port to avoid collisions between test runs.
        self.local_addr = ('localhost', random.randint(10000,64000))
        if self.run_first is None:
            self.m = Scheduler( default_priority=self.prio, proactor=self.poller,
                                proactor_resolution=0.01)
        else:
            self.m = Scheduler( default_priority=self.prio, proactor=self.poller,
                                proactor_resolution=0.01,
                                proactor_multiplex_first=self.run_first)
        def run():
            try:
                time.sleep(1)
                self.m.run()
            except:
                import traceback
                traceback.print_exc()
                # NOTE(review): stores the sys.exc_info function object, not
                # sys.exc_info(); truthiness still flags the failure but the
                # stored value carries no traceback -- confirm intent.
                self.thread_exception = sys.exc_info
        self.m_run = threading.Thread(target=run)

    def tearDown(self):
        # Close everything the test registered and drop the scheduler;
        # gc.collect() forces cleanup of lingering socket wrappers.
        for s in self.sockets:
            s.close()
        self.sockets = []
        del self.m
        import gc; gc.collect()

    def test_proper_err_cleanup(self):
        # A send on a connection closed by the peer must raise SocketError
        # and leave the proactor clean enough for a follow-up connect.
        @coroutine
        def foo():
            yield events.Sleep(0.2)
            s = sockets.Socket()
            yield s.connect(self.local_addr)
            s.settimeout(0.01)
            yield events.Sleep(0.2)
            try:
                yield s.send("aaaaaaaa")
                yield s.send("bbbbbbbb") #should throw a EHUP or something in the mp
            except sockets.SocketError, e:
                #~ import traceback
                #~ traceback.print_exc()
                pass
            #test for proper cleanup
            x = sockets.Socket()
            x.settimeout(0.1)
            yield x.connect(self.local_addr)
        self.m.add(foo)
        self.sock = socket.socket()
        self.sock.bind(self.local_addr)
        self.sock.listen(1)
        self.m_run.start()
        conn, addr = self.sock.accept()
        #~ conn.shutdown(socket.SHUT_RDWR)
        conn.close()
        self.m_run.join()
        self.failIf(self.thread_exception)

    def test_read_lines(self):
        # Exercises file-like readline() on a coroutine socket, including
        # partial lines, size-limited reads and queued pending reads.
        self.waitobj = None
        @coroutine
        def reader():
            srv = sockets.Socket()
            self.sockets.append(srv)
            srv.setblocking(0)
            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            srv.bind(self.local_addr)
            srv.listen(0)
            conn, addr = (yield srv.accept(prio=self.prio))
            fh = conn.makefile()
            self.line1 = yield fh.readline(1024, prio=self.prio)
            self.line2 = yield fh.readline(512, prio=self.prio)
            self.line3 = yield fh.readline(1512, prio=self.prio)
            # eat up the remaining data waiting on socket
            y1 = fh.readline(1024, prio=self.prio)
            y2 = fh.readline(1024, prio=self.prio)
            y3 = fh.readline(1024, prio=self.prio)
            a1 = yield y1
            a2 = yield y2
            a3 = yield y3
            self.recvobj2 = (a1,a2,a3)
            #~ srv.close()
            self.m.shutdown()
        coro = self.m.add(reader)
        self.m_run.start()
        time.sleep(1.5)
        sock = socket.socket()
        sock.connect(self.local_addr)
        # No newline yet: the reader coroutine must stay blocked.
        sock.send("X"*512)
        time.sleep(0.5)
        self.assert_(coro not in self.m.active)
        sock.send("\n")
        time.sleep(0.5)
        self.assert_(len(self.m.proactor)==1)
        #~ self.assert_(self.waitobj.buff is self.recvobj)
        self.assertEqual(self.line1, "X"*512+"\n")
        time.sleep(0.5)
        # 1024 X's with no newline: readline(512) returns a truncated line.
        sock.send("X"*1024)
        time.sleep(1.5)
        self.assertEqual(self.line2, "X"*512)
        sock.send("\n")
        time.sleep(0.5)
        a_line = "X"*64+"\n"
        sock.send(a_line*3)
        self.m_run.join()
        self.assertEqual(self.recvobj2, (a_line,a_line,a_line))
        self.assertEqual(len(self.m.proactor), 0)
        self.assertEqual(len(self.m.active), 0)
        self.failIf(self.m_run.isAlive())

    def test_read_all(self):
        # Recv returns at most the requested chunk; RecvAll must gather the
        # remainder of a 1 MiB payload exactly.
        @coroutine
        def reader():
            srv = sockets.Socket()
            self.sockets.append(srv)
            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            srv.bind(self.local_addr)
            srv.listen(0)
            conn, addr = yield sockets.Accept(srv, prio=self.prio)
            self.recvobj = yield sockets.Recv(conn, 1024*4, prio=self.prio)
            self.recvobj_all = yield sockets.RecvAll(conn, 1024**2-1024*4, prio=self.prio)
            #~ srv.close()
            self.m.shutdown()
        coro = self.m.add(reader)
        self.m_run.start()
        time.sleep(1.5)
        sock = socket.socket()
        self.sockets.append(sock)
        sock.connect(self.local_addr)
        sent = 0
        length = 1024**2
        buff = "X"*length
        # Drip-feed the payload; send() may accept less than asked.
        while sent<length:
            time.sleep(0.1)
            sent += sock.send(buff[sent:])
        self.m_run.join()
        self.assert_(len(self.recvobj)<=1024*4)
        self.assertEqual(len(self.recvobj_all)+len(self.recvobj),1024**2)
        self.assertEqual(len(self.m.proactor), 0)
        self.assertEqual(len(self.m.active), 0)
        self.failIf(self.m_run.isAlive())

    def test_write_all(self):
        # Send reports a (possibly partial) byte count; SendAll must push the
        # whole buffer.  The total drained by the plain socket must match.
        @coroutine
        def writer():
            try:
                cli = sockets.Socket()
                self.sockets.append(cli)
                conn = yield sockets.Connect(cli, self.local_addr, timeout=0.5, prio=self.prio)
                self.writeobj = yield sockets.Send(conn, 'X'*(1024**2), prio=self.prio)
                self.writeobj_all = yield sockets.SendAll(conn, 'Y'*(1024**2), prio=self.prio)
                self.sockets.append(conn)
                self.sockets.append(cli)
            except:
                # Failures inside the coroutine can't fail the test directly;
                # interrupt the main thread so it reports them.
                traceback.print_exc()
                thread.interrupt_main()
        try:
            srv = socket.socket()
            self.sockets.append(srv)
            srv.setblocking(1)
            srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            srv.bind(self.local_addr)
            srv.listen(0)
            coro = self.m.add(writer)
            # Start the scheduler thread slightly delayed, off-thread.
            thread.start_new_thread(lambda: time.sleep(0.3) or self.m_run.start(), ())
            while 1:
                try:
                    cli, addr = srv.accept()
                    break
                except socket.error, exc:
                    if exc[0] in (errno.EAGAIN, errno.EWOULDBLOCK):
                        continue
                    else:
                        raise
            self.sockets.append(cli)
            time.sleep(0.2)
            cli.setblocking(1)
            buff = cli.recv(1024*2)
            cli.setblocking(0)
            time.sleep(0.5)
            total = len(buff)
            # Drain whatever the coroutine managed to send, non-blocking.
            while len(buff):
                time.sleep(0.01)
                try:
                    buff = cli.recv(1024**2*10)
                    total += len(buff)
                except socket.error, exc:
                    if exc[0] in (errno.EAGAIN, errno.EWOULDBLOCK):
                        break
                    else:
                        raise
            self.assertEqual(self.writeobj+self.writeobj_all, total)
            self.assertEqual(len(self.m.proactor), 0)
            self.assertEqual(len(self.m.active), 0)
            self.failIf(self.m_run.isAlive())
        except KeyboardInterrupt:
            self.failIf("Interrupted from the coroutine, something failed.")
# Generate one concrete TestCase class per (proactor, priority) combination
# -- and, where the proactor supports it, per multiplex ordering -- then
# inject them into the module namespace so unittest discovery picks them up.
for poller_cls in proactors_available:
    for prio_mixin in priorities:
        if poller_cls.supports_multiplex_first:
            for run_first in (True, False):
                name = 'SocketTest_%s_%s_%s' % (prio_mixin.__name__, poller_cls.__name__, run_first and 'RunFirst' or 'PollFirst')
                globals()[name] = type(
                    name, (SocketTest_MixIn, prio_mixin, unittest.TestCase),
                    {'poller':poller_cls, 'run_first':run_first}
                )
        else:
            name = 'SocketTest_%s_%s' % (prio_mixin.__name__, poller_cls.__name__)
            globals()[name] = type(
                name, (SocketTest_MixIn, prio_mixin, unittest.TestCase),
                {'poller':poller_cls, 'run_first':None}
            )
if __name__ == "__main__":
    # Force verbose unittest output when run directly.
    sys.argv.insert(1, '-v')
    unittest.main()
| mit |
igavrilov/django-tastypie | tests/testcases.py | 13 | 3625 | import socket
import threading
from django.core.handlers.wsgi import WSGIHandler
from django.core.management import call_command
from django.core.servers import basehttp
from django.db import connections
from django.test.testcases import TransactionTestCase, TestCase
class StoppableWSGIServer(basehttp.WSGIServer):
    """A WSGIServer whose accept() times out quickly.

    The short listening-socket timeout lets the owning server thread check
    its stop event between requests instead of blocking in accept() forever.
    """

    def server_bind(self):
        """Bind as usual, then apply a one-second accept timeout."""
        basehttp.WSGIServer.server_bind(self)
        self.socket.settimeout(1)

    def get_request(self):
        """Accept one connection and clear its per-connection timeout.

        socket.timeout is re-raised to the caller, which treats it as a
        normal "no request arrived this tick" condition.
        """
        try:
            client_sock, client_addr = self.socket.accept()
        except socket.timeout:
            raise
        client_sock.settimeout(None)
        return (client_sock, client_addr)
class TestServerThread(threading.Thread):
    """Thread for running a http server while tests are running."""

    def __init__(self, address, port):
        self.address = address
        self.port = port
        # Signalled by join() to ask the serve loop to exit.
        self._stopevent = threading.Event()
        # Set once the server is bound (or failed); callers wait on it.
        self.started = threading.Event()
        self.error = None
        super(TestServerThread, self).__init__()

    def run(self):
        """Sets up test server and database and loops over handling http requests."""
        try:
            handler = WSGIHandler()
            server_address = (self.address, self.port)
            httpd = StoppableWSGIServer(server_address, basehttp.WSGIRequestHandler)
            httpd.set_app(handler)
            self.started.set()
        except basehttp.WSGIServerException as e:
            # Record the failure for the spawning thread, then bail out.
            self.error = e
            self.started.set()
            return
        # Must do database stuff in this new thread if database in memory.
        from django.conf import settings
        db = settings.DATABASES['default']
        ENGINE = db['ENGINE']
        TEST_NAME = db.get('TEST_NAME')
        if ('sqlite3' in ENGINE or 'spatialite' in ENGINE) \
                and (not TEST_NAME or TEST_NAME == ':memory:'):
            if 'spatialite' in ENGINE:
                # Spatialite needs its metadata tables before syncdb runs.
                cursor = connections['default'].cursor()
                cursor.execute('SELECT InitSpatialMetaData()')
                row = cursor.fetchone()
            call_command('syncdb', interactive=False, verbosity=0)
            # Import the fixture data into the test database.
            if hasattr(self, 'fixtures'):
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                call_command('loaddata', *self.fixtures, **{'verbosity': 0})
        # Loop until we get a stop event.
        while not self._stopevent.isSet():
            httpd.handle_request()

    def join(self, timeout=None):
        """Stop the thread and wait for it to finish."""
        self._stopevent.set()
        threading.Thread.join(self, timeout)
class TestServerTestCase(TransactionTestCase):
    """TransactionTestCase that can run a live HTTP server in a thread.

    Fix: ``stop_test_server`` previously raised AttributeError when called
    without a prior successful ``start_test_server`` (the ``server_thread``
    attribute only exists after a start); it now degrades to a no-op.
    """
    fixtures = ['test_data.json']

    def start_test_server(self, address='localhost', port=8000):
        """Creates a live test server object (instance of WSGIServer)."""
        self.server_thread = TestServerThread(address, port)
        self.server_thread.fixtures = self.fixtures
        self.server_thread.start()
        # Wait until the thread reports bound-or-failed, then surface errors.
        self.server_thread.started.wait()
        if self.server_thread.error:
            raise self.server_thread.error

    def stop_test_server(self):
        """Stop the server thread, if one was ever started."""
        if getattr(self, 'server_thread', None):
            self.server_thread.join()
class TestCaseWithFixture(TestCase):
    """Plain Django TestCase preloaded with the shared test fixture data."""
    fixtures = ['test_data.json']
| bsd-3-clause |
CYBAI/servo | etc/crates-graph.py | 7 | 1751 | #!/usr/bin/env python3
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import json
import os
import subprocess
import sys
def main(crate=None):
    """Print a Graphviz digraph of the in-repo crate dependency graph.

    With no argument the whole local graph is emitted; given a crate name,
    only the subgraph reachable from that crate is printed.
    """
    # Run cargo from the repository root (one level above this script).
    os.chdir(os.path.join(os.path.dirname(__file__), ".."))
    metadata = json.loads(subprocess.check_output(["cargo", "metadata", "--format-version", "1"]))
    local_graph = {}
    for pkg in metadata["packages"]:
        # A null "source" means the package lives in this repository.
        if pkg["source"] is None:
            for dep in pkg["dependencies"]:
                if dep["source"] is None:
                    local_graph.setdefault(pkg["name"], []).append(dep["name"])
    if crate:
        subgraph = {}
        visited = set()

        def visit(name):
            # Depth-first walk recording every edge reachable from `name`.
            if name in visited:
                return
            visited.add(name)
            for dep in local_graph.get(name, []):
                subgraph.setdefault(name, []).append(dep)
                visit(dep)

        visit(crate)
    else:
        subgraph = local_graph
    print("// This is in Graphviz DOT format.")
    print("// Use the 'dot' or 'xdot' tool to visualize.")
    print('digraph "local crates" {')
    for pkg_name, deps in subgraph.items():
        for dep in deps:
            print(' "%s" -> "%s";' % (pkg_name, dep))
    print("}")
if __name__ == "__main__":
    # Forward any CLI arguments (an optional crate name) to main().
    sys.exit(main(*sys.argv[1:]))
| mpl-2.0 |
chrisidefix/devide | modules/readers/resources/python/rawVolumeRDRViewFrame.py | 7 | 4028 | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# generated by wxGlade 0.6.3 on Sat Feb 09 13:43:04 2008
import wx
# begin wxGlade: extracode
# end wxGlade
class rawVolumeRDRViewFrame(wx.Frame):
    """wxGlade-generated configuration frame for the raw volume reader.

    NOTE: the code between the ``begin wxGlade`` / ``end wxGlade`` marker
    comments is machine-generated; hand edits there will be overwritten
    when the design file is regenerated.
    """
    def __init__(self, *args, **kwds):
        # begin wxGlade: rawVolumeRDRViewFrame.__init__
        kwds["style"] = wx.CAPTION|wx.MINIMIZE_BOX|wx.MAXIMIZE_BOX|wx.SYSTEM_MENU|wx.RESIZE_BORDER
        wx.Frame.__init__(self, *args, **kwds)
        self.viewFramePanel = wx.Panel(self, -1)
        self.label_1_copy_2 = wx.StaticText(self.viewFramePanel, -1, "Filename")
        self.filenameText = wx.TextCtrl(self.viewFramePanel, -1, "")
        self.browseButtonId = wx.NewId()
        self.button_1_copy = wx.Button(self.viewFramePanel, self.browseButtonId, "Browse")
        self.label_3_copy_1 = wx.StaticText(self.viewFramePanel, -1, "Data type")
        self.dataTypeChoice = wx.Choice(self.viewFramePanel, -1, choices=["Dummy 1", "Dummy 2", "Dummy 3", "Dummy 4", "Dummy 5"])
        self.endiannessRadioBox = wx.RadioBox(self.viewFramePanel, -1, "Endianness", choices=["Little (LSB at lowest address)", "Big (MSB at lowest address)"], majorDimension=2, style=wx.RA_SPECIFY_COLS)
        self.label_2 = wx.StaticText(self.viewFramePanel, -1, "Header size (bytes)")
        self.headerSizeText = wx.TextCtrl(self.viewFramePanel, -1, "0")
        self.label_4 = wx.StaticText(self.viewFramePanel, -1, "Extent (x0, x1, y0, y1, z0, z1)")
        self.extentText = wx.TextCtrl(self.viewFramePanel, -1, "")
        self.label_5 = wx.StaticText(self.viewFramePanel, -1, "Spacing (Sx, Sy, Sz)")
        self.spacingText = wx.TextCtrl(self.viewFramePanel, -1, "")
        self.__set_properties()
        self.__do_layout()
        # end wxGlade

    def __set_properties(self):
        """Apply generated widget defaults (title, initial selections)."""
        # begin wxGlade: rawVolumeRDRViewFrame.__set_properties
        self.SetTitle("Raw Volume Reader")
        self.dataTypeChoice.SetSelection(0)
        self.endiannessRadioBox.SetSelection(0)
        # end wxGlade

    def __do_layout(self):
        """Assemble the generated sizer hierarchy and fit the frame."""
        # begin wxGlade: rawVolumeRDRViewFrame.__do_layout
        sizer_1 = wx.BoxSizer(wx.VERTICAL)
        sizer_5 = wx.BoxSizer(wx.VERTICAL)
        grid_sizer_1 = wx.FlexGridSizer(3, 2, 4, 4)
        sizer_2 = wx.BoxSizer(wx.VERTICAL)
        sizer_7 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_6 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_6.Add(self.label_1_copy_2, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 2)
        sizer_6.Add(self.filenameText, 1, wx.ALIGN_CENTER_VERTICAL, 0)
        sizer_6.Add(self.button_1_copy, 0, wx.ALIGN_CENTER_VERTICAL, 0)
        sizer_2.Add(sizer_6, 0, wx.BOTTOM|wx.EXPAND, 4)
        sizer_3.Add(self.label_3_copy_1, 0, wx.RIGHT|wx.ALIGN_CENTER_VERTICAL, 2)
        sizer_3.Add(self.dataTypeChoice, 1, wx.ALIGN_CENTER_VERTICAL, 0)
        sizer_2.Add(sizer_3, 0, wx.BOTTOM|wx.EXPAND, 4)
        sizer_7.Add(self.endiannessRadioBox, 1, wx.EXPAND, 0)
        sizer_2.Add(sizer_7, 0, wx.BOTTOM|wx.EXPAND, 4)
        sizer_5.Add(sizer_2, 1, wx.ALL|wx.EXPAND, 7)
        grid_sizer_1.Add(self.label_2, 0, wx.ALIGN_CENTER_VERTICAL, 0)
        grid_sizer_1.Add(self.headerSizeText, 0, wx.EXPAND, 0)
        grid_sizer_1.Add(self.label_4, 0, wx.ALIGN_CENTER_VERTICAL, 0)
        grid_sizer_1.Add(self.extentText, 0, wx.EXPAND, 0)
        grid_sizer_1.Add(self.label_5, 0, wx.ALIGN_CENTER_VERTICAL, 0)
        grid_sizer_1.Add(self.spacingText, 0, wx.EXPAND, 0)
        grid_sizer_1.AddGrowableCol(1)
        sizer_5.Add(grid_sizer_1, 0, wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 7)
        self.viewFramePanel.SetSizer(sizer_5)
        sizer_1.Add(self.viewFramePanel, 1, wx.EXPAND, 0)
        self.SetSizer(sizer_1)
        sizer_1.Fit(self)
        self.Layout()
        # end wxGlade

# end of class rawVolumeRDRViewFrame
if __name__ == "__main__":
    # Stand-alone preview of the frame, as emitted by wxGlade.
    app = wx.PySimpleApp(0)
    wx.InitAllImageHandlers()
    frame_1 = rawVolumeRDRViewFrame(None, -1, "")
    app.SetTopWindow(frame_1)
    frame_1.Show()
    app.MainLoop()
| bsd-3-clause |
verdurin/bcbio-nextgen | bcbio/variation/ensemble.py | 4 | 9863 | """Ensemble methods that create consensus calls from multiple approaches.
This handles merging calls produced by multiple calling methods or
technologies into a single consolidated callset. Uses the bcbio.variation
toolkit: https://github.com/chapmanb/bcbio.variation and bcbio.variation.recall:
https://github.com/chapmanb/bcbio.variation.recall
"""
import collections
import copy
import glob
import math
import os
import yaml
import toolz as tz
from bcbio import utils
from bcbio.log import logger
from bcbio.pipeline import config_utils
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do
from bcbio.variation import population, validate, vcfutils
def combine_calls(batch_id, samples, data):
    """Combine multiple callsets into a final set of merged calls.

    Chooses the classifier-based bcbio.variation ensemble when
    ``classifiers`` is configured, otherwise the simpler intersection
    approach.  Returns ``[[batch_id, callinfo]]`` describing the ensemble
    "caller" output for this batch.
    """
    logger.info("Ensemble consensus calls for {0}: {1}".format(
        batch_id, ",".join(x["variantcaller"] for x in samples[0]["variants"])))
    # Work on a deep copy so the caller's sample dict is not mutated.
    edata = copy.deepcopy(data)
    base_dir = utils.safe_makedir(os.path.join(edata["dirs"]["work"], "ensemble", batch_id))
    caller_names, vrn_files, bam_files = _organize_variants(samples, batch_id)
    # Only run the ensemble machinery when at least one input has variants.
    exist_variants = False
    for tmp_vrn_file in vrn_files:
        if vcfutils.vcf_has_variants(tmp_vrn_file):
            exist_variants = True
            break
    if exist_variants:
        if "classifiers" not in edata["config"]["algorithm"]["ensemble"]:
            callinfo = _run_ensemble_intersection(batch_id, vrn_files, base_dir, edata)
        else:
            config_file = _write_config_file(batch_id, caller_names, base_dir, edata)
            callinfo = _run_ensemble(batch_id, vrn_files, config_file, base_dir,
                                     edata["sam_ref"], edata)
            callinfo["vrn_file"] = vcfutils.bgzip_and_index(callinfo["vrn_file"], data["config"])
        edata["config"]["algorithm"]["variantcaller"] = "ensemble"
        edata["vrn_file"] = callinfo["vrn_file"]
        edata["ensemble_bed"] = callinfo["bed_file"]
        callinfo["validate"] = validate.compare_to_rm(edata)[0][0].get("validate")
    else:
        # No variants anywhere: emit an empty VCF so downstream steps still
        # find a file for this batch.
        out_vcf_file = os.path.join(base_dir, "{0}-ensemble.vcf".format(batch_id))
        vcfutils.write_empty_vcf(out_vcf_file)
        callinfo = {"variantcaller": "ensemble",
                    "vrn_file": vcfutils.bgzip_and_index(out_vcf_file, data["config"]),
                    "bed_file": None}
    return [[batch_id, callinfo]]
def combine_calls_parallel(samples, run_parallel):
    """Combine calls using batched Ensemble approach.

    Dispatches one combine_calls job per batch through ``run_parallel``,
    then prepends the resulting ensemble callinfo to each grouped sample's
    variants; samples without ensemble configuration pass through in extras.
    """
    batch_groups, extras = _group_by_batches(samples, _has_ensemble)
    out = []
    if batch_groups:
        # iteritems(): this module targets Python 2.
        processed = run_parallel("combine_calls", ((b, xs, xs[0]) for b, xs in batch_groups.iteritems()))
        for batch_id, callinfo in processed:
            for data in batch_groups[batch_id]:
                # Insert first so ensemble becomes the primary callset.
                data["variants"].insert(0, callinfo)
                out.append([data])
    return out + extras
def _has_ensemble(data):
# for tumour-normal calling, a sample may have "ensemble" for the normal
# sample configured but there won't be any variant files per se
variants_to_process = (len(data["variants"]) > 1
and any([x.get('vrn_file', None) is not None or x.get('vrn_file_batch', None) is not None
for x in data["variants"]]))
return variants_to_process and "ensemble" in data["config"]["algorithm"]
def _group_by_batches(samples, check_fn):
"""Group calls by batches, processing families together during ensemble calling.
"""
batch_groups = collections.defaultdict(list)
extras = []
for data in [x[0] for x in samples]:
if check_fn(data):
batch = data.get("metadata", {}).get("batch")
if batch:
batch_groups[batch].append(data)
else:
assert data["name"][-1] not in batch_groups
batch_groups[data["name"][-1]] = [data]
else:
extras.append([data])
return batch_groups, extras
def _organize_variants(samples, batch_id):
    """Retrieve variant calls for all samples, merging batched samples into single VCF.

    Returns (caller_names, vrn_files, bam_files): the ordered caller list
    taken from the first sample, one VCF per caller (multi-sample batches
    are merged), and the unique alignment files seen across samples.
    """
    bam_files = set([])
    caller_names = [x["variantcaller"] for x in samples[0]["variants"]]
    calls = collections.defaultdict(list)
    for data in samples:
        if "work_bam" in data:
            bam_files.add(data["work_bam"])
        for vrn in data["variants"]:
            calls[vrn["variantcaller"]].append(vrn["vrn_file"])
    data = samples[0]
    vrn_files = []
    for caller in caller_names:
        fnames = calls[caller]
        if len(fnames) == 1:
            vrn_files.append(fnames[0])
        else:
            # Multiple samples in the batch: merge into one multi-sample VCF.
            vrn_files.append(population.get_multisample_vcf(fnames, batch_id, caller, data))
    return caller_names, vrn_files, list(bam_files)
def _handle_somatic_ensemble(vrn_file, data):
    """For somatic ensemble, discard normal samples and filtered variants from vcfs.

    Only needed for bcbio.variation based ensemble calling.  Non-tumor
    samples return the input file unchanged.
    """
    if tz.get_in(["metadata", "phenotype"], data, "").lower().startswith("tumor"):
        vrn_file_temp = vrn_file.replace(".vcf", "_tumorOnly_noFilteredCalls.vcf")
        # Select tumor sample and keep only PASS and . calls
        vrn_file = vcfutils.select_sample(in_file=vrn_file, sample=data["name"][1],
                                          out_file=vrn_file_temp,
                                          config=data["config"], filters="PASS,.")
    return vrn_file
def _bcbio_variation_ensemble(vrn_files, out_file, ref_file, config_file, base_dir, data):
    """Run a variant comparison using the bcbio.variation toolkit, given an input configuration."""
    # Tumor-only/PASS filtering is applied per input for somatic samples.
    vrn_files = [_handle_somatic_ensemble(v, data) for v in vrn_files]
    tmp_dir = utils.safe_makedir(os.path.join(base_dir, "tmp"))
    bv_jar = config_utils.get_jar("bcbio.variation",
                                  config_utils.get_program("bcbio_variation", data["config"], "dir"))
    resources = config_utils.get_resources("bcbio_variation", data["config"])
    # JVM sizing is configurable via resources; these are the defaults.
    jvm_opts = resources.get("jvm_opts", ["-Xms750m", "-Xmx2g"])
    java_args = ["-Djava.io.tmpdir=%s" % tmp_dir]
    cmd = ["java"] + jvm_opts + java_args + ["-jar", bv_jar, "variant-ensemble", config_file,
                                             ref_file, out_file] + vrn_files
    # Run from base_dir so relative outputs land in the ensemble work area.
    with utils.chdir(base_dir):
        do.run(cmd, "Ensemble calling: %s" % os.path.basename(base_dir))
def _run_ensemble(batch_id, vrn_files, config_file, base_dir, ref_file, data):
    """Run an ensemble call using merging and SVM-based approach in bcbio.variation

    Outputs are cached: nothing is recomputed when the final VCF exists.
    Returns a callinfo dict pointing at the ensemble VCF and, when
    produced, the BED of combined call regions.
    """
    out_vcf_file = os.path.join(base_dir, "{0}-ensemble.vcf".format(batch_id))
    out_bed_file = os.path.join(base_dir, "{0}-callregions.bed".format(batch_id))
    work_dir = "%s-work" % os.path.splitext(out_vcf_file)[0]
    if not utils.file_exists(out_vcf_file):
        _bcbio_variation_ensemble(vrn_files, out_vcf_file, ref_file, config_file,
                                  base_dir, data)
    if not utils.file_exists(out_vcf_file):
        # Fall back to the consensus-filtered VCF from the prep step when
        # bcbio.variation did not produce a final output file.
        base_vcf = glob.glob(os.path.join(work_dir, "prep", "*-cfilter.vcf"))[0]
        utils.symlink_plus(base_vcf, out_vcf_file)
    if not utils.file_exists(out_bed_file):
        multi_beds = glob.glob(os.path.join(work_dir, "prep", "*-multicombine.bed"))
        if len(multi_beds) > 0:
            utils.symlink_plus(multi_beds[0], out_bed_file)
    return {"variantcaller": "ensemble",
            "vrn_file": out_vcf_file,
            "bed_file": out_bed_file if os.path.exists(out_bed_file) else None}
def _write_config_file(batch_id, caller_names, base_dir, data):
    """Write YAML configuration to generate an ensemble set of combined calls.

    Returns the path of the generated bcbio.variation config file.
    """
    config_dir = utils.safe_makedir(os.path.join(base_dir, "config"))
    config_file = os.path.join(config_dir, "{0}-ensemble.yaml".format(batch_id))
    algorithm = data["config"]["algorithm"]
    # Pass the user's ensemble settings through plus the caller names;
    # prep-inputs is disabled because inputs are already prepared here.
    econfig = {"ensemble": algorithm["ensemble"],
               "names": caller_names,
               "prep-inputs": False}
    intervals = validate.get_analysis_intervals(data)
    if intervals:
        econfig["intervals"] = os.path.abspath(intervals)
    with open(config_file, "w") as out_handle:
        yaml.safe_dump(econfig, out_handle, allow_unicode=False, default_flow_style=False)
    return config_file
def _get_num_pass(data, n):
    """Return how many callers must agree for an ensemble call to pass.

    Precedence: an explicit ``numpass`` setting, then ``trusted_pct`` (a
    fraction of the ``n`` available callers, rounded up), then a default
    of 2.
    """
    ensemble_path = ["config", "algorithm", "ensemble"]
    numpass = tz.get_in(ensemble_path + ["numpass"], data)
    if numpass:
        return int(numpass)
    trusted_pct = tz.get_in(ensemble_path + ["trusted_pct"], data)
    if trusted_pct:
        return int(math.ceil(float(trusted_pct) * n))
    return 2
def _run_ensemble_intersection(batch_id, vrn_files, base_dir, edata):
    """Run intersection n out of x based ensemble method using bcbio.variation.recall.

    The output VCF is cached: the external command only runs when the
    final bgzipped file is missing.
    """
    out_vcf_file = os.path.join(base_dir, "{0}-ensemble.vcf.gz".format(batch_id))
    if not utils.file_exists(out_vcf_file):
        num_pass = _get_num_pass(edata, len(vrn_files))
        cmd = [config_utils.get_program("bcbio-variation-recall", edata["config"]),
               "ensemble", "--cores=%s" % edata["config"]["algorithm"].get("num_cores", 1),
               "--numpass", str(num_pass)]
        # Remove filtered calls if we're dealing with tumor/normal calls
        if vcfutils.get_paired_phenotype(edata):
            cmd += ["--nofiltered"]
        cmd += [out_vcf_file, dd.get_ref_file(edata)] + vrn_files
        do.run(cmd, "Ensemble intersection calling: %s" % (batch_id))
    # NOTE(review): in_data is populated but never used or returned --
    # looks vestigial; confirm before removing.
    in_data = utils.deepish_copy(edata)
    in_data["vrn_file"] = out_vcf_file
    return {"variantcaller": "ensemble",
            "vrn_file": out_vcf_file,
            "bed_file": None}
| mit |
mooninite/cobbler | cobbler/modules/manage_in_tftpd.py | 14 | 6968 | """
This is some of the code behind 'cobbler sync'.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import glob
import os.path
import shutil
import tftpgen
from cexceptions import CX
import clogger
import templar
import utils
from utils import _
def register():
    """
    The mandatory cobbler module registration hook.

    Returning "manage" identifies this module as a manage backend to the
    cobbler module loader.
    """
    module_category = "manage"
    return module_category
class InTftpdManager:
def what(self):
    """Identify this manager backend by name."""
    return "in_tftpd"
def __init__(self, collection_mgr, logger):
    """
    Constructor
    """
    self.logger = logger
    # Fall back to a default logger when none is supplied.
    if self.logger is None:
        self.logger = clogger.Logger()
    self.collection_mgr = collection_mgr
    self.templar = templar.Templar(collection_mgr)
    # xinetd service definition maintained by write_tftpd_files().
    self.settings_file = "/etc/xinetd.d/tftp"
    self.tftpgen = tftpgen.TFTPGen(collection_mgr, self.logger)
    self.systems = collection_mgr.systems()
    self.bootloc = utils.tftpboot_location()
def regen_hosts(self):
    # Part of the shared manager interface; not applicable to tftpd.
    pass  # not used
def write_dns_files(self):
    # Part of the shared manager interface; not applicable to tftpd.
    pass  # not used
def write_boot_files_distro(self, distro):
    """Copy a distro's template-defined boot files into the tftp tree.

    Each key in the distro's ``boot_files`` dict is a templated destination
    path; its value is a source path or glob.  Files are only copied when
    the destination does not already exist.  Returns 0 (legacy convention).
    """
    # collapse the object down to a rendered datastructure
    # the second argument set to false means we don't collapse
    # dicts/arrays into a flat string
    target = utils.blender(self.collection_mgr.api, False, distro)
    # Create metadata for the templar function
    # Right now, just using local_img_path, but adding more
    # cobbler variables here would probably be good
    metadata = {}
    metadata["local_img_path"] = os.path.join(utils.tftpboot_location(), "images", distro.name)
    # Create the templar instance. Used to template the target directory
    templater = templar.Templar(self.collection_mgr)
    # Loop through the dict of boot files, executing a cp for each one
    self.logger.info("processing boot_files for distro: %s" % distro.name)
    for file in target["boot_files"].keys():
        rendered_file = templater.render(file, metadata, None)
        # Pre-bind the names the error handler reports: previously the
        # except block referenced loop variables that could be unbound
        # (e.g. when glob itself failed), raising NameError from inside
        # the handler instead of logging the real problem.
        f = filedst = None
        try:
            for f in glob.glob(target["boot_files"][file]):
                if f == target["boot_files"][file]:
                    # this wasn't really a glob, so just copy it as is
                    filedst = rendered_file
                else:
                    # this was a glob, so figure out what the destination
                    # file path/name should be
                    tgt_path, tgt_file = os.path.split(f)
                    rnd_path, rnd_file = os.path.split(rendered_file)
                    filedst = os.path.join(rnd_path, tgt_file)
                if not os.path.isfile(filedst):
                    shutil.copyfile(f, filedst)
                self.collection_mgr.api.log("copied file %s to %s for %s" % (f, filedst, distro.name))
        except Exception:
            # Exception (not bare except) so SystemExit/KeyboardInterrupt
            # still propagate; log and continue with the next boot file.
            self.logger.error("failed to copy file %s to %s for %s" % (f, filedst, distro.name))
    return 0
def write_boot_files(self):
"""
Copy files in profile["boot_files"] into /tftpboot. Used for vmware
currently.
"""
for distro in self.collection_mgr.distros():
self.write_boot_files_distro(distro)
return 0
def write_tftpd_files(self):
"""
xinetd files are written when manage_tftp is set in
/var/lib/cobbler/settings.
"""
template_file = "/etc/cobbler/tftpd.template"
try:
f = open(template_file, "r")
except:
raise CX(_("error reading template %s") % template_file)
template_data = ""
template_data = f.read()
f.close()
metadata = {
"user": "root",
"binary": "/usr/sbin/in.tftpd",
"args": "%s" % self.bootloc
}
self.logger.info("generating %s" % self.settings_file)
self.templar.render(template_data, metadata, self.settings_file, None)
def update_netboot(self, name):
"""
Write out new pxelinux.cfg files to /tftpboot
"""
system = self.systems.find(name=name)
if system is None:
utils.die(self.logger, "error in system lookup for %s" % name)
menu_items = self.tftpgen.get_menu_items()['pxe']
self.tftpgen.write_all_system_files(system, menu_items)
# generate any templates listed in the system
self.tftpgen.write_templates(system)
def add_single_system(self, system):
"""
Write out new pxelinux.cfg files to /tftpboot
"""
# write the PXE files for the system
menu_items = self.tftpgen.get_menu_items()['pxe']
self.tftpgen.write_all_system_files(system, menu_items)
# generate any templates listed in the distro
self.tftpgen.write_templates(system)
def add_single_distro(self, distro):
self.tftpgen.copy_single_distro_files(distro, self.bootloc, False)
self.write_boot_files_distro(distro)
def sync(self, verbose=True):
"""
Write out all files to /tftpdboot
"""
self.tftpgen.verbose = verbose
self.logger.info("copying bootloaders")
self.tftpgen.copy_bootloaders()
self.logger.info("copying distros to tftpboot")
# Adding in the exception handling to not blow up if files have
# been moved (or the path references an NFS directory that's no longer
# mounted)
for d in self.collection_mgr.distros():
try:
self.logger.info("copying files for distro: %s" % d.name)
self.tftpgen.copy_single_distro_files(d, self.bootloc, False)
except CX, e:
self.logger.error(e.value)
self.logger.info("copying images")
self.tftpgen.copy_images()
# the actual pxelinux.cfg files, for each interface
self.logger.info("generating PXE configuration files")
menu_items = self.tftpgen.get_menu_items()['pxe']
for x in self.systems:
self.tftpgen.write_all_system_files(x, menu_items)
self.logger.info("generating PXE menu structure")
self.tftpgen.make_pxe_menu()
def get_manager(collection_mgr, logger):
return InTftpdManager(collection_mgr, logger)
| gpl-2.0 |
aiyyoi/DevFest-MaxBond | MaxBond/env/lib/python2.7/site-packages/sqlalchemy/ext/associationproxy.py | 33 | 32975 | # ext/associationproxy.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Contain the ``AssociationProxy`` class.
The ``AssociationProxy`` is a Python property object which provides
transparent proxied access to the endpoint of an association object.
See the example ``examples/association/proxied_association.py``.
"""
import itertools
import operator
import weakref
from .. import exc, orm, util
from ..orm import collections, interfaces
from ..sql import not_, or_
def association_proxy(target_collection, attr, **kw):
"""Return a Python property implementing a view of a target
attribute which references an attribute on members of the
target.
The returned value is an instance of :class:`.AssociationProxy`.
Implements a Python property representing a relationship as a collection
of simpler values, or a scalar value. The proxied property will mimic
the collection type of the target (list, dict or set), or, in the case of
a one to one relationship, a simple scalar value.
:param target_collection: Name of the attribute we'll proxy to.
This attribute is typically mapped by
:func:`~sqlalchemy.orm.relationship` to link to a target collection, but
can also be a many-to-one or non-scalar relationship.
:param attr: Attribute on the associated instance or instances we'll
proxy for.
For example, given a target collection of [obj1, obj2], a list created
by this proxy property would look like [getattr(obj1, *attr*),
getattr(obj2, *attr*)]
If the relationship is one-to-one or otherwise uselist=False, then
simply: getattr(obj, *attr*)
:param creator: optional.
When new items are added to this proxied collection, new instances of
the class collected by the target collection will be created. For list
and set collections, the target class constructor will be called with
the 'value' for the new instance. For dict types, two arguments are
passed: key and value.
If you want to construct instances differently, supply a *creator*
function that takes arguments as above and returns instances.
For scalar relationships, creator() will be called if the target is None.
If the target is present, set operations are proxied to setattr() on the
associated object.
If you have an associated object with multiple attributes, you may set
up multiple association proxies mapping to different attributes. See
the unit tests for examples, and for examples of how creator() functions
can be used to construct the scalar relationship on-demand in this
situation.
:param \*\*kw: Passes along any other keyword arguments to
:class:`.AssociationProxy`.
"""
return AssociationProxy(target_collection, attr, **kw)
ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
"""Symbol indicating an :class:`_InspectionAttr` that's
of type :class:`.AssociationProxy`.
Is assigned to the :attr:`._InspectionAttr.extension_type`
attibute.
"""
class AssociationProxy(interfaces._InspectionAttr):
"""A descriptor that presents a read/write view of an object attribute."""
is_attribute = False
extension_type = ASSOCIATION_PROXY
def __init__(self, target_collection, attr, creator=None,
getset_factory=None, proxy_factory=None,
proxy_bulk_set=None):
"""Construct a new :class:`.AssociationProxy`.
The :func:`.association_proxy` function is provided as the usual
entrypoint here, though :class:`.AssociationProxy` can be instantiated
and/or subclassed directly.
:param target_collection: Name of the collection we'll proxy to,
usually created with :func:`.relationship`.
:param attr: Attribute on the collected instances we'll proxy
for. For example, given a target collection of [obj1, obj2], a
list created by this proxy property would look like
[getattr(obj1, attr), getattr(obj2, attr)]
:param creator: Optional. When new items are added to this proxied
collection, new instances of the class collected by the target
collection will be created. For list and set collections, the
target class constructor will be called with the 'value' for the
new instance. For dict types, two arguments are passed:
key and value.
If you want to construct instances differently, supply a 'creator'
function that takes arguments as above and returns instances.
:param getset_factory: Optional. Proxied attribute access is
automatically handled by routines that get and set values based on
the `attr` argument for this proxy.
If you would like to customize this behavior, you may supply a
`getset_factory` callable that produces a tuple of `getter` and
`setter` functions. The factory is called with two arguments, the
abstract type of the underlying collection and this proxy instance.
:param proxy_factory: Optional. The type of collection to emulate is
determined by sniffing the target collection. If your collection
type can't be determined by duck typing or you'd like to use a
different collection implementation, you may supply a factory
function to produce those collections. Only applicable to
non-scalar relationships.
:param proxy_bulk_set: Optional, use with proxy_factory. See
the _set() method for details.
"""
self.target_collection = target_collection
self.value_attr = attr
self.creator = creator
self.getset_factory = getset_factory
self.proxy_factory = proxy_factory
self.proxy_bulk_set = proxy_bulk_set
self.owning_class = None
self.key = '_%s_%s_%s' % (
type(self).__name__, target_collection, id(self))
self.collection_class = None
@property
def remote_attr(self):
"""The 'remote' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.local_attr`
"""
return getattr(self.target_class, self.value_attr)
@property
def local_attr(self):
"""The 'local' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.remote_attr`
"""
return getattr(self.owning_class, self.target_collection)
@property
def attr(self):
"""Return a tuple of ``(local_attr, remote_attr)``.
This attribute is convenient when specifying a join
using :meth:`.Query.join` across two relationships::
sess.query(Parent).join(*Parent.proxied.attr)
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.local_attr`
:attr:`.AssociationProxy.remote_attr`
"""
return (self.local_attr, self.remote_attr)
def _get_property(self):
return (orm.class_mapper(self.owning_class).
get_property(self.target_collection))
@util.memoized_property
def target_class(self):
"""The intermediary class handled by this :class:`.AssociationProxy`.
Intercepted append/set/assignment events will result
in the generation of new instances of this class.
"""
return self._get_property().mapper.class_
@util.memoized_property
def scalar(self):
"""Return ``True`` if this :class:`.AssociationProxy` proxies a scalar
relationship on the local side."""
scalar = not self._get_property().uselist
if scalar:
self._initialize_scalar_accessors()
return scalar
@util.memoized_property
def _value_is_scalar(self):
return not self._get_property().\
mapper.get_property(self.value_attr).uselist
@util.memoized_property
def _target_is_object(self):
return getattr(self.target_class, self.value_attr).impl.uses_objects
def __get__(self, obj, class_):
if self.owning_class is None:
self.owning_class = class_ and class_ or type(obj)
if obj is None:
return self
if self.scalar:
target = getattr(obj, self.target_collection)
return self._scalar_get(target)
else:
try:
# If the owning instance is reborn (orm session resurrect,
# etc.), refresh the proxy cache.
creator_id, proxy = getattr(obj, self.key)
if id(obj) == creator_id:
return proxy
except AttributeError:
pass
proxy = self._new(_lazy_collection(obj, self.target_collection))
setattr(obj, self.key, (id(obj), proxy))
return proxy
def __set__(self, obj, values):
if self.owning_class is None:
self.owning_class = type(obj)
if self.scalar:
creator = self.creator and self.creator or self.target_class
target = getattr(obj, self.target_collection)
if target is None:
setattr(obj, self.target_collection, creator(values))
else:
self._scalar_set(target, values)
else:
proxy = self.__get__(obj, None)
if proxy is not values:
proxy.clear()
self._set(proxy, values)
def __delete__(self, obj):
if self.owning_class is None:
self.owning_class = type(obj)
delattr(obj, self.key)
def _initialize_scalar_accessors(self):
if self.getset_factory:
get, set = self.getset_factory(None, self)
else:
get, set = self._default_getset(None)
self._scalar_get, self._scalar_set = get, set
def _default_getset(self, collection_class):
attr = self.value_attr
_getter = operator.attrgetter(attr)
getter = lambda target: _getter(target) if target is not None else None
if collection_class is dict:
setter = lambda o, k, v: setattr(o, attr, v)
else:
setter = lambda o, v: setattr(o, attr, v)
return getter, setter
def _new(self, lazy_collection):
creator = self.creator and self.creator or self.target_class
self.collection_class = util.duck_type_collection(lazy_collection())
if self.proxy_factory:
return self.proxy_factory(
lazy_collection, creator, self.value_attr, self)
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
if self.collection_class is list:
return _AssociationList(
lazy_collection, creator, getter, setter, self)
elif self.collection_class is dict:
return _AssociationDict(
lazy_collection, creator, getter, setter, self)
elif self.collection_class is set:
return _AssociationSet(
lazy_collection, creator, getter, setter, self)
else:
raise exc.ArgumentError(
'could not guess which interface to use for '
'collection_class "%s" backing "%s"; specify a '
'proxy_factory and proxy_bulk_set manually' %
(self.collection_class.__name__, self.target_collection))
def _inflate(self, proxy):
creator = self.creator and self.creator or self.target_class
if self.getset_factory:
getter, setter = self.getset_factory(self.collection_class, self)
else:
getter, setter = self._default_getset(self.collection_class)
proxy.creator = creator
proxy.getter = getter
proxy.setter = setter
def _set(self, proxy, values):
if self.proxy_bulk_set:
self.proxy_bulk_set(proxy, values)
elif self.collection_class is list:
proxy.extend(values)
elif self.collection_class is dict:
proxy.update(values)
elif self.collection_class is set:
proxy.update(values)
else:
raise exc.ArgumentError(
'no proxy_bulk_set supplied for custom '
'collection_class implementation')
@property
def _comparator(self):
return self._get_property().comparator
def any(self, criterion=None, **kwargs):
"""Produce a proxied 'any' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
and/or :meth:`.RelationshipProperty.Comparator.has`
operators of the underlying proxied attributes.
"""
if self._value_is_scalar:
value_expr = getattr(
self.target_class, self.value_attr).has(criterion, **kwargs)
else:
value_expr = getattr(
self.target_class, self.value_attr).any(criterion, **kwargs)
# check _value_is_scalar here, otherwise
# we're scalar->scalar - call .any() so that
# the "can't call any() on a scalar" msg is raised.
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
value_expr
)
else:
return self._comparator.any(
value_expr
)
def has(self, criterion=None, **kwargs):
"""Produce a proxied 'has' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
and/or :meth:`.RelationshipProperty.Comparator.has`
operators of the underlying proxied attributes.
"""
if self._target_is_object:
return self._comparator.has(
getattr(self.target_class, self.value_attr).
has(criterion, **kwargs)
)
else:
if criterion is not None or kwargs:
raise exc.ArgumentError(
"Non-empty has() not allowed for "
"column-targeted association proxy; use ==")
return self._comparator.has()
def contains(self, obj):
"""Produce a proxied 'contains' expression using EXISTS.
This expression will be a composed product
using the :meth:`.RelationshipProperty.Comparator.any`
, :meth:`.RelationshipProperty.Comparator.has`,
and/or :meth:`.RelationshipProperty.Comparator.contains`
operators of the underlying proxied attributes.
"""
if self.scalar and not self._value_is_scalar:
return self._comparator.has(
getattr(self.target_class, self.value_attr).contains(obj)
)
else:
return self._comparator.any(**{self.value_attr: obj})
def __eq__(self, obj):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
if obj is None:
return or_(
self._comparator.has(**{self.value_attr: obj}),
self._comparator == None
)
else:
return self._comparator.has(**{self.value_attr: obj})
def __ne__(self, obj):
# note the has() here will fail for collections; eq_()
# is only allowed with a scalar.
return self._comparator.has(
getattr(self.target_class, self.value_attr) != obj)
class _lazy_collection(object):
def __init__(self, obj, target):
self.ref = weakref.ref(obj)
self.target = target
def __call__(self):
obj = self.ref()
if obj is None:
raise exc.InvalidRequestError(
"stale association proxy, parent object has gone out of "
"scope")
return getattr(obj, self.target)
def __getstate__(self):
return {'obj': self.ref(), 'target': self.target}
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
"""Constructs an _AssociationCollection.
This will always be a subclass of either _AssociationList,
_AssociationSet, or _AssociationDict.
lazy_collection
A callable returning a list-based collection of entities (usually an
object attribute managed by a SQLAlchemy relationship())
creator
A function that creates new target entities. Given one parameter:
value. This assertion is assumed::
obj = creator(somevalue)
assert getter(obj) == somevalue
getter
A function. Given an associated object, return the 'value'.
setter
A function. Given an associated object and a value, store that
value on the object.
"""
self.lazy_collection = lazy_collection
self.creator = creator
self.getter = getter
self.setter = setter
self.parent = parent
col = property(lambda self: self.lazy_collection())
def __len__(self):
return len(self.col)
def __bool__(self):
return bool(self.col)
__nonzero__ = __bool__
def __getstate__(self):
return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
def __setstate__(self, state):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
class _AssociationList(_AssociationCollection):
"""Generic, converting, list-to-list proxy."""
def _create(self, value):
return self.creator(value)
def _get(self, object):
return self.getter(object)
def _set(self, object, value):
return self.setter(object, value)
def __getitem__(self, index):
return self._get(self.col[index])
def __setitem__(self, index, value):
if not isinstance(index, slice):
self._set(self.col[index], value)
else:
if index.stop is None:
stop = len(self)
elif index.stop < 0:
stop = len(self) + index.stop
else:
stop = index.stop
step = index.step or 1
start = index.start or 0
rng = list(range(index.start or 0, stop, step))
if step == 1:
for i in rng:
del self[start]
i = start
for item in value:
self.insert(i, item)
i += 1
else:
if len(value) != len(rng):
raise ValueError(
"attempt to assign sequence of size %s to "
"extended slice of size %s" % (len(value),
len(rng)))
for i, item in zip(rng, value):
self._set(self.col[i], item)
def __delitem__(self, index):
del self.col[index]
def __contains__(self, value):
for member in self.col:
# testlib.pragma exempt:__eq__
if self._get(member) == value:
return True
return False
def __getslice__(self, start, end):
return [self._get(member) for member in self.col[start:end]]
def __setslice__(self, start, end, values):
members = [self._create(v) for v in values]
self.col[start:end] = members
def __delslice__(self, start, end):
del self.col[start:end]
def __iter__(self):
"""Iterate over proxied values.
For the actual domain objects, iterate over .col instead or
just use the underlying collection directly from its property
on the parent.
"""
for member in self.col:
yield self._get(member)
raise StopIteration
def append(self, value):
item = self._create(value)
self.col.append(item)
def count(self, value):
return sum([1 for _ in
util.itertools_filter(lambda v: v == value, iter(self))])
def extend(self, values):
for v in values:
self.append(v)
def insert(self, index, value):
self.col[index:index] = [self._create(value)]
def pop(self, index=-1):
return self.getter(self.col.pop(index))
def remove(self, value):
for i, val in enumerate(self):
if val == value:
del self.col[i]
return
raise ValueError("value not in list")
def reverse(self):
"""Not supported, use reversed(mylist)"""
raise NotImplementedError
def sort(self):
"""Not supported, use sorted(mylist)"""
raise NotImplementedError
def clear(self):
del self.col[0:len(self.col)]
def __eq__(self, other):
return list(self) == other
def __ne__(self, other):
return list(self) != other
def __lt__(self, other):
return list(self) < other
def __le__(self, other):
return list(self) <= other
def __gt__(self, other):
return list(self) > other
def __ge__(self, other):
return list(self) >= other
def __cmp__(self, other):
return cmp(list(self), other)
def __add__(self, iterable):
try:
other = list(iterable)
except TypeError:
return NotImplemented
return list(self) + other
def __radd__(self, iterable):
try:
other = list(iterable)
except TypeError:
return NotImplemented
return other + list(self)
def __mul__(self, n):
if not isinstance(n, int):
return NotImplemented
return list(self) * n
__rmul__ = __mul__
def __iadd__(self, iterable):
self.extend(iterable)
return self
def __imul__(self, n):
# unlike a regular list *=, proxied __imul__ will generate unique
# backing objects for each copy. *= on proxied lists is a bit of
# a stretch anyhow, and this interpretation of the __imul__ contract
# is more plausibly useful than copying the backing objects.
if not isinstance(n, int):
return NotImplemented
if n == 0:
self.clear()
elif n > 1:
self.extend(list(self) * (n - 1))
return self
def copy(self):
return list(self)
def __repr__(self):
return repr(list(self))
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(list, func_name)):
func.__doc__ = getattr(list, func_name).__doc__
del func_name, func
_NotProvided = util.symbol('_NotProvided')
class _AssociationDict(_AssociationCollection):
"""Generic, converting, dict-to-dict proxy."""
def _create(self, key, value):
return self.creator(key, value)
def _get(self, object):
return self.getter(object)
def _set(self, object, key, value):
return self.setter(object, key, value)
def __getitem__(self, key):
return self._get(self.col[key])
def __setitem__(self, key, value):
if key in self.col:
self._set(self.col[key], key, value)
else:
self.col[key] = self._create(key, value)
def __delitem__(self, key):
del self.col[key]
def __contains__(self, key):
# testlib.pragma exempt:__hash__
return key in self.col
def has_key(self, key):
# testlib.pragma exempt:__hash__
return key in self.col
def __iter__(self):
return iter(self.col.keys())
def clear(self):
self.col.clear()
def __eq__(self, other):
return dict(self) == other
def __ne__(self, other):
return dict(self) != other
def __lt__(self, other):
return dict(self) < other
def __le__(self, other):
return dict(self) <= other
def __gt__(self, other):
return dict(self) > other
def __ge__(self, other):
return dict(self) >= other
def __cmp__(self, other):
return cmp(dict(self), other)
def __repr__(self):
return repr(dict(self.items()))
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def setdefault(self, key, default=None):
if key not in self.col:
self.col[key] = self._create(key, default)
return default
else:
return self[key]
def keys(self):
return self.col.keys()
if util.py2k:
def iteritems(self):
return ((key, self._get(self.col[key])) for key in self.col)
def itervalues(self):
return (self._get(self.col[key]) for key in self.col)
def iterkeys(self):
return self.col.iterkeys()
def values(self):
return [self._get(member) for member in self.col.values()]
def items(self):
return [(k, self._get(self.col[k])) for k in self]
else:
def items(self):
return ((key, self._get(self.col[key])) for key in self.col)
def values(self):
return (self._get(self.col[key]) for key in self.col)
def pop(self, key, default=_NotProvided):
if default is _NotProvided:
member = self.col.pop(key)
else:
member = self.col.pop(key, default)
return self._get(member)
def popitem(self):
item = self.col.popitem()
return (item[0], self._get(item[1]))
def update(self, *a, **kw):
if len(a) > 1:
raise TypeError('update expected at most 1 arguments, got %i' %
len(a))
elif len(a) == 1:
seq_or_map = a[0]
# discern dict from sequence - took the advice from
# http://www.voidspace.org.uk/python/articles/duck_typing.shtml
# still not perfect :(
if hasattr(seq_or_map, 'keys'):
for item in seq_or_map:
self[item] = seq_or_map[item]
else:
try:
for k, v in seq_or_map:
self[k] = v
except ValueError:
raise ValueError(
"dictionary update sequence "
"requires 2-element tuples")
for key, value in kw:
self[key] = value
def copy(self):
return dict(self.items())
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(dict, func_name)):
func.__doc__ = getattr(dict, func_name).__doc__
del func_name, func
class _AssociationSet(_AssociationCollection):
"""Generic, converting, set-to-set proxy."""
def _create(self, value):
return self.creator(value)
def _get(self, object):
return self.getter(object)
def _set(self, object, value):
return self.setter(object, value)
def __len__(self):
return len(self.col)
def __bool__(self):
if self.col:
return True
else:
return False
__nonzero__ = __bool__
def __contains__(self, value):
for member in self.col:
# testlib.pragma exempt:__eq__
if self._get(member) == value:
return True
return False
def __iter__(self):
"""Iterate over proxied values.
For the actual domain objects, iterate over .col instead or just use
the underlying collection directly from its property on the parent.
"""
for member in self.col:
yield self._get(member)
raise StopIteration
def add(self, value):
if value not in self:
self.col.add(self._create(value))
# for discard and remove, choosing a more expensive check strategy rather
# than call self.creator()
def discard(self, value):
for member in self.col:
if self._get(member) == value:
self.col.discard(member)
break
def remove(self, value):
for member in self.col:
if self._get(member) == value:
self.col.discard(member)
return
raise KeyError(value)
def pop(self):
if not self.col:
raise KeyError('pop from an empty set')
member = self.col.pop()
return self._get(member)
def update(self, other):
for value in other:
self.add(value)
def __ior__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
for value in other:
self.add(value)
return self
def _set(self):
return set(iter(self))
def union(self, other):
return set(self).union(other)
__or__ = union
def difference(self, other):
return set(self).difference(other)
__sub__ = difference
def difference_update(self, other):
for value in other:
self.discard(value)
def __isub__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
for value in other:
self.discard(value)
return self
def intersection(self, other):
return set(self).intersection(other)
__and__ = intersection
def intersection_update(self, other):
want, have = self.intersection(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
def __iand__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
want, have = self.intersection(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
return self
def symmetric_difference(self, other):
return set(self).symmetric_difference(other)
__xor__ = symmetric_difference
def symmetric_difference_update(self, other):
want, have = self.symmetric_difference(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
def __ixor__(self, other):
if not collections._set_binops_check_strict(self, other):
return NotImplemented
want, have = self.symmetric_difference(other), set(self)
remove, add = have - want, want - have
for value in remove:
self.remove(value)
for value in add:
self.add(value)
return self
def issubset(self, other):
return set(self).issubset(other)
def issuperset(self, other):
return set(self).issuperset(other)
def clear(self):
self.col.clear()
def copy(self):
return set(self)
def __eq__(self, other):
return set(self) == other
def __ne__(self, other):
return set(self) != other
def __lt__(self, other):
return set(self) < other
def __le__(self, other):
return set(self) <= other
def __gt__(self, other):
return set(self) > other
def __ge__(self, other):
return set(self) >= other
def __repr__(self):
return repr(set(self))
def __hash__(self):
raise TypeError("%s objects are unhashable" % type(self).__name__)
for func_name, func in list(locals().items()):
if (util.callable(func) and func.__name__ == func_name and
not func.__doc__ and hasattr(set, func_name)):
func.__doc__ = getattr(set, func_name).__doc__
del func_name, func
| mit |
upliftaero/MissionPlanner | Lib/encodings/cp775.py | 93 | 35429 | """ Python Character Mapping Codec cp775 generated from 'VENDORS/MICSFT/PC/CP775.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
    """Return the CodecInfo record used to register this codec as 'cp775'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='cp775',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x0101, # LATIN SMALL LETTER A WITH MACRON
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x0089: 0x0113, # LATIN SMALL LETTER E WITH MACRON
0x008a: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
0x008b: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
0x008c: 0x012b, # LATIN SMALL LETTER I WITH MACRON
0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x014d, # LATIN SMALL LETTER O WITH MACRON
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
0x0096: 0x00a2, # CENT SIGN
0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
0x009e: 0x00d7, # MULTIPLICATION SIGN
0x009f: 0x00a4, # CURRENCY SIGN
0x00a0: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
0x00a1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00a4: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00a5: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x00a6: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x00a7: 0x00a6, # BROKEN BAR
0x00a8: 0x00a9, # COPYRIGHT SIGN
0x00a9: 0x00ae, # REGISTERED SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x00b6: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x00b7: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00b8: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
0x00be: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
0x00c7: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00d0: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x00d1: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x00d2: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00d3: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
0x00d4: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
0x00d5: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00d6: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
0x00d7: 0x016b, # LATIN SMALL LETTER U WITH MACRON
0x00d8: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
0x00e2: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00e8: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
0x00e9: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
0x00ea: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
0x00eb: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
0x00ec: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
0x00ed: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
0x00ee: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
0x00ef: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
0x00f4: 0x00b6, # PILCROW SIGN
0x00f5: 0x00a7, # SECTION SIGN
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x00b9, # SUPERSCRIPT ONE
0x00fc: 0x00b3, # SUPERSCRIPT THREE
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\u0106' # 0x0080 -> LATIN CAPITAL LETTER C WITH ACUTE
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\u0101' # 0x0083 -> LATIN SMALL LETTER A WITH MACRON
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u0123' # 0x0085 -> LATIN SMALL LETTER G WITH CEDILLA
u'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\u0107' # 0x0087 -> LATIN SMALL LETTER C WITH ACUTE
u'\u0142' # 0x0088 -> LATIN SMALL LETTER L WITH STROKE
u'\u0113' # 0x0089 -> LATIN SMALL LETTER E WITH MACRON
u'\u0156' # 0x008a -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\u0157' # 0x008b -> LATIN SMALL LETTER R WITH CEDILLA
u'\u012b' # 0x008c -> LATIN SMALL LETTER I WITH MACRON
u'\u0179' # 0x008d -> LATIN CAPITAL LETTER Z WITH ACUTE
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
u'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
u'\u014d' # 0x0093 -> LATIN SMALL LETTER O WITH MACRON
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\u0122' # 0x0095 -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\xa2' # 0x0096 -> CENT SIGN
u'\u015a' # 0x0097 -> LATIN CAPITAL LETTER S WITH ACUTE
u'\u015b' # 0x0098 -> LATIN SMALL LETTER S WITH ACUTE
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
u'\xa3' # 0x009c -> POUND SIGN
u'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd7' # 0x009e -> MULTIPLICATION SIGN
u'\xa4' # 0x009f -> CURRENCY SIGN
u'\u0100' # 0x00a0 -> LATIN CAPITAL LETTER A WITH MACRON
u'\u012a' # 0x00a1 -> LATIN CAPITAL LETTER I WITH MACRON
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\u017b' # 0x00a3 -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
u'\u017c' # 0x00a4 -> LATIN SMALL LETTER Z WITH DOT ABOVE
u'\u017a' # 0x00a5 -> LATIN SMALL LETTER Z WITH ACUTE
u'\u201d' # 0x00a6 -> RIGHT DOUBLE QUOTATION MARK
u'\xa6' # 0x00a7 -> BROKEN BAR
u'\xa9' # 0x00a8 -> COPYRIGHT SIGN
u'\xae' # 0x00a9 -> REGISTERED SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\u0141' # 0x00ad -> LATIN CAPITAL LETTER L WITH STROKE
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u0104' # 0x00b5 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u010c' # 0x00b6 -> LATIN CAPITAL LETTER C WITH CARON
u'\u0118' # 0x00b7 -> LATIN CAPITAL LETTER E WITH OGONEK
u'\u0116' # 0x00b8 -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u012e' # 0x00bd -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u0160' # 0x00be -> LATIN CAPITAL LETTER S WITH CARON
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u0172' # 0x00c6 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\u016a' # 0x00c7 -> LATIN CAPITAL LETTER U WITH MACRON
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\u017d' # 0x00cf -> LATIN CAPITAL LETTER Z WITH CARON
u'\u0105' # 0x00d0 -> LATIN SMALL LETTER A WITH OGONEK
u'\u010d' # 0x00d1 -> LATIN SMALL LETTER C WITH CARON
u'\u0119' # 0x00d2 -> LATIN SMALL LETTER E WITH OGONEK
u'\u0117' # 0x00d3 -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\u012f' # 0x00d4 -> LATIN SMALL LETTER I WITH OGONEK
u'\u0161' # 0x00d5 -> LATIN SMALL LETTER S WITH CARON
u'\u0173' # 0x00d6 -> LATIN SMALL LETTER U WITH OGONEK
u'\u016b' # 0x00d7 -> LATIN SMALL LETTER U WITH MACRON
u'\u017e' # 0x00d8 -> LATIN SMALL LETTER Z WITH CARON
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u258c' # 0x00dd -> LEFT HALF BLOCK
u'\u2590' # 0x00de -> RIGHT HALF BLOCK
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S (GERMAN)
u'\u014c' # 0x00e2 -> LATIN CAPITAL LETTER O WITH MACRON
u'\u0143' # 0x00e3 -> LATIN CAPITAL LETTER N WITH ACUTE
u'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
u'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\u0144' # 0x00e7 -> LATIN SMALL LETTER N WITH ACUTE
u'\u0136' # 0x00e8 -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\u0137' # 0x00e9 -> LATIN SMALL LETTER K WITH CEDILLA
u'\u013b' # 0x00ea -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\u013c' # 0x00eb -> LATIN SMALL LETTER L WITH CEDILLA
u'\u0146' # 0x00ec -> LATIN SMALL LETTER N WITH CEDILLA
u'\u0112' # 0x00ed -> LATIN CAPITAL LETTER E WITH MACRON
u'\u0145' # 0x00ee -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\u2019' # 0x00ef -> RIGHT SINGLE QUOTATION MARK
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u201c' # 0x00f2 -> LEFT DOUBLE QUOTATION MARK
u'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
u'\xb6' # 0x00f4 -> PILCROW SIGN
u'\xa7' # 0x00f5 -> SECTION SIGN
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\u201e' # 0x00f7 -> DOUBLE LOW-9 QUOTATION MARK
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\u2219' # 0x00f9 -> BULLET OPERATOR
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\xb9' # 0x00fb -> SUPERSCRIPT ONE
u'\xb3' # 0x00fc -> SUPERSCRIPT THREE
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
# Built programmatically as the inverse of decoding_table (the
# authoritative table above) instead of a hand-maintained 256-entry dict.
# This is exactly what later CPython versions of this codec do: it cannot
# drift out of sync with the decoding table, and charmap_encode accepts
# the object charmap_build returns wherever it accepted the dict.
encoding_map = codecs.charmap_build(decoding_table)
| gpl-3.0 |
kevinpanaro/esp8266_micropython | umqtt/simple.py | 2 | 6136 | import usocket as socket
import ustruct as struct
from ubinascii import hexlify
class MQTTException(Exception):
    """Raised when the broker answers with an MQTT-level error code."""
class MQTTClient:
    """Minimal blocking MQTT 3.1.1 client (MicroPython ``umqtt.simple``).

    Supports connect/disconnect, ping, publish and subscribe at QoS 0 and
    1 (QoS 2 is not implemented), plus an optional last-will message.
    NOTE(review): relies on MicroPython stream extensions — two-argument
    ``sock.write(buf, n)`` and one-argument ``int.to_bytes(1)`` — so it is
    not directly runnable on CPython.
    """

    def __init__(self, client_id, server, port=0, user=None, password=None, keepalive=0,
                 ssl=False, ssl_params={}):
        # NOTE(review): mutable default for ssl_params is shared across
        # instances; harmless here since it is only read (**-expanded).
        if port == 0:
            # Default port: 8883 with TLS, 1883 plain.
            port = 8883 if ssl else 1883
        self.client_id = client_id
        self.sock = None
        # Resolve once up front; last element of the addrinfo tuple is the
        # sockaddr used by connect().
        self.addr = socket.getaddrinfo(server, port)[0][-1]
        self.ssl = ssl
        self.ssl_params = ssl_params
        self.pid = 0        # last packet id issued (QoS>0 publish, subscribe)
        self.cb = None      # incoming-message callback, set via set_callback()
        self.user = user
        self.pswd = password
        self.keepalive = keepalive
        # Last-will settings, configured via set_last_will().
        self.lw_topic = None
        self.lw_msg = None
        self.lw_qos = 0
        self.lw_retain = False

    def _send_str(self, s):
        # MQTT string: 16-bit big-endian length prefix followed by the bytes.
        self.sock.write(struct.pack("!H", len(s)))
        self.sock.write(s)

    def _recv_len(self):
        # Decode MQTT's variable-length "remaining length" field:
        # 7 payload bits per byte; a set MSB means another byte follows.
        n = 0
        sh = 0
        while 1:
            b = self.sock.read(1)[0]
            n |= (b & 0x7f) << sh
            if not b & 0x80:
                return n
            sh += 7

    def set_callback(self, f):
        """Set the ``f(topic, msg)`` callback for incoming PUBLISH packets."""
        self.cb = f

    def set_last_will(self, topic, msg, retain=False, qos=0):
        """Configure the broker-side last-will message (call before connect)."""
        assert 0 <= qos <= 2
        assert topic
        self.lw_topic = topic
        self.lw_msg = msg
        self.lw_qos = qos
        self.lw_retain = retain

    def connect(self, clean_session=True):
        """Open the socket and perform the MQTT CONNECT/CONNACK handshake.

        Returns the CONNACK "session present" flag (truthy if the broker
        restored a previous session).  Raises MQTTException on a non-zero
        CONNACK return code.
        """
        self.sock = socket.socket()
        self.sock.connect(self.addr)
        if self.ssl:
            import ussl
            self.sock = ussl.wrap_socket(self.sock, **self.ssl_params)
        # Fixed header 0x10 (CONNECT), remaining length placeholder, then
        # variable header: protocol name "MQTT", level 4 (3.1.1), connect
        # flags, 16-bit keepalive.
        msg = bytearray(b"\x10\0\0\x04MQTT\x04\x02\0\0")
        # Remaining length = 10-byte variable header + 2-byte length prefix
        # + client id (grown below for user/password and last will).
        msg[1] = 10 + 2 + len(self.client_id)
        # Overwrites the preset flags byte; only the clean-session bit so far.
        msg[9] = clean_session << 1
        if self.user is not None:
            msg[1] += 2 + len(self.user) + 2 + len(self.pswd)
            msg[9] |= 0xC0  # username + password flags
        if self.keepalive:
            assert self.keepalive < 65536
            msg[10] |= self.keepalive >> 8
            msg[11] |= self.keepalive & 0x00FF
        if self.lw_topic:
            msg[1] += 2 + len(self.lw_topic) + 2 + len(self.lw_msg)
            # Will flag + will QoS bits (NOTE(review): both QoS bits are
            # shifted by 3, which matches the spec layout at bits 3-4).
            msg[9] |= 0x4 | (self.lw_qos & 0x1) << 3 | (self.lw_qos & 0x2) << 3
            msg[9] |= self.lw_retain << 5
        self.sock.write(msg)
        #print(hex(len(msg)), hexlify(msg, ":"))
        # Payload, in spec order: client id, will topic/message, user/password.
        self._send_str(self.client_id)
        if self.lw_topic:
            self._send_str(self.lw_topic)
            self._send_str(self.lw_msg)
        if self.user is not None:
            self._send_str(self.user)
            self._send_str(self.pswd)
        resp = self.sock.read(4)
        # Expect CONNACK (type 0x20) with remaining length 2.
        assert resp[0] == 0x20 and resp[1] == 0x02
        if resp[3] != 0:
            # Non-zero return code: connection refused.
            raise MQTTException(resp[3])
        return resp[2] & 1

    def disconnect(self):
        """Send DISCONNECT and close the socket."""
        self.sock.write(b"\xe0\0")
        self.sock.close()

    def ping(self):
        """Send PINGREQ; the PINGRESP is consumed later by wait_msg()."""
        self.sock.write(b"\xc0\0")

    def publish(self, topic, msg, retain=False, qos=0):
        """Publish *msg* to *topic*; for qos=1, blocks until PUBACK arrives."""
        # Fixed header: PUBLISH (0x30) with QoS/retain flags, plus up to
        # three bytes of remaining-length encoding.
        pkt = bytearray(b"\x30\0\0\0")
        pkt[0] |= qos << 1 | retain
        sz = 2 + len(topic) + len(msg)
        if qos > 0:
            sz += 2  # room for the packet identifier
        assert sz < 2097152  # must fit in the 3 length bytes reserved above
        i = 1
        # Encode the variable-length "remaining length" field in place.
        while sz > 0x7f:
            pkt[i] = (sz & 0x7f) | 0x80
            sz >>= 7
            i += 1
        pkt[i] = sz
        #print(hex(len(pkt)), hexlify(pkt, ":"))
        # Two-argument write(buf, n) sends only the first n bytes
        # (MicroPython stream extension).
        self.sock.write(pkt, i + 1)
        self._send_str(topic)
        if qos > 0:
            self.pid += 1
            pid = self.pid
            # Reuse pkt's first two bytes as the packet-id buffer.
            struct.pack_into("!H", pkt, 0, pid)
            self.sock.write(pkt, 2)
        self.sock.write(msg)
        if qos == 1:
            # Wait for the matching PUBACK (0x40); other traffic is
            # dispatched through wait_msg() meanwhile.
            while 1:
                op = self.wait_msg()
                if op == 0x40:
                    sz = self.sock.read(1)
                    assert sz == b"\x02"
                    rcv_pid = self.sock.read(2)
                    rcv_pid = rcv_pid[0] << 8 | rcv_pid[1]
                    if pid == rcv_pid:
                        return
        elif qos == 2:
            assert 0  # QoS 2 not supported

    def subscribe(self, topic, qos=0):
        """Subscribe to *topic*; blocks until the SUBACK is received."""
        assert self.cb is not None, "Subscribe callback is not set"
        # SUBSCRIBE packet: type 0x82, then remaining length and packet id
        # packed below.
        pkt = bytearray(b"\x82\0\0\0")
        self.pid += 1
        struct.pack_into("!BH", pkt, 1, 2 + 2 + len(topic) + 1, self.pid)
        #print(hex(len(pkt)), hexlify(pkt, ":"))
        self.sock.write(pkt)
        self._send_str(topic)
        # Requested QoS byte.  NOTE(review): one-argument to_bytes(1) is
        # MicroPython-specific (CPython also needs a byteorder argument).
        self.sock.write(qos.to_bytes(1))
        while 1:
            op = self.wait_msg()
            if op == 0x90:  # SUBACK
                resp = self.sock.read(4)
                #print(resp)
                # Packet id must echo the one we sent (pkt[2:4]).
                assert resp[1] == pkt[2] and resp[2] == pkt[3]
                if resp[3] == 0x80:
                    # 0x80 = subscription rejected by the broker.
                    raise MQTTException(resp[3])
                return

    # Wait for a single incoming MQTT message and process it.
    # Subscribed messages are delivered to a callback previously
    # set by .set_callback() method. Other (internal) MQTT
    # messages processed internally.
    def wait_msg(self):
        res = self.sock.read(1)
        # Restore blocking mode (check_msg() flips it off before calling us).
        self.sock.setblocking(True)
        if res is None:
            # Nothing pending on a non-blocking read.
            return None
        if res == b"":
            # Peer closed the connection.
            raise OSError(-1)
        if res == b"\xd0": # PINGRESP
            sz = self.sock.read(1)[0]
            assert sz == 0
            return None
        op = res[0]
        if op & 0xf0 != 0x30:
            # Not a PUBLISH: hand the raw packet type back to the caller
            # (publish()/subscribe() poll for their ACKs through here).
            return op
        # PUBLISH: remaining length, topic, optional packet id, payload.
        sz = self._recv_len()
        topic_len = self.sock.read(2)
        topic_len = (topic_len[0] << 8) | topic_len[1]
        topic = self.sock.read(topic_len)
        sz -= topic_len + 2
        if op & 6:
            # QoS > 0: a 2-byte packet identifier precedes the payload.
            pid = self.sock.read(2)
            pid = pid[0] << 8 | pid[1]
            sz -= 2
        msg = self.sock.read(sz)
        # Deliver to the application callback.
        self.cb(topic, msg)
        if op & 6 == 2:
            # QoS 1 delivery: acknowledge with PUBACK carrying the same id.
            pkt = bytearray(b"\x40\x02\0\0")
            struct.pack_into("!H", pkt, 2, pid)
            self.sock.write(pkt)
        elif op & 6 == 4:
            assert 0  # QoS 2 delivery not supported

    # Checks whether a pending message from server is available.
    # If not, returns immediately with None. Otherwise, does
    # the same processing as wait_msg.
    def check_msg(self):
        self.sock.setblocking(False)
        return self.wait_msg()
| mit |
borosnborea/SwordGO_app | example/gps/main.py | 2 | 1650 | from kivy.lang import Builder
from plyer import gps
from kivy.app import App
from kivy.properties import StringProperty
from kivy.clock import Clock, mainthread
kv = '''
BoxLayout:
orientation: 'vertical'
Label:
text: app.gps_location
Label:
text: app.gps_status
BoxLayout:
size_hint_y: None
height: '48dp'
padding: '4dp'
ToggleButton:
text: 'Start' if self.state == 'normal' else 'Stop'
on_state:
app.start(1000, 0) if self.state == 'down' else \
app.stop()
'''
class GpsTest(App):
    '''Minimal Kivy demo that shows live GPS fixes obtained via plyer.'''

    # Bound to the two labels declared in the kv layout.
    gps_location = StringProperty()
    gps_status = StringProperty('Click Start to get GPS location updates')

    def build(self):
        '''Wire up the plyer GPS callbacks, then build the UI from kv.'''
        try:
            gps.configure(on_location=self.on_location,
                          on_status=self.on_status)
        except NotImplementedError:
            import traceback
            traceback.print_exc()
            self.gps_status = 'GPS is not implemented for your platform'
        return Builder.load_string(kv)

    def start(self, minTime, minDistance):
        '''Begin receiving location updates from the platform GPS.'''
        gps.start(minTime, minDistance)

    def stop(self):
        '''Stop receiving location updates.'''
        gps.stop()

    @mainthread
    def on_location(self, **kwargs):
        # Render every provider-supplied field as one "key=value" line.
        fields = ('{}={}'.format(k, v) for k, v in kwargs.items())
        self.gps_location = '\n'.join(fields)

    @mainthread
    def on_status(self, stype, status):
        self.gps_status = 'type={}\n{}'.format(stype, status)

    def on_pause(self):
        # Keep the GPS off while the app is backgrounded.
        gps.stop()
        return True

    def on_resume(self):
        gps.start(1000, 0)
# Run the demo app when executed as a script.
if __name__ == '__main__':
    GpsTest().run()
| gpl-3.0 |
vinodpanicker/scancode-toolkit | tests/cluecode/data/copyrights/copyright_psf_in_python-BitVector_py.py | 48 | 86557 | #!/usr/bin/env python
# Module metadata; __version__ and __date__ are interpolated into the
# module docstring assigned to __doc__ below.
__version__ = '1.5.1'
__author__ = "Avinash Kak (kak@purdue.edu)"
__date__ = '2008-September-4'
__url__ = 'http://RVL4.ecn.purdue.edu/~kak/dist/BitVector-1.5.1.html'
__copyright__ = "(C) 2008 Avinash Kak. Python Software Foundation."
__doc__ = '''
BitVector.py
Version: ''' + __version__ + '''
Author: Avinash Kak (kak@purdue.edu)
Date: ''' + __date__ + '''
CHANGE LOG:
Version 1.5.1:
Removed a bug from the implementation of the right
circular shift operator.
Version 1.5:
This version should prove to be much more efficient
for long bit vectors. Efficiency in BitVector
construction when only its size is specified was
achieved by eliminating calls to _setbit().
The application of logical operators to two
BitVectors of equal length was also made efficient
by eliminating calls to the padding function.
Another feature of this version is the count_bits()
method that returns the total number of bits set
in a BitVector instance. Yet another feature of
this version is the setValue() method that alters
the bit pattern associated with a previously
constructed BitVector.
Version 1.4.1:
The reset() method now returns 'self' to allow for
      cascaded invocation with the slicing operator.
Also removed the discrepancy between the value of the
__copyright__ variable in the module and the value of
license variable in setup.py.
Version 1.4:
This version includes the following two upgrades:
1) code for slice assignment; and 2) A reset function
to reinitialize a previously constructed BitVector.
Additionally, the code was cleaned up with the help of
pychecker.
Version 1.3.2:
Fixed a potentially misleading documentation issue for
the Windows users of the BitVector class. If you are
writing an internally generated BitVector to a disk file,
you must open the file in the binary mode. If you don't,
the bit patterns that correspond to line breaks will be
misinterpreted. On a Windows machine in the text mode,
      the bit pattern 00001010 ('\\n') will be written out
to the disk as 0000110100001010 ('\\r\\n').
Version 1.3.1:
Removed the inconsistency in the internal representation
of bit vectors produced by logical bitwise operations
vis-a-vis the bit vectors created by the constructor.
Previously, the logical bitwise operations resulted in bit
vectors that had their bits packed into lists of ints,
as opposed to arrays of unsigned shorts.
Version 1.3:
(a) One more constructor mode included: When initializing
a new bit vector with an integer value, you can now also
specify a size for the bit vector. The constructor
zero-pads the bit vector from the left with zeros. (b) The
BitVector class now supports 'if x in y' syntax to test if
the bit pattern 'x' is contained in the bit pattern 'y'.
(c) Improved syntax to conform to well-established Python
idioms. (d) What used to be a comment before the beginning
of each method definition is now a docstring.
Version 1.2:
(a) One more constructor mode included: You can now construct
a bit vector directly from a string of 1's and 0's. (b) The
class now constructs a shortest possible bit vector from an
integer value. So the bit vector for the integer value 0 is
just one bit of value 0, and so on. (c) All the rich
comparison operators are now overloaded. (d) The class now
includes a new method 'intValue()' that returns the unsigned
integer value of a bit vector. This can also be done through
'__int__'. (e) The package now includes a unittest based
framework for testing out an installation. This is in a
separate directory called "TestBitVector".
Version 1.1.1:
The function that does block reads from a disk file now peeks
ahead at the end of each block to see if there is anything
remaining to be read in the file. If nothing remains, the
more_to_read attribute of the BitVector object is set to
False. This simplifies reading loops. This version also
allows BitVectors of size 0 to be constructed
Version 1.1:
I have changed the API significantly to provide more ways for
constructing a bit vector. As a result, it is now necessary
to supply a keyword argument to the constructor.
INSTALLATION:
The BitVector class has been packaged using Distutils.
For installation, execute the following command-line in the
source directory (this is the directory that contains the
setup.py file after you have downloaded and uncompressed the
package):
python setup.py install
You have to have root privileges for this to work. On Linux
distributions, this will install the module file at a location
that looks like
/usr/lib/python2.5/site-packages/
If you do not have root access, you have the option of
working directly off the directory in which you downloaded
the software by simply placing the following statements at
the top of your scripts that use the BitVector class
import sys
sys.path.append( "pathname_to_BitVector_directory" )
To uninstall the module, simply delete the source directory,
locate where BitVector was installed with "locate BitVector"
and delete those files. As mentioned above, the full
pathname to the installed version is likely to look like
/usr/lib/python2.5/site-packages/BitVector*
If you want to carry out a non-standard install of BitVector,
    look up the on-line information on Distutils by pointing your
browser to
http://docs.python.org/dist/dist.html
INTRODUCTION:
The BitVector class for a memory-efficient packed representation
of bit arrays and for logical operations on such arrays. The
core idea used in this Python script for bin packing is based on
an internet posting by Josiah Carlson to the Pyrex mailing list.
Operations supported on bit vectors:
__getitem__
__setitem__
__len__
__iter__
__contains__
__getslice__
__str__
__int__
__add__
__eq__, __ne__, __lt__, __le__, __gt__, __ge__
| for bitwise or
& for bitwise and
^ for bitwise xor
~ for bitwise inversion
<< for circular rotation to the left
>> for circular rotation to the right
+ for concatenation
intValue() for returning the integer value
divide_into_two
permute
unpermute
pad_from_left
pad_from_right
read_bits_from_file
write_to_file
read_bits_from_fileobject
write_bits_to_fileobject
reset
slice assignment
count_bits
setValue
CONSTRUCTING BIT VECTORS:
You can construct a bit vector in six different ways.
(1) You can construct a bit vector directly from either a tuple
or a list of bits, as in
bv = BitVector( bitlist = [1,0,1,0,0,1,0,1,0,0,1,0,1,0,0,1] )
(2) You can construct a bit vector from an integer by
bv = BitVector( intVal = 56789 )
The bits stored now will correspond to the binary
representation of the integer. The resulting bit vector is
the shortest possible bit vector for the integer value
supplied. For example, when intVal is 0, the bit vector
constructed will consist of just the bit 0.
(3) When initializing a bit vector with an intVal as shown
above, you can also specify a size for the bit vector:
bv = BitVector( intVal = 0, size = 8 )
will return the bit vector consisting of the bit pattern
00000000. The zero padding needed for meeting the size
requirement is always on the left. If the size supplied is
smaller than what it takes to create the shortest possible
bit vector for intVal, an exception is thrown.
(4) You can create a zero-initialized bit vector of a given size
by
bv = BitVector( size = 62 )
This bit vector will hold exactly 62 bits, all initialized to
the 0 bit value.
(5) You can construct a bit vector from a disk file by a two-step
procedure. First you construct an instance of bit vector by
bv = BitVector( filename = 'somefile' )
This bit vector itself is incapable of holding the bits. To
now create bit vectors that actually hold the bits, you need
to make the following sort of a call on the above variable
bv:
bv1 = bv.read_bits_from_file( 64 )
bv1 will be a regular bit vector containing 64 bits from the
disk file. If you want to re-read a file from the beginning
for some reason, you must obviously first close the file
object that was acquired with a call to the BitVector
constructor with a filename argument. This can be
accomplished by
bv.close_file_object()
(6) You can construct a bit vector from a string of 1's and 0's
by
bv = BitVector( bitstring = '110011110000' )
(7) Yet another way to construct a bit vector is to read the bits
directly from a file-like object, as in
x = "111100001111"
fileobj = StringIO.StringIO( x )
bv = BitVector( fp = fileobj )
OPERATIONS SUPPORTED BY THE BITVECTOR CLASS:
DISPLAYING BIT VECTORS:
1) Since the BitVector class implements the __str__ method, a
bit vector can be displayed on a terminal by
print bitvec
Basically, you can always obtain the string representation
of a bit vector by
str( bitvec )
and integer value by
int( bitvec )
ACCESSING AND SETTING INDIVIDUAL BITS AND SLICES:
2) Any single bit of a bit vector bv can be set to 1 or 0 by
bv[M] = 1_or_0
print bv[M]
for accessing (and setting) the bit at the position that is
indexed M. You can retrieve the bit at position M by bv[M].
3) A slice of a bit vector obtained by
bv[i:j]
is a bit vector constructed from the bits at index positions
from i through j-1.
4) You can also carry out slice assignment:
bv1 = BitVector( size = 25 )
bv2 = BitVector( bitstring = '1010001' )
bv1[6:9] = bv2[0:3]
bv3 = BitVector( bitstring = '101' )
bv1[0:3] = bv3
The first slice assignment will set the 6th, 7th, and
the 8th bits of the bit vector bv1 according to the first
three bits of bv2. The second slice assignment will set
the first three bits of bv1 according to the three bits
in bv3.
5) You can iterate over a bit vector, as illustrated by
for bit in bitvec:
print bit,
This is made possible by the override definition for the
special __iter__() method.
6) Negative subscripts for array-like indexing are supported.
Therefore,
bitvec[ -i ]
is legal assuming that the index range is not violated.
7) You can reset a previously constructed bit vector to
either the all zeros state or the all ones state by
bv1 = BitVector( size = 25 )
...
...
bv1.reset( 1 )
...
...
bv1.reset( 0 )
The first call to reset() will set all the bits of
bv1 to 1's and the second call all bit to 0's.
LOGICAL OPERATIONS ON BIT VECTORS:
8) Given two bit vectors bv1 and bv2, you can perform bitwise
logical operations on them by
result_bv = bv1 ^ bv2
result_bv = bv1 & bv2
result_bv = bv1 | bv2
result_bv = ~bv1
COMPARING BIT VECTORS:
9) Given two bit vectors bv1 and bv2, you can carry out the
following comparisons that return Boolean values:
bv1 == bv2
bv1 != bv2
bv1 < bv2
bv1 <= bv2
bv1 > bv2
bv1 >= bv2
The equalities and inequalities are determined by the integer
values associated with the bit vectors.
OTHER SUPPORTED OPERATIONS:
10) You can permute and un-permute bit vectors:
bv_permuted = bv.permute( permutation_list )
bv_unpermuted = bv.unpermute( permutation_list )
11) Left and right circular rotations can be carried out by
bitvec << N
bitvec >> N
for circular rotations to the left and right by N bit
positions.
12) A bit vector containing an even number of bits can be
divided into two equal parts by
[left_half, right_half] = bitvec.divide_into_two()
where left_half and right_half hold references to the two
returned bit vectors.
13) You can find the integer value of a bit array by
            bitvec.intValue()
or by
int( bitvec )
14) You can convert a bit vector into its string representation
by
str( bitvec )
15) Because __add__ is supplied, you can always join two
bit vectors by
bitvec3 = bitvec1 + bitvec2
bitvec3 is a new bit vector that contains all the
bits of bitvec1 followed by all the bits of bitvec2.
16) You can write a bit vector directly to a file, as
illustrated by the following example that reads one bit
vector from a file and then writes it to another
file
bv = BitVector( filename = 'input.txt' )
bv1 = bv.read_bits_from_file(64)
print bv1
FILEOUT = open( 'output.bits', 'wb' )
bv1.write_to_file( FILEOUT )
FILEOUT.close()
bv = BitVector( filename = 'output.bits' )
bv2 = bv.read_bits_from_file( 64 )
print bv2
IMPORTANT: The size of bit vector must be a multiple of
of 8 for this write function to work. If this
condition is not met, the function throws an
exception.
IMPORTANT FOR WINDOWS USERS: When writing an internally
generated bit vector out to a disk file, it
is important to open the file in the binary
mode as shown. Otherwise, the bit pattern
00001010 ('\\n') in your bitstring will be written
out as 0000110100001010 ('\\r\\n'), which is the
linebreak on Windows machine.
17) You can also write a bit vector directly to a stream
object, as illustrated by
fp_write = StringIO.StringIO()
bitvec.write_bits_to_fileobject( fp_write )
print fp_write.getvalue() # 111100001111
18) You can pad a bit vector from the left or from the
right with a designated number of zeros
bitvec.pad_from_left( n )
bitvec.pad_from_right( n )
In the first case, the new bit vector will be the same
as the old bit vector except for the additional n zeros
on the left. The same thing happens in the second
case except that now the additional n zeros will be on
the right.
19) You can test if a bit vector x is contained in another bit
vector y by using the syntax 'if x in y'. This is made
possible by the override definition for the special
__contains__() method.
20) You can count the number of bits set in a BitVector
instance by
bv = BitVector( bitstring = '100111' )
print bv.count_bits() # 4
21) You can change the bit pattern associated with a
previously constructed BitVector instance:
bv = BitVector( intVal = 7, size =16 )
print bv # 0000000000000111
bv.setValue( intVal = 45 )
print bv # 101101
HOW THE BIT VECTORS ARE STORED:
The bits of a bit array are stored in 16-bit unsigned ints.
After resolving the argument with which the constructor is
called (which happens in lines (A2) through (A70) of the file
BitVector.py), the very first thing that the constructor does is
to figure out in line (A78) as to how many of those 2-byte ints
it needs for the bits. For example, if you wanted to store a
64-bit array, the variable 'two_byte_ints_needed' in line (A78)
would be set to 4. (This does not mean that the size of a bit
vector must be a multiple of 16. Any sized bit vectors can
constructed using the required number of two-byte ints.) Line
(A79) then creates an array of 2-byte ints and initializes it
with the required number of zeros. Lines (A80) then shifts the
bits into the array of two-byte ints.
As mentioned above, note that it is not necessary for the size
of the vector to be a multiple of 16 even though we are using
    C's unsigned short as a basic unit for storing the bit
arrays. The class BitVector keeps track of the actual number of
bits in the bit vector through the "size" instance attribute.
With regard to the code in lines (A2) through (A77) of the file
BitVector.py, note that, except for one case, the constructor
must be called with a single keyword argument, which determines
how the bit vector will be constructed. The single exception to
this rule is for the keyword argument 'intVal' which can be used
along with the 'size' keyword argument. When 'intVal' is used
with the 'size' option, the bit vector constructed for the
integer is the shortest possible bit vector. On the other hand,
when 'size' is also specified, the bit vector is padded with
zeroes from the left so that it has the specified size.
Lines (A16) through (A22) are for the following sort of a call
bv = BitVector( filename = 'myfilename' )
This call returns a bit vector on which you must subsequently
invoke the 'read_bits_from_file()' method to actually obtain a
bit vector consisting of the bits that constitute the
information stored in the file.
Lines (A23) through (A28) are for the case when you want to
construct a bit vector by reading the bits off a file-like
object, as in
x = "111100001111"
fileobj = StringIO.StringIO( x )
bv = BitVector( fp = fileobj )
Lines (A29) through (A61) are for the case when you want to
construct a bit vector from an integer, as in
bv = BitVector( intVal = 123456 )
The bits stored in the bit vector will correspond to the binary
representation of the integer argument provided. The bit vector
constructed with the above call will be the shortest possible
bit vector for the integer supplied. As a case in point, when
the intVal is 0, the bit vector will consist of a single bit
which will be 0 also. The code in lines (A27) through (A59) can
also handle the following sort of a call
bv = BitVector( intVal = 46, size = 16 )
    which returns a bit vector of a specific size by padding the
    shortest possible bit vector for the intVal with zeros from the
left.
Lines (A62) through (A68) are for constructing a bit vector with
just the size information, as in
bv = BitVector( size = 61 )
This returns a bit vector that will hold exactly 61 bits, all
initialized to the zero value.
Lines (A69) through (A73) are for constructing a bit vector from
a bitstring, as in
bv = BitVector( bitstring = '00110011111' )
Finally, lines (A74) through (A77) are for constructing a bit
vector from a list or a tuple of the individual bits:
bv = BitVector( bitlist = (1, 0, 1, 1, 0, 0, 1) )
The bit vector constructed is initialized with the supplied
bits.
ACKNOWLEDGEMENTS:
The author is grateful to Oleg Broytmann for suggesting many
improvements that were incorporated in Version 1.1 of this
package. The author would like to thank Kurt Schwehr whose
email resulted in the creation of Version 1.2. Kurt also caught
an error in my earlier version of 'setup.py' and suggested a
unittest based approach to the testing of the package. Kurt
also supplied the Makefile that is included in this
distribution. The author would also like to thank all (Scott
Daniels, Blair Houghton, and Steven D'Aprano) for their
responses to my comp.lang.python query concerning how to make a
Python input stream peekable. This feature was included in
Version 1.1.1.
With regard to the changes incorporated in Version 1.3, thanks
are owed to Kurt Schwehr and Gabriel Ricardo for bringing to my
attention the bug related to the intVal method of initializing a
bit vector when the value of intVal exceeded sys.maxint. This
problem is fixed in Version 1.3. Version 1.3 also includes many
other improvements that make the syntax better conform to the
standard idioms of Python. These changes and the addition of
the new constructor mode (that allows a bit vector of a given
size to be constructed from an integer value) are also owing to
Kurt's suggestions.
With regard to the changes incorporated in Version 1.3.1, I
would like to thank Michael Haggerty for noticing that the
bitwise logical operators resulted in bit vectors that had their
bits packed into lists of ints, as opposed to arrays of
unsigned shorts. This inconsistency in representation has been
removed in version 1.3.1. Michael has also suggested that
since BitVector is mutable, I should be overloading __iand__(),
__ior__(), etc., for in-place modifications of bit vectors.
Michael certainly makes a good point. But I am afraid that this
change will break the code for the existing users of the
BitVector class.
I thank Mathieu Roy for bringing to my attention the problem
with writing bitstrings out to a disk files on Windows
machines. This turned out to be a problem more with the
documentation than with the BitVector class itself. On a
Windows machine, it is particularly important that a file
you are writing a bitstring into be opened in binary mode
since otherwise the bit pattern 00001010 ('\\n') will be written
out as 0000110100001010 ('\\r\\n'). This documentation fix
resulted in Version 1.3.2.
With regard to Version 1.4, the suggestions/bug reports
made by John Kominek, Bob Morse, and Steve Ward contributed
to this version. I wish to thank all three. John wanted me
to equip the class with a reset() method so that a previously
constructed class could be reset to either all 0's or all
1's. Bob spotted loose local variables in the implementation
--- presumably left over from a debugging phase of the code.
Bob recommended that I clean up the code with pychecker. That
has been done. Steve noticed that slice assignment was not
working. It should work now.
Version 1.4.1 was prompted by John Kominek suggesting that
if reset() returned self, then the slice operation could
be combined with the reset operation. Thanks John! Another
reason for 1.4.1 was to remove the discrepancy between the
value of the __copyright__ variable in the module and the
value of license variable in setup.py. This discrepancy
was brought to my attention by David Eyk. Thanks David!
Version 1.5 has benefited greatly by the suggestions made
by Ryan Cox. By examining the BitVector execution with
cProfile, Ryan observed that my implementation was making
unnecessary method calls to _setbit() when just the size
option is used for constructing a BitVector instance.
Since Python allocates cleaned up memory, it is unnecessary
to set the individual bits of a vector if it is known in
advance that they are all zero. Ryan made a similar observation
for the logical operations applied to two BitVector instances
of equal length. He noticed that I was making unnecessary
calls to _resize_pad_from_left() for the case of equal
arguments to logical operations. Ryan also recommended that
I include a method that returns the total number of bits
set in a BitVector instance. The new method count_bits() does
exactly that. Thanks Ryan for all your suggestions.
Version 1.5 also includes the method setValue() that allows
the internally stored bit pattern associated with a previously
constructed BitVector to be changed. A need for this method
was expressed by Aleix Conchillo. Thanks Aleix.
Version 1.5.1 is a quick release to fix a bug in the
right circular shift operator. This bug was discovered
by Jasper Spaans. Thanks very much Jasper.
ABOUT THE AUTHOR:
Avi Kak is the author of "Programming with Objects: A
Comparative Presentation of Object-Oriented Programming
with C++ and Java", published by John-Wiley in 2003. This
book presents a new approach to the combined learning of
two large object-oriented languages, C++ and Java. It is
being used as a text in a number of educational programs
around the world. This book has also been translated into
Chinese. Further information on the book is available at
www.programming-with-objects.com
SOME EXAMPLE CODE:
#!/usr/bin/env python
import BitVector
# Construct a bit vector from a list or tuple of bits:
bv = BitVector.BitVector( bitlist = (1, 0, 0, 1) )
print bv # 1001
# Construct a bit vector from an integer:
bv = BitVector.BitVector( intVal = 5678 )
print bv # 0001011000101110
# Construct a bit vector of a given size from a given
# integer:
bv = BitVector( intVal = 45, size = 16 )
print bv # 0000000000101101
# Construct a zero-initialized bit vector of a given size:
bv = BitVector.BitVector( size = 5 )
print bv # 00000
# Construct a bit vector from a bit string:
bv = BitVector.BitVector( bitstring = '110001' )
print bv[0], bv[1], bv[2], bv[3], bv[4], bv[5] # 1 1 0 0 0 1
print bv[-1], bv[-2], bv[-3], bv[-4], bv[-5], bv[-6] # 1 0 0 0 1 1
# Construct a bit vector from a file like object:
import StringIO
x = "111100001111"
fp_read = StringIO.StringIO( x )
bv = BitVector.BitVector( fp = fp_read )
print bv # 111100001111
# Experiments with bitwise logical operations:
bv3 = bv1 | bv2
bv3 = bv1 & bv2
bv3 = bv1 ^ bv2
bv6 = ~bv5
# Find the length of a bit vector
print len( bitvec )
# Find the integer value of a bit vector
print int( bitvec )
# Open a file for reading bit vectors from
bv = BitVector.BitVector( filename = 'TestBitVector/testinput1.txt' )
print bv # nothing yet
bv1 = bv.read_bits_from_file(64)
print bv1 # first 64 bits from the file
# Divide a bit vector into two equal sub-vectors:
[bv1, bv2] = bitvec.divide_into_two()
# Permute and Un-Permute a bit vector:
bv2 = bitvec.permute( permutation_list )
bv2 = bitvec.unpermute( permutation_list )
# Try circular shifts to the left and to the right
bitvec << 7
bitvec >> 7
# Try 'if x in y' syntax for bit vectors:
bv1 = BitVector( bitstring = '0011001100' )
bv2 = BitVector( bitstring = '110011' )
if bv2 in bv1:
print "%s is in %s" % (bv2, bv1)
else:
print "%s is not in %s" % (bv2, bv1)
.....
.....
(For a more complete working example, see the example code in
the BitVectorDemo.py file in the Examples sub-directory.)
'''
import array
import operator
_hexdict = { '0' : '0000', '1' : '0001', '2' : '0010', '3' : '0011',
'4' : '0100', '5' : '0101', '6' : '0110', '7' : '0111',
'8' : '1000', '9' : '1001', 'a' : '1010', 'b' : '1011',
'c' : '1100', 'd' : '1101', 'e' : '1110', 'f' : '1111' }
def _readblock( blocksize, bitvector ): #(R1)
'''If this function can read all blocksize bits, it peeks ahead to
see if there is anything more to be read in the file. It uses
tell-read-seek mechanism for this in lines (R18) through (R21). If
there is nothing further to be read, it sets the more_to_read
attribute of the bitvector object to False. Obviously, this can
only be done for seekable streams such as those connected with disk
files. According to Blair Houghton, a similar feature could
presumably be implemented for socket streams by using recv() or
recvfrom() if you set the flags argument to MSG_PEEK.
'''
global hexdict #(R2)
bitstring = '' #(R3)
i = 0 #(R4)
while ( i < blocksize / 8 ): #(R5)
i += 1 #(R6)
byte = bitvector.FILEIN.read(1) #(R7)
if byte == '': #(R8)
if len(bitstring) < blocksize: #(R9)
bitvector.more_to_read = False #(R10)
return bitstring #(R11)
hexvalue = hex( ord( byte ) ) #(R12)
hexvalue = hexvalue[2:] #(R13)
if len( hexvalue ) == 1: #(R14)
hexvalue = '0' + hexvalue #(R15)
bitstring += _hexdict[ hexvalue[0] ] #(R16)
bitstring += _hexdict[ hexvalue[1] ] #(R17)
file_pos = bitvector.FILEIN.tell() #(R18)
# peek at the next byte; moves file position only if a
# byte is read
next_byte = bitvector.FILEIN.read(1) #(R19)
if next_byte: #(R20)
# pretend we never read the byte
bitvector.FILEIN.seek( file_pos ) #(R21)
else: #(R22)
bitvector.more_to_read = False #(R23)
return bitstring #(R24)
#-------------------- BitVector Class Definition ----------------------
class BitVector( object ): #(A1)
def __init__( self, *args, **kwargs ): #(A2)
if args: #(A3)
raise ValueError( #(A4)
'''BitVector constructor can only be called
with keyword arguments for the following
keywords: filename, fp (for fileobject),
size, intValue, bitlist (for a list or
tuple of bits, or bitstring)''')
filename = fp = intVal = size = bitlist = bitstring = None #(A5)
if kwargs.has_key('filename'):filename=kwargs.pop('filename')#(A6)
if kwargs.has_key('fp'): fp = kwargs.pop('fp') #(A7)
if kwargs.has_key('size'): size = kwargs.pop('size') #(A8)
if kwargs.has_key('intVal'): intVal = kwargs.pop('intVal') #(A9)
if kwargs.has_key('bitlist'):
bitlist = kwargs.pop('bitlist') #(A10)
if kwargs.has_key('bitstring') :
bitstring = kwargs.pop('bitstring') #(A11)
self.filename = None #(A12)
self.size = 0 #(A13)
self.FILEIN = None #(A14)
self.FILEOUT = None #(A15)
if filename: #(A16)
if fp or size or intVal or bitlist or bitstring: #(A17)
raise ValueError( #(A18)
'''When filename is specified, you cannot
give values to any other constructor args''')
self.filename = filename #(A19)
self.FILEIN = open( filename, 'rb' ) #(A20)
self.more_to_read = True #(A21)
return #(A22)
elif fp: #(A23)
if filename or size or intVal or bitlist or bitstring: #(A24)
raise ValueError( #(A25)
'''When fileobject is specified, you cannot
give values to any other constructor args''')
bits = self.read_bits_from_fileobject( fp ) #(A26)
bitlist = map( int, bits ) #(A27)
self.size = len( bitlist ) #(A28)
elif intVal or intVal == 0: #(A29)
if filename or fp or bitlist or bitstring: #(A30)
raise ValueError( #(A31)
'''When intVal is specified, you can only give
a value to the 'size' constructor arg''')
if intVal == 0: #(A32)
bitlist = [0] #(A33)
if not size: #(A34)
self.size = 1 #(A35)
else: #(A36)
if size < len(bitlist): #(A37)
raise ValueError( #(A38)
'''The value specified for size must be
at least as large as for the smallest
bit vector possible for intVal''')
n = size - len(bitlist) #(A39)
bitlist = [0]*n + bitlist #(A40)
self.size = len( bitlist ) #(A41)
else: #(A42)
hexVal = hex( intVal ).lower().rstrip('l') #(A43)
hexVal = hexVal[2:] #(A44)
if len( hexVal ) == 1: #(A45)
hexVal = '0' + hexVal #(A46)
bitlist = ''.join(map(lambda x: _hexdict[x],hexVal))#(A47)
bitlist = map( int, bitlist ) #(A48)
i = 0 #(A49)
while ( i < len( bitlist ) ): #(A50)
if bitlist[i] == 1: break #(A51)
i += 1 #(A52)
del bitlist[0:i] #(A53)
if not size: #(A54)
self.size = len( bitlist ) #(A55)
else: #(A56)
if size < len(bitlist): #(A57)
raise ValueError( #(A58)
'''The value specified for size must be
at least as large as for the smallest
bit vector possible for intVal''')
n = size - len(bitlist) #(A59)
bitlist = [0]*n + bitlist #(A60)
self.size = len( bitlist ) #(A61)
elif size >= 0: #(A62)
if filename or fp or intVal or bitlist or bitstring: #(A63)
raise ValueError( #(A64)
'''When size is specified (without an intVal),
you cannot give values to any other
constructor args''')
self.size = size #(A65)
two_byte_ints_needed = (size + 15) // 16 #(A66)
self.vector = array.array('H', [0]*two_byte_ints_needed)#(A67)
return #(A68)
elif bitstring or bitstring == '': #(A69)
if filename or fp or size or intVal or bitlist: #(A70)
raise ValueError( #(A71)
'''When a bitstring is specified, you cannot
give values to any other constructor args''')
bitlist = map( int, list(bitstring) ) #(A72)
self.size = len( bitlist ) #(A73)
elif bitlist: #(A74)
if filename or fp or size or intVal or bitstring: #(A75)
raise ValueError( #(A76)
'''When bits are specified, you cannot
give values to any other constructor args''')
self.size = len( bitlist ) #(A77)
else: #(A78)
raise ValueError("wrong arg(s) for constructor") #(A79)
two_byte_ints_needed = (len(bitlist) + 15) // 16 #(A80)
self.vector = array.array( 'H', [0]*two_byte_ints_needed ) #(A81)
map( self._setbit, enumerate(bitlist), bitlist) #(A82)
def _setbit( self, posn, val ): #(B1)
'Set the bit at the designated position to the value shown'
if val not in (0, 1): #(B2)
raise ValueError( "incorrect value for a bit" ) #(B3)
if isinstance( posn, (tuple) ): #(B4)
posn = posn[0] #(B5)
if posn >= self.size or posn < -self.size: #(B6)
raise ValueError( "index range error" ) #(B7)
if posn < 0: posn = self.size + posn #(B8)
block_index = posn // 16 #(B9)
shift = posn & 15 #(B10)
cv = self.vector[block_index] #(B11)
if ( cv >> shift ) & 1 != val: #(B12)
self.vector[block_index] = cv ^ (1 << shift) #(B13)
def _getbit( self, posn ): #(C1)
'Get the bit from the designated position'
if posn >= self.size or posn < -self.size: #(C2)
raise ValueError( "index range error" ) #(C3)
if posn < 0: posn = self.size + posn #(C4)
return ( self.vector[posn//16] >> (posn&15) ) & 1 #(C5)
def __xor__(self, other): #(E1)
'''
Take a bitwise 'xor' of the bit vector on which
the method is invoked with the argument bit vector.
Return the result as a new bit vector. If the two
bit vectors are not of the same size, pad the shorter
one with zeros from the left.
'''
if self.size < other.size: #(E2)
bv1 = self._resize_pad_from_left(other.size - self.size) #(E3)
bv2 = other #(E4)
elif self.size > other.size: #(E5)
bv1 = self #(E6)
bv2 = other._resize_pad_from_left(self.size - other.size)#(E7)
else: #(E8)
bv1 = self #(E9)
bv2 = other #(E10)
res = BitVector( size = bv1.size ) #(E11)
lpb = map(operator.__xor__, bv1.vector, bv2.vector) #(E12)
res.vector = array.array( 'H', lpb ) #(E13)
return res #(E14)
def __and__(self, other): #(F1)
'''
Take a bitwise 'and' of the bit vector on which the method is
invoked with the argument bit vector. Return the result as a
new bit vector. If the two bit vectors are not of the same
size, pad the shorter one with zeros from the left.
'''
if self.size < other.size: #(F2)
bv1 = self._resize_pad_from_left(other.size - self.size) #(F3)
bv2 = other #(F4)
elif self.size > other.size: #(F5)
bv1 = self #(F6)
bv2 = other._resize_pad_from_left(self.size - other.size)#(F7)
else: #(F8)
bv1 = self #(F9)
bv2 = other #(F10)
res = BitVector( size = bv1.size ) #(F11)
lpb = map(operator.__and__, bv1.vector, bv2.vector) #(F12)
res.vector = array.array( 'H', lpb ) #(F13)
return res #(F14)
def __or__(self, other): #(G1)
'''
Take a bitwise 'or' of the bit vector on which the
method is invoked with the argument bit vector. Return
the result as a new bit vector. If the two bit vectors
are not of the same size, pad the shorter one with
zero's from the left.
'''
if self.size < other.size: #(G2)
bv1 = self._resize_pad_from_left(other.size - self.size) #(G3)
bv2 = other #(G4)
elif self.size > other.size: #(G5)
bv1 = self #(G6)
bv2 = other._resize_pad_from_left(self.size - other.size)#(G7)
else: #(G8)
bv1 = self #(G9)
bv2 = other #(G10)
res = BitVector( size = bv1.size ) #(G11)
lpb = map(operator.__or__, bv1.vector, bv2.vector) #(G12)
res.vector = array.array( 'H', lpb ) #(G13)
return res #(G14)
def __invert__(self): #(H1)
'''
Invert the bits in the bit vector on which the
method is invoked and return the result as a new
bit vector.
'''
res = BitVector( size = self.size ) #(H2)
lpb = map( operator.__inv__, self.vector ) #(H3)
res.vector = array.array( 'H' ) #(H3)
for i in range(len(lpb)): #(H4)
res.vector.append( lpb[i] & 0x0000FFFF ) #(H5)
return res #(H6)
def __add__(self, other): #(J1)
'''
Concatenate the argument bit vector with the bit
vector on which the method is invoked. Return the
concatenated bit vector as a new BitVector object.
'''
i = 0 #(J2)
outlist = [] #(J3)
while ( i < self.size ): #(J4)
outlist.append( self[i] ) #(J5)
i += 1 #(J6)
i = 0 #(J7)
while ( i < other.size ): #(J8)
outlist.append( other[i] ) #(J9)
i += 1 #(J10)
return BitVector( bitlist = outlist ) #(J11)
def _getsize(self): #(K1)
'Return the number of bits in a bit vector.'
return self.size #(K2)
    def read_bits_from_file(self, blocksize):                        #(L1)
        '''
        Read blocksize bits from a disk file and return a
        BitVector object containing the bits.  If the file
        contains fewer bits than blocksize, construct the
        BitVector object from however many bits there are
        in the file.  If the file contains zero bits, return
        a BitVector object of size attribute set to 0.

        Raises SyntaxError if this vector was not constructed with a
        filename, and ValueError if blocksize is not a multiple of 8.
        '''
        error_str = '''You need to first construct a BitVector
        object with a filename as argument'''                        #(L2)
        if not self.filename:                                        #(L3)
            raise SyntaxError( error_str )                           #(L4)
        if blocksize % 8 != 0:                                       #(L5)
            raise ValueError( "block size must be a multiple of 8" ) #(L6)
        # _readblock is a module-level helper (defined outside this view)
        # that returns a string of '0'/'1' characters.
        bitstr = _readblock( blocksize, self )                       #(L7)
        if len( bitstr ) == 0:                                       #(L8)
            return BitVector( size = 0 )                             #(L9)
        else:                                                        #(L10)
            return BitVector( bitstring = bitstr )                   #(L11)
def read_bits_from_fileobject( self, fp ): #(M1)
'''
This function is meant to read a bit string from a
file like object.
'''
bitlist = [] #(M2)
while 1: #(M3)
bit = fp.read() #(M4)
if bit == '': return bitlist #(M5)
bitlist += bit #(M6)
def write_bits_to_fileobject( self, fp ): #(N1)
'''
This function is meant to write a bit vector directly to
a file like object. Note that whereas 'write_to_file'
method creates a memory footprint that corresponds exactly
to the bit vector, the 'write_bits_to_fileobject' actually
writes out the 1's and 0's as individual items to the
file object. That makes this method convenient for
creating a string representation of a bit vector,
especially if you use the StringIO class, as shown in
the test code.
'''
for bit_index in range(self.size): #(N2)
if self[bit_index] == 0: #(N3)
fp.write( '0' ) #(N4)
else: #(N5)
fp.write( '1' ) #(N6)
def divide_into_two(self): #(P1)
'''
Divides an even-sized bit vector into two and returns
the two halves as a list of two bit vectors.
'''
if self.size % 2 != 0: #(P2)
raise ValueError( "must have even num bits" ) #(P3)
i = 0 #(P4)
outlist1 = [] #(P5)
while ( i < self.size /2 ): #(P6)
outlist1.append( self[i] ) #(P7)
i += 1 #(P8)
outlist2 = [] #(P9)
while ( i < self.size ): #(P10)
outlist2.append( self[i] ) #(P11)
i += 1 #(P12)
return [ BitVector( bitlist = outlist1 ),
BitVector( bitlist = outlist2 ) ] #(P13)
def permute(self, permute_list): #(Q1)
'''
Permute a bit vector according to the indices
shown in the second argument list. Return the
permuted bit vector as a new bit vector.
'''
if max(permute_list) > self.size -1: #(Q2)
raise ValueError( "Bad permutation index" ) #(Q3)
outlist = [] #(Q4)
i = 0 #(Q5)
while ( i < len( permute_list ) ): #(Q6)
outlist.append( self[ permute_list[i] ] ) #(Q7)
i += 1 #(Q8)
return BitVector( bitlist = outlist ) #(Q9)
def unpermute(self, permute_list): #(S1)
'''
Unpermute the bit vector according to the
permutation list supplied as the second argument.
If you first permute a bit vector by using permute()
and then unpermute() it using the same permutation
list, you will get back the original bit vector.
'''
if max(permute_list) > self.size -1: #(S2)
raise ValueError( "Bad permutation index" ) #(S3)
if self.size != len( permute_list ): #(S4)
raise ValueError( "Bad size for permute list" ) #(S5)
out_bv = BitVector( size = self.size ) #(S6)
i = 0 #(S7)
while ( i < len(permute_list) ): #(S8)
out_bv[ permute_list[i] ] = self[i] #(S9)
i += 1 #(S10)
return out_bv #(S11)
def write_to_file(self, file_out): #(T1)
'''
(Contributed by Joe Davidson) Write the bitvector
to the file object file_out. (A file object is
returned by a call to open()). Since all file I/O
is byte oriented, the bitvector must be multiple
of 8 bits. Each byte treated as MSB first (0th index).
'''
err_str = '''Only a bit vector whose length is a multiple of 8
can be written to a file. Use the padding functions
to satisfy this constraint.''' #(T2)
if not self.FILEOUT:
self.FILEOUT = file_out
if self.size % 8: #(T3)
raise ValueError( err_str ) #(T4)
for byte in range(self.size/8 ): #(T5)
value = 0 #(T6)
for bit in range(8): #(T7)
value += (self._getbit( byte*8 + (7 - bit) ) << bit )#(T8)
file_out.write( chr(value) ) #(T9)
def close_file_object(self): #(U1)
'''
For closing a file object that was used for reading
the bits into one or more BitVector objects.
'''
if not self.FILEIN: #(U2)
raise SyntaxError( "No associated open file" ) #(U3)
self.FILEIN.close() #(U4)
def intValue(self): #(V1)
'Return the integer value of a bitvector'
intVal = 0 #(V2)
for i in range(self.size): #(V3)
intVal += self[i] * (2 ** (self.size - i - 1)) #(V4)
return intVal #(V5)
    def __lshift__( self, n ):                                       #(W1)
        '''For an in-place left circular shift by n bit positions.

        NOTE: mutates self and returns None, so 'bv << n' is used as a
        statement rather than an expression.'''
        for i in range(n):                                           #(W2)
            self.circular_rotate_left_by_one()                       #(W3)
    def __rshift__( self, n ):                                       #(W4)
        '''For an in-place right circular shift by n bit positions.

        NOTE: mutates self and returns None (statement-style usage).'''
        for i in range(n):                                           #(W5)
            self.circular_rotate_right_by_one()                      #(W6)
    def circular_rotate_left_by_one(self):                           #(X1)
        '''For a one-bit in-place left circular shift.

        NOTE(review): relies on Python 2 semantics -- map() must return
        a list here (append/del are applied to the result); under
        Python 3 these map() calls would need to be wrapped in list().
        '''
        size = len(self.vector)                                      #(X2)
        # Bit 0 of chunk 0 is the leftmost bit of the whole bitstring.
        bitstring_leftmost_bit = self.vector[0] & 1                  #(X3)
        # Collect bit 0 of every chunk, then rotate that list left by one
        # so each chunk receives the low bit of its successor.
        left_most_bits = map(operator.__and__, self.vector, [1]*size)#(X4)
        left_most_bits.append(left_most_bits[0])                     #(X5)
        del(left_most_bits[0])                                       #(X6)
        # Shift every chunk right by one, then OR the successor's low bit
        # into bit position 15.
        self.vector = map(operator.__rshift__, self.vector, [1]*size)#(X7)
        self.vector = map( operator.__or__, self.vector, \
             map(operator.__lshift__, left_most_bits, [15]*size) )   #(X8)
        # The bit rotated off the front wraps around to the last position.
        self._setbit(self.size -1, bitstring_leftmost_bit)           #(X9)
    def circular_rotate_right_by_one(self):                          #(Y1)
        '''For a one-bit in-place right circular shift.

        NOTE(review): relies on Python 2 semantics -- map() must return
        a list here (insert/pop are applied to the result); under
        Python 3 these map() calls would need to be wrapped in list().
        '''
        size = len(self.vector)                                      #(Y2)
        bitstring_rightmost_bit = self[self.size - 1]                 #(Y3)
        # Collect bit 15 of every chunk; each chunk will receive the high
        # bit of its predecessor (hence insert at front / pop at back).
        right_most_bits = map( operator.__and__,
                               self.vector, [0x8000]*size )          #(Y4)
        self.vector = \
             map( operator.__and__, self.vector, [~0x8000]*size )    #(Y5)
        right_most_bits.insert(0, bitstring_rightmost_bit)           #(Y6)
        right_most_bits.pop()                                        #(Y7)
        # Shift every chunk left by one, then OR the predecessor's high
        # bit into bit position 0.
        self.vector = map(operator.__lshift__, self.vector, [1]*size)#(Y8)
        self.vector = map( operator.__or__, self.vector, \
             map(operator.__rshift__, right_most_bits, [15]*size) )  #(Y9)
        # The bit rotated off the end wraps around to position 0.
        self._setbit(0, bitstring_rightmost_bit)                     #(Y10)
    def circular_rot_left(self):                                     #(Z1)
        '''
        This is merely another implementation of the method
        circular_rotate_left_by_one() shown above.  This one
        does NOT use map functions.  This method carries out a
        one-bit left circular shift of a bit vector.
        '''
        max_index = (self.size -1)  // 16                            #(Z2)
        left_most_bit = self.vector[0] & 1                           #(Z3)
        self.vector[0] = self.vector[0] >> 1                         #(Z4)
        for i in range(1, max_index + 1):                            #(Z5)
            # Low bit of chunk i migrates to bit 15 of chunk i-1.
            left_bit = self.vector[i] & 1                            #(Z6)
            self.vector[i] = self.vector[i] >> 1                     #(Z7)
            self.vector[i-1] |= left_bit << 15                       #(Z8)
        # The bit shifted off the front wraps to the last bit position.
        self._setbit(self.size -1, left_most_bit)                    #(Z9)
    def circular_rot_right(self):                                    #(a1)
        '''
        This is merely another implementation of the method
        circular_rotate_right_by_one() shown above.  This one
        does NOT use map functions.  This method does a one-bit
        right circular shift of a bit vector.
        '''
        max_index = (self.size -1)  // 16                            #(a2)
        right_most_bit = self[self.size - 1]                         #(a3)
        # Clearing bit 15 before the left shift keeps each chunk inside
        # the unsigned 16-bit range of the 'H' array.
        self.vector[max_index] &= ~0x8000                            #(a4)
        self.vector[max_index] = self.vector[max_index] << 1         #(a5)
        for i in range(max_index-1, -1, -1):                         #(a6)
            # High bit of chunk i migrates to bit 0 of chunk i+1.
            right_bit = self.vector[i] & 0x8000                      #(a7)
            self.vector[i] &= ~0x8000                                #(a8)
            self.vector[i] = self.vector[i] << 1                     #(a9)
            self.vector[i+1] |= right_bit >> 15                      #(a10)
        # The bit shifted off the end wraps to bit position 0.
        self._setbit(0, right_most_bit)                              #(a11)
    # Allow array like subscripting for getting and setting:
    __getitem__ = _getbit                                            #(b1)
    def __setitem__(self, pos, item):                                #(b2)
        '''
        This is needed for both slice assignments and for
        index assignments.  It checks the types of pos and item
        to see if the call is for slice assignment.  For slice
        assignment, pos must be of type 'slice' and item of
        type BitVector.  For index assignment, the argument types
        are checked in the _setbit() method.
        '''
        # The following section is for slice assignment:
        if isinstance( pos, slice ):                                 #(b3)
            if (not isinstance( item, BitVector )):                  #(b4)
                raise TypeError('For slice assignment, \
                        the right hand side must be a BitVector')    #(b5)
            # NOTE(review): assumes pos.start and pos.stop are both
            # concrete non-negative ints -- a slice with a None bound
            # would fail on the subtraction below; confirm callers
            # always pass both bounds.
            if ( (pos.stop - pos.start) != len(item) ):              #(b6)
                raise ValueError('incompatible lengths for \
                        slice assignment')                           #(b7)
            for i in range( pos.start, pos.stop ):                   #(b8)
                self[i] = item[ i - pos.start ]                      #(b9)
            return                                                   #(b10)
        # For index assignment use _setbit()
        self._setbit( pos, item )                                    #(b11)
def __getslice__(self, i, j): #(c1)
'Allow slicing with [i:j], [:], etc.'
slicebits = [] #(c2)
if j > self.size: j = self.size #(c3)
for x in range(i,j): #(c4)
slicebits.append( self[x] ) #(c5)
return BitVector( bitlist = slicebits ) #(c6)
    # Allow len() to work:
    __len__ = _getsize                                               #(d1)
    # Allow int() to work:
    __int__ = intValue                                               #(d2)
    def __iter__( self ):                                            #(d3)
        '''
        To allow iterations over a bit vector by supporting the
        'for bit in bit_vector' syntax.  The iterator snapshots the
        bits at creation time (see BitVectorIterator).
        '''
        return BitVectorIterator( self )                             #(d4)
def __str__( self ): #(e1)
'To create a print representation'
if self.size == 0: #(e2)
return '' #(e3)
return ''.join( map( str, self ) ) #(e4)
# Compare two bit vectors:
def __eq__(self, other): #(f1)
if self.size != other.size: #(f2)
return False #(f3)
i = 0 #(f4)
while ( i < self.size ): #(f5)
if (self[i] != other[i]): return False #(f6)
i += 1 #(f7)
return True #(f8)
    def __ne__(self, other):                                         #(f9)
        'Inverse of __eq__.'
        return not self == other                                     #(f10)
    def __lt__(self, other):                                         #(f11)
        # Ordering comparisons compare the unsigned integer values.
        return self.intValue() < other.intValue()                    #(f12)
    def __le__(self, other):                                         #(f13)
        return self.intValue() <= other.intValue()                   #(f14)
    def __gt__(self, other):                                         #(f15)
        return self.intValue() > other.intValue()                    #(f16)
    def __ge__(self, other):                                         #(f17)
        return self.intValue() >= other.intValue()                   #(f18)
# Some additional utility functions:
def _make_deep_copy( self ): #(g1)
'Make a deep copy of a bit vector'
copy = str( self ) #(g2)
return BitVector( bitstring = copy ) #(g3)
def _resize_pad_from_left( self, n ): #(g4)
'''
Resize a bit vector by padding with n 0's
from the left. Return the result as a new bit
vector.
'''
new_str = '0'*n + str( self ) #(g5)
return BitVector( bitstring = new_str ) #(g6)
def _resize_pad_from_right( self, n ): #(g7)
'''
Resize a bit vector by padding with n 0's
from the right. Return the result as a new bit
vector.
'''
new_str = str( self ) + '0'*n #(g8)
return BitVector( bitstring = new_str ) #(g9)
def pad_from_left( self, n ): #(g10)
'Pad a bit vector with n zeros from the left'
new_str = '0'*n + str( self ) #(g11)
bitlist = map( int, list(new_str) ) #(g12)
self.size = len( bitlist ) #(g13)
two_byte_ints_needed = (len(bitlist) + 15) // 16 #(g14)
self.vector = array.array( 'H', [0]*two_byte_ints_needed ) #(g15)
map( self._setbit, enumerate(bitlist), bitlist) #(g16)
def pad_from_right( self, n ): #(g17)
'Pad a bit vector with n zeros from the right'
new_str = str( self ) + '0'*n #(g18)
bitlist = map( int, list(new_str) ) #(g19)
self.size = len( bitlist ) #(g20)
two_byte_ints_needed = (len(bitlist) + 15) // 16 #(g21)
self.vector = array.array( 'H', [0]*two_byte_ints_needed ) #(g22)
map( self._setbit, enumerate(bitlist), bitlist) #(g23)
def __contains__( self, otherBitVec ): #(h1)
'''
This supports 'if x in y' and 'if x not in y'
syntax for bit vectors.
'''
if self.size == 0: #(h2)
raise ValueError, "First arg bitvec has no bits" #(h3)
elif self.size < otherBitVec.size: #(h4)
raise ValueError, "First arg bitvec too short" #(h5)
max_index = self.size - otherBitVec.size + 1 #(h6)
for i in range(max_index): #(h7)
if self[i:i+otherBitVec.size] == otherBitVec: #(h8)
return True #(h9)
return False #(h10)
def reset( self, val ): #(j1)
'''
Resets a previously created BitVector to either all
zeros or all ones depending on the argument val.
Returns self to allow for syntax like
bv = bv1[3:6].reset(1)
or
bv = bv1[:].reset(1)
'''
if val not in (0,1): #(j2)
raise ValueError( "Incorrect reset argument" ) #(j3)
bitlist = [val for i in range( self.size )] #(j4)
map( self._setbit, enumerate(bitlist), bitlist ) #(j5)
return self #(j6)
def count_bits( self ): #(k1)
'''
Return the number of bits set in a BitVector instance.
'''
return reduce( lambda x, y: int(x)+int(y), self ) #(k2)
    def setValue(self, *args, **kwargs ):                            #(m1)
        '''
        Change the bit pattern associated with a previously
        constructed BitVector instance.  The allowable modes for
        changing the internally stored bit pattern are the same as
        for the constructor.
        '''
        # Re-running the constructor replaces size/vector in place.
        self.__init__( *args, **kwargs )                             #(m2)
#----------------------- BitVectorIterator Class -----------------------
class BitVectorIterator: #(j1)
def __init__( self, bitvec ): #(j2)
self.items = [] #(j3)
for i in range( bitvec.size ): #(j4)
self.items.append( bitvec._getbit(i) ) #(j5)
self.index = -1 #(j6)
def __iter__( self ): #(j7)
return self #(j8)
def next( self ): #(j9)
self.index += 1 #(j10)
if self.index < len( self.items ): #(j11)
return self.items[ self.index ] #(j12)
else: #(j13)
raise StopIteration #(j14)
#------------------------ End of Class Definition -----------------------
#------------------------ Test Code Follows -----------------------
if __name__ == '__main__':
    # Demonstration / regression script for the BitVector class.
    # NOTE(review): Python 2 only -- uses print statements, StringIO,
    # and 'except ValueError, arg'; expected outputs are shown in the
    # trailing comments.
    # Construct a bit vector of size 0
    print "\nConstructing a bit vector of size 0:"
    bv1 = BitVector( size = 0 )
    print bv1                                   # no output
    # Construct a bit vector of size 2:
    print "\nConstructing a bit vector of size 2:"
    bv2 = BitVector( size = 2 )
    print bv2                                   # 00
    # Joining two bit vectors:
    print "\nOutput concatenation of two previous bit vectors:"
    print bv1 + bv2                             # 00
    # Construct a bit vector with a tuple of bits:
    print "\nThis is a bit vector from a tuple of bits:"
    bv = BitVector( bitlist = (1, 0, 0, 1) )
    print bv                                    # 1001
    # Construct a bit vector with a list of bits:
    print "\nThis is a bit vector from a list of bits:"
    bv = BitVector( bitlist = [1, 1, 0, 1] )
    print bv                                    # 1101
    # Construct a bit vector from an integer
    bv = BitVector( intVal = 5678 )
    print "\nBit vector constructed from integer 5678:"
    print bv                                    # 1011000101110
    print "\nBit vector constructed from integer 0:"
    bv = BitVector( intVal = 0 )
    print bv                                    # 0
    print "\nBit vector constructed from integer 2:"
    bv = BitVector( intVal = 2 )
    print bv                                    # 10
    print "\nBit vector constructed from integer 3:"
    bv = BitVector( intVal = 3 )
    print bv                                    # 11
    print "\nBit vector constructed from integer 123456:"
    bv = BitVector( intVal = 123456 )
    print bv                                    # 11110001001000000
    print "\nInt value of the previous bit vector as computed by intVal():"
    print bv.intValue()                         # 123456
    print "\nInt value of the previous bit vector as computed by int():"
    print int( bv )                             # 123456
    # Construct a bit vector directly from a file-like object:
    import StringIO
    x = "111100001111"
    fp_read = StringIO.StringIO( x )
    bv = BitVector( fp = fp_read )
    print "\nBit vector constructed directed from a file like object:"
    print bv                                    # 111100001111
    # Construct a bit vector directly from a bit string:
    bv = BitVector( bitstring = '00110011' )
    print "\nBit Vector constructed directly from a string:"
    print bv                                    # 00110011
    bv = BitVector( bitstring = '' )
    print "\nBit Vector constructed directly from an empty string:"
    print bv                                    # nothing
    print "\nInteger value of the previous bit vector:"
    print bv.intValue()                         # 0
    # Test array-like indexing for a bit vector:
    bv = BitVector( bitstring = '110001' )
    print "\nPrints out bits individually from bitstring 110001:"
    print bv[0], bv[1], bv[2], bv[3], bv[4], bv[5]       # 1 1 0 0 0 1
    print "\nSame as above but using negative array indexing:"
    print bv[-1], bv[-2], bv[-3], bv[-4], bv[-5], bv[-6] # 1 0 0 0 1 1
    # Test setting bit values with positive and negative
    # accessors:
    bv = BitVector( bitstring = '1111' )
    print "\nBitstring for 1111:"
    print bv                                    # 1111
    print "\nReset individual bits of above vector:"
    bv[0]=0;bv[1]=0;bv[2]=0;bv[3]=0
    print bv                                    # 0000
    print "\nDo the same as above with negative indices:"
    bv[-1]=1;bv[-2]=1;bv[-4]=1
    print bv                                    # 1011
    print "\nCheck equality and inequality ops:"
    bv1 = BitVector( bitstring = '00110011' )
    bv2 = BitVector( bitlist = [0,0,1,1,0,0,1,1] )
    print bv1 == bv2                            # True
    print bv1 != bv2                            # False
    print bv1 < bv2                             # False
    print bv1 <= bv2                            # True
    bv3 = BitVector( intVal = 5678 )
    print bv3.intValue()                        # 5678
    print bv3                                   # 10110000101110
    print bv1 == bv3                            # False
    print bv3 > bv1                             # True
    print bv3 >= bv1                            # True
    # Create a string representation of a bit vector:
    fp_write = StringIO.StringIO()
    bv.write_bits_to_fileobject( fp_write )
    print "\nGet bit vector written out to a file-like object:"
    print fp_write.getvalue()                   # 1011
    print "\nExperiments with bitwise logical operations:"
    bv3 = bv1 | bv2
    print bv3                                   # 00110011
    bv3 = bv1 & bv2
    print bv3                                   # 00110011
    bv3 = bv1 + bv2
    print bv3                                   # 0011001100110011
    bv4 = BitVector( size = 3 )
    print bv4                                   # 000
    bv5 = bv3 + bv4
    print bv5                                   # 0011001100110011000
    bv6 = ~bv5
    print bv6                                   # 1100110011001100111
    bv7 = bv5 & bv6
    print bv7                                   # 0000000000000000000
    bv7 = bv5 | bv6
    print bv7                                   # 1111111111111111111
    print "\nTry logical operations on bit vectors of different sizes:"
    print BitVector( intVal = 6 ) ^ BitVector( intVal = 13 )   # 1011
    print BitVector( intVal = 6 ) & BitVector( intVal = 13 )   # 0100
    print BitVector( intVal = 6 ) | BitVector( intVal = 13 )   # 1111
    print BitVector( intVal = 1 ) ^ BitVector( intVal = 13 )   # 1100
    print BitVector( intVal = 1 ) & BitVector( intVal = 13 )   # 0001
    print BitVector( intVal = 1 ) | BitVector( intVal = 13 )   # 1101
    print "\nExperiments with setbit() and getsize():"
    bv7[7] = 0
    print bv7                                   # 1111111011111111111
    print len( bv7 )                            # 19
    bv8 = (bv5 & bv6) ^ bv7
    print bv8                                   # 1111111011111111111
    # --- The remaining experiments read/write files under TestBitVector/ ---
    print "\nConstruct a bit vector from what is in the file testinput1.txt:"
    bv = BitVector( filename = 'TestBitVector/testinput1.txt' )
    #print bv                                   # nothing to show
    bv1 = bv.read_bits_from_file(64)
    print "\nPrint out the first 64 bits read from the file:"
    print bv1
    # 0100000100100000011010000111010101101110011001110111001001111001
    print "\nRead the next 64 bits from the same file:"
    bv2 = bv.read_bits_from_file(64)
    print bv2
    # 0010000001100010011100100110111101110111011011100010000001100110
    print "\nTake xor of the previous two bit vectors:"
    bv3 = bv1 ^ (bv2)
    print bv3
    # 0110000101000010000110100001101000011001000010010101001000011111
    print "\nExperiment with dividing an even-sized vector into two:"
    [bv4, bv5] = bv3.divide_into_two()
    print bv4                                   # 01100001010000100001101000011010
    print bv5                                   # 00011001000010010101001000011111
    # Permute a bit vector:
    print "\nWe will use this bit vector for experiments with permute()"
    bv1 = BitVector( bitlist = [1, 0, 0, 1, 1, 0, 1] )
    print bv1                                   # 1001101
    bv2 = bv1.permute( [6, 2, 0, 1] )
    print "\nPermuted and contracted form of the previous bit vector:"
    print bv2                                   # 1010
    print "\nExperiment with writing an internally generated bit vector out to a disk file:"
    bv1 = BitVector( bitstring = '00001010' )
    FILEOUT = open( 'TestBitVector/test.txt', 'wb' )
    bv1.write_to_file( FILEOUT )
    FILEOUT.close()
    bv2 = BitVector( filename = 'TestBitVector/test.txt' )
    bv3 = bv2.read_bits_from_file( 32 )
    print "\nDisplay bit vectors written out to file and read back from the file and their respective lengths:"
    print bv1, bv3
    print len(bv1), len(bv3)
    print "\nExperiments with reading a file from the beginning to end:"
    bv = BitVector( filename = 'TestBitVector/testinput4.txt' )
    print "\nHere are all the bits read from the file:"
    while (bv.more_to_read):
        bv_read = bv.read_bits_from_file( 64 )
        print bv_read
    print
    print "\nExperiment with closing a file object and start extracting bit vectors from the file from the beginning again:"
    bv.close_file_object()
    bv = BitVector( filename = 'TestBitVector/testinput4.txt' )
    bv1 = bv.read_bits_from_file(64)
    print "\nHere are all the first 64 bits read from the file again after the file object was closed and opened again:"
    print bv1
    FILEOUT = open( 'TestBitVector/testinput5.txt', 'wb' )
    bv1.write_to_file( FILEOUT )
    FILEOUT.close()
    print "\nExperiment in 64-bit permutation and unpermutation of the previous 64-bit bitvector:"
    print "The permutation array was generated separately by the Fisher-Yates shuffle algorithm:"
    bv2 = bv1.permute( [22, 47, 33, 36, 18, 6, 32, 29, 54, 62, 4,
                        9, 42, 39, 45, 59, 8, 50, 35, 20, 25, 49,
                        15, 61, 55, 60, 0, 14, 38, 40, 23, 17, 41,
                        10, 57, 12, 30, 3, 52, 11, 26, 43, 21, 13,
                        58, 37, 48, 28, 1, 63, 2, 31, 53, 56, 44, 24,
                        51, 19, 7, 5, 34, 27, 16, 46] )
    print "Permuted bit vector:"
    print bv2
    bv3 = bv2.unpermute( [22, 47, 33, 36, 18, 6, 32, 29, 54, 62, 4,
                          9, 42, 39, 45, 59, 8, 50, 35, 20, 25, 49,
                          15, 61, 55, 60, 0, 14, 38, 40, 23, 17, 41,
                          10, 57, 12, 30, 3, 52, 11, 26, 43, 21, 13,
                          58, 37, 48, 28, 1, 63, 2, 31, 53, 56, 44, 24,
                          51, 19, 7, 5, 34, 27, 16, 46] )
    print "Unpurmute the bit vector:"
    print bv3
    print
    print
    print "\nTry circular shifts to the left and to the right for the following bit vector:"
    print bv3
    print "\nCircular shift to the left by 7 positions:"
    bv3 << 7
    print bv3
    print "\nCircular shift to the right by 7 positions:"
    bv3 >> 7
    print bv3
    print "Test len() on the above bit vector:"
    print len( bv3 )                            # 64
    print "\nTest forming a [5:22] slice of the above bit vector:"
    bv4 = bv3[5:22]
    print bv4                                   # 00100100000011010
    print "\nTest the iterator:"
    for bit in bv4:
        print bit,                              # 0 0 1 0 0 1 0 0 0 0 0 0 1 1 0 1 0
    print
    print "\nDemonstrate padding a bit vector from left:"
    bv = BitVector( bitstring = '101010' )
    bv.pad_from_left( 4 )
    print bv                                    # 0000101010
    print "\nDemonstrate padding a bit vector from right:"
    bv.pad_from_right( 4 )
    print bv                                    # 00001010100000
    print "\nTest the syntax 'if bit_vector_1 in bit_vector_2' syntax:"
    try:
        bv1 = BitVector( bitstring = '0011001100' )
        bv2 = BitVector( bitstring = '110011' )
        if bv2 in bv1:
            print "%s is in %s" % (bv2, bv1)
        else:
            print "%s is not in %s" % (bv2, bv1)
    except ValueError, arg:
        print "Error Message: " + str(arg)
    print "\nTest the size modifier when a bit vector is initialized with the intVal method:"
    bv = BitVector( intVal = 45, size = 16 )
    print bv                                    # 0000000000101101
    bv = BitVector( intVal = 0, size = 8 )
    print bv                                    # 00000000
    bv = BitVector( intVal = 1, size = 8 )
    print bv                                    # 00000001
    print "\nTesting slice assignment:"
    bv1 = BitVector( size = 25 )
    print "bv1= ", bv1                          # 0000000000000000000000000
    bv2 = BitVector( bitstring = '1010001' )
    print "bv2= ", bv2                          # 1010001
    bv1[6:9] = bv2[0:3]
    print "bv1= ", bv1                          # 0000001010000000000000000
    print "\nTesting reset function:"
    bv1.reset( 1 )
    print "bv1= ", bv1                          # 1111111111111111111111111
    print bv1[3:9].reset(0)                     # 000000
    print bv1[:].reset(0)                       # 0000000000000000000000000
    print "\nTesting count_bit():"
    bv = BitVector( intVal = 45, size = 16 )
    y = bv.count_bits()
    print y
    bv = BitVector( bitstring = '100111' )
    print bv.count_bits()
    bv = BitVector( bitstring = '00111000' )
    print bv.count_bits()
    bv = BitVector( bitstring = '001' )
    print bv.count_bits()
    bv = BitVector( bitstring = '00000000000000' )
    print bv.count_bits()
    print "\nTest setValue idea:"
    bv = BitVector( intVal = 7, size =16 )
    print bv                                    # 0000000000000111
    bv.setValue( intVal = 45 )
    print bv                                    # 101101
| apache-2.0 |
CoherentLabs/depot_tools | third_party/pylint/pyreverse/utils.py | 1 | 3711 | # Copyright (c) 2002-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
generic classes/functions for pyreverse core/extensions
"""
import sys
import re
import os
########### pyreverse option utils ##############################
# Name of the per-user configuration file, looked up under $HOME.
RCFILE = '.pyreverserc'

def get_default_options():
    """
    Read the user's ~/.pyreverserc config file and return its
    whitespace-separated tokens as a list of option strings.  Returns
    an empty list when $HOME is unset or the file does not exist.
    """
    options = []
    home = os.environ.get('HOME', '')
    if home:
        rcfile = os.path.join(home, RCFILE)
        try:
            # 'with' guarantees the handle is closed; the original
            # 'open(rcfile).read()' leaked the file object to the GC.
            with open(rcfile) as stream:
                options = stream.read().split()
        except IOError:
            pass # ignore if no config file found
    return options
def insert_default_options():
    """Insert the options from the user's config file into sys.argv,
    immediately after the program name (command-line args keep priority).
    """
    for option in reversed(get_default_options()):
        sys.argv.insert(1, option)
# astroid utilities ###########################################################
# Name patterns for the three non-public visibility categories.
SPECIAL = re.compile('^__[A-Za-z0-9]+[A-Za-z0-9_]*__$')
PRIVATE = re.compile('^__[_A-Za-z0-9]*[A-Za-z0-9]+_?$')
PROTECTED = re.compile('^_[_A-Za-z0-9]*$')

def get_visibility(name):
    """return the visibility from a name: public, protected, private or special
    """
    # Order matters: 'special' (dunder) names would also match PRIVATE.
    for pattern, level in ((SPECIAL, 'special'),
                           (PRIVATE, 'private'),
                           (PROTECTED, 'protected')):
        if pattern.match(name):
            return level
    return 'public'
# A class is considered abstract when 'Abstract' appears in its name;
# a name written entirely in caps/underscores is considered final.
ABSTRACT = re.compile('^.*Abstract.*')
FINAL = re.compile('^[A-Z_]*$')
def is_abstract(node):
    """return true if the given class node correspond to an abstract class
    definition

    Returns a truthy match object rather than a bool.
    """
    return ABSTRACT.match(node.name)
def is_final(node):
    """return true if the given class/function node correspond to final
    definition

    Returns a truthy match object rather than a bool.
    """
    return FINAL.match(node.name)
def is_interface(node):
    """Return True when the node's type attribute is 'interface'."""
    # bw compat
    return node.type == 'interface'
def is_exception(node):
    """Return True when the node's type attribute is 'exception'."""
    # bw compat
    return node.type == 'exception'
# Helpers #####################################################################
# Bit flags, one per member-visibility category.
_CONSTRUCTOR = 1
_SPECIAL = 2
_PROTECTED = 4
_PRIVATE = 8
# Each filter mode is the OR of the visibility categories it hides
# (consumed by FilterMixIn.show_attr below).
MODES = {
    'ALL'       : 0,
    'PUB_ONLY'  : _SPECIAL + _PROTECTED + _PRIVATE,
    'SPECIAL'   : _SPECIAL,
    'OTHER'     : _PROTECTED + _PRIVATE,
}
# Maps get_visibility() results onto the flag bits above.
VIS_MOD = {'special': _SPECIAL, 'protected': _PROTECTED,
           'private': _PRIVATE, 'public': 0}
class FilterMixIn(object):
    """filter nodes according to a mode and nodes' visibility

    *mode* is a '+'-separated combination of MODES keys (e.g.
    'PUB_ONLY' or 'SPECIAL+OTHER'); unknown mode names are reported on
    stderr and ignored.
    """
    def __init__(self, mode):
        "init filter modes"
        # 'except ... as' and an explicit stderr write replace the
        # Python-2-only 'except KeyError, ex' / 'print >>' forms; the
        # behavior (including the trailing newline) is unchanged.
        mode_mask = 0
        for nummod in mode.split('+'):
            try:
                mode_mask += MODES[nummod]
            except KeyError as ex:
                sys.stderr.write('Unknown filter mode %s\n' % ex)
        self.__mode = mode_mask

    def show_attr(self, node):
        """return true if the node should be treated
        """
        visibility = get_visibility(getattr(node, 'name', node))
        # Hidden when the node's visibility bit is part of the mask.
        return not (self.__mode & VIS_MOD[visibility])
| bsd-3-clause |
wangyum/beam | sdks/python/apache_beam/runners/worker/log_handler.py | 6 | 3855 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Beam fn API log handler."""
import logging
import math
import Queue as queue
import threading
import grpc
from apache_beam.portability.api import beam_fn_api_pb2
from apache_beam.portability.api import beam_fn_api_pb2_grpc
# This module is experimental. No backwards-compatibility guarantees.
class FnApiLogRecordHandler(logging.Handler):
  """A handler that writes log records to the fn API.

  Records are queued by emit() and streamed to the logging service in
  batches by a background daemon thread.
  """
  # Maximum number of log entries in a single stream request.
  _MAX_BATCH_SIZE = 1000
  # Used to indicate the end of stream.
  _FINISHED = object()
  # Mapping from logging levels to LogEntry levels.
  LOG_LEVEL_MAP = {
      logging.FATAL: beam_fn_api_pb2.LogEntry.Severity.CRITICAL,
      logging.ERROR: beam_fn_api_pb2.LogEntry.Severity.ERROR,
      logging.WARNING: beam_fn_api_pb2.LogEntry.Severity.WARN,
      logging.INFO: beam_fn_api_pb2.LogEntry.Severity.INFO,
      logging.DEBUG: beam_fn_api_pb2.LogEntry.Severity.DEBUG
  }
  def __init__(self, log_service_descriptor):
    # Opens a gRPC channel to the logging service and starts a daemon
    # thread that consumes the server's control-message stream.
    super(FnApiLogRecordHandler, self).__init__()
    self._log_channel = grpc.insecure_channel(log_service_descriptor.url)
    self._logging_stub = beam_fn_api_pb2_grpc.BeamFnLoggingStub(
        self._log_channel)
    self._log_entry_queue = queue.Queue()
    log_control_messages = self._logging_stub.Logging(self._write_log_entries())
    self._reader = threading.Thread(
        target=lambda: self._read_log_control_messages(log_control_messages),
        name='read_log_control_messages')
    self._reader.daemon = True
    self._reader.start()
  def emit(self, record):
    # Convert the stdlib LogRecord into a LogEntry proto and enqueue it
    # for the background writer; the logging call itself never blocks
    # on the network.
    log_entry = beam_fn_api_pb2.LogEntry()
    log_entry.severity = self.LOG_LEVEL_MAP[record.levelno]
    log_entry.message = self.format(record)
    log_entry.thread = record.threadName
    log_entry.log_location = record.module + '.' + record.funcName
    # Split the float epoch timestamp into whole seconds + nanoseconds.
    (fraction, seconds) = math.modf(record.created)
    nanoseconds = 1e9 * fraction
    log_entry.timestamp.seconds = int(seconds)
    log_entry.timestamp.nanos = int(nanoseconds)
    self._log_entry_queue.put(log_entry)
  def close(self):
    """Flush out all existing log entries and unregister this handler."""
    # Acquiring the handler lock ensures ``emit`` is not run until the lock is
    # released.
    self.acquire()
    # The _FINISHED sentinel makes the writer generator terminate.
    self._log_entry_queue.put(self._FINISHED)
    # wait on server to close.
    self._reader.join()
    self.release()
    # Unregister this handler.
    super(FnApiLogRecordHandler, self).close()
  def _write_log_entries(self):
    # Generator fed to the gRPC stream: block for the first entry, then
    # drain whatever else is already queued (bounded by _MAX_BATCH_SIZE)
    # without waiting, and yield the batch as a single List message.
    done = False
    while not done:
      log_entries = [self._log_entry_queue.get()]
      try:
        for _ in range(self._MAX_BATCH_SIZE):
          log_entries.append(self._log_entry_queue.get_nowait())
      except queue.Empty:
        pass
      if log_entries[-1] is self._FINISHED:
        done = True
        log_entries.pop()
      if log_entries:
        yield beam_fn_api_pb2.LogEntry.List(log_entries=log_entries)
  def _read_log_control_messages(self, log_control_iterator):
    # TODO(vikasrk): Handle control messages.
    for _ in log_control_iterator:
      pass
| apache-2.0 |
vlatosev/pushtest | node_modules/socket.io/node_modules/socket.io-client/node_modules/engine.io-client/node_modules/engine.io-parser/node_modules/utf8/tests/generate-test-data.py | 2214 | 1347 | #!/usr/bin/env python
import re
import json
# http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
# http://stackoverflow.com/a/13436167/96656
def unisymbol(codePoint):
	"""Return the character for a code point, encoding astral (non-BMP)
	code points as a UTF-16 surrogate pair; 'Error' when out of range."""
	if 0x0000 <= codePoint <= 0xFFFF:
		return unichr(codePoint)
	if 0x010000 <= codePoint <= 0x10FFFF:
		# http://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
		base = codePoint - 0x10000
		highSurrogate = 0xD800 + int(base / 0x400)
		lowSurrogate = 0xDC00 + int(base % 0x400)
		return unichr(highSurrogate) + unichr(lowSurrogate)
	return 'Error'
def hexify(codePoint):
	"""Format a code point as 'U+XXXXXX' (at least six uppercase hex digits)."""
	return 'U+' + format(codePoint, 'X').zfill(6)
def writeFile(filename, contents):
	# Python 2 ``print`` statement: echo the file name as progress output.
	print filename
	with open(filename, 'w') as f:
		# Normalize the payload to end with exactly one newline.
		f.write(contents.strip() + '\n')
data = []
# Enumerate every Unicode code point, BMP and astral planes alike.
for codePoint in range(0x000000, 0x10FFFF + 1):
	symbol = unisymbol(codePoint)
	# http://stackoverflow.com/a/17199950/96656
	# Round-trip through latin1 so each UTF-8 byte becomes one character.
	# NOTE: ``bytes`` shadows the builtin of the same name.
	bytes = symbol.encode('utf8').decode('latin1')
	data.append({
		'codePoint': codePoint,
		'decoded': symbol,
		'encoded': bytes
	});
jsonData = json.dumps(data, sort_keys=False, indent=2, separators=(',', ': '))
# Use tabs instead of double spaces for indentation
jsonData = jsonData.replace('  ', '\t')
# Escape hexadecimal digits in escape sequences
jsonData = re.sub(
	r'\\u([a-fA-F0-9]{4})',
	lambda match: r'\u{}'.format(match.group(1).upper()),
	jsonData
)
writeFile('data.json', jsonData)
| mit |
atodorov/lorax | src/composer/cli/status.py | 5 | 1972 | #
# Copyright (C) 2018 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
log = logging.getLogger("composer-cli")
from composer import http_client as client
from composer.cli.help import status_help
from composer.cli.utilities import handle_api_result
def status_cmd(opts):
    """Process status commands

    :param opts: Cmdline arguments
    :type opts: argparse.Namespace
    :returns: Value to return from sys.exit()
    :rtype: int

    Supported subcommands are "show" and "help"; anything else (including a
    missing subcommand) is reported as an error and returns 1.
    """
    # Guard against a missing subcommand so the user gets a clear error
    # message instead of an IndexError traceback.
    if len(opts.args) < 2:
        log.error("status is missing a subcommand, see 'status help'")
        return 1

    if opts.args[1] == "help" or opts.args[1] == "--help":
        print(status_help)
        return 0
    elif opts.args[1] != "show":
        log.error("Unknown status command: %s", opts.args[1])
        return 1

    result = client.get_url_json(opts.socket, "/api/status")
    (rc, exit_now) = handle_api_result(result, opts.json)
    if exit_now:
        return rc

    # Print a human-readable summary of the API server state.
    print("API server status:")
    print(" Database version: " + result["db_version"])
    print(" Database supported: %s" % result["db_supported"])
    print(" Schema version: " + result["schema_version"])
    print(" API version: " + result["api"])
    print(" Backend: " + result["backend"])
    print(" Build: " + result["build"])

    if result["msgs"]:
        print("Error messages:")
        print("\n".join([" " + r for r in result["msgs"]]))

    return rc
| gpl-2.0 |
psi4/psi4 | tests/pytests/test_aaa_profiling.py | 12 | 1796 | import time
import pytest
import numpy as np
import multiprocessing
import psi4
# Test below is fine on its own but erratic through pytest. Most likely
# to succeed as first test collected, so here it lies.
@pytest.mark.xfail(True, reason='threading treatment suspect', run=True)
def disabled_test_threaded_blas():
    """Benchmark Psi4's threaded GEMM against NumPy dot and check that Psi4
    matches NumPy and scales with thread count at the largest size."""
    threads = multiprocessing.cpu_count()
    threads = int(threads / 2)  # use half the logical cores
    times = {}
    size = [200, 500, 2000, 5000]
    # NOTE: ``threads`` is rebound here from an int to the list of thread
    # counts to benchmark.
    threads = [1, threads]
    for th in threads:
        psi4.set_num_threads(th)
        for sz in size:
            # Scale repetitions so total work is roughly constant per size.
            nruns = max(1, int(1.e10 / (sz ** 3)))
            a = psi4.core.Matrix(sz, sz)
            b = psi4.core.Matrix(sz, sz)
            c = psi4.core.Matrix(sz, sz)
            # Time Psi4's own GEMM.
            tp4 = time.time()
            for n in range(nruns):
                c.gemm(False, False, 1.0, a, b, 0.0)
            retp4 = (time.time() - tp4) / nruns
            # Time NumPy's dot writing into the same output buffer.
            tnp = time.time()
            for n in range(nruns):
                np.dot(a, b, out=np.asarray(c))
            retnp = (time.time() - tnp) / nruns
            print("Time for threads %2d, size %5d: Psi4: %12.6f NumPy: %12.6f" % (th, sz, retp4, retnp))
            if sz == 5000:
                times["p4-n{}".format(th)] = retp4
                times["np-n{}".format(th)] = retnp
        assert psi4.get_num_threads() == th
    # rat1: Psi4 vs NumPy at max threads (want parity); rat2: Psi4 speedup
    # from 1 thread to max threads (want roughly linear).
    rat1 = times["np-n" + str(threads[-1])] / times["p4-n" + str(threads[-1])]
    rat2 = times["p4-n" + str(threads[0])] / times["p4-n" + str(threads[-1])]
    print(" NumPy@n%d : Psi4@n%d ratio (want ~1): %.2f" % (threads[-1], threads[-1], rat1))
    print(" Psi4@n%d : Psi4@n%d ratio (want ~%d): %.2f" % (threads[0], threads[-1], threads[-1], rat2))
    assert pytest.approx(rat1, 0.2) == 1.0
    assert pytest.approx(rat2, 0.8) == threads[-1]
| lgpl-3.0 |
mayankcu/Django-social | venv/Lib/site-packages/django/contrib/gis/db/backends/postgis/adapter.py | 94 | 1501 | """
This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
"""
from psycopg2 import Binary
from psycopg2.extensions import ISQLQuote
class PostGISAdapter(object):
    """Adapts a GEOS geometry for quoting into PostgreSQL/PostGIS SQL."""

    def __init__(self, geom):
        "Initializes on the geometry."
        # Getting the WKB (in string form, to allow easy pickling of
        # the adaptor) and the SRID from the geometry.
        self.ewkb = str(geom.ewkb)
        self.srid = geom.srid
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        # Does the given protocol conform to what Psycopg2 expects?
        if proto == ISQLQuote:
            return self
        else:
            raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?')

    def __eq__(self, other):
        if not isinstance(other, PostGISAdapter):
            return False
        return (self.ewkb == other.ewkb) and (self.srid == other.srid)

    def __ne__(self, other):
        # Python 2 does not derive __ne__ from __eq__; define it explicitly
        # so ``!=`` stays consistent with ``==`` instead of falling back to
        # identity comparison.
        return not self == other

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        return 'ST_GeomFromEWKB(%s)' % self._adapter.getquoted()

    def prepare_database_save(self, unused):
        return self
| bsd-3-clause |
zevnux/pep8 | testsuite/E12.py | 9 | 6781 | #: E121
print "E121", (
"dent")
#: E122
print "E122", (
"dent")
#: E123
my_list = [
1, 2, 3,
4, 5, 6,
]
#: E124
print "E124", ("visual",
"indent_two"
)
#: E124
print "E124", ("visual",
"indent_five"
)
#: E124
a = (123,
)
#: E129
if (row < 0 or self.moduleCount <= row or
col < 0 or self.moduleCount <= col):
raise Exception("%s,%s - %s" % (row, col, self.moduleCount))
#: E126
print "E126", (
"dent")
#: E126
print "E126", (
"dent")
#: E127
print "E127", ("over-",
"over-indent")
#: E128
print "E128", ("visual",
"hanging")
#: E128
print "E128", ("under-",
"under-indent")
#:
#: E126
my_list = [
1, 2, 3,
4, 5, 6,
]
#: E121
result = {
'key1': 'value',
'key2': 'value',
}
#: E126 E126
rv.update(dict.fromkeys((
'qualif_nr', 'reasonComment_en', 'reasonComment_fr',
'reasonComment_de', 'reasonComment_it'),
'?'),
"foo")
#: E126
abricot = 3 + \
4 + \
5 + 6
#: E131
print "hello", (
"there",
# "john",
"dude")
#: E126
part = set_mimetype((
a.get('mime_type', 'text')),
'default')
#:
#: E122
if True:
result = some_function_that_takes_arguments(
'a', 'b', 'c',
'd', 'e', 'f',
)
#: E122
if some_very_very_very_long_variable_name or var \
or another_very_long_variable_name:
raise Exception()
#: E122
if some_very_very_very_long_variable_name or var[0] \
or another_very_long_variable_name:
raise Exception()
#: E122
if True:
if some_very_very_very_long_variable_name or var \
or another_very_long_variable_name:
raise Exception()
#: E122
if True:
if some_very_very_very_long_variable_name or var[0] \
or another_very_long_variable_name:
raise Exception()
#: E122
dictionary = [
"is": {
"nested": yes(),
},
]
#: E122
setup('',
scripts=[''],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
])
#:
#: E123 W291
print "E123", (
"bad", "hanging", "close"
)
#
#: E123 E123 E123
result = {
'foo': [
'bar', {
'baz': 'frop',
}
]
}
#: E123
result = some_function_that_takes_arguments(
'a', 'b', 'c',
'd', 'e', 'f',
)
#: E124
my_list = [1, 2, 3,
4, 5, 6,
]
#: E124
my_list = [1, 2, 3,
4, 5, 6,
]
#: E124
result = some_function_that_takes_arguments('a', 'b', 'c',
'd', 'e', 'f',
)
#: E124
fooff(aaaa,
cca(
vvv,
dadd
), fff,
)
#: E124
fooff(aaaa,
ccaaa(
vvv,
dadd
),
fff,
)
#: E124
d = dict('foo',
help="exclude files or directories which match these "
"comma separated patterns (default: %s)" % DEFAULT_EXCLUDE
)
#: E124 E128 E128
if line_removed:
self.event(cr, uid,
name="Removing the option for contract",
description="contract line has been removed",
)
#:
#: E125
if foo is None and bar is "frop" and \
blah == 'yeah':
blah = 'yeahnah'
#: E125
# Further indentation required as indentation is not distinguishable
def long_function_name(
var_one, var_two, var_three,
var_four):
print(var_one)
#
#: E125
def qualify_by_address(
self, cr, uid, ids, context=None,
params_to_check=frozenset(QUALIF_BY_ADDRESS_PARAM)):
""" This gets called by the web server """
#: E129
if (a == 2 or
b == "abc def ghi"
"jkl mno"):
return True
#:
#: E126
my_list = [
1, 2, 3,
4, 5, 6,
]
#: E126
abris = 3 + \
4 + \
5 + 6
#: E126
fixed = re.sub(r'\t+', ' ', target[c::-1], 1)[::-1] + \
target[c + 1:]
#: E126 E126
rv.update(dict.fromkeys((
'qualif_nr', 'reasonComment_en', 'reasonComment_fr',
'reasonComment_de', 'reasonComment_it'),
'?'),
"foo")
#: E126
eat_a_dict_a_day({
"foo": "bar",
})
#: E126
if (
x == (
3
) or
y == 4):
pass
#: E126
if (
x == (
3
) or
x == (
3
) or
y == 4):
pass
#: E131
troublesome_hash = {
"hash": "value",
"long": "the quick brown fox jumps over the lazy dog before doing a "
"somersault",
}
#:
#: E128
# Arguments on first line forbidden when not using vertical alignment
foo = long_function_name(var_one, var_two,
var_three, var_four)
#
#: E128
print('l.%s\t%s\t%s\t%r' %
(token[2][0], pos, tokenize.tok_name[token[0]], token[1]))
#: E128
def qualify_by_address(self, cr, uid, ids, context=None,
params_to_check=frozenset(QUALIF_BY_ADDRESS_PARAM)):
""" This gets called by the web server """
#:
#: E128
foo(1, 2, 3,
4, 5, 6)
#: E128
foo(1, 2, 3,
4, 5, 6)
#: E128
foo(1, 2, 3,
4, 5, 6)
#: E128
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E127
foo(1, 2, 3,
4, 5, 6)
#: E128 E128
if line_removed:
self.event(cr, uid,
name="Removing the option for contract",
description="contract line has been removed",
)
#: E124 E127 E127
if line_removed:
self.event(cr, uid,
name="Removing the option for contract",
description="contract line has been removed",
)
#: E127
rv.update(d=('a', 'b', 'c'),
e=42)
#
#: E127 W503
rv.update(d=('a' + 'b', 'c'),
e=42, f=42
+ 42)
#: E127 W503
input1 = {'a': {'calc': 1 + 2}, 'b': 1
+ 42}
#: E128 W503
rv.update(d=('a' + 'b', 'c'),
e=42, f=(42
+ 42))
#: E123
if True:
def example_issue254():
return [node.copy(
(
replacement
# First, look at all the node's current children.
for child in node.children
# Replace them.
for replacement in replace(child)
),
dict(name=token.undefined)
)]
#: E125:2:5 E125:8:5
if ("""
"""):
pass
for foo in """
abc
123
""".strip().split():
print(foo)
#: E122:6:5 E122:7:5 E122:8:1
print dedent(
'''
mkdir -p ./{build}/
mv ./build/ ./{build}/%(revision)s/
'''.format(
build='build',
# more stuff
)
)
#: E701:1:8 E122:2:1 E203:4:8 E128:5:1
if True:\
print(True)
print(a
, end=' ')
#:
| mit |
ThinkingBridge/platform_external_chromium_org | tools/android/remove_strings.py | 183 | 1426 | #!/usr/bin/python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Remove strings by name from a GRD file."""
import optparse
import re
import sys
def RemoveStrings(grd_path, string_names):
  """Deletes every <message> element whose name is listed from a GRD file.

  The file at grd_path is rewritten in place.

  Args:
    grd_path: path to the GRD file.
    string_names: a list of string names to be removed.
  """
  with open(grd_path, 'r') as grd_file:
    contents = grd_file.read()
  name_alternatives = '|'.join(re.escape(name) for name in string_names)
  message_pattern = (r'<message [^>]*name="(%s)".*?</message>\s*'
                     % name_alternatives)
  contents = re.sub(message_pattern, '', contents, flags=re.DOTALL)
  with open(grd_path, 'w') as grd_file:
    grd_file.write(contents)
def ParseArgs(args):
  """Parses the command line; returns the list of GRD paths.

  Exits with a usage error when no GRD paths were given.
  """
  parser = optparse.OptionParser(
      usage='usage: %prog GRD_PATH...',
      description='Remove strings from GRD files. Reads string '
      'names from stdin, and removes strings with those names from the listed '
      'GRD files.')
  _, grd_paths = parser.parse_args(args=args)
  if not grd_paths:
    parser.error('must provide GRD_PATH argument(s)')
  return grd_paths
def main(args=None):
  """Reads string names from stdin and strips them from the given GRD files."""
  grd_paths = ParseArgs(args)
  stripped = (line.strip() for line in sys.stdin.readlines())
  strings_to_remove = [name for name in stripped if name]
  for grd_path in grd_paths:
    RemoveStrings(grd_path, strings_to_remove)


if __name__ == '__main__':
  main()
| bsd-3-clause |
szaghi/MaTiSSe | release/MaTiSSe-v0.0.2/matisse/presentation/subsection.py | 10 | 4076 | #!/usr/bin/env python
"""
subsection.py, module definition of Subsection class.
This defines a subsection of the presentation.
"""
# modules loading
# standard library modules: these should be present in any recent python distribution
from collections import OrderedDict
import re
# MaTiSSe.py modules
from ..config import __config__
from ..utils.source_editor import __source_editor__
from .regexs import __regex_slide__
from .slide import Slide
# class definition
class Subsection(object):
  """
  Subsection is an object that handles a single subsection, its attributes and methods.

  Attributes
  ----------
  subsections_number : int
    global number of subsections (equals to the number of Subsection instances)
  """
  subsections_number = 0

  @classmethod
  def reset(cls):
    """Method resetting Subsection to initial values."""
    cls.subsections_number = 0
    return

  def __init__(self,raw_body='',title='',data=None,local_number=1):
    """
    Parameters
    ----------
    raw_body : str, optional
      string containing the body of the subsection in raw format
    title : str, optional
      subsection title
    data : OrderedDict object, optional
      subsection metadata
    local_number : int, optional
      subsection number in local-to-section numeration

    Attributes
    ----------
    raw_body : str
      string containing the body of the subsection in raw format
    number : int
      subsection number in global numeration
    local_number : int
      subsection number in local-to-section numeration
    title : str
      subsection title
    data : OrderedDict object
      subsection metadata
    slides : list
      list of slides
    remainder : str
      remainder data that are not data slides
    """
    # Every instantiation bumps the global counter used for numbering.
    Subsection.subsections_number += 1
    self.raw_body = raw_body
    self.number = Subsection.subsections_number
    self.local_number = local_number
    self.title = title
    self.data = OrderedDict()
    if data:
      # Copy caller metadata so this instance owns its own dict.
      for key,val in data.items():
        self.data[key] = val
    self.data['subsectiontitle' ] = self.title
    self.data['subsectionnumber'] = str(self.number)
    self.slides = None
    self.remainder = None
    return

  def get_remainder(self):
    """Method for getting the remainder of the source in case there are no data slides."""
    self.remainder = self.raw_body
    if __config__.verbose:
      # Warn the author that content was found outside any "### slide" marker.
      message = ['\nAttention: found a bad usage of presentation sectioning!']
      message.append('\nThere are not data slides!')
      message.append('\nThe data:\n"""\n')
      message.append(self.remainder)
      message.append('"""\nis placed without "### slide" section into the current section/subsection!')
      print(''.join(message))
    return

  def get_slides(self,theme):
    """Method for getting the slides contained into the subsection.

    Parameters
    ----------
    theme : Theme object
      theme used to render the slides
    """
    slides = []
    self.slides = []
    # Codes are purged before scanning so slide markers inside code blocks
    # are not treated as real slide boundaries.
    purged_source = __source_editor__.purge_codes(self.raw_body)
    for match in re.finditer(__regex_slide__,purged_source):
      slides.append([match.group('expr'),match.start(),match.end()])
    if __config__.toc_at_subsec_beginning:
      # Prepend a synthetic 2-element entry that becomes a TOC slide below.
      slides = [['Table of Contents',__config__.toc_at_subsec_beginning]] + slides
    for sdn,slide in enumerate(slides):
      if len(slide)==2:
        # Synthetic TOC entry: its body is a $toc(...) placeholder.
        raw_body = '$toc('+str(slide[1])+')'
      else:
        # Slide body spans from the end of this marker to the start of the
        # next one (or to the end of the subsection for the last slide).
        if sdn < len(slides)-1:
          raw_body = self.raw_body[slide[2]+1:slides[sdn+1][1]]
        else:
          raw_body = self.raw_body[slide[2]+1:]
      # Flag the first slide of the subsection (and of the section when this
      # is the section's first subsection) for numbering/TOC purposes.
      first_of_sec = None
      first_of_subsec = None
      if sdn == 0:
        first_of_subsec = self.number
        if self.local_number == 1:
          first_of_sec = int(self.data['sectionnumber'])
      data = OrderedDict()
      data['first_of_sec'] = first_of_sec
      data['first_of_subsec'] = first_of_subsec
      data.update(self.data)
      self.slides.append(Slide(raw_body = raw_body, title = slide[0], data = data, theme = theme, local_number = sdn + 1))
    return
| gpl-3.0 |
jmesteve/asterisk | openerp/addons/account/project/report/quantity_cost_ledger.py | 56 | 6166 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp import pooler
from openerp.report import report_sxw
class account_analytic_quantity_cost_ledger(report_sxw.rml_parse):
    """RML parser for the analytic account quantity (cost) ledger report.

    Exposes SQL-backed helpers to the report template: per-general-account
    quantity groupings, detail lines, and quantity totals, optionally
    restricted to a set of analytic journals.
    """

    def __init__(self, cr, uid, name, context):
        super(account_analytic_quantity_cost_ledger, self).__init__(cr, uid, name, context=context)
        # Helpers made available to the RML template.
        self.localcontext.update( {
            'time': time,
            'lines_g': self._lines_g,
            'lines_a': self._lines_a,
            'sum_quantity': self._sum_quantity,
            'account_sum_quantity': self._account_sum_quantity,
        })

    def _lines_g(self, account_id, date1, date2, journals):
        # Quantities on one analytic account over [date1, date2], grouped by
        # general account; the second branch also filters by journal ids.
        if not journals:
            self.cr.execute("SELECT sum(aal.unit_amount) AS quantity, \
                    aa.code AS code, aa.name AS name, aa.id AS id \
                    FROM account_account AS aa, account_analytic_line AS aal \
                    WHERE (aal.account_id=%s) AND (aal.date>=%s) \
                    AND (aal.date<=%s) AND (aal.general_account_id=aa.id) \
                    AND aa.active \
                    GROUP BY aa.code, aa.name, aa.id ORDER BY aa.code",
                    (account_id, date1, date2))
        else:
            journal_ids = journals
            self.cr.execute("SELECT sum(aal.unit_amount) AS quantity, \
                    aa.code AS code, aa.name AS name, aa.id AS id \
                    FROM account_account AS aa, account_analytic_line AS aal \
                    WHERE (aal.account_id=%s) AND (aal.date>=%s) \
                    AND (aal.date<=%s) AND (aal.general_account_id=aa.id) \
                    AND aa.active \
                    AND (aal.journal_id IN %s ) \
                    GROUP BY aa.code, aa.name, aa.id ORDER BY aa.code",
                    (account_id, date1, date2, tuple(journal_ids)))
        res = self.cr.dictfetchall()
        return res

    def _lines_a(self, general_account_id, account_id, date1, date2, journals):
        # Detail analytic lines for one (general account, analytic account)
        # pair, ordered by date then journal code then line code.
        if not journals:
            self.cr.execute("SELECT aal.name AS name, aal.code AS code, \
                    aal.unit_amount AS quantity, aal.date AS date, \
                    aaj.code AS cj \
                    FROM account_analytic_line AS aal, \
                    account_analytic_journal AS aaj \
                    WHERE (aal.general_account_id=%s) AND (aal.account_id=%s) \
                    AND (aal.date>=%s) AND (aal.date<=%s) \
                    AND (aal.journal_id=aaj.id) \
                    ORDER BY aal.date, aaj.code, aal.code",
                    (general_account_id, account_id, date1, date2))
        else:
            journal_ids = journals
            self.cr.execute("SELECT aal.name AS name, aal.code AS code, \
                    aal.unit_amount AS quantity, aal.date AS date, \
                    aaj.code AS cj \
                    FROM account_analytic_line AS aal, \
                    account_analytic_journal AS aaj \
                    WHERE (aal.general_account_id=%s) AND (aal.account_id=%s) \
                    AND (aal.date>=%s) AND (aal.date<=%s) \
                    AND (aal.journal_id=aaj.id) AND (aaj.id IN %s) \
                    ORDER BY aal.date, aaj.code, aal.code",
                    (general_account_id, account_id, date1, date2,tuple(journal_ids)))
        res = self.cr.dictfetchall()
        return res

    def _account_sum_quantity(self, account_id, date1, date2, journals):
        # Total quantity for one analytic account over the period.
        if not journals:
            self.cr.execute("SELECT sum(unit_amount) \
                    FROM account_analytic_line \
                    WHERE account_id=%s AND date>=%s AND date<=%s",
                    (account_id, date1, date2))
        else:
            journal_ids = journals
            self.cr.execute("SELECT sum(unit_amount) \
                    FROM account_analytic_line \
                    WHERE account_id = %s AND date >= %s AND date <= %s \
                    AND journal_id IN %s",
                    (account_id, date1, date2, tuple(journal_ids),))
        # fetchone() yields (NULL,) when no lines match; coerce to 0.0.
        return self.cr.fetchone()[0] or 0.0

    def _sum_quantity(self, accounts, date1, date2, journals):
        # Total quantity over a list of analytic account browse records.
        ids = map(lambda x: x.id, accounts)
        if not ids:
            return 0.0
        if not journals:
            self.cr.execute("SELECT sum(unit_amount) \
                    FROM account_analytic_line \
                    WHERE account_id IN %s AND date>=%s AND date<=%s",
                    (tuple(ids), date1, date2,))
        else:
            journal_ids = journals
            self.cr.execute("SELECT sum(unit_amount) \
                    FROM account_analytic_line \
                    WHERE account_id IN %s AND date >= %s AND date <= %s \
                    AND journal_id IN %s",(tuple(ids), date1, date2, tuple(journal_ids)))
        return self.cr.fetchone()[0] or 0.0
# Register the parser so OpenERP can render the RML template with it.
report_sxw.report_sxw('report.account.analytic.account.quantity_cost_ledger',
    'account.analytic.account',
    'addons/account/project/report/quantity_cost_ledger.rml',
    parser=account_analytic_quantity_cost_ledger, header="internal")

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
amolkahat/pandas | pandas/core/computation/ops.py | 7 | 15907 | """Operator classes for eval.
"""
import operator as op
from functools import partial
from datetime import datetime
import numpy as np
from pandas.core.dtypes.common import is_list_like, is_scalar
import pandas as pd
from pandas.compat import PY3, string_types, text_type
import pandas.core.common as com
from pandas.io.formats.printing import pprint_thing, pprint_thing_encoded
from pandas.core.base import StringMixin
from pandas.core.computation.common import _ensure_decoded, _result_type_many
from pandas.core.computation.scope import _DEFAULT_GLOBALS
# Reduction function names the expression engines recognize.
_reductions = 'sum', 'prod'

# Math functions exposed to eval expressions, split by arity.
_unary_math_ops = ('sin', 'cos', 'exp', 'log', 'expm1', 'log1p',
                   'sqrt', 'sinh', 'cosh', 'tanh', 'arcsin', 'arccos',
                   'arctan', 'arccosh', 'arcsinh', 'arctanh', 'abs')
_binary_math_ops = ('arctan2',)
_mathops = _unary_math_ops + _binary_math_ops

# Prefix used by the parser to mangle local (@-prefixed) variable names.
_LOCAL_TAG = '__pd_eval_local_'
class UndefinedVariableError(NameError):
    """NameError subclass raised when an eval expression references a name
    (local or global) that cannot be resolved."""

    def __init__(self, name, is_local):
        template = ('local variable {0!r} is not defined' if is_local
                    else 'name {0!r} is not defined')
        super(UndefinedVariableError, self).__init__(template.format(name))
class Term(StringMixin):
    """A leaf node of a parsed expression: a name resolved against a Scope.

    ``__new__`` reroutes non-string names to :class:`Constant`, so callers
    may instantiate ``Term`` uniformly for names and literals alike.
    """

    def __new__(cls, name, env, side=None, encoding=None):
        # Non-string "names" are literal values; build a Constant instead.
        klass = Constant if not isinstance(name, string_types) else cls
        supr_new = super(Term, klass).__new__
        return supr_new(klass)

    def __init__(self, name, env, side=None, encoding=None):
        self._name = name
        self.env = env
        self.side = side
        tname = text_type(name)
        # Local (@-prefixed) names are tagged with _LOCAL_TAG by the parser;
        # names from _DEFAULT_GLOBALS are treated as local as well.
        self.is_local = (tname.startswith(_LOCAL_TAG) or
                         tname in _DEFAULT_GLOBALS)
        self._value = self._resolve_name()
        self.encoding = encoding

    @property
    def local_name(self):
        # Strip the local-variable mangling prefix for lookup/display.
        return self.name.replace(_LOCAL_TAG, '')

    def __unicode__(self):
        return pprint_thing(self.name)

    def __call__(self, *args, **kwargs):
        return self.value

    def evaluate(self, *args, **kwargs):
        # Terms are already resolved; evaluation is the identity.
        return self

    def _resolve_name(self):
        res = self.env.resolve(self.local_name, is_local=self.is_local)
        self.update(res)

        if hasattr(res, 'ndim') and res.ndim > 2:
            raise NotImplementedError("N-dimensional objects, where N > 2,"
                                      " are not supported with eval")
        return res

    def update(self, value):
        """
        search order for local (i.e., @variable) variables:

        scope, key_variable
        [('locals', 'local_name'),
         ('globals', 'local_name'),
         ('locals', 'key'),
         ('globals', 'key')]
        """
        key = self.name

        # if it's a variable name (otherwise a constant)
        if isinstance(key, string_types):
            self.env.swapkey(self.local_name, key, new_value=value)

        self.value = value

    @property
    def is_scalar(self):
        return is_scalar(self._value)

    @property
    def type(self):
        try:
            # potentially very slow for large, mixed dtype frames
            return self._value.values.dtype
        except AttributeError:
            try:
                # ndarray
                return self._value.dtype
            except AttributeError:
                # scalar
                return type(self._value)

    # ``return_type`` is simply an alias of the ``type`` property.
    return_type = type

    @property
    def raw(self):
        return pprint_thing('{0}(name={1!r}, type={2})'
                            ''.format(self.__class__.__name__, self.name,
                                      self.type))

    @property
    def is_datetime(self):
        # ``self.type`` may be a numpy dtype (with a ``.type`` scalar class)
        # or a plain python type; normalize before the subclass check.
        try:
            t = self.type.type
        except AttributeError:
            t = self.type

        return issubclass(t, (datetime, np.datetime64))

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, new_value):
        self._value = new_value

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, new_name):
        self._name = new_name

    @property
    def ndim(self):
        return self._value.ndim
class Constant(Term):
    """A literal value in an expression; name resolution is a no-op."""

    def __init__(self, value, env, side=None, encoding=None):
        super(Constant, self).__init__(value, env, side=side,
                                       encoding=encoding)

    def _resolve_name(self):
        # Constants resolve to themselves.
        return self._name

    @property
    def name(self):
        return self.value

    def __unicode__(self):
        # in python 2 str() of float
        # can truncate shorter than repr()
        return repr(self.name)
# Map python boolean keywords onto the bitwise operators the engines use.
_bool_op_map = {'not': '~', 'and': '&', 'or': '|'}
class Op(StringMixin):

    """Hold an operator of arbitrary arity
    """

    def __init__(self, op, operands, *args, **kwargs):
        # Translate keyword boolean ops ('and'/'or'/'not') to bitwise forms.
        self.op = _bool_op_map.get(op, op)
        self.operands = operands
        self.encoding = kwargs.get('encoding', None)

    def __iter__(self):
        return iter(self.operands)

    def __unicode__(self):
        """Print a generic n-ary operator and its operands using infix
        notation"""
        # recurse over the operands
        parened = ('({0})'.format(pprint_thing(opr))
                   for opr in self.operands)
        return pprint_thing(' {0} '.format(self.op).join(parened))

    @property
    def return_type(self):
        # clobber types to bool if the op is a boolean operator
        if self.op in (_cmp_ops_syms + _bool_ops_syms):
            return np.bool_
        return _result_type_many(*(term.type for term in com.flatten(self)))

    @property
    def has_invalid_return_type(self):
        # object-typed result from non-object operands signals a bad mix.
        types = self.operand_types
        obj_dtype_set = frozenset([np.dtype('object')])
        return self.return_type == object and types - obj_dtype_set

    @property
    def operand_types(self):
        return frozenset(term.type for term in com.flatten(self))

    @property
    def is_scalar(self):
        return all(operand.is_scalar for operand in self.operands)

    @property
    def is_datetime(self):
        # ``return_type`` may be a numpy dtype or a plain python type.
        try:
            t = self.return_type.type
        except AttributeError:
            t = self.return_type

        return issubclass(t, (datetime, np.datetime64))
def _in(x, y):
"""Compute the vectorized membership of ``x in y`` if possible, otherwise
use Python.
"""
try:
return x.isin(y)
except AttributeError:
if is_list_like(x):
try:
return y.isin(x)
except AttributeError:
pass
return x in y
def _not_in(x, y):
"""Compute the vectorized membership of ``x not in y`` if possible,
otherwise use Python.
"""
try:
return ~x.isin(y)
except AttributeError:
if is_list_like(x):
try:
return ~y.isin(x)
except AttributeError:
pass
return x not in y
# Comparison operators: symbol -> implementing function (parallel tuples).
_cmp_ops_syms = '>', '<', '>=', '<=', '==', '!=', 'in', 'not in'
_cmp_ops_funcs = op.gt, op.lt, op.ge, op.le, op.eq, op.ne, _in, _not_in
_cmp_ops_dict = dict(zip(_cmp_ops_syms, _cmp_ops_funcs))

# Boolean operators; the keyword forms reuse the bitwise implementations.
_bool_ops_syms = '&', '|', 'and', 'or'
_bool_ops_funcs = op.and_, op.or_, op.and_, op.or_
_bool_ops_dict = dict(zip(_bool_ops_syms, _bool_ops_funcs))

# Arithmetic operators; '/' is true division on python 3.
_arith_ops_syms = '+', '-', '*', '/', '**', '//', '%'
_arith_ops_funcs = (op.add, op.sub, op.mul, op.truediv if PY3 else op.div,
                    op.pow, op.floordiv, op.mod)
_arith_ops_dict = dict(zip(_arith_ops_syms, _arith_ops_funcs))

# Operators needing special handling by some engines.
_special_case_arith_ops_syms = '**', '//', '%'
_special_case_arith_ops_funcs = op.pow, op.floordiv, op.mod
_special_case_arith_ops_dict = dict(zip(_special_case_arith_ops_syms,
                                        _special_case_arith_ops_funcs))

# Union of all binary operator tables, used by BinOp for lookup.
_binary_ops_dict = {}

for d in (_cmp_ops_dict, _bool_ops_dict, _arith_ops_dict):
    _binary_ops_dict.update(d)
def _cast_inplace(terms, acceptable_dtypes, dtype):
"""Cast an expression inplace.
Parameters
----------
terms : Op
The expression that should cast.
acceptable_dtypes : list of acceptable numpy.dtype
Will not cast if term's dtype in this list.
.. versionadded:: 0.19.0
dtype : str or numpy.dtype
The dtype to cast to.
"""
dt = np.dtype(dtype)
for term in terms:
if term.type in acceptable_dtypes:
continue
try:
new_value = term.value.astype(dt)
except AttributeError:
new_value = dt.type(term.value)
term.update(new_value)
def is_term(obj):
    # True when ``obj`` is a Term (including subclasses such as Constant).
    return isinstance(obj, Term)
class BinOp(Op):
"""Hold a binary operator and its operands
Parameters
----------
op : str
left : Term or Op
right : Term or Op
"""
def __init__(self, op, lhs, rhs, **kwargs):
super(BinOp, self).__init__(op, (lhs, rhs))
self.lhs = lhs
self.rhs = rhs
self._disallow_scalar_only_bool_ops()
self.convert_values()
try:
self.func = _binary_ops_dict[op]
except KeyError:
# has to be made a list for python3
keys = list(_binary_ops_dict.keys())
raise ValueError('Invalid binary operator {0!r}, valid'
' operators are {1}'.format(op, keys))
def __call__(self, env):
"""Recursively evaluate an expression in Python space.
Parameters
----------
env : Scope
Returns
-------
object
The result of an evaluated expression.
"""
# handle truediv
if self.op == '/' and env.scope['truediv']:
self.func = op.truediv
# recurse over the left/right nodes
left = self.lhs(env)
right = self.rhs(env)
return self.func(left, right)
def evaluate(self, env, engine, parser, term_type, eval_in_python):
"""Evaluate a binary operation *before* being passed to the engine.
Parameters
----------
env : Scope
engine : str
parser : str
term_type : type
eval_in_python : list
Returns
-------
term_type
The "pre-evaluated" expression as an instance of ``term_type``
"""
if engine == 'python':
res = self(env)
else:
# recurse over the left/right nodes
left = self.lhs.evaluate(env, engine=engine, parser=parser,
term_type=term_type,
eval_in_python=eval_in_python)
right = self.rhs.evaluate(env, engine=engine, parser=parser,
term_type=term_type,
eval_in_python=eval_in_python)
# base cases
if self.op in eval_in_python:
res = self.func(left.value, right.value)
else:
res = pd.eval(self, local_dict=env, engine=engine,
parser=parser)
name = env.add_tmp(res)
return term_type(name, env=env)
def convert_values(self):
"""Convert datetimes to a comparable value in an expression.
"""
def stringify(value):
if self.encoding is not None:
encoder = partial(pprint_thing_encoded,
encoding=self.encoding)
else:
encoder = pprint_thing
return encoder(value)
lhs, rhs = self.lhs, self.rhs
if is_term(lhs) and lhs.is_datetime and is_term(rhs) and rhs.is_scalar:
v = rhs.value
if isinstance(v, (int, float)):
v = stringify(v)
v = pd.Timestamp(_ensure_decoded(v))
if v.tz is not None:
v = v.tz_convert('UTC')
self.rhs.update(v)
if is_term(rhs) and rhs.is_datetime and is_term(lhs) and lhs.is_scalar:
v = lhs.value
if isinstance(v, (int, float)):
v = stringify(v)
v = pd.Timestamp(_ensure_decoded(v))
if v.tz is not None:
v = v.tz_convert('UTC')
self.lhs.update(v)
def _disallow_scalar_only_bool_ops(self):
    """Raise if a boolean operator is applied to scalar-only operands."""
    # Only boolean operators with at least one scalar operand can be a
    # problem; everything else is allowed through.
    has_scalar_operand = self.lhs.is_scalar or self.rhs.is_scalar
    if not has_scalar_operand or self.op not in _bool_ops_dict:
        return
    both_boolean = (issubclass(self.rhs.return_type, (bool, np.bool_)) and
                    issubclass(self.lhs.return_type, (bool, np.bool_)))
    if not both_boolean:
        raise NotImplementedError("cannot evaluate scalar only bool ops")
def isnumeric(dtype):
    """Return True when ``dtype`` corresponds to a numeric NumPy type."""
    scalar_type = np.dtype(dtype).type
    return issubclass(scalar_type, np.number)
class Div(BinOp):
    """Div operator to special case casting.

    Parameters
    ----------
    lhs, rhs : Term or Op
        The Terms or Ops in the ``/`` expression.
    truediv : bool
        Whether or not to use true division. With Python 3 this happens
        regardless of the value of ``truediv``.

    Raises
    ------
    TypeError
        If either operand is not numeric.
    """
    def __init__(self, lhs, rhs, truediv, *args, **kwargs):
        super(Div, self).__init__('/', lhs, rhs, *args, **kwargs)
        # division is only defined for numeric operand types
        if not isnumeric(lhs.return_type) or not isnumeric(rhs.return_type):
            raise TypeError("unsupported operand type(s) for {0}:"
                            " '{1}' and '{2}'".format(self.op,
                                                      lhs.return_type,
                                                      rhs.return_type))
        if truediv or PY3:
            # do not upcast float32s to float64 un-necessarily
            # (float32 is "acceptable"; everything else is cast to float64)
            acceptable_dtypes = [np.float32, np.float_]
            _cast_inplace(com.flatten(self), acceptable_dtypes, np.float_)
# Supported unary operator tokens mapped to their ``operator`` module
# implementations.  NOTE(review): 'not' is deliberately mapped to
# ``op.invert`` (same as '~') — presumably because operands are boolean
# arrays, where bitwise invert acts as logical not; confirm against callers.
_unary_ops_syms = '+', '-', '~', 'not'
_unary_ops_funcs = op.pos, op.neg, op.invert, op.invert
_unary_ops_dict = dict(zip(_unary_ops_syms, _unary_ops_funcs))
class UnaryOp(Op):
    """Hold a unary operator and its operands

    Parameters
    ----------
    op : str
        The token used to represent the operator.
    operand : Term or Op
        The Term or Op operand to the operator.

    Raises
    ------
    ValueError
        * If no function associated with the passed operator token is found.
    """
    def __init__(self, op, operand):
        super(UnaryOp, self).__init__(op, (operand,))
        self.operand = operand
        try:
            self.func = _unary_ops_dict[op]
        except KeyError:
            raise ValueError('Invalid unary operator {0!r}, valid operators '
                             'are {1}'.format(op, _unary_ops_syms))

    def __call__(self, env):
        # evaluate the operand in the given scope, then apply the operator
        operand = self.operand(env)
        return self.func(operand)

    def __unicode__(self):
        return pprint_thing('{0}({1})'.format(self.op, self.operand))

    @property
    def return_type(self):
        # bool operands stay bool; so do comparison/boolean sub-expressions.
        # Everything else (e.g. unary minus on numbers) is reported as int.
        operand = self.operand
        if operand.return_type == np.dtype('bool'):
            return np.dtype('bool')
        if (isinstance(operand, Op) and
            (operand.op in _cmp_ops_dict or operand.op in _bool_ops_dict)):
            return np.dtype('bool')
        return np.dtype('int')
class MathCall(Op):
    """A call to a supported math function inside an expression tree."""

    def __init__(self, func, args):
        super(MathCall, self).__init__(func.name, args)
        self.func = func

    def __call__(self, env):
        # Evaluate every operand in the scope, then invoke the underlying
        # function with floating-point warnings suppressed.
        values = [operand(env) for operand in self.operands]
        with np.errstate(all='ignore'):
            return self.func.func(*values)

    def __unicode__(self):
        rendered_args = ','.join(map(str, self.operands))
        return pprint_thing('{0}({1})'.format(self.op, rendered_args))
class FuncNode(object):
    """A named, whitelisted math function that builds a ``MathCall`` node.

    Raises
    ------
    ValueError
        If ``name`` is not in the supported ``_mathops`` whitelist.
    """
    def __init__(self, name):
        if name not in _mathops:
            raise ValueError(
                "\"{0}\" is not a supported function".format(name))
        self.name = name
        # resolve the implementation from numpy by name
        self.func = getattr(np, name)

    def __call__(self, *args):
        return MathCall(self, args)
| bsd-3-clause |
macks22/scikit-learn | examples/linear_model/plot_sgd_loss_functions.py | 249 | 1095 | """
==========================
SGD: convex loss functions
==========================
A plot that compares the various convex loss functions supported by
:class:`sklearn.linear_model.SGDClassifier` .
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
def modified_huber_loss(y_true, y_pred):
    """Elementwise modified Huber loss for margins ``z = y_pred * y_true``."""
    z = y_pred * y_true
    # Piecewise: 0 above z >= 1, quadratic hinge on [-1, 1), linear -4z below.
    return np.where(z >= 1., 0., np.where(z >= -1, (1 - z) ** 2, -4 * z))
# Plot each convex loss as a function of the decision value f(x) for a
# positive example (y = 1) over the range [-4, 4].
xmin, xmax = -4, 4
xx = np.linspace(xmin, xmax, 100)
# zero-one loss is a step function, drawn from its four corner points
plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], 'k-',
         label="Zero-one loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0), 'g-',
         label="Hinge loss")
plt.plot(xx, -np.minimum(xx, 0), 'm-',
         label="Perceptron loss")
plt.plot(xx, np.log2(1 + np.exp(-xx)), 'r-',
         label="Log loss")
plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, 'b-',
         label="Squared hinge loss")
plt.plot(xx, modified_huber_loss(xx, 1), 'y--',
         label="Modified Huber loss")
plt.ylim((0, 8))
plt.legend(loc="upper right")
plt.xlabel(r"Decision function $f(x)$")
plt.ylabel("$L(y, f(x))$")
plt.show()
| bsd-3-clause |
mclois/iteexe | exe/webui/hangmanblock.py | 4 | 20194 | # ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
from exe.export.exportmediaconverter import ExportMediaConverter
from exe.export.xmlpage import XMLPage
"""
HangmanBlock can render and process HangmanIdevices as XHTML and Javascript to
make a game
"""
import logging
from exe.webui.block import Block
from exe.webui.element import TextAreaElement
from exe.webui.element import ImageElement
from exe.webui import common
from exe.webui.element import TextElement
from exe.engine.extendedfieldengine import field_engine_is_delete_request
log = logging.getLogger(__name__)
# ===========================================================================
class HangmanBlockInc(Block):
"""
ExampleBlock can render and process ExampleIdevices as XHTML
GenericBlock will replace it..... one day
"""
    def __init__(self, parent, idevice):
        """Build the web UI elements wrapping each field of the idevice."""
        Block.__init__(self, parent, idevice)
        self.titleElement = TextElement(idevice.titleField)
        self.contentElement = TextAreaElement(idevice.content)
        self.contentElement.height = 250
        self.chanceImageElements = []
        #go through all image fields in the list and create an image element linked to that field
        for chanceImageField in idevice.chanceImageFields:
            newImgElement = ImageElement(chanceImageField)
            self.chanceImageElements.append(newImgElement)
        self.wordElements = []
        self.hintElements = []
        #go through all of the word fields and hint fields and create a
        #paired text element for each (same index in both lists)
        for wordIndex, word in enumerate(idevice.wordTextFields):
            newWordElement = TextElement(word)
            self.wordElements.append(newWordElement)
            newHintElement = TextElement(idevice.hintTextFields[wordIndex])
            self.hintElements.append(newHintElement)
        #make an element for the alphabet
        self.alphabetElement = TextElement(idevice.alphabet)
        #elements for the messages that are shown to the player
        # (note: levelPasssedMessageField spelling follows the idevice attribute)
        self.wrongGuessTextElement = TextAreaElement(self.idevice.wrongGuessMessageField)
        self.lostLevelTextElement = TextAreaElement(self.idevice.lostLevelMessageField)
        self.levelPassedTextElement = TextAreaElement(self.idevice.levelPasssedMessageField)
        self.gameWonTextElement = TextAreaElement(self.idevice.gameWonMessageField)
        #elements for the CSS snippets applied to the game controls
        self.letterButtonStyleElement = TextElement(self.idevice.letterButtonStyle)
        self.wrongLetterButtonStyleElement = TextElement(self.idevice.wrongLetterButtonStyle)
        self.rightLetterButtonStyleElement = TextElement(self.idevice.rightLetterButtonStyle)
        self.hintFieldStyleElement = TextElement(self.idevice.hintFieldStyle)
        self.wordAreaStyleElement = TextElement(self.idevice.wordAreaStyle)
        self.resetButtonTextElement = TextElement(self.idevice.resetButtonText)
        self.resetButtonStyleElement = TextElement(self.idevice.resetButtonStyle)
    def process(self, request):
        """
        Process the request arguments from the web server to see if any
        apply to this block.

        Handles field updates, add-chance / add-word actions, and deletion
        of individual chance images and word/hint pairs.
        """
        #Make sure that we don't do anything when it's time to die...
        Block.process(self, request)
        self.idevice.message = ""
        if field_engine_is_delete_request(request):
            return
        self.idevice.addGameScript()
        self.titleElement.process(request)
        self.idevice.title = self.titleElement.renderView()
        # push the request values into every simple field element
        self.alphabetElement.process(request)
        self.wrongGuessTextElement.process(request)
        self.lostLevelTextElement.process(request)
        self.levelPassedTextElement.process(request)
        self.gameWonTextElement.process(request)
        self.letterButtonStyleElement.process(request)
        self.wrongLetterButtonStyleElement.process(request)
        self.rightLetterButtonStyleElement.process(request)
        self.hintFieldStyleElement.process(request)
        self.wordAreaStyleElement.process(request)
        self.resetButtonTextElement.process(request)
        self.resetButtonStyleElement.process(request)
        #warn if any word or hint is blank and keep the editor open
        blankWords = False
        for wordIndex in range(0, len(self.wordElements)):
            if self.wordElements[wordIndex].renderView() == "":
                blankWords = True
            elif self.hintElements[wordIndex].renderView() == "":
                blankWords = True
        if blankWords is True:
            self.idevice.message = _("One or more words or hints are blank. Please do not have any blank hints or words - you can delete unused ones.")
            self.idevice.edit = True
        #see if we need to add another chance
        if ("addChance"+unicode(self.id)) in request.args:
            self.idevice.addChance()
            self.idevice.edit = True
            # disable Undo once a question has been added:
            self.idevice.undo = False
        if("addWord"+unicode(self.id)) in request.args:
            self.idevice.addWord()
            self.idevice.edit = True
            self.idevice.undo = False
        content = self.contentElement.process(request)
        # chance image deletion: the "action" arg carries the element id
        for imgElement in self.chanceImageElements:
            imgElement.process(request)
            if "action" in request.args and request.args["action"][0] == imgElement.id:
                self.idevice.chanceImageFields.remove(imgElement.field)
                imgElement.field.idevice.undo = False
                imgElement.field.idevice.edit = True
        # word deletion removes both the word field and its paired hint field
        for wordElement in self.wordElements:
            wordElement.process(request)
            if "action" in request.args and request.args["action"][0] == wordElement.id:
                wordIdx = self.wordElements.index(wordElement)
                self.idevice.wordTextFields.remove(wordElement.field)
                self.idevice.hintTextFields.remove(self.hintElements[wordIdx].field)
                wordElement.field.idevice.undo = False
                wordElement.field.idevice.edit = True
        for hintElement in self.hintElements:
            hintElement.process(request)
        if content:
            self.idevice.content = content
#
# Get a TextArea rendering back, according to the requested mode
def _renderHTMLElement(self, mode, element, containerId = None):
retVal = ""
idStr = ""
if containerId is not None:
idStr = " id='%s' " % containerId
retVal += "<div %s >" % idStr
if mode == "preview":
retVal += element.renderPreview()
else:
retVal += element.renderView()
retVal += "</div>"
return retVal
#
# This will generate the HTML elements and javascript that will be required
# for this to be shown as a Javascript game in the web browser
#
def _renderGame(self, style, mode = "view"):
hangmanGameId = "hangman" + self.id
resPath = ""
if mode == "preview":
resPath = "/templates/"
html = u"<script src='" + resPath + "hangman.js' type='text/javascript'></script>\n"
html += common.ideviceHeader(self, style, mode)
html += "<div id='hangman%(gameId)smessageStore' style='display: none'>" % {"gameId" : hangmanGameId}
html += self._renderHTMLElement(mode, self.wrongGuessTextElement, "hmwrong" + hangmanGameId)
html += self._renderHTMLElement(mode, self.lostLevelTextElement, "hmlost" + hangmanGameId)
html += self._renderHTMLElement(mode, self.levelPassedTextElement, "hmpassed" + hangmanGameId)
html += self._renderHTMLElement(mode, self.gameWonTextElement, "hmwon" + hangmanGameId)
html += "</div>"
html += u"<script type='text/javascript'>\n"
#Go through the images and find out the max height and maxwidth
imgMaxHeight = 0
imgMaxWidth = 0
for imgElement in self.chanceImageElements:
if imgElement.field.imageResource and imgElement.field.imageResource is not None:
if(int(imgElement.field.width) > imgMaxWidth):
imgMaxWidth = int(imgElement.field.width)
if(imgElement.field.height > imgMaxHeight):
imgMaxHeight = int(imgElement.field.height)
#Makes a javascript array of the list of words that the user has given
html += "hangman_words['%s'] = new Array();\n" % hangmanGameId
html += "hangman_buttonStyles['%s'] = new Array();\n" % hangmanGameId
for wordIndex, word in enumerate(self.wordElements):
html += u"hangman_words['%(gameId)s'][%(index)d] = new Array('%(word)s', '%(hint)s');\n" % \
{"index" : wordIndex, "word" : word.renderView(), \
"hint" : self.hintElements[wordIndex].renderView(), \
"gameId" : hangmanGameId }
#make the style for the buttons
html += "hangman_buttonStyles['%(gameId)s'][HANGMAN_BEFORE_GUESS] = \"%(style)s\";\n" \
% {"gameId" : hangmanGameId, "style" : self.letterButtonStyleElement.renderView()}
html += "hangman_buttonStyles['%(gameId)s'][HANGMAN_CORRECT_GUESS] = \"%(style)s\";\n" \
% {"gameId" : hangmanGameId, "style" : self.rightLetterButtonStyleElement.renderView()}
html += "hangman_buttonStyles['%(gameId)s'][HANGMAN_WRONG_GUESS] = \"%(style)s\";\n" \
% {"gameId" : hangmanGameId, "style" : self.wrongLetterButtonStyleElement.renderView()}
#Makes a javscript string of the alphabet that the user can guess from
html += u"hangman_alphabet['%(gameId)s'] = '%(alphabet)s';\n" % \
{"alphabet" : self.alphabetElement.renderView(), \
"gameId" : hangmanGameId }
#Makes an array of the ids of the divs that hold the chance images
html += u"hangman_chanceimgids['%s'] = new Array();\n" % hangmanGameId
for imgIndex, imgElement in enumerate(self.chanceImageElements):
html += "hangman_chanceimgids['%(gameId)s'][%(index)d] = '%(imgdivid)s';\n" % \
{"index" : imgIndex, "imgdivid" : "hangman" + self.id + "img" + imgElement.id, \
"gameId" : hangmanGameId }
#Make the messages for this game
html += u"playerMessages['%s'] = new Array();\n" % hangmanGameId
messagesStr = """
playerMessages['%(gameid)s']['wrongguess'] =
document.getElementById('hmwrong%(gameid)s').innerHTML;
playerMessages['%(gameid)s']['lostlevel'] =
document.getElementById('hmlost%(gameid)s').innerHTML;
playerMessages['%(gameid)s']['levelpassed'] =
document.getElementById('hmpassed%(gameid)s').innerHTML;
playerMessages['%(gameid)s']['gamewon'] =
document.getElementById('hmwon%(gameid)s').innerHTML;
</script>
""" % {"gameid" : hangmanGameId }
html += messagesStr
html += "<div id='hangman" + self.id + "_img'>"
#render view of these images
for imgElement in self.chanceImageElements:
if imgElement.field.imageResource and imgElement.field.imageResource is not None:
html += "<div id='hangman" + self.id + "img" + imgElement.id + "' style='display: none'>"
if mode == "view":
html += imgElement.renderView()
else:
html += imgElement.renderPreview()
html += "</div>"
html += "</div>"
messageTopMargin = (imgMaxHeight - 30) / 2
gameWidth = max(600, imgMaxWidth)
gameAreaHTML = """
<div id="%(gameId)s_gamearea" style='width: %(width)dpx;' class='exehangman_gamearea'>
<div class='exehangman_alertarea' id="%(gameId)s_alertarea" style='position: absolute; z-index: 10; text-align: center; border: 1px; background-color: white; width: %(width)dpx; margin-top: %(messagetopmargin)dpx; visibility: hidden'>
 
</div>
<div id="%(gameId)s_imgarea" style='height: %(height)dpx; z-index: 1;' class='exehangman_imgarea'>
</div>
<input type='text' style='%(hintStyle)s' id='%(gameId)s_hintarea' style='width: %(width)dpx' class='exehangman_hintarea'/>
<input type='text' style='%(wordStyle)s' id='%(gameId)s_wordarea' style='width: %(width)dpx' class='exehangman_wordarea'/>
<div id="%(gameId)s_letterarea" class='exehangman_letterarea'>
</div>
<input class='exehangman_resetbutton' type='button' value='%(resetText)s' style='%(resetStyle)s' onclick='restartLevel("%(gameId)s")'/>
</div>
""" % { "gameId" : hangmanGameId, "width" : gameWidth, "height": imgMaxHeight, \
"messagetopmargin" : messageTopMargin, 'hintStyle' : self.hintFieldStyleElement.renderView(), \
'wordStyle' : self.wordAreaStyleElement.renderView(), 'resetText' : self.resetButtonTextElement.renderView(), \
'resetStyle' : self.resetButtonStyleElement.renderView() }
html += gameAreaHTML
html += "<script type='text/javascript'>setupGame('%s');</script>" % hangmanGameId
return html
    def renderEdit(self, style):
        """
        Returns an XHTML string with the form element for editing this block
        """
        html = u"<div>\n"
        html += common.ideviceShowEditMessage(self)
        html += self.titleElement.renderEdit()
        html += self.contentElement.renderEdit()
        html += self.alphabetElement.renderEdit()
        #messages to show the user for different events
        html += self.wrongGuessTextElement.renderEdit()
        html += self.lostLevelTextElement.renderEdit()
        html += self.levelPassedTextElement.renderEdit()
        html += self.gameWonTextElement.renderEdit()
        html += self.resetButtonTextElement.renderEdit()
        # advanced (style) options live in a checkbox-toggled hidden div
        divId = "fieldtype_advanced" + self.id
        html += "<input name='showbox" + divId + "' type='checkbox' onchange='$(\"#" + divId + "\").toggle()'/>"
        html += _("Show Advanced Options") + "<br/>"
        html += "<div id='" + divId + "' style='display: none' "
        html += ">"
        #styles for buttons
        html += self.letterButtonStyleElement.renderEdit()
        html += self.wrongLetterButtonStyleElement.renderEdit()
        html += self.rightLetterButtonStyleElement.renderEdit()
        #style of the text fields
        html += self.hintFieldStyleElement.renderEdit()
        html += self.wordAreaStyleElement.renderEdit()
        html += self.resetButtonStyleElement.renderEdit()
        html += "</div>"
        #render edit of these images, each with a remove button
        for imgElement in self.chanceImageElements:
            html += imgElement.renderEdit()
            html += common.submitImage(imgElement.id, imgElement.field.idevice.id,
                                       "/images/stock-cancel.png",
                                       _("Remove This Life")) + "<br/>"
        addChanceButtonLabel = _("Add Chance")
        html += common.submitButton("addChance"+unicode(self.id), addChanceButtonLabel)
        html += "<br/>"
        #show words to be guessed; the first word cannot be removed
        html += _("<h2>Words to Guess</h2>")
        for wordIndex in range(0, len(self.wordElements)):
            word = self.wordElements[wordIndex]
            html += word.renderEdit()
            html += self.hintElements[wordIndex].renderEdit()
            html += "<br/>"
            if wordIndex > 0:
                html += common.submitImage(word.id, word.field.idevice.id,
                                           "/images/stock-cancel.png",
                                           _("Remove This Word")) + "<br/>"
        html += common.submitButton("addWord"+unicode(self.id), _("Add Word"))
        html += "<br/>"
        html += self.renderEditButtons()
        html += u"</div>\n"
        return html
def renderPreview(self, style):
"""
Returns an XHTML string for previewing this block
"""
html = u"<div class=\"iDevice "
html += u"emphasis"+unicode(self.idevice.emphasis)+"\" "
html += u"ondblclick=\"submitLink('edit',"+self.id+", 0);\">\n"
html += self.contentElement.renderView()
html += self._renderGame(style, mode = "preview")
html += self.renderViewButtons()
html += "</div>\n"
return html
def renderXML(self, style):
xml = u""
mediaConverter = ExportMediaConverter.getInstance()
width = mediaConverter.getProfileWidth()
height = mediaConverter.getProfileHeight()
if mediaConverter is not None:
for imgElement in self.chanceImageElements:
if imgElement.field.imageResource is not None:
mediaConverter.resizeImg(XMLPage.currentOutputDir/imgElement.field.imageResource.storageName, \
width, height, {}, {"resizemethod" : "stretch"})
xml += "<idevice type='hangman' id='%s'>\n" % self.idevice.id
xml += "<chanceimages>\n"
for imgElement in self.chanceImageElements:
if imgElement.field.imageResource is not None:
xml += "<img src='%s'/>\n" % imgElement.field.imageResource.storageName
xml += "</chanceimages>\n"
xml += "<alphabet>%s</alphabet>\n" % self.alphabetElement.renderView()
xml += "<wrongguessmessage><![CDATA[ %s ]]></wrongguessmessage>\n" % self.wrongGuessTextElement.renderView()
xml += "<lostlevelmessage><![CDATA[ %s ]]></lostlevelmessage>\n" % self.lostLevelTextElement.renderView()
xml += "<levelpassedmessage><![CDATA[ %s ]]></levelpassedmessage>\n" % self.levelPassedTextElement.renderView()
xml += "<gamewonmessage><![CDATA[ %s ]]></gamewonmessage>\n" % self.gameWonTextElement.renderView()
xml += "<words>"
for wordIndex in range(0, len(self.wordElements)):
word = self.wordElements[wordIndex]
if word != "":
xml += "<word>\n<hint>%(hint)s</hint>\n<answer>%(answer)s</answer>\n</word>\n" \
% {"answer" : word.renderView() , "hint" : self.hintElements[wordIndex].renderView()}
xml += "</words>\n"
xml += "</idevice>\n"
return xml
def renderView(self, style):
"""
Returns an XHTML string for viewing this block
"""
html = u"<div class=\"iDevice "
html += u"emphasis"+unicode(self.idevice.emphasis)+"\">\n"
html += self.contentElement.renderView()
html += self._renderGame(style, mode = "view")
html += u"</div>\n"
return html
# ===========================================================================
"""Register this block with the BlockFactory"""
from exe.engine.hangmanidevice import HangmanIdeviceInc
from exe.webui.blockfactory import g_blockFactory
g_blockFactory.registerBlockType(HangmanBlockInc, HangmanIdeviceInc)
# ===========================================================================
| gpl-2.0 |
ansible/ansible | test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py | 47 | 3576 | # -*- coding: utf-8 -*-
# Copyright: (c) 2015, Peter Sprygada <psprygada@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    """Reusable documentation fragment for Cisco IOS modules.

    The DOCUMENTATION string is YAML merged into each module's docs via
    ``extends_documentation_fragment``; it only documents the deprecated
    ``provider`` connection options.
    """

    # Standard files documentation fragment
    DOCUMENTATION = r"""options:
provider:
description:
- B(Deprecated)
- 'Starting with Ansible 2.5 we recommend using C(connection: network_cli).'
- For more information please see the L(IOS Platform Options guide, ../network/user_guide/platform_ios.html).
- HORIZONTALLINE
- A dict object containing connection details.
type: dict
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote device
over the specified transport. The value of host is used as the destination
address for the transport.
type: str
required: true
port:
description:
- Specifies the port to use when building the connection to the remote device.
type: int
default: 22
username:
description:
- Configures the username to use to authenticate the connection to the remote
device. This value is used to authenticate the SSH session. If the value
is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME)
will be used instead.
type: str
password:
description:
- Specifies the password to use to authenticate the connection to the remote
device. This value is used to authenticate the SSH session. If the value
is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD)
will be used instead.
type: str
timeout:
description:
- Specifies the timeout in seconds for communicating with the network device
for either connecting or sending commands. If the timeout is exceeded before
the operation is completed, the module will error.
type: int
default: 10
ssh_keyfile:
description:
- Specifies the SSH key to use to authenticate the connection to the remote
device. This value is the path to the key used to authenticate the SSH
session. If the value is not specified in the task, the value of environment
variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead.
type: path
authorize:
description:
- Instructs the module to enter privileged mode on the remote device before
sending any commands. If not specified, the device will attempt to execute
all commands in non-privileged mode. If the value is not specified in the
task, the value of environment variable C(ANSIBLE_NET_AUTHORIZE) will be
used instead.
type: bool
default: false
auth_pass:
description:
- Specifies the password to use if required to enter privileged mode on the
remote device. If I(authorize) is false, then this argument does nothing.
If the value is not specified in the task, the value of environment variable
C(ANSIBLE_NET_AUTH_PASS) will be used instead.
type: str
notes:
- For more information on using Ansible to manage network devices see the :ref:`Ansible
Network Guide <network_guide>`
- For more information on using Ansible to manage Cisco devices see the `Cisco integration
page <https://www.ansible.com/integrations/networks/cisco>`_.
"""
| gpl-3.0 |
harisibrahimkv/django | tests/migrations/test_loader.py | 23 | 19149 | from django.db import connection, connections
from django.db.migrations.exceptions import (
AmbiguityError, InconsistentMigrationHistory, NodeNotFoundError,
)
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.recorder import MigrationRecorder
from django.test import TestCase, modify_settings, override_settings
class RecorderTests(TestCase):
    """
    Tests recording migrations as applied or not.
    """
    multi_db = True

    def _applied_for_app(self, recorder, app_label):
        # Applied migrations recorded for a single app, as (app, name) pairs.
        # (Replaces four copies of the non-idiomatic ``set(genexp)`` pattern.)
        return {m for m in recorder.applied_migrations() if m[0] == app_label}

    def test_apply(self):
        """
        Tests marking migrations as applied/unapplied.
        """
        recorder = MigrationRecorder(connection)
        self.assertEqual(self._applied_for_app(recorder, "myapp"), set())
        recorder.record_applied("myapp", "0432_ponies")
        self.assertEqual(
            self._applied_for_app(recorder, "myapp"),
            {("myapp", "0432_ponies")},
        )
        # That should not affect records of another database
        recorder_other = MigrationRecorder(connections['other'])
        self.assertEqual(self._applied_for_app(recorder_other, "myapp"), set())
        recorder.record_unapplied("myapp", "0432_ponies")
        self.assertEqual(self._applied_for_app(recorder, "myapp"), set())
class LoaderTests(TestCase):
"""
Tests the disk and database loader, and running through migrations
in memory.
"""
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    @modify_settings(INSTALLED_APPS={'append': 'basic'})
    def test_load(self):
        """
        Makes sure the loader can load the migrations for the test apps,
        and then render them out to a new Apps.
        """
        # Load and test the plan
        migration_loader = MigrationLoader(connection)
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "0002_second")),
            [
                ("migrations", "0001_initial"),
                ("migrations", "0002_second"),
            ],
        )
        # Now render it out!
        project_state = migration_loader.project_state(("migrations", "0002_second"))
        self.assertEqual(len(project_state.models), 2)
        # field order is significant, so compare the ordered field names
        author_state = project_state.models["migrations", "author"]
        self.assertEqual(
            [x for x, y in author_state.fields],
            ["id", "name", "slug", "age", "rating"]
        )
        book_state = project_state.models["migrations", "book"]
        self.assertEqual(
            [x for x, y in book_state.fields],
            ["id", "author"]
        )
        # Ensure we've included unmigrated apps in there too
        self.assertIn("basic", project_state.real_apps)
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_unmigdep"})
    def test_load_unmigrated_dependency(self):
        """
        Makes sure the loader can load migrations with a dependency on an unmigrated app.
        """
        # Load and test the plan: contenttypes/auth come first because the
        # migration depends on the (unmigrated) auth User model
        migration_loader = MigrationLoader(connection)
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "0001_initial")),
            [
                ('contenttypes', '0001_initial'),
                ('auth', '0001_initial'),
                ("migrations", "0001_initial"),
            ],
        )
        # Now render it out!
        project_state = migration_loader.project_state(("migrations", "0001_initial"))
        self.assertEqual(len([m for a, m in project_state.models if a == "migrations"]), 1)
        book_state = project_state.models["migrations", "book"]
        self.assertEqual(
            [x for x, y in book_state.fields],
            ["id", "user"]
        )
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
    def test_run_before(self):
        """
        Makes sure the loader uses Migration.run_before.
        """
        # Load and test the plan: 0003_third declares run_before and is
        # therefore ordered ahead of 0002_second
        migration_loader = MigrationLoader(connection)
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "0002_second")),
            [
                ("migrations", "0001_initial"),
                ("migrations", "0003_third"),
                ("migrations", "0002_second"),
            ],
        )
    @override_settings(MIGRATION_MODULES={
        "migrations": "migrations.test_migrations_first",
        "migrations2": "migrations2.test_migrations_2_first",
    })
    @modify_settings(INSTALLED_APPS={'append': 'migrations2'})
    def test_first(self):
        """
        Makes sure the '__first__' migrations build correctly.
        """
        # "second" depends on ("migrations2", "__first__"), which must
        # resolve to all of migrations2's migrations being applied first
        migration_loader = MigrationLoader(connection)
        self.assertEqual(
            migration_loader.graph.forwards_plan(("migrations", "second")),
            [
                ("migrations", "thefirst"),
                ("migrations2", "0001_initial"),
                ("migrations2", "0002_second"),
                ("migrations", "second"),
            ],
        )
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
    def test_name_match(self):
        "Tests prefix name matching"
        migration_loader = MigrationLoader(connection)
        # unique prefix resolves to the full migration name
        self.assertEqual(
            migration_loader.get_migration_by_prefix("migrations", "0001").name,
            "0001_initial",
        )
        # ambiguous prefix ("0" matches 0001 and 0002) raises AmbiguityError
        with self.assertRaises(AmbiguityError):
            migration_loader.get_migration_by_prefix("migrations", "0")
        # unknown prefix raises KeyError
        with self.assertRaises(KeyError):
            migration_loader.get_migration_by_prefix("migrations", "blarg")
    def test_load_import_error(self):
        """A migrations module that fails to import propagates ImportError."""
        with override_settings(MIGRATION_MODULES={"migrations": "import_error_package"}):
            with self.assertRaises(ImportError):
                MigrationLoader(connection)
    def test_load_module_file(self):
        """A migrations "module" that is a plain file marks the app unmigrated."""
        with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.file"}):
            loader = MigrationLoader(connection)
            self.assertIn(
                "migrations", loader.unmigrated_apps,
                "App with migrations module file not in unmigrated apps."
            )
    def test_load_empty_dir(self):
        """A migrations directory without __init__.py marks the app unmigrated."""
        with override_settings(MIGRATION_MODULES={"migrations": "migrations.faulty_migrations.namespace"}):
            loader = MigrationLoader(connection)
            self.assertIn(
                "migrations", loader.unmigrated_apps,
                "App missing __init__.py in migrations module not in unmigrated apps."
            )
    @override_settings(
        INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'],
    )
    def test_marked_as_migrated(self):
        """
        Undefined MIGRATION_MODULES implies default migration module.
        """
        migration_loader = MigrationLoader(connection)
        self.assertEqual(migration_loader.migrated_apps, {'migrated_app'})
        self.assertEqual(migration_loader.unmigrated_apps, set())
    @override_settings(
        INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'],
        MIGRATION_MODULES={"migrated_app": None},
    )
    def test_marked_as_unmigrated(self):
        """
        MIGRATION_MODULES allows disabling of migrations for a particular app
        by mapping it to None.
        """
        migration_loader = MigrationLoader(connection)
        self.assertEqual(migration_loader.migrated_apps, set())
        self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'})
@override_settings(
INSTALLED_APPS=['migrations.migrations_test_apps.migrated_app'],
MIGRATION_MODULES={'migrated_app': 'missing-module'},
)
def test_explicit_missing_module(self):
"""
If a MIGRATION_MODULES override points to a missing module, the error
raised during the importation attempt should be propagated unless
`ignore_no_migrations=True`.
"""
with self.assertRaisesMessage(ImportError, 'missing-module'):
migration_loader = MigrationLoader(connection)
migration_loader = MigrationLoader(connection, ignore_no_migrations=True)
self.assertEqual(migration_loader.migrated_apps, set())
self.assertEqual(migration_loader.unmigrated_apps, {'migrated_app'})
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
    def test_loading_squashed(self):
        "Tests loading a squashed migration"
        migration_loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        self.addCleanup(recorder.flush)
        # Loading with nothing applied should just give us the one node
        # (the squashed replacement migration)
        self.assertEqual(
            len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
            1,
        )
        # However, fake-apply one migration and it should now use the old two
        # (a partially-applied squash cannot be used)
        recorder.record_applied("migrations", "0001_initial")
        migration_loader.build_graph()
        self.assertEqual(
            len([x for x in migration_loader.graph.nodes if x[0] == "migrations"]),
            2,
        )
    @override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
    def test_loading_squashed_complex(self):
        "Tests loading a complex set of squashed migrations"
        loader = MigrationLoader(connection)
        recorder = MigrationRecorder(connection)
        self.addCleanup(recorder.flush)

        def num_nodes():
            # remaining (unapplied) steps on the path to 7_auto
            plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
            return len(plan - loader.applied_migrations)

        # Empty database: use squashed migration
        loader.build_graph()
        self.assertEqual(num_nodes(), 5)
        # Starting at 1 or 2 should use the squashed migration too
        recorder.record_applied("migrations", "1_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 4)
        recorder.record_applied("migrations", "2_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 3)
        # However, starting at 3 to 5 cannot use the squashed migration
        # (the squash is only partially applied, so the originals are used)
        recorder.record_applied("migrations", "3_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 4)
        recorder.record_applied("migrations", "4_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 3)
        # Starting at 5 to 7 we are passed the squashed migrations
        recorder.record_applied("migrations", "5_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 2)
        recorder.record_applied("migrations", "6_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 1)
        recorder.record_applied("migrations", "7_auto")
        loader.build_graph()
        self.assertEqual(num_nodes(), 0)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]})
def test_loading_squashed_complex_multi_apps(self):
loader = MigrationLoader(connection)
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
expected_plan = {
('app1', '1_auto'),
('app2', '1_squashed_2'),
('app1', '2_squashed_3'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_complex_multi_apps.app1",
"app2": "migrations.test_migrations_squashed_complex_multi_apps.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_complex_multi_apps.app1",
"migrations.test_migrations_squashed_complex_multi_apps.app2",
]})
def test_loading_squashed_complex_multi_apps_partially_applied(self):
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
recorder.record_applied('app1', '1_auto')
recorder.record_applied('app1', '2_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app2', '1_squashed_2'),
('app1', '3_auto'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_erroneous"})
def test_loading_squashed_erroneous(self):
"Tests loading a complex but erroneous set of squashed migrations"
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
def num_nodes():
plan = set(loader.graph.forwards_plan(('migrations', '7_auto')))
return len(plan - loader.applied_migrations)
# Empty database: use squashed migration
loader.build_graph()
self.assertEqual(num_nodes(), 5)
# Starting at 1 or 2 should use the squashed migration too
recorder.record_applied("migrations", "1_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 4)
recorder.record_applied("migrations", "2_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 3)
# However, starting at 3 or 4, nonexistent migrations would be needed.
msg = ("Migration migrations.6_auto depends on nonexistent node ('migrations', '5_auto'). "
"Django tried to replace migration migrations.5_auto with any of "
"[migrations.3_squashed_5] but wasn't able to because some of the replaced "
"migrations are already applied.")
recorder.record_applied("migrations", "3_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
recorder.record_applied("migrations", "4_auto")
with self.assertRaisesMessage(NodeNotFoundError, msg):
loader.build_graph()
# Starting at 5 to 7 we are passed the squashed migrations
recorder.record_applied("migrations", "5_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 2)
recorder.record_applied("migrations", "6_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 1)
recorder.record_applied("migrations", "7_auto")
loader.build_graph()
self.assertEqual(num_nodes(), 0)
@override_settings(
MIGRATION_MODULES={'migrations': 'migrations.test_migrations'},
INSTALLED_APPS=['migrations'],
)
def test_check_consistent_history(self):
loader = MigrationLoader(connection=None)
loader.check_consistent_history(connection)
recorder = MigrationRecorder(connection)
recorder.record_applied('migrations', '0002_second')
msg = (
"Migration migrations.0002_second is applied before its dependency "
"migrations.0001_initial on database 'default'."
)
with self.assertRaisesMessage(InconsistentMigrationHistory, msg):
loader.check_consistent_history(connection)
@override_settings(
MIGRATION_MODULES={'migrations': 'migrations.test_migrations_squashed_extra'},
INSTALLED_APPS=['migrations'],
)
def test_check_consistent_history_squashed(self):
"""
MigrationLoader.check_consistent_history() should ignore unapplied
squashed migrations that have all of their `replaces` applied.
"""
loader = MigrationLoader(connection=None)
recorder = MigrationRecorder(connection)
recorder.record_applied('migrations', '0001_initial')
recorder.record_applied('migrations', '0002_second')
loader.check_consistent_history(connection)
recorder.record_applied('migrations', '0003_third')
loader.check_consistent_history(connection)
@override_settings(MIGRATION_MODULES={
"app1": "migrations.test_migrations_squashed_ref_squashed.app1",
"app2": "migrations.test_migrations_squashed_ref_squashed.app2",
})
@modify_settings(INSTALLED_APPS={'append': [
"migrations.test_migrations_squashed_ref_squashed.app1",
"migrations.test_migrations_squashed_ref_squashed.app2",
]})
def test_loading_squashed_ref_squashed(self):
"Tests loading a squashed migration with a new migration referencing it"
r"""
The sample migrations are structured like this:
app_1 1 --> 2 ---------------------*--> 3 *--> 4
\ / /
*-------------------*----/--> 2_sq_3 --*
\ / /
=============== \ ============= / == / ======================
app_2 *--> 1_sq_2 --* /
\ /
*--> 1 --> 2 --*
Where 2_sq_3 is a replacing migration for 2 and 3 in app_1,
as 1_sq_2 is a replacing migration for 1 and 2 in app_2.
"""
loader = MigrationLoader(connection)
recorder = MigrationRecorder(connection)
self.addCleanup(recorder.flush)
# Load with nothing applied: both migrations squashed.
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app1', '1_auto'),
('app2', '1_squashed_2'),
('app1', '2_squashed_3'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
# Fake-apply a few from app1: unsquashes migration in app1.
recorder.record_applied('app1', '1_auto')
recorder.record_applied('app1', '2_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app2', '1_squashed_2'),
('app1', '3_auto'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
# Fake-apply one from app2: unsquashes migration in app2 too.
recorder.record_applied('app2', '1_auto')
loader.build_graph()
plan = set(loader.graph.forwards_plan(('app1', '4_auto')))
plan = plan - loader.applied_migrations
expected_plan = {
('app2', '2_auto'),
('app1', '3_auto'),
('app1', '4_auto'),
}
self.assertEqual(plan, expected_plan)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.