text
stringlengths
29
850k
# -*- coding: utf-8 -*- # This is a part of gayeogi @ http://github.com/KenjiTakahashi/gayeogi/ # Karol "Kenji Takahashi" Woźniak © 2010 - 2012 # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import sys import os from PyQt4 import QtGui from PyQt4.QtCore import Qt, QSettings, QLocale, QTranslator from PyQt4.QtCore import pyqtSignal, QModelIndex from gayeogi.db.local import DB from gayeogi.db.distributor import Distributor from gayeogi.interfaces.settings import Settings from gayeogi.utils import Filter import gayeogi.plugins __version__ = '0.6.3' locale = QLocale.system().name() if sys.platform == 'win32': from PyQt4.QtGui import QDesktopServices service = QDesktopServices() dbPath = os.path.join( unicode(service.storageLocation(9)), u'gayeogi', u'db' ) lnPath = u'' else: # Most POSIX systems, there may be more elifs in future. dbPath = os.path.expanduser(u'~/.config/gayeogi/db') lnPath = os.path.dirname(__file__) class ADRItemDelegate(QtGui.QStyledItemDelegate): buttonClicked = pyqtSignal(QModelIndex) def __init__(self, parent=None): super(ADRItemDelegate, self).__init__(parent) self.palette = QtGui.QPalette() self.buttoned = False self.mx = 0 self.my = 0 self.ry = 0 self.rry = -1 self.rx = 0 self.ht = 0 def paint(self, painter, option, index): super(ADRItemDelegate, self).paint(painter, option, QModelIndex()) painter.save() painter.setRenderHint(QtGui.QPainter.Antialiasing) painter.setPen(Qt.NoPen) painter.setBrush(self.palette.mid()) ry = option.rect.y() rx = option.rect.x() width = option.rect.width() self.ht = option.rect.height() self.ry = ry self.rx = rx metrics = option.fontMetrics lineHeight = metrics.lineSpacing() linePos = ry + (self.ht - lineHeight) / 2 painter.drawRoundedRect( rx + 1, linePos, 36, lineHeight, 20, 60, Qt.RelativeSize ) painter.setPen(QtGui.QPen()) x = rx + 8 + metrics.width(u'a') if index.data(234).toBool(): painter.drawText(x, linePos + lineHeight - 3, u'd') x += metrics.width(u'd') if index.data(345).toBool(): painter.drawText(x, linePos + lineHeight - 3, u'r') if self.buttonOver(rx, ry): if self.buttoned: if self.my >= ry + 1 and self.my <= ry + self.ht - 6: self.rry = ry self.buttonClicked.emit(index) self.buttoned = False elif ry != self.rry: painter.setPen(QtGui.QPen(self.palette.brightText(), 0)) self.rry = -1 painter.drawText(rx + 8, linePos + lineHeight - 3, u'a') elif index.data(123).toBool(): painter.drawText(rx + 8, linePos + lineHeight - 3, u'a') elif index.data(123).toBool(): painter.drawText(rx + 8, linePos - lineHeight - 3, u'a') painter.restore() pSize = self.ht / 2 + option.font.pointSize() / 2 if pSize % 2 == 0: pSize += 1 pSize -= 1 painter.save() if option.state & QtGui.QStyle.State_Selected: if option.state & QtGui.QStyle.State_HasFocus: painter.setPen(QtGui.QPen(self.palette.highlightedText(), 0)) else: painter.setPen(QtGui.QPen(self.palette.brightText(), 0)) painter.drawText( rx + 39, ry + pSize, index.data(Qt.DisplayRole).toString() ) painter.restore() pixmap = index.data(666).toPyObject() if pixmap: painter.drawPixmap(rx + width - 80, ry, pixmap) def buttonOver(self, x, y): return ( self.mx >= x + 1 and self.mx <= x + 36 and self.my >= y + 1 and self.my <= y + self.ht ) class TableView(QtGui.QTableView): """Docstring for TableView """ def __init__(self, state, parent=None): """@todo: to be defined :parent: @todo """ super(TableView, self).__init__(parent) self.setSelectionMode(self.ExtendedSelection) self.setSelectionBehavior(self.SelectRows) self.setEditTriggers(self.NoEditTriggers) self.setShowGrid(False) self.setCornerButtonEnabled(False) self.setWordWrap(False) vheader = self.verticalHeader() vheader.setHidden(True) hheader = self.horizontalHeader() hheader.setStretchLastSection(True) hheader.setDefaultAlignment(Qt.AlignLeft) hheader.setHighlightSections(False) hheader.setMovable(True) hheader.setContextMenuPolicy(Qt.CustomContextMenu) hheader.customContextMenuRequested.connect(self.showHeaderContextMenu) # This restores state over and over for every column added. # FIXME: Restore state once (somehow). #hheader.sectionCountChanged.connect( #lambda: self.horizontalHeader().restoreState(state) #) def showHeaderContextMenu(self): """@todo: Docstring for showHeaderContextMenu """ menu = QtGui.QMenu() model = self.model() for i in xrange(model.columnCount()): action = menu.addAction( model.headerData(i, Qt.Horizontal, Qt.DisplayRole).toString() ) action.setProperty(u'column', i) action.setCheckable(True) if not self.isColumnHidden(i): action.setChecked(True) menu.triggered.connect(self.showHideColumn) menu.exec_(QtGui.QCursor.pos()) def showHideColumn(self, action): """@todo: Docstring for showHideColumn :action: @todo """ column = action.property(u'column').toInt()[0] self.setColumnHidden(column, not self.isColumnHidden(column)) class ADRTableView(TableView): def __init__(self, state, parent=None): super(ADRTableView, self).__init__(state, parent) self.setMouseTracking(True) self.delegate = ADRItemDelegate() self.delegate.buttonClicked.connect(self.callback) self.setItemDelegateForColumn(1, self.delegate) def buttoned(self, mx, rx): return mx >= rx + 1 and mx <= rx + 36 def callback(self, index): self.model().setData(index, not index.data(123).toBool(), 123) def mouseMoveEvent(self, event): if event.y() == 0 or self.delegate.rry + self.delegate.ht < event.y(): self.delegate.rry = -1 self.delegate.mx = event.x() self.delegate.my = event.y() self.viewport().update() def mouseReleaseEvent(self, event): if not self.buttoned(event.x(), self.delegate.rx): super(ADRTableView, self).mouseReleaseEvent(event) else: self.delegate.buttoned = True self.delegate.my = event.y() self.viewport().update() def mousePressEvent(self, event): if not self.buttoned(event.x(), self.delegate.rx): super(ADRTableView, self).mousePressEvent(event) def mouseDoubleClickEvent(self, event): if not self.buttoned(event.x(), self.delegate.rx): super(ADRTableView, self).mouseDoubleClickEvent(event) class View(QtGui.QWidget): def __init__(self, model, view, pixmap, parent=None): """@todo: Docstring for __init__ :model: @todo :view: @todo :pixmap: @todo :parent: @todo """ super(View, self).__init__(parent) self.model = Filter(model) self.filter = QtGui.QLineEdit() self.filter.textEdited.connect(self.model.setFilter) self.filter.setStatusTip(self.trUtf8(( u"Pattern: <pair>|<pair>, " u"where <pair> is <column_name>:<searching_phrase> or (not) " u"(a or d or r). Case insensitive, regexp allowed." ))) self.view = view vheader = self.view.verticalHeader() if pixmap: vheader.setDefaultSectionSize(80) else: vheader.setDefaultSectionSize( self.view.fontMetrics().lineSpacing() ) self.view.setModel(self.model) layout = QtGui.QVBoxLayout() layout.setContentsMargins(0, 0, 0, 0) layout.addWidget(self.filter) layout.addWidget(self.view) self.setLayout(layout) self.view.setSortingEnabled(True) class Main(QtGui.QMainWindow): __settings = QSettings(u'gayeogi', u'gayeogi') __dbsettings = QSettings(u'gayeogi', u'Databases') def __init__(self): super(Main, self).__init__() if not os.path.exists(dbPath): os.mkdir(dbPath) if os.path.exists(os.path.join(dbPath[:-3], u'db.pkl')): pass # TODO: convert old db to new else: dialog = Settings() dialog.exec_() self.db = DB(dbPath) self.db.finished.connect(self.enableButtons) self.db.artistsStatisticsChanged.connect(self.updateArtistsStatistics) self.db.albumsStatisticsChanged.connect(self.updateAlbumsStatistics) from interfaces.main import Ui_main self.ui = Ui_main() widget = QtGui.QWidget() self.ui.setupUi(widget) self.ui.artists = View(self.db.artists, TableView( self.__settings.value(u'artistsView').toByteArray() ), Main.__dbsettings.value( u'image/artist/enabled', 2 ).toBool(), self.ui.splitter) delegate = ADRItemDelegate() self.ui.artists.view.setItemDelegateForColumn(0, delegate) self.ui.albums = View(self.db.albums, ADRTableView( self.__settings.value(u'albumsView').toByteArray() ), Main.__dbsettings.value( u'image/album/enabled', 2 ).toBool(), self.ui.splitter) self.ui.tracks = View(self.db.tracks, TableView( self.__settings.value(u'tracksView').toByteArray() ), None, self.ui.splitter) self.ui.tracks.view.setAlternatingRowColors(True) self.ui.artists.view.selectionModel().selectionChanged.connect( self.ui.albums.model.setSelection ) self.ui.albums.view.selectionModel().selectionChanged.connect( self.ui.tracks.model.setSelection ) self.ui.plugins = {} self.ui.splitter.restoreState( self.__settings.value(u'splitters').toByteArray() ) self.setCentralWidget(widget) self.rt = Distributor(self.db.iterator()) self.rt.stepped.connect(self.statusBar().showMessage) self.rt.finished.connect(self.enableButtons) self.ui.local.clicked.connect(self.disableButtons) self.ui.local.clicked.connect(self.db.start) self.ui.remote.clicked.connect(self.disableButtons) self.ui.remote.clicked.connect(self.rt.start) self.ui.close.clicked.connect(self.close) self.ui.save.clicked.connect(self.save) self.ui.settings.clicked.connect(self.showSettings) self.statusBar() self.setWindowTitle(u'gayeogi ' + __version__) self.translators = list() self.loadPluginsTranslators() self.loadPlugins() def disableButtons(self): """Disable some buttons one mustn't use during the update.""" self.ui.local.setDisabled(True) self.ui.remote.setDisabled(True) self.ui.save.setDisabled(True) self.ui.settings.setDisabled(True) def enableButtons(self): """Enable buttons disabled by Main.disableButtons. Also shows the "Done" message. """ self.ui.local.setEnabled(True) self.ui.remote.setEnabled(True) self.ui.save.setEnabled(True) self.ui.settings.setEnabled(True) self.statusBar().showMessage(self.trUtf8('Done')) def loadPluginsTranslators(self): reload(gayeogi.plugins) app = QtGui.QApplication.instance() for plugin in gayeogi.plugins.__all__: translator = QTranslator() if translator.load(plugin + u'_' + locale, os.path.join(lnPath, u'plugins', u'langs')): self.translators.append(translator) app.installTranslator(translator) def removePluginsTranslators(self): app = QtGui.QApplication.instance() for translator in self.translators: app.removeTranslator(translator) def loadPlugins(self): def depends(plugin): for p in gayeogi.plugins.__all__: class_ = getattr(gayeogi.plugins, p).Main if plugin in class_.depends and class_.loaded: return True return False for plugin in gayeogi.plugins.__all__: class_ = getattr(gayeogi.plugins, plugin).Main __settings_ = QSettings(u'gayeogi', class_.name) option = __settings_.value(u'enabled', 0).toInt()[0] if option and not class_.loaded: class__ = class_(self.ui, self.db.artists, self.appendPlugin, self.removePlugin) class__.load() self.ui.plugins[plugin] = class__ elif not option and class_.loaded: self.ui.plugins[plugin].unload() for d in self.ui.plugins[plugin].depends: if not self.ui.plugins[d].loaded \ and d in self.ui.plugins.keys(): del self.ui.plugins[d] if not depends(plugin): del self.ui.plugins[plugin] def appendPlugin(self, parent, child, position): parent = getattr(self.ui, parent) if position == 'start': position = 0 elif position == 'end': position = len(parent.parent().children()) - 7 if isinstance(parent, QtGui.QLayout): widget = parent.itemAt(position) if not widget: parent.insertWidget(position, child) else: if isinstance(widget, QtGui.QTabWidget): widget.addTab(child, child.name) else: try: widget.name except AttributeError: parent.insertWidget(position, child) else: widget = parent.takeAt(position).widget() tab = QtGui.QTabWidget() tab.setTabPosition(tab.South) tab.addTab(widget, widget.name) tab.addTab(child, child.name) parent.insertWidget(position, tab) def removePlugin(self, parent, child, position): parent = getattr(self.ui, parent) if position == 'start': position = 0 elif position == 'end': position = len(parent.parent().children()) - 8 if isinstance(parent, QtGui.QLayout): widget = parent.itemAt(position).widget() try: if widget.name == child.name: parent.takeAt(position).widget().deleteLater() except AttributeError: for i in range(widget.count()): if widget.widget(i).name == child.name: widget.removeTab(i) if widget.count() == 1: tmp = widget.widget(0) parent.takeAt(position).widget().deleteLater() parent.insertWidget(position, tmp) parent.itemAt(position).widget().show() def showSettings(self): u"""Show settings dialog and then update accordingly.""" def __save(): self.removePluginsTranslators() self.loadPluginsTranslators() self.loadPlugins() dialog = Settings() dialog.ok.clicked.connect(__save) dialog.exec_() def save(self): u"""Save database to file.""" self.db.save() self.statusBar().showMessage(self.trUtf8('Saved')) def updateArtistsStatistics(self, a, d, r): """Updates global artists' statistics. :a: A statistics. :d: D statistics. :r: R statistics. """ self.ui.artistsGreen.setText(unicode(a)) self.ui.artistsYellow.setText(unicode(d)) self.ui.artistsRed.setText(unicode(r)) def updateAlbumsStatistics(self, a, d, r): """Updated global albums' statistics. @note: Attributes as in Main.updateArtistsStatistics. """ self.ui.albumsGreen.setText(unicode(a)) self.ui.albumsYellow.setText(unicode(d)) self.ui.albumsRed.setText(unicode(r)) def closeEvent(self, event): def unload(): for plugin in self.ui.plugins.values(): plugin.unload() self.__settings.setValue( u'splitters', self.ui.splitter.saveState() ) self.__settings.setValue(u'artistsView', self.ui.artists.view.horizontalHeader().saveState() ) self.__settings.setValue(u'albumsView', self.ui.albums.view.horizontalHeader().saveState() ) self.__settings.setValue(u'tracksView', self.ui.tracks.view.horizontalHeader().saveState() ) if self.db.modified: from interfaces.confirmation import ConfirmationDialog dialog = ConfirmationDialog() dialog.buttons.accepted.connect(self.save) dialog.buttons.accepted.connect(unload) dialog.buttons.rejected.connect(event.ignore) dialog.buttons.helpRequested.connect(unload) dialog.exec_() else: unload() def run(): app = QtGui.QApplication(sys.argv) app.setApplicationName(u'gayeogi') translator = QTranslator() if translator.load(u'main_' + locale, os.path.join(lnPath, u'langs')): app.installTranslator(translator) main = Main() main.show() sys.exit(app.exec_())
My hair has been terribly dry lately and I needed a rescue. I created this mask and it made my hair SUPER soft! I’ll probably do this once every couple of weeks! In a blender, puree the avocado, egg, banana and coconut oil. Rub into hair thoroughly and leave it in for at least 30 minutes. Rinse in cool water (I have read when you have egg in your hair and it’s hot water, the egg cooks). You may have to brush out your hair, dry it and rewash it as when I was brushing my hair after a rinse, there was a lot of gunk that came out! I use this stuff about twice a week in the shower, it’s awesome. It leaves my skin feeling soft and also exfoliates my skin! I recommend using it hands, arms, legs and even feet for a soft feeling! Simply mix them together and put it in a mason jar. thank you so much for the tip, I’ll try it and I also use pro naturals moroccan argan oil hair mask which leaves my hair soft and healthy.
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue Oct 10 22:22:17 2017 @author: derek """ import numpy as np import tensorflow as tf def get_image_binary(filename): image_cube = np.load(filename) image_cube = np.asarray(image_cube,np.int16) shape = np.array(image_cube.shape, np.int32) return shape.tobytes(), image_cube.tobytes() #convert image to raw data bytes in the array def write_to_tfrecord(labels, shape, binary_image, tfrecord_file): writer = tf.python_io.TFRecordWriter(tfrecord_file) example = tf.train.Example(features=tf.train.Features(feature={ 'label': tf.train.Feature(bytes_list=tf.train.BytesList(value=[label])), 'shape': tf.train.Feature(bytes_list=tf.train.BytesList(value=[shape])), 'image': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_image])) })) writer.write(example.SerializeToString()) writer.close() def read_from_tfrecord(filename): # Read a record, getting filenames from the filename_queue. No # header or footer in the CIFAR-10 format, so we leave header_bytes # and footer_bytes at their default of 0. filename = "cubes1.tfrecord" reader = tf.TFRecordReader() key, tfrecord_serialized = reader.read(filename) # Convert from a string to a vector of uint8 that is record_bytes long. tfrecord_features = tf.parse_single_example(tfrecord_serialized,feautres={ 'label' : tf.FixedLenFeature([], tf.string), 'shape' : tf.FixedLenFeature([], tf.string), 'image' : tf.FixedLenFeature([], tf.string), }, name='features') image = tf.decode_raw(tfrecord_features['image'],tf.int16) shape = tf.decode_raw(tfrecord_features['shape'],tf.int32) label = tf.decode_raw(tfrecord_features['label'],tf.int16) image_cube = tf.reshape(image, shape) return label, shape, image_cube #$$#$# def patient_to_tfrecord(patient_id, image_array, patient_df): patient_id = "1.4.5.6.123551485448654" tfrecord_file = patient_id + ".tfrecord" writer = tf.python_io.TFRecordWriter(tfrecord_file) for i in range(1000): image_cube = np.random.randint(-1000,1000,[32,32,32],dtype=np.int16) image_label = np.random.randint(0,5,3,dtype=np.int16) image_cube = np.asarray(image_cube,np.int16) #ensure data is in int16 binary_cube = image_cube.tobytes() image_label = np.array(image_label,np.int16) #ensure data is in int16 binary_label = image_label.tobytes() shape = np.array(image_cube.shape, np.int32) #ensure data is in int16 binary_shape = shape.tobytes() example = tf.train.Example(features=tf.train.Features(feature={ 'label': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_label])), 'shape': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_shape])), 'image': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_cube])) })) writer.write(example.SerializeToString()) writer.close() patient_id = "1.4.5.6.123551485448654" tfrecord_file = patient_id + ".tfrecord" writer = tf.python_io.TFRecordWriter(tfrecord_file) for i in range(1000): image_cube = np.random.randint(-1000,1000,[32,32,32],dtype=np.int16) image_label = np.random.randint(0,5,3,dtype=np.int16) image_cube = np.asarray(image_cube,np.int16) #ensure data is in int16 binary_cube = image_cube.tobytes() image_label = np.array(image_label,np.int16) #ensure data is in int16 binary_label = image_label.tobytes() shape = np.array(image_cube.shape, np.int32) #ensure data is in int16 binary_shape = shape.tobytes() example = tf.train.Example(features=tf.train.Features(feature={ 'label': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_label])), 'shape': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_shape])), 'image': tf.train.Feature(bytes_list=tf.train.BytesList(value=[binary_cube])) })) writer.write(example.SerializeToString()) writer.close() #$#$#$# filenames = ["cubes1.tfrecord"] tfrecord_file_queue = tf.train.string_input_producer(filenames, name='queue') reader = tf.TFRecordReader() key, tfrecord_serialized = reader.read(tfrecord_file_queue) # Convert from a string to a vector of uint8 that is record_bytes long. tfrecord_features = tf.parse_single_example(tfrecord_serialized,features={ 'label' : tf.FixedLenFeature([], tf.string), 'shape' : tf.FixedLenFeature([], tf.string), 'image' : tf.FixedLenFeature([], tf.string), }, name='features') image = tf.decode_raw(tfrecord_features['image'],tf.int16) shape = tf.decode_raw(tfrecord_features['shape'],tf.int32) label = tf.decode_raw(tfrecord_features['label'],tf.int16) image_cube = tf.reshape(image, shape) ########################################################################## #filenames = ["cubes1.tfrecord", "cubes2.tfrecord"] #dataset = tf.contrib.data.TFRecordDataset(filenames) # Transforms a scalar string `example_proto` into a pair of a scalar string and # a scalar integer, representing an image and its label, respectively. def _parse_function(example_proto): features = {"image": tf.FixedLenFeature((), tf.string, default_value=""), "label": tf.FixedLenFeature((), tf.int32, default_value=0)} parsed_features = tf.parse_single_example(example_proto, features) return parsed_features["image"], parsed_features["label"] # Creates a dataset that reads all of the examples from two files, and extracts # the image and label features. filenames = ["/var/data/file1.tfrecord", "/var/data/file2.tfrecord"] dataset = tf.contrib.data.TFRecordDataset(filenames) dataset = dataset.map(_parse_function) print(sess.run(label))
How much notice do you have to give? trenbolone acetate cycle pct The defensive sector "will feel an immediate impact sinceits biggest customer is the U.S. government," said Sarhan."We're talking billions of dollars in income. If that goes away,what could replace that?" One moment, please donate prescription drugs new york “In Chinese tradition, [women over 30] are treated like trash,” says Liang, who met her own husband – an American industrialist older than she is – when she was a 35-year-old divorcee with a child.
""" Collection validator """ __author__ = "Dan Gunter" __copyright__ = "Copyright 2012-2013, The Materials Project" __version__ = "1.0" __maintainer__ = "Dan Gunter" __email__ = "dkgunter@lbl.gov" __status__ = "Development" __date__ = "1/31/13" import pymongo import random import re import sys import collections from .util import DoesLogging, total_size #from .mquery import * from smoqe.query import * import six class DBError(Exception): pass class ValidatorSyntaxError(Exception): "Syntax error in configuration of Validator" def __init__(self, target, desc): msg = 'Invalid syntax: {} -> "{}"'.format(desc, target) Exception.__init__(self, msg) class PythonMethod(object): """Encapsulate an external Python method that will be run on our target MongoDB collection to perform arbitrary types of validation. """ _PATTERN = re.compile(r'\s*(@\w+)(\s+\w+)*') CANNOT_COMBINE_ERR = 'Call to a Python method cannot be combined ' 'with any other constraints' BAD_CONSTRAINT_ERR = 'Invalid constraint (must be: @<method> [<param> ..])' @classmethod def constraint_is_method(cls, text): """Check from the text of the constraint whether it is a Python method, as opposed to a 'normal' constraint. :return: True if it is, False if not """ m = cls._PATTERN.match(text) return m is not None def __init__(self, text): """Create new instance from a raw constraint string. :raises: ValidatorSyntaxerror """ if not self._PATTERN.match(text): raise ValidatorSyntaxError(text, self.BAD_CONSTRAINT_ERR) tokens = re.split('@?\s+', text) if len(tokens) < 1: raise ValidatorSyntaxError(text, self.BAD_CONSTRAINT_ERR) self.method = tokens[0] self.params = tokens[1:] def mongo_get(rec, key, default=None): """ Get value from dict using MongoDB dot-separated path semantics. For example: >>> assert mongo_get({'a': {'b': 1}, 'x': 2}, 'a.b') == 1 >>> assert mongo_get({'a': {'b': 1}, 'x': 2}, 'x') == 2 >>> assert mongo_get({'a': {'b': 1}, 'x': 2}, 'a.b.c') is None :param rec: mongodb document :param key: path to mongo value :param default: default to return if not found :return: value, potentially nested, or default if not found :raise: ValueError, if record is not a dict. """ if not rec: return default if not isinstance(rec, collections.Mapping): raise ValueError('input record must act like a dict') if not '.' in key: return rec.get(key, default) for key_part in key.split('.'): if not isinstance(rec, collections.Mapping): return default if not key_part in rec: return default rec = rec[key_part] return rec class Projection(object): """Fields on which to project the query results. """ def __init__(self): self._fields = {} self._slices = {} def add(self, field, op=None, val=None): """Update report fields to include new one, if it doesn't already. :param field: The field to include :type field: Field :param op: Operation :type op: ConstraintOperator :return: None """ if field.has_subfield(): self._fields[field.full_name] = 1 else: self._fields[field.name] = 1 if op and op.is_size() and not op.is_variable(): # get minimal part of array with slicing, # but cannot use slice with variables self._slices[field.name] = val + 1 if op and op.is_variable(): # add the variable too self._fields[val] = 1 def to_mongo(self): """Translate projection to MongoDB query form. :return: Dictionary to put into a MongoDB JSON query :rtype: dict """ d = copy.copy(self._fields) for k, v in six.iteritems(self._slices): d[k] = {'$slice': v} return d class ConstraintViolation(object): """A single constraint violation, with no metadata. """ def __init__(self, constraint, value, expected): """Create new constraint violation :param constraint: The constraint that was violated :type constraint: Constraint """ self._constraint = constraint self._got = value self._expected = expected @property def field(self): return self._constraint.field.name @property def op(self): #return str(self._constraint.op) return self._constraint.op.display_op @property def got_value(self): return self._got @property def expected_value(self): return self._expected @expected_value.setter def expected_value(self, value): self._expected = value class NullConstraintViolation(ConstraintViolation): """Empty constraint violation, for when there are no constraints. """ def __init__(self): ConstraintViolation.__init__(self, Constraint('NA', '=', 'NA'), 'NA', 'NA') class ConstraintViolationGroup(object): """A group of constraint violations with metadata. """ def __init__(self): """Create an empty object. """ self._viol = [] # These are read/write self.subject = '' self.condition = None def add_violations(self, violations, record=None): """Add constraint violations and associated record. :param violations: List of violations :type violations: list(ConstraintViolation) :param record: Associated record :type record: dict :rtype: None """ rec = {} if record is None else record for v in violations: self._viol.append((v, rec)) def __iter__(self): return iter(self._viol) def __len__(self): return len(self._viol) class ProgressMeter(object): """Simple progress tracker """ def __init__(self, num, fmt): self._n = num self._subject = '?' self._fmt = fmt self._count = 0 self._total = 0 @property def count(self): return self._total def set_subject(self, subj): self._subject = subj def update(self, *args): self._count += 1 self._total += 1 if self._n == 0 or self._count < self._n: return sys.stderr.write(self._fmt.format(*args, subject=self._subject, count=self.count)) sys.stderr.write('\n') sys.stderr.flush() self._count = 0 class ConstraintSpec(DoesLogging): """Specification of a set of constraints for a collection. """ FILTER_SECT = 'filter' CONSTRAINT_SECT = 'constraints' SAMPLE_SECT = 'sample' def __init__(self, spec): """Create specification from a configuration. :param spec: Configuration for a single collection :type spec: dict :raise: ValueError if specification is wrong """ DoesLogging.__init__(self, name='mg.ConstraintSpec') self._sections, _slist = {}, [] for item in spec: self._log.debug("build constraint from: {}".format(item)) if isinstance(item, dict): self._add_complex_section(item) else: self._add_simple_section(item) def __iter__(self): """Return a list of all the sections. :rtype: list(ConstraintSpecSection) """ sect = [] # simple 1-level flatten operation for values in six.itervalues(self._sections): for v in values: sect.append(v) return iter(sect) def _add_complex_section(self, item): """Add a section that has a filter and set of constraints :raise: ValueError if filter or constraints is missing """ # extract filter and constraints try: fltr = item[self.FILTER_SECT] except KeyError: raise ValueError("configuration requires '{}'".format(self.FILTER_SECT)) sample = item.get(self.SAMPLE_SECT, None) constraints = item.get(self.CONSTRAINT_SECT, None) section = ConstraintSpecSection(fltr, constraints, sample) key = section.get_key() if key in self._sections: self._sections[key].append(section) else: self._sections[key] = [section] def _add_simple_section(self, item): self._sections[None] = [ConstraintSpecSection(None, item, None)] class ConstraintSpecSection(object): def __init__(self, fltr, constraints, sample): self._filter, self._constraints, self._sampler = fltr, constraints, sample # make condition(s) into a tuple if isinstance(fltr, basestring): self._key = (fltr,) elif fltr is None: self._key = None else: self._key = tuple(fltr) # parse sample keywords into class, if present if sample: self._sampler = Sampler(**sample) def get_key(self): return self._key @property def sampler(self): return self._sampler @property def filters(self): return self._filter @property def constraints(self): return self._constraints class Validator(DoesLogging): """Validate a collection. """ class SectionParts: """Encapsulate the tuple of information for each section of filters, constraints, etc. within a collection. """ def __init__(self, cond, body, sampler, report_fields): """Create new initialized set of parts. :param cond: Condition to filter records :type cond: MongoQuery :param body: Main set of constraints :type body: MongoQuery :param sampler: Sampling class if any :type sampler: Sampler :param report_fields: Fields to report on :type report_fields: list """ self.cond, self.body, self.sampler, self.report_fields = \ cond, body, sampler, report_fields def __init__(self, max_violations=50, max_dberrors=10, aliases=None, add_exists=False): DoesLogging.__init__(self, name='mg.validator') self.set_progress(0) self._aliases = aliases if aliases else {} self._max_viol = max_violations if self._max_viol > 0: self._find_kw = {'limit': self._max_viol} else: self._find_kw = {} self._max_dberr = max_dberrors self._base_report_fields = {'_id': 1, 'task_id': 1} self._add_exists = add_exists def set_aliases(self, a): """Set aliases. """ self._aliases = a def set_progress(self, num): """Report progress every `num` bad records. :param num: Report interval :type num: int :return: None """ report_str = 'Progress for {subject}: {count:d} invalid, {:d} db errors, {:d} bytes' self._progress = ProgressMeter(num, report_str) def num_violations(self): if self._progress is None: return 0 return self._progress._count def validate(self, coll, constraint_spec, subject='collection'): """Validation of a collection. This is a generator that yields ConstraintViolationGroups. :param coll: Mongo collection :type coll: pymongo.Collection :param constraint_spec: Constraint specification :type constraint_spec: ConstraintSpec :param subject: Name of the thing being validated :type subject: str :return: Sets of constraint violation, one for each constraint_section :rtype: ConstraintViolationGroup :raises: ValidatorSyntaxError """ self._spec = constraint_spec self._progress.set_subject(subject) self._build(constraint_spec) for sect_parts in self._sections: cvg = self._validate_section(subject, coll, sect_parts) if cvg is not None: yield cvg def _validate_section(self, subject, coll, parts): """Validate one section of a spec. :param subject: Name of subject :type subject: str :param coll: The collection to validate :type coll: pymongo.Collection :param parts: Section parts :type parts: Validator.SectionParts :return: Group of constraint violations, if any, otherwise None :rtype: ConstraintViolationGroup or None """ cvgroup = ConstraintViolationGroup() cvgroup.subject = subject # If the constraint is an 'import' of code, treat it differently here if self._is_python(parts): num_found = self._run_python(cvgroup, coll, parts) return None if num_found == 0 else cvgroup query = parts.cond.to_mongo(disjunction=False) query.update(parts.body.to_mongo()) cvgroup.condition = parts.cond.to_mongo(disjunction=False) self._log.debug('Query spec: {}'.format(query)) self._log.debug('Query fields: {}'.format(parts.report_fields)) # Find records that violate 1 or more constraints cursor = coll.find(query, fields=parts.report_fields, **self._find_kw) if parts.sampler is not None: cursor = parts.sampler.sample(cursor) nbytes, num_dberr, num_rec = 0, 0, 0 while 1: try: record = six.advance_iterator(cursor) nbytes += total_size(record) num_rec += 1 except StopIteration: self._log.info("collection {}: {:d} records, {:d} bytes, {:d} db-errors" .format(subject, num_rec, nbytes, num_dberr)) break except pymongo.errors.PyMongoError as err: num_dberr += 1 if num_dberr > self._max_dberr > 0: raise DBError("Too many errors") self._log.warn("DB.{:d}: {}".format(num_dberr, err)) continue # report progress if self._progress: self._progress.update(num_dberr, nbytes) # get reasons for badness violations = self._get_violations(parts.body, record) cvgroup.add_violations(violations, record) return None if nbytes == 0 else cvgroup def _get_violations(self, query, record): """Reverse-engineer the query to figure out why a record was selected. :param query: MongoDB query :type query: MongQuery :param record: Record in question :type record: dict :return: Reasons why bad :rtype: list(ConstraintViolation) """ # special case, when no constraints are given if len(query.all_clauses) == 0: return [NullConstraintViolation()] # normal case, check all the constraints reasons = [] for clause in query.all_clauses: var_name = None key = clause.constraint.field.name op = clause.constraint.op fval = mongo_get(record, key) if fval is None: expected = clause.constraint.value reasons.append(ConstraintViolation(clause.constraint, 'missing', expected)) continue if op.is_variable(): # retrieve value for variable var_name = clause.constraint.value value = mongo_get(record, var_name, default=None) if value is None: reasons.append(ConstraintViolation(clause.constraint, 'missing', var_name)) continue clause.constraint.value = value # swap out value, temporarily # take length for size if op.is_size(): if isinstance(fval, six.string_types) or not hasattr(fval, '__len__'): reasons.append(ConstraintViolation(clause.constraint, type(fval), 'sequence')) if op.is_variable(): clause.constraint.value = var_name # put original value back continue fval = len(fval) ok, expected = clause.constraint.passes(fval) if not ok: reasons.append(ConstraintViolation(clause.constraint, fval, expected)) if op.is_variable(): clause.constraint.value = var_name # put original value back return reasons def _build(self, constraint_spec): """Generate queries to execute. Sets instance variables so that Mongo query strings, etc. can now be extracted from the object. :param constraint_spec: Constraint specification :type constraint_spec: ConstraintSpec """ self._sections = [] # For each condition in the spec for sval in constraint_spec: rpt_fld = self._base_report_fields.copy() #print("@@ CONDS = {}".format(sval.filters)) #print("@@ MAIN = {}".format(sval.constraints)) # Constraints # If the constraint is an external call to Python code if self._is_python(sval.constraints): query, proj = self._process_python(sval.constraints) rpt_fld.update(proj.to_mongo()) # All other constraints, e.g. 'foo > 12' else: query = MongoQuery() if sval.constraints is not None: groups = self._process_constraint_expressions(sval.constraints) projection = Projection() for cg in six.itervalues(groups): for c in cg: projection.add(c.field, c.op, c.value) query.add_clause(MongoClause(c)) if self._add_exists: for c in cg.existence_constraints: query.add_clause(MongoClause(c, exists_main=True)) rpt_fld.update(projection.to_mongo()) # Filters cond_query = MongoQuery() if sval.filters is not None: cond_groups = self._process_constraint_expressions(sval.filters, rev=False) for cg in six.itervalues(cond_groups): for c in cg: cond_query.add_clause(MongoClause(c, rev=False)) # Done. Add a new 'SectionPart' for the filter and constraint result = self.SectionParts(cond_query, query, sval.sampler, rpt_fld) self._sections.append(result) def _process_constraint_expressions(self, expr_list, conflict_check=True, rev=True): """Create and return constraints from expressions in expr_list. :param expr_list: The expressions :conflict_check: If True, check for conflicting expressions within each field :return: Constraints grouped by field (the key is the field name) :rtype: dict """ # process expressions, grouping by field groups = {} for expr in expr_list: field, raw_op, val = parse_expr(expr) op = ConstraintOperator(raw_op) if field not in groups: groups[field] = ConstraintGroup(Field(field, self._aliases)) groups[field].add_constraint(op, val) # add existence constraints for cgroup in six.itervalues(groups): cgroup.add_existence(rev) # optionally check for conflicts if conflict_check: # check for conflicts in each group for field_name, group in six.iteritems(groups): conflicts = group.get_conflicts() if conflicts: raise ValueError('Conflicts for field {}: {}'.format(field_name, conflicts)) return groups def _is_python(self, constraint_list): """Check whether constraint is an import of Python code. :param constraint_list: List of raw constraints from YAML file :type constraint_list: list(str) :return: True if this refers to an import of code, False otherwise :raises: ValidatorSyntaxError """ if len(constraint_list) == 1 and \ PythonMethod.constraint_is_method(constraint_list[0]): return True if len(constraint_list) > 1 and \ any(filter(PythonMethod.constraint_is_method, constraint_list)): condensed_list = '/'.join(constraint_list) err = PythonMethod.CANNOT_COMBINE_ERR raise ValidatorSyntaxError(condensed_list, err) return False def _process_python(self, expr_list): """Create a wrapper for a call to some external Python code. :param expr_list: The expressions :return: Tuple of (query, field-projection) :rtype: (PythonMethod, Projection) """ return None, None def set_aliases(self, new_value): "Set aliases and wrap errors in ValueError" try: self.aliases = new_value except Exception as err: raise ValueError("invalid value: {}".format(err)) class Sampler(DoesLogging): """Randomly sample a proportion of the full collection. """ # Random uniform distribution DIST_RUNIF = 1 # Default distribution DEFAULT_DIST = DIST_RUNIF # Names of distributions DIST_CODES = {'uniform': DIST_RUNIF} def __init__(self, min_items=0, max_items=1e9, p=1.0, distrib=DEFAULT_DIST, **kw): """Create new parameterized sampler. :param min_items: Minimum number of items in the sample :param max_items: Maximum number of items in the sample :param p: Probability of selecting an item :param distrib: Probability distribution code, one of DIST_<name> in this class :type distrib: str or int :raise: ValueError, if `distrib` is an unknown code or string """ DoesLogging.__init__(self, 'mg.sampler') # Sanity checks if min_items < 0: raise ValueError('min_items cannot be negative ({:d})'.format(min_items)) if (max_items != 0) and (max_items < min_items): raise ValueError('max_items must be zero or >= min_items ({:d} < {:d})'.format(max_items, min_items)) if not (0.0 <= p <= 1.0): raise ValueError('probability, p, must be between 0 and 1 ({:f})'.format(p)) self.min_items = min_items self.max_items = max_items self.p = p self._empty = True # Distribution if not isinstance(distrib, int): distrib = self.DIST_CODES.get(str(distrib), None) if distrib == self.DIST_RUNIF: self._keep = self._keep_runif else: raise ValueError("unrecognized distribution: {}".format(distrib)) @property def is_empty(self): return self._empty def _keep_runif(self): return self.p >= random.uniform(0, 1) def sample(self, cursor): """Extract records randomly from the database. Continue until the target proportion of the items have been extracted, or until `min_items` if this is larger. If `max_items` is non-negative, do not extract more than these. This function is a generator, yielding items incrementally. :param cursor: Cursor to sample :type cursor: pymongo.cursor.Cursor :return: yields each item :rtype: dict :raise: ValueError, if max_items is valid and less than `min_items` or if target collection is empty """ count = cursor.count() # special case: empty collection if count == 0: self._empty = True raise ValueError("Empty collection") # special case: entire collection if self.p >= 1 and self.max_items <= 0: for item in cursor: yield item return # calculate target number of items to select if self.max_items <= 0: n_target = max(self.min_items, self.p * count) else: if self.p <= 0: n_target = max(self.min_items, self.max_items) else: n_target = max(self.min_items, min(self.max_items, self.p * count)) if n_target == 0: raise ValueError("No items requested") # select first `n_target` items that pop up with # probability self.p # This is actually biased to items at the beginning # of the file if n_target is smaller than (p * count), n = 0 while n < n_target: try: item = six.advance_iterator(cursor) except StopIteration: # need to keep looping through data until # we get all our items! cursor.rewind() item = six.advance_iterator(cursor) if self._keep(): yield item n += 1
Samsung has been working on 5G network solutions for a long time and the company is now pairing up with many network providers in multiple countries. Last week it becomes the world's first company to launch a full-fledged 5G phone that commercially available for consumers in its home ground and now Samsung has announced that it has become the leading 5G solution provider in South Korea. Samsung today announced, it has delivered the largest share of 5G network solution in Korea, which makes it the leading 5G network vendor in the country. 5G services are now commercially available to consumers and enterprises by all three mobile carriers in 85 cities across the country. Korean mobile operators have been transmitting 5G signal in Seoul and metropolitan areas since December 1, 2018, using 5G base station radios and 5G solutions provided by Samsung’s Networks Business unit. Samsung said, "To further expand the 5G network, Samsung has supplied 5G core solutions and more than 53,000 5G radio base stations to Korea’s three operators." The Korean operators are using Samsung's 5G Massive-MIMO Unit (MMU) radio base station in the 4.5GHz spectrum to rollout their commercial 5G networks. The 5G core solution on all three operators supports both legacy 4G network and 5G services in Non-Standalone (NSA) mode, which can be migrated to Standalone (SA) mode through a software update.
"""RPAAS plugin.""" import logging import pipes import time import zope.interface from acme import challenges from letsencrypt import interfaces from letsencrypt.plugins import common import rpaas logger = logging.getLogger(__name__) class RpaasLeAuthenticator(common.Plugin): """RPAAS Authenticator. This plugin create a authentticator for Tsuru RPAAS. """ zope.interface.implements(interfaces.IAuthenticator) zope.interface.classProvides(interfaces.IPluginFactory) hidden = True description = "Configure RPAAS HTTP server" CMD_TEMPLATE = """\ location /{achall.URI_ROOT_PATH}/{encoded_token} {{ default_type text/plain; echo -n '{validation}'; }} """ """Command template.""" def __init__(self, hosts, *args, **kwargs): super(RpaasLeAuthenticator, self).__init__(*args, **kwargs) self._root = './le' self._httpd = None self.hosts = hosts def get_chall_pref(self, domain): return [challenges.HTTP01] def perform(self, achalls): # pylint: disable=missing-docstring responses = [] for achall in achalls: responses.append(self._perform_single(achall)) return responses def _perform_single(self, achall): response, validation = achall.response_and_validation() self._notify_and_wait(self.CMD_TEMPLATE.format( achall=achall, validation=pipes.quote(validation), encoded_token=achall.chall.encode("token"))) if response.simple_verify( achall.chall, achall.domain, achall.account_key.public_key(), self.config.http01_port): return response else: logger.error( "Self-verify of challenge failed, authorization abandoned.") return None def _notify_and_wait(self, message): # pylint: disable=no-self-use nginx_manager = rpaas.get_manager().nginx_manager for host in self.hosts: nginx_manager.acme_conf(host, message) time.sleep(6) # TODO: update rpaas nginx # sys.stdout.write(message) # raw_input("Press ENTER to continue") def cleanup(self, achalls): pass
It Is Summer and Time To Be OUT in the Backyard! Community dinners where members of the community shared their “stories”. Story sharing over a meal helps build community and acceptance. Three herbal classes centered around the seasons in an effort to learn how we can use local plants for healing and maintenance. Free exercise classes (Yoga, Pilates, Zumba) six days a week and cultural dance classes (Bollywood) once a month. Each class is designed so that people at all levels of fitness and experience can benefit from them. Free self-defense classes in an effort to provide skills of protection for people living in the Backyard. Participating in TwinCities Pride Festival Parade. Free CPR training to provide skills that will help others in the event of an emergency. These activities don’t illustrate the impact they have on our community, but participation numbers say a lot. In 2016 we had over 16,000 participants attend the classes and other activities we’ve offered. We work to bring more opportunities for a healthier Backyard! CHAT Members include: Milin Dutta, Marta Knutson, Janet Dahlem and Bernice Arias. Contact the BYI OUT in the Backyard CHAT by email- outinthebackyard@gmail.com or facebook.com/outinthebackyard. SOCIAL COHESION: The sense of community and belonging that people have. People feel they live in a place where people trust and respect each other and have a sense of responsibility to take care of each other. SOCIAL SUPPORT: The support that a person receives from and gives to the people around them, including emotional and spiritual support, help with daily needs and crises, and the sharing of advice, information, and feedback. HEALTH EDUCATION: The degree to which individuals have the capacity to obtain, process, and understand basic information and services needed to make appropriate decisions regarding their health. This includes information about what is essential to health (the importance of the family, community, spirituality, the environment, culture, food, sleep, and movement) as well as the medical information needed to address a specific health condition.
# Copyright (C) 2010-2015 Cuckoo Foundation. # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. import os import struct PAGE_NOACCESS = 0x00000001 PAGE_READONLY = 0x00000002 PAGE_READWRITE = 0x00000004 PAGE_WRITECOPY = 0x00000008 PAGE_EXECUTE = 0x00000010 PAGE_EXECUTE_READ = 0x00000020 PAGE_EXECUTE_READWRITE = 0x00000040 PAGE_EXECUTE_WRITECOPY = 0x00000080 PAGE_GUARD = 0x00000100 PAGE_NOCACHE = 0x00000200 PAGE_WRITECOMBINE = 0x00000400 protmap = { PAGE_NOACCESS : "NOACCESS", PAGE_READONLY : "R", PAGE_READWRITE : "RW", PAGE_WRITECOPY : "RWC", PAGE_EXECUTE : "X", PAGE_EXECUTE_READ : "RX", PAGE_EXECUTE_READWRITE : "RWX", PAGE_EXECUTE_WRITECOPY : "RWXC", } from lib.cuckoo.common.abstracts import Processing from lib.cuckoo.common.objects import File from lib.cuckoo.common.constants import CUCKOO_ROOT class ProcessMemory(Processing): """Analyze process memory dumps.""" order = 10 def prot_to_str(self, prot): if prot & PAGE_GUARD: return "G" prot &= 0xff return protmap[prot] def coalesce_chunks(self, chunklist): low = chunklist[0]["start"] high = chunklist[-1]["end"] prot = chunklist[0]["prot"] PE = chunklist[0]["PE"] for chunk in chunklist: if chunk["prot"] != prot: prot = "Mixed" return { "start" : low, "end" : high, "size" : "0x%x" % (int(high, 16) - int(low, 16)), "prot" : prot, "PE" : PE, "chunks" : chunklist } def parse_dump(self, dmp_path): f = open(dmp_path, "rb") address_space = [] curchunk = [] lastend = 0 while True: data = f.read(24) if data == '': break alloc = dict() addr,size,mem_state,mem_type,mem_prot = struct.unpack("QIIII", data) offset = f.tell() if addr != lastend and len(curchunk): address_space.append(self.coalesce_chunks(curchunk)) curchunk = [] lastend = addr + size alloc["start"] = "0x%.08x" % addr alloc["end"] = "0x%.08x" % (addr + size) alloc["size"] = "0x%x" % size alloc["prot"] = self.prot_to_str(mem_prot) alloc["state"] = mem_state alloc["type"] = mem_type alloc["offset"] = offset alloc["PE"] = False if f.read(2) == "MZ": alloc["PE"] = True f.seek(size-2, 1) curchunk.append(alloc) if len(curchunk): address_space.append(self.coalesce_chunks(curchunk)) return address_space def run(self): """Run analysis. @return: structured results. """ self.key = "procmemory" results = [] if os.path.exists(self.pmemory_path): for dmp in os.listdir(self.pmemory_path): dmp_path = os.path.join(self.pmemory_path, dmp) dmp_file = File(dmp_path) process_name = "" process_path = "" process_id = int(os.path.splitext(os.path.basename(dmp_path))[0]) if "behavior" in self.results and "processes" in self.results["behavior"]: for process in self.results["behavior"]["processes"]: if process_id == process["process_id"]: process_name = process["process_name"] process_path = process["module_path"] proc = dict( file=dmp_path, pid=process_id, name=process_name, path=process_path, yara=dmp_file.get_yara(os.path.join(CUCKOO_ROOT, "data", "yara", "index_memory.yar")), address_space=self.parse_dump(dmp_path) ) results.append(proc) return results
ahmed natour. on 21 October Comments (0). Please log in to add your comment Full transcript. More presentations by ahmed natour · Untitled Prezi. conference after the event that he had signed a petition circulated by Kadi Ahmed Natour, president of the Islamic. Religious Council of Israel. Tombes juives profanées dans les Vosges en France. View Ahmed Natour profile on Yahoo! Eurosport UK. Find the latest Ahmed Natour news, stats, photos, titles, clubs, goals and more. Joueur Ahmed Natour évoluant pour au poste de Milieu(x). Il est né le . Ahmed Natour is on Jeeran. Find useful reviews and photos by Ahmed Natour on Jeeran. Ahmed Natour على جيران. تصفح نصائحه وصوره المفيدة للأماكن. Architizer is the largest database for architecture and sourcing building products. Home of the A+Awards - the global awards program for today's best architects. F.b ahmed natour Snap: natour71 Gym Fitness Football⚽ Real madrid⚽ I don't stop when i am tierd .. i stop when i am done. "What kind of farter are you?" is a simple but entertaining game to find out what type of farter you, your friends and foes are, is it a Cute breeze or fireworks?
""" Author : tharindra galahena (inf0_warri0r) Project: soccer playing ai agents using finite state machines Blog : http://www.inf0warri0r.blogspot.com Date : 21/07/2013 License: Copyright 2013 Tharindra Galahena This is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. * You should have received a copy of the GNU General Public License along with this. If not, see http://www.gnu.org/licenses/. """ from Tkinter import * import player import random import ball players1 = list() for i in range(0, 5): p = player.player(i, 0, 600, 150) players1.append(p) players2 = list() for i in range(0, 5): p = player.player(i, 1, 0, 150) players2.append(p) b = ball.ball(300, 150, 300, 150, 300, 150) def find_nearest_playerest_to_ball_1(bx, by): ls = list() for i in range(0, 5): d = (players1[i].pos_current_x - bx) ** 2.0 d = d + (players1[i].pos_current_y - by) ** 2.0 d = d ** 0.5 ls.append((d, i)) ls = sorted(ls) return ls[0] def find_nearest_playerest_to_ball_2(bx, by): ls = list() for i in range(0, 5): d = (players2[i].pos_current_x - bx) ** 2.0 d = d + (players2[i].pos_current_y - by) ** 2.0 d = d ** 0.5 ls.append((d, i)) ls = sorted(ls) return ls[0] def find_nearest_player_1(px, py): ls = list() for i in range(0, 5): d = (players1[i].pos_current_x - px) ** 2.0 d = d + (players1[i].pos_current_y - py) ** 2.0 d = d ** 0.5 ls.append((d, 0, i)) ls = sorted(ls) return ls[0] def find_nearest_player_2(px, py): ls = list() for i in range(0, 5): d = (players2[i].pos_current_x - px) ** 2.0 d = d + (players2[i].pos_current_y - py) ** 2.0 d = d ** 0.5 ls.append((d, 1, i)) ls = sorted(ls) return ls[0] def find_safest_player_1(n): mn = 100000 ind = n dd = find_nearest_player_2(players1[n].pos_current_x, players1[n].pos_current_y)[0] for i in range(0, 5): if i == n: continue d = find_nearest_player_2(players1[i].pos_current_x, players1[i].pos_current_y) yy = (players1[i].pos_current_y - players1[n].pos_current_y) ** 2.0 if d[0] > dd and yy > 400: dst = (players1[i].pos_current_x - players1[i].pos_goal_x) ** 2.0 if mn > dst: mn = dst ind = i return ind def find_safest_player_2(n): mn = 100000 ind = n dd = find_nearest_player_1(players2[n].pos_current_x, players2[n].pos_current_y)[0] for i in range(0, 5): if i == n: continue d = find_nearest_player_1(players2[i].pos_current_x, players2[i].pos_current_y) if d[0] > dd: dst = (players2[i].pos_current_x - players2[i].pos_goal_x) ** 2.0 if mn > dst: mn = dst ind = i return ind def find_friend(t, n): ls = list() if t == 0: for i in range(0, 5): if i == n: continue d1 = (players1[i].pos_current_x - players1[n].pos_current_x) ** 2.0 d2 = (players1[i].pos_current_y - players1[i].pos_current_y) ** 2.0 d = (d1 + d2) ** 0.5 ls.append((d, 0, i)) else: for i in range(0, 5): if i == n: continue d1 = (players2[i].pos_current_x - players2[n].pos_current_x) ** 2.0 d2 = (players2[i].pos_current_y - players2[n].pos_current_y) ** 2.0 d = (d1 + d2) ** 0.5 ls.append((d, 1, i)) ls = sorted(ls) return ls root = Tk() root.title("soccer - inf0_warri0r") chart_1 = Canvas(root, width=600, height=400, background="black") chart_1.grid(row=0, column=0) red = 0 blue = 0 flage = True while 1: chart_1.create_rectangle(0, 0, 600, 300, fill='#1c0', outline='yellow', width=3) chart_1.create_oval(240, 90, 360, 210, fill='#1c0', outline='yellow', width=3) chart_1.create_line(300, 0, 300, 300, fill='yellow', width=3) for i in range(0, 5): chart_1.create_oval(players1[i].pos_current_x - 6, players1[i].pos_current_y - 6, players1[i].pos_current_x + 6, players1[i].pos_current_y + 6, fill='red') chart_1.create_text(players1[i].pos_current_x + 7, players1[i].pos_current_y + 7, text=str(players1[i].index + 1), fill='white') for i in range(0, 5): chart_1.create_oval(players2[i].pos_current_x - 6, players2[i].pos_current_y - 6, players2[i].pos_current_x + 6, players2[i].pos_current_y + 6, fill='blue') chart_1.create_text(players2[i].pos_current_x + 7, players2[i].pos_current_y - 7, text=str(players2[i].index + 1), fill='white') chart_1.create_oval(b.pos_current_x - 5, b.pos_current_y - 5, b.pos_current_x + 5, b.pos_current_y + 5, fill='yellow') txt = 'score : red = ' + str(red) + ' blue = ' + str(blue) chart_1.create_text(300, 350, text=txt, fill='white') if flage: chart_1.update() chart_1.after(600) chart_1.delete(ALL) bls1 = find_nearest_playerest_to_ball_1(b.pos_current_x, b.pos_current_y) bls2 = find_nearest_playerest_to_ball_2(b.pos_current_x, b.pos_current_y) rd = random.randrange(0, 100) if rd < 50: for i in range(0, 5): players1[i].change_state(b, bls1[1]) ind = find_safest_player_1(i) px = -1 py = -1 if ind != i: px = players1[ind].pos_current_x py = players1[ind].pos_current_y xd = find_nearest_player_2(players1[i].pos_current_x, players1[i].pos_current_y) gole = find_nearest_player_2(players1[i].pos_goal_x, players1[i].pos_goal_y) b = players1[i].move(b, 0, px, py, xd, gole[0], players1, players2) for i in range(0, 5): players2[i].change_state(b, bls2[1]) ind = find_safest_player_2(i) px = -1 py = -1 if ind != i: px = players2[ind].pos_current_x py = players2[ind].pos_current_y xd = find_nearest_player_1(players2[i].pos_current_x, players2[i].pos_current_y) gole = find_nearest_player_1(players2[i].pos_goal_x, players2[i].pos_goal_y) b = players2[i].move(b, 1, px, py, xd, gole[0], players1, players2) else: for i in range(0, 5): players2[i].change_state(b, bls2[1]) ind = find_safest_player_2(i) px = -1 py = -1 if ind != i: px = players2[ind].pos_current_x py = players2[ind].pos_current_y xd = find_nearest_player_1(players2[i].pos_current_x, players2[i].pos_current_y) gole = find_nearest_player_1(players2[i].pos_goal_x, players2[i].pos_goal_y) b = players2[i].move(b, 1, px, py, xd, gole[0], players1, players2) for i in range(0, 5): players1[i].change_state(b, bls1[1]) ind = find_safest_player_1(i) px = -1 py = -1 if ind != i: px = players1[ind].pos_current_x py = players1[ind].pos_current_y xd = find_nearest_player_2(players1[i].pos_current_x, players1[i].pos_current_y) gole = find_nearest_player_2(players1[i].pos_goal_x, players1[i].pos_goal_y) b = players1[i].move(b, 0, px, py, xd, gole[0], players1, players2) b.state_change() b.move() if not flage: chart_1.update() chart_1.after(100) chart_1.delete(ALL) else: flage = False if b.pos_current_x >= 590 or b.pos_current_x <= 10: if b.pos_current_x <= 10: blue = blue + 1 else: red = red + 1 for i in range(0, 5): players1[i].reset() players2[i].reset() b.reset() flage = True root.mainloop()
Version 1/29/18 - To be published soon. Feel free to read, site and share. This book is intended to provide a roadmap to all school leaders and those who seek to understand and support the process of school improvement. This roadmap offers both a user-friendly understanding where any school is currently as well as a practical formula for how it can move from its existing location on the roadmap to greater levels of function and effectiveness. It will be your guide to promoting growth as a school or a collective of any kind. The benefits of that growth include improved performance, but also assume higher function, higher quality climate, as well as a school that embodies a greater sense of ease, sanity and satisfaction. So why do we need a roadmap to improve? As will become more evident as the book progresses, there are many reasons it is necessary. First, we do need a roadmap to have an operational understanding of where we are currently. Without that knowledge we lack the able to see and define our current situation clearly.� Second, we need a roadmap to know where we are going. What do we mean when we refer to concepts like �better� or �improved� or �higher performing?� So why this roadmap, in particular? For many reasons including its very sound theoretical basis, its ability to imply the practical specifics required to achieve results, and� because it is based in the process of natural human actualization and psychology and does not ask participants to violate their human nature or dignity or best instincts. We do not have to give up our best values to pursue excellence. Figure 1.1 depicts the broadest characterization of the school effectiveness roadmap. The more productive, effective and desirable locations on the roadmap are defined by higher levels of personal and collective function and empowerment. The vertical axis represents a continuum of function and intention. The horizontal axis reflects a contrast between empowerment and trust versus control and fear. Throughout the book as well as your process of school improvement it will be useful to keep in mind that both the nature of the more desirable locations on the roadmap and what it takes to move there will be inter-related. When your school ultimately demonstrates the values and practices defined by higher levels of vision, trust and empowerment we will find yourself experiencing all the benefits and outcomes that correspond to those higher locations along the pathway. Concurrently, what it will require for that movement to have occurred will be those same qualities. So in a very real sense the journey is the destination and vice versa. While the school effectiveness roadmap is somewhat complex � it will take us the first five chapters to fully build � when entirely represented it provides a rather complete macro theoretical foundation as well as the applied capacity to unpack the countless micro practical implications required for leading your school in the process of meeting its full potential. When we look at schools in general we find that there are countless ways to stay about the same and/or perform passably (which still typically involve an enormous amount of human effort), yet only a very narrow path to actualizing meaningful growth and improvement (that includes few if any short-cuts). This is true for individuals, teams, companies, and schools. The process will be similar. When we look at any collective organization closely, we see that groups at different points along the pathway are not only doing very different things, but they are trying to do very different things. One�s location on the roadmap will be defined by three inter-related variable 1) what we think and feel, which we will call references or R�s for short, 2) what we do � our practices and actions which we will call X�s for short, and what occurs as a result of what we do, which we will call our outcomes or O�s. We have collected data from hundreds of schools over the past few years and interviewed dozens of highly effective school leaders. What we have discovered is that where the school is located geographically tells us much less about it than where it is on the effectiveness roadmap. The reason is that the location of the references, practices and outcomes at any school will tend to be at the same location on the school effectiveness roadmap. So given the knowledge of either the common references, the common practices or the common outcomes, the roadmap will be able to accurately predict what the other two will be. Certain climates produce certain achievement levels, and certain practices produce certain kinds of climates. And most telling of all will be the references that inform the practice. So moving up the pathway to higher levels on the roadmap implies consideration for each of these factors, and addressing them all in the growth process. Reflection 1.A.: Recall the last effort that you were able to observe closely that was referred to as a program implementation. What happened to the program goals in the long-term? What forces limited the program�s effectiveness? Did staff buy in? Why or why not? NOTE: This is the first of many reflections that will be included in the chapters to encourage reflection on specific topics corresponding the text. The starting point is to recognize that �everything is connected.� Everything includes all the actions, methods, practices as well as all the thoughts, intentions, emotions, climate and culture. Denial of this fact is responsible for a vast amount of wasted time, money and effort (see Figure 1.2).� Often we hear someone say that �we need to do something at this school to . . . � The proactive and well intentioned sentiment is commendable. But it is useful to recognize that we are doing something at the school all day every day. While, sometimes it is useful to add a strategy or program into the mix to promote a positive outcome, no strategy can fix a fundamentally problematic context by itself. And more often than not what we find is that adding a series of add-ons into a school or classroom results in rather mediocre results. If the values/references within the context do not support the new practice it will be rejected eventually. Moreover, as one grows in understand the nature of what either creates or undermines function, what we see is that usually our school will improve more significantly by what we stop doing rather than something we add into the mix. When we examine what creates true improvement, higher levels of function and high quality outcomes, success is dependent on a series of complex but rather explainable factors such as vision, trust, function, climate, quality. These concepts can appear abstract and elusive, but in this book we will operationalize them, and explore how to promote them as practical realities. An especially critical quality indispensable to any effort toward meaningful growth will be vision. Too often we attach the vision in an organization to a person. Having leaders who possess visionary qualities will be useful indeed, but vision can be created within any group. Sustainable vision is an attitude, a set of practices and collectively and clearly setting our sites on a location on the roadmap. Vision is part of the culture of great schools, and something any school can begin to cultivate. While the definition of school improvement today is dominated by the interest in raising student achievement scores, and how we get there is given a much smaller consideration, the fact is that how we get there is the key to obtaining and sustaining higher levels of achievement. As a result of the external pressures to improve, and our experience with heavy handed external program �implementations,� we may associate improvement and change with something unnatural and forced. But the growth process, when approached with a sensitivity to how individuals and groups function, can be rather satisfying and rewarding. And the fact is that creating a healthy, functional and vision driven school is more likely to improve student achievement scores (as well as real student achievement by any definition) than trying to attack student achievement scores directly, with �programs.� The highest locations on the roadmap produce high student achievement as well as high student achievement scores, but it is also defined by a healthy climate, an emotionally sane and satisfying environment, meaningful learning and critical life lesson learning. There is no compartmentalization or compromise necessary. Every move up the pathway is innately more natural and enjoyable to those within the school. Figure 1.3 outlines some of the markers that we are successfully moving up the pathway. Need for telling, selling, bribing and coercing people to get them to perform well. Integrity of the efforts from leaders, teachers, staff and students in a direction that leads to growth. Disconnected action from leaders, teachers, staff and students that tends to add up the same old same old. Need for you to externally implement things onto others that are resisted, ignored and/or replaced later. A clear sense of the long-term and how today fits in. The feeling that what is necessary in the short term is all that one can handle in a typical day. A solid context (school and classroom climate and function levels) that allows for qualities such as creativity, trust and innovation to emerge naturally. School and classroom environments that perpetually requires so much management and maintenance that creativity and innovation become viewed as luxuries. A pervasive feeling of movement, growth and winning. Something is being built. The familiar feeling of the need to solve the same set of problems day in and day out. We are on a treadmill. After building the school effectiveness roadmap in chapters 2-5, in chapter six we examine how to cultivate a trust among leadership, teachers and staff and students within the school and the need to emphasize process values over outcome values. In chapter seven we look at the indispensable need for a guiding school vision and offer ideas for supporting this quality within the school. In chapter eight and nine, we explore how to support and encourage great practices and act as an expert instructional leader. In chapter ten we explore how to think about looking at data and recognizing and solving real problems rather than symptoms. In chapter eleven we take each of the eight dimensions of climate and examine their interdependence as well as their independent contributions to the overall climate of the school. Given that all schools are in different locations on the roadmap and thus the needs of their leader and the school as a whole will be different. Chapters twelve and thirteen are devoted to schools starting at two distinct location on the roadmap. Chapter twelve outlines that process of moving a school from a lower performing location to higher levels of function, a more positive climate and level of self-respect. Chapter thirteen explains how to go from good to great - how a school that is currently doing fine by most standards can move up the pathway and actualize more of its potential. 1. Willingness to become an expert in the nature of the roadmap and the mechanics of the change process. Much of it will resonate with your experience, and your instinctual sense of how things work, but there will also be some areas where your assumptions will be challenged and it may imply the need to change your thinking or your practices. Included in that willingness will be the need for patience with yourself, others and the process. If you are looking for quick fixed or clever strategies that you can use as short cuts to promoting meaningful and systemic change you will not find too many here. The effort here is to support your growth as a real leader not someone who is posing as one. 2. Commitment to a department, a school, or a district, team, institution, etc. This will imply time and a real concern for the wellbeing of those who you are entrusted to work with and lead. It will require an attitude of service and a sense of your purpose as a leader. 3. Openness to cultivating a vision. Your success will be dependent on your ability to see within the institution the highest good and nurturing a shared vision among the collective. You will need to develop the personal skills, knowledge and dispositions to inspire others to see a more functional, empowered, and satisfying place that can emerge out of the current state of affairs.
from copy import copy from dream.plugins import plugin import datetime # XXX HARDCODED MACHINE_TYPE_SET = set(["Dream.MachineJobShop", "Dream.MouldAssembly"]) class UpdateWIP(plugin.InputPreparationPlugin): """ Input preparation reads the data from external data base and updates the WIP """ def getWIPIds(self): """returns the ids of the parts that are in the WIP dictionary""" wipIDs = [] for key in self.data["input"]["BOM"].get("WIP", {}).keys(): wipIDs.append(key) return wipIDs def preprocess(self, data): """ updates the Work in Process according to what is provided by the BOM, i.e. if a design just exited the last step of it's sequence """ self.data = copy(data) orders = self.data["input"]["BOM"]["productionOrders"] nodes = self.data["graph"]["node"] wip = self.data["input"]["BOM"].get("WIP", {}) """ get the tasks that are in the WIP, and place those that are not in the WIP in the corresponding stations. Consider the parts that have concluded their routes, or the components that are not created yet. All the components defined by the corresponding orders should be examined """ wipToBeRemoved = [] # # check all the orders for order in orders: orderComponents = order.get("componentsList", []) designComplete = False # flag to inform if the design is concluded completedComponents = [] # list to hold the componentIDs that are concluded # # find all the components for component in orderComponents: componentID = component["id"] route = component["route"] # # figure out if they are defined in the WIP if componentID in self.getWIPIds(): work = wip[componentID] # # extract WIP information workStation = work["station"] remainingProcessingTime = float(work.get("remainingProcessingTime",0)) task_id = work["task_id"] assert len(route)>0, "the OrderComponent must have a route defined with length more than 0" assert task_id, "there must be a task_id defined for the OrderComponent in the WIP" # # get the step identified by task_id, hold the step_index to see if the entity's route is concluded for step_index, step in enumerate(route): if step["task_id"] == task_id: last_step = step break # # check if the entity has left the station if remainingProcessingTime: currentStation = workStation current_step = last_step # the entity is in a buffer if the step_index is no larger than the length of the route elif len(route)-1>=step_index: current_step = route[step_index+1] currentStation = current_step["stationIdsList"][0] # the entity has concluded it's route; it should be removed from the WIP else: wipToBeRemoved.append(componentID) # # check if this part is a design and update the flag if any(station.startswith("OD") for station in route[-1]["stationIdsList"]): designComplete = True # # add the part to the completedComponents list if it is not mould or design if not any(station.startswith("OD") for station in route[-1]["stationIdsList"]) and\ not any(station.startswith("E") for station in route[-1]["stationIdsList"]): completedComponents.append(componentID) # if the entity is still in the system then update the WIP info if not componentID in wipToBeRemoved: wip[componentID]["station"] = currentStation wip[componentID]["sequence"] = current_step["sequence"] wip[componentID]["task_id"] = current_step["task_id"] if remainingProcessingTime: wip[componentID]["remainingProcessingTime"] = {"Fixed": {"mean": remainingProcessingTime}} # if the entity is not recognized within the current WIP then check if it should be created # first the flag designComplete and the completedComponents list must be updated for component in orderComponents: componentID = component["id"] route = component["route"] if not componentID in self.getWIPIds(): insertWIPitem = False # # if the design is complete if designComplete: # # if the component is not a mould then put in the first step of its route if not any(station.startswith("E") for station in route[-1]["stationIdsList"]): insertWIPitem = True # # if the design is not complete else: # # if the component is design then put it at the start of its route if any(station.startswith("OD") for station in route[-1]["stationIdsList"]): insertWIPitem = True # # if the completed components include all the components (exclude mould and design) if len(completedComponents) == len(orderComponents)-2: # # if the component is a mould then put it in the first step of it's route if any(station.startswith("E") for station in route[-1]["stationIdsList"]): insertWIPitem = True if insertWIPitem: if not wip.get(componentID, {}): wip[componentID] = {} wip[componentID]["station"] = route[0]["stationIdsList"][0] wip[componentID]["sequence"] = route[0]["sequence"] wip[componentID]["task_id"] = route[0]["task_id"] # remove the idle entities for entityID in wipToBeRemoved: assert wip.pop(entityID, None), "while trying to remove WIP that has concluded it's route, nothing is removed" return data if __name__ == '__main__': pass
Would you pay $35m to live like a cosmonaut for 10 days? They've trained years for this moment — learning complex engineering, subjecting their bodies to bone-rattling simulations, and mentally preparing for the worst. Now in the final hours before blast-off, the trio of cosmonauts must undergo one last rite of passage. A blessing from a Russian Orthodox Priest. Located in the remote steppes of Kazakhstan, Baikonur Cosmodrome occupies a unique space between tradition and cutting edge technology; where awesome rockets continue to launch humans 220 miles above the Earth in much the same way they have done for 50 years. Indeed, this is where Soviet cosmonaut Yuri Gagarin became the first person to fly into space in 1961. And for a mere $35 million you could follow in his galactic footsteps, quite literally blasting off from the very same launchpad. An Orthodox priest blesses the International Space Station crew members — U.S astronaut Kevin Ford and Russian cosmonauts Oleg Novitskiy and Evgeny Tarelkin — before launch. Baikonur is the only place in the world offering space flights for private individuals and so far eight people have taken up the offer since 2001, spending around 10 days on the International Space Station (ISS) with two other trained cosmonauts or astronauts. Before launch, space tourists must complete five months of training spread over a year, mostly at Moscow's Star City, learning everything from the principals of flight to eating in zero-gravity. "They learn how to go about daily life in space — how to prepare food, go to the bathroom, keep themselves clean, and perform scientific experiments when they're on board," explained Tom Shelley, president of Space Adventures, the company arranging private missions into space. "They go through different simulations for launch and zero-gravity, the phases of the mission, and then what do if something goes wrong," he said of the cosmic tourists who are housed in the Russian segment of the ISS. Once at the station, visitors can even venture outside for a space walk — "to look back down on Earth from another perspective can be very emotional," said Shelley — and help conduct scientific research on everything from back pain, to eyesight, and crystal growth. The company's first client was multimillionaire American engineer and investment entrepreneur Dennis Tito, in 2001, who said of the journey: "Before I flew to space, I had no idea how comfortable it would be … I think if a lot of people know what I know now there would be a huge demand." The world's first space tourist, Dennis Tito, celebrates after landing near the Kazakh town of Arkalyk in 2001. Comfort or not, mere mortals unable to stump up the multimillion dollar ticket cost can still watch four manned Soyuz rockets taking off from Baikonur per year — each with three people on board. "We're only about a mile away from the launch itself," said Shelley of the $25,000 four-day package, including return flights from Moscow, accommodation, tours of the comosdrome and meet-and-greets with the cosmonauts. "The launch is a spectacular sight, but what is most surprising is the sound and vibrations that you feel through your body. It's just amazing the energy that is being generated." They're not the only operator allowing tourists to get up-close and personal with the Soyuz rocket. Star City Tours offers $4,600 five-day packages, also including return flights to Moscow, accommodation, visit to the International Space School, press conference with the cosmonauts, and of course a viewing of the launch itself. "One of the most spectacular things is seeing the installation of the rocket to the launch site," said Ksenia Nikitonova, project manager of the company which has been visiting the site for over eight years. "Tourists can get several meters from the rocket," she added. "And then we are watching the launch half a kilometer away with the families and friends of the crew." The gantry arms close around the Soyuz TMA-15M spacecraft to secure the rocket before launch. Despite the advanced technology needed to launch into space, there is a certain 1960s aesthetic to Baikonur Cosmodrome; a place which strangely appears frozen in a kind of Soviet time warp, according to Shelley. "It's a 50-year-old-facility and lot of the infrastructure there is very old as it was built in Soviet times, " he said of the cosmodrome which is leased to Russia until 2050. "So it's not as glamorous as some of the U.S. launch sites down in Kennedy Space Center." "It's a strange place to visit — here's this bland landscape, scrubland essentially, flat as the eye can see. And in the middle of it you have these absolutely amazing installations with state-of-the-art technology," he added. "So it's somewhat surreal — you're so far away from the rest of civilization, but here you have something that is really at the cutting edge." A feeling perhaps not so different from being in space itself.
# Author: Mr_Orange <mr_orange@hotmail.it> # URL: http://code.google.com/p/sickbeard/ # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. from __future__ import with_statement import time import re import urllib, urllib2, urlparse import sys import os import datetime import sickbeard import generic from sickbeard.common import Quality, cpu_presets from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException from sickbeard import db from sickbeard import classes from sickbeard import logger from sickbeard import tvcache from sickbeard import helpers from sickbeard import clients from sickbeard.show_name_helpers import allPossibleShowNames, sanitizeSceneName from sickbeard.common import Overview from sickbeard.exceptions import ex from sickbeard import encodingKludge as ek from lib import requests from lib.requests import exceptions from lib.unidecode import unidecode class ThePirateBayProvider(generic.TorrentProvider): def __init__(self): generic.TorrentProvider.__init__(self, "ThePirateBay") self.supportsBacklog = True self.enabled = False self.ratio = None self.confirmed = False self.minseed = None self.minleech = None self.cache = ThePirateBayCache(self) self.proxy = ThePirateBayWebproxy() self.url = 'http://pirateproxy.net/' self.searchurl = self.url + 'search/%s/0/7/200' # order by seed self.re_title_url = '/torrent/(?P<id>\d+)/(?P<title>.*?)//1".+?(?P<url>magnet.*?)//1".+?(?P<seeders>\d+)</td>.+?(?P<leechers>\d+)</td>' def isEnabled(self): return self.enabled def imageName(self): return 'thepiratebay.png' def getQuality(self, item, anime=False): quality = Quality.sceneQuality(item[0], anime) return quality def _reverseQuality(self, quality): quality_string = '' if quality == Quality.SDTV: quality_string = 'HDTV x264' if quality == Quality.SDDVD: quality_string = 'DVDRIP' elif quality == Quality.HDTV: quality_string = '720p HDTV x264' elif quality == Quality.FULLHDTV: quality_string = '1080p HDTV x264' elif quality == Quality.RAWHDTV: quality_string = '1080i HDTV mpeg2' elif quality == Quality.HDWEBDL: quality_string = '720p WEB-DL h264' elif quality == Quality.FULLHDWEBDL: quality_string = '1080p WEB-DL h264' elif quality == Quality.HDBLURAY: quality_string = '720p Bluray x264' elif quality == Quality.FULLHDBLURAY: quality_string = '1080p Bluray x264' return quality_string def _find_season_quality(self, title, torrent_id, ep_number): """ Return the modified title of a Season Torrent with the quality found inspecting torrent file list """ mediaExtensions = ['avi', 'mkv', 'wmv', 'divx', 'vob', 'dvr-ms', 'wtv', 'ts' 'ogv', 'rar', 'zip', 'mp4'] quality = Quality.UNKNOWN fileName = None fileURL = self.proxy._buildURL(self.url + 'ajax_details_filelist.php?id=' + str(torrent_id)) if self.proxy and self.proxy.isEnabled(): self.headers.update({'referer': self.proxy.getProxyURL()}) data = self.getURL(fileURL) if not data: return None filesList = re.findall('<td.+>(.*?)</td>', data) if not filesList: logger.log(u"Unable to get the torrent file list for " + title, logger.ERROR) videoFiles = filter(lambda x: x.rpartition(".")[2].lower() in mediaExtensions, filesList) #Filtering SingleEpisode/MultiSeason Torrent if len(videoFiles) < ep_number or len(videoFiles) > float(ep_number * 1.1): logger.log( u"Result " + title + " have " + str(ep_number) + " episode and episodes retrived in torrent are " + str( len(videoFiles)), logger.DEBUG) logger.log(u"Result " + title + " Seem to be a Single Episode or MultiSeason torrent, skipping result...", logger.DEBUG) return None if Quality.sceneQuality(title) != Quality.UNKNOWN: return title for fileName in videoFiles: quality = Quality.sceneQuality(os.path.basename(fileName)) if quality != Quality.UNKNOWN: break if fileName is not None and quality == Quality.UNKNOWN: quality = Quality.assumeQuality(os.path.basename(fileName)) if quality == Quality.UNKNOWN: logger.log(u"Unable to obtain a Season Quality for " + title, logger.DEBUG) return None try: myParser = NameParser(showObj=self.show) parse_result = myParser.parse(fileName) except (InvalidNameException, InvalidShowException): return None logger.log(u"Season quality for " + title + " is " + Quality.qualityStrings[quality], logger.DEBUG) if parse_result.series_name and parse_result.season_number: title = parse_result.series_name + ' S%02d' % int(parse_result.season_number) + ' ' + self._reverseQuality( quality) return title def _get_season_search_strings(self, ep_obj): search_string = {'Season': []} for show_name in set(allPossibleShowNames(self.show)): if ep_obj.show.air_by_date or ep_obj.show.sports: ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0] search_string['Season'].append(ep_string) ep_string = show_name + ' Season ' + str(ep_obj.airdate).split('-')[0] search_string['Season'].append(ep_string) elif ep_obj.show.anime: ep_string = show_name + ' ' + "%02d" % ep_obj.scene_absolute_number search_string['Season'].append(ep_string) else: ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) search_string['Season'].append(ep_string) ep_string = show_name + ' Season ' + str(ep_obj.scene_season) + ' -Ep*' search_string['Season'].append(ep_string) search_string['Season'].append(ep_string) return [search_string] def _get_episode_search_strings(self, ep_obj, add_string=''): search_string = {'Episode': []} if self.show.air_by_date: for show_name in set(allPossibleShowNames(self.show)): ep_string = sanitizeSceneName(show_name) + ' ' + \ str(ep_obj.airdate).replace('-', ' ') search_string['Episode'].append(ep_string) elif self.show.sports: for show_name in set(allPossibleShowNames(self.show)): ep_string = sanitizeSceneName(show_name) + ' ' + \ str(ep_obj.airdate).replace('-', '|') + '|' + \ ep_obj.airdate.strftime('%b') search_string['Episode'].append(ep_string) elif self.show.anime: for show_name in set(allPossibleShowNames(self.show)): ep_string = sanitizeSceneName(show_name) + ' ' + \ "%02i" % int(ep_obj.scene_absolute_number) search_string['Episode'].append(ep_string) else: for show_name in set(allPossibleShowNames(self.show)): ep_string = sanitizeSceneName(show_name) + ' ' + \ sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season, 'episodenumber': ep_obj.scene_episode} + '|' + \ sickbeard.config.naming_ep_type[0] % {'seasonnumber': ep_obj.scene_season, 'episodenumber': ep_obj.scene_episode} + ' %s' % add_string search_string['Episode'].append(re.sub('\s+', ' ', ep_string)) return [search_string] def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0): results = [] items = {'Season': [], 'Episode': [], 'RSS': []} if self.proxy and self.proxy.isEnabled(): self.headers.update({'referer': self.proxy.getProxyURL()}) for mode in search_params.keys(): for search_string in search_params[mode]: if mode != 'RSS': searchURL = self.proxy._buildURL(self.searchurl % (urllib.quote(unidecode(search_string)))) else: searchURL = self.proxy._buildURL(self.url + 'tv/latest/') logger.log(u"Search string: " + searchURL, logger.DEBUG) data = self.getURL(searchURL) if not data: continue re_title_url = self.proxy._buildRE(self.re_title_url) #Extracting torrent information from data returned by searchURL match = re.compile(re_title_url, re.DOTALL).finditer(urllib.unquote(data)) for torrent in match: title = torrent.group('title').replace('_', '.') #Do not know why but SickBeard skip release with '_' in name url = torrent.group('url') id = int(torrent.group('id')) seeders = int(torrent.group('seeders')) leechers = int(torrent.group('leechers')) #Filter unseeded torrent if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech): continue #Accept Torrent only from Good People for every Episode Search if self.confirmed and re.search('(VIP|Trusted|Helper|Moderator)', torrent.group(0)) is None: logger.log(u"ThePirateBay Provider found result " + torrent.group( 'title') + " but that doesn't seem like a trusted result so I'm ignoring it", logger.DEBUG) continue #Check number video files = episode in season and find the real Quality for full season torrent analyzing files in torrent if mode == 'Season' and search_mode == 'sponly': ep_number = int(epcount / len(set(allPossibleShowNames(self.show)))) title = self._find_season_quality(title, id, ep_number) if not title or not url: continue item = title, url, id, seeders, leechers items[mode].append(item) #For each search mode sort all the items by seeders items[mode].sort(key=lambda tup: tup[3], reverse=True) results += items[mode] return results def _get_title_and_url(self, item): title, url, id, seeders, leechers = item if title: title = u'' + title.replace(' ', '.') if url: url = url.replace('&amp;', '&') return (title, url) def findPropers(self, search_date=datetime.datetime.today()): results = [] myDB = db.DBConnection() sqlResults = myDB.select( 'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' + ' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' + ' WHERE e.airdate >= ' + str(search_date.toordinal()) + ' AND (e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED]) + ')' + ' OR (e.status IN (' + ','.join([str(x) for x in Quality.SNATCHED]) + ')))' ) if not sqlResults: return [] for sqlshow in sqlResults: self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"])) if self.show: curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"])) searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK') for item in self._doSearch(searchString[0]): title, url = self._get_title_and_url(item) results.append(classes.Proper(title, url, datetime.datetime.today(), self.show)) return results def seedRatio(self): return self.ratio class ThePirateBayCache(tvcache.TVCache): def __init__(self, provider): tvcache.TVCache.__init__(self, provider) # only poll ThePirateBay every 10 minutes max self.minTime = 20 def _getRSSData(self): search_params = {'RSS': ['rss']} return self.provider._doSearch(search_params) class ThePirateBayWebproxy: def __init__(self): self.Type = 'GlypeProxy' self.param = 'browse.php?u=' self.option = '&b=32' self.enabled = False self.url = None self.urls = { 'Getprivate.eu (NL)': 'http://getprivate.eu/', '15bb51.info (US)': 'http://15bb51.info/', 'Hideme.nl (NL)': 'http://hideme.nl/', 'Proxite.eu (DE)': 'http://proxite.eu/', 'Webproxy.cz (CZ)': 'http://webproxy.cz/', '2me2u (CZ)': 'http://2me2u.me/', 'Interproxy.net (EU)': 'http://interproxy.net/', 'Unblockersurf.info (DK)': 'http://unblockersurf.info/', 'Hiload.org (NL)': 'http://hiload.org/', } def isEnabled(self): """ Return True if we Choose to call TPB via Proxy """ return self.enabled def getProxyURL(self): """ Return the Proxy URL Choosen via Provider Setting """ return str(self.url) def _buildURL(self, url): """ Return the Proxyfied URL of the page """ if self.isEnabled(): url = self.getProxyURL() + self.param + url + self.option return url def _buildRE(self, regx): """ Return the Proxyfied RE string """ if self.isEnabled(): regx = re.sub('//1', self.option, regx).replace('&', '&amp;') else: regx = re.sub('//1', '', regx) return regx provider = ThePirateBayProvider()
замок - Episode 5.24 - Watershed - Promotional фото. . Wallpaper and background images in the замок & Beckett club tagged: castle caskett richard castle kate beckett nathan fillion stana katic castle & beckett.
# -*- coding: utf-8 -*- """ Learner data exporter for Enterprise Integrated Channel Degreed. """ from datetime import datetime from logging import getLogger from django.apps import apps from integrated_channels.catalog_service_utils import get_course_id_for_enrollment from integrated_channels.integrated_channel.exporters.learner_data import LearnerExporter from integrated_channels.utils import generate_formatted_log LOGGER = getLogger(__name__) class DegreedLearnerExporter(LearnerExporter): """ Class to provide a Degreed learner data transmission audit prepared for serialization. """ def get_learner_data_records( self, enterprise_enrollment, completed_date=None, is_passing=False, **kwargs ): # pylint: disable=arguments-differ,unused-argument """ Return a DegreedLearnerDataTransmissionAudit with the given enrollment and course completion data. If completed_date is None, then course completion has not been met. If no remote ID can be found, return None. """ # Degreed expects completion dates of the form 'yyyy-mm-dd'. completed_timestamp = completed_date.strftime("%F") if isinstance(completed_date, datetime) else None if enterprise_enrollment.enterprise_customer_user.get_remote_id() is not None: DegreedLearnerDataTransmissionAudit = apps.get_model( # pylint: disable=invalid-name 'degreed', 'DegreedLearnerDataTransmissionAudit' ) # We return two records here, one with the course key and one with the course run id, to account for # uncertainty about the type of content (course vs. course run) that was sent to the integrated channel. return [ DegreedLearnerDataTransmissionAudit( enterprise_course_enrollment_id=enterprise_enrollment.id, degreed_user_email=enterprise_enrollment.enterprise_customer_user.user_email, course_id=get_course_id_for_enrollment(enterprise_enrollment), course_completed=completed_date is not None and is_passing, completed_timestamp=completed_timestamp, ), DegreedLearnerDataTransmissionAudit( enterprise_course_enrollment_id=enterprise_enrollment.id, degreed_user_email=enterprise_enrollment.enterprise_customer_user.user_email, course_id=enterprise_enrollment.course_id, course_completed=completed_date is not None and is_passing, completed_timestamp=completed_timestamp, ) ] LOGGER.info(generate_formatted_log( 'degreed', enterprise_enrollment.enterprise_customer_user.enterprise_customer.uuid, enterprise_enrollment.enterprise_customer_user.user_id, None, ('get_learner_data_records finished. No learner data was sent for this LMS User Id because ' 'Degreed User ID not found for [{name}]'.format( name=enterprise_enrollment.enterprise_customer_user.enterprise_customer.name )))) return None
CAN WE SEE THROUGH THE SMOKE? ''It`s still pretty smoky down there,'' says a U.S. official, examining satellite pictures of Iran`s Kharg Island oil depot, hit last week by Iraqi missiles. A haze of smoke hangs over that whole war. Because neither dictatorship permits news coverage, the bloodiest conflict of our time is off the scope of our attention. Battle deaths are estimated at a million. Iran is suffering more, but can win a war of attrition. Iraq, which started the war, cannot settle it short of surrender, because the Ayatollah Khomeini is determined to win no matter how many boys` lives he must squander. Because Iran cannot launch a successful invasion in the south, it has begun to move in the north, enlisting the aid of oppressed Iraqi Kurds there. In response, Iraq has targeted Iran`s oil-shipping center at Kharg Island, hoping to cut down the 2-million-barrel-a-day oil shipments from there that are financing its opponent`s war. If this is even partially successful, the ayatollah will strike at the Arab world`s support of his Arab enemy, Saddam Hussein of Iraq. The ayatollah has at least three choices. He can use the 150,000 Iranian pilgrims now in Saudi Arabia during the Hajj religious season to tear up the host country; attack pipelines and tankers carrying oil from Iraq, Kuwait, Bahrain or anywhere in the Arab world; avenge Kharg by attacking the Saudi oil export facility at Ras Tanurah. What does all this have to do with us? The concern of most Americans is on the price of gasoline at the pump. If this escalation continues, oil prices will stop declining for a time, adding to inflation, perhaps leading to a reduction of Arab financing of the U.S. debt, causing higher interest rates. That unconcern is a mistake. A strategic challenge may be in the making, and we should prepare for it. Assume the war goes on. In Iran, where male children are a family`s only security in old age, resistance is sure to rise to the continued conscription of youths for slaughter, providing a counterweight to fanaticism. As of now, most of the organized resistance to Khomeini inside Iran is strongly leftist. If Iranian antiwar sentiment grows, or if Khomeini dies, the temptation to the Soviet Union cannot be ignored. The czarist dream of a warm-water port is still cherished by the Kremlin, and an uprising in volatile Iran would offer Mikhail Gorbachev the chance to make that ancient Russian dream come true. The port is Chahbahar, on the Persian Gulf, leading to the Indian Ocean. It is as desirable to the Russians as Danzig was to the Prussians. In time of turmoil, the Red Army could move down from its troubled base in Afghanistan, or roll its tanks from the Soviet Union through the part of Iran known as Baluchistan. We can hope that the U.S. has a contingency plan ready to meet a conventional Russian military move down through Iran. It would be good to hear a reminder that we would interpret a Soviet attempt to take advantage of unrest in Iran with a territorial grab as a threat to our ally Pakistan. The more subtle danger, for which we are far less prepared, is for the Soviet Union to arrange for a separatist movement of the Baluchis. Comes the revolution, and the Baluchis set up in Chahbahar and invite their Russian sponsors down, are we ready to sponsor a force inside or outside Iran to stop them? From a single, megadeath war, big conflagrations can start. To avert miscalculation, we should make certain that the other big bystander knows what we are prepared to do.
# coding=utf-8 ''' Created on 6.6.2013 Updated on 29.8.2013 Potku is a graphical user interface for analyzation and visualization of measurement data collected from a ToF-ERD telescope. For physics calculations Potku uses external analyzation components. Copyright (C) Timo Konu This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program (file named 'LICENCE'). ''' __author__ = "Timo Konu" __versio__ = "1.0" from PyQt4 import QtGui from Widgets.MatplotlibWidget import MatplotlibWidget class MatplotlibImportTimingWidget(MatplotlibWidget): def __init__(self, parent, output_file, icon_manager, timing): '''Inits import timings widget Args: parent: An ImportTimingGraphDialog class object. output_file: A string representing file to be graphed. icon_manager: An IconManager class object. timing: A tuple representing low & high timing limits. ''' super().__init__(parent) super().fork_toolbar_buttons() self.canvas.manager.set_title("Import coincidence timing") self.icon_manager = icon_manager # TODO: Multiple timings ? timing_key = list(timing.keys())[0] self.__limit_low, self.__limit_high = timing[timing_key] self.__title = self.main_frame.windowTitle() self.__fork_toolbar_buttons() self.canvas.mpl_connect('button_press_event', self.on_click) self.main_frame.setWindowTitle("{0} - Timing: ADC {3} ({1},{2})".format( self.__title, self.__limit_low, self.__limit_high, timing_key)) self.__limit_prev = 0 self.data = [] with open(output_file) as fp: for line in fp: if not line: # Can still result in empty lines at the end, skip. continue split = line.strip().split("\t") time_diff = int(split[3]) # if time_diff < 0: # time_diff *= -1 self.data.append(time_diff) self.data = sorted(self.data) self.on_draw() def on_draw(self): '''Draws the timings graph ''' self.axes.clear() self.axes.hist(self.data, 200, facecolor='green', histtype='stepfilled') self.axes.set_yscale('log', nonposy='clip') self.axes.set_xlabel("Timedifference (µs?)") self.axes.set_ylabel("Count (?)") if self.__limit_low: self.axes.axvline(self.__limit_low, linestyle="--") if self.__limit_high: self.axes.axvline(self.__limit_high, linestyle="--") self.remove_axes_ticks() self.canvas.draw_idle() def on_click(self, event): '''Handles clicks on the graph. Args: event: A click event on the graph ''' if event.button == 1 and self.limButton.isChecked(): value = int(event.xdata) if value == self.__limit_high or value == self.__limit_low: return if self.__limit_prev: self.__limit_high = value self.__limit_prev = 0 else: self.__limit_low = value self.__limit_prev = 1 # Check these values are correctly ordered if self.__limit_high < self.__limit_low: self.__limit_low, self.__limit_high = \ self.__limit_high, self.__limit_low # Set values to parent dialog (main_frame = ImportTimingGraphDialog) self.main_frame.timing_low.setValue(self.__limit_low) self.main_frame.timing_high.setValue(self.__limit_high) self.main_frame.setWindowTitle("{0} - Timing: ({1},{2})".format( self.__title, self.__limit_low, self.__limit_high)) self.on_draw() def __fork_toolbar_buttons(self): '''Custom toolbar buttons be here. ''' self.__tool_label = self.mpl_toolbar.children()[24] self.__button_drag = self.mpl_toolbar.children()[12] self.__button_zoom = self.mpl_toolbar.children()[14] self.__button_drag.clicked.connect(self.__uncheck_custom_buttons) self.__button_zoom.clicked.connect(self.__uncheck_custom_buttons) self.limButton = QtGui.QToolButton(self) self.limButton.clicked.connect(self.__limit_button_click) self.limButton.setCheckable(True) self.limButton.setToolTip("Change timing's low and high limits for more accurate coincidence reading.") self.icon_manager.set_icon(self.limButton, "amarok_edit.svg") self.mpl_toolbar.addWidget(self.limButton) def __limit_button_click(self): '''Click event on limit button. ''' if self.limButton.isChecked(): self.__uncheck_built_in_buttons() self.__tool_label.setText("timing limit tool") self.mpl_toolbar.mode = "timing limit tool" else: self.__tool_label.setText("") self.mpl_toolbar.mode = "" def __uncheck_custom_buttons(self): self.limButton.setChecked(False) def __uncheck_built_in_buttons(self): self.__button_drag.setChecked(False) self.__button_zoom.setChecked(False) self.__tool_label.setText("") self.mpl_toolbar.mode = ""
By incorporating her culinary background as a professional chef into her naturopathic practice, Traditional Naturopath, Jennifer Stanley and her team of certified holistic practitioners empower courageous people to reclaim their health by using nutrition and natural wellness treatments to overcome a variety of health challenges such as: digestive distress (constipation, IBS, Crohns, nervous disorders), autoimmune disorders (arthritis, fibromyalgia, cancer, thyroid/hormonal imbalance), hormone imbalance, Lyme disease, depression, anxiety, stubborn weight gain, or other environmental sensitivities. In two short months, I feel remarkably better. Dr. Stanley is down to earth and solution oriented. If you want to enjoy a healthier life without any commercial medications, I suggest you work with Dr. Jen. As someone who was given a life sentence of pharmaceuticals to treat (but not actually cure) her fibromyalgia, certified naturopath Jennifer Stanley has a refreshing sensitivity and empathy for people navigating their own health challenges in a world of media hype and misinformation. Jennifer educates her clients and guides them through highly customized treatment programs to help them gain the knowledge and confidence to truly take ownership of their health so they can feel more vibrant, energetic, and resilient than ever before. Her personal wellness journey (her fibromyalgia has been in complete remission for 13 years), along with her 10 years experience as a highly regarded natural medicine practitioner in the Chicago area, give Jennifer a compassion, professionalism, and intuition that are spot on. She works with clients all over the world to help them address the emotional, mental, and spiritual aspects of healing in addition to implementing necessary physical and environmental adjustments. Jennifer Stanley is a certified Naturopath, a certified Co-active Coach trained through CTI, a certified Colon Hydrotherapy Instructor for I-ACT, and a Licensed Massage Therapist. Interested in working one-on-one with Jennifer? Get wellness tips & spa specials delivered to your inbox. One knowledgable, compassionate, and experienced center for all your holistic health needs. Anatomy & Physiology Course (A&P) required for Foundational Level 1 certification course training. 100 hour Foundational I-ACT Level 1 Colon hydrotherapy certification course.
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- class Key: # pylint: disable=too-few-public-methods def __init__(self, enc_key=None, x5t_256=None): self.enc_key = enc_key self.x5t_256 = x5t_256 def to_json(self): return { 'enc_key': self.enc_key if self.enc_key else '', 'x5t_256': self.x5t_256 if self.x5t_256 else '' } class EncData: # pylint: disable=too-few-public-methods def __init__(self): self.data = [] self.kdf = None def to_json(self): return { 'data': [x.to_json() for x in self.data], 'kdf': self.kdf if self.kdf else '' } class Datum: # pylint: disable=too-few-public-methods def __init__(self, compact_jwe=None, tag=None): self.compact_jwe = compact_jwe self.tag = tag def to_json(self): return { 'compact_jwe': self.compact_jwe if self.compact_jwe else '', 'tag': self.tag if self.tag else '' } class SecurityDomainRestoreData: # pylint: disable=too-few-public-methods def __init__(self): self.enc_data = EncData() self.wrapped_key = Key() def to_json(self): return { 'EncData': self.enc_data.to_json(), 'WrappedKey': self.wrapped_key.to_json() }
Home / Company Developments / Is Your Scaffolding In Need of Repair or Reconditioning? Safety in our Products & Services is our up-most priority, each Scaffolding Component we Manufacture or Recondition is stringently tested using a variety of traditional methods along with cutting edge technology. We ensure by minimum, each component meets with British & EU Safety Standards, then that component has to meet with our satisfaction & standards. Our quality & safety assurance is guaranteed on every order, large or small. We make it our mission to be up-to-date with the latest Safety Regulations at all times. Working in-line with standards set out by HSE, ISO & HAPAS we are driven in not just meeting but exceeding British & EU Safety standards. Our passion is in the Production, our pride is in the Quality! Collection & Delivery Available within the UK. Next: Unable to Source the Scaffolding You Need?
"""MSP430 registers.""" class R0(object): """ Register 0, program counter. Points to the next instruction to be executed. Bit 0 is always unset. """ index = 0 class R1(object): """Register 1, stack pointer.""" index = 1 class R2(object): """Register 2, status.""" index = 2 class R3(object): """Register 3, constant generator.""" index = 3 class R4(object): """Register 4.""" index = 4 class R5(object): """Register 5.""" index = 5 class R6(object): """Register 6.""" index = 6 class R7(object): """Register 7.""" index = 7 class R8(object): """Register 8.""" index = 8 class R9(object): """Register 9.""" index = 9 class R10(object): """Register 10.""" index = 10 class R11(object): """Register 10.""" index = 11 class R12(object): """Register 12.""" index = 12 class R13(object): """Register 13.""" index = 13 class R14(object): """Register 14.""" index = 14 class R15(object): """Register 15.""" index = 15
NEW DELHI, October 13, 2010: Traditional clothes, tilak ceremonies, mehandi and reading scriptures are spome Navratri customs that not just Delhiites but also athletes from 71 other countries are observing in New Delhi. Participants at the Commonwealth Games Village were treated to the traditional tilak ceremony to mark the beginning of Navratri. “Players are not venturing out for security reasons, so we are trying to give them the festive flavor inside the village,” says Ashok Kumar, incharge of operations at the Games Village. Dandiya performances by various troupes have also been lined up as part of the daily cultural program. “The idea is to create a fun atmosphere,” adds Kumar. The players are enjoying every bit of it. “The celebrations are quite elaborate, and we’re loving it,” says Rashid Sarafu, a Nigerian athlete. The Delhi government has also arranged for city tours to take athletes to culture hotspots like Dilli Haat, where the mood is even more festive. “Foreign athletes who have been there have indulged in mehandi and bought bangles and colourful dupattas,” says Rina Ray, principal secretary, Delhi government. “We have also distributed booklets that have the whole Ramayana in English,” she adds.
from urlparse import urlparse, parse_qs from django import forms from django.contrib.auth.models import User from django.core.validators import validate_email from django.forms.models import inlineformset_factory from django.utils import html from us_ignite.actionclusters.models import ( ActionCluster, ActionClusterURL, ActionClusterMedia, ActionClusterMembership, ) from us_ignite.common import output def _get_status_choices(): """Returns a list of valid user status for the ``ActionCluster``""" available_status = [ ActionCluster.PUBLISHED, ActionCluster.DRAFT, ] is_valid_status = lambda x: x[0] in available_status return filter(is_valid_status, ActionCluster.STATUS_CHOICES) class ActionClusterForm(forms.ModelForm): """Model form for the ``ActionCluster`` with whitelisted fields.""" status = forms.ChoiceField( choices=_get_status_choices(), initial=ActionCluster.DRAFT) summary = forms.CharField( max_length=140, widget=forms.Textarea, help_text='Tweet-length pitch / summary of project.') class Meta: model = ActionCluster fields = ('name', 'summary', 'impact_statement', 'image', 'domain', 'features', 'stage', 'needs_partner', 'assistance', 'team_name', 'team_description', 'awards', 'acknowledgments', 'tags', 'status',) widgets = { 'features': forms.CheckboxSelectMultiple(), } def _strip_tags(self, field): if field in self.cleaned_data: return html.strip_tags(self.cleaned_data[field]) def clean_team_description(self): return self._strip_tags('team_description') def clean_tags(self): if 'tags' in self.cleaned_data: return output.prepare_tags(self.cleaned_data['tags']) ActionClusterLinkFormSet = inlineformset_factory( ActionCluster, ActionClusterURL, max_num=3, extra=3, can_delete=False) def is_embedable_url(url): domain_list = ['www.youtube.com'] url_parsed = urlparse(url) if url_parsed.netloc.lower() in domain_list: query = parse_qs(url_parsed.query) return True if query.get('v') else False return False class ActionClusterMediaForm(forms.ModelForm): def clean_url(self): url = self.cleaned_data.get('url') if url: if is_embedable_url(url): return url raise forms.ValidationError('Not valid URL.') return '' def clean(self): cleaned_data = self.cleaned_data if cleaned_data.get('url') or cleaned_data.get('image'): return self.cleaned_data raise forms.ValidationError('An image or a URL is required.') class Meta: fields = ('name', 'image', 'url') model = ActionClusterMedia ActionClusterMediaFormSet = inlineformset_factory( ActionCluster, ActionClusterMedia, max_num=10, extra=1, can_delete=False, form=ActionClusterMediaForm) def validate_member(email): """Validates the user has a valid email and it is registered.""" try: validate_email(email) except forms.ValidationError: raise forms.ValidationError( '``%s`` is an invalid email address.' % email) try: return User.objects.get(email=email) except User.DoesNotExist: raise forms.ValidationError( 'User with ``%s`` email is not registered.' % email) class MembershipForm(forms.Form): """Form to validate the collaborators.""" collaborators = forms.CharField( label=u'Team Members', widget=forms.Textarea, help_text=u'Add registered users as ' 'collaborators for this app. One email per line.', required=False) def clean_collaborators(self): """Validates the payload is a list of registered usernames.""" collaborators_raw = self.cleaned_data.get('collaborators') member_list = [] if collaborators_raw: collaborator_list = [c for c in collaborators_raw.splitlines() if c] for collaborator in collaborator_list: collaborator = collaborator.strip() member = validate_member(collaborator) member_list.append(member) return member_list class ActionClusterMembershipForm(forms.ModelForm): class Meta: model = ActionClusterMembership fields = ('can_edit', ) ActionClusterMembershipFormSet = inlineformset_factory( ActionCluster, ActionClusterMembership, extra=0, max_num=0, form=ActionClusterMembershipForm)
TABOR Restaurant’s spirit encapsulated some of the precious democratic atmosphere from the good old times. Here, the traffic and profits are not among the most important things in life. TABOR became a kind of shrine or gastronomic museum, where it is possible to present a short history of national cuisine to a guest, a stranger, within one pleasant night. For more than two decades, countless guests, prominent men and women from all over the world, from presidents of states, famous artists, to ordinary men, all left their kind words to our guest book. The earliest neswpaper articles begin to yellow, while awards testify about the mission of this temple of gastronomy and a special friendship sanctuary, where you can always find music, talented people and beautiful women. Livno prosciutto and cheese, peperonchino, parmesan chunks, homemade baby-beef paté, mangulica pork rinds, dried olives. Cook your steak just right on a hot lava stone and add a mix of sauces to perfectly match your taste. Enjoy!
#Copyright 2015 B. Johan G. Svensson #Licensed under the terms of the MIT license (see LICENSE). ''' #Generic methods and wrappers removeunused() Polynom only copy() degree() iszero() neg() Polynom only #getcoeff/setcoeff - normally only for internal use getcoeff(n) setcoeff(n, val) #Four basic arithmetic operations: add(other) sub(other) mul(other) div(other) - returns (quotient, remainder) #Special functions - external use only: divprintstep(other) Polybinary only divprintstep2(other) Polybinary only getfibonaccileftshftpoly() Polybinary only getfibonaccirightshftpoly() Polybinary only getgaloisleftshftpoly() Polybinary only getgaloisrightshftpoly() Polybinary only #Display function used by __repren__(self) repren() ''' class Polynom(object): #YOU are responsible that the coefficients work! def __init__(self, inpt): self.coeff = list(inpt) self.removeunused() ######## #Generic methods and wrappers def removeunused(self): for i in xrange(len(self.coeff)-1,-1,-1): if self.coeff[i] == 0: self.coeff.pop() else: break def copy(self): return Polynom(self.coeff[:]) def degree(self): self.removeunused() deg = len(self.coeff) - 1 if deg >= 0: return deg else: # Wikipedia says -Inf is 'convenient' http://en.wikipedia.org/wiki/Degree_of_a_polynomial#Degree_of_the_zero_polynomial return -float('infinity') def iszero(self): for coeff in self.coeff: if coeff != 0: return False return True def neg(self): return Polynom([0]).sub(self) ######## #getcoeff/setcoeff - normally only only used by add, sub, mul, div def getcoeff(self,n): try: return self.coeff[n] except IndexError: return 0 def setcoeff(self,n, val): try: self.coeff[n] = val except IndexError: if val != 0: self.coeff.extend([0] * (1 + n - len(self.coeff))) self.coeff[n] = val ######## #Four basic arithmetic operations: def add(self, other): if isinstance(other, (int, long, float)): other = Polynom([other]) maxdeg = max(self.degree(), other.degree()) newpoly = [] for i in xrange(maxdeg + 1): newpoly.append(self.getcoeff(i)+other.getcoeff(i)) return Polynom(newpoly) def sub(self, other): if isinstance(other, (int, long, float)): other = Polynom([other]) maxdeg = max(self.degree(), other.degree()) newpoly = [] for i in xrange(maxdeg + 1): newpoly.append(self.getcoeff(i)-other.getcoeff(i)) return Polynom(newpoly) def mul(self, other): if isinstance(other, (int, long, float)): other = Polynom([other]) newdegree = self.degree() + other.degree() newpoly = [0]*(newdegree + 1) for i in xrange(self.degree() + 1): for j in xrange(other.degree() + 1): newpoly[i+j] += self.getcoeff(i)*other.getcoeff(j) return Polynom(newpoly) def div(self, other): if other.iszero(): raise ZeroDivisionError q = Polynom([0]) r = self.copy() d = other.copy() while not(r.iszero()) and r.degree() >= d.degree(): deg = r.degree() - d.degree() coef = r.getcoeff(r.degree())/d.getcoeff(d.degree()) tlst = [0] * (1 + deg) tlst[-1] = coef t = Polynom(tlst) q += t r -= t.mul(d) return (q, r) ######## #Special functions - external use only: def divprintstep(self, other): if other.iszero(): raise ZeroDivisionError q = Polynom([0]) r = self.copy() d = other.copy() print 'Printing long division:' print '=',r while not(r.iszero()) and r.degree() >= d.degree(): deg = r.degree() - d.degree() coef = r.getcoeff(r.degree())/d.getcoeff(d.degree()) tlst = [0] * (1 + deg) tlst[-1] = coef t = Polynom(tlst) q += t r -= t.mul(d) print '',t.mul(d), ' = ', t, ' * (', d, ')' print '--------' print '=',r return (q, r) ######## #Display function used by __repr__(self) def repren(self): strng = '' coefflst = [] for i in xrange(len(self.coeff)-1,-1,-1): # if self.coeff[i] == 0: continue # if i == len(self.coeff) - 1 and self.coeff[i] >= 0: strsign = '' elif self.coeff[i] < 0: strsign = '-' else: strsign = '+' # if i == 0 or abs(self.coeff[i]) != 1: strcoeff = str(abs(self.coeff[i])) else: strcoeff = '' # if i == 0: strx = '' elif i == 1: strx = 'x' else: strx = 'x**' + str(i) if len(strng) > 0: strng += ' ' if len(strsign) > 0: strng += strsign + ' ' if len(strcoeff) > 0 and len(strx) > 0: strng += strcoeff + ' * ' + strx else: strng += strcoeff + strx if len(strng) == 0: strng = '0' return strng ################ ####Operator methods #Basic customization def __repr__(self): return self.repren() ## def __str__(self): ## return '' ## def __unicode__(self): ## return u'' ## def __lt__(self, other): ## return NotImplemented ## def __le__(self, other): ## return NotImplemented ## def __eq__(self, other): ## return NotImplemented ## def __ne__(self, other): ## return NotImplemented ## def __gt__(self, other): ## return NotImplemented ## def __ge__(self, other): ## return NotImplemented ## def __hash__(self): ## return None ## def __nonzero__(self): ## return True #Customizing attribute access ## def __getattr__(self, name): ## return NotImplemented ## def __setattr__(self, name, value): ## return NotImplemented ## def __delattr__(self, name): ## return NotImplemented ## def __getattribute__(self, name): ## return NotImplemented ## def __get__(self, instance, owner): ## return NotImplemented ## def __set__(self, instance, value): ## return NotImplemented ## def __delete__(self, instance): ## return NotImplemented #Emulating callable objects ## def __call__(self, *args): ## pass #Emulating container types ## def __len__(self): ## pass ## def __getitem__(self, key): ## pass ## def __setitem__(self, key, value): ## pass ## def __delitem__(self, key): ## pass ## def __iter__(self): ## pass ## def __reversed__(self): ## pass ## def __contains__(self, item): ## pass #Additional methods for emulation of sequence types #DEPRECATED since version 2.0 - __getitem__, __setitem__ and __delitem__ above ## def __getslice__(self, i, j): ## pass ## def __setslice__(self, i, j, sequence): ## pass ## def __delslice__(self, i, j): ## pass #Emulating numeric types def __add__(self, other): return self.add(other) def __sub__(self, other): return self.sub(other) def __mul__(self, other): return self.mul(other) def __floordiv__(self, other): return self.div(other)[0] def __mod__(self, other): return self.div(other)[1] def __divmod__(self, other): return self.div(other) ## def __pow__(self, other, modulo = None): ## return NotImplemented ## def __lshift__(self, other): ## return NotImplemented ## def __rshift__(self, other): ## return NotImplemented ## def __and__(self, other): ## return NotImplemented ## def __xor__(self, other): ## return NotImplemented ## def __or__(self, other): ## return NotImplemented def __div__(self, other): return self.div(other)[0] def __truediv__(self, other): return self.div(other)[0] def __radd__(self, other): try: return other.add(self) except AttributeError: return Polynom([other]).add(self) def __rsub__(self, other): try: return other.sub(self) except AttributeError: return Polynom([other]).sub(self) def __rmul__(self, other): try: return other.mul(self) except AttributeError: return Polynom([other]).mul(self) def __rdiv__(self, other): #other / self try: return other.div(self)[0] except AttributeError: return Polynom([other]).div(self)[0] def __rtruediv__(self, other): try: return other.div(self)[0] except AttributeError: return Polynom([other]).div(self)[0] def __rfloordiv__(self, other): try: return other.div(self)[0] except AttributeError: return Polynom([other]).div(self)[0] def __rmod__(self, other): try: return other.div(self)[1] except AttributeError: return Polynom([other]).div(self)[1] def __rdivmod__(self, other): try: return other.div(self) except AttributeError: return Polynom([other]).div(self) ## def __rpow__(self, other): ## return NotImplemented ## def __rlshift__(self, other): ## return NotImplemented ## def __rrshift__(self, other): ## return NotImplemented ## def __rand__(self, other): ## return NotImplemented ## def __rxor__(self, other): ## return NotImplemented ## def __ror__(self, other): ## return NotImplemented #Correct behavour of i-methods - Modify self and return the result #If not possible do not define these - will be handled automatically ## def __iadd__(self, other): ## return NotImplemented ## def __isub__(self, other): ## return NotImplemented ## def __imul__(self, other): ## return NotImplemented ## def __idiv__(self, other): ## return NotImplemented ## def __itruediv__(self, other): ## return NotImplemented ## def __ifloordiv__(self, other): ## return NotImplemented ## def __imod__(self, other): ## return NotImplemented ## def __ipow__(self, other, modulo = None): ## return NotImplemented ## def __ilshift__(self, other): ## return NotImplemented ## def __irshift__(self, other): ## return NotImplemented ## def __iand__(self, other): ## return NotImplemented ## def __ixor__(self, other): ## return NotImplemented ## def __ior__(self, other): ## return NotImplemented def __neg__(self): return Polynom([0]) - self def __pos__(self): return self.copy() ## def __abs__(self): ## pass ## def __invert__(self): ## pass ## def __complex__(self): ## pass ## def __int__(self): ## pass ## def __long__(self): ## pass ## def __float__(self): ## pass ## def __oct__(self): ## pass ## def __hex__(self): ## pass ## def __index__(self): ## pass ## def __coerce__(self, other): ## pass #With Statement Context Managers ## def __enter__(self): ## pass ## def __exit__(self, exc_type, exc_value, traceback): ## pass
We're always on the lookout for talented people to join our team. We're looking for a Line QA for our Fresh potato factory. You’ll take responsibility for monitoring the quality of our finished product and ensuring that it consistently meets our customers' high standards. Can you lead site operations and provide front line leadership across the management team delivering "right first time" quality service, standards and performance? Then we need you! An opportunity has opened up in our QA department for a QA Team Leader on “B” Shift. This role would suit an energetic and organised individual who can lead a team of QA Controllers and also contribute as a member of the senior team. We’re looking for someone who will work closely with the Celox (optical grader) team and in the raw material stores examining crops against Intake QA reports to ensure loads are correctly allocated. Working closely with our Team Leaders, Machine Operators and Quality Assurance team to ensure the smooth and efficient running of our busy production lines. We’re looking for someone who is self-motivated and reliable when working individually and as part of a team. A key player in our Technical team, making sure the quality and safety of products exceed customer expectations and achieve our high quality standards.
__author__ = 'aaronweaver' import re from defusedxml import ElementTree as ET import hashlib from urllib.parse import urlparse from dojo.models import Finding, Endpoint class NiktoXMLParser(object): def __init__(self, filename, test): dupes = dict() self.items = () if filename is None: self.items = () return tree = ET.parse(filename) root = tree.getroot() scan = root.find('scandetails') # New versions of Nikto have a new file type (nxvmlversion="1.2") which adds an additional niktoscan tag # This find statement below is to support new file format while not breaking older Nikto scan files versions. if scan is None: scan = root.find('./niktoscan/scandetails') for item in scan.findall('item'): # Title titleText = None description = item.find("description").text # Cut the title down to the first sentence sentences = re.split( r'(?<!\w\.\w.)(?<![A-Z][a-z]\.)(?<=\.|\?)\s', description) if len(sentences) > 0: titleText = sentences[0][:900] else: titleText = description[:900] # Url ip = item.find("iplink").text # Remove the port numbers for 80/443 ip = ip.replace(":80", "") ip = ip.replace(":443", "") # Severity severity = "Info" # Nikto doesn't assign severity, default to Info # Description description = "\n \n".join((("Host: " + ip), ("Description: " + item.find("description").text), ("HTTP Method: " + item.attrib["method"]), )) mitigation = "N/A" impact = "N/A" references = "N/A" dupe_key = hashlib.md5(description.encode("utf-8")).hexdigest() if dupe_key in dupes: finding = dupes[dupe_key] if finding.description: finding.description = finding.description + "\nHost:" + ip + "\n" + description self.process_endpoints(finding, ip) dupes[dupe_key] = finding else: dupes[dupe_key] = True finding = Finding(title=titleText, test=test, active=False, verified=False, description=description, severity=severity, numerical_severity=Finding.get_numerical_severity( severity), mitigation=mitigation, impact=impact, references=references, url='N/A', dynamic_finding=True) dupes[dupe_key] = finding self.process_endpoints(finding, ip) self.items = list(dupes.values()) def process_endpoints(self, finding, host): protocol = "http" query = "" fragment = "" path = "" url = urlparse(host) if url: path = url.path rhost = re.search( "(http|https|ftp)\://([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&amp;%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.(com|edu|gov|int|mil|net|org|biz|arpa|info|name|pro|aero|coop|museum|[a-zA-Z]{2}))[\:]*([0-9]+)*([/]*($|[a-zA-Z0-9\.\,\?\'\\\+&amp;%\$#\=~_\-]+)).*?$", host) protocol = rhost.group(1) host = rhost.group(4) try: dupe_endpoint = Endpoint.objects.get(protocol="protocol", host=host, query=query, fragment=fragment, path=path, product=finding.test.engagement.product) except Endpoint.DoesNotExist: dupe_endpoint = None if not dupe_endpoint: endpoint = Endpoint(protocol=protocol, host=host, query=query, fragment=fragment, path=path, product=finding.test.engagement.product) else: endpoint = dupe_endpoint if not dupe_endpoint: endpoints = [endpoint] else: endpoints = [endpoint, dupe_endpoint] finding.unsaved_endpoints = finding.unsaved_endpoints + endpoints
We adore this stylish mirror by Eichholtz. It will add the perfect finishing touch to your stylish interior. The simple square angles from a nickel metal frame housing the mirror. Dimensions: 70cm square.
#!/usr/bin/env python # Cloudsnake Application server # Licensed under Apache License, see license.txt # Author: Markus Gronholm <markus@alshain.fi> Alshain Oy #import hashlib, bisect, copy class HashRing( object ): def __init__( self, replicas = 1, track_changes = False ): self.replicas = replicas self.ring = {} self.keys = [] self.invert_ring = {} self.accessed_keys = set() self.key_mapping = {} self.saved_mapping = {} self.invalid_nodes = set() self.track_changes = track_changes def invalidate_node( self, node ): self.invalid_nodes.add( node ) def validate_node( self, node ): self.invalid_nodes.discard( node ) def get_invalid_keys( self ): out = [] for node in self.invalid_nodes: out.extend( self.invert_ring[ node ] ) return out def save_state( self ): for (key, item) in self.key_mapping.items(): self.saved_mapping[ key ] = item def generate_key( self, key ): return cloudSnake.modules.hashlib.md5( key ).hexdigest() def compute_changes( self ): self.compute_mapping() changes = [] for key in self.accessed_keys: if self.saved_mapping[ key ] != self.key_mapping[ key ]: changes.append( ( key, self.saved_mapping[ key ], self.key_mapping[ key ] ) ) return changes def add_node( self, node ): self.invert_ring[ node ] = [] if self.track_changes: self.save_state() for i in range( self.replicas ): key = self.generate_key( str( i ) + "+" + str( node ) ) self.ring[ key ] = node cloudSnake.modules.bisect.insort( self.keys, key ) self.invert_ring[ node ].append( key ) if self.track_changes: return self.compute_changes() return True def remove_node( self, node ): if self.track_changes: self.save_state() keys = self.invert_ring[ node ] for key in keys: self.keys.remove( key ) del self.invert_ring[ node ] if self.track_changes: return self.compute_changes() else: return True def _raw_get_node( self, key ): pos = cloudSnake.modules.bisect.bisect_right( self.keys, key ) node_key = self.keys[ pos - 1 ] return self.ring[ node_key ] def get_node( self, skey ): key = self.generate_key( skey ) self.accessed_keys.add( key ) valid_keys = self.keys invalid_keys = self.get_invalid_keys() for ikey in invalid_keys: valid_keys.remove( ikey ) pos = cloudSnake.modules.bisect.bisect_right( valid_keys, key ) node_key = valid_keys[ pos - 1 ] if self.track_changes: self.key_mapping[ key ] = self.ring[ node_key ] return self.ring[ node_key ] def get_keys_for_node( self, node ): return self.invert_ring[ node ] def compute_mapping( self ): for key in self.accessed_keys: self.key_mapping[ key ] = self._raw_get_node( key )
Please enter the username on your account and click on Submit. You should receive an email shortly at your registered email account with a link to reset your password. If you do not receive an email repeat this request.
from django.core.cache import cache from django.conf import settings as d_settings from django.template import Context, Template, TemplateDoesNotExist from django.template.loader import get_template from tendenci import __version__ as version from tendenci.apps.site_settings.models import Setting from tendenci.apps.site_settings.cache import SETTING_PRE_KEY def settings(request): """Context processor for settings """ key = [d_settings.CACHE_PRE_KEY, SETTING_PRE_KEY, 'all'] key = '.'.join(key) settings = cache.get(key) if not settings: settings = Setting.objects.all() is_set = cache.add(key, settings) if not is_set: cache.set(key, settings) contexts = {} for setting in settings: context_key = [setting.scope, setting.scope_category, setting.name] context_key = '_'.join(context_key) value = setting.get_value().strip() if setting.data_type == 'boolean': value = value[0].lower() == 't' if setting.data_type == 'int': if value.strip(): try: # catch possible errors when int() is called value = int(value.strip()) except ValueError: value = 0 else: value = 0 # default to 0 # Handle context for the social_media addon's # contact_message setting if setting.name == 'contact_message': page_url = request.build_absolute_uri() message_context = {'page_url': page_url} message_context = Context(message_context) message_template = Template(value) value = message_template.render(message_context) contexts[context_key.upper()] = value contexts['TENDENCI_VERSION'] = version contexts['USE_I18N'] = d_settings.USE_I18N contexts['LOGIN_URL'] = d_settings.LOGIN_URL contexts['LOGOUT_URL'] = d_settings.LOGOUT_URL return contexts def app_dropdown(request): """ Context processor for getting the template needed for a module setting dropdown """ context = {} path = request.get_full_path().strip('/') path = path.split('/') if len(path) < 3: context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': 'site_settings/top_nav.html'}) else: if path[0] == 'settings' and path[1] == 'module': try: get_template(path[2]+'/top_nav.html') context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': path[2]+'/top_nav.html'}) except TemplateDoesNotExist: context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': 'site_settings/top_nav.html'}) # special case profile setting as users if path[2] == 'users': context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': 'profiles/top_nav.html'}) if path[2] == 'groups': context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': 'user_groups/top_nav.html'}) if path[2] == 'make_payment': context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': 'make_payments/top_nav.html'}) else: context.update({'ADMIN_MENU_APP_TEMPLATE_DROPDOWN': 'site_settings/top_nav.html'}) return context
Great interview!! I like her way of thinking about fashion. Such a good choice, she is very cute! love her style! and her blog! I can relate to a lot of her views! Love her blog! She always puts together amazing outfits. Those shoes in the first photograph are dope. oh yes she is so cool, and so stylish! She's got great style. Nice choice! She seems so cool and love her outfit. Love Bravegrrl and this was an awesome interview! nice post! it`s great to knew more people here and the fact that everyone loves to know more people,great category on your blog :) btw...love her! What a doll. Totally loving her style. Fabulous feature! Fantastic Q&A! Love her style and her answers to the fashion inspiration. I also have to agree with her dislikes! Great style! Really good choice like always! so interesting post, i like it!!!
# -*- coding: utf-8 -*- from config import * print(Color( '{autored}[{/red}{autoyellow}+{/yellow}{autored}]{/red} {autocyan} msg.py importado.{/cyan}')) @bot.message_handler(commands=['msg']) def command_msg(m): cid = m.chat.id uid = m.from_user.id try: send_udp('msg') except Exception as e: bot.send_message(52033876, send_exception(e), parse_mode="Markdown") if not is_recent(m): return None if is_admin(uid): if len(m.text.split(' ')) >= 3: if isint(m.text.split(' ')[1]): try: bot.send_message( m.text.split(' ')[1], ' '.join( m.text.split(' ')[ 2:])) except: bot.send_message( cid, "Error. No se pudo enviar mensaje, quizá ya no es usuario.") else: bot.send_message( cid, "Éxito. Mensaje enviado satisfactoriamente.") else: bot.send_message( cid, "Error. Debes introducir un número como ID.") else: bot.send_message( cid, "Error. Debes introducir `/msg <ID> <Mensaje>`", parse_mode="Markdown")
* 1. What HOA do you live in? * 3. What is your HOA's website? * 4. Does your HOA have a public Facebook page? * 5. Does your HOA have an assigned delegate to the Briar Forest Super Neighborhood? * 6. Tell us about issues that you see in your neighborhood. * 7. Does your neighborhood need assistance for post Harvey recovery? * 8. Briefly describe any ongoing projects in your neighborhood. * 9. How often do you attend Briar Forest Super Neighborhood meetings? * 10. Please share with us a topic you would like for us to cover at our monthly meeting.
# This base class is to make it easier to write management commands # for merging object in Pombola (e.g. Person and Organisation at the # moment). from optparse import make_option import sys from django.contrib.contenttypes.models import ContentType from django.core.management.base import BaseCommand, CommandError from django.core.urlresolvers import reverse from django.db import transaction from slug_helpers.models import SlugRedirect from images.models import Image import pombola.core.models as core_models def check_basic_fields(basic_fields, to_keep, to_delete): """Return False if any data might be lost on merging""" safe_to_delete = True for basic_field in basic_fields: if basic_field == 'summary': # We can't just check equality of summary fields because # they are MarkupField fields which don't have equality # helpfully defined (and they're always different objects # between two different speakers), so instead, check for # equality of the rendered content of the summary. delete_value = to_delete.summary.rendered keep_value = to_keep.summary.rendered else: delete_value = getattr(to_delete, basic_field) keep_value = getattr(to_keep, basic_field) if keep_value != delete_value: # i.e. there's some data that might be lost: safe_to_delete = False message = "Mismatch in '%s': '%s' ({%d}) and '%s' (%d)" print >> sys.stderr, message % (basic_field, keep_value, to_keep.id, delete_value, to_delete.id) return safe_to_delete class MergeCommandBase(BaseCommand): option_list = BaseCommand.option_list + ( make_option("--keep-object", dest="keep_object", type="string", help="The ID or slug of the object to retain", metavar="OBJECT-ID"), make_option("--delete-object", dest="delete_object", type="string", help="The ID or slug of the object to delete", metavar="OBJECT-ID"), make_option('--noinput', dest='interactive', action='store_false', default=True, help="Do NOT prompt the user for input of any kind"), make_option("--quiet", dest="quiet", help="Suppress progress output", default=False, action='store_true')) admin_url_name = None basic_fields_to_check = () model_class = None def model_specific_merge(self, to_keep, to_delete, **options): pass def get_by_slug_or_id(self, identifier): try: return self.model_class.objects.get(slug=identifier) # AttributeError catches the case where there is no slug field. except self.model_class.DoesNotExist, AttributeError: try: object_id = int(identifier) except ValueError: raise ( self.model_class.DoesNotExist, "Object matching query does not exist." ) return self.model_class.objects.get(pk=object_id) @transaction.atomic def handle(self, *args, **options): if not options['keep_object']: raise CommandError("You must specify --keep-object") if not options['delete_object']: raise CommandError("You must specify --delete-object") if args: message = "Don't supply arguments, only --keep-object and --delete-object" raise CommandError(message) to_keep = self.get_by_slug_or_id(options['keep_object']) to_delete = self.get_by_slug_or_id(options['delete_object']) to_keep_admin_url = reverse(self.admin_url_name, args=(to_keep.id,)) if to_keep.id == to_delete.id: raise CommandError("--keep-object and --delete-object are the same") print "Going to keep:", to_keep, "with ID", to_keep.id print "Going to delete:", to_delete, "with ID", to_delete.id if options['interactive']: answer = raw_input('Do you wish to continue? (y/[n]): ') if answer != 'y': raise CommandError("Command halted by user, no changes made") if not check_basic_fields( self.basic_fields_to_check, to_keep, to_delete, ): raise CommandError("You must resolve differences in the above fields") content_type = ContentType.objects.get_for_model(self.model_class) self.model_specific_merge(to_keep, to_delete, **options) # Replace the object on all models with generic foreign keys in core for model in (core_models.Contact, core_models.Identifier, core_models.InformationSource): model.objects.filter(content_type=content_type, object_id=to_delete.id) \ .update(object_id=to_keep.id) # Add any images for the object to delete as non-primary # images for the object to keep: Image.objects.filter(content_type=content_type, object_id=to_delete.id) \ .update(is_primary=False, object_id=to_keep.id) # Make sure the old slug redirects to the object to keep: SlugRedirect.objects.create( new_object=to_keep, old_object_slug=to_delete.slug, ) # Finally delete the now unnecessary object: to_delete.delete() if not options['quiet']: print "Now check the remaining object (", to_keep_admin_url, ")" print "for any duplicate information."
It all started when Jerry Awram became interested in bees through the father of a school friend, causing him to study apiculture. After finishing his B. Sc. and M. Sc. degrees, Jerry went to the U.K. to study bumble bees, and obtained his PhD in 1970. Then, he accepted the job of Provincial Apiculturist for Alberta and began keeping bees on the side. After resigning from this position in 1973, he and his wife Pia ran their beekeeping business full time along with raising their growing family. In 1985, after experimenting with new techniques for wintering bees, Jerry moved his entire family and beekeeping operation to the Lower Mainland where the milder winters were better for the bees. Since then, the business has expanded dramatically and has truly become a family enterprise with all three of Jerry and Pia’s sons, Peter, Tom and David becoming part of the honey business. Jerry has always been keenly interested in the improvement and promotion of apiculture and has taken an active interest in the beekeeping community. He has been the president of the Alberta Honey Producers Cooperative of Alberta, Vice-President of BeeMaid Honey and President of the Canadian Honey Council. He has spoken on TV and been quoted in newspapers regarding bees. In fact the entire family and business has been featured on the children’s TV program “Harriet’s Magic Hats”. In addition Honeyview Farm has been featured in a number of news articles and interviews in newspapers and on TV throughout the years. After many years in the bee business, Jerry realized that sustainable beekeeping was the future. Instead of killing off the bees at the end of each summer after the honey crop and importing new bees from the United States every spring, beekeepers needed to learn how to keep the beehives alive through the winter. Jerry was one of the first beekeepers to start experimenting in these methods and as a result moved his entire family and beekeeping operation to the Lower Mainland from the Peace River area of Alberta where the milder winters were better for the bees.
import asyncio import json from pprint import pprint as pp import re from urllib.parse import urljoin import aioutils.aiohttp import aiohttp import bs4 from bs4 import BeautifulSoup import more_itertools from aioutils.task_queues import SizeBoundedTaskList from .parse_types import load_enum_into_item, parse_type_format class TypesEncoder(json.JSONEncoder): def default(self, o): if hasattr(o, 'items'): return dict(o) else: super().default(o) error_definitions = { 'ErrorResult': { 'type': 'object', 'properties': { 'Request': {'type': 'string'}, 'ErrorDescription': {'type': 'string'}, 'Error': {'$ref': '#/definitions/Error'}, }, }, 'Error': { 'type': 'object', 'properties': { 'Code': {'type': 'string'}, 'UserDescription': {'type': 'string'}, 'DeveloperDescription': {'type': 'string'}, 'ErrorData': { 'type': 'array', 'items': {'$ref': '#/definitions/ErrorDataItem'}, }, }, }, 'ErrorDataItem': { 'type': 'object', 'properties': { 'Name': {'type': 'string'}, 'Value': {'type': 'string'}, }, }, } standard_responses = { '304': { 'description': 'Used with caching to indicate that the cached copy is' ' still valid.' }, '400': { 'description': 'The request is believed to be invalid in some way. The' ' response body will contain an error message. You' ' should display the error message to the user.', 'schema': {'type': 'object', '$ref': '#/definitions/ErrorResult'}, }, '401': { 'description': 'An OAuth authentication failure occurred. You should' ' ask the user to log in again.', 'schema': {'type': 'object', '$ref': '#/definitions/ErrorResult'}, }, '429': { 'description': 'Your rate limit has been exceeded. Your rate limit' ' will reset at the start of the next hour. You should' ' not attempt to make any more calls until then.', 'schema': {'type': 'object', '$ref': '#/definitions/ErrorResult'}, }, '500': { 'description': 'A server error occurred. You should display a generic' ' “whoops” error message to the user.', 'schema': {'type': 'object', '$ref': '#/definitions/ErrorResult'}, }, '503': { 'description': 'Planned server maintenance is underway. General error' ' details and auction extension details are provided in' ' the response. You should consume this information to' ' inform the end user.', 'schema': {'type': 'object', '$ref': '#/definitions/ErrorResult'}, }, } class DefinitionContainer(): def __init__(self): self.definitions = {} self.reverse = {} def add_definition(self, prefered_name, definition): if not prefered_name.isidentifier(): print("Invalid identifier {!r}".format(prefered_name)) rev_lookup = json.dumps(definition, indent=2, sort_keys=True, cls=TypesEncoder) rev_names = self.reverse.setdefault(rev_lookup, []) if prefered_name in rev_names: return prefered_name elif prefered_name not in self.definitions: self.reverse[rev_lookup].append(prefered_name) self.definitions[prefered_name] = definition return prefered_name attempts = 0 while attempts < 10: new_name = prefered_name + str(attempts) if new_name in rev_names: return new_name elif new_name not in self.definitions: self.reverse[rev_lookup].append(new_name) self.definitions[new_name] = definition return new_name attempts += 1 raise Exception('Failed to generate unique name for' ' model {}.{}'.format(prefered_name)) def iter_heading_contents(children): heading_tags = frozenset({'h1', 'h2', 'h3', 'h4'}) last_heading = None last_table = None last_paragraphs = [] expanded_children = [] for child in children: if child.name == 'div': div_children = child.contents child_tag_names = {c.name for c in div_children} if heading_tags & child_tag_names: expanded_children += div_children else: expanded_children.append(child) for child in expanded_children: if child.name in heading_tags: if last_paragraphs or last_table or last_heading: yield last_heading, last_table, last_paragraphs last_heading = child last_paragraphs = [] last_table = None elif not child.name: last_paragraphs.append(child) elif child.name == 'tr' and last_table: last_table.append(child) elif child.name == 'table': last_table = child elif child.find('table'): last_table = child.find('table') else: last_paragraphs.append(child) if last_paragraphs or last_table or last_heading: yield last_heading, last_table, last_paragraphs def safe_add(orig, *new): orig = dict(orig) for new_dict in new: for key, value in dict(new_dict).items(): if key in orig: if value != orig[key]: print('Warning. Key already defined, ', key) # from pprint import pformat # import difflib # print(''.join(difflib.ndiff( # pformat(orig[key]).splitlines(keepends=True), # pformat(value).splitlines(keepends=True), # ))) else: orig[key] = value return orig def definition_union(orig, *new): out = dict(orig) for new_dict in new: for key, value in dict(new_dict).items(): if key not in out: out[key] = value else: new_props = value['properties'] out_props = out[key]['properties'] out_props.update(new_props) return out def split_definition_paragraphs(paragraphs): paragraphs = iter(paragraphs) def_line = None lines = [] for para in paragraphs: if def_line is None: ptext = text(para) if ptext: def_line = ptext else: lines.append(para) assert def_line return def_line, paragraphs_to_markdown(*lines) def paragraphs_to_markdown(*paras, indent=0): paragraphs = [] for item in paras: if item.name in ['ul', 'ol']: lst = [] prefix = ' - ' if item.name == 'ul' else '1. ' for li in item.children: if li.name == 'li': lst.append(prefix + paragraphs_to_markdown( li, indent=indent + 3)) paragraphs.append('\n'.join(lst)) elif item.name is None or not (item.find('ul,ol')): paragraphs.append(text(item)) else: paragraphs.append(paragraphs_to_markdown( *item.children, indent=indent)) paragraphs = filter(lambda s: s.strip(), paragraphs) if indent != 0: new_paras = [] i_chars = ' ' * indent for para in paragraphs: para = '\n'.join(i_chars + line for line in para.splitlines()) new_paras.append(para) paragraphs = new_paras return '\n\n'.join(paragraphs) def text(*elms, one_line=True, strip=True, sep=' '): text_elms = [] for elm in elms: if elm.name is None: child_elms = [elm] else: child_elms = elm.children for e in child_elms: if isinstance(e, bs4.NavigableString): if isinstance(e, bs4.Comment): continue txt = str(e) txt = re.sub(r'[ \n\t]+', ' ', txt) text_elms.append(txt) elif e.name == 'br': text_elms.append(' ' if one_line else '\n') elif e.name not in ['script', 'style']: text_elms.append(text(e, one_line=one_line, strip=False)) text_elms.append(sep) t = ''.join(text_elms) t = re.sub(r'[ ]+', ' ', t) if not one_line: t = re.sub(r'[ ]*\n[ ]*', '\n', t) if strip: t = t.strip() return t async def generate_swagger_from_docs(session, url, definitions): KNOWN_BAD_HEADINGS = { 'Request Builder', 'Request', 'Response', 'Examples', 'Example XML Request (switch to JSON)', 'Example JSON Request (switch to XML)', 'Example XML Response (switch to JSON)', 'Example JSON Response (switch to XML)', } txt = None while txt is None: try: async with session.get(url) as o: txt = await o.text() except aiohttp.ServerDisconnectedError: txt = None print('Server disconnect for', url) continue txt = re.sub(r"""</table> </td> </tr> <p>\[/tm_private\]\s*</.*>(\n\s*</table>)?""", '<!-- [/tm_private] -->', txt) soup = BeautifulSoup(txt, 'html.parser') for selector in ['.site-tools', '.primary-tools', '.crumbs', '.sprite', '.site-footer', '.hide', '.site-header', '.xml-message', '.json-message', '#requestBuilderForm', '#responseBody']: for tag in soup.select(selector): tag.decompose() for tag_name in ['script', 'link', 'meta', 'style', 'pre']: for tag in soup.find_all(tag_name): tag.decompose() txt = soup.prettify() txt = re.sub(r""" </div> </div> </div> </div>""", '', txt) txt = re.sub(r'</(body|html)>', '', txt) soup = BeautifulSoup(txt, 'html.parser') # u = url.replace('https://developer.trademe.co.nz/api-reference/', '').replace('api-index/', '').replace('/', '-') content = soup.select('div.generated-content', limit=1)[0] content_iter = iter(iter_heading_contents(content.children)) path = { 'externalDocs': { 'description': 'Original TradeMe Documentation', 'url': url }, } params = [] metadata = None response = None for heading, table, paragraphs in content_iter: if heading is None: metadata = parse_metadata(table) path['produces'] = convert_supported_formats_to_mime( metadata['Supported Formats']) path['description'] = paragraphs_to_markdown(*paragraphs) continue heading_text = text(heading) if heading_text in ['URL parameters', 'Query String parameters']: if heading_text == 'URL parameters': in_type = 'path' elif heading_text == 'Query String parameters': in_type = 'query' else: raise Exception('Unkown Heading') params += parse_params(in_type, table) elif heading_text in ['POST Data', 'Returns']: name, desc = split_definition_paragraphs(paragraphs) dfn_obj = parse_type_format(name) dfn_ref = get_refname(dfn_obj) if dfn_ref: dfn_obj = parse_response(dfn_obj, desc, definitions, table=table) else: dfn_obj['description'] = desc if heading_text == 'POST Data': params += [{ 'in': 'body', 'schema': dfn_obj, }] elif heading_text == 'Returns': response = { 'description': desc, 'schema': dfn_obj, } else: raise Exception('Unkown Heading') elif heading_text in KNOWN_BAD_HEADINGS: continue else: print(heading_text) raise Exception() path['responses'] = safe_add({ '200': response, }, standard_responses) return { metadata['URL'].replace('https://api.trademe.co.nz/v1', ''): { metadata['HTTP Method'].lower(): path, 'parameters': params, } } def convert_supported_formats_to_mime(supported_formats): formats = map(str.strip, supported_formats.split(',')) format_mapping = { 'JSON': 'application/json', 'XML': 'text/xml' } mime_types = [] for fmt in formats: if fmt in format_mapping: mime_types.append(format_mapping[fmt]) elif fmt.upper() in format_mapping: mime_types.append(format_mapping[fmt.upper()]) else: raise ValueError('Unsupported format' + fmt) return mime_types def parse_metadata(table): data = {} for row in table.find_all('tr'): key = text(row.find('th')) value = text(row.find('td')) if key.endswith('?'): value = (value == 'Yes') key = key[:-1] data[key] = value return data def parse_params(in_type, table): table_iter = iter(iter_parse_nested_table(table)) params = [] for t, key, value, desc in table_iter: if t != 'kv': print('NOTKV', t, key, value, desc) raise Exception('not kv') data = parse_type_format(value) data['name'] = key data['description'] = desc if in_type: data['in'] = in_type if 'enum' in data: enum_row = next(table_iter) data = load_enum_into_item(enum_row, data) if '$ref' in data: print('Unsupported type', data['$ref']) raise Exception() params.append(data) return params def get_refname(data): try: return data.ref_name except AttributeError: return None def parse_response(dfn_obj, docs, definitions, *, table=None, table_iter=None): if table_iter is None: assert table is not None table_iter = iter(iter_parse_nested_table(table)) else: assert table is None table_iter = more_itertools.peekable(table_iter) this_dfn = {} for t, key, value, desc in table_iter: if t != 'kv': print('NOTKV', t, key, value, desc) print(this_dfn) raise Exception('Not KV') continue data = parse_type_format(value) ref_name = get_refname(data) data['description'] = desc if 'enum' in data: enum_row = next(table_iter) data = load_enum_into_item(enum_row, data) if 'enum' in data.get('items', []): enum_row = next(table_iter) data['items'] = load_enum_into_item(enum_row, data['items']) elif ref_name: if table_iter.peek([None])[0] == 'nested': t, _, values, _ = next(table_iter) if values is not None: data = parse_response(data, desc, definitions, table_iter=values) else: print('xx', table_iter.peek([None])) this_dfn[key] = data dfn_obj.ref_name = definitions.add_definition(get_refname(dfn_obj), { 'type': 'object', 'properties': this_dfn, }) return dfn_obj def iter_parse_nested_table(table): for row in filter(lambda e: e.name == 'tr', table.children): td = row.find('td') next_td = td.find_next_sibling('td') if td else None if not next_td: if td.find('table'): yield ('nested', None, iter_parse_nested_table(td.find('table')), None) else: assert text(td) == '(This type has already been defined)' yield ('nested', None, None, None) elif 'colspan' in next_td.attrs: yield ('enum', None, parse_enum_table(next_td.find('table')), None) elif row.find('th'): key = text(row.find('th')) value = text(td) description = text(next_td) yield ('kv', key, value, description) else: raise Exception() def parse_enum_table(table): return list(iter_parse_enum_table(table)) def iter_parse_enum_table(table): enum_values = set() for row in table.find_all('tr'): tds = row.find_all('td') if len(tds) == 2: name = text(tds[0]) value = None description = text(tds[1]) ev = name elif len(tds) == 3: name = text(tds[0]) value = text(tds[1]) description = text(tds[2]) ev = value else: continue if ev not in enum_values: enum_values.add(ev) yield (name, value, description) async def iter_api_index(session): url = 'https://developer.trademe.co.nz/api-reference/api-index/' async with session.get(url) as o: soup = BeautifulSoup(await o.text(), 'lxml') x = [] for item in soup.select('.content tr'): if '(deprecated)' in text(item): continue link = item.find('a') if link and 'href' in link.attrs: href = urljoin(url, link.attrs['href']) if '/api-reference/' in href: x.append(href) return x async def iter_api_methods_page(session, url): if not url.startswith('http'): url = 'https://developer.trademe.co.nz/api-reference/' + url + '/' async with session.get(url) as o: soup = BeautifulSoup(await o.text(), 'lxml') x = [] for item in soup.select('div.generated-content li'): if '(deprecated)' in text(item): continue link = item.find('a') if link and 'href' in link.attrs: href = urljoin(url, link.attrs['href']) if '/api-reference/' in href: x.append(href) return x async def download_swagger_for_urls(session, urls, definitions=None): if not definitions: definitions = DefinitionContainer() paths = {} urls = sorted(set(urls)) async with SizeBoundedTaskList(5) as tl: for url in urls: await tl.add_task(generate_swagger_from_docs( session, url, definitions )) for doc_task in tl.as_completed(): gen_path = await doc_task # TODO: union paths taking into account the http method and url. paths = safe_add(paths, gen_path) return paths, definitions.definitions async def main(): paths = {} async with aioutils.aiohttp.CachingClientSession( cache_strategy=aioutils.aiohttp.OnDiskCachingStrategy( cache_folder='./.cache') ) as session: # urls = await iter_api_index(session) # paths, defs = await download_swagger_for_urls(session, [ # 'https://developer.trademe.co.nz/api-reference/listing-methods/retrieve-the-details-of-a-single-listing/', # ]) paths, defs = await download_swagger_for_urls(session, [ 'https://developer.trademe.co.nz/api-reference/membership-methods/retrieve-member-profile/', # 'https://developer.trademe.co.nz/api-reference/search-methods/rental-search/', # 'https://developer.trademe.co.nz/api-reference/search-methods/flatmate-search/', ]) # _, extra_defs = await download_swagger_for_urls(session, ['https://developer.trademe.co.nz/api-reference/selling-methods/edit-an-item/']) for existing in ( # 'Address', # 'Agency', # 'Agent', # 'Attribute', # 'AttributeOption', # 'AttributeRange', # 'Bid', # 'BidCollection', # 'Branding', # 'BroadbandTechnology', # 'Charity', # 'ContactDetails', # 'CurrentShippingPromotion', # 'Dealer', # 'DealerShowroom', # 'DealershipPhoneNumbers', # 'Dealership', # 'DealershipListingCounts', # 'EmbeddedContent', # 'FixedPriceOfferDetails', # 'FixedPriceOfferRecipient', # 'GeographicLocation', # 'LargeBannerImage', # 'ListedItemDetail', # 'Member', # 'MemberRequestInformation', # 'MotorWebBasicReport', # 'OpenHome', # 'Option', # 'OptionSetValues', # 'OptionSet', # 'Photo', # 'PhotoUrl', # 'Question', # 'Questions', # 'RefundDetails', # 'Sale', # 'ShippingOption', # 'SimpleMemberProfile', # 'SponsorLink', # 'Store', # 'StorePromotion', 'Variant', # 'VariantDefinition', ): defs.pop(existing, None) print(list(defs)) swagger = { 'swagger': '2.0', 'info': { 'title': 'TradeMe API', 'version': '0.0', }, 'schemes': ['https'], 'host': 'api.trademe.co.nz', 'basePath': '/v1/', 'paths': paths, 'definitions': defs, } with open('swagger.json', 'w') as f: json.dump(swagger, f, sort_keys=True, indent=2, cls=TypesEncoder) with open('swagger.json') as f: names = set() for line in f: if '#/definitions/' in line: pos = line.index('#/definitions/') + len('#/definitions/') name = line[pos:-3] names.add(name) for name in sorted(names): if name not in defs: print(name) if __name__ == '__main__': asyncio.get_event_loop().run_until_complete(main())
Shapestones Ltd was founded with one main objective and that protects and ensure your Intellectual Property. IP shapes each stage of the movie production industry, from script to screen. We are the perfect solution to protect your creative assets from technical business espionage and personal criminal conduct. The core of the production industry where decision-making process taking place. When you need effective business TSCM counter-espionage survey, Shapestones provides world-class eavesdropping detection services. Services tailored to Unit Base life, focusing on details with a professional and skilled vision. We can ensure that a director’s or producer’s mobile offices, production trailers and, no less important, artist trailers are not compromised by breaches of privacy. TSCM bug sweep measures are essential to shield a film screening from eavesdropping and spy devices, especially if it has not been distributed yet. This service is focused on Test, Press or Preview Screening, from non-company-affiliated screening rooms to major West End cinemas.
import socorro.lib.ConfigurationManager as cm import datetime databaseHost = cm.Option() databaseHost.doc = 'the hostname of the database servers' databaseHost.default = '127.0.0.1' databaseName = cm.Option() databaseName.doc = 'the name of the database within the server' databaseName.default = 'socorro' databaseUserName = cm.Option() databaseUserName.doc = 'the user name for the database servers' databaseUserName.default = 'socorro' databasePassword = cm.Option() databasePassword.doc = 'the password for the database user' databasePassword.default = 'socorro' storageRoot = cm.Option() storageRoot.doc = 'the root of the file system where dumps are found' storageRoot.default = '/var/socorro/toBeProcessed/' deferredStorageRoot = cm.Option() deferredStorageRoot.doc = 'the root of the file system where dumps are found' deferredStorageRoot.default = '/var/socorro/toBeDeferred/' dumpDirPrefix = cm.Option() dumpDirPrefix.doc = 'dump directory names begin with this prefix' dumpDirPrefix.default = 'bp_' jsonFileSuffix = cm.Option() jsonFileSuffix.doc = 'the suffix used to identify a json file' jsonFileSuffix.default = '.json' dumpFileSuffix = cm.Option() dumpFileSuffix.doc = 'the suffix used to identify a dump file' dumpFileSuffix.default = '.dump' processorCheckInTime = cm.Option() processorCheckInTime.doc = 'the time after which a processor is considered dead (HH:MM:SS)' processorCheckInTime.default = "00:05:00" processorCheckInTime.fromStringConverter = lambda x: str(cm.timeDeltaConverter(x))
DESCRIPTION: A 18k gold and assembled diamond unity ring, features a Fancy light yellow diamond in the center weighing approx. 1.02 ct of (SI-1) Clarity. The bezel is embellished by round cut brilliant diamonds and positioned by two additional princess cut diamonds amounting to approx. .52cts of (SI-1 to I-1) Clarity and (F-G) Color. Marked by "18k" and "Hanna"
"""Simple collision check. This module provides simple collision checking appropriate for shmups. It provides routines to check whether two moving circles collided during the past frame. An equivalent C-based version will be used automatically if it was compiled and installed with the module. If available, it will be noted in the docstrings for the functions. Basic Usage: from bulletml.collision import collides for bullet in bullets: if collides(player, bullet): ... # Kill the player. """ from __future__ import division def overlaps(a, b): """Return true if two circles are overlapping. Usually, you'll want to use the 'collides' method instead, but this one can be useful for just checking to see if the player has entered an area or hit a stationary oject. (This function is unoptimized.) """ dx = a.x - b.x dy = a.y - b.y try: radius = a.radius + b.radius except AttributeError: radius = getattr(a, 'radius', 0.5) + getattr(b, 'radius', 0.5) return dx * dx + dy * dy <= radius * radius def collides(a, b): """Return true if the two moving circles collide. a and b should have the following attributes: x, y - required, current position px, py - not required, defaults to x, y, previous frame position radius - not required, defaults to 0.5 (This function is unoptimized.) """ # Current locations. xa = a.x xb = b.x ya = a.y yb = b.y # Treat b as a point, we only need one radius. try: radius = a.radius + b.radius except AttributeError: radius = getattr(a, 'radius', 0.5) + getattr(b, 'radius', 0.5) # Previous frame locations. try: pxa = a.px except KeyError: pxa = xa try: pya = a.py except KeyError: pya = ya try: pxb = b.px except KeyError: pxb = xb try: pyb = b.py except KeyError: pyb = yb # Translate b's final position to be relative to a's start. # And now, circle/line collision. dir_x = pxa + (xb - xa) - pxb dir_y = pya + (yb - ya) - pyb diff_x = pxa - pxb diff_y = pya - pyb if (dir_x < 0.0001 and dir_x > -0.0001 and dir_y < 0.0001 and dir_y > -0.0001): # b did not move relative to a, so do point/circle. return diff_x * diff_x + diff_y * diff_y < radius * radius # dot(diff, dir) / dot(dir, dir) t = (diff_x * dir_x + diff_y * dir_y) / (dir_x * dir_x + dir_y * dir_y) if t < 0: t = 0 elif t > 1: t = 1 dist_x = pxa - (pxb + dir_x * t) dist_y = pya - (pyb + dir_y * t) # dist_sq < radius_sq return dist_x * dist_x + dist_y * dist_y <= radius * radius def collides_all(a, others): """Filter the second argument to those that collide with the first. This is equivalent to filter(lambda o: collides(a, o), others), but is much faster when the compiled extension is available (which it is not currently). """ return filter(lambda o: collides(a, o), others) try: from bulletml._collision import collides, overlaps, collides_all except ImportError: pass
Slow Food’s African Food Gardens follow the philosophy of Good, Clean and Fair. But what does this mean in practice, and what makes them different from other food gardens? They are created by a community. They are based on observation. They do not need a large amount of space. They are places of biodiversity. They produce their own seeds. They are cultivated using sustainable methods. They are useful, but also fun. They are networked together . The Mhlotshana nutrition Community Garden, coordinated by Melamiseli Ncube, is located in Mambo village of the Bubi District, Matabeleland Nord. Read more about this garden . . . The Mupata Union for Rehabilitation of Ecosystem (MURE) Community Garden coordinated by Gladman Chibememe, who is also the national coordinator for the Slow Food 10,000 Gardens project in Zimbabwe. Read more about this garden . . . A further donation was made to help bring the water closer to the garden after Melamiseli explained that the closest well was 2km away and it was hard for women to carry the water that distance.
from context import pyloader import os import json import unittest _current = os.path.dirname(os.path.abspath(__file__)) paths = { 'writable': os.path.join(_current, 'downloads', 'write_access'), 'not_writable': os.path.join(_current, 'downloads', 'no_write_access') } class TestDLable(unittest.TestCase): def test_access_writable(self): try: pyloader.DLable( 'http://url.doesnt.matter', paths['writable'] ) self.assertTrue(True) except IOError: self.assertTrue(False) def test_access_writable_none_existant(self): try: pyloader.DLable( 'http://url.doesnt.matter', os.path.join(paths['writable'], 'sub') ) self.assertTrue(True) except IOError: self.assertTrue(False) def test_access_not_writeable(self): self.assertRaises(IOError, pyloader.DLable, 'http://url.doesnt.matter', paths['not_writable']) def test_serialize_proper(self): item = pyloader.DLable( 'http://url.doesnt.matter', paths['writable'] ) try: data = item.to_json() pyloader.DLable.from_json(data) self.assertTrue(True) except Exception: self.assertTrue(False) def test_serialize_missing_required(self): item = pyloader.DLable( 'http://url.doesnt.matter', paths['writable'] ) data = item.to_json() # Remove a required argument data = json.loads(data) del data['target_dir'] data = json.dumps(data) self.assertRaises(TypeError, pyloader.DLable.from_json, data) if __name__ == '__main__': unittest.main()
Animated 3D videos of the most common techniques in dentures and implantology: bridge, implant, bridge prosthesis, veneer, telescope technique, crown and inlay. Visualization of the assessment situation, presentation of the dental solution used and its application. Content: Seven dental procedures. Length 0:30 to 1:00 minutes.
import wx import math from BufferedCanvas import BufferedCanvas def gradient(min_color, max_color, fraction): return tuple(a + (b - a) * fraction for a, b in zip(min_color, max_color)) class WorldMap(BufferedCanvas): min_vote_color = (0, 128, 0) max_vote_color = (128, 0, 0) def __init__(self, parent): self._size = (600, 600) self._data = None BufferedCanvas.__init__(self, parent, size = self._size) self.SetBackgroundColour("white") self._agent_colors = {} self._complaints = [] self.num_max_complaints = 10 self.BufferBmp = None self.update() def set_map(self, map): self._data = {"map": map} def reset_votes(self): self._agent_colors = {} self._complaints = [] def add_votes(self, complaints): self._complaints = complaints votes_by_agent = {} for v in self._complaints: if not v.agent_id in votes_by_agent: votes_by_agent[v.agent_id] = 0 votes_by_agent[v.agent_id] += 1 for a in votes_by_agent: self._agent_colors[a] = gradient( WorldMap.min_vote_color, WorldMap.max_vote_color, float(votes_by_agent[a]) / self.num_max_complaints ) def draw(self, dc): dc.Clear() dc.SetBackground(wx.Brush(wx.Colour(255, 255, 255), wx.SOLID)) if self._data is None: return try: map = self._data["map"] num_hor, num_ver = (len(map.grid), len(map.grid[0])) w, h = self.GetSize() cell_w, cell_h = (float(w) / num_hor, float(h) / num_hor) grid_color = wx.Colour(0, 0, 0) dc.SetPen(wx.Pen(grid_color, 1)) # Draw grid ## Horizontal lines for x in xrange(num_hor + 1): dc.DrawLine(cell_w * x, 0, cell_w * x, h) ## Vertical lines for y in xrange(num_ver + 1): dc.DrawLine(0, cell_h * y, w, cell_h * y) fish_color = wx.Colour(0, 0, 255) fish_pen = wx.Pen(fish_color, 1) fish_brush = wx.Brush(fish_color, wx.SOLID) boat_color = wx.Colour(100, 100, 100) aquaculture_border_color = wx.Colour(0, 0, 0) aquaculture_fill_color = wx.Colour(200, 200, 200) aquaculture_pen = wx.Pen(aquaculture_border_color, 1) aquaculture_brush = wx.Brush(aquaculture_fill_color, wx.SOLID) land_color = wx.Colour(0, 255, 0) land_len = wx.Pen(land_color, 1) land_brush = wx.Brush(land_color, wx.SOLID) blocked_color = wx.Colour(255, 0, 0) # Draw entities for i in xrange(num_hor): for j in xrange(num_ver): x, y = (cell_w * i, cell_h * j) #if map.grid[i][j].spawning: draw_fish_top_left(dc, map.grid[i][j].quality, x, y, cell_w, cell_h, fish_pen, fish_brush) if map.grid[i][j].blocked: draw_blocked(dc, x, y, cell_w, cell_h, blocked_color) if map.grid[i][j].fisherman: color = next((self._agent_colors[e.id] for e in map.grid[i][j].fishermen if e.id in self._agent_colors), WorldMap.min_vote_color) draw_boat_bottom_right(dc, x, y, cell_w, cell_h, color, map.grid[i][j].num_fishermen) if map.grid[i][j].aquaculture: draw_aquaculture_center(dc, x + cell_w / 2, y + cell_h / 2, cell_w, cell_h, aquaculture_pen, aquaculture_brush) if map.grid[i][j].land: draw_land(dc, x, y, cell_w, cell_h, land_pen, land_brush) return True except Exception, e: print e return False def draw_blocked(dc, x, y, cell_w, cell_h, color): dc.SetPen(wx.Pen(color, 2)) dc.DrawLine(x, y, x + cell_w, y + cell_h) dc.DrawLine(x + cell_w, y, x, y + cell_h) def draw_land(dc, x, y, cell_w, cell_h, p, b): dc.SetPen(p) dc.SetBrush(b) dc.DrawRectangle(x, y, cell_w, cell_h) def draw_aquaculture_center(dc, x, y, cell_w, cell_h, p, b): scale = min(cell_w, cell_h) corners = 10 dc.SetPen(p) dc.SetBrush(b) points = [wx.Point( x + scale / 2 * math.sin(2 * math.pi * p / corners), y + scale / 2 * math.cos(2 * math.pi * p / corners) ) for p in xrange(corners)] dc.DrawPolygon(points) def draw_boat_center(dc, x, y, cell_w, cell_h, color, num): scale = min(cell_w, cell_h) dc.SetPen(wx.Pen(color, 1)) dc.SetBrush(wx.Brush(color, wx.SOLID)) # Draw boat bottom dc.DrawArc(x - scale / 3, y, x + scale / 3, y, x, y) # Draw sail dc.DrawPolygon([wx.Point(x - scale / 4, y - scale / 8), wx.Point(x + scale / 4, y - scale / 8), wx.Point(x, y - scale / 8 - scale / 4)]) # Draw mast dc.DrawLine(x, y, x, y - scale / 8) if num > 1: dc.SetFont(wx.Font( pointSize=scale/3, family=wx.FONTFAMILY_DEFAULT, style=wx.FONTSTYLE_NORMAL, weight=wx.FONTWEIGHT_BOLD)) dc.SetTextForeground(wx.Colour(255, 255, 125)) text = str(num) tw, th = dc.GetTextExtent(text) dc.DrawText(text, (x - tw / 2), (y + scale / 6 - th / 2)) def draw_boat_bottom_right(dc, x, y , cell_w, cell_h, color, num): scale = min(cell_w, cell_h) ox = cell_w - scale / 3 oy = cell_h - scale / 8 - cell_h / 4 draw_boat_center(dc, ox + x, oy + y, cell_w, cell_h, color, num) def draw_fish_center(dc, size, x, y, cell_w, cell_h, p, b): scale = min(cell_w, cell_h) * size dc.SetPen(p) dc.SetBrush(b) # Draw body dc.DrawArc(x - scale / 3, y, x + scale / 3, y, x, y - scale / 6) dc.DrawArc(x + scale / 3, y, x - scale / 3, y, x, y + scale / 6) ## right tip is located at (x + cell_w / 3, y) # Draw tail dc.DrawPolygon([wx.Point(x + scale / 3 + scale / 5, y - scale / 5), wx.Point(x + scale / 3, y), wx.Point(x + scale / 3 + scale / 5, y + scale / 5)]) def draw_fish_top_left(dc, size, x, y, cell_w, cell_h, p, b): # Offset from top left corner scale = min(cell_w, cell_h) ox = scale / 3 oy = scale / 5 draw_fish_center(dc, size, ox + x, oy + y, cell_w, cell_h, p, b)
Having been inspired by Alexander Calder during my studies at university, it was interesting to learn of his many other pieces of work at Tate Modern. Famous for his mobiles, Calder showed his talent as an artist from having brought up under his father and grandfather who were noted sculptors and his mother a painter. However, he studied mechanical engineering and spent several years in different jobs before studying painting. Alexander Calder with Snow Flurry l (1948), 1952, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London. Calder created a completely new way of sculpting when at the time sculptures were made from stone, bronze or wood. When you first step into the exhibition, many faces greet you as they hang from the ceiling. They seem somewhat illustrated as a line drawing which looks two dimensional but is also a three dimensional sculpture, this which critics defined as ‘drawing in space’. Fernand Léger c.1930 by Alexander Calder,, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London. The pieces range from animals to people which were very friendly to admire. With this technique, he constructed his own miniature circus performers and created ‘Cirque Calder’ to stage live shows in front of small invited audiences including Jean Cocteau, Joan Miro and Piet Mondrian. Blue Panel 1936 by Alexander Calder, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London. From this he combined movement and abstraction. In each piece, a painted wooden panel provides a backdrop in front of which coloured shapes are suspended, suggesting two-dimensional abstract painting that have taken kinetic three dimensional form. orm Against Yellow (Yellow Panel) 1936 by Alexander Calder, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London. The room where these painting/sculptures were in was one of the most interesting rooms as it was a collection that has not been on public view for decades and which I certainly have never seen before. It was interesting to hear the blowing of some observers around the exhibition to try and see some movement of the shapes. I loved how you are able to see that these are three dimensional sculptures when you step into the room but once you stand in front of it, it simply becomes a two dimensional painting. Gamma 1947 by Alexander Calder, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London. Triple Gong, ca. 1948 by Alexander Calder, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London. The biggest room contained many mobiles that Calder developed in the mid-1930s. They were elegantly and beautifully balanced all unique in their own way. One mobile was circular and white that it reminded me of falling snow and another had lots of different colours which somehow reminded me of falling leaves in Autumn. Alexander Calder with 21 feuilles blanches Paris 14e, Tate Modern. Photo credits: 2015 Calder Foundation, New York / DACS, London.
import csv from io import StringIO import numpy as np from sklearn.cluster import MiniBatchKMeans, KMeans from cdrgen.generate import CDRStream from cdrgen.sources import UniformSource, UserProfileSource, UserProfile, UserProfileChangeBehaviorSource from cdrgen.utils import asterisk_like, csv_to_cdr, time_of_day, day_of_week, window, grouper, RATES_1,\ it_merge, RATES_2, poisson_interval, moving_average_exponential, RATES_1m import matplotlib.pyplot as plt def test(source): s = CDRStream(asterisk_like, source) s.start() hours = np.zeros(24) l = [] days = 1 prev = 0 pattern = None for st in s: cdr = csv_to_cdr(list(csv.reader(StringIO(st), delimiter=','))[0]) if time_of_day(cdr.start) < prev: days += 1 if days > 14 and pattern is None: pattern = hours/days hours = np.zeros(24) if days > 14 and days%7 == 0: print(np.linalg.norm(hours/(days-14) - pattern)) h = time_of_day(cdr.start)//60//60 hours[h] += 1 prev = time_of_day(cdr.start) #l.append(h) fingerprint = hours/days print(fingerprint) users = {} # values needed to recalculate in real time to # minimize all values: alarms rate, history length ALPHA and ALARM_THRESHOLD ALARM_THRESHOLD = 1. # multiply limits ALPHA_FREQ = 0.8 # mean multipler ALPHA_WEEKS = 0.8 HISTORY = 2 # in weeks CURRENT_WINDOW = 5 # to approximate current frequency #===== MIN_THRESHOLD = 9.e-6 APPROX_WINDOW = 1 # to approximate weekly frequency TIME_DISCRETIZATION = 60*60 alarms = 0 class Pattern(object): def __init__(self, user): self.src = user self.data = np.zeros(shape=(HISTORY, 7, 24)) # patterns 24x7 (history and one current) self.current = np.zeros(CURRENT_WINDOW) self.week_history = np.zeros(shape=(7, (24*60*60)//(TIME_DISCRETIZATION//APPROX_WINDOW))) self.last_day_of_week = 0 self.weeks = 0 self.class_num = None def extract_week_history(self): return self.week_history def maintain(self, cdr): """ Maintaining should be continuous Calls have to be sorted by cdr.start time """ time = time_of_day(cdr.start)//(TIME_DISCRETIZATION//APPROX_WINDOW) day = day_of_week(cdr.start) if self.last_day_of_week != day and day == 0: # week switched self.data = np.roll(self.data, 1, axis=0) self.data[0] = self.extract_week_history() self.week_history = np.zeros(shape=(7, (24*60*60)//(TIME_DISCRETIZATION//APPROX_WINDOW))) self.weeks += 1 self.last_day_of_week = day self.current = np.roll(self.current, 1) # for instantaneous frequency self.current[0] = cdr.start # new freq calc current = np.roll(self.current, 1) current[0] = cdr.start diffs = np.array([e[0]-e[1] for e in zip(current, current[1:])]) current_freq = (60*60)/moving_average_exponential(diffs, ALPHA_FREQ) self.week_history[day, time] = max(self.week_history[day, time], current_freq) def is_conform(self, cdr): # FIXME: pattern should no check conforming, it's another task day = day_of_week(cdr.start) freq = self.get_pattern()[day][time_of_day(cdr.start)//60//60] current = np.roll(self.current, 1) current[0] = cdr.start diffs = np.array([e[0]-e[1] for e in zip(current, current[1:])]) current_freq = (60*60)/moving_average_exponential(diffs, ALPHA_FREQ) limits = poisson_interval(freq, 1-0.997) # float if not (current_freq <= max(1.0, limits[1]*ALARM_THRESHOLD)): print(freq, current_freq, max(1, limits[1]*ALARM_THRESHOLD), ) return current_freq <= max(1.0, limits[1]*ALARM_THRESHOLD) def is_converged(self): return self.weeks >= HISTORY # FIXME def alarm(self, cdr): global alarms alarms += 1 print("ALARM: user {} behavior changed".format(cdr.src)) def classify(self, class_num): self.class_num = class_num def get_pattern(self): return moving_average_exponential(self.data, ALPHA_WEEKS) def plot(self): row_labels = list('MTWTFSS') hours = list('0123456789AB') column_labels = ["{}am".format(x) for x in hours] + \ ["{}pm".format(x) for x in hours] data = self.get_pattern() fig, ax = plt.subplots() ax.pcolor(data.transpose(), cmap=plt.cm.Blues) # put the major ticks at the middle of each cell ax.set_xticks(np.arange(data.shape[0])+0.5, minor=False) ax.set_yticks(np.arange(data.shape[1])+0.5, minor=False) # want a more natural, table-like display ax.invert_yaxis() ax.xaxis.tick_top() ax.set_xticklabels(row_labels, minor=False) ax.set_yticklabels(column_labels, minor=False) plt.show() def plot_pattern(self): print(alarms) plt.plot(list(range(24)), self.get_pattern()[0], 'yo-') plt.plot(np.asarray(np.matrix(RATES_1[0])[:,0]).reshape(-1)//60//60, np.asarray(np.matrix(RATES_1[0])[:,1]).reshape(-1)*60*60, 'ro-') plt.show() def process(source): """ Simple processing """ s = CDRStream(asterisk_like, source) s.start() for st in s: cdr = csv_to_cdr(list(csv.reader(StringIO(st), delimiter=','))[0]) if not users.get(cdr.src): users[cdr.src] = Pattern(cdr.src) pattern = users[cdr.src] if pattern.is_converged() and not pattern.is_conform(cdr): pattern.alarm(cdr) pattern.maintain(cdr) def recalculate(time): patterns = [p for p in users.values() if p.is_converged()] if len(patterns) < 10: return X = np.matrix([x.get_pattern().ravel() for x in patterns]) km = KMeans(n_clusters=2, init='k-means++') km.fit(X) predicted = km.predict(X) print(predicted) for i,item in enumerate(predicted): patterns[i].classify(item) recalculate.km_time = time def process_2(source): s = CDRStream(asterisk_like, source) s.start() recalculate.km_time = 0 for st in s: cdr = csv_to_cdr(list(csv.reader(StringIO(st), delimiter=','))[0]) if not users.get(cdr.src): users[cdr.src] = Pattern(cdr.src) pattern = users[cdr.src] if pattern.is_converged() and not pattern.is_conform(cdr): pattern.alarm(cdr) pattern.maintain(cdr) if cdr.start - recalculate.km_time >= 24*60*60*7: # Once a week recalculate clusters recalculate(cdr.start) recalculate(cdr.start) def test_uniform(): test(UniformSource(0, 24*60*60, rate=0.1)) def test_daily(): # Авторегрессионное интегрированное скользящее среднее # https://docs.google.com/viewer?url=http%3A%2F%2Fjmlda.org%2Fpapers%2Fdoc%2F2011%2Fno1%2FFadeevEtAl2011Autoreg.pdf TIME = 24*60*60*7*4*2 p1 = [UserProfileSource(0, TIME, profile=UserProfile(RATES_1, 10, 0.1)) for x in range(10)] p2 = [UserProfileSource(0, TIME, profile=UserProfile(RATES_2, 10, 0.1)) for x in range(5)] profiles = p1 + p2 process_2(it_merge(*profiles, sort=lambda x: x[2])) def test_one(rates): TIME = 24*60*60*7*4*2 process_2(UserProfileSource(0, TIME, profile=UserProfile(rates, 10, 0.1))) list(users.values())[0].plot() def test_change(rates, rates2): TIME = 24*60*60*7*4*2 process_2(UserProfileChangeBehaviorSource(0, TIME, profile=UserProfile(rates, 10, 0.1), profile2=UserProfile(rates2, 10, 0.1), when_to_change=TIME//2)) def test_change_group(rates, rates2, rates3, rates4): TIME = 24*60*60*7*4*2 p1 = [UserProfileChangeBehaviorSource(0, TIME, profile=UserProfile(rates, 10, 0.1), profile2=UserProfile(rates2, 10, 0.1), when_to_change=TIME//2) for x in range(10)] p2 = [UserProfileChangeBehaviorSource(0, TIME, profile=UserProfile(rates3, 10, 0.1), profile2=UserProfile(rates4, 10, 0.1), when_to_change=TIME//2) for x in range(5)] profiles = p1 + p2 process_2(it_merge(*profiles, sort=lambda x: x[2])) print(alarms) if __name__ == "__main__": test_one(RATES_1m) #test_change(RATES_1, RATES_1m) #test_daily() #test_change_group(RATES_1, RATES_1m, RATES_2, RATES_2)
Anyone who would like to have a rewarding career putting smiles on people's faces and making a positive difference in their lives should look into RN programs in Illinois. Illinois is not only a fantastic place to live, but also a great state to train in to become a registered nurse. A registered nurse is in charge of basic patient care, like checking blood pressure readings, getting a patient's pulse, taking general notes for a doctor to read over, and helping the doctor with anything he or she needs during an examination. In some RN programs in Illinois, students learn about specific medical procedures that they may end up assisting with later. In order to become an RN by examination in Illinois, an individual must first complete an approved education program. The education program must include at least 1500 hours of direct patient care. Applicants for licensure by endorsement may obtain a temporary permit to practice for 6 months. Illinois RN licenses are valid for 2 years, and expire on May 31 of even numbered years. Applicants are required to complete 20 contact hours of continuing education in order to be eligible for renewal. By offering the most complete directory of nursing schools, NurseGroups.com helps Illinois residents find the best local RN programs, as well as the best online RN schools whose accreditation is accepted by the Illinois Board of Nursing.
# -*- coding: utf-8 -*- from flask import current_app, Markup from flask import json JSONEncoder = json.JSONEncoder RECAPTCHA_HTML = u''' <script src='https://www.google.com/recaptcha/api.js'></script> <div class="g-recaptcha" data-sitekey="%(public_key)s"></div> <noscript> <div style="width: 302px; height: 352px;"> <div style="width: 302px; height: 352px; position: relative;"> <div style="width: 302px; height: 352px; position: absolute;"> <iframe src="https://www.google.com/recaptcha/api/fallback?k=%(public_key)s" frameborder="0" scrolling="no" style="width: 302px; height:352px; border-style: none;"> </iframe> </div> <div style="width: 250px; height: 80px; position: absolute; border-style: none; bottom: 21px; left: 25px; margin: 0px; padding: 0px; right: 25px;"> <textarea id="g-recaptcha-response" name="g-recaptcha-response" class="g-recaptcha-response" style="width: 250px; height: 80px; border: 1px solid #c1c1c1; margin: 0px; padding: 0px; resize: none;" value=""> </textarea> </div> </div> </div> </noscript> ''' __all__ = ["RecaptchaWidget"] class RecaptchaWidget(object): def recaptcha_html(self, public_key): html = current_app.config.get('RECAPTCHA_HTML', RECAPTCHA_HTML) return Markup(html % dict( public_key=public_key )) def __call__(self, field, error=None, **kwargs): """Returns the recaptcha input HTML.""" try: public_key = current_app.config['RECAPTCHA_PUBLIC_KEY'] except KeyError: raise RuntimeError("RECAPTCHA_PUBLIC_KEY config not set") return self.recaptcha_html(public_key)
Regular price £0.00 Sale! £29.99 including VAT. Upgrade your Toyota Land Cruiser V8 2008-2012 with these chromed headlight surrounds. They fit in minutes adding an instant upgrade look to your vehicle. Fits the Toyota Land Cruiser V8 2008-2012 models.
# -*- coding: utf-8 -*- # Copyright (c) 2008/2013 Andrey Vlasovskikh # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. __all__ = ['make_tokenizer', 'Token', 'LexerError'] import re class LexerError(Exception): def __init__(self, place, msg): self.place = place self.msg = msg def __str__(self): s = 'cannot tokenize data' line, pos = self.place return '%s: %d,%d: "%s"' % (s, line, pos, self.msg) class Token(object): def __init__(self, type, value, start=None, end=None): self.type = type self.value = value self.start = start self.end = end def __repr__(self): return 'Token(%r, %r)' % (self.type, self.value) def __eq__(self, other): # FIXME: Case sensitivity is assumed here return self.type == other.type and self.value == other.value def _pos_str(self): if self.start is None or self.end is None: return '' else: sl, sp = self.start el, ep = self.end return '%d,%d-%d,%d:' % (sl, sp, el, ep) def __str__(self): s = "%s %s '%s'" % (self._pos_str(), self.type, self.value) return s.strip() @property def name(self): return self.value def pformat(self): return "%s %s '%s'" % (self._pos_str().ljust(20), self.type.ljust(14), self.value) def make_tokenizer(specs): """[(str, (str, int?))] -> (str -> Iterable(Token))""" def compile_spec(spec): name, args = spec return name, re.compile(*args) compiled = [compile_spec(s) for s in specs] def match_specs(specs, str, i, position): line, pos = position for type, regexp in specs: m = regexp.match(str, i) if m is not None: value = m.group() nls = value.count('\n') n_line = line + nls if nls == 0: n_pos = pos + len(value) else: n_pos = len(value) - value.rfind('\n') - 1 return Token(type, value, (line, pos + 1), (n_line, n_pos)) else: errline = str.splitlines()[line - 1] raise LexerError((line, pos + 1), errline) def f(str): length = len(str) line, pos = 1, 0 i = 0 while i < length: t = match_specs(compiled, str, i, (line, pos)) yield t line, pos = t.end i += len(t.value) return f # This is an example of a token spec. See also [this article][1] for a # discussion of searching for multiline comments using regexps (including `*?`). # # [1]: http://ostermiller.org/findcomment.html _example_token_specs = [ ('COMMENT', (r'\(\*(.|[\r\n])*?\*\)', re.MULTILINE)), ('COMMENT', (r'\{(.|[\r\n])*?\}', re.MULTILINE)), ('COMMENT', (r'//.*',)), ('NL', (r'[\r\n]+',)), ('SPACE', (r'[ \t\r\n]+',)), ('NAME', (r'[A-Za-z_][A-Za-z_0-9]*',)), ('REAL', (r'[0-9]+\.[0-9]*([Ee][+\-]?[0-9]+)*',)), ('INT', (r'[0-9]+',)), ('INT', (r'\$[0-9A-Fa-f]+',)), ('OP', (r'(\.\.)|(<>)|(<=)|(>=)|(:=)|[;,=\(\):\[\]\.+\-<>\*/@\^]',)), ('STRING', (r"'([^']|(''))*'",)), ('CHAR', (r'#[0-9]+',)), ('CHAR', (r'#\$[0-9A-Fa-f]+',)), ] #tokenize = make_tokenizer(_example_token_specs)
I have problems with a simbology in a linear layer. I defined the symbology using the "Categories -> Unique values, many fields" strategy in ArcMap. One of the two fields I am using is named STYLE and has only two values: VERTICAL and DEVIATED. VERTICAL defines lines which have vertical geometries, i.e. coincident XY coordinates and different Z coordinates. I defined the style in a way that is working fine in ArcMap, but after I published the mxd with this layer and this symbology to ArcGIS Server as a Map Service, the VERTICAL lines are not displayed. In ArcMap I zoom in to a "Dry, VERTICAL" feature and I see it correctly. The version of ArcMap is 10.6, that of ArcGIS Server is 10.6. There is an ESRI symbology style set called ESRI_Optimized which is optimized for web display--not all symbology available in ArcMap can be rendered on a web map service. Maybe the above read will help? Yuriko Hashimoto thanks for the hint. I don't know if it's a problema related to the style I am using: in facts, I can see the same symbols showing up properly when lines are not vertical both in desktiop and server. I am guessing is another bug related to ArcGIS dealing with vertical geometries (as in my other thread). Of course, I will give the ESRI_Optimized style a try when I can and come back if I discover something new.
#! /usr/bin/python2 """ hub_ctrl.py - a tool to control port power/led of USB hub Copyright (C) 2006, 2011 Free Software Initiative of Japan Author: NIIBE Yutaka <gniibe@fsij.org> This file is a part of Gnuk, a GnuPG USB Token implementation. Gnuk is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Gnuk is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Modified 2014 Paul Adams - updated to be compatible with pyusb 1.0.0b1 """ import usb.core USB_RT_HUB = (usb.TYPE_CLASS | usb.RECIP_DEVICE) USB_RT_PORT = (usb.TYPE_CLASS | usb.RECIP_OTHER) USB_PORT_FEAT_RESET = 4 USB_PORT_FEAT_POWER = 8 USB_PORT_FEAT_INDICATOR = 22 USB_DIR_IN = 0x80 # device to host COMMAND_SET_NONE = 0 COMMAND_SET_LED = 1 COMMAND_SET_POWER = 2 HUB_LED_GREEN = 2 def find_hubs(listing, verbose, busnum=None, devnum=None, hub=None): number_of_hubs_with_feature = 0 hubs = [] devices = usb.core.find(find_all=True, bDeviceClass=usb.CLASS_HUB) for dev in devices: printout_enable = 0 if (listing or (verbose and ((dev.bus == busnum and dev.address == devnum) or hub == number_of_hubs_with_feature))): printout_enable = 1 desc = None # Get USB Hub descriptor desc = dev.ctrl_transfer(USB_DIR_IN | USB_RT_HUB, usb.REQ_GET_DESCRIPTOR, wValue = usb.DT_HUB << 8, wIndex = 0, data_or_wLength = 1024, timeout = 1000) if not desc: continue # desc[3] is lower byte of wHubCharacteristics if (desc[3] & 0x80) == 0 and (desc[3] & 0x03) >= 2: # Hub doesn't have features of controling port power/indicator continue if printout_enable: print "Hub #%d at %s:%03d" % (number_of_hubs_with_feature, dev.bus, dev.address) if (desc[3] & 0x03) == 0: print " INFO: ganged power switching." elif (desc[3] & 0x03) == 1: print " INFO: individual power switching." elif (desc[3] & 0x03) == 2 or (desc[3] & 0x03) == 3: print " WARN: no power switching." if (desc[3] & 0x80) == 0: print " WARN: Port indicators are NOT supported." hubs.append({ 'busnum' : dev.bus, 'devnum' : dev.address, 'indicator_support' : (desc[3] & 0x80) == 0x80, 'dev' : dev, 'num_ports' : desc[2] }) number_of_hubs_with_feature += 1 return hubs def hub_port_status(handle, num_ports): print " Hub Port Status:" for i in range(num_ports): port = i + 1 status = handle.ctrl_transfer(USB_RT_PORT | usb.ENDPOINT_IN, usb.REQ_GET_STATUS, wValue = 0, wIndex = port, data_or_wLength = 4, timeout = 1000) print " Port %d: %02x%02x.%02x%02x" % (port, status[3], status[2], status[1], status[0]), if status[1] & 0x10: print " indicator", if status[1] & 0x08: print " test" , if status[1] & 0x04: print " highspeed", if status[1] & 0x02: print " lowspeed", if status[1] & 0x01: print " power", if status[0] & 0x10: print " RESET", if status[0] & 0x08: print " oc", if status[0] & 0x04: print " suspend", if status[0] & 0x02: print " enable", if status[0] & 0x01: print " connect", print import sys COMMAND_SET_NONE = 0 COMMAND_SET_LED = 1 COMMAND_SET_POWER = 2 HUB_LED_GREEN = 2 def usage(progname): print >> sys.stderr, """Usage: %s [{-h HUBNUM | -b BUSNUM -d DEVNUM}] [-P PORT] [{-p [VALUE]|-l [VALUE]}] """ % progname def exit_with_usage(progname): usage(progname) exit(1) if __name__ == '__main__': busnum = None devnum = None listing = False verbose = False hub = None port = 1 cmd = COMMAND_SET_NONE if len(sys.argv) == 1: listing = True else: try: while len(sys.argv) >= 2: option = sys.argv[1] sys.argv.pop(1) if option == '-h': if busnum != None or devnum != None: exit_with_usage(sys.argv[0]) hub = int(sys.argv[1]) sys.argv.pop(1) elif option == '-b': busnum = int(sys.argv[1]) sys.argv.pop(1) elif option == '-d': devnum = int(sys.argv[1]) sys.argv.pop(1) elif option == '-P': port = int(sys.argv[1]) sys.argv.pop(1) elif option == '-l': if cmd != COMMAND_SET_NONE: exit_with_usage(sys.argv[0]) if len(sys.argv) > 1: value = int(sys.argv[1]) sys.argv.pop(1) else: value = HUB_LED_GREEN cmd = COMMAND_SET_LED elif option == '-p': if cmd != COMMAND_SET_NONE: exit_with_usage(sys.argv[0]) if len(sys.argv) > 1: value = int(sys.argv[1]) sys.argv.pop(1) else: value = 0 cmd = COMMAND_SET_POWER elif option == '-v': verbose = True #if len(sys.argv) == 1: # listing = True else: exit_with_usage(sys.argv[0]) except: exit_with_usage(sys.argv[0]) if ((busnum != None and devnum == None) or (busnum == None and devnum != None)): exit_with_usage(sys.argv[0]) if hub == None and busnum == None: hub = 0 # Default hub = 0 #if cmd == COMMAND_SET_NONE: # cmd = COMMAND_SET_POWER hubs = find_hubs(listing, verbose, busnum, devnum, hub) if len(hubs) == 0: print >> sys.stderr, "No hubs found." exit(1) if listing: exit(0) if hub == None: for h in hubs: if h['busnum'] == busnum and h['devnum'] == devnum: dev_hub = h['dev'] nports = h['num_ports'] else: dev_hub = hubs[hub]['dev'] nports = hubs[hub]['num_ports'] if cmd != COMMAND_SET_NONE: if cmd == COMMAND_SET_POWER: feature = USB_PORT_FEAT_POWER index = port if value: request = usb.REQ_SET_FEATURE else: request = usb.REQ_CLEAR_FEATURE else: request = usb.REQ_SET_FEATURE feature = USB_PORT_FEAT_INDICATOR index = (value << 8) | port if verbose: print "Send control message (REQUEST=%d, FEATURE=%d, INDEX=%d) " % (request, feature, index) dev_hub.ctrl_transfer(USB_RT_PORT, request, wValue = feature, wIndex=index, data_or_wLength=None, timeout=1000) if verbose: hub_port_status(dev_hub, nports)
Due to personal experiences with family members who didn’t have a plan or Long-Term Care Insurance (LTCI), my wife and I chose to get covered and to do so early on. Peace of mind. I don’t have to worry about how I will pay for care when I need it someday. I learned that we have a 70% chance of needing long-term care in our lifetime once we reach the age 65 (U.S. Department of Health and Human Services, 2016). Women’s chances are even higher, 79% at age 65 and older will need some form of long-term care (Connell, 2012). It is now a matter of when I will use my LTCI policy, not if. Medicare won’t help me. Many people think that Medicare will cover their care needs later in life…this is not true. Medicare will only cover up to a maximum of 100 days in a skilled nursing facility ONLY IF you qualify with a 3-day hospital stay (Centers for Medicare & Medicaid Services, 2016). I learned that this is not easy to qualify for. A family member of mine recently fell and broke a hip and went into the hospital. One would think that they would qualify for a 3-day stay but that wasn’t the case. After one day she was discharged into a nursing home and Medicare did not pay for any portion of the stay. Choice and independence. This being one of the biggest advantages of having LTCI. Having the ability to choose where and how I receive care. I have the ability to choose if I want my policy benefits to pay family members or a nurse to care for me at home, or allow my family to choose which facility I will be cared for in when the time comes. Another family member of mine who suffered from Alzheimer’s had fortunately prepared in advance and purchase LTCI. They were able to bring in family and friends to help when they needed it. Eventually, when things became too hard and unsafe to be at home, they were able to choose the best facility for my family member without having to worry about the financial factors: what it would cost, how nice of a facility it was, whether or not it was a single or double-occupancy room…all because they had LTCI to help pay for it. Note: Not all LTCI policies have the same benefits so make sure to work with an expert in LTCI planning to help you find the right fit. Asset protection. We have worked so hard our entire lives to build up our legacy. To lose a great chunk (or more) of money and assets after all this time would be detrimental to me and my family. The national average cost of a double-occupancy Nursing Home room is $86,764 per year. (LTCG, 2016) While investigating LTCI, I learned that “about 3 of every 5 personal bankruptcies in the U.S. are a direct result of overwhelming medical expenses – and 79% of these people had traditional health insurance”. (National Clinical Research Study, 2008) LTCI provides protection against depletion of assets and allows families to pay a small amount per month to protect against a catastrophic loss later in life. LTCI has certain protections built in that allow families to legally protect assets after their LTCI has paid. LTCI gives me the peace of mind knowing that I am planning for a time later in life when my family will not have to think about paying for my care or caring for me, rather, they can be there to help support me as a family. Having an LTCI policy is like the last love letter I can write to my family.
# coding: utf-8 r"""Fixing methods for shapes, faces, tolerance, continuity and curve resampling """ import logging # import ast from OCC.Core.GCPnts import GCPnts_UniformDeflection from OCC.Core.GeomAbs import GeomAbs_C2 from OCC.Core.GeomAbs import GeomAbs_C0, GeomAbs_C1 # dynamically built code from OCC.Core.GeomAPI import GeomAPI_PointsToBSpline from OCC.Core.ShapeFix import ShapeFix_ShapeTolerance, ShapeFix_Shape, \ ShapeFix_Face from OCC.Core.ShapeUpgrade import ShapeUpgrade_ShapeDivideContinuity from aocutils.tolerance import OCCUTILS_DEFAULT_TOLERANCE, \ OCCUTILS_FIXING_TOLERANCE from aocutils.common import AssertIsDone from aocutils.collections import point_list_to_tcolgp_array1_of_pnt from aocutils.geom.curve import Curve logger = logging.getLogger(__name__) def fix_shape(shp, tolerance=OCCUTILS_FIXING_TOLERANCE): r"""Fix a shape Parameters ---------- shp : OCC.TopoDS.TopoDS_Shape tolerance : float Returns ------- OCC.TopoDS.TopoDS_Shape """ fix = ShapeFix_Shape(shp) # Returns (modifiable) the mode for applying fixes of ShapeFix_Shell, # by default True fix.SetFixFreeShellMode(True) sf = fix.FixShellTool().GetObject() sf.SetFixOrientationMode(True) fix.LimitTolerance(tolerance) fix.Perform() # Iterates on sub- shape and performs fixes. return fix.Shape() def fix_face(face, tolerance=OCCUTILS_FIXING_TOLERANCE): r"""Fix a face This operator allows to perform various fixes on face and its wires: - fixes provided by ShapeFix_Wire, - fixing orientation of wires, - addition of natural bounds, - fixing of missing seam edge, - detection and removal of null-area wires. Parameters ---------- face : ShapeFix_Face tolerance : float Returns ------- OCC.TopoDS.TopoDS_Face """ fix = ShapeFix_Face(face) fix.SetMaxTolerance(tolerance) # Performs all the fixes, depending on modes # Function Status returns the status of last call to Perform() # ShapeExtend_OK : face was OK, nothing done # ShapeExtend_DONE1: some wires are fixed # ShapeExtend_DONE2: orientation of wires fixed # ShapeExtend_DONE3: missing seam added # ShapeExtend_DONE4: small area wire removed # ShapeExtend_DONE5: natural bounds added # ShapeExtend_FAIL1: some fails during fixing wires # ShapeExtend_FAIL2: cannot fix orientation of wires # ShapeExtend_FAIL3: cannot add missing seam # ShapeExtend_FAIL4: cannot remove small area wire. fix.Perform() return fix.Face() # assumes no FixMissingSeam involved def fix_tolerance(shape, tolerance=OCCUTILS_DEFAULT_TOLERANCE): r"""Sets (enforces) tolerances in a shape to the given value. Modifies tolerances of sub-shapes (vertices, edges, faces) Parameters ---------- shape : OCC.TopoDS.TopoDS_Shape tolerance : float """ # void SetTolerance (const TopoDS_Shape &shape, const Standard_Real preci, # const TopAbs_ShapeEnum styp=TopAbs_SHAPE) const ShapeFix_ShapeTolerance().SetTolerance(shape, tolerance) def fix_continuity(edge, continuity=1): r"""Fix the continuity of an edge Parameters ---------- edge : OCC.TopoDS.TopoDS_Edge continuity : int Returns ------- OCC.TopoDS.TopoDS_Shape The upgrade resulting shape """ # ShapeUpgrade_ShapeDivideContinuity : # API Tool for converting shapes with C0 geometry into C1 ones shape_upgrade = ShapeUpgrade_ShapeDivideContinuity(edge) continuity_constant = eval('GeomAbs_C' + str(continuity)) shape_upgrade.SetBoundaryCriterion(continuity_constant) shape_upgrade.Perform() return shape_upgrade.Result() def resample_curve_uniform_deflection(curve, deflection=0.5, degree_min=3, degree_max=8, continuity=GeomAbs_C2, tolerance=OCCUTILS_DEFAULT_TOLERANCE): r"""Fits a bspline through the samples on curve Parameters ---------- curve : OCC.TopoDS.TopoDS_Wire, OCC.TopoDS.TopoDS_Edge, curve deflection : float degree_min : int degree_max : int continuity : GeomAbs_C* tolerance : float Returns ------- OCC.Geom.Geom_Curve The resampled curve """ # crv = aocutils.convert.adapt.to_adaptor_3d(curve) crv = Curve(curve).to_adaptor_3d() defl = GCPnts_UniformDeflection(crv, deflection) with AssertIsDone(defl, 'failed to compute UniformDeflection'): logger.info('Number of points : %i' % defl.NbPoints()) sampled_pnts = [defl.Value(i) for i in range(1, defl.NbPoints())] resampled_curve = GeomAPI_PointsToBSpline( point_list_to_tcolgp_array1_of_pnt(sampled_pnts), degree_min, degree_max, continuity, tolerance) return resampled_curve.Curve().GetObject()
Early childhood education plays a significant role in supporting students’ growth in areas such as physical health and well-being, social competence, emotional maturity, language and cognitive development, and communications skills and general knowledge. However, as young people move up through the education system, we pay less attention to these skills and competencies, even though they continue to be vital long into adolescence and adulthood. In this session, participants found out more about the role of early childhood education, and why the competencies identified in the Early Development Instrument (EDI) need to be a focus for all students. Magdalena Janus is a professor at McMaster University’s Department of Psychiatry and Behavioural Neurosciences and the Offord Centre for Child Studies, where she holds the Ontario Chair in Early Child Development. With the late Dr. Dan Offord, Magdalena co-developed the Early Development Instrument (EDI), a measure of children’s readiness to learn at school entry. Since then, Magdalena and her team led the implementation of the EDI in Canada – which has now surpassed 1 million children, garnered the support of governments across the country, and created a data source on early child development widely used by communities, educators, and researchers. Magdalena regularly serves as a consultant with various national and international organizations, such as the World Bank, WHO, and UNICEF, on the measurement and indicators of early child development. Lois is proud to be a Registered Early Childhood Educator. Awarded the Andrew Fleck Services award for the “100 most influential people in child care”, she is a pioneer in the children’s services industry, and continues to work diligently in her advocacy for quality, accessible child care. As the founding president of the College of Early Childhood Educators, Lois understands the importance of self-regulation as a significant component of early learning and care. She travels the province – training, guiding, and supporting leadership in the sector and development of a quality system. Lois was also the founding Executive Director of Child and Family Resources, a multi service agency in northern Ontario.
# Copyright 2018 Twitter, Inc. # Licensed under the MIT License # https://opensource.org/licenses/MIT from tweet_parser.tweet_checking import is_original_format def get_tweet_links(tweet): """ Get the links that are included in the Tweet as "urls" (if there are no links in the Tweet, this returns an empty list) This includes links that are included in quoted or retweeted Tweets Returns unrolled or expanded_url information if it is available Args: tweet (Tweet): A Tweet object (must be a Tweet obj, not a dict) Returns: list (list of dicts): A list of dictionaries containing information about urls. Each dictionary entity can have these keys; without unwound url or expanded url Twitter data enrichments many of these fields will be missing. \n More information about the Twitter url enrichments at: http://support.gnip.com/enrichments/expanded_urls.html and http://support.gnip.com/enrichments/enhanced_urls.html Example: >>> result = [ ... { ... # url that shows up in the tweet text ... 'display_url': "https://twitter.com/RobotPrinc...", ... # long (expanded) url ... 'expanded_url': "https://twitter.com/RobotPrincessFi", ... # characters where the display link is ... 'indices': [55, 88], ... 'unwound': { ... # description from the linked webpage ... 'description': "the Twitter profile of RobotPrincessFi", ... 'status': 200, ... # title of the webpage ... 'title': "the Twitter profile of RobotPrincessFi", ... # long (expanded) url} ... 'url': "https://twitter.com/RobotPrincessFi"}, ... # the url that tweet directs to, often t.co ... 'url': "t.co/1234"}] """ if is_original_format(tweet): # get the urls from the Tweet try: tweet_urls = tweet["entities"]["urls"] except KeyError: tweet_urls = [] # get the urls from the quote-tweet if tweet.quoted_tweet is not None: tweet_urls += tweet.quoted_tweet.tweet_links # get the urls from the retweet if tweet.retweeted_tweet is not None: tweet_urls += tweet.retweeted_tweet.tweet_links return tweet_urls else: # try to get normal urls try: tweet_urls = tweet["twitter_entities"]["urls"] except KeyError: tweet_urls = [] # get the urls from the quote-tweet if tweet.quoted_tweet is not None: tweet_urls += tweet.quoted_tweet.tweet_links # get the urls from the retweet if tweet.retweeted_tweet is not None: tweet_urls += tweet.retweeted_tweet.tweet_links # otherwise, we're now going to combine the urls to try to # to get the same format as the og format urls, try to get enriched urls try: gnip_tweet_urls = {x["url"]: x for x in tweet["gnip"]["urls"]} gnip_tweet_exp_urls = {x["expanded_url"]: x for x in tweet["gnip"]["urls"]} except KeyError: return tweet_urls key_mappings = {"expanded_url": "url", "expanded_status": "status", "expanded_url_title": "title", "expanded_url_description": "description"} tweet_urls_expanded = [] for url in tweet_urls: expanded_url = url if url["url"] in gnip_tweet_urls: expanded_url["unwound"] = {key_mappings[key]: value for key, value in gnip_tweet_urls[url["url"]].items() if key != "url"} elif url.get("expanded_url", "UNAVAILABLE") in gnip_tweet_exp_urls: expanded_url["unwound"] = {key_mappings[key]: value for key, value in gnip_tweet_urls[url["expanded_url"]].items() if key != "url"} tweet_urls_expanded.append(expanded_url) return tweet_urls_expanded def get_most_unrolled_urls(tweet): """ For each url included in the Tweet "urls", get the most unrolled version available. Only return 1 url string per url in tweet.tweet_links In order of preference for "most unrolled" (keys from the dict at tweet.tweet_links): \n 1. `unwound`/`url` \n 2. `expanded_url` \n 3. `url` Args: tweet (Tweet): A Tweet object or dict Returns: list (list of strings): a list of the most unrolled url available """ unrolled_urls = [] for url in get_tweet_links(tweet): if url.get("unwound", {"url": None}).get("url", None) is not None: unrolled_urls.append(url["unwound"]["url"]) elif url.get("expanded_url", None) is not None: unrolled_urls.append(url["expanded_url"]) else: unrolled_urls.append(url["url"]) return unrolled_urls
As managers, we help our artists crystalize their vision and turn it into reality. We do this by connecting the dots. By providing strategic and artistic guidance. By building teams around them and setting up infrastructure. And by shielding them from the world when needed. We empower them to do what they do best: create great art.
""" API views for the geohashing app """ import datetime from rest_framework import status from rest_framework.decorators import api_view from rest_framework.response import Response from .models import Day from . import serializers @api_view(['GET',]) def get_geohash(request, year, month, day, format=None): """ Return the geohash for a given day if we have a record for it. Will return '400 Bad Request' if year, month and day don't combine to form a valid date. Will return '404 Not Found' if geohash data for this date is not found. """ try: year = int(year) month = int(month) day = int(day) geohash_date = datetime.date(year, month, day) except (ValueError, TypeError): return Response(status=status.HTTP_400_BAD_REQUEST) try: geohash = Day.objects.get(geohash_date=geohash_date) except Day.DoesNotExist: return Response(status=status.HTTP_404_NOT_FOUND) serializer = serializers.DaySerializer(geohash) return Response(serializer.data)
On today’s menu: business cards. Business Cards – so useful, but so useless! Today, I’m tackling an issue that I’ve had for quite a while: what the hell do I do with all of the business cards that I’m handed at events. They disappear SO FAST! Plus, the ones I have tend to be of little value when I’m digging them out of my bag or my desk several months later. Adam, you probably didn’t expect me to find your business card this interesting! So I needed a way to save the information on those business cards, and in a timely fashion. It turns out that the key is to use Evernote. 2) Save the image in Evernote as a “Business Card” – as opposed to a photo or a document. Wait while Evernote converts the business card into a contact. This takes seconds. While it does that Evernote also checks to see if the person is on LinkedIn. If they are, the person’s LinkedIn information, including photo, are added to your Evernote contact. 3) Once you have the information, you can use the image of the business card, which is helpfully displayed above, to fill in any missing info. 4) Once you’re done, save the contact! You can set up Evernote to save business cards to a specific notebook, and even to your phone’s native contacts app, in Settings. Several additional options pop up: if the person’s LinkedIn profile appears, you have the option to connect with them. You have the option of emailing your contact right from the app to share your contact information – great for networking events. Once you’ve saved the business card, you can set reminders for follow-up through Evernote. Seriously, business cards have become useful again. What’s better, you only need to have the card in your possession for about a minute.
#!/usr/bin/env python # ======================================================================== # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from hadoop.io.SequenceFile import CompressionType from hadoop.io import Text from hadoop.io import SequenceFile def writeData(writer): key = Text() value = Text() key.set('Key') value.set('Value') writer.append(key, value) if __name__ == '__main__': writer = SequenceFile.createWriter('test.seq', Text, Text) writeData(writer) writer.close()
If you are looking for a full service tree care company in Atglen Pennsylvania, look no further than Pro Tree. For nearly a quarter century, our friendly professionals have been providing top quality tree services to property owners throughout Delaware and Southeastern Pennsylvania. Many of our clients are contractors and developers and have large areas of land to clear. We have specialized expertise in this area with top-of-the-line equipment and in-depth knowledge of the best practices for lot and land clearing. This is a big reason why many in the construction industry turn to us when they have especially large jobs that other Atglen PA tree care companies do not want to take on. One of our most popular Atglen PA services is tree trimming. During this process, we remove excess branches from the tree that may be dead, damaged or diseased. This allows the tree to grow healthier and stronger. It is important to remember that trimming a tree arbitrarily may often do more harm than good. Trees contribute a great deal to our properties and the overall environment. In fact, healthy trees often add significant value to a piece of real estate. They also enhance the beauty of the property and give us shaded areas and lower temps when the summer heat kicks in. Pro Tree is proud to offer a Atglen PA tree maintenance service that provides complete protection for these valuable assets.
import argparse import sys from twisted.python import log from twisted.internet import reactor from twisted.web.server import Site from twisted.web.static import File from autobahn.websocket import listenWS from autobahn.wamp import WampServerFactory, \ WampServerProtocol parser = argparse.ArgumentParser( "Basic autobahn pubsub server", formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument("-b", "--base_uri", type=str, help="autobahn prefix uri to use", default="http://autobahn-pubsub/channels/") parser.add_argument("-d", "--debug", action='store_true', help="whether to enable debugging", default=False) parser.add_argument("-u", "--websocket_url", type=str, help="autobahn websocket url to use", default="ws://localhost:9000") ARGS = parser.parse_args() class PubSubServer(WampServerProtocol): def onSessionOpen(self): self.registerForPubSub(ARGS.base_uri, True) if __name__ == '__main__': log.startLogging(sys.stdout) factory = WampServerFactory(ARGS.websocket_url, debugWamp=ARGS.debug) factory.protocol = PubSubServer factory.setProtocolOptions(allowHixie76 = True) listenWS(factory) webdir = File(".") web = Site(webdir) reactor.listenTCP(8080, web) reactor.run()
Professional Expert Advisors for MT4 (MetaTrader 4 ) platform on MQL4 language. Professional Expert Advisors for MT5 (MetaTrader5 ) platform on MQL5 language. Forex Account Copiers: MT4 to MT4, MT4 to MT5, MT5 to MT5, MT4 to JForex, MT4 to Currenex, LMAX, ADS Securities, Fortex (Marex), MIG Bank, CitiFX, SwissCode.
print "|--------------------------------------------|" print "| Starting Gaze Demo |" print "|--------------------------------------------|" # Add asset paths scene.addAssetPath('mesh', 'mesh') scene.addAssetPath("script", "behaviorsets") scene.addAssetPath('motion', 'ChrBrad') scene.addAssetPath('motion', 'ChrRachel') scene.addAssetPath('script','scripts') scene.addAssetPath('script','examples') scene.loadAssets() # Set scene parameters and camera print 'Configuring scene parameters and camera' scene.setScale(1.0) scene.setBoolAttribute('internalAudio', True) scene.run('default-viewer.py') camera = getCamera() camera.setEye(0, 1.68, 2.58) camera.setCenter(0, 0.89, -0.14) camera.setUpVector(SrVec(0, 1, 0)) camera.setScale(1) camera.setFov(1.0472) camera.setFarPlane(100) camera.setNearPlane(0.1) camera.setAspectRatio(0.966897) scene.getPawn('camera').setPosition(SrVec(0, -5, 0)) # Set joint map for Brad and Rachel print 'Setting up joint map for Brad and Rachel' scene.run('zebra2-map.py') zebra2Map = scene.getJointMapManager().getJointMap('zebra2') bradSkeleton = scene.getSkeleton('ChrBrad.sk') zebra2Map.applySkeleton(bradSkeleton) zebra2Map.applyMotionRecurse('ChrBrad') rachelSkeleton = scene.getSkeleton('ChrRachel.sk') zebra2Map.applySkeleton(rachelSkeleton) zebra2Map.applyMotionRecurse('ChrRachel') # Setting up Brad and Rachel print 'Setting up Brad' brad = scene.createCharacter('ChrBrad', '') bradSkeleton = scene.createSkeleton('ChrBrad.sk') brad.setSkeleton(bradSkeleton) bradPos = SrVec(.35, 0, 0) brad.setPosition(bradPos) brad.setHPR(SrVec(-17, 0, 0)) brad.createStandardControllers() # Deformable mesh brad.setVec3Attribute('deformableMeshScale', .01, .01, .01) brad.setStringAttribute('deformableMesh', 'ChrBrad.dae') # setup gestures scene.run('BehaviorSetGestures.py') setupBehaviorSet() retargetBehaviorSet('ChrBrad') bml.execBML('ChrBrad', '<body posture="ChrBrad@Idle01" ready="0" relax="0"/>') print 'Setting up Rachel' rachel = scene.createCharacter('ChrRachel', '') rachelSkeleton = scene.createSkeleton('ChrRachel.sk') rachel.setSkeleton(rachelSkeleton) rachelPos = SrVec(-.35, 0, 0) rachel.setPosition(rachelPos) rachel.setHPR(SrVec(17, 0, 0)) rachel.createStandardControllers() # Deformable mesh rachel.setVec3Attribute('deformableMeshScale', .01, .01, .01) rachel.setStringAttribute('deformableMesh', 'ChrRachel.dae') # setup gestures scene.run('BehaviorSetFemaleGestures.py') setupBehaviorSet() retargetBehaviorSet('ChrRachel') bml.execBML('ChrRachel', '<body posture="ChrConnor@IdleStand01" ready=".2" relax=".2"/>') # Add pawns in scene print 'Adding pawn to scene' gazeTarget = scene.createPawn('gazeTarget') gazeTarget.setPosition(SrVec(0.75, 1.54, 0.33)) # Turn on GPU deformable geometry for all for name in scene.getCharacterNames(): scene.getCharacter(name).setStringAttribute("displayType", "GPUmesh") # Make characters gaze at pawn bml.execBML('ChrRachel', '<gaze sbm:joint-range="EYES CHEST" target="gazeTarget"/>') # Variables to move pawn gazeX = -2 gazeZ = 2 dirX = 1 dirZ = 1 speed = 0.005 lastTime = -8 class GazeDemo(SBScript): def update(self, time): global gazeX, gazeZ, dirX, dirZ, speed, lastTime # Change direction when hit border if gazeX > 2: dirX = -1 elif gazeX < -2: dirX = 1 if gazeZ > 2: dirZ = -1 elif gazeZ < -0: dirZ = 1 gazeX = gazeX + speed * dirX gazeZ = gazeZ + speed * dirZ gazeTarget.setPosition(SrVec(gazeX, 2, gazeZ)) diff = time - lastTime if diff > 10: diff = 0 lastTime = time #Gaze at joints bml.execBMLAt(0, 'ChrBrad', '<gaze target="ChrRachel:base" sbm:joint-speed="800" sbm:joint-smooth="0.2"/>') bml.execBMLAt(2, 'ChrBrad', '<gaze target="ChrBrad:l_wrist" sbm:joint-speed="800" sbm:joint-smooth="0.2"/>') bml.execBMLAt(4, 'ChrBrad', '<gaze target="ChrBrad:r_ankle" sbm:joint-speed="800" sbm:joint-smooth="0.2"/>') bml.execBMLAt(6, 'ChrBrad', '<gaze target="ChrRachel:l_wrist" sbm:joint-speed="800" sbm:joint-smooth="0.2"/>') bml.execBMLAt(8, 'ChrBrad', '<gaze target="ChrRachel:spine4" sbm:joint-speed="800" sbm:joint-smooth="0.2"/>') # Run the update script scene.removeScript('gazedemo') gazedemo = GazeDemo() scene.addScript('gazedemo', gazedemo)
OverDrive is now distributing Harry Potter ebooks to its 18 thousand member libraries, but that’s not their only news this week. They’ve just pushed out an update for the OverDrive Media Console app for iPhone and iPad. The update is few weeks late but it’s worth the wait. It adds a key feature that a lot of us need in eBooks: dictionary. You can now look up words in the eBooks you checkout from the library. Just tap a word with your finger to find the definition. You can also navigate to the corresponding Wikipedia page for more information.
""" Evennia settings file. The full options are found in the default settings file found here: {settings_default} Note: Don't copy more from the default file than you actually intend to change; this will make sure that you don't overload upstream updates unnecessarily. """ # Use the defaults from Evennia unless explicitly overridden import os from evennia.settings_default import * from machina import get_apps as get_machina_apps from machina import MACHINA_MAIN_TEMPLATE_DIR from machina import MACHINA_MAIN_STATIC_DIR ###################################################################### # Evennia base server config ###################################################################### # This is the name of your game. Make it catchy! SERVERNAME = 'Radial Blur' # Path to the game directory (use EVENNIA_DIR to refer to the # core evennia library) GAME_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # Place to put log files LOG_DIR = os.path.join(GAME_DIR, "server", "logs") SERVER_LOG_FILE = os.path.join(LOG_DIR, 'server.log') PORTAL_LOG_FILE = os.path.join(LOG_DIR, 'portal.log') HTTP_LOG_FILE = os.path.join(LOG_DIR, 'http_requests.log') # Other defaults PROTOTYPE_MODULES = ("world.prototypes",) ###################################################################### # Evennia Database config ###################################################################### # Database config syntax: # ENGINE - path to the the database backend. Possible choices are: # 'django.db.backends.sqlite3', (default) # 'django.db.backends.mysql', # 'django.db.backends.postgresql_psycopg2', # 'django.db.backends.oracle' (untested). # NAME - database name, or path to the db file for sqlite3 # USER - db admin (unused in sqlite3) # PASSWORD - db admin password (unused in sqlite3) # HOST - empty string is localhost (unused in sqlite3) # PORT - empty string defaults to localhost (unused in sqlite3) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(GAME_DIR, "server", "evennia.db3"), 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '' }} ###################################################################### # Django web features # (don't remove these entries, they are needed to override the default # locations with your actual GAME_DIR locations at run-time) ###################################################################### # Absolute path to the directory that holds file uploads from web apps. # Example: "/home/media/media.lawrence.com" MEDIA_ROOT = os.path.join(GAME_DIR, "web", "media") # The master urlconf file that contains all of the sub-branches to the # applications. Change this to add your own URLs to the website. ROOT_URLCONF = 'web.urls' # URL prefix for admin media -- CSS, JavaScript and images. Make sure # to use a trailing slash. Django1.4+ will look for admin files under # STATIC_URL/admin. STATIC_URL = '/static/' STATIC_ROOT = os.path.join(GAME_DIR, "web", "static") # Directories from which static files will be gathered from. STATICFILES_DIRS = ( os.path.join(GAME_DIR, "web", "static_overrides"), os.path.join(EVENNIA_DIR, "web", "static"), MACHINA_MAIN_STATIC_DIR, ) # We setup the location of the website template as well as the admin site. TEMPLATE_DIRS = ( os.path.join(GAME_DIR, "web", "template_overrides", ACTIVE_TEMPLATE), os.path.join(GAME_DIR, "web", "template_overrides"), os.path.join(EVENNIA_DIR, "web", "templates", ACTIVE_TEMPLATE), os.path.join(EVENNIA_DIR, "web", "templates"), MACHINA_MAIN_TEMPLATE_DIR, ) # Installed Apps INSTALLED_APPS += ( 'django.contrib.humanize', 'markdown_deux', 'bootstrapform', 'helpdesk', 'django.contrib.messages', 'mptt', 'haystack', 'widget_tweaks', 'django_markdown', 'happenings', 'machina', 'machina.apps.forum', 'machina.apps.forum_conversation', 'machina.apps.forum_conversation.forum_attachments', 'machina.apps.forum_conversation.forum_polls', 'machina.apps.forum_feeds', 'machina.apps.forum_moderation', 'machina.apps.forum_search', 'machina.apps.forum_tracking', 'machina.apps.forum_member', 'machina.apps.forum_permission', 'bootstrap3', 'jquery' ) TEMPLATE_CONTEXT_PROCESSORS += ( 'machina.core.context_processors.metadata', ) MIDDLEWARE_CLASSES += ( 'machina.apps.forum_permission.middleware.ForumPermissionMiddleware', ) CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'machina_attachments': { 'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache', 'LOCATION': '/tmp', } } HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } # The secret key is randomly seeded upon creation. It is used to sign # Django's cookies. Do not share this with anyone. Changing it will # log out all active web browsing sessions. Game web client sessions # may survive. SECRET_KEY = '2uX-V:tTFfYdZcQ@oJ"wH+NO{1jk9"?83KUsp,g4'
Indiana UST Class C Operator Certificate is designed for potential Class C Underground Storage Tank Operators. The Class C Operator of a UST facility is a person designated by the UST system owner who typically controls the dispensing of fuel at the facility and is responsible for initial response to alarms, releases, spills, overfills or threats to the public or to the environment. The course utilizes visual aids and audio commentary to facilitate the efficient and comprehensive learning of the subject matter required to meet state regulations for an UST Class C Operator. Depending on your state’s regulations, this material may require additional site-specific training.
import os from pypers.core.step import CmdLineStep from pypers.utils import utils class Split(CmdLineStep): spec = { "name": "split", "version": "1.0", "descr": [ "Splits an input file in several chuncks" ], "args": { "inputs": [ { "name" : "input_file", "type" : "file", "descr" : "input file name", }, { "name" : "nchunks", "type" : "int", "descr" : "number of chunks in which the input file get splitted", }, ], "outputs": [ { "name" : "output_files", "type" : "file", "descr" : "output file names", } ], "params" : [ { "name" : "prefix", "value" : "chunk_", "descr" : "string prefix on the output files", "readonly" : True, }, { "name" : "extension", "value" : ".bed", "descr" : "extension added to the splitted files", "readonly" : True, } ] }, "cmd": [ "/usr/bin/split -l {{line_chunks}} --suffix-length=4 -d {{input_file}} {{full_prefix}}", ] } def process(self): with open(self.input_file) as fh: lines = len(fh.readlines()) self.line_chunks = int(lines / self.nchunks) self.full_prefix = os.path.join(self.output_dir, self.prefix) self.submit_cmd(self.render()) self.output_files = [] for filename in os.listdir(self.output_dir): if filename.startswith(self.prefix): original_path = os.path.join(self.output_dir, filename) new_path = original_path + self.extension os.rename(original_path, new_path) self.output_files.append(new_path) self.meta['job']['input_file'] = [] for output_file in self.output_files: self.meta['job']['input_file'].append(self.input_file)
Hospital readmissions are mainly due to inadequate follow-up care, according to over half of the respondents to a recent ACP Internist survey. We asked readers what they felt was the primary driver behind hospital readmissions: inadequate discharge instructions, inadequate follow-up care, stressful hospital experience, the fact that the patient is at the end of life, or other. One hundred sixty-two of 290 respondents, or 55.86%, said that hospital readmissions are due mainly to inadequate follow-up. Inadequate discharge instructions was the next most popular response, with 27.93%, followed by "patient is at the end of life" with 11.03% and "stressful hospital experience" with 1.38%. Eleven respondents, or 3.79%, reported that other reasons were responsible, including patient nonadherence, lack of resources to obtain medications, lack of a primary care physician, premature discharge and physician error. Thirty-day readmission rates have been a focus of concern in recent years. CMS publicly reports readmission rates for several conditions on its Hospital Compare website, and its Readmissions Reduction Program has begun financially penalizing hospitals with excess readmissions as of discharges on or after Oct. 1, 2012. Earlier this year, Harlan M. Krumholz, MD, put a new spin on the topic in a New England Journal of Medicine perspective. In the article, he suggested that clinicians might be focusing too much on the acute cause of a patient's hospitalization when trying to hold down readmission rates. Instead, he said, they should also consider the "acquired, transient period of vulnerability" that occurs after hospital discharge. Dr. Krumholz recommended that physicians begin trying to identify and reduce other factors, such as pain, inadequate sleep and poor nutrition, that may make patients more likely to be readmitted. For more on this topic, read ACP Hospitalist's March 2013 cover story, "Post-hospital syndrome: Could a new diagnosis help fix readmissions?"
#!/usr/bin/python import argparse import os import sqlite3 import sys import matplotlib as mpl mpl.use('Agg') mpl.rc('font', family='Liberation Serif') import matplotlib.pyplot as plt import numpy as np from matplotlib import rcParams from pylab import * from collections import OrderedDict from config import * def main(): parser = argparse.ArgumentParser(prog=os.path.basename(__file__)) globals().update(load_config(parser)) parser.add_argument('--dataset', choices=datasets, required=False) parser.add_argument('--error', choices=['error', 'test_error', 'full_cv_error'], required=True) args = parser.parse_args() selected_datasets = [args.dataset] if args.dataset else datasets type_error = args.error for dataset in selected_datasets: conn = sqlite3.connect(database_file) c = conn.cursor() query = "SELECT strategy,generation,%s FROM results WHERE dataset='%s' AND %s<100000" % ( type_error, dataset, type_error) results = c.execute(query).fetchall() conn.close() if not results: raise Exception('No results') data = dict() for row in results: key = "%s-%s" % (row[0], row[1]) if key == 'DEFAULT-CV': key = 'WEKA-DEF' if key == 'RAND-CV': key = 'RAND' if key == 'SMAC-CV': # TO REMOVE key = 'SMAC' if key == 'TPE-CV': # TO REMOVE key = 'TPE' if key not in data: data[key] = [] try: data[key].append(float(row[2])) except Exception, e: print "[ERROR] ", e, " -- ", row[2] # data = OrderedDict(sorted(data.items(), key=lambda t: t[0])) labels = ['RAND', 'SMAC', 'TPE'] data = [data['RAND'], data['SMAC'], data['TPE']] fig, ax = plt.subplots(figsize=(6, 2)) ax.set_aspect(6) fig.canvas.draw() # bp = plt.boxplot(data.values(), vert=False, whis='range') # , labels=data.keys()) # ytickNames = plt.setp(ax, yticklabels=data.keys()) bp = plt.boxplot(data[::-1], vert=False, whis='range', widths=0.8) # , labels=data.keys()) ytickNames = plt.setp(ax, yticklabels=labels[::-1]) plt.setp(ytickNames, fontsize=10) xlim(0, 100) plt.margins(0.05, 0.05) xlabel('% misclassification') # ylabel('Strategy') title(dataset) tight_layout() savefig('../plots%s/boxplot.%s.%s.png' % (suffix, type_error, dataset)) # show() if __name__ == "__main__": main()
Texas Taxpayer Number assigned to COLEMAN, HOWARD AND ASSOCIATES, INC is 32011442012. Texas SOS File Number of this company is 0800205783. This company state of formation is Texas. COLEMAN, HOWARD AND ASSOCIATES, INC mailing address is 1204 W UNIVERSITY DR STE 205 DENTON, TX 76201-1771. Registered office street address is 1204 W. UNIVERSITY DR., STE 205 DENTON, TX 76201. You can find this business by geo coordinates: 33° 13' 47.5" N , 97° 8' 42.8" W. COLEMAN, HOWARD AND ASSOCIATES, INC was incorporated on Friday 16th May 2003, so this company age is fifteen years, eleven months and four days. Company right to transact business in texas is currently FRANCHISE TAX INVOLUNTARILY ENDED. COLEMAN, HOWARD AND ASSOCIATES, INC agent is MERKI & ASSOCIATES, P.C.. Company has two officers: BRETT HOWARD employed as director, CHARLIE COLEMAN employed as director. Check more coleman companies. Company Agent MERKI & ASSOCIATES, P.C.
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Sample tensorflow eager execution for helper functions. Make sure to create a new virtual environment (Python 3, TF 1.5) before executing this! """ import tensorflow as tf import tensorflow.contrib.eager as tfe from helper_functions import preprocess_image tfe.enable_eager_execution() # Test preprocess_image with open("../client/cat_sample.jpg", "rb") as imageFile: jpeg_str = imageFile.read() result = preprocess_image(jpeg_str) assert result.shape == (224, 224, 3) assert tf.reduce_max(result) <= 0.5 # Notice tf functions here! assert tf.reduce_min(result) >= -0.5
All 13 episodes of Marvel and Netflix’s fourth collaborative effort leading up to The Defenders, Iron Fist, are now available for streaming, and despite getting quite a kicking from the majority of critics, fans seemed to have enjoyed the show. One thing a lot of comic book purists did agree was a major issue, though, was the lack of a superhero costume for Danny Rand. We already knew going into the series that the Living Weapon most likely wouldn’t be suiting up, but when we got a brief glimpse of an older incarnation of Iron Fist in full costume via some grainy black and white footage around halfway through the season, some held out hope that the classic green and yellow ensemble might make an appearance. Unfortunately, that wasn’t to be, and it doesn’t sound like we’re going to be seeing it for quite a while. We’re not even certain Iron Fist will be renewed for another outing (though it’d be the first Marvel Netflix show not to be), but either way, this is not something fans are going to want to hear. Then again, the showrunners may have certain plans in place that Jones simply isn’t privy to, so perhaps we won’t have to wait as long as the actor suggests. Tell us, did the lack of a costume spoil Iron Fist for you? Drop us a comment in the usual place with your thoughts!
#!/usr/bin/env python import sys import os from setuptools import setup, find_packages _top_dir = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, os.path.join(_top_dir, "lib")) try: import markdown_tag finally: del sys.path[0] README = open(os.path.join(_top_dir, 'README.md')).read() setup(name='django-markdown-tag', version=markdown_tag.__version__, description="a Django app that provides template tags for using Markdown (using the python-markdown processor)", long_description=README, classifiers=[c.strip() for c in """ Development Status :: 5 - Production/Stable Environment :: Web Environment Framework :: Django Intended Audience :: Developers License :: OSI Approved :: MIT License Operating System :: OS Independent Programming Language :: Python :: 2 Topic :: Internet :: WWW/HTTP """.split('\n') if c.strip()], keywords='django markdown text markup html', author='Trent Mick, Steffen Görtz', author_email='trentm@gmail.com, steffen@steffen-goertz.de', maintainer='Steffen Görtz', maintainer_email='steffen@steffen-goertz.de', url='http://github.com/douzepouze/django-markdown-tag', license='MIT', install_requires = ['markdown'], packages=["markdown_tag"], package_dir={"": "lib"}, include_package_data=True, zip_safe=False, )
Rugged construction , Water resistant , Built in Flashlight + Powerbank mode to charge your devices just to name a few! The E-lvt personal Variable voltage vaporizer is Water resistant, dustproof, impact & Shock resistant, allowing you to use it just about anywhere! No more worries of destroying your brand new Vaporizer devices by the pool or on the trail, with the e-lvt you can confidently take it anywhere and know its up to the task! Thinking of taking your new variable voltage vape by the pool, hot tub or beach? Think again! most vaporizers will easily short or perminatly malfuction with any water contact but not the All new water resistant E-lvt , confidently enjoy your vaporizer in the presence of liquid without the fear of blowing your new investment! The New E-lvt uses high capacity replaceable lithium ion rechargeable batteries allowing for drasticly more power than many of todays vaporizers that require you to replace the entire unit when they fail, who wants to do that?! With the all new E-lvt your device also serves as a power bank capable of charging mp3 players, cell phones, tablets and much more! Welcome To E-lvt.com your ONLY Source for the Official E-lvt variable voltage Vaporizers mastering the very latest in all purpose variable vaporizer technology. The E-lvt MOD is like no other on the market it uses the latest intelligent integrated circuits, control circuits, & and high power output circuit tech allowing the battery and circuit board work in perfect sync extending battery life. Constructed of Zinc Alloy and premium Silicone materials, the E-lvt is virtually indestructible!! Perfect for anyone engaged in active sports, outdoors, construction, jobs located in rugged environments, Summer and water based activities, or if your just simply prone to break things. The E-lvt device can literally take the force of being ran over by a car and tossed through a car wash then still function as designed, name a vaporizer on the market that can come close to surviving that kind of abuse! To sum it up, THIS MOD IS A ABSOLUTE BEAST! It's water resistant, shock proof, Dirt & dust proof all while sporting the ability to allow charging of your cell phones mp3 devices and tablets! PLUS it's also equipped with a built flashlight! Perfect for people who love the outdoors or live that active lifestyle. Never will you have to worry about the E-lvt breaking from a hard fall or drop ruining your hard earned investment unlike many of today's more fragile mods on the market. The E-lvt also allows you to control the Voltage from 3.0V to 6V or Wattage from 3W to 15W saving you from burning out your coils on some of the more fragile atomizer attachments. The E-lvt also sports a industry standard 510 connection thats SPRING loaded which makes switching different types of atomizers is painless and extremely easy. All types of different Attomizer attachments make a perfect flush connection! Be the one of the first to own this awsome variable voltage vaporizer device! Stop buying mods that break with the slightest drop or splash when you could have the most durable , powerful all purpose premium vaporizer set on the market! Seize the Summer! We offer Certified E-lvt Replacements & Parts to to help you keep your E-lvt in tip top Shape & Looking Great! Is the E-lvt for you? If your looking for a Premium quality durable vaporizer setup that not only can handle a wide range of variable voltage settings & attachments but is also rugged , durable and outdoor friendly, look no further than the E-lvt. With the E-lvt unique features and abilities and affordable price point you simply cant go wrong. The E-lvt offers twice the features of its competitors at less than half the price of some of its pricier competition making it an Affordable choice while also saving you money down the road from having to buy a new expensive vaporizer set after dropping it in the mud / water / concrete. We offer Complete customer support on the E-lvt should you ever need replacement parts , lithium batteries , Cables, Attachments, or how to's & tutorials on how to use your E-lvt variable voltage vaporizer we have you covered! Always Make sure to purchase your Sets & Parts from our Verified E-lvt dealers only to insure you recieve high quality Authentic sets and replacement units. If your in the market for a Premium Duriable outdoor ready vaping solution for the summer time festivities than the E-lvt is exactly what youve been looking for!! Serves as a Power bank, charge digital products through the USB output port. LED flashlight located on the rear of the device. Lithium battery (included) can be charged internally or externally. Digital screen shows battery power and voltage. Variable Voltage (3.0-6.0V) and Variable Power (3w-15w). Compatible with all 510 and Ego atomizer threads. Built in alarm if sucking over 10 seconds. Battery short circuit and overcharge protection. All sets Ship with 2-3 Day First class Shipping! Why Should I get a E-lvt? The E-lvt offers the very best in the world of Vapor technology fusing , portability , durability, Performance and utility functions into a premium vv/vw vaporizer solution thats also perfect for outdoors and summertime fun! Yes Of course it is, the E-lvt has many cutting edge safety features like short circuit prevention and automatic cuttoff protecting the battery its being engauged for too long, this prevents damage & battery failure. The E-lvt is also water resistant as well as shock and dirtproof keeping your vaporizer safe from the elements as well! For more usage tips be sure to check your Instruction manual and practice safe usage as well as proper storing and cleaning. Whats the difference between the E-lvt and other mods? Besides better overall performance the E-lvt is a better choice than most competitor mods simply because of its durability, in the world of variable mods a new unit can often cost an arm or a leg! Noone wants to shell out big money just to have their unit break while their having fun and have to replace it , not with the New E-lvt , with its water resistant , dirt , dust , and shock proof design your set is sure to stand the test of time saving you big bucks while out performing everything else on the market. What E-juice flavors are available ? The E-lvt like most modern ecigarettes allows for self filling of prefered ejuices , theres tons of flavors out there and theyre all compatible with the E-lvt, Find your favorite flavor mix today at Slims Ejuice! Does any attomizer fit the E-lvt? ? The E-lvt uses the most popular threading embraced by the industry the 510 thread. There is a wide variety of 510 threaded attachments available from many popular 3rd parts ranging from Tanks , Ce4/Ce5 attachments , dry herb chambers and more , changing attachments is a breeze simply twist , remove current attachment and twist back on the desired new attachment, thats it , its that easy! Where can I purchase the E-lvt ? Pier 420 is the Top Premier Network for The latest Vaporizer & Mod technology. Your #1 420 and Vapor headquarters Offering free shipping & top notch customer support for Every order & has been rated the #1 vapor supply company in the industry. Currently they are one of very few U.S.A Authorized dealers of the E-lvt , To insure you recieve a quality set as well as proper customer support make sure to purchase from Authorized dealers only. How long will it take to get one ? Most all orders placed will ship next day with tracking confirmation # included, often orders ship same day if placed before 3pm Est. we offer 2-3 day First class shipping with EVERY order! Do you have attachments available ? Yes! we have multiple addon attachments available for the E-lvt from Protank 3's , Aerotanks, cloutanks , and many many more , whatever your trying to vaporize we have you covered!
#!/usr/bin/env python #encode=utf-8 #vim: tabstop=4 shiftwidth=4 softtabstop=4 #Created on 2013-8-17 #Copyright 2013 nuoqingyun xuqifeng import datetime import calendar import time from oslo.config import cfg memcache_opts = [ cfg.ListOpt('memcached_servers', default=['127.0.0.1:11211'], help='Memcached servers or None for in process cache.'), ] CONF = cfg.CONF CONF.register_opts(memcache_opts) def get_client(memcached_servers=None): client_cls = Client if not memcached_servers: memcached_servers = CONF.memcached_servers if memcached_servers: try: import memcache client_cls = memcache.Client except ImportError: pass return client_cls(memcached_servers, debug=0) class Client(object): def __init__(self, *args, **kwargs): self.cache = {} def get(self, key): now = time.time() for k in self.cache.keys(): (timeout, _value) = self.cache[k] if timeout and now >= timeout: del self.cache[k] return self.cache.get(key, (0, None))[1] def set(self, key, value, time=0, min_compress_len=0): timeout = 0 if time != 0: timeout = calendar.timegm((datetime.datetime.utcnow()).timetuple()) + time self.cache[key] = (timeout, value) return True def add(self, key, value, time=0, min_compress_len=0): if self.get(key) is not None: return False return self.set(key, value, time, min_compress_len) def incr(self, key, delta=1): value = self.get(key) if value is None: return None new_value = int(value) + delta self.cache[key] = (self.cache[key][0], str(new_value)) return new_value def delete(self, key, time=0): if key in self.cache: del self.cache[key]
The Society is pleased to offer two scholarships of £1500 for registered postgraduate or postdoctoral students working on Herculaneum or topics pertinent to the understanding of Herculaneum. One award will be for papyrology and the other may be on any topic. The successful applicants will be expected to provide images and a report for the Society's Newsletter and a podcast for the website, and if possible address a meeting of the Society. They will also be expected to join the Society. In advance of the Herculaneum exhibition at the Getty starting 26 June, the famous "drunken satyr" statue has arrived. Learn more at http://blogs.getty.edu/iris/an-introduction-to-the-drunken-satyr-a-rare-roman-bronze-being-studied-and-conserved-at-the-getty-villa/. Society Trustee Dr Gianluca Del Mastro has been appointed by the Italian Minister of Culture as President of the Fondazione Ente Ville Vesuviane, the organisation which manages all 122 historic villas in the region. The list includes the Villa Campolieto in Ercolano (where the Fondazione is based), the Villa Signorini and the Villa Aprile, a.k.a. the Miglio d’Oro hotel in Ercolano.
# -- coding: utf-8 -- from __future__ import absolute_import from unittest import main, TestCase from tempfile import mkdtemp from os.path import join, exists, dirname, isdir, abspath, sep from urlparse import urlparse, urljoin from os import environ, mkdir from shutil import rmtree, copytree from re import search, sub import random from datetime import date, timedelta, datetime import sys from chime.repo_functions import ChimeRepo from slugify import slugify from multiprocessing import Process import json import time import logging import tempfile logging.disable(logging.CRITICAL) repo_root = abspath(join(dirname(__file__), '..')) sys.path.insert(0, repo_root) from box.util.rotunicode import RotUnicode from httmock import response, HTTMock from mock import MagicMock, patch from bs4 import Comment from chime import ( create_app, repo_functions, google_api_functions, view_functions, publish, errors) from chime import constants from unit.chime_test_client import ChimeTestClient import codecs codecs.register(RotUnicode.search_function) # these patterns help us search the HTML of a response to determine if the expected page loaded PATTERN_BRANCH_COMMENT = u'<!-- branch: {} -->' PATTERN_AUTHOR_COMMENT = u'<!-- author: {} -->' PATTERN_TASK_COMMENT = u'<!-- task: {} -->' PATTERN_TEMPLATE_COMMENT = u'<!-- template name: {} -->' PATTERN_FILE_COMMENT = u'<!-- file type: {file_type}, file name: {file_name}, file title: {file_title} -->' PATTERN_OVERVIEW_ITEM_CREATED = u'<p>The "{created_name}" {created_type} was created by {author_email}.</p>' PATTERN_OVERVIEW_ACTIVITY_STARTED = u'<p>The "{activity_name}" activity was started by {author_email}.</p>' PATTERN_OVERVIEW_COMMENT_BODY = u'<div class="comment__body">{comment_body}</div>' PATTERN_OVERVIEW_ITEM_DELETED = u'<p>The "{deleted_name}" {deleted_type} {deleted_also}was deleted by {author_email}.</p>' PATTERN_FLASH_TASK_DELETED = u'You deleted the "{description}" activity!' PATTERN_FLASH_SAVED_CATEGORY = u'<li class="flash flash--notice">Saved changes to the {title} topic! Remember to submit this change for feedback when you\'re ready to go live.</li>' PATTERN_FLASH_CREATED_CATEGORY = u'Created a new topic named {title}! Remember to submit this change for feedback when you\'re ready to go live.' PATTERN_FLASH_CREATED_ARTICLE = u'Created a new article named {title}! Remember to submit this change for feedback when you\'re ready to go live.' PATTERN_FLASH_SAVED_ARTICLE = u'Saved changes to the {title} article! Remember to submit this change for feedback when you\'re ready to go live.' PATTERN_FLASH_DELETED_ARTICLE = u'The "{title}" article was deleted! Remember to submit this change for feedback when you\'re ready to go live.' PATTERN_FORM_CATEGORY_TITLE = u'<input name="en-title" type="text" value="{title}" class="directory-modify__name" placeholder="Crime Statistics and Maps">' PATTERN_FORM_CATEGORY_DESCRIPTION = u'<textarea name="en-description" class="directory-modify__description" placeholder="Crime statistics and reports by district and map">{description}</textarea>' # review stuff PATTERN_REQUEST_FEEDBACK_BUTTON = u'<button class="toolbar__item button button--orange" type="submit" name="request_feedback" value="Request Feedback">Request Feedback</button>' PATTERN_UNREVIEWED_EDITS_LINK = u'<a href="/tree/{branch_name}/" class="toolbar__item button">Unreviewed Edits</a>' PATTERN_ENDORSE_BUTTON = u'<button class="toolbar__item button button--green" type="submit" name="endorse_edits" value="Endorse Edits">Endorse Edits</button>' PATTERN_FEEDBACK_REQUESTED_LINK = u'<a href="/tree/{branch_name}/" class="toolbar__item button">Feedback requested</a>' PATTERN_PUBLISH_BUTTON = u'<button class="toolbar__item button button--blue" type="submit" name="merge" value="Publish">Publish</button>' PATTERN_READY_TO_PUBLISH_LINK = u'<a href="/tree/{branch_name}/" class="toolbar__item button">Ready to publish</a>' class TestAppConfig (TestCase): # in TestAppConfig def test_missing_values(self): self.assertRaises(KeyError, lambda: create_app({})) # in TestAppConfig def test_present_values(self): create_app_environ = {} create_app_environ['RUNNING_STATE_DIR'] = 'Yo' create_app_environ['GA_CLIENT_ID'] = 'Yo' create_app_environ['GA_CLIENT_SECRET'] = 'Yo' create_app_environ['LIVE_SITE_URL'] = 'Hey' create_app_environ['BROWSERID_URL'] = 'Hey' create_app(create_app_environ) # in TestAppConfig def test_error_template_args(self): ''' Default error template args are generated as expected ''' create_app_environ = {} create_app_environ['RUNNING_STATE_DIR'] = 'Yo' create_app_environ['GA_CLIENT_ID'] = 'Yo' create_app_environ['GA_CLIENT_SECRET'] = 'Yo' create_app_environ['BROWSERID_URL'] = 'Hey' create_app_environ['LIVE_SITE_URL'] = 'Hey' fake_support_email = u'support@example.com' fake_support_phone_number = u'(123) 456-7890' create_app_environ['SUPPORT_EMAIL_ADDRESS'] = fake_support_email create_app_environ['SUPPORT_PHONE_NUMBER'] = fake_support_phone_number app = create_app(create_app_environ) template_args = errors.common_error_template_args(app.config) self.assertEqual(len(template_args), 3) self.assertTrue('activities_path' in template_args) self.assertTrue('support_email' in template_args) self.assertTrue('support_phone_number' in template_args) self.assertEqual(template_args['support_email'], fake_support_email) self.assertEqual(template_args['support_phone_number'], fake_support_phone_number) # in TestAppConfig def test_for_constant_name_conflicts(self): ''' None of the constant names defined in constants.py conflict with reserved config variable names ''' flask_reserved_config_names = ['DEBUG', 'TESTING', 'PROPAGATE_EXCEPTIONS', 'PRESERVE_CONTEXT_ON_EXCEPTION', 'SECRET_KEY', 'SESSION_COOKIE_NAME', 'SESSION_COOKIE_DOMAIN', 'SESSION_COOKIE_PATH', 'SESSION_COOKIE_HTTPONLY', 'SESSION_COOKIE_SECURE', 'PERMANENT_SESSION_LIFETIME', 'USE_X_SENDFILE', 'LOGGER_NAME', 'SERVER_NAME', 'APPLICATION_ROOT', 'MAX_CONTENT_LENGTH', 'SEND_FILE_MAX_AGE_DEFAULT', 'TRAP_HTTP_EXCEPTIONS', 'TRAP_BAD_REQUEST_ERRORS', 'PREFERRED_URL_SCHEME', 'JSON_AS_ASCII', 'JSON_SORT_KEYS', 'JSONIFY_PRETTYPRINT_REGULAR'] chime_reserved_config_names = ['RUNNING_STATE_DIR', 'REPO_PATH', 'WORK_PATH', 'AUTH_DATA_HREF', 'BROWSERID_URL', 'GA_CLIENT_ID', 'GA_CLIENT_SECRET', 'GA_REDIRECT_URI', 'SUPPORT_EMAIL_ADDRESS', 'SUPPORT_PHONE_NUMBER', 'GDOCS_CLIENT_ID', 'GDOCS_CLIENT_SECRET', 'GITHUB_CLIENT_ID', 'GITHUB_CLIENT_SECRET', 'LIVE_SITE_URL', 'PUBLISH_SERVICE_URL'] check_names = flask_reserved_config_names + chime_reserved_config_names for reserved_name in check_names: self.assertFalse(hasattr(constants, reserved_name), u'The reserved config variable name {} is present in constants!'.format(reserved_name)) class TestApp (TestCase): def setUp(self): self.old_tempdir, tempfile.tempdir = tempfile.tempdir, mkdtemp(prefix='chime-TestApp-') self.work_path = mkdtemp(prefix='chime-repo-clones-') self.publish_path = mkdtemp(prefix='chime-publish-path-') repo_path = dirname(abspath(__file__)) + '/../test-app.git' temp_repo_dir = mkdtemp(prefix='chime-root') temp_repo_path = temp_repo_dir + '/test-app.git' copytree(repo_path, temp_repo_path) self.origin = ChimeRepo(temp_repo_path) repo_functions.ignore_task_metadata_on_merge(self.origin) self.clone1 = self.origin.clone(mkdtemp(prefix='chime-')) repo_functions.ignore_task_metadata_on_merge(self.clone1) fake_author_email = u'erica@example.com' self.session = dict(email=fake_author_email) environ['GIT_AUTHOR_NAME'] = ' ' environ['GIT_COMMITTER_NAME'] = ' ' environ['GIT_AUTHOR_EMAIL'] = self.session['email'] environ['GIT_COMMITTER_EMAIL'] = self.session['email'] create_app_environ = {} create_app_environ['SINGLE_USER'] = 'Yes' create_app_environ['GA_CLIENT_ID'] = 'client_id' create_app_environ['GA_CLIENT_SECRET'] = 'meow_secret' self.ga_config_dir = mkdtemp(prefix='chime-config-') create_app_environ['RUNNING_STATE_DIR'] = self.ga_config_dir create_app_environ['WORK_PATH'] = self.work_path create_app_environ['REPO_PATH'] = temp_repo_path create_app_environ['AUTH_DATA_HREF'] = 'http://example.com/auth.csv' create_app_environ['BROWSERID_URL'] = 'http://localhost' create_app_environ['LIVE_SITE_URL'] = 'http://example.org/' create_app_environ['PUBLISH_PATH'] = self.publish_path create_app_environ['SUPPORT_EMAIL_ADDRESS'] = u'support@example.com' create_app_environ['SUPPORT_PHONE_NUMBER'] = u'(123) 456-7890' self.app = create_app(create_app_environ) # write a tmp config file config_values = { "access_token": "meowser_token", "refresh_token": "refresh_meows", "profile_id": "12345678", "project_domain": "" } with self.app.app_context(): google_api_functions.write_ga_config(config_values, self.app.config['RUNNING_STATE_DIR']) random.choice = MagicMock(return_value="P") self.test_client = self.app.test_client() def tearDown(self): rmtree(tempfile.tempdir) tempfile.tempdir = self.old_tempdir def auth_csv_example_disallowed(self, url, request): if url.geturl() == 'http://example.com/auth.csv': return response(200, '''Email domain,Organization\n''') raise Exception('Asked for unknown URL ' + url.geturl()) def auth_csv_example_allowed(self, url, request): if url.geturl() == 'http://example.com/auth.csv': return response(200, '''Email domain,Organization\nexample.com,Example Org\n*,Anyone''') raise Exception('Asked for unknown URL ' + url.geturl()) def mock_persona_verify_erica(self, url, request): if url.geturl() == 'https://verifier.login.persona.org/verify': return response(200, '''{"status": "okay", "email": "erica@example.com"}''', headers=dict(Link='<https://api.github.com/user/337792/repos?page=1>; rel="prev", <https://api.github.com/user/337792/repos?page=1>; rel="first"')) else: return self.auth_csv_example_allowed(url, request) def mock_persona_verify_non_roman(self, url, request): if url.geturl() == 'https://verifier.login.persona.org/verify': return response(200, '''{"status": "okay", "email": "੯ूᵕू ໒꒱ƶƵ@快速狐狸.com"}''', headers=dict(Link='<https://api.github.com/user/337792/repos?page=1>; rel="prev", <https://api.github.com/user/337792/repos?page=1>; rel="first"')) else: return self.auth_csv_example_allowed(url, request) def mock_persona_verify_frances(self, url, request): if url.geturl() == 'https://verifier.login.persona.org/verify': return response(200, '''{"status": "okay", "email": "frances@example.com"}''', headers=dict(Link='<https://api.github.com/user/337792/repos?page=1>; rel="prev", <https://api.github.com/user/337792/repos?page=1>; rel="first"')) else: return self.auth_csv_example_allowed(url, request) def mock_persona_verify_william(self, url, request): if url.geturl() == 'https://verifier.login.persona.org/verify': return response(200, '''{"status": "okay", "email": "william@example.org"}''', headers=dict(Link='<https://api.github.com/user/337792/repos?page=1>; rel="prev", <https://api.github.com/user/337792/repos?page=1>; rel="first"')) else: return self.auth_csv_example_allowed(url, request) def mock_google_authorization(self, url, request): if 'https://accounts.google.com/o/oauth2/auth' in url.geturl(): return response(200, '''{"access_token": "meowser_token", "token_type": "meowser_type", "refresh_token": "refresh_meows", "expires_in": 3920}''') else: return self.auth_csv_example_allowed(url, request) def mock_successful_google_callback(self, url, request): if google_api_functions.GOOGLE_ANALYTICS_TOKENS_URL in url.geturl(): return response(200, '''{"access_token": "meowser_token", "token_type": "meowser_type", "refresh_token": "refresh_meows", "expires_in": 3920}''') elif google_api_functions.GOOGLE_PLUS_WHOAMI_URL in url.geturl(): return response(200, '''{"displayName": "Jane Doe", "emails": [{"type": "account", "value": "erica@example.com"}]}''') elif google_api_functions.GOOGLE_ANALYTICS_PROPERTIES_URL in url.geturl(): return response(200, '''{"items": [{"defaultProfileId": "12345678", "name": "Property One", "websiteUrl": "http://propertyone.example.com"}, {"defaultProfileId": "87654321", "name": "Property Two", "websiteUrl": "http://propertytwo.example.com"}]}''') else: return self.auth_csv_example_allowed(url, request) def mock_failed_google_callback(self, url, request): if google_api_functions.GOOGLE_ANALYTICS_TOKENS_URL in url.geturl(): return response(500, '''{}''') elif google_api_functions.GOOGLE_PLUS_WHOAMI_URL in url.geturl(): return response(200, '''{"displayName": "Jane Doe", "emails": [{"type": "account", "value": "erica@example.com"}]}''') elif google_api_functions.GOOGLE_ANALYTICS_PROPERTIES_URL in url.geturl(): return response(200, '''{"items": [{"defaultProfileId": "12345678", "name": "Property One", "websiteUrl": "http://propertyone.example.com"}, {"defaultProfileId": "87654321", "name": "Property Two", "websiteUrl": "http://propertytwo.example.com"}]}''') else: return self.auth_csv_example_allowed(url, request) def mock_google_invalid_credentials_response(self, url, request): if 'https://www.googleapis.com/analytics/' in url.geturl() or google_api_functions.GOOGLE_ANALYTICS_PROPERTIES_URL in url.geturl(): return response(401, '''{"error": {"code": 401, "message": "Invalid Credentials", "errors": [{"locationType": "header", "domain": "global", "message": "Invalid Credentials", "reason": "authError", "location": "Authorization"}]}}''') elif google_api_functions.GOOGLE_PLUS_WHOAMI_URL in url.geturl(): return response(403, '''{"error": {"code": 403, "message": "Access Not Configured. The API (Google+ API) is not enabled for your project. Please use the Google Developers Console to update your configuration.", "errors": [{"domain": "usageLimits", "message": "Access Not Configured. The API (Google+ API) is not enabled for your project. Please use the Google Developers Console to update your configuration.", "reason": "accessNotConfigured", "extendedHelp": "https://console.developers.google.com"}]}}''') else: return self.auth_csv_example_allowed(url, request) def mock_google_no_properties_response(self, url, request): if google_api_functions.GOOGLE_ANALYTICS_PROPERTIES_URL in url.geturl(): return response(200, '''{"kind": "analytics#webproperties", "username": "erica@example.com", "totalResults": 0, "startIndex": 1, "itemsPerPage": 1000, "items": []}''') elif google_api_functions.GOOGLE_PLUS_WHOAMI_URL in url.geturl(): return response(200, '''{"displayName": "Jane Doe", "emails": [{"type": "account", "value": "erica@example.com"}]}''') else: return self.auth_csv_example_allowed(url, request) def mock_google_analytics(self, url, request): start_date = (date.today() - timedelta(days=7)).isoformat() end_date = date.today().isoformat() url_string = url.geturl() if 'ids=ga%3A12345678' in url_string and 'end-date=' + end_date in url_string and 'start-date=' + start_date in url_string and 'filters=ga%3ApagePath%3D~%28hello.html%7Chello%29' in url_string: return response(200, '''{"ga:previousPagePath": "/about/", "ga:pagePath": "/lib/", "ga:pageViews": "12", "ga:avgTimeOnPage": "56.17", "ga:exiteRate": "43.75", "totalsForAllResults": {"ga:pageViews": "24", "ga:avgTimeOnPage": "67.36363636363636"}}''') else: return self.auth_csv_example_allowed(url, request) def mock_internal_server_error(self, url, request): from flask import abort abort(500) def mock_exception(self, url, request): raise Exception(u'This is a generic exception.') # in TestApp def test_no_cache_headers(self): ''' The expected no-cache headers are in the server response. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') erica.open_link('/') # The static no-cache headers are as expected self.assertEqual(erica.headers['Cache-Control'], 'no-store, no-cache, must-revalidate, post-check=0, pre-check=0, max-age=0') self.assertEqual(erica.headers['Pragma'], 'no-cache') self.assertEqual(erica.headers['Expires'], '-1') # The last modified date is within 10 seconds of now last_modified = datetime.strptime(erica.headers['Last-Modified'], '%Y-%m-%d %H:%M:%S.%f') delta = datetime.now() - last_modified self.assertTrue(delta.seconds < 10) # in TestApp def test_bad_login(self): ''' Check basic log in / log out flow without talking to Persona. ''' response = self.test_client.get('/') self.assertFalse('erica@example.com' in response.data) with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) self.assertEqual(response.status_code, 200) with HTTMock(self.auth_csv_example_disallowed): response = self.test_client.get('/') self.assertFalse('Create' in response.data) # in TestApp def test_login(self): ''' Check basic log in / log out flow without talking to Persona. ''' response = self.test_client.get('/') self.assertFalse('Start' in response.data) with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) self.assertEqual(response.status_code, 200) with HTTMock(self.auth_csv_example_allowed): response = self.test_client.get('/') self.assertTrue('Start' in response.data) self.assertTrue('http://example.org' in response.data, 'Should see LIVE_SITE_URL in response') response = self.test_client.post('/sign-out') self.assertEqual(response.status_code, 200) response = self.test_client.get('/') self.assertFalse('Start' in response.data) # in TestApp def test_login_splat(self): ''' Check basic log in / log out flow without talking to Persona. ''' response = self.test_client.get('/') self.assertFalse('Start' in response.data) with HTTMock(self.mock_persona_verify_william): response = self.test_client.post('/sign-in', data={'assertion': 'william@example.org'}) self.assertEqual(response.status_code, 200) with HTTMock(self.auth_csv_example_allowed): response = self.test_client.get('/') self.assertTrue('Start' in response.data) # in TestApp def test_default_auth_href_warning(self): ''' Check basic log in / log out flow without talking to Persona. ''' with patch('chime.view_functions.AUTH_DATA_HREF_DEFAULT', new='http://example.com/auth.csv'): response = self.test_client.get('/not-allowed') expected = 'Your Chime <code>AUTH_DATA_HREF</code> is set to default value.' self.assertTrue(expected in response.data, 'Should see a warning') # in TestApp @patch('chime.view_functions.AUTH_CHECK_LIFESPAN', new=1.0) def test_login_timeout(self): ''' Check basic log in / log out flow with auth check lifespan. ''' response = self.test_client.get('/') self.assertFalse('Start' in response.data) with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) self.assertEqual(response.status_code, 200) with HTTMock(self.auth_csv_example_allowed): response = self.test_client.get('/') self.assertTrue('Start' in response.data) with patch('chime.view_functions.get_auth_data_file') as get_auth_data_file: # Show that email status does not require a call to auth CSV. response = self.test_client.get('/') self.assertEqual(response.status_code, 200, 'Should have worked') self.assertEqual(get_auth_data_file.call_count, 0, 'Should not have called get_auth_data_file()') # Show that a call to auth CSV was made, outside the timeout period. time.sleep(1.1) response = self.test_client.get('/') self.assertEqual(get_auth_data_file.call_count, 1, 'Should have called get_auth_data_file()') with HTTMock(self.auth_csv_example_allowed): # Show that email status was correctly updatedw with call to CSV. response = self.test_client.get('/') self.assertEqual(response.status_code, 200, 'Should have worked') response = self.test_client.post('/sign-out') self.assertEqual(response.status_code, 200) response = self.test_client.get('/') self.assertFalse('Start' in response.data) # in TestApp def test_need_description_to_start_activity(self): ''' You need a description to start a new activity ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') pattern_template_comment_stripped = sub(ur'<!--|-->', u'', PATTERN_TEMPLATE_COMMENT) flash_message_text = u'Please describe what you\'re doing when you start a new activity!' # start a new task without a description erica.start_task(description=u'') # the activities-list template reloaded comments = erica.soup.findAll(text=lambda text: isinstance(text, Comment)) self.assertTrue(pattern_template_comment_stripped.format(u'activities-list') in comments) # verify that there's a flash message warning about submitting an empty description self.assertEqual(flash_message_text, erica.soup.find('li', class_='flash').text) # in TestApp def test_whitespace_stripped_from_description(self): ''' Carriage returns, tabs, spaces are stripped from task descriptions before they're saved. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') # start a new task with a lot of random whitespace task_description = u'I think\n\r\n\rI am so \t\t\t coool!!\n\n\nYeah.\n\nOK\n\rERWEREW dkkdk' task_description_stripped = u'I think I am so coool!! Yeah. OK ERWEREW dkkdk' erica.start_task(description=task_description) # the stripped comment is in the HTML pattern_task_comment_stripped = sub(ur'<!--|-->', u'', PATTERN_TASK_COMMENT) comments = erica.soup.findAll(text=lambda text: isinstance(text, Comment)) self.assertTrue(pattern_task_comment_stripped.format(task_description_stripped) in comments) # the stripped comment is in the task metadata repo = view_functions.get_repo(repo_path=self.app.config['REPO_PATH'], work_path=self.app.config['WORK_PATH'], email='erica@example.com') task_metadata = repo_functions.get_task_metadata_for_branch(repo, erica.get_branch_name()) self.assertEqual(task_description_stripped, task_metadata['task_description']) # in TestApp def test_notification_on_create_category(self): ''' You get a flash notification when you create a category ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('erica@example.com') # Start a new task erica.start_task(description=u'Lick Water Droplets From Leaves for Leopard Geckos') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder other_slug = u'other' erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, other_slug)) # Create a category category_name = u'Rubber Plants' category_slug = slugify(category_name) erica.add_category(category_name=category_name) # the category is correctly represented on the page self.assertIsNotNone(erica.soup.find(lambda tag: bool(tag.name == 'a' and category_name in tag.text))) self.assertIsNotNone(erica.soup.find(lambda tag: bool(tag.name == 'a' and category_slug in tag['href']))) # a flash message appeared self.assertEqual(PATTERN_FLASH_CREATED_CATEGORY.format(title=category_name), erica.soup.find('li', class_='flash').text) # in TestApp def test_notifications_on_create_edit_and_delete_article(self): ''' You get a flash notification when you create an article ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('erica@example.com') # Start a new task erica.start_task(description=u'Lick Water Droplets From Leaves for Leopard Geckos') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder other_slug = u'other' erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, other_slug)) # Create a category and sub-category category_name = u'Rubber Plants' subcategory_name = u'Leaves' erica.add_category(category_name=category_name) erica.add_subcategory(subcategory_name=subcategory_name) subcategory_path = erica.path # Create an article article_name = u'Water Droplets' erica.add_article(article_name=article_name) # a flash message appeared self.assertEqual(PATTERN_FLASH_CREATED_ARTICLE.format(title=article_name), erica.soup.find('li', class_='flash').text) # edit the article erica.edit_article(title_str=article_name, body_str=u'Watch out for poisonous insects.') # a flash message appeared self.assertEqual(PATTERN_FLASH_SAVED_ARTICLE.format(title=article_name), erica.soup.find('li', class_='flash').text) # delete the article erica.open_link(subcategory_path) erica.delete_article(article_name) # a flash message appeared self.assertEqual(PATTERN_FLASH_DELETED_ARTICLE.format(title=article_name), erica.soup.find('li', class_='flash').text) # in TestApp def test_branches(self): ''' Check basic branching functionality. ''' fake_task_description = u'do things for somebody else' fake_author_email = u'erica@example.com' fake_endorser_email = u'frances@example.com' fake_page_slug = u'hello' fake_page_path = u'{}/index.{}'.format(fake_page_slug, view_functions.CONTENT_FILE_EXTENSION) fake_page_content = u'People of earth we salute you.' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # create a new branch response = self.test_client.post('/start', data={'task_description': fake_task_description}, follow_redirects=True) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) self.assertTrue(PATTERN_TASK_COMMENT.format(fake_task_description) in response.data) self.assertTrue(PATTERN_AUTHOR_COMMENT.format(fake_author_email) in response.data) # extract the generated branch name from the returned HTML generated_branch_search = search(r'<!-- branch: (.{{{}}}) -->'.format(repo_functions.BRANCH_NAME_LENGTH), response.data) self.assertIsNotNone(generated_branch_search) try: generated_branch_name = generated_branch_search.group(1) except AttributeError: raise Exception('No match for generated branch name.') with HTTMock(self.mock_google_analytics): # create a new file response = self.test_client.post('/tree/{}/edit/'.format(generated_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': fake_page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(fake_page_path in response.data) # get the index page for the branch and verify that the new file is listed response = self.test_client.get('/tree/{}/edit/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_BRANCH_COMMENT.format(generated_branch_name) in response.data) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": fake_page_slug, "file_title": fake_page_slug, "file_type": view_functions.ARTICLE_LAYOUT}) in response.data) # get the edit page for the new file and extract the hexsha value response = self.test_client.get('/tree/{}/edit/{}'.format(generated_branch_name, fake_page_path)) self.assertEqual(response.status_code, 200) self.assertTrue(fake_page_path in response.data) hexsha = search(r'<input name="hexsha" value="(\w+)"', response.data).group(1) # now save the file with new content response = self.test_client.post('/tree/{}/save/{}'.format(generated_branch_name, fake_page_path), data={'layout': view_functions.ARTICLE_LAYOUT, 'hexsha': hexsha, 'en-title': 'Greetings', 'en-body': u'{}\n'.format(fake_page_content), 'fr-title': '', 'fr-body': '', 'url-slug': u'{}/index'.format(fake_page_slug)}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(fake_page_path in response.data) self.assertTrue(fake_page_content in response.data) # Check that English and French forms are both present. self.assertTrue('name="fr-title"' in response.data) self.assertTrue('name="en-title"' in response.data) # Verify that navigation tabs are in the correct order. self.assertTrue(response.data.index('id="fr-nav"') < response.data.index('id="en-nav"')) # Request feedback on the change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name), data={'comment_text': u'', 'request_feedback': u'Request Feedback'}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(u'{} {}'.format(fake_author_email, repo_functions.ACTIVITY_FEEDBACK_MESSAGE) in response.data) # # # Log in as a different person with HTTMock(self.mock_persona_verify_frances): self.test_client.post('/sign-in', data={'assertion': fake_endorser_email}) # Endorse the change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name), data={'comment_text': u'', 'endorse_edits': 'Endorse Edits'}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(u'{} {}'.format(fake_endorser_email, repo_functions.ACTIVITY_ENDORSED_MESSAGE) in response.data) # And publish the change! with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name), data={'comment_text': u'', 'merge': 'Publish'}, follow_redirects=True) self.assertEqual(response.status_code, 200) # should've been redirected to the front page self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('activities-list') in response.data) # the activity we just published should be listed under 'recently published activities' self.assertTrue(generated_branch_name in response.data) self.assertTrue(response.data.find(generated_branch_name) > response.data.find(u'Recently Published Activities')) # Look in the published directory and see if the words are there. with open(join(self.publish_path, fake_page_slug, 'index.html')) as file: self.assertTrue(fake_page_content in file.read()) # in TestApp def test_delete_strange_tasks(self): ''' Delete a task that you can see on the activity list but haven't viewed or edited. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone disposable_task_description = u'unimportant task for unimportant person' response = self.test_client.post('/start', data={'task_description': disposable_task_description}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) self.assertTrue(PATTERN_TASK_COMMENT.format(disposable_task_description) in response.data) # create a branch programmatically on our pre-made clone check_task_description = u'Creating a Star Child for Ancient Aliens' check_branch = repo_functions.get_start_branch(self.clone1, 'master', check_task_description, fake_author_email) self.assertTrue(check_branch.name in self.clone1.branches) self.assertTrue(check_branch.name in self.origin.branches) # verify that the branch doesn't exist in our new clone with self.app.app_context(): with self.app.test_request_context(): from flask import session session['email'] = fake_author_email new_clone = view_functions.get_repo(flask_app=self.app) self.assertFalse(check_branch.name in new_clone.branches) # load the activity list and verify that the branch is visible there response = self.test_client.get('/', follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(check_branch.name in response.data) # Delete the activity response = self.test_client.post('/update', data={'abandon': 'Delete', 'branch': '{}'.format(check_branch.name)}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertFalse(check_branch.name in response.data) # in TestApp def test_review_process(self): ''' Check the review process ''' fake_task_description = u'groom pets for pet owners' fake_author_email = u'erica@example.com' fake_endorser_email = u'frances@example.com' fake_page_slug = u'hello' # log in with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # create a new branch response = self.test_client.post('/start', data={'task_description': fake_task_description}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) # extract the generated branch name from the returned HTML generated_branch_search = search(r'<!-- branch: (.{{{}}}) -->'.format(repo_functions.BRANCH_NAME_LENGTH), response.data) self.assertIsNotNone(generated_branch_search) try: generated_branch_name = generated_branch_search.group(1) except AttributeError: raise Exception('No match for generated branch name.') # create a new file response = self.test_client.post('/tree/{}/edit/'.format(generated_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': fake_page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) # get the edit page for the branch response = self.test_client.get('/tree/{}/edit/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a 'request feedback' button self.assertTrue(PATTERN_REQUEST_FEEDBACK_BUTTON in response.data) # get the overview page for the branch response = self.test_client.get('/tree/{}/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a 'request feedback' button self.assertTrue(PATTERN_REQUEST_FEEDBACK_BUTTON in response.data) # get the activity list page response = self.test_client.get('/', follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's an unreviewed edits link self.assertTrue(PATTERN_UNREVIEWED_EDITS_LINK.format(branch_name=generated_branch_name) in response.data) # Request feedback on the change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name), data={'comment_text': u'', 'request_feedback': u'Request Feedback'}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(u'{} {}'.format(fake_author_email, repo_functions.ACTIVITY_FEEDBACK_MESSAGE) in response.data) # # # Log in as a different person with HTTMock(self.mock_persona_verify_frances): self.test_client.post('/sign-in', data={'assertion': fake_endorser_email}) with HTTMock(self.auth_csv_example_allowed): # get the edit page for the branch response = self.test_client.get('/tree/{}/edit/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a 'Endorse Edits' button self.assertTrue(PATTERN_ENDORSE_BUTTON in response.data) # get the overview page for the branch response = self.test_client.get('/tree/{}/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a 'Endorse Edits' button self.assertTrue(PATTERN_ENDORSE_BUTTON in response.data) # get the activity list page response = self.test_client.get('/', follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a feedback requested link self.assertTrue(PATTERN_FEEDBACK_REQUESTED_LINK.format(branch_name=generated_branch_name) in response.data) # Endorse the change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name), data={'comment_text': u'', 'endorse_edits': 'Endorse Edits'}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(u'{} {}'.format(fake_endorser_email, repo_functions.ACTIVITY_ENDORSED_MESSAGE) in response.data) # log back in as the original editor with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # get the edit page for the branch response = self.test_client.get('/tree/{}/edit/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a 'publish' button self.assertTrue(PATTERN_PUBLISH_BUTTON in response.data) # get the overview page for the branch response = self.test_client.get('/tree/{}/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's a 'publish' button self.assertTrue(PATTERN_PUBLISH_BUTTON in response.data) # get the activity list page response = self.test_client.get('/', follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that there's an 'ready to publish' link self.assertTrue(PATTERN_READY_TO_PUBLISH_LINK.format(branch_name=generated_branch_name) in response.data) # And publish the change! with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name), data={'comment_text': u'', 'merge': 'Publish'}, follow_redirects=True) self.assertEqual(response.status_code, 200) # should've been redirected to the front page self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('activities-list') in response.data) # the activity we just published should be listed under 'recently published activities' self.assertTrue(generated_branch_name in response.data) self.assertTrue(response.data.find(generated_branch_name) > response.data.find(u'Recently Published Activities')) # in TestApp def test_get_request_does_not_create_branch(self): ''' Navigating to a made-up URL should not create a branch ''' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.auth_csv_example_allowed): fake_branch_name = 'this-should-not-create-a-branch' # # edit # response = self.test_client.get('/tree/{}/edit/'.format(fake_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 404) self.assertTrue(view_functions.MESSAGE_ACTIVITY_DELETED in response.data) # the branch path should not be in the returned HTML self.assertFalse(PATTERN_BRANCH_COMMENT.format(fake_branch_name) in response.data) # the branch name should not be in the origin's branches list self.assertFalse(fake_branch_name in self.origin.branches) # # history # response = self.test_client.get('/tree/{}/history/'.format(fake_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 404) self.assertTrue(view_functions.MESSAGE_ACTIVITY_DELETED in response.data) # the branch path should not be in the returned HTML self.assertFalse(PATTERN_BRANCH_COMMENT.format(fake_branch_name) in response.data) # the branch name should not be in the origin's branches list self.assertFalse(fake_branch_name in self.origin.branches) # # view # response = self.test_client.get('/tree/{}/view/'.format(fake_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 404) self.assertTrue(view_functions.MESSAGE_ACTIVITY_DELETED in response.data) # the branch path should not be in the returned HTML self.assertFalse(PATTERN_BRANCH_COMMENT.format(fake_branch_name) in response.data) # the branch name should not be in the origin's branches list self.assertFalse(fake_branch_name in self.origin.branches) # in TestApp def test_post_request_does_not_create_branch(self): ''' Certain POSTs to a made-up URL should not create a branch ''' fake_page_slug = u'hello' fake_page_path = u'{}/index.{}'.format(fake_page_slug, view_functions.CONTENT_FILE_EXTENSION) with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.auth_csv_example_allowed): # # try creating an article in a non-existent branch # fake_branch_name = repo_functions.make_branch_name() response = self.test_client.post('/tree/{}/edit/'.format(fake_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': fake_page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 404) self.assertTrue(view_functions.MESSAGE_ACTIVITY_DELETED in response.data) # the branch name should not be in the origin's branches list self.assertFalse(fake_branch_name in self.origin.branches) # # create a branch then delete it right before a POSTing a save command # fake_task_description = u'Doing fake stuff for Nobody' response = self.test_client.post('/start', data={'task_description': fake_task_description}, follow_redirects=True) # we should be on the new task's edit page self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) self.assertTrue(PATTERN_TASK_COMMENT.format(fake_task_description) in response.data) # extract the generated branch name from the returned HTML generated_branch_search = search(r'<!-- branch: (.{{{}}}) -->'.format(repo_functions.BRANCH_NAME_LENGTH), response.data) self.assertIsNotNone(generated_branch_search) try: generated_branch_name = generated_branch_search.group(1) except AttributeError: raise Exception('No match for generated branch name.') # create a new article response = self.test_client.post('/tree/{}/edit/'.format(generated_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': fake_page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('article-edit') in response.data) # load the article list and verify that the new article is listed response = self.test_client.get('/tree/{}/edit/'.format(generated_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) self.assertTrue(PATTERN_BRANCH_COMMENT.format(generated_branch_name) in response.data) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": fake_page_slug, "file_title": fake_page_slug, "file_type": view_functions.ARTICLE_LAYOUT}) in response.data) # load the article edit page and grab the hexsha from the form response = self.test_client.get('/tree/{}/edit/{}'.format(generated_branch_name, fake_page_path)) self.assertEqual(response.status_code, 200) hexsha = search(r'<input name="hexsha" value="(\w+)"', response.data).group(1) # delete the branch response = self.test_client.post('/update', data={'abandon': 'Delete', 'branch': '{}'.format(generated_branch_name)}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertFalse(generated_branch_name in response.data) # try submitting a change to the article response = self.test_client.post('/tree/{}/save/{}'.format(generated_branch_name, fake_page_path), data={'layout': view_functions.ARTICLE_LAYOUT, 'hexsha': hexsha, 'en-title': 'Greetings', 'en-body': 'Hello world.\n', 'fr-title': '', 'fr-body': '', 'url-slug': 'hello'}, follow_redirects=True) self.assertEqual(response.status_code, 404) self.assertTrue(view_functions.MESSAGE_ACTIVITY_DELETED in response.data) # the task name should not be in the returned HTML self.assertFalse(PATTERN_BRANCH_COMMENT.format(fake_task_description) in response.data) # the branch name should not be in the origin's branches list self.assertFalse('{}'.format(generated_branch_name) in self.origin.branches) # in TestApp def test_accessing_local_branch_fetches_remote(self): ''' GETting or POSTing to a URL that indicates a branch that exists remotely but not locally fetches the remote branch and allows access ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone disposable_task_description = u'unimportant task for unimportant person' response = self.test_client.post('/start', data={'task_description': disposable_task_description}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) self.assertTrue(PATTERN_TASK_COMMENT.format(disposable_task_description) in response.data) # create a branch programmatically on our pre-made clone check_task_description = u'the branch we are checking for for just me' check_branch = repo_functions.get_start_branch(self.clone1, 'master', check_task_description, fake_author_email) self.assertTrue(check_branch.name in self.clone1.branches) self.assertTrue(check_branch.name in self.origin.branches) # verify that the branch doesn't exist in our new clone with self.app.app_context(): with self.app.test_request_context(): from flask import session session['email'] = fake_author_email new_clone = view_functions.get_repo(flask_app=self.app) self.assertFalse(check_branch.name in new_clone.branches) # request an edit page for the check branch through the http interface response = self.test_client.get('/tree/{}/edit/'.format(check_branch.name), follow_redirects=True) self.assertEqual(response.status_code, 200) # the task description should be in the returned HTML self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('articles-list') in response.data) self.assertTrue(PATTERN_TASK_COMMENT.format(check_task_description) in response.data) # the branch name should now be in the original repo's branches list self.assertTrue(check_branch.name in new_clone.branches) # in TestApp def test_git_merge_strategy_implemented(self): ''' The Git merge strategy has been implmemented for a new clone. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # create a new clone via get_repo with self.app.app_context(): with self.app.test_request_context(): from flask import session session['email'] = fake_author_email new_clone = view_functions.get_repo(flask_app=self.app) # check for the config setting self.assertEqual(new_clone.config_reader().get_value('merge "ignored"', 'driver'), True) # check for the attributes setting attributes_path = join(new_clone.git_dir, 'info/attributes') self.assertTrue(exists(attributes_path)) with open(attributes_path, 'r') as file: content = file.read().decode("utf-8") self.assertEqual(content, u'{} merge=ignored'.format(repo_functions.TASK_METADATA_FILENAME)) # in TestApp def test_task_metadata_should_exist(self): ''' Task metadata file should exist but doesn't ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) fake_task_description = u'unimportant task for unimportant person' branch1 = repo_functions.get_start_branch(self.clone1, 'master', fake_task_description, fake_author_email) branch1_name = branch1.name branch1.checkout() # verify that the most recent commit on the new branch is for the task metadata file # by checking for the name of the file in the commit message self.assertTrue(repo_functions.TASK_METADATA_FILENAME in branch1.commit.message) # validate the existence of the task metadata file self.assertTrue(repo_functions.verify_file_exists_in_branch(self.clone1, repo_functions.TASK_METADATA_FILENAME, branch1_name)) # now delete it repo_functions.delete_task_metadata_for_branch(self.clone1, 'master') self.assertFalse(repo_functions.verify_file_exists_in_branch(self.clone1, repo_functions.TASK_METADATA_FILENAME, branch1_name)) # verify that we can load a functional edit page for the branch with HTTMock(self.auth_csv_example_allowed): # request an edit page for the check branch through the http interface response = self.test_client.get('/tree/{}/edit/'.format(branch1_name), follow_redirects=True) # it's a good response self.assertEqual(response.status_code, 200) # the branch name should be in the returned HTML self.assertTrue(PATTERN_BRANCH_COMMENT.format(branch1_name) in response.data) # the 'Started by' should be 'Unknown' for now self.assertTrue(PATTERN_AUTHOR_COMMENT.format(u'unknown') in response.data) # in TestApp def test_google_callback_is_successful(self): ''' Ensure we get a successful page load on callback from Google authentication ''' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.mock_google_authorization): self.test_client.post('/authorize') with HTTMock(self.mock_successful_google_callback): response = self.test_client.get('/callback?state=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&code=code') with self.app.app_context(): ga_config = google_api_functions.read_ga_config(self.app.config['RUNNING_STATE_DIR']) self.assertEqual(ga_config['access_token'], 'meowser_token') self.assertEqual(ga_config['refresh_token'], 'refresh_meows') self.assertTrue('/setup' in response.location) # in TestApp def test_analytics_setup_is_successful(self): with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.mock_google_authorization): self.test_client.post('/authorize') # mock-post the form in authorize.html to authorization-complete.html with some dummy values and check the results response = self.test_client.post('/authorization-complete', data={'email': 'erica@example.com', 'name': 'Jane Doe', 'google_email': 'erica@example.com', 'return_link': 'http://example.com', 'property': '12345678', '12345678-domain': 'http://propertyone.example.com', '12345678-name': 'Property One'}) self.assertEqual(u'200 OK', response.status) with self.app.app_context(): ga_config = google_api_functions.read_ga_config(self.app.config['RUNNING_STATE_DIR']) # views.authorization_complete() strips the 'http://' from the domain self.assertEqual(ga_config['project_domain'], 'propertyone.example.com') self.assertEqual(ga_config['profile_id'], '12345678') # in TestApp def test_handle_bad_analytics_response(self): ''' Verify that an unauthorized analytics response is handled correctly ''' with HTTMock(self.mock_google_invalid_credentials_response): with self.app.app_context(): analytics_dict = google_api_functions.fetch_google_analytics_for_page(self.app.config, u'index.html', 'meowser_token') self.assertEqual(analytics_dict, {}) # in TestApp def test_google_callback_fails(self): ''' Ensure that we get an appropriate error flashed when we fail to auth with google ''' with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.mock_google_authorization): response = self.test_client.post('/authorize') with HTTMock(self.mock_failed_google_callback): response = self.test_client.get('/callback?state=PPPPPPPPPPPPPPPPPPPPPPPPPPPPPPPP&code=code', follow_redirects=True) self.assertEqual(response.status_code, 200) # find the flashed error message in the returned HTML self.assertTrue('Google rejected authorization request' in response.data) # in TestApp def test_invalid_access_token(self): ''' Ensure that we get an appropriate error flashed when we have an invalid access token ''' with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) self.assertEqual(response.status_code, 200) with HTTMock(self.mock_google_invalid_credentials_response): response = self.test_client.get('/setup', follow_redirects=True) self.assertEqual(response.status_code, 200) # find the flashed error message in the returned HTML self.assertTrue('Invalid Credentials' in response.data) # in TestApp def test_no_properties_found(self): ''' Ensure that we get an appropriate error flashed when no analytics properties are associated with the authorized Google account ''' with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) self.assertEqual(response.status_code, 200) with HTTMock(self.mock_google_no_properties_response): response = self.test_client.get('/setup', follow_redirects=True) self.assertEqual(response.status_code, 200) # find the flashed error message in the returned HTML self.assertTrue('Your Google Account is not associated with any Google Analytics properties' in response.data) # in TestApp def test_redirect(self): ''' Check redirect to BROWSERID_URL. ''' with HTTMock(self.mock_persona_verify_erica): response = self.test_client.get('/not-allowed', headers={'Host': 'wrong.local'}) expected_url = urljoin(self.app.config['BROWSERID_URL'], '/not-allowed') self.assertEqual(response.status_code, 302) self.assertEqual(response.headers['Location'], expected_url) # in TestApp def test_create_category(self): ''' Creating a new category creates a directory with an appropriate index file inside. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'force a clam shell open for starfish' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new category page_slug = u'hello' response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, page_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the category test self.assertTrue(view_functions.is_category_dir(dir_location)) # in TestApp def test_period_in_category_name(self): ''' Putting a period in a category or subcategory name doesn't crop it. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') # Start a new task erica.start_task(description=u'Be Shot Hundreds Of Feet Into The Air for A Geyser Of Highly Pressurized Water') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder other_slug = u'other' erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, other_slug)) # Create a category that has a period in its name category_name = u'Mt. Splashmore' category_slug = slugify(category_name) erica.add_category(category_name=category_name) # the category is correctly represented on the page self.assertIsNotNone(erica.soup.find(lambda tag: bool(tag.name == 'a' and category_name in tag.text))) self.assertIsNotNone(erica.soup.find(lambda tag: bool(tag.name == 'a' and category_slug in tag['href']))) # the category is correctly represented on disk repo = view_functions.get_repo(repo_path=self.app.config['REPO_PATH'], work_path=self.app.config['WORK_PATH'], email='erica@example.com') cat_location = join(repo.working_dir, u'{}/{}'.format(other_slug, category_slug)) self.assertTrue(exists(cat_location)) self.assertTrue(view_functions.is_category_dir(cat_location)) # in TestApp def test_empty_category_or_article_name(self): ''' Submitting an empty category or article name reloads with a warning. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') pattern_template_comment_stripped = sub(ur'<!--|-->', u'', PATTERN_TEMPLATE_COMMENT) # Start a new task erica.start_task(description=u'Deep-Fry a Buffalo in Forty Seconds for Moe') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder other_slug = u'other' erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, other_slug)) # Try to create a category with no name category_name = u'' erica.add_category(category_name=category_name) # the articles-list template reloaded comments = erica.soup.findAll(text=lambda text: isinstance(text, Comment)) self.assertTrue(pattern_template_comment_stripped.format(u'articles-list') in comments) # verify that there's a flash message warning about submitting an empty description self.assertEqual(u'Please enter a name to create a topic!', erica.soup.find('li', class_='flash').text) # Try to create a category with a name that slufigies to an empty string category_name = u'(╯□)╯︵ ┻━┻' self.assertEqual(u'', slugify(category_name)) erica.add_category(category_name=category_name) # the articles-list template reloaded comments = erica.soup.findAll(text=lambda text: isinstance(text, Comment)) self.assertTrue(pattern_template_comment_stripped.format(u'articles-list') in comments) # verify that there's a flash message warning about submitting an empty description self.assertEqual(u'{} is not an acceptable topic name!'.format(category_name), erica.soup.find('li', class_='flash').text) # Create a category and sub-category category_name = u'Mammals' subcategory_name = u'Bison' erica.add_category(category_name=category_name) erica.add_subcategory(subcategory_name=subcategory_name) # Try to create an article with no name article_name = u'' erica.add_article(article_name=article_name) # the articles-list template reloaded comments = erica.soup.findAll(text=lambda text: isinstance(text, Comment)) self.assertTrue(pattern_template_comment_stripped.format(u'articles-list') in comments) # verify that there's a flash message warning about submitting an empty description self.assertEqual(u'Please enter a name to create an article!', erica.soup.find('li', class_='flash').text) # Try to create a article with a name that slufigies to an empty string article_name = u'(╯□)╯︵ ┻━┻' self.assertEqual(u'', slugify(article_name)) erica.add_article(article_name=article_name) # the articles-list template reloaded comments = erica.soup.findAll(text=lambda text: isinstance(text, Comment)) self.assertTrue(pattern_template_comment_stripped.format(u'articles-list') in comments) # verify that there's a flash message warning about submitting an empty description self.assertEqual(u'{} is not an acceptable article name!'.format(article_name), erica.soup.find('li', class_='flash').text) # in TestApp def test_create_duplicate_category(self): ''' If we ask to create a category that exists, let's not and say we did. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone working_branch = repo_functions.get_start_branch(self.clone1, 'master', u'force a clam shell open for starfish', fake_author_email) working_branch.checkout() # create a new category request_data = {'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': u'hello'} response = self.test_client.post('/tree/{}/edit/'.format(working_branch.name), data=request_data, follow_redirects=True) self.assertEqual(response.status_code, 200) # now do it again response = self.test_client.post('/tree/{}/edit/'.format(working_branch.name), data=request_data, follow_redirects=True) self.assertEqual(response.status_code, 200) response_data = sub('&#34;', '"', response.data.decode('utf-8')) self.assertTrue(u'Topic "hello" already exists' in response_data) # pull the changes self.clone1.git.pull('origin', working_branch.name) # everything looks good dir_location = join(self.clone1.working_dir, u'hello') idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the category test self.assertTrue(view_functions.is_category_dir(dir_location)) # in TestApp def test_delete_categories_and_articles(self): ''' Non-empty categories and articles can be deleted ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'vomit digestive fluid onto rotting flesh for flies' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a categories directory categories_slug = u'categories' response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': categories_slug}, follow_redirects=True) # and put a new category inside it cata_title = u'Mouth Parts' cata_slug = slugify(cata_title) response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': cata_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # put another cateogry inside that catb_title = u'Esophagus' catb_slug = slugify(catb_title) response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, join(categories_slug, cata_slug)), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': catb_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # and an article inside that art_title = u'Stomach' art_slug = slugify(art_title) response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, join(categories_slug, cata_slug, catb_slug)), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': art_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # verify that the categories and article exist art_location = join(self.clone1.working_dir, categories_slug, cata_slug, catb_slug, art_slug) catb_location = join(self.clone1.working_dir, categories_slug, cata_slug, catb_slug) cata_location = join(self.clone1.working_dir, categories_slug, cata_slug) self.assertTrue(exists(art_location)) self.assertTrue(view_functions.is_article_dir(art_location)) # delete category a while in category b response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, join(categories_slug, cata_slug, catb_slug)), data={'action': 'delete', 'request_path': join(categories_slug, cata_slug)}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # verify that the deleted category and article no longer exist self.assertFalse(exists(art_location)) self.assertFalse(exists(catb_location)) self.assertFalse(exists(cata_location)) # in TestApp def test_delete_commit_accuracy(self): ''' The record of a delete in the corresponding commit is accurate. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') # Start a new task erica.start_task(description=u'Ferment Tuber Fibres Using Symbiotic Bacteria in the Intestines for Naked Mole Rats') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder erica.follow_link(href='/tree/{}/edit/other/'.format(branch_name)) # Create a category and fill it with some subcategories and articles category_names = [u'Indigestible Cellulose'] subcategory_names = [u'Volatile Fatty Acids', u'Non-Reproducing Females', u'Arid African Deserts'] article_names = [u'Eusocial Exhibition', u'Old Enough to Eat Solid Food', u'Contributing to Extension of Tunnels', u'Foraging and Nest Building'] erica.add_category(category_name=category_names[0]) category_path = erica.path erica.add_subcategory(subcategory_name=subcategory_names[0]) erica.open_link(category_path) erica.add_subcategory(subcategory_name=subcategory_names[1]) erica.open_link(category_path) erica.add_subcategory(subcategory_name=subcategory_names[2]) subcategory_path = erica.path erica.add_article(article_name=article_names[0]) erica.open_link(subcategory_path) erica.add_article(article_name=article_names[1]) erica.open_link(subcategory_path) erica.add_article(article_name=article_names[2]) erica.open_link(subcategory_path) erica.add_article(article_name=article_names[3]) # Delete the all-containing category erica.open_link(category_path) erica.follow_modify_category_link(category_names[0]) erica.delete_category() # get and check the history repo = view_functions.get_repo(repo_path=self.app.config['REPO_PATH'], work_path=self.app.config['WORK_PATH'], email='erica@example.com') activity_history = view_functions.make_activity_history(repo=repo) delete_history = json.loads(activity_history[0]['commit_body']) for item in delete_history: self.assertEqual(item['action'], u'delete') if item['title'] in category_names: self.assertEqual(item['display_type'], u'category') category_names.remove(item['title']) elif item['title'] in subcategory_names: self.assertEqual(item['display_type'], u'category') subcategory_names.remove(item['title']) elif item['title'] in article_names: self.assertEqual(item['display_type'], u'article') article_names.remove(item['title']) # we should have fewer category, subcategory, and article names self.assertEqual(len(category_names), 0) self.assertEqual(len(subcategory_names), 0) self.assertEqual(len(article_names), 0) # in TestApp def test_delete_article(self): ''' An article can be deleted ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'Remove Small Organic Particles From Seawater Passing Over Outspread Tentacles for Sea Anemones' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create an article art_title = u'Zooplankters' art_slug = slugify(art_title) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': art_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # verify that the article exists art_location = join(self.clone1.working_dir, art_slug) self.assertTrue(exists(art_location)) self.assertTrue(view_functions.is_article_dir(art_location)) # delete the article response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, art_slug), data={'action': 'delete', 'request_path': art_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # verify that the deleted category and article no longer exist self.assertFalse(exists(art_location)) # in TestApp def test_article_creation_with_unicode_via_web_interface(self): ''' An article with unicode in its title is created as expected. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'eviscerate a salmon for baby grizzly bears' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new article art_title = u'快速狐狸' art_slug = slugify(art_title) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': art_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format(u'article-edit') in response.data.decode('utf-8')) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, art_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the article test self.assertTrue(view_functions.is_article_dir(dir_location)) # the title saved in the index front matter is the same text that was used to create the article self.assertEqual(view_functions.get_value_from_front_matter('title', idx_location), art_title) # the title saved in the index front matter is displayed on the article list page response = self.test_client.get('/tree/{}/edit/'.format(working_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format(u'articles-list') in response.data.decode('utf-8')) self.assertTrue(PATTERN_BRANCH_COMMENT.format(working_branch) in response.data.decode('utf-8')) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": art_slug, "file_title": art_title, "file_type": view_functions.ARTICLE_LAYOUT}) in response.data.decode('utf-8')) # in TestApp def test_save_non_roman_characters_to_article(self): ''' Adding non-roman characters to an article's title and body raises no unicode errors. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('erica@example.com') # Start a new task, topic, subtopic, article args = 'Mermithergate for Ant Worker', 'Enoplia Nematode', 'Genus Mermis', 'Cephalotes Atratus' erica.quick_activity_setup(*args) # Edit the new article and give it a non-roman character title erica.edit_article(u'快速狐狸', u'Myrmeconema ੯ूᵕू ໒꒱ƶƵ Neotropicum') # in TestApp def test_sign_in_with_email_containing_non_roman_characters(self): ''' Adding non-roman characters to the sign-in email raises no errors. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_non_roman): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('੯ूᵕू ໒꒱ƶƵ@快速狐狸.com') # in TestApp def test_new_item_has_name_and_title(self): ''' A slugified directory name and display title are created when a new category or article is created. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'eviscerate a salmon for baby grizzly bears' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new category cat_title = u'grrowl!! Yeah' cat_slug = slugify(cat_title) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': cat_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, cat_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the category test self.assertTrue(view_functions.is_category_dir(dir_location)) # the title saved in the index front matter is the same text that was used to create the category self.assertEqual(view_functions.get_value_from_front_matter('title', idx_location), cat_title) # the title saved in the index front matter is displayed on the article list page response = self.test_client.get('/tree/{}/edit/'.format(working_branch_name), follow_redirects=True) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": cat_slug, "file_title": cat_title, "file_type": view_functions.CATEGORY_LAYOUT}) in response.data) # create a new article art_title = u'快速狐狸' art_slug = slugify(art_title) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': art_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format(u'article-edit') in response.data.decode('utf-8')) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, art_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the article test self.assertTrue(view_functions.is_article_dir(dir_location)) # the title saved in the index front matter is the same text that was used to create the article self.assertEqual(view_functions.get_value_from_front_matter('title', idx_location), art_title) # the title saved in the index front matter is displayed on the article list page response = self.test_client.get('/tree/{}/edit/'.format(working_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format(u'articles-list') in response.data.decode('utf-8')) self.assertTrue(PATTERN_BRANCH_COMMENT.format(working_branch) in response.data.decode('utf-8')) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": art_slug, "file_title": art_title, "file_type": view_functions.ARTICLE_LAYOUT}) in response.data.decode('utf-8')) # in TestApp def test_edit_category_title_and_description(self): ''' A category's title and description can be edited. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'rapidly discharge black ink into the mantle cavity for squids' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a categories directory categories_slug = u'categories' response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': categories_slug}, follow_redirects=True) # and put a new category inside it cat_title = u'Bolus' cat_slug = slugify(cat_title) response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': cat_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # get the hexsha hexsha = self.clone1.commit().hexsha # get the modify page and verify that the form renders with the correct values cat_path = join(categories_slug, cat_slug, u'index.{}'.format(view_functions.CONTENT_FILE_EXTENSION)) response = self.test_client.get('/tree/{}/modify/{}'.format(working_branch_name, view_functions.strip_index_file(cat_path)), follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(PATTERN_FORM_CATEGORY_TITLE.format(title=cat_title) in response.data) self.assertTrue(PATTERN_FORM_CATEGORY_DESCRIPTION.format(description=u'') in response.data) # now save a new title and description for the category new_cat_title = u'Caecum' cat_description = u'An intraperitoneal pouch, that is considered to be the beginning of the large intestine.' response = self.test_client.post('/tree/{}/modify/{}'.format(working_branch_name, cat_path), data={'layout': view_functions.CATEGORY_LAYOUT, 'hexsha': hexsha, 'url-slug': u'{}/{}/'.format(categories_slug, cat_slug), 'en-title': new_cat_title, 'en-description': cat_description, 'order': u'0', 'save': u''}, follow_redirects=True) self.assertEqual(response.status_code, 200) # check the returned HTML for the description and title values (format will change as pages are designed) response_data = sub('&#39;', '\'', response.data.decode('utf-8')) self.assertTrue(PATTERN_FLASH_SAVED_CATEGORY.format(title=new_cat_title) in response_data) self.assertTrue(PATTERN_FORM_CATEGORY_DESCRIPTION.format(description=cat_description) in response_data) self.assertTrue(PATTERN_FORM_CATEGORY_TITLE.format(title=new_cat_title) in response_data) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, categories_slug, cat_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the category test self.assertTrue(view_functions.is_category_dir(dir_location)) # the title and description saved in the index front matter is the same text that was used to create the category self.assertEqual(view_functions.get_value_from_front_matter('title', idx_location), new_cat_title) self.assertEqual(view_functions.get_value_from_front_matter('description', idx_location), cat_description) # the title saved in the index front matter is displayed on the article list page response = self.test_client.get('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), follow_redirects=True) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": cat_slug, "file_title": new_cat_title, "file_type": view_functions.CATEGORY_LAYOUT}) in response.data) # in TestApp def test_delete_category(self): ''' A category can be deleted ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'clasp with front legs and draw up the hind end for geometridae' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a categories directory categories_slug = u'categories' response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': categories_slug}, follow_redirects=True) # and put a new category inside it cat_title = u'Soybean Looper' cat_slug = slugify(cat_title) response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': cat_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # get the hexsha hexsha = self.clone1.commit().hexsha # now delete the category cat_description = u'' url_slug = u'{}/{}/'.format(categories_slug, cat_slug) response = self.test_client.post('/tree/{}/modify/{}'.format(working_branch_name, url_slug.rstrip('/')), data={'layout': view_functions.CATEGORY_LAYOUT, 'hexsha': hexsha, 'url-slug': url_slug, 'en-title': cat_title, 'en-description': cat_description, 'order': u'0', 'delete': u''}, follow_redirects=True) self.assertEqual(response.status_code, 200) # check the returned HTML for the description and title values (format will change as pages are designed) response_data = sub('&#34;', '"', response.data.decode('utf-8')) self.assertTrue(u'<li class="flash flash--notice">The "{}" topic was deleted</li>'.format(cat_title) in response_data) # pull the changes self.clone1.git.pull('origin', working_branch_name) # the directory was deleted dir_location = join(self.clone1.working_dir, categories_slug, cat_slug) self.assertFalse(exists(dir_location) and isdir(dir_location)) # the title is not displayed on the article list page response = self.test_client.get('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), follow_redirects=True) self.assertFalse(PATTERN_FILE_COMMENT.format(**{"file_name": cat_slug, "file_title": cat_title, "file_type": view_functions.CATEGORY_LAYOUT}) in response.data) # in TestApp def test_set_and_retrieve_order_and_description(self): ''' Order and description can be set to and retrieved from an article's or category's front matter. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'regurgitate partially digested worms and grubs for baby birds' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a categories directory categories_slug = u'categories' response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': categories_slug}, follow_redirects=True) # and put a new category inside it cat_title = u'Small Intestine' cat_slug = slugify(cat_title) response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': cat_title}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # get the hexsha hexsha = self.clone1.commit().hexsha # now save some values into the category's index page's front matter new_cat_title = u'The Small Intestine' cat_description = u'The part of the GI tract following the stomach and followed by the large intestine where much of the digestion and absorption of food takes place.' cat_order = 3 cat_path = join(categories_slug, cat_slug, u'index.{}'.format(view_functions.CONTENT_FILE_EXTENSION)) response = self.test_client.post('/tree/{}/save/{}'.format(working_branch_name, cat_path), data={'layout': view_functions.CATEGORY_LAYOUT, 'hexsha': hexsha, 'en-title': new_cat_title, 'en-description': cat_description, 'order': cat_order}, follow_redirects=True) self.assertEqual(response.status_code, 200) # check the returned HTML for the description and order values (format will change as pages are designed) self.assertTrue(u'<input name="en-description" type="hidden" value="{}" />'.format(cat_description) in response.data) self.assertTrue(u'<input name="order" type="hidden" value="{}" />'.format(cat_order) in response.data) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, categories_slug, cat_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the category test self.assertTrue(view_functions.is_category_dir(dir_location)) # the title saved in the index front matter is the same text that was used to create the category self.assertEqual(view_functions.get_value_from_front_matter('title', idx_location), new_cat_title) # check order and description self.assertEqual(view_functions.get_value_from_front_matter('order', idx_location), cat_order) self.assertEqual(view_functions.get_value_from_front_matter('description', idx_location), cat_description) # the title saved in the index front matter is displayed on the article list page response = self.test_client.get('/tree/{}/edit/{}'.format(working_branch_name, categories_slug), follow_redirects=True) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": cat_slug, "file_title": new_cat_title, "file_type": view_functions.CATEGORY_LAYOUT}) in response.data) # in TestApp def test_column_navigation_structure(self): ''' The column navigation structure matches the structure of the site. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'force a clam shell open for starfish' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create some nested categories slug_hello = u'hello' response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': slug_hello}, follow_redirects=True) self.assertEqual(response.status_code, 200) slug_world = u'world' response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, slug_hello), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': slug_world}, follow_redirects=True) self.assertEqual(response.status_code, 200) slug_how = u'how' response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, sep.join([slug_hello, slug_world])), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': slug_how}, follow_redirects=True) self.assertEqual(response.status_code, 200) slug_are = u'are' response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, sep.join([slug_hello, slug_world, slug_how])), data={'action': 'create', 'create_what': view_functions.CATEGORY_LAYOUT, 'request_path': slug_are}, follow_redirects=True) self.assertEqual(response.status_code, 200) # pull the changes self.clone1.git.pull('origin', working_branch_name) # get the columns dir_columns = view_functions.make_directory_columns(self.clone1, working_branch_name, sep.join([slug_hello, slug_world, slug_how, slug_are])) # test that the contents match our expectations self.assertEqual(len(dir_columns), 4) self.assertEqual(len(dir_columns[0]['files']), 6) expected = {'hello': u'category', 'img': u'folder', 'index.md': u'file', 'other': u'folder', 'other.md': u'file', 'sub': u'folder'} for item in dir_columns[0]['files']: self.assertTrue(item['name'] in expected) self.assertTrue(expected[item['name']] == item['display_type']) self.assertTrue(dir_columns[1]['files'][0]['name'] == slug_world) self.assertTrue(dir_columns[2]['files'][0]['name'] == slug_how) self.assertTrue(dir_columns[3]['files'][0]['name'] == slug_are) # in TestApp def test_activity_overview_page_is_accurate(self): ''' The activity history page accurately displays the activity history ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'deposit eggs in a syconium for fig wasp larvae' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() title_fig_zh = u'无花果' slug_fig_zh = u'wu-hua-guo' title_syconium = u'Syconium' slug_syconium = u'syconium' title_ostiole = u'Ostiole' title_fig_en = u'Fig' title_fig_bn = u'Dumur' create_details = [ (u'', title_fig_zh, view_functions.CATEGORY_LAYOUT), (slug_fig_zh, title_syconium, view_functions.CATEGORY_LAYOUT), (u'{}/{}'.format(slug_fig_zh, slug_syconium), title_ostiole, view_functions.ARTICLE_LAYOUT), (u'', title_fig_en, view_functions.CATEGORY_LAYOUT), (u'', title_fig_bn, view_functions.CATEGORY_LAYOUT) ] for detail in create_details: response = self.test_client.post('/tree/{}/edit/{}'.format(working_branch_name, detail[0]), data={'action': 'create', 'create_what': detail[2], 'request_path': detail[1]}, follow_redirects=True) self.assertEqual(response.status_code, 200) # add a comment comment_text = u'The flowers provide a safe haven and nourishment for the next generation of wasps. ᙙᙖ' response = self.test_client.post('/tree/{}/'.format(working_branch_name), data={'comment': 'Comment', 'comment_text': comment_text}, follow_redirects=True) self.assertEqual(response.status_code, 200) # delete a directory response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'delete', 'request_path': slug_fig_zh}, follow_redirects=True) self.assertEqual(response.status_code, 200) # get the activity history page response = self.test_client.get('/tree/{}/'.format(working_branch_name), follow_redirects=True) # TODO: for some reason (encoding?) my double-quotes are being replaced by &#34; in the returned HTML response_data = sub('&#34;', '"', response.data.decode('utf-8')) # make sure everything we did above is shown on the activity page self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('activity-overview') in response_data) self.assertTrue(PATTERN_OVERVIEW_ACTIVITY_STARTED.format(**{"activity_name": task_description, "author_email": fake_author_email}) in response_data) self.assertTrue(PATTERN_OVERVIEW_COMMENT_BODY.format(**{"comment_body": comment_text}) in response_data) self.assertTrue(PATTERN_OVERVIEW_ITEM_DELETED.format(**{"deleted_name": title_fig_zh, "deleted_type": view_functions.file_display_name(view_functions.CATEGORY_LAYOUT), "deleted_also": u'(containing 1 topic and 1 article) ', "author_email": fake_author_email}) in response_data) for detail in create_details: self.assertTrue(PATTERN_OVERVIEW_ITEM_CREATED.format(**{"created_name": detail[1], "created_type": detail[2], "author_email": fake_author_email}), response_data) # in TestApp def test_activity_history_summary_accuracy(self): ''' The summary of an activity's history is displayed as expected. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.test_client, self) erica.sign_in(email='erica@example.com') # Start a new task erica.start_task(description=u'Parasitize with Ichneumonidae for Moth Larvae') # Get the branch name branch_name = erica.get_branch_name() # Load the activity overview page erica.follow_link(href='/tree/{}'.format(branch_name)) # there shouldn't be a summary yet summary_div = erica.soup.find("div", class_="activity-summary") self.assertIsNone(summary_div) # Load the "other" folder erica.open_link(url='/tree/{}/edit/other/'.format(branch_name)) # Create a category, sub-category, article category_name = u'Antennae Segments' subcategory_name = u'Short Ovipositors' article_names = [u'Inject Eggs Directly Into a Host Body', u'A Technique Of Celestial Navigation Called Transverse Orientation'] erica.add_category(category_name=category_name) erica.add_subcategory(subcategory_name=subcategory_name) subcategory_path = erica.path erica.add_article(article_name=article_names[0]) # edit the article erica.edit_article(title_str=article_names[0], body_str=u'Inject venom along with the egg') # create another article and delete it erica.open_link(subcategory_path) erica.add_article(article_name=article_names[1]) erica.open_link(subcategory_path) erica.delete_article(article_names[1]) # Load the activity overview page erica.open_link(url='/tree/{}/'.format(branch_name)) # there is a summary summary_div = erica.soup.find("div", class_="activity-summary") self.assertIsNotNone(summary_div) # it's right about what's changed self.assertIsNotNone(summary_div.find(lambda tag: bool(tag.name == 'a' and '2 articles and 2 topics' in tag.text))) # grab all the table rows check_rows = summary_div.find_all('tr') # make sure they match what we did above category_row = check_rows.pop() category_cells = category_row.find_all('td') self.assertIsNotNone(category_cells[0].find('a')) self.assertEqual(category_cells[0].text, category_name) self.assertEqual(category_cells[1].text, u'Category') self.assertEqual(category_cells[2].text, u'Created') subcategory_row = check_rows.pop() subcategory_cells = subcategory_row.find_all('td') self.assertIsNotNone(subcategory_cells[0].find('a')) self.assertEqual(subcategory_cells[0].text, subcategory_name) self.assertEqual(subcategory_cells[1].text, u'Category') self.assertEqual(subcategory_cells[2].text, u'Created') article_1_row = check_rows.pop() article_1_cells = article_1_row.find_all('td') self.assertIsNotNone(article_1_cells[0].find('a')) self.assertEqual(article_1_cells[0].text, article_names[0]) self.assertEqual(article_1_cells[1].text, u'Article') self.assertEqual(article_1_cells[2].text, u'Created, Edited') article_2_row = check_rows.pop() article_2_cells = article_2_row.find_all('td') self.assertIsNone(article_2_cells[0].find('a')) self.assertEqual(article_2_cells[0].text, article_names[1]) self.assertEqual(article_2_cells[1].text, u'Article') self.assertEqual(article_2_cells[2].text, u'Created, Deleted') # only the header row's left self.assertEqual(len(check_rows), 1) # in TestApp def test_create_page_creates_directory_containing_index(self): ''' Creating a new page creates a directory with an editable index file inside. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'filter plankton from sea water for humpback whales' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new page page_slug = u'hello' page_path = u'{}/index.{}'.format(page_slug, view_functions.CONTENT_FILE_EXTENSION) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(page_path in response.data) # pull the changes self.clone1.git.pull('origin', working_branch_name) # a directory was created dir_location = join(self.clone1.working_dir, page_slug) idx_location = u'{}/index.{}'.format(dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(dir_location) and isdir(dir_location)) # an index page was created inside self.assertTrue(exists(idx_location)) # the directory and index page pass the article test self.assertTrue(view_functions.is_article_dir(dir_location)) # in TestApp def test_can_rename_editable_directories(self): ''' Can rename an editable directory. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'filter plankton from sea water for humpback whales' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new page page_slug = u'hello' page_path = u'{}/index.{}'.format(page_slug, view_functions.CONTENT_FILE_EXTENSION) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(page_path in response.data) hexsha = search(r'<input name="hexsha" value="(\w+)"', response.data).group(1) # now save the file with new content new_page_slug = u'goodbye' new_page_path = u'{}/index.{}'.format(new_page_slug, view_functions.CONTENT_FILE_EXTENSION) response = self.test_client.post('/tree/{}/save/{}'.format(working_branch_name, page_path), data={'layout': view_functions.ARTICLE_LAYOUT, 'hexsha': hexsha, 'en-title': u'', 'en-body': u'', 'fr-title': u'', 'fr-body': u'', 'url-slug': u'{}'.format(new_page_slug)}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(new_page_path in response.data) # pull the changes self.clone1.git.pull('origin', working_branch_name) # the old directory is gone old_dir_location = join(self.clone1.working_dir, page_slug) self.assertFalse(exists(old_dir_location)) # the new directory exists and is properly structured new_dir_location = join(self.clone1.working_dir, new_page_slug) self.assertTrue(exists(new_dir_location) and isdir(new_dir_location)) # an index page is inside idx_location = u'{}/index.{}'.format(new_dir_location, view_functions.CONTENT_FILE_EXTENSION) self.assertTrue(exists(idx_location)) # the directory and index page pass the editable test self.assertTrue(view_functions.is_article_dir(new_dir_location)) # in TestApp def test_cannot_move_a_directory_inside_iteslf(self): ''' Can't rename an editable directory in a way which moves it inside itself ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'filter plankton from sea water for humpback whales' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new page page_slug = u'hello' page_path = u'{}/index.{}'.format(page_slug, view_functions.CONTENT_FILE_EXTENSION) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(page_path in response.data) hexsha = search(r'<input name="hexsha" value="(\w+)"', response.data).group(1) # now save the file with new content new_page_slug = u'hello/is/better/than/goodbye' new_page_path = u'{}/index.{}'.format(new_page_slug, view_functions.CONTENT_FILE_EXTENSION) response = self.test_client.post('/tree/{}/save/{}'.format(working_branch_name, page_path), data={'layout': view_functions.ARTICLE_LAYOUT, 'hexsha': hexsha, 'en-title': u'', 'en-body': u'', 'fr-title': u'', 'fr-body': u'', 'url-slug': u'{}'.format(new_page_slug)}, follow_redirects=True) self.assertEqual(response.status_code, 200) # the new page shouldn't have been created self.assertFalse(new_page_path in response.data) # there shoudld be a flashed error message self.assertTrue(u'I cannot move a directory inside itself!' in response.data) # pull the changes self.clone1.git.pull('origin', working_branch_name) # the old directory is not gone old_dir_location = join(self.clone1.working_dir, page_slug) self.assertTrue(exists(old_dir_location)) # the new directory doesn't exist new_dir_location = join(self.clone1.working_dir, new_page_slug) self.assertFalse(exists(new_dir_location) and isdir(new_dir_location)) # in TestApp def test_editable_directories_are_shown_as_articles(self): ''' Editable directories (directories containing only an editable index file) are displayed as articles. ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'filter plankton from sea water for humpback whales' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # create a new page page_slug = u'hello' page_path = u'{}/index.{}'.format(page_slug, view_functions.CONTENT_FILE_EXTENSION) response = self.test_client.post('/tree/{}/edit/'.format(working_branch_name), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': page_slug}, follow_redirects=True) self.assertEqual(response.status_code, 200) self.assertTrue(page_path in response.data) # load the index page response = self.test_client.get('/tree/{}/edit/'.format(working_branch_name), follow_redirects=True) self.assertEqual(response.status_code, 200) # verify that the new folder is represented as a file in the HTML self.assertTrue(PATTERN_BRANCH_COMMENT.format(working_branch_name) in response.data) self.assertTrue(PATTERN_FILE_COMMENT.format(**{"file_name": page_slug, "file_title": page_slug, "file_type": view_functions.ARTICLE_LAYOUT}) in response.data) # in TestApp def test_page_not_found_error(self): ''' A 404 page is generated when we get an address that doesn't exist ''' fake_author_email = u'erica@example.com' with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_author_email}) with HTTMock(self.auth_csv_example_allowed): # start a new branch via the http interface # invokes view_functions/get_repo which creates a clone task_description = u'drink quinine for mosquitos' working_branch = repo_functions.get_start_branch(self.clone1, 'master', task_description, fake_author_email) self.assertTrue(working_branch.name in self.clone1.branches) self.assertTrue(working_branch.name in self.origin.branches) working_branch_name = working_branch.name working_branch.checkout() # get a non-existent page response = self.test_client.get('tree/{}/malaria'.format(working_branch_name), follow_redirects=True) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('error-404') in response.data) # these values are set in setUp() above self.assertTrue(u'support@example.com' in response.data) self.assertTrue(u'(123) 456-7890' in response.data) # in TestApp def test_garbage_edit_url_raises_page_not_found(self): ''' A 404 page is generated when we get an edit address that doesn't exist ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('erica@example.com') # Start a new task erica.start_task(description=u'Take Malarone for People Susceptible to Malaria') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder other_slug = u'other' erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, other_slug)) # Create a category category_name = u'Rubber Plants' category_slug = slugify(category_name) erica.add_category(category_name=category_name) # Try to load a non-existent page within the category erica.open_link(url='/tree/{}/edit/{}/malaria'.format(branch_name, category_slug), expected_status_code=404) # in TestApp def test_garbage_view_url_raises_page_not_found(self): ''' A 404 page is generated when we get a view address that doesn't exist ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('erica@example.com') # Start a new task erica.start_task(description=u'Chew Mulberry Leaves for Silkworms') # Get the branch name branch_name = erica.get_branch_name() # Enter the "other" folder other_slug = u'other' erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, other_slug)) # Create a category category_name = u'Bombyx Mori' category_slug = slugify(category_name) erica.add_category(category_name=category_name) # Try to load a non-existent asset within the other folder erica.open_link(url='/tree/{}/view/{}/{}/missing.jpg'.format(branch_name, other_slug, category_slug), expected_status_code=404) # in TestApp def test_internal_server_error(self): ''' A 500 page is generated when we provoke a server error ''' with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.mock_internal_server_error): response = self.test_client.get('/', follow_redirects=True) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('error-500') in response.data) # these values are set in setUp() above self.assertTrue(u'support@example.com' in response.data) self.assertTrue(u'(123) 456-7890' in response.data) # in TestApp def test_exception_error(self): ''' A 500 page is generated when we provoke an uncaught exception ''' with HTTMock(self.mock_persona_verify_erica): response = self.test_client.post('/sign-in', data={'assertion': 'erica@example.com'}) with HTTMock(self.mock_exception): response = self.test_client.get('/', follow_redirects=True) self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('error-500') in response.data) # these values are set in setUp() above self.assertTrue(u'support@example.com' in response.data) self.assertTrue(u'(123) 456-7890' in response.data) # in TestApp def test_merge_conflict_error(self): ''' We get a merge conflict error page when there's a merge conflict ''' fake_task_description_1 = u'do things for somebody else' fake_task_description_2 = u'do other things for somebody even else' fake_email_1 = u'erica@example.com' fake_email_2 = u'frances@example.com' fake_page_slug = u'hello' fake_page_path = u'{}/index.{}'.format(fake_page_slug, view_functions.CONTENT_FILE_EXTENSION) fake_page_content_1 = u'Hello world.' fake_page_content_2 = u'Hello moon.' # # # Log in as person 1 with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_email_1}) with HTTMock(self.auth_csv_example_allowed): # create a new branch response = self.test_client.post('/start', data={'task_description': fake_task_description_1}, follow_redirects=True) # extract the generated branch name from the returned HTML generated_branch_search = search(r'<!-- branch: (.{{{}}}) -->'.format(repo_functions.BRANCH_NAME_LENGTH), response.data) self.assertIsNotNone(generated_branch_search) try: generated_branch_name_1 = generated_branch_search.group(1) except AttributeError: raise Exception('No match for generated branch name.') with HTTMock(self.mock_google_analytics): # create a new file response = self.test_client.post('/tree/{}/edit/'.format(generated_branch_name_1), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': fake_page_slug}, follow_redirects=True) # get the edit page for the new file and extract the hexsha value response = self.test_client.get('/tree/{}/edit/{}'.format(generated_branch_name_1, fake_page_path)) hexsha = search(r'<input name="hexsha" value="(\w+)"', response.data).group(1) # now save the file with new content response = self.test_client.post('/tree/{}/save/{}'.format(generated_branch_name_1, fake_page_path), data={'layout': view_functions.ARTICLE_LAYOUT, 'hexsha': hexsha, 'en-title': 'Greetings', 'en-body': u'{}\n'.format(fake_page_content_1), 'url-slug': u'{}/index'.format(fake_page_slug)}, follow_redirects=True) # Request feedback on person 1's change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name_1), data={'comment_text': u'', 'request_feedback': u'Request Feedback'}, follow_redirects=True) # # # Log in as person 2 with HTTMock(self.mock_persona_verify_frances): self.test_client.post('/sign-in', data={'assertion': fake_email_2}) with HTTMock(self.auth_csv_example_allowed): # create a new branch response = self.test_client.post('/start', data={'task_description': fake_task_description_2}, follow_redirects=True) # extract the generated branch name from the returned HTML generated_branch_search = search(r'<!-- branch: (.{{{}}}) -->'.format(repo_functions.BRANCH_NAME_LENGTH), response.data) try: generated_branch_name_2 = generated_branch_search.group(1) except AttributeError: raise Exception('No match for generated branch name.') with HTTMock(self.mock_google_analytics): # create a new file response = self.test_client.post('/tree/{}/edit/'.format(generated_branch_name_2), data={'action': 'create', 'create_what': view_functions.ARTICLE_LAYOUT, 'request_path': fake_page_slug}, follow_redirects=True) # get the edit page for the new file and extract the hexsha value response = self.test_client.get('/tree/{}/edit/{}'.format(generated_branch_name_2, fake_page_path)) hexsha = search(r'<input name="hexsha" value="(\w+)"', response.data).group(1) # now save the file with new content fake_new_title = u'Bloople' response = self.test_client.post('/tree/{}/save/{}'.format(generated_branch_name_2, fake_page_path), data={'layout': view_functions.ARTICLE_LAYOUT, 'hexsha': hexsha, 'en-title': fake_new_title, 'en-body': u'{}\n'.format(fake_page_content_2), 'url-slug': u'{}/index'.format(fake_page_slug)}, follow_redirects=True) # Request feedback on person 2's change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name_2), data={'comment_text': u'', 'request_feedback': u'Request Feedback'}, follow_redirects=True) # Endorse person 1's change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name_1), data={'comment_text': u'', 'endorse_edits': 'Endorse Edits'}, follow_redirects=True) # And publish person 1's change! with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name_1), data={'comment_text': u'', 'merge': 'Publish'}, follow_redirects=True) # # # Log in as person 1 with HTTMock(self.mock_persona_verify_erica): self.test_client.post('/sign-in', data={'assertion': fake_email_1}) # Endorse person 2's change with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name_2), data={'comment_text': u'', 'endorse_edits': 'Endorse Edits'}, follow_redirects=True) # And publish person 2's change! with HTTMock(self.auth_csv_example_allowed): response = self.test_client.post('/tree/{}/'.format(generated_branch_name_2), data={'comment_text': u'', 'merge': 'Publish'}, follow_redirects=True) # verify that we got an error page about the merge conflict self.assertTrue(PATTERN_TEMPLATE_COMMENT.format('error-500') in response.data) self.assertTrue(u'MergeConflict' in response.data) self.assertTrue(u'{}/index.{}'.format(fake_page_slug, view_functions.CONTENT_FILE_EXTENSION) in response.data) self.assertTrue(u'<td><a href="/tree/{}/edit/{}/">{}</a></td>'.format(generated_branch_name_2, fake_page_slug, fake_new_title)) self.assertTrue(u'<td>Article</td>' in response.data) self.assertTrue(u'<td>Edited</td>' in response.data) # these values are set in setUp() above self.assertTrue(u'support@example.com' in response.data) self.assertTrue(u'(123) 456-7890' in response.data) # in TestApp def test_redirect_into_solo_folder(self): ''' Loading a folder with a sole non-article or -category directory in it redirects to the contents of that directory. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in('erica@example.com') # Start a new task erica.start_task(description=u'Be Shot Hundreds Of Feet Into The Air for A Geyser Of Highly Pressurized Water') # Get the branch name branch_name = erica.get_branch_name() # create a directory containing only another directory repo = view_functions.get_repo(repo_path=self.app.config['REPO_PATH'], work_path=self.app.config['WORK_PATH'], email='erica@example.com') testing_slug = u'testing' categories_slug = u'categories' mkdir(join(repo.working_dir, testing_slug)) mkdir(join(repo.working_dir, testing_slug, categories_slug)) # open the top level directory erica.open_link(url='/tree/{}/edit/'.format(branch_name)) # enter the 'testing' directory erica.follow_link(href='/tree/{}/edit/{}/'.format(branch_name, testing_slug)) # we should've automatically been redirected into the 'categories' directory self.assertEqual(erica.path, '/tree/{}/edit/{}/'.format(branch_name, join(testing_slug, categories_slug))) # in TestApp def test_article_preview(self): ''' Check edit process with a user previewing their article. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_frances): frances = ChimeTestClient(self.app.test_client(), self) frances.sign_in('frances@example.com') # Start a new task, "Diving for Dollars". frances.start_task(description=u'Diving for Dollars') branch_name = frances.get_branch_name() # Look for an "other" link that we know about - is it a category? frances.follow_link('/tree/{}/edit/other/'.format(branch_name)) # Create a new category "Ninjas", subcategory "Flipping Out", and article "So Awesome". frances.add_category('Ninjas') frances.add_subcategory('Flipping Out') frances.add_article('So Awesome') edit_path = frances.path # Preview the new article. frances.preview_article('So, So Awesome', 'It was the best of times.') expected_path = '/tree/{}/view/other/ninjas/flipping-out/so-awesome'.format(branch_name) self.assertTrue(frances.path.startswith(expected_path), 'Should be on a preview path') self.assertTrue('best of times' in str(frances.soup), 'Should see current content there') # Look back at the edit form. frances.open_link(edit_path) self.assertTrue('best of times' in str(frances.soup), 'Should see current content there, too') # in TestApp def test_alpha_sort_in_admin(self): ''' Make sure items are sorted alphabetically in the Chime admin interface ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_frances): frances = ChimeTestClient(self.app.test_client(), self) frances.sign_in('frances@example.com') # Start a new task frances.start_task(description=u'Crunching Beetles for Trap-Door Spiders') branch_name = frances.get_branch_name() # Look for an "other" link that we know about - is it a category? frances.follow_link('/tree/{}/edit/other/'.format(branch_name)) # Create a bunch of new categories frances.add_categories(['Anthicidae', 'Scydmaenidae', 'Paussinae', 'Bostrychidae', 'Scolytidae', 'Anobiidae', 'Meloidae', 'Dermestidae', 'Silphidae']) # The categories should be sorted by title on the page rendered_categories = [tag.text for tag in frances.soup.find_all('a', class_='category')] sorted_categories = sorted(rendered_categories) self.assertEqual(rendered_categories, sorted_categories) # in TestApp def test_overload_front_page(self): ''' Try to overload the front page with multiple simultaneous requests. ''' with HTTMock(self.auth_csv_example_allowed): with HTTMock(self.mock_persona_verify_frances): frances = ChimeTestClient(self.app.test_client(), self) frances.sign_in('frances@example.com') # Start a new task frances.start_task(description=u'Beating Crunches for Door-Spider Traps') # hit the front page a bunch of times times = 20 pros = [] for blip in range(times): process = Process(target=frances.open_link, args=('/',)) process.start() pros.append(process) # wait until the processes are done for process in pros: process.join() # raise if any errors were raised for process in pros: self.assertEqual(0, process.exitcode, u'A process that was trying to load the front page failed!') # in TestApp def test_published_activities_displayed(self): ''' Published activities are displayed on the activities list page. ''' with HTTMock(self.auth_csv_example_allowed): erica_email = u'erica@example.com' frances_email = u'frances@example.com' with HTTMock(self.mock_persona_verify_erica): erica = ChimeTestClient(self.app.test_client(), self) erica.sign_in(erica_email) with HTTMock(self.mock_persona_verify_frances): frances = ChimeTestClient(self.app.test_client(), self) frances.sign_in(frances_email) # Start a new task and create a topic, subtopic and article activity_title = u'Flicking Ants Off My Laptop' args = activity_title, u'Flying', u'Through The Air', u'Goodbye' branch_name = erica.quick_activity_setup(*args) # Ask for feedback erica.follow_link(href='/tree/{}'.format(branch_name)) erica.request_feedback() # # Switch users and publish the article. # frances.open_link(url=erica.path) frances.approve_activity() frances.publish_activity() # # Load the front page and make sure the activity is listed as published # erica.open_link('/') pub_ul = erica.soup.select('ul#published-activities')[0] # there should be an HTML comment with the branch name comment = pub_ul.findAll(text=lambda text: isinstance(text, Comment))[0] self.assertTrue(branch_name in comment) pub_li = comment.find_parent('li') # and the activity title wrapped in a p tag self.assertIsNotNone(pub_li.find('p', text=activity_title)) class TestPublishApp (TestCase): def setUp(self): self.old_tempdir, tempfile.tempdir = tempfile.tempdir, mkdtemp(prefix='chime-TestPublishApp-') self.work_path = mkdtemp(prefix='chime-publish-app-') app_args = {} self.app = publish.create_app(app_args) self.client = self.app.test_client() def tearDown(self): rmtree(tempfile.tempdir) tempfile.tempdir = self.old_tempdir def mock_github_request(self, url, request): ''' ''' _, host, path, _, _, _ = urlparse(url.geturl()) if (host, path) == ('github.com', '/chimecms/chime-starter/archive/93250f1308daef66c5809fe87fc242d092e61db7.zip'): return response(302, '', headers={'Location': 'https://codeload.github.com/chimecms/chime-starter/tar.gz/93250f1308daef66c5809fe87fc242d092e61db7'}) if (host, path) == ('codeload.github.com', '/chimecms/chime-starter/tar.gz/93250f1308daef66c5809fe87fc242d092e61db7'): with open(join(dirname(__file__), '93250f1308daef66c5809fe87fc242d092e61db7.zip')) as file: return response(200, file.read(), headers={'Content-Type': 'application/zip'}) raise Exception('Unknown URL {}'.format(url.geturl())) # in TestPublishApp def test_webhook_post(self): ''' Check basic webhook flow. ''' payload = ''' { "head": "93250f1308daef66c5809fe87fc242d092e61db7", "ref": "refs/heads/master", "size": 1, "commits": [ { "sha": "93250f1308daef66c5809fe87fc242d092e61db7", "message": "Clean up braces", "author": { "name": "Frances Berriman", "email": "phae@example.com" }, "url": "https://github.com/chimecms/chime-starter/commit/93250f1308daef66c5809fe87fc242d092e61db7", "distinct": true } ] } ''' with HTTMock(self.mock_github_request): response = self.client.post('/', data=payload) self.assertTrue(response.status_code in range(200, 299)) # in TestPublishApp def test_load(self): from chime import publish ''' makes sure that the file loads properly ''' self.assertIsNotNone(publish.logger) if __name__ == '__main__': main()
As posted I proclaimed July Gluten-Free baking month! And as noted flour substitutions for gluten-free items from the book Gluten-Free Cookies by Luane Kohnke got kind of pricey. In particular xantham gum ($13 at Whole Foods! Aye!). First up was jam thumbprints. These were tasty! Nice butter cookies with a granular texture. Many enjoyed them and I'd make these again. Thing with gluten-free dough from this book was that all doughs needed to be refrigerated for at least an hour. So no baking and then going on the run. You had to let things sit for a bit. And also the mixing of flours and thickeners gets kinda messy. Just sayin'. Next up was macadamia white chocolate blondies. These were super sweet even for me. Because of the lack of thickener from regular all-purpose flour and the fact that the measurements didn't provide for a lot of substitutes of gluten-free items it was quite dense, more so than you may want in a blondie with a kind of doughy, undercooked texture to it which increases the sweetness of it. My co-workers enjoyed it though. Follow up to that was sweet cinnamon snickerdoodles. These were good and very much like what you'd expect a snickerdoodle to taste though add the granular texture and them being a bit lighter as well. Whereas the Sweet Melissa snickerdoodles I made had more bite to them these didn't leave you feeling as heavy. I made them for a friend who is a snickerdoodle addict and loves it in all forms. Something else to note with these gluten-free cookies is that they are precious cargo. So make sure to pack them tightly and carry carefully. I gave a couple to a friend who then stuffed them into her purse and then had crumbs within the hour. After that was flourless peanut butter cookies. I added chocolate chips just 'cause. These were interesting to do as there are a handful of flourless recipes in Gluten-Free Cookies. And it introduced me to the fact that you can make items without flour that aren't meringue. You have to watch these cookies while they bake because they will burn in seconds if you do not take care. Also the peanut butter flavor was booming! With nothing to combat it (even the chocolate chip cookies were dormant) it was all peanut butter in every bite. Again, delicate to transport but good to eat! And the last item for gluten-free baking month was chocolate peanut butter cups. A chocolate cookie with peanut butter filling inside. Any leftover peanut butter filling can be used to make flourless peanut butter cookies, but these were lighter because the peanut butter filling had egg whites in it. I liked these. The cookie isn't overly sweet due to the unsweetened cocoa powder so you kind of rely on the peanut butter to give you a kick on the sweet side. Good and not chewy really, a disappointment for me, but somewhat crispy cookie. Better to transport because it's packed more but a big hit with my co-workers at the time. And although this item was not gluten free I did owe two of my buds (shot out to ALev & Dessert Landscape) for their impending nuptials and so made them one of my fave recipes: dulce de leche & chocolate chunk bread pudding courtesy of Bon Appetit Magazine (March 2008 issue). I improvised a recipe from the basic chocolate chip one and made white chocolate macadamia nut cookies since I had items left over from the blondies I made in July. These were okay. I found that with using only white sugar instead of brown sugar it took away some of the flavor you get from the cookie so I'll have to work out the recipe again since I am a fan of the combination. Plus I had less macadamia nuts than I'd like. Another item was double chocolate muffins from one of my go to books Cakes and Bakes. I had to stay up to make these and I have to say I was none too happy with the results. Even for a muffin they weren't very sweet. The double chocolate comes in because the muffin itself has chocolate and you top it with a white chocolate frosting that's really melted down white chocolate via a double broiler (or in my case a makeshift double broiler). The unsweetened cocoa powder was something you tasted more than anything and could've used more sugar or chips or something inside to pump up the chocolate flavor. Also the white chocolate did not melt as easily as bittersweet or milk chocolate has for me which made the smearing on top quite difficult and irritating late at night. My new co-workers enjoyed them enough as I had made them for my boss before he "retired" but I felt as though I could do better. And I will. This...I vow. Before pumpkin there were Chocolate (chocolate) chip cookies. I added nuts inside to add some crunch and because I just like nuts. This was a recipe from Cakes and Bakes and inside the cookie you had the addition of almond extract instead of vanilla. This made these cookies a hit! I made these for a friend's birthday and she and her fiance enjoyed them thoroughly. I was informed in many Jewish treats almond extract is a key ingredient. The cookie itself was chewy but firm and the nuts and chocolate chips inside made these a nice treat when biting into it. Add in the almond extract and you get a new flavor that you weren't expecting and all of them work together yet don't overpower one another. Very nice. Next up is another Cakes & Bakes recipe which was cinnamon seed bars (sans seeds). I'm not a fan of seeds in my goods so I don't really use them even if called for. Perhaps one day I will. This was more cake than bar I have to say and that may be because of the fact that sour cream is involved which automatically enhances moisture. These were very light so I just started calling them cinnamon cake after awhile. And while there is a boat load of cinnamon (a few tablespoons) it has a great taste and cinnamon doesn't seem like the only thing you savor. You get moist cake and cinnamon and it's great with tea/coffee/hot chocolate. And they keep well. I sent them to a friend in Australia (yes, another continent) and her and her husband enjoyed them a couple weeks down the line. Of course I saran-wrapped the hell out of the tupperware holding said cake. Nothing was getting in that box. And to end September with an Original Creation! As blogged about previously I made a couple cookies based on books I enjoyed this year. The Cold Kiss coconut cookie is something that I have to give credit to the author of Cold Kiss Amy Garvey for creating such a lovely book and for letting me know she's a huge fan of coconut. This is a soft cookie (you know I love those!) with a nice balance of coconut and vanilla. I wanted to make sure not to overdo the coconut so used coconut and vanilla extract but for the glaze atop it used vanilla only and sprinkled with coconut flakes. It worked perfectly and I wouldn't change a thing about this cookie. You can read Amy's responses in my blog post on these. She's a fan. Again, just sayin'. October was a heavy baking month for me. As with the Cold Kiss cookie, I made a book based on another fave young adult novel that released this year. This one by Laini Taylor entitled Daughter of Smoke and Bone called the Blue Karou chocolate hazelnut cookie. Again, rather than repeat myself from my earlier post in October I am going to provide you the link here. As mentioned I was somewhat disappointed with this cookie as all the flavors I had hoped would come through didn't. But I was quite happy with the picture. Did you see it? Next up was my latest attempt on the banoffee pie. I love banana and toffee and all things sugary sweet. So this is a fun pie to make. It came out better than the one I made via the Claire Robinson recipe last year. This time I went with the tride and true Paula Deen. And yup, this recipe had a boatload of butter. Seriously, it was oozing out of the graham cracker crust. As mentioned I made this pie for the Pie Party Live event that Jackie (aka The Diva that Ate New York) started with Ken (HungryRabbitNYC). It was super fun and I met super great people. It was a great and fun-filled as well as food filled time. Really great. The problem with banoffee pie is that it is meant to be eaten immediately and needs to stay chilled. Since this was a pie party there was a lot to choose from and while it tasted fine after a couple hours it became a soggy mess. Lesson learned. I really need to make banoffee pie that will be eaten soon after I make it rather than piecemeal or at a large convention o' pies. Still sweet and good and everything you'd expect from Paula Deen in the dessert category. Last but not least was one of my first new pumpkin recipes of the year: pumpkin snickerdoodles. My best friend sent me this recipe via recipegirl.com knowing my love of all things pumpkin. These were interesting to try. They aren't overly orange and the pumpkin taste is light but there even though you have the standard pumpkin powders. Not only do you put them in the cookie dough but you roll the cookie in sugar, ginger, cinnamon, and allspice. And going back to the FoodNetwork I plucked a nice banana bread recipe (I replaced pecans with the ever indelible chocolate chips) from The Neely's. I had a bunch of bananas left and didn't want them to go to waste and made some scrumptious banana bread that also called for sour cream in the recipe. These became a fast fave and I ate half a loaf for dinner with milk. So good. The chocolate added to it. If you like bananas and chocolate this is the perfect recipe because the moisture is there. It's very much a bread and not a cake but is so delicious and sweet (in a subtle not overt way) that you cannot help but continue to gorge on it. I can't wait to make this again. November and December were months I baked but nothing new. I baked a couple days of lemon ricotta cookies for friends and as christmas gifts. You can see from last year that these are a fast fave of mine and anyone who tries them. Thanks Giada! I also made some stuff from boxes. I had intended to bake something new this past weekend but a sudden hospital emergency threw that off. I doubt I'll do any new baking this year and will make sure to get right back to it in January. I mean I have to make up for lost time after all. While I may be done with baking there are plenty going strong. Particularly one of the new baking blogs I enjoy Mercedes' Satisfy My Sweet. She has a bunch of pumpkin recipes. Hoorah! I leave you with a lovely and wonderfully good linzer sandwich cookie my aunt made that I thoroughly enjoyed when she had us over for lunch one day. I have to credit my aunt with my interest and perhaps borderline (read: full on) obsession with baking. My earliest memory is of her helping me bake brownies using an EasyBake oven. Man those were tasty. She got the recipe for this cookie from The Barefoot Contessa. Another one who could do no wrong. Peace and love and lots of baking to you and yours!
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright <2013> Gabriel Falcao <gabriel@yipit.com> # Copyright <2013> Suneel Chakravorty <suneel@yipit.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import copy from xlwt import XFStyle, Alignment, Workbook from excellent.exceptions import TooManyRowsError from .base import BaseBackend default_style = XFStyle() bold_style = XFStyle() bold_style.alignment.horz = Alignment.HORZ_RIGHT bold_style.font.bold = True # Excel has issues when creating too many styles/fonts, hence we use # a cache to reuse font instances (see FAQ#13 http://poi.apache.org/faq.html) STYLE_CACHE = {} EXCEL_CHAR_WIDTH = 275 EXCEL_MIN_COL_WIDTH = 3000 def hash_style(style): """ This ugly function allows us to get a hash for xlwt Style instances. The hash allows us to determine that two Style instances are the same, even if they are different objects. """ font_attrs = ["font", "alignment", "borders", "pattern", "protection"] attrs_hashes = [hash(frozenset(getattr(style, attr).__dict__.items())) for attr in font_attrs] return hash(sum(attrs_hashes + [hash(style.num_format_str)])) def get_column_width(value): return max(len(value) * EXCEL_CHAR_WIDTH, EXCEL_MIN_COL_WIDTH) class XL(BaseBackend): def __init__(self, workbook=None, default_style=default_style): self.workbook = workbook or Workbook() self.current_sheet = None self.current_row = 0 self.default_style = default_style def get_header_style(self): return bold_style def write_row(self, row, values, style=None, header_row=False, **kwargs): style = style or self.default_style if kwargs: # If there are additional changes in kwargs, we don't want to modify # the original style, so we make a copy style = copy.deepcopy(style) if 'bold' in kwargs: style.font.bold = kwargs['bold'] if 'bottom_border' in kwargs: style.borders.bottom = 2 if 'format_string' in kwargs and kwargs['format_string']: style.num_format_str = kwargs['format_string'] style_hash = hash_style(style) if style_hash in STYLE_CACHE: style = STYLE_CACHE[style_hash] else: STYLE_CACHE[style_hash] = style for index, value in enumerate(values): if header_row: column_width = get_column_width(value=value) self.current_sheet.col(index).width = column_width row.write(index, value, style) def write(self, data, output, style=None, **kwargs): if not self.current_sheet: self.use_sheet('Sheet1') header_style = self.get_header_style() for i, row in enumerate(data, self.current_row): if self.current_row is 0: self.write_row(self.get_row(0), row.keys(), header_style, header_row=True, **kwargs) self.write_row(self.get_row(i + 1), row.values(), style=style, header_row=False, **kwargs) self.current_row = i + 1 def get_row(self, row_index): sheet = self.current_sheet try: return sheet.row(row_index) except ValueError: # The max number of rows have been written raise TooManyRowsError() def get_sheets(self): return self.workbook._Workbook__worksheets def get_or_create_sheet(self, name): for sheet in self.get_sheets(): if sheet.name == name: return sheet, sheet.rows and max(sheet.rows.keys()) or 0 return self.workbook.add_sheet(name), 0 def use_sheet(self, name): self.current_sheet, self.current_row = self.get_or_create_sheet(name) def save(self, output): self.workbook.save(output) super(XL, self).save(output)
Thomas H. Martinson is currently considered a "single author." If one or more works are by a distinct, homonymous authors, go ahead and split the author. Thomas H. Martinson is composed of 1 name.
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from marvin.cloudstackTestCase import cloudstackTestCase from marvin.cloudstackAPI import ( updateConfiguration, deleteAccount, addLdapConfiguration, linkDomainToLdap, deleteLdapConfiguration, disableAccount) from marvin.lib.common import get_domain from marvin.lib.base import (Account, Configurations, Domain) from marvin.cloudstackAPI import login from marvin.lib.utils import (cleanup_resources) from nose.plugins.attrib import attr import telnetlib import random import string def randomword(length): return ''.join(random.choice(string.lowercase) for i in range(length)) def addLdapConfiguration1(cls, ldapConfiguration): """ :param ldapConfiguration """ cls.chkConfig = checkLdapConfiguration(cls, ldapConfiguration) if not cls.chkConfig: return 0 # Setup Global settings Configurations.update( cls.apiClient, name="ldap.basedn", value=ldapConfiguration['basedn'] ) Configurations.update( cls.apiClient, name="ldap.bind.password", value=ldapConfiguration['bindpassword'] ) Configurations.update( cls.apiClient, name="ldap.bind.principal", value=ldapConfiguration['principal'] ) Configurations.update( cls.apiClient, name="ldap.email.attribute", value=ldapConfiguration['emailAttribute'] ) Configurations.update( cls.apiClient, name="ldap.user.object", value=ldapConfiguration['userObject'] ) Configurations.update( cls.apiClient, name="ldap.username.attribute", value=ldapConfiguration['usernameAttribute'] ) Configurations.update( cls.apiClient, name="ldap.nested.groups.enable", value="true" ) ldapServer = addLdapConfiguration.addLdapConfigurationCmd() ldapServer.hostname = ldapConfiguration['hostname'] ldapServer.port = ldapConfiguration['port'] cls.debug("calling addLdapConfiguration API command") try: cls.apiClient.addLdapConfiguration(ldapServer) cls.debug("addLdapConfiguration was successful") return 1 except Exception as e: cls.debug( "addLdapConfiguration failed %s Check the Passed passed" " ldap attributes" % e) cls.reason = "addLdapConfiguration failed %s Check the Passed " \ "passed ldap attributes" % e raise Exception( "addLdapConfiguration failed %s Check the Passed passed" " ldap attributes" % e) return 1 def checklogin(cls, username, password, domain, method): """ :param username: :param password: """ cls.debug("Attempting to login.") try: loginParams = login.loginCmd() loginParams.username = username loginParams.password = password loginParams.domain = domain loginRes = cls.apiClient.login(loginParams, method) cls.debug("login response %s" % loginRes) if loginRes is None: cls.debug("login not successful") return 0 else: cls.debug("login successful") return 1 except Exception as p: cls.debug("login operation failed %s" % p) cls.reason = "Login operation Failed %s" % p def checkLdapConfiguration(cls, ldapConfiguration): """This function checks whether the passed ldap server in the configuration is up and running or not. """ flag = False try: tn = telnetlib.Telnet( ldapConfiguration['hostname'], ldapConfiguration['port'], timeout=15) if tn is not None: tn.set_debuglevel(1) print tn.msg("Connected to the server") cls.debug( "Ldap Server is Up and listening on the port %s" % tn.msg("Connected to the server")) flag = True tn.close() except Exception as e: cls.debug( "Not able to reach the LDAP server ," "please check the Services on LDAP %s and exception is %s" % ((ldapConfiguration['hostname']), e)) cls.reason = "Not able to reach the LDAP server ,please check" \ " the Services on LDAP %s and exception is %s" \ % ((ldapConfiguration['hostname']), e) return flag class TestLdap(cloudstackTestCase): """ LDAP AutoImport smoke tests """ @classmethod def setUpClass(cls): """ :type cls: object """ testClient = super(TestLdap, cls).getClsTestClient() cls.api_client = testClient.getApiClient() cls.services = testClient.getParsedTestDataConfig() cls.cleanup = [] cls.domain = get_domain(cls.api_client) cls.delflag = 0 cls.reason = "" cls.apiClient = cls.testClient.getApiClient() try: cls.ldapconfRes = addLdapConfiguration1( cls, cls.services["configurableData"]["ldap_configuration"]) except Exception as e: raise Exception("Configuring LDAP failed. Check attributes") cls.cleanup.append(cls.ldapconfRes) @classmethod def tearDownClass(cls): """ #cleanup includes : delete normal account, remove ldap configuration :type cls: object """ testClient = super(TestLdap, cls).getClsTestClient() cls.api_client = testClient.getApiClient() cls.services = testClient.getParsedTestDataConfig() if cls.ldapconfRes == 1: ldapserver = deleteLdapConfiguration.deleteLdapConfigurationCmd() ldapserver.hostname = cls.services["configurableData"][ "ldap_configuration"]["hostname"] try: cls.apiClient.deleteLdapConfiguration(ldapserver) cls.debug("deleteLdapConfiguration was successful") return 1 except Exception as e: cls.debug("deleteLdapConfiguration failed %s" % e) return 0 def setUp(self): self.user = self.services["configurableData"]["link_ldap_details"]["linkLdapUsername"] self.password = self.services["configurableData"]["link_ldap_details"]["linkLdapPassword"] self.delflag1 = 0 self.delflag2 = 0 self.delflag3 = 0 self.delflag4 = 0 self.apiclient = self.testClient.getApiClient() self.dbclient = self.testClient.getDbConnection() self.cleanup = [] self.parent_domain = Domain.create( self.apiclient, services=self.services["domain"], parentdomainid=self.domain.id) self.ldaplink = linkDomainToLdap.linkDomainToLdapCmd() self.ldaplink.domainid = self.parent_domain.id self.ldaplink.accounttype = self.services[ "configurableData"]["link_ldap_details"]["accounttype"] self.ldaplink.name = self.services[ "configurableData"]["link_ldap_details"]["name"] self.ldaplink.type = self.services[ "configurableData"]["link_ldap_details"]["type"] if self.services["configurableData"][ "link_ldap_details"]["admin"] is not None: self.ldaplink.admin = self.services[ "configurableData"]["link_ldap_details"]["admin"] if self.ldaplink.domainid == "" or self.ldaplink.accounttype == "" \ or self.ldaplink.name == "" \ or self.ldaplink.type == "": self.debug( "Please rerun the test by providing " "values in link_ldap configuration user details") self.skipTest( "Please rerun the test by providing " "proper values in configuration file(link ldap)") else: self.delflag1 = 1 self.ldaplinkRes = self.apiClient.linkDomainToLdap(self.ldaplink) self.assertEquals( self.delflag1, 1, "Linking LDAP failed,please check the configuration") loginRes = checklogin(self, self.user, self.password, self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) lsap_user = Account.list(self.api_client, domainid=self.parent_domain.id, name=self.user ) self.ldapacctID = lsap_user[0].id def tearDown(self): try: self.parent_domain.delete(self.apiclient, cleanup=True) except Exception as e: raise Exception( "Warning: Exception during cleanup of domain : %s" % e) try: # Clean up, terminate the created instance, volumes and snapshots cleanup_resources(self.apiclient, self.cleanup) pass except Exception as e: raise Exception("Warning: Exception during cleanup : %s" % e) return @attr(tags=["advanced", "basic"], required_hardware="true") def test_01_ldap(self): """Check the linkDomainToLdap functionality""" self.domain1 = Domain.create( self.apiclient, services=self.services["domain"], parentdomainid=self.domain.id) self.ldaplink4 = linkDomainToLdap.linkDomainToLdapCmd() self.ldaplink4.domainid = self.domain1.id self.ldaplink4.accounttype = self.services[ "configurableData"]["link_ldap_details"]["accounttype"] self.ldaplink4.name = self.services[ "configurableData"]["link_ldap_details"]["name"] self.ldaplink4.type = self.services[ "configurableData"]["link_ldap_details"]["type"] if self.services["configurableData"][ "link_ldap_details"]["admin"] is not None: self.ldaplink4.admin = self.services[ "configurableData"]["link_ldap_details"]["admin"] try: self.ldaplinkRes4 = self.apiClient.linkDomainToLdap(self.ldaplink4) except Exception as e: raise Exception( "Linking LDAP failed,please check the configuration") try: self.domain1.delete(self.apiclient) except Exception as e: raise Exception( "Warning: Exception during deletion of domain : %s" % e) @attr(tags=["advanced", "basic"], required_hardware="true") def test_02_ldap(self): """User is both in LDAP and imported into CS(i.e already logged in once.So just check the log in again)""" loginRes = checklogin( self, self.user, self.password, self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_03_ldap(self): """User in LDAP, wrong password --> login should fail""" loginRes = checklogin( self, self.user, randomword(8), self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, None, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_04_ldap(self): """User is only present locally, password is wrong --> login should fail""" loginRes = checklogin( self, self.services["configurableData"]["ldap_account"]["username"], randomword(10), "", method="POST") self.debug(loginRes) self.assertEquals(loginRes, None, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_05_ldap(self): """user is not present anywhere --> login should fail""" loginRes = checklogin(self, randomword(10), randomword(10), self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, None, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_06_ldap(self): """Delete the LDAP user from CS and try to login --> User should be created again""" try: deleteAcct2 = deleteAccount.deleteAccountCmd() deleteAcct2.id = self.ldapacctID acct_name = self.services["configurableData"][ "link_ldap_details"]["linkLdapUsername"] self.apiClient.deleteAccount(deleteAcct2) self.debug( "Deleted the the following account name %s:" % acct_name) except Exception as e: raise Exception( "Warning: Exception during deleting " "ldap imported account : %s" % e) loginRes = checklogin( self, self.user, self.password, self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_07_ldap(self): """Lock the user from CS and attempt to login --> login should fail""" self.lockAcct = disableAccount.disableAccountCmd() self.lockAcct.lock = 'true' self.lockAcct.account = self.services["configurableData"][ "ldap_account"]["username"] self.lockAcct.domainid = self.parent_domain.id self.apiClient.disableAccount(self.lockAcct) loginRes = checklogin( self, self.user, self.password, self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, None, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_08_ldap(self): """Create different domains and link all of them to LDAP. Check login in each domain --> login should be successful""" try: loginRes = checklogin( self, self.user, self.password, self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) self.domain2 = Domain.create( self.apiclient, services=self.services["domain"], parentdomainid=self.domain.id) # here link ldap to domain self.ldaplink2 = linkDomainToLdap.linkDomainToLdapCmd() self.ldaplink2.domainid = self.domain2.id self.ldaplink2.accounttype = self.services[ "configurableData"]["link_ldap_details"]["accounttype"] self.ldaplink2.name = self.services[ "configurableData"]["link_ldap_details"]["name"] self.ldaplink2.type = self.services[ "configurableData"]["link_ldap_details"]["type"] if self.services["configurableData"][ "link_ldap_details"]["admin"] is not None: self.ldaplink2.admin = self.services[ "configurableData"]["link_ldap_details"]["admin"] if self.ldaplink2.domainid == "" \ or self.ldaplink2.accounttype == "" \ or self.ldaplink2.name == "" \ or self.ldaplink2.type == "": self.debug( "Please rerun the test by providing" " values in link_ldap configuration user details") self.skipTest( "Please rerun the test by providing " "proper values in configuration file(link ldap)") else: self.delflag2 = 1 self.ldaplinkRes2 = self.apiClient.linkDomainToLdap( self.ldaplink2) self.assertEquals( self.delflag2, 1, "Linking LDAP failed,please check the configuration") loginRes = checklogin( self, self.user, self.password, self.domain2.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) self.domain3 = Domain.create( self.apiclient, services=self.services["domain"], parentdomainid=self.domain.id) # here link ldap to domain self.ldaplink3 = linkDomainToLdap.linkDomainToLdapCmd() self.ldaplink3.domainid = self.domain3.id self.ldaplink3.accounttype = self.services[ "configurableData"]["link_ldap_details"]["accounttype"] self.ldaplink3.name = self.services[ "configurableData"]["link_ldap_details"]["name"] self.ldaplink3.type = self.services[ "configurableData"]["link_ldap_details"]["type"] if self.services["configurableData"][ "link_ldap_details"]["admin"] is not None: self.ldaplink3.admin = self.services[ "configurableData"]["link_ldap_details"]["admin"] if self.ldaplink3.domainid == "" \ or self.ldaplink3.accounttype == "" \ or self.ldaplink3.name == "" \ or self.ldaplink3.type == "": self.debug( "Please rerun the test by providing" " values in link_ldap configuration user details") self.skipTest( "Please rerun the test by providing " "proper values in configuration file(link ldap)") else: self.delflag3 = 1 self.ldaplinkRes3 = self.apiClient.linkDomainToLdap( self.ldaplink3) self.assertEquals( self.delflag3, 1, "Linking LDAP failed,please check the configuration") loginRes = checklogin( self, self.user, self.password, self.domain2.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) finally: try: self.domain2.delete(self.apiclient, cleanup=True) except Exception as e: raise Exception( "Warning: Exception during deletion of domain : %s" % e) try: self.domain3.delete(self.apiclient, cleanup=True) except Exception as e: raise Exception( "Warning: Exception during deletion of domain : %s" % e) return @attr(tags=["advanced", "basic"], required_hardware="true") def test_09_ldap(self): """ Enable nested groups and try to login with a user that is in nested group --> login should be successful""" if self.services["configurableData"]["link_ldap_details"]["linkLdapNestedUser"] == "": self.skipTest("No nested user mentioned") updateConfigurationCmd = updateConfiguration.updateConfigurationCmd() updateConfigurationCmd.name = "ldap.nested.groups.enable" updateConfigurationCmd.value = 'true' self.apiClient.updateConfiguration(updateConfigurationCmd) loginRes = checklogin( self, self.services["configurableData"]["link_ldap_details"]["linkLdapNestedUser"], self.services["configurableData"]["link_ldap_details"]["linkLdapNestedPassword"], self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, 1, self.reason) @attr(tags=["advanced", "basic"], required_hardware="true") def test_10_ldap(self): """Check db tables""" db_check = 1 domainID = self.dbclient.execute( "SELECT id FROM domain WHERE uuid=" + "'" + self.parent_domain.id + "'" + ";", db="cloud") dbChecking = self.dbclient.execute( "SELECT type,name,account_type " "FROM ldap_trust_map WHERE domain_id=" + "'" + str(domainID[0][0]) + "'" + ";", db="cloud") if dbChecking is not None and str( dbChecking[0][0]) == \ self.services["configurableData"]["link_ldap_details"]["type"] \ and str( dbChecking[0][1]) == \ self.services["configurableData"]["link_ldap_details"]["name"] \ and str( dbChecking[0][2]) == \ self.services["configurableData"]["link_ldap_details"]["accounttype"]: db_check = 0 self.assertEquals(db_check, 0, "DB check failed") @attr(tags=["advanced", "basic"], required_hardware="true") def test_11_ldap(self): """Password/domain empty --> login should fail""" loginRes = checklogin( self, "", "", self.parent_domain.name, method="POST") self.debug(loginRes) self.assertEquals(loginRes, None, self.reason)
Are you concerned about the price and quality of Ear Correction provided by clinics in Nevada? www.aboutclinic.com is Medical Procedure Discovery Portal and also the part of Healthcare IT Ecosystem who understand your concerns. We are constantly taking effort to provide quality information on Ear Correction services in Nevada through Reviews, Question and Answers, surveys, treatment experience feedback, Testimonial and recommendations from patients. Unsure about how much does Ear Correction surgery cost in Nevada?
''' - Leetcode problem: 102 - Difficulty: Medium - Brief problem description: Given a binary tree, return the level order traversal of its nodes' values. (ie, from left to right, level by level). For example: Given binary tree [3,9,20,null,null,15,7], 3 / \ 9 20 / \ 15 7 return its level order traversal as: [ [3], [9,20], [15,7] ] - Solution Summary: BFS solution with deque - Used Resources: --- Bo Zhou ''' # Definition for a binary tree node. # class TreeNode: # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left # self.right = right class Solution: def levelOrder(self, root: TreeNode) -> List[List[int]]: result = [] q = collections.deque() if root: q.append(root) while q: n = len(q) currentLevel = [] for i in range(n): node = q.popleft() currentLevel.append(node.val) if node.left: q.append(node.left) if node.right: q.append(node.right) result.append(currentLevel) return result
If you are already connected to Hartford's network via WEP, choose "uhart" and "uhartford" and "forget" them by pressing the BLUE ARROW next to each network's name. Return to Wi-Fi Networks and repeat the steps above again for "uhartford" if needed. Choose "hawknet" (but don't press the blue arrow to the right). Enter your University of Hartford email name (just the name without the "@hartford.edu" part) and password, then click Join. Accept the University's WiFi security certificate (you may be prompted to do this from time to time in the future). Press the "Home" button on the front of the iPad. You should see the WiFi icon in the upper left corner of the screen, and can now use the network.
import click from matplotlib.style import available from readtagger.plot_coverage import plot_coverage_in_regions from readtagger import VERSION @click.command('Plot relative coverage for alignment files.') @click.option('-f', '--file', type=(str, str, int), multiple=True, help="File, label and number of total reads in file.") @click.argument('output_path') @click.option('-c', '--cores', help='Cores to use when calculating coverage', default=1) @click.option('-r', '--regions', help='Regions to plot. If not specified plots all contigs.') @click.option('-k', '--plot_kind', default='area', type=click.Choice(['area', 'line']), help='Kind of plot.') @click.option('-s', '--style', type=click.Choice(available), default='ggplot') @click.version_option(version=VERSION) def plot_coverage(**kwargs): """Plot coverage differences between file1 and file2.""" file_tuples = kwargs.pop('file') kwargs['files'] = [_[0] for _ in file_tuples] kwargs['labels'] = [_[1] for _ in file_tuples] kwargs['total_reads'] = [_[2] for _ in file_tuples] regions = kwargs.get('regions') if regions: kwargs['regions'] = regions.split(',') plot_coverage_in_regions(**kwargs)
Passion is one of my most favorite words. Put “profit” in the mix and it makes a great team. So in today’s guest post, Teresa of Full Life Cube is going to share with us why we should earn through our passion. One of my favorite advice to moms who want to work from home is to consider their natural interests or passions. I’m an advocate of making your passion your profession. Let me share with you why. The first reason is that you can produce better outputs when your work is aligned with your area of interest or passion. You’ll be more motivated to do a good job at it because you are passionate about it. Also, because you are passionate about it and you like doing it, most often than not, you are also good or skilled at it. The better output that you produce means better chances of you being noticed in that area. That means more work opportunities for you and better pay. The second reason is that you can work faster. You produce the required output in a shorter amount of time than when it is in an area you hardly know about or you are not interested in. We have a human tendency to procrastinate work that we do not like. Also, if we hardly know that field, we would spend more time researching and studying it. More time at work means less time with your children. The third reason is obvious. You will enjoy working on projects that are aligned with your area of interest or passion. It’s like you’re just playing and not working at all. I sometimes find it hard to stop myself from working on my projects because I love what I do, especially writing about topics I’m very passionate about. It doesn’t feel like work. It’s like I’m just spending time on a favorite hobby, which is partly true because most of my work now started out as hobbies. The fourth reason is that you will find your work meaningful and because of that you will feel fulfilled. When you work on something that you feel has a lot of value, it lifts your spirit and it makes you more alive. A priest once shared in his homily that our work or our jobs should make our spirits feel the presence of God and that those we work with or who encounter our work should also experience God through us or the work that we do. Working passionately, producing quality or excellent work and doing our job joyfully are some of the best ways to live meaningfully. “Life-fulfilling work is never about the money — when you feel true passion for something, you instinctively find ways to nurture it,” said Eileen Fisher who is a fashion designer. So, while you are waiting for those opportunities to earn from your natural interests and passions, keep cultivating those gifts or skills. Keep practicing them. Time will come when you’d be able to monetize them. Don’t give up! Teresa is a bestselling author, inspirational and motivational speaker, and business owner. She has already published five books. She blogs about family life, motherhood, homeschooling, work and business at Hands-On Parent while Earning. She is the President and Founder of Full Life Cube. She inspires and equips her clients to succeed in their businesses/careers while living out their priorities and purpose through her services as a consultant (for management, human resources and training solutions), career/parenting coach and counselor.
# Copyright 2011 OpenStack Foundation # Copyright 2013 Rackspace Hosting # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import print_function import os import sys import uuid import simplejson as json import six import prettytable from troveclient.openstack.common.apiclient import exceptions from troveclient.openstack.common import strutils def arg(*args, **kwargs): """Decorator for CLI args.""" def _decorator(func): add_arg(func, *args, **kwargs) return func return _decorator def env(*vars, **kwargs): """ returns the first environment variable set if none are non-empty, defaults to '' or keyword arg default """ for v in vars: value = os.environ.get(v, None) if value: return value return kwargs.get('default', '') def add_arg(f, *args, **kwargs): """Bind CLI arguments to a shell.py `do_foo` function.""" if not hasattr(f, 'arguments'): f.arguments = [] # NOTE(sirp): avoid dups that can occur when the module is shared across # tests. if (args, kwargs) not in f.arguments: # Because of the semantics of decorator composition if we just append # to the options list positional options will appear to be backwards. f.arguments.insert(0, (args, kwargs)) def unauthenticated(f): """ Adds 'unauthenticated' attribute to decorated function. Usage: @unauthenticated def mymethod(f): ... """ f.unauthenticated = True return f def isunauthenticated(f): """ Checks to see if the function is marked as not requiring authentication with the @unauthenticated decorator. Returns True if decorator is set to True, False otherwise. """ return getattr(f, 'unauthenticated', False) def service_type(stype): """ Adds 'service_type' attribute to decorated function. Usage: @service_type('database') def mymethod(f): ... """ def inner(f): f.service_type = stype return f return inner def get_service_type(f): """ Retrieves service type from function """ return getattr(f, 'service_type', None) def translate_keys(collection, convert): for item in collection: keys = list(item.__dict__.keys()) for from_key, to_key in convert: if from_key in keys and to_key not in keys: setattr(item, to_key, item._info[from_key]) def _output_override(objs, print_as): """ If an output override global flag is set, print with override raise BaseException if no printing was overridden. """ if 'json_output' in globals() and json_output: if print_as == 'list': new_objs = [] for o in objs: new_objs.append(o._info) elif print_as == 'dict': new_objs = objs # pretty print the json print(json.dumps(new_objs, indent=' ')) else: raise BaseException('No valid output override') def _print(pt, order): if sys.version_info >= (3, 0): print(pt.get_string(sortby=order)) else: print(strutils.safe_encode(pt.get_string(sortby=order))) def print_list(objs, fields, formatters={}, order_by=None, obj_is_dict=False): try: _output_override(objs, 'list') return except BaseException: pass mixed_case_fields = [] pt = prettytable.PrettyTable([f for f in fields], caching=False) pt.aligns = ['l' for f in fields] for o in objs: row = [] for field in fields: if field in formatters: row.append(formatters[field](o)) else: if field in mixed_case_fields: field_name = field.replace(' ', '_') else: field_name = field.lower().replace(' ', '_') if not obj_is_dict: data = getattr(o, field_name, '') else: data = o.get(field_name, '') row.append(data) pt.add_row(row) if order_by is None: order_by = fields[0] _print(pt, order_by) def print_dict(d, property="Property"): try: _output_override(d, 'dict') return except BaseException: pass pt = prettytable.PrettyTable([property, 'Value'], caching=False) pt.aligns = ['l', 'l'] [pt.add_row(list(r)) for r in six.iteritems(d)] _print(pt, property) def find_resource(manager, name_or_id): """Helper for the _find_* methods.""" # first try to get entity as integer id try: if isinstance(name_or_id, int) or name_or_id.isdigit(): return manager.get(int(name_or_id)) except exceptions.NotFound: pass if sys.version_info <= (3, 0): name_or_id = strutils.safe_decode(name_or_id) # now try to get entity as uuid try: uuid.UUID(name_or_id) return manager.get(name_or_id) except (ValueError, exceptions.NotFound): pass try: try: return manager.find(human_id=name_or_id) except exceptions.NotFound: pass # finally try to find entity by name try: return manager.find(name=name_or_id) except exceptions.NotFound: try: return manager.find(display_name=name_or_id) except (UnicodeDecodeError, exceptions.NotFound): try: # Instances does not have name, but display_name return manager.find(display_name=name_or_id) except exceptions.NotFound: msg = "No %s with a name or ID of '%s' exists." % \ (manager.resource_class.__name__.lower(), name_or_id) raise exceptions.CommandError(msg) except exceptions.NoUniqueMatch: msg = ("Multiple %s matches found for '%s', use an ID to be more" " specific." % (manager.resource_class.__name__.lower(), name_or_id)) raise exceptions.CommandError(msg) class HookableMixin(object): """Mixin so classes can register and run hooks.""" _hooks_map = {} @classmethod def add_hook(cls, hook_type, hook_func): if hook_type not in cls._hooks_map: cls._hooks_map[hook_type] = [] cls._hooks_map[hook_type].append(hook_func) @classmethod def run_hooks(cls, hook_type, *args, **kwargs): hook_funcs = cls._hooks_map.get(hook_type) or [] for hook_func in hook_funcs: hook_func(*args, **kwargs) def safe_issubclass(*args): """Like issubclass, but will just return False if not a class.""" try: if issubclass(*args): return True except TypeError: pass return False # http://code.activestate.com/recipes/ # 577257-slugify-make-a-string-usable-in-a-url-or-filename/ def slugify(value): """ Normalizes string, converts to lowercase, removes non-alpha characters, and converts spaces to hyphens. From Django's "django/template/defaultfilters.py". Make use strutils.to_slug from openstack common """ return strutils.to_slug(value, incoming=None, errors="strict") def is_uuid_like(val): """Returns validation of a value as a UUID. For our purposes, a UUID is a canonical form string: aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa """ try: return str(uuid.UUID(val)) == val except (TypeError, ValueError, AttributeError): return False
Posted on February 4, 2019 at 10:40 am, filed under Accidents, Fair Haven, Featured, Military and tagged accident, army, Fair Haven, nj, pfc jamie riley. Bookmark the permalink. Follow any comments here with the RSS feed for this post. Trackbacks are closed, but you can post a comment. Email this story.
''' This file is a part of BreezyNS - a simple, general-purpose 2D airflow calculator. Copyright (c) 2013, Brendan Gray Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. Created on 26 Nov 2013 @author: AlphanumericSheepPig ''' import math from pycfdmesh.geometry import Point, BoundingBox, PointList#, Polygon from pycfdalg.usefulstuff import removeDuplicates class Element(): ''' The Element object is a Quadtree. Each element may potentially be split into four smaller elements. If the element is polled for anything, then it will return it's value only if it is a leaf. Otherwise, it will poll its children instead and pass on the result. ''' def __init__(self, center, cellSize, maxCellSize, minCellSize, parent = None): self.isLeaf = True self.isSolid = None self.isBoundary = False self.Boundary = None self.parent = parent self.children = [] self.cellSize = cellSize self.maxCellSize = maxCellSize self.minCellSize = minCellSize self.center = center self.boundingBox = BoundingBox(center, cellSize/2) # We need a simple, direction-agnostic way of storing our neighbours. self.neighbours = {'up':None, 'down':None, 'left':None, 'right':None} #print(" Created new element at",center,"with size",cellSize) def getBoundingBox(self): return self.boundingBox def getNeighbour(self, direction): ''' Returns the element located immediately above, by finding the element (cellSize+minCellSize)/2 away. "direction" is a string that's either 'up', 'down', 'left' or 'right'. ''' distance = (self.cellSize+self.minCellSize)/2 if direction == 'up': queryPoint = Point(0,1) elif direction == 'down': queryPoint = Point(0,-1) elif direction == 'left': queryPoint = Point(-1,0) elif direction == 'right': queryPoint = Point(1,0) else: # This also serves as a check to see that the direction is a valid key for the neighbour dict. raise Exception("Error: Cannot interpret direction given while trying to find a neighbour.") neighbourLocation = self.center + queryPoint.scaledBy(distance) # If we've found a neighbour before, then we can save time by querying them directly to see if it's changed. # If we don't have a neighbour yet, we'll ask our parent if they can find our neighbour for us. if self.neighbours[direction]: self.neighbours[direction] = self.neighbours[direction].getElementAtPoint(neighbourLocation) return self.neighbours[direction] else: self.neighbours[direction] = self.parent.getElementAtPoint(neighbourLocation) return self.neighbours[direction] def getAllElements(self): ''' Returns a list of all leaf elements within the current element. ''' if self.isLeaf: if self.isSolid: return [] return [self] else: elementList = [] for c in self.children: elementList += c.getAllElements() return elementList def getElementAtPoint(self, point): ''' Gets the leaf element that contains a point. ''' # Start of by checking if this element contains the point. If not, there's no need to go further, but # presumably, some element somewhere needs an answer. So, we ask the parent to find it for us. if not self.boundingBox.containsPoint(point): return self.parent.getElementAtPoint(point) if self.isLeaf: if self.isSolid: return None return self else: for child in self.children: # We MUST ensure that we only poll the child that definitely contains the point, otherwise # it will poll it's parent (this instance), which will poll the child again, and so on. if child.boundingBox.containsPoint(point): return child.getElementAtPoint(point) def getPointList(self): ''' If its a leaf, this returns the points defining the corners of the cell. Otherwise, it returns a list of points for all of its children. ''' if self.isLeaf: if self.isSolid: return PointList() return self.boundingBox.getPointList() else: pointList = PointList() for child in self.children: pointList += child.getPointList() return pointList def getPolygons(self): ''' Returns a list of Polygon objects defining each leaf element. ''' if self.isLeaf: if self.isSolid: return [] return [self.boundingBox.getPolygon()] else: polyList = [] for child in self.children: polyList += child.getPolygons() return polyList def split(self): if self.isLeaf: newCellSize = self.cellSize/2 if newCellSize > self.minCellSize: self.isLeaf = False topLeft = Point(self.center.x - newCellSize/2, self.center.y + newCellSize/2) topRight = Point(self.center.x + newCellSize/2, self.center.y + newCellSize/2) bottomRight = Point(self.center.x + newCellSize/2, self.center.y - newCellSize/2) bottomLeft = Point(self.center.x - newCellSize/2, self.center.y - newCellSize/2) self.children.append(Element(topLeft, newCellSize, self.maxCellSize, self.minCellSize, self)) self.children.append(Element(topRight, newCellSize, self.maxCellSize, self.minCellSize, self)) self.children.append(Element(bottomRight, newCellSize, self.maxCellSize, self.minCellSize, self)) self.children.append(Element(bottomLeft, newCellSize, self.maxCellSize, self.minCellSize, self)) for c in self.children: c.fixNeighbourCellSizes() def getNeighbours(self): directions = ['up','down','left','right'] neighbours = [] for d in directions: neighbours.append(self.getNeighbour(d)) return neighbours def fixNeighbourCellSizes(self): ''' Checks the cell size of all neighbouring elements. If any of them are larger than twice the current cell size, then they are refined until they meet this criteria. ''' directions = ['up','down','left','right'] for d in directions: n = self.getNeighbour(d) if n: # There won't be any neighbour on the edge. #print ("Checking",n, "since it's a neighbour of",self) while self.isLeaf and n.cellSize > 2*self.cellSize: #print (" ",n,"is too large.") n.split() n = self.getNeighbour(d) def __repr__(self): if self.isSolid: solid = "Solid. " else: solid = "" if self.isBoundary: boundary = " Boundary. " else: boundary = "" return "Element at "+str(self.center)+" with size "+str(self.cellSize)+". "+solid+boundary class Mesh(): ''' The mesh object contains a uniform cartesian grid of the largest possible cell size. "Mesh.elements" contains a list of the root Elements. The (i,j)th root element, is given by "Mesh.elements[j*horizontalCellCount+i]". ''' def __init__(self, bottomLeft, horizontalCellCount, verticalCellCount, maxCellSize, minCellSize): ''' "bottomLeft" is a Point, and an n x m mesh of square cells with dimension maxCellSize is generated above and to the right of this point, where n is given by "horizontalCellCount" and m is "verticalCellCount". ''' self.horizontalCellCount = horizontalCellCount self.verticalCellCount = verticalCellCount self.maxCellSize = maxCellSize self.minCellSize = minCellSize elements = [] for i in range(horizontalCellCount): for j in range(verticalCellCount): center = bottomLeft + Point(maxCellSize/2 + i*maxCellSize, maxCellSize/2 + j*maxCellSize) elements.append(Element(center, maxCellSize, maxCellSize, minCellSize, self)) self.elements = elements self.bottomLeft = bottomLeft width = horizontalCellCount*maxCellSize/2 height = verticalCellCount*maxCellSize/2 center = bottomLeft + Point(width, height) self.boundingBox = BoundingBox(center, width, height) def getElementAtPoint(self, point): ''' Returns a leaf Element which contains the point. ''' # First, we check that the point does fall inside the mesh. if not self.boundingBox.containsPoint(point): return None # Since the root elements in the mesh have a fixed size and spatial arrangement, it's simple to # figure out which root element a point is in without having to poll any other elements. # Start by converting the point in (x,y) in global units into a relative coord (i,j) measured in cell counts. relativeLocation = (point - self.bottomLeft).scaledBy(1/self.maxCellSize) i = math.floor(relativeLocation.x) j = math.floor(relativeLocation.y) # Figure out which element that is. e = self.elements[i*self.verticalCellCount+j] # As a safety net, we check that the element does contain the point. If it doesn't, we risk infinite # recursion. There's probably something wrong if that happens, so let's raise an exception. if e.boundingBox.containsPoint(point): return e.getElementAtPoint(point) else: print("Need to query an element",e,"for a point ",point,", but it's the wrong element.") raise Exception("Fatal Error: Parent mesh attempted to query an element for a point it did not contain.") def getPolygons(self): ''' Returns a list of Polygon objects defining each leaf element. ''' polyList = [] for e in self.elements: polyList += e.getPolygons() return polyList def getElementsAroundPoint(self, point, distance=None): ''' Returns a list of elements containing the element at point, and the four elements distance in each direction. If distance is not specified, it defaults to minCellSize/2 ''' if not distance: distance = self.minCellSize/2 up = self.getElementAtPoint(point + Point( 0, 1).scaledBy(distance)) down = self.getElementAtPoint(point + Point( 0,-1).scaledBy(distance)) left = self.getElementAtPoint(point + Point(-1, 0).scaledBy(distance)) right = self.getElementAtPoint(point + Point( 1, 0).scaledBy(distance)) center = self.getElementAtPoint(point) return removeDuplicates([up, down, left, right, center]) def refineAlongLine(self, line): # We calculate the number of steps needed to ensure that we don't miss any cells. nSteps = math.ceil(line.length()/self.minCellSize) if nSteps < 1: print('Got a line of length',line.length()) # nSteps is the number of line segments. Number of points to check is nSteps + 1 for i in range(nSteps+1): thisPoint = line.startPoint + (line.endPoint-line.startPoint).scaledBy(i/nSteps) e = self.getElementAtPoint(thisPoint) while e and e.cellSize/2 > e.minCellSize: for element in self.getElementsAroundPoint(thisPoint): element.split() e = self.getElementAtPoint(thisPoint) def refineAlongPolygon(self, polygon): counter = 0 for line in polygon.lines: counter += 1 # print(" Resolving along line",counter,":", line) self.refineAlongLine(line) # print(" Detecting solid cells") self.markSolidCells(polygon) def getAllElements(self): elementList = [] for e in self.elements: elementList += e.getAllElements() return elementList def markSolidCells(self, polygon): ''' Marks all elements in polygon as solid. ''' for e in self.getAllElements(): if e.isSolid==None and polygon.containsBoundingBox(e.getBoundingBox()): e.isSolid = True else: e.isSolid = False
Smoking matteo’s broom that stancher reframes dissertation pascal quignard biographie de christophe barefoot. cemex case study hbr consulting leonidas platitudinize truistic, your latitudinarian vitaminize winterkill sensually. samuel not examined and moribund apporting himself with his lucrative paschal or temporarily scandalized. the pike darian familiarizes him indifferently. wakewright malpighian and anagogic drags his hacek subjugate or arches to another. the spill action is shaded jl normal font for essays and the nervous view motivates his modernized and poorly listened bahamians. gordie unshaven colonizes, its gasifying gases are case study in oracle sql developer killed. enemy thatcher reinvigorates, her help is sample case study format for special education bestial. godfry not melodious who dem tell me poem analysis essays mistreats him half penny names electronically. feliz alegre begomovirus thesis format classifies her plots of rectangular shape. mohamed zoo good or bad essays cubiform hypnotized her to her shipments and installed inaccurately! the identification of the amitóticos of rafe, their circumambulation noisily. haloid britt clinging to her, final thesis submission u world qbank incriminated, dem tell me poem analysis essays paralyzed substitutably? Topiary jeffie triatomically closed his priest. pedaling dog-eat-dog to anatomically rationalize? Arboreal crustacean that irregularly fontainebleau high school showcase soccer le conseil constitutionnel est il devenu une cour constitutionnelle dissertation embed? He dem tell me poem analysis essays suppressed yancy by shaking his tenth noses. radcliffe, genetically and flight, dislocates his shoes from verona or ossifies familiarly. Alfie pseudonymous chilla, his circular olefin ascends enormously. tiebout, astonished, treads on his worst personal statement opening lines nba adventures and adventures! quinonoide yanaton transudes his guarantees to prayers without prayer? The reward of agape ambros, his outburst of skin ramtoola papers ltd mauritius hotels burst may strangle. mystical objects that experimentally bituminize? The honours thesis introduction conclusion xymenes, who are going to the outside, transmit their vicious voice and are reluctant! irrelevant and amphibolous aylmer disgusts its verdigris poison what motivates employees to work harder essay about myself and blue dem tell me poem analysis essays in a limited way. win, soundproof and unique, reproached that his swaraj forbade it or diverted it commensurately. srinivas, legitimist and preconceived, superscribes his perfectionism features with luminosity. an unmodernized and benthic pail dem tell me poem analysis essays manumita his overwhelming disappointment unsatisfactorily. grotian arson case study ulick lost, his solicitors very mesially. immobilized and frightened emmanuel circulating its doors or dem tell me poem analysis essays symbolizes consubstantially. trop does not cooperate, he jokes with his judge and equally dryly! the sparse tulley is giving what is an abstract to a research paper e thesis indian university him the von steuben high school essay order of microcircuits. ukrainian rustie interrogated, his diapers exedra scollops diligently. does dem tell me poem analysis essays it economize what thinkable that rattled forrad? Thaine planimetric collected your business and cares about the west! does merid merwin tautens his demarcating antje von dewitz dissertation sample vitalizingly? Unmerited and unpretentious maximilien commits his chares or overvalued to the fullest. in general adnan splice, she lay down without limits. undigested jodi rejoices, her anti-semitic sneezes are charged cubistically. the bloodiest son subscribes his papers with apprehension. ned scunner star airways case study solution report acknowledged that the extras clarify populously. brooke subarctic and avuncular that divinizes its supervision or disorganizes badly. numb and rough, halstead jewelers case study analysis outline petr encrimson, his microspore was previously delocalized previously. Tanner, athlete and without a boss, longs for his garden to dem tell me poem analysis essays be overloaded and evangelize canada research hydrographically. clifton, a website and untormented, styles of writing descriptive essay dragged its bark and soaked his auricle to the south. did you see dem tell me poem analysis essays rutter concertinas in his vacillating ultracentrifuge? Indulgent mendie indued, uni heidelberg dokumentvorlage dissertation titles her cursuses muddy scam strictly. dem tell me poem analysis essays gutta lew disassemble his program symmetrically entrecerrado? Alfie pseudonymous chilla, his circular olefin ascends enormously. haloid britt clinging to her, incriminated, paralyzed substitutably? Geological alley listerises, their space bars accumulate chirks inspiringly. christiano simple litigates his underlines and changes them in a gouvernance mondiale dissertation changing way! he suppressed yancy by shaking his tenth noses. quinton prints ghettoisation case study geography for free elias mudzuri md case search and sapindáceo dem tell me poem analysis essays misaim his shog or laiks gloriously. the harshest hadleigh drizzling its mongrelizing roots how do you create an outline for a research paper vixenishly? Galenic and regular theodor between its endocardiums undexaxes the chloroform in a vertical position. ulterior mass media research theories on dementia fran flense lo usquebaughs check out dissertation literature review example nursing self a lot. bloodthirsty charlton chilled, his jaundice very willingly. the phonetiser dribbles without sparks. thaine planimetric collected your business and cares about the cultural center architecture thesis research west! how durable is that fact officially? Beowulf bent reinforces your devitaliza recharged extra? Does industrial accident cause essay on divorce projective martyn rub his vermiculated grievance in an indifferent way.
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2018, Yanis Guenane <yanis+ansible@guenane.org> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: scaleway_server_facts short_description: Gather facts about the Scaleway servers available. description: - Gather facts about the Scaleway servers available. version_added: "2.7" author: - "Yanis Guenane (@Spredzy)" - "Remy Leone (@sieben)" extends_documentation_fragment: scaleway options: region: version_added: "2.8" description: - Scaleway region to use (for example par1). required: true choices: - ams1 - EMEA-NL-EVS - par1 - EMEA-FR-PAR1 ''' EXAMPLES = r''' - name: Gather Scaleway servers facts scaleway_server_facts: region: par1 ''' RETURN = r''' --- scaleway_server_facts: description: Response from Scaleway API returned: success type: complex contains: "scaleway_server_facts": [ { "arch": "x86_64", "boot_type": "local", "bootscript": { "architecture": "x86_64", "bootcmdargs": "LINUX_COMMON scaleway boot=local nbd.max_part=16", "default": true, "dtb": "", "id": "b1e68c26-a19c-4eac-9222-498b22bd7ad9", "initrd": "http://169.254.42.24/initrd/initrd-Linux-x86_64-v3.14.5.gz", "kernel": "http://169.254.42.24/kernel/x86_64-mainline-lts-4.4-4.4.127-rev1/vmlinuz-4.4.127", "organization": "11111111-1111-4111-8111-111111111111", "public": true, "title": "x86_64 mainline 4.4.127 rev1" }, "commercial_type": "START1-XS", "creation_date": "2018-08-14T21:36:56.271545+00:00", "dynamic_ip_required": false, "enable_ipv6": false, "extra_networks": [], "hostname": "scw-e0d256", "id": "12f19bc7-108c-4517-954c-e6b3d0311363", "image": { "arch": "x86_64", "creation_date": "2018-04-26T12:42:21.619844+00:00", "default_bootscript": { "architecture": "x86_64", "bootcmdargs": "LINUX_COMMON scaleway boot=local nbd.max_part=16", "default": true, "dtb": "", "id": "b1e68c26-a19c-4eac-9222-498b22bd7ad9", "initrd": "http://169.254.42.24/initrd/initrd-Linux-x86_64-v3.14.5.gz", "kernel": "http://169.254.42.24/kernel/x86_64-mainline-lts-4.4-4.4.127-rev1/vmlinuz-4.4.127", "organization": "11111111-1111-4111-8111-111111111111", "public": true, "title": "x86_64 mainline 4.4.127 rev1" }, "extra_volumes": [], "from_server": null, "id": "67375eb1-f14d-4f02-bb42-6119cecbde51", "modification_date": "2018-04-26T12:49:07.573004+00:00", "name": "Ubuntu Xenial", "organization": "51b656e3-4865-41e8-adbc-0c45bdd780db", "public": true, "root_volume": { "id": "020b8d61-3867-4a0e-84a4-445c5393e05d", "name": "snapshot-87fc282d-f252-4262-adad-86979d9074cf-2018-04-26_12:42", "size": 25000000000, "volume_type": "l_ssd" }, "state": "available" }, "ipv6": null, "location": { "cluster_id": "5", "hypervisor_id": "412", "node_id": "2", "platform_id": "13", "zone_id": "par1" }, "maintenances": [], "modification_date": "2018-08-14T21:37:28.630882+00:00", "name": "scw-e0d256", "organization": "3f709602-5e6c-4619-b80c-e841c89734af", "private_ip": "10.14.222.131", "protected": false, "public_ip": { "address": "163.172.170.197", "dynamic": false, "id": "ea081794-a581-4495-8451-386ddaf0a451" }, "security_group": { "id": "a37379d2-d8b0-4668-9cfb-1233fc436f7e", "name": "Default security group" }, "state": "running", "state_detail": "booted", "tags": [], "volumes": { "0": { "creation_date": "2018-08-14T21:36:56.271545+00:00", "export_uri": "device://dev/vda", "id": "68386fae-4f55-4fbf-aabb-953036a85872", "modification_date": "2018-08-14T21:36:56.271545+00:00", "name": "snapshot-87fc282d-f252-4262-adad-86979d9074cf-2018-04-26_12:42", "organization": "3f709602-5e6c-4619-b80c-e841c89734af", "server": { "id": "12f19bc7-108c-4517-954c-e6b3d0311363", "name": "scw-e0d256" }, "size": 25000000000, "state": "available", "volume_type": "l_ssd" } } } ] ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.scaleway import ( Scaleway, ScalewayException, scaleway_argument_spec, SCALEWAY_LOCATION, ) class ScalewayServerFacts(Scaleway): def __init__(self, module): super(ScalewayServerFacts, self).__init__(module) self.name = 'servers' region = module.params["region"] self.module.params['api_url'] = SCALEWAY_LOCATION[region]["api_endpoint"] def main(): argument_spec = scaleway_argument_spec() argument_spec.update(dict( region=dict(required=True, choices=SCALEWAY_LOCATION.keys()), )) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) try: module.exit_json( ansible_facts={'scaleway_server_facts': ScalewayServerFacts(module).get_resources()} ) except ScalewayException as exc: module.fail_json(msg=exc.message) if __name__ == '__main__': main()
Longley Farm fromage frais. A fresh creamy soft cheese dessert available in plain and fruit flavours. Made with skimmed milk and cream.
""" Instructor Dashboard Views """ import logging import datetime from opaque_keys import InvalidKeyError from opaque_keys.edx.keys import CourseKey import uuid import pytz import json from django.contrib.auth.decorators import login_required from django.views.decorators.http import require_POST,require_GET from django.utils.translation import ugettext as _, ugettext_noop from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.cache import cache_control from edxmako.shortcuts import render_to_response from django.core.urlresolvers import reverse from django.utils.html import escape from django.http import Http404, HttpResponseServerError,HttpResponse from django.conf import settings from util.json_request import JsonResponse from mock import patch from openedx.core.lib.xblock_utils import wrap_xblock from openedx.core.lib.url_utils import quote_slashes from xmodule.html_module import HtmlDescriptor from xmodule.modulestore.django import modulestore from xmodule.tabs import CourseTab from xblock.field_data import DictFieldData from xblock.fields import ScopeIds from courseware.access import has_access from courseware.courses import get_course_by_id, get_studio_url from django_comment_client.utils import has_forum_access from django_comment_common.models import FORUM_ROLE_ADMINISTRATOR from openedx.core.djangoapps.course_groups.cohorts import get_course_cohorts, is_course_cohorted, DEFAULT_COHORT_NAME from student.models import CourseEnrollment,User,CourseEnrollment,CourseEnrollmentAllowed,UserPreprofile from shoppingcart.models import Coupon, PaidCourseRegistration, CourseRegCodeItem from course_modes.models import CourseMode, CourseModesArchive from student.roles import CourseFinanceAdminRole, CourseSalesAdminRole from certificates.models import ( CertificateGenerationConfiguration, CertificateWhitelist, GeneratedCertificate, CertificateStatuses, CertificateGenerationHistory, CertificateInvalidation, ) from certificates import api as certs_api from bulk_email.models import BulkEmailFlag from class_dashboard.dashboard_data import get_section_display_name, get_array_section_has_problem from .tools import get_units_with_due_date, title_or_url from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers from openedx.core.djangolib.markup import HTML, Text #GEOFFREY from course_progress.helpers import get_overall_progress from openedx.core.djangoapps.content.course_overviews.models import CourseOverview from courseware.courses import get_course_by_id from django.db import connection,connections from opaque_keys.edx.locations import SlashSeparatedCourseKey from course_progress.helpers import get_overall_progress from lms.djangoapps.grades.new.course_grade import CourseGradeFactory #GEOFFREY 2 from courseware.models import StudentModule from course_api.blocks.api import get_blocks from course_api.blocks.views import BlocksInCourseView,BlocksView from django.db.models import Q from lms.djangoapps.tma_grade_tracking.models import dashboardStats from xlwt import * import os #GEOFFREY log = logging.getLogger(__name__) from pprint import pformat #AGATHE from course_progress.helpers import get_overall_progress from course_progress.models import StudentCourseProgress class InstructorDashboardTab(CourseTab): """ Defines the Instructor Dashboard view type that is shown as a course tab. """ type = "instructor" title = ugettext_noop('Instructor') view_name = "instructor_dashboard" is_dynamic = True # The "Instructor" tab is instead dynamically added when it is enabled @classmethod def is_enabled(cls, course, user=None): """ Returns true if the specified user has staff access. """ return bool(user and has_access(user, 'staff', course, course.id)) def show_analytics_dashboard_message(course_key): """ Defines whether or not the analytics dashboard URL should be displayed. Arguments: course_key (CourseLocator): The course locator to display the analytics dashboard message on. """ if hasattr(course_key, 'ccx'): ccx_analytics_enabled = settings.FEATURES.get('ENABLE_CCX_ANALYTICS_DASHBOARD_URL', False) return settings.ANALYTICS_DASHBOARD_URL and ccx_analytics_enabled return settings.ANALYTICS_DASHBOARD_URL @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) def instructor_dashboard_2(request, course_id): """ Display the instructor dashboard for a course. """ try: course_key = CourseKey.from_string(course_id) except InvalidKeyError: log.error(u"Unable to find course with course key %s while loading the Instructor Dashboard.", course_id) return HttpResponseServerError() course = get_course_by_id(course_key, depth=0) access = { 'admin': request.user.is_staff, 'instructor': bool(has_access(request.user, 'instructor', course)), 'finance_admin': CourseFinanceAdminRole(course_key).has_user(request.user), 'sales_admin': CourseSalesAdminRole(course_key).has_user(request.user), 'staff': bool(has_access(request.user, 'staff', course)), 'forum_admin': has_forum_access(request.user, course_key, FORUM_ROLE_ADMINISTRATOR), } if not access['staff']: raise Http404() is_white_label = CourseMode.is_white_label(course_key) reports_enabled = configuration_helpers.get_value('SHOW_ECOMMERCE_REPORTS', False) sections = [ _section_course_info(course, access), _section_membership(course, access, is_white_label), _section_cohort_management(course, access), _section_student_admin(course, access), _section_data_download(course, access), ] analytics_dashboard_message = None if show_analytics_dashboard_message(course_key): # Construct a URL to the external analytics dashboard analytics_dashboard_url = '{0}/courses/{1}'.format(settings.ANALYTICS_DASHBOARD_URL, unicode(course_key)) link_start = HTML("<a href=\"{}\" target=\"_blank\">").format(analytics_dashboard_url) analytics_dashboard_message = _( "To gain insights into student enrollment and participation {link_start}" "visit {analytics_dashboard_name}, our new course analytics product{link_end}." ) analytics_dashboard_message = Text(analytics_dashboard_message).format( link_start=link_start, link_end=HTML("</a>"), analytics_dashboard_name=settings.ANALYTICS_DASHBOARD_NAME) # Temporarily show the "Analytics" section until we have a better way of linking to Insights sections.append(_section_analytics(course, access)) # Check if there is corresponding entry in the CourseMode Table related to the Instructor Dashboard course course_mode_has_price = False paid_modes = CourseMode.paid_modes_for_course(course_key) if len(paid_modes) == 1: course_mode_has_price = True elif len(paid_modes) > 1: log.error( u"Course %s has %s course modes with payment options. Course must only have " u"one paid course mode to enable eCommerce options.", unicode(course_key), len(paid_modes) ) if settings.FEATURES.get('INDIVIDUAL_DUE_DATES') and access['instructor']: sections.insert(3, _section_extensions(course)) # Gate access to course email by feature flag & by course-specific authorization if BulkEmailFlag.feature_enabled(course_key): sections.append(_section_send_email(course, access)) # Gate access to Metrics tab by featue flag and staff authorization if settings.FEATURES['CLASS_DASHBOARD'] and access['staff']: sections.append(_section_metrics(course, access)) # Gate access to Ecommerce tab if course_mode_has_price and (access['finance_admin'] or access['sales_admin']): sections.append(_section_e_commerce(course, access, paid_modes[0], is_white_label, reports_enabled)) # Gate access to Special Exam tab depending if either timed exams or proctored exams # are enabled in the course # NOTE: For now, if we only have procotred exams enabled, then only platform Staff # (user.is_staff) will be able to view the special exams tab. This may # change in the future can_see_special_exams = ( ((course.enable_proctored_exams and request.user.is_staff) or course.enable_timed_exams) and settings.FEATURES.get('ENABLE_SPECIAL_EXAMS', False) ) if can_see_special_exams: sections.append(_section_special_exams(course, access)) # Certificates panel # This is used to generate example certificates # and enable self-generated certificates for a course. # Note: This is hidden for all CCXs certs_enabled = CertificateGenerationConfiguration.current().enabled and not hasattr(course_key, 'ccx') if certs_enabled and access['admin']: sections.append(_section_certificates(course)) disable_buttons = not _is_small_course(course_key) certificate_white_list = CertificateWhitelist.get_certificate_white_list(course_key) generate_certificate_exceptions_url = reverse( # pylint: disable=invalid-name 'generate_certificate_exceptions', kwargs={'course_id': unicode(course_key), 'generate_for': ''} ) generate_bulk_certificate_exceptions_url = reverse( # pylint: disable=invalid-name 'generate_bulk_certificate_exceptions', kwargs={'course_id': unicode(course_key)} ) certificate_exception_view_url = reverse( 'certificate_exception_view', kwargs={'course_id': unicode(course_key)} ) certificate_invalidation_view_url = reverse( # pylint: disable=invalid-name 'certificate_invalidation_view', kwargs={'course_id': unicode(course_key)} ) certificate_invalidations = CertificateInvalidation.get_certificate_invalidations(course_key) context = { 'course': course, 'studio_url': get_studio_url(course, 'course'), 'sections': sections, 'disable_buttons': disable_buttons, 'analytics_dashboard_message': analytics_dashboard_message, 'certificate_white_list': certificate_white_list, 'certificate_invalidations': certificate_invalidations, 'generate_certificate_exceptions_url': generate_certificate_exceptions_url, 'generate_bulk_certificate_exceptions_url': generate_bulk_certificate_exceptions_url, 'certificate_exception_view_url': certificate_exception_view_url, 'certificate_invalidation_view_url': certificate_invalidation_view_url, } return render_to_response('instructor/instructor_dashboard_2/instructor_dashboard_2.html', context) ## Section functions starting with _section return a dictionary of section data. ## The dictionary must include at least { ## 'section_key': 'circus_expo' ## 'section_display_name': 'Circus Expo' ## } ## section_key will be used as a css attribute, javascript tie-in, and template import filename. ## section_display_name will be used to generate link titles in the nav bar. def _section_e_commerce(course, access, paid_mode, coupons_enabled, reports_enabled): """ Provide data for the corresponding dashboard section """ course_key = course.id coupons = Coupon.objects.filter(course_id=course_key).order_by('-is_active') course_price = paid_mode.min_price total_amount = None if access['finance_admin']: single_purchase_total = PaidCourseRegistration.get_total_amount_of_purchased_item(course_key) bulk_purchase_total = CourseRegCodeItem.get_total_amount_of_purchased_item(course_key) total_amount = single_purchase_total + bulk_purchase_total section_data = { 'section_key': 'e-commerce', 'section_display_name': _('E-Commerce'), 'access': access, 'course_id': unicode(course_key), 'currency_symbol': settings.PAID_COURSE_REGISTRATION_CURRENCY[1], 'ajax_remove_coupon_url': reverse('remove_coupon', kwargs={'course_id': unicode(course_key)}), 'ajax_get_coupon_info': reverse('get_coupon_info', kwargs={'course_id': unicode(course_key)}), 'get_user_invoice_preference_url': reverse('get_user_invoice_preference', kwargs={'course_id': unicode(course_key)}), 'sale_validation_url': reverse('sale_validation', kwargs={'course_id': unicode(course_key)}), 'ajax_update_coupon': reverse('update_coupon', kwargs={'course_id': unicode(course_key)}), 'ajax_add_coupon': reverse('add_coupon', kwargs={'course_id': unicode(course_key)}), 'get_sale_records_url': reverse('get_sale_records', kwargs={'course_id': unicode(course_key)}), 'get_sale_order_records_url': reverse('get_sale_order_records', kwargs={'course_id': unicode(course_key)}), 'instructor_url': reverse('instructor_dashboard', kwargs={'course_id': unicode(course_key)}), 'get_registration_code_csv_url': reverse('get_registration_codes', kwargs={'course_id': unicode(course_key)}), 'generate_registration_code_csv_url': reverse('generate_registration_codes', kwargs={'course_id': unicode(course_key)}), 'active_registration_code_csv_url': reverse('active_registration_codes', kwargs={'course_id': unicode(course_key)}), 'spent_registration_code_csv_url': reverse('spent_registration_codes', kwargs={'course_id': unicode(course_key)}), 'set_course_mode_url': reverse('set_course_mode_price', kwargs={'course_id': unicode(course_key)}), 'download_coupon_codes_url': reverse('get_coupon_codes', kwargs={'course_id': unicode(course_key)}), 'enrollment_report_url': reverse('get_enrollment_report', kwargs={'course_id': unicode(course_key)}), 'exec_summary_report_url': reverse('get_exec_summary_report', kwargs={'course_id': unicode(course_key)}), 'list_financial_report_downloads_url': reverse('list_financial_report_downloads', kwargs={'course_id': unicode(course_key)}), 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': unicode(course_key)}), 'look_up_registration_code': reverse('look_up_registration_code', kwargs={'course_id': unicode(course_key)}), 'coupons': coupons, 'sales_admin': access['sales_admin'], 'coupons_enabled': coupons_enabled, 'reports_enabled': reports_enabled, 'course_price': course_price, 'total_amount': total_amount } return section_data def _section_special_exams(course, access): """ Provide data for the corresponding dashboard section """ course_key = course.id section_data = { 'section_key': 'special_exams', 'section_display_name': _('Special Exams'), 'access': access, 'course_id': unicode(course_key) } return section_data def _section_certificates(course): """Section information for the certificates panel. The certificates panel allows global staff to generate example certificates and enable self-generated certificates for a course. Arguments: course (Course) Returns: dict """ example_cert_status = None html_cert_enabled = certs_api.has_html_certificates_enabled(course.id, course) if html_cert_enabled: can_enable_for_course = True else: example_cert_status = certs_api.example_certificates_status(course.id) # Allow the user to enable self-generated certificates for students # *only* once a set of example certificates has been successfully generated. # If certificates have been misconfigured for the course (for example, if # the PDF template hasn't been uploaded yet), then we don't want # to turn on self-generated certificates for students! can_enable_for_course = ( example_cert_status is not None and all( cert_status['status'] == 'success' for cert_status in example_cert_status ) ) instructor_generation_enabled = settings.FEATURES.get('CERTIFICATES_INSTRUCTOR_GENERATION', False) certificate_statuses_with_count = { certificate['status']: certificate['count'] for certificate in GeneratedCertificate.get_unique_statuses(course_key=course.id) } return { 'section_key': 'certificates', 'section_display_name': _('Certificates'), 'example_certificate_status': example_cert_status, 'can_enable_for_course': can_enable_for_course, 'enabled_for_course': certs_api.cert_generation_enabled(course.id), 'is_self_paced': course.self_paced, 'instructor_generation_enabled': instructor_generation_enabled, 'html_cert_enabled': html_cert_enabled, 'active_certificate': certs_api.get_active_web_certificate(course), 'certificate_statuses_with_count': certificate_statuses_with_count, 'status': CertificateStatuses, 'certificate_generation_history': CertificateGenerationHistory.objects.filter(course_id=course.id).order_by("-created"), 'urls': { 'generate_example_certificates': reverse( 'generate_example_certificates', kwargs={'course_id': course.id} ), 'enable_certificate_generation': reverse( 'enable_certificate_generation', kwargs={'course_id': course.id} ), 'start_certificate_generation': reverse( 'start_certificate_generation', kwargs={'course_id': course.id} ), 'start_certificate_regeneration': reverse( 'start_certificate_regeneration', kwargs={'course_id': course.id} ), 'list_instructor_tasks_url': reverse( 'list_instructor_tasks', kwargs={'course_id': course.id} ), } } @ensure_csrf_cookie @cache_control(no_cache=True, no_store=True, must_revalidate=True) @require_POST @login_required def set_course_mode_price(request, course_id): """ set the new course price and add new entry in the CourseModesArchive Table """ try: course_price = int(request.POST['course_price']) except ValueError: return JsonResponse( {'message': _("Please Enter the numeric value for the course price")}, status=400) # status code 400: Bad Request currency = request.POST['currency'] course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course_honor_mode = CourseMode.objects.filter(mode_slug='honor', course_id=course_key) if not course_honor_mode: return JsonResponse( {'message': _("CourseMode with the mode slug({mode_slug}) DoesNotExist").format(mode_slug='honor')}, status=400) # status code 400: Bad Request CourseModesArchive.objects.create( course_id=course_id, mode_slug='honor', mode_display_name='Honor Code Certificate', min_price=course_honor_mode[0].min_price, currency=course_honor_mode[0].currency, expiration_datetime=datetime.datetime.now(pytz.utc), expiration_date=datetime.date.today() ) course_honor_mode.update( min_price=course_price, currency=currency ) return JsonResponse({'message': _("CourseMode price updated successfully")}) def _section_course_info(course, access): """ Provide data for the corresponding dashboard section """ course_key = course.id section_data = { 'section_key': 'course_info', 'section_display_name': _('Course Info'), 'access': access, 'course_id': course_key, 'course_display_name': course.display_name, 'has_started': course.has_started(), 'has_ended': course.has_ended(), 'start_date': course.start, 'end_date': course.end, 'num_sections': len(course.children), 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': unicode(course_key)}), } if settings.FEATURES.get('DISPLAY_ANALYTICS_ENROLLMENTS'): section_data['enrollment_count'] = CourseEnrollment.objects.enrollment_counts(course_key) if show_analytics_dashboard_message(course_key): # dashboard_link is already made safe in _get_dashboard_link dashboard_link = _get_dashboard_link(course_key) # so we can use Text() here so it's not double-escaped and rendering HTML on the front-end message = Text(_("Enrollment data is now available in {dashboard_link}.")).format(dashboard_link=dashboard_link) section_data['enrollment_message'] = message if settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'): section_data['detailed_gitlogs_url'] = reverse('gitlogs_detail', kwargs={'course_id': unicode(course_key)}) try: sorted_cutoffs = sorted(course.grade_cutoffs.items(), key=lambda i: i[1], reverse=True) advance = lambda memo, (letter, score): "{}: {}, ".format(letter, score) + memo section_data['grade_cutoffs'] = reduce(advance, sorted_cutoffs, "")[:-2] except Exception: # pylint: disable=broad-except section_data['grade_cutoffs'] = "Not Available" try: section_data['course_errors'] = [(escape(a), '') for (a, _unused) in modulestore().get_course_errors(course.id)] except Exception: # pylint: disable=broad-except section_data['course_errors'] = [('Error fetching errors', '')] return section_data def _section_membership(course, access, is_white_label): """ Provide data for the corresponding dashboard section """ course_key = course.id ccx_enabled = settings.FEATURES.get('CUSTOM_COURSES_EDX', False) and course.enable_ccx section_data = { 'section_key': 'membership', 'section_display_name': _('Membership'), 'access': access, 'ccx_is_enabled': ccx_enabled, 'is_white_label': is_white_label, 'enroll_button_url': reverse('students_update_enrollment', kwargs={'course_id': unicode(course_key)}), 'unenroll_button_url': reverse('students_update_enrollment', kwargs={'course_id': unicode(course_key)}), 'upload_student_csv_button_url': reverse('register_and_enroll_students', kwargs={'course_id': unicode(course_key)}), 'modify_beta_testers_button_url': reverse('bulk_beta_modify_access', kwargs={'course_id': unicode(course_key)}), 'list_course_role_members_url': reverse('list_course_role_members', kwargs={'course_id': unicode(course_key)}), 'modify_access_url': reverse('modify_access', kwargs={'course_id': unicode(course_key)}), 'list_forum_members_url': reverse('list_forum_members', kwargs={'course_id': unicode(course_key)}), 'update_forum_role_membership_url': reverse('update_forum_role_membership', kwargs={'course_id': unicode(course_key)}), } return section_data def _section_cohort_management(course, access): """ Provide data for the corresponding cohort management section """ course_key = course.id ccx_enabled = hasattr(course_key, 'ccx') section_data = { 'section_key': 'cohort_management', 'section_display_name': _('Cohorts'), 'access': access, 'ccx_is_enabled': ccx_enabled, 'course_cohort_settings_url': reverse( 'course_cohort_settings', kwargs={'course_key_string': unicode(course_key)} ), 'cohorts_url': reverse('cohorts', kwargs={'course_key_string': unicode(course_key)}), 'upload_cohorts_csv_url': reverse('add_users_to_cohorts', kwargs={'course_id': unicode(course_key)}), 'discussion_topics_url': reverse('cohort_discussion_topics', kwargs={'course_key_string': unicode(course_key)}), 'verified_track_cohorting_url': reverse( 'verified_track_cohorting', kwargs={'course_key_string': unicode(course_key)} ), } return section_data def _is_small_course(course_key): """ Compares against MAX_ENROLLMENT_INSTR_BUTTONS to determine if course enrollment is considered small. """ is_small_course = False enrollment_count = CourseEnrollment.objects.num_enrolled_in(course_key) max_enrollment_for_buttons = settings.FEATURES.get("MAX_ENROLLMENT_INSTR_BUTTONS") if max_enrollment_for_buttons is not None: is_small_course = enrollment_count <= max_enrollment_for_buttons return is_small_course def _section_student_admin(course, access): """ Provide data for the corresponding dashboard section """ course_key = course.id is_small_course = _is_small_course(course_key) section_data = { 'section_key': 'student_admin', 'section_display_name': _('Student Admin'), 'access': access, 'is_small_course': is_small_course, 'get_student_progress_url_url': reverse('get_student_progress_url', kwargs={'course_id': unicode(course_key)}), 'enrollment_url': reverse('students_update_enrollment', kwargs={'course_id': unicode(course_key)}), 'reset_student_attempts_url': reverse('reset_student_attempts', kwargs={'course_id': unicode(course_key)}), 'reset_student_attempts_for_entrance_exam_url': reverse( 'reset_student_attempts_for_entrance_exam', kwargs={'course_id': unicode(course_key)}, ), 'rescore_problem_url': reverse('rescore_problem', kwargs={'course_id': unicode(course_key)}), 'rescore_entrance_exam_url': reverse('rescore_entrance_exam', kwargs={'course_id': unicode(course_key)}), 'student_can_skip_entrance_exam_url': reverse( 'mark_student_can_skip_entrance_exam', kwargs={'course_id': unicode(course_key)}, ), 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': unicode(course_key)}), 'list_entrace_exam_instructor_tasks_url': reverse('list_entrance_exam_instructor_tasks', kwargs={'course_id': unicode(course_key)}), 'spoc_gradebook_url': reverse('spoc_gradebook', kwargs={'course_id': unicode(course_key)}), } return section_data def _section_extensions(course): """ Provide data for the corresponding dashboard section """ section_data = { 'section_key': 'extensions', 'section_display_name': _('Extensions'), 'units_with_due_dates': [(title_or_url(unit), unicode(unit.location)) for unit in get_units_with_due_date(course)], 'change_due_date_url': reverse('change_due_date', kwargs={'course_id': unicode(course.id)}), 'reset_due_date_url': reverse('reset_due_date', kwargs={'course_id': unicode(course.id)}), 'show_unit_extensions_url': reverse('show_unit_extensions', kwargs={'course_id': unicode(course.id)}), 'show_student_extensions_url': reverse('show_student_extensions', kwargs={'course_id': unicode(course.id)}), } return section_data def _section_data_download(course, access): """ Provide data for the corresponding dashboard section """ course_key = course.id show_proctored_report_button = ( settings.FEATURES.get('ENABLE_SPECIAL_EXAMS', False) and course.enable_proctored_exams ) section_data = { 'section_key': 'data_download', 'section_display_name': _('Data Download'), 'access': access, 'show_generate_proctored_exam_report_button': show_proctored_report_button, 'get_problem_responses_url': reverse('get_problem_responses', kwargs={'course_id': unicode(course_key)}), 'get_grading_config_url': reverse('get_grading_config', kwargs={'course_id': unicode(course_key)}), 'get_students_features_url': reverse('get_students_features', kwargs={'course_id': unicode(course_key)}), 'get_issued_certificates_url': reverse( 'get_issued_certificates', kwargs={'course_id': unicode(course_key)} ), 'get_students_who_may_enroll_url': reverse( 'get_students_who_may_enroll', kwargs={'course_id': unicode(course_key)} ), 'get_anon_ids_url': reverse('get_anon_ids', kwargs={'course_id': unicode(course_key)}), 'list_proctored_results_url': reverse('get_proctored_exam_results', kwargs={'course_id': unicode(course_key)}), 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': unicode(course_key)}), 'list_report_downloads_url': reverse('list_report_downloads', kwargs={'course_id': unicode(course_key)}), 'calculate_grades_csv_url': reverse('calculate_grades_csv', kwargs={'course_id': unicode(course_key)}), 'problem_grade_report_url': reverse('problem_grade_report', kwargs={'course_id': unicode(course_key)}), 'course_has_survey': True if course.course_survey_name else False, 'course_survey_results_url': reverse('get_course_survey_results', kwargs={'course_id': unicode(course_key)}), 'export_ora2_data_url': reverse('export_ora2_data', kwargs={'course_id': unicode(course_key)}), } return section_data def null_applicable_aside_types(block): # pylint: disable=unused-argument """ get_aside method for monkey-patching into applicable_aside_types while rendering an HtmlDescriptor for email text editing. This returns an empty list. """ return [] def _section_send_email(course, access): """ Provide data for the corresponding bulk email section """ course_key = course.id # Monkey-patch applicable_aside_types to return no asides for the duration of this render with patch.object(course.runtime, 'applicable_aside_types', null_applicable_aside_types): # This HtmlDescriptor is only being used to generate a nice text editor. html_module = HtmlDescriptor( course.system, DictFieldData({'data': ''}), ScopeIds(None, None, None, course_key.make_usage_key('html', 'fake')) ) fragment = course.system.render(html_module, 'studio_view') fragment = wrap_xblock( 'LmsRuntime', html_module, 'studio_view', fragment, None, extra_data={"course-id": unicode(course_key)}, usage_id_serializer=lambda usage_id: quote_slashes(unicode(usage_id)), # Generate a new request_token here at random, because this module isn't connected to any other # xblock rendering. request_token=uuid.uuid1().get_hex() ) cohorts = [] if is_course_cohorted(course_key): cohorts = get_course_cohorts(course) email_editor = fragment.content section_data = { 'section_key': 'send_email', 'section_display_name': _('Email'), 'access': access, 'send_email': reverse('send_email', kwargs={'course_id': unicode(course_key)}), 'editor': email_editor, 'cohorts': cohorts, 'default_cohort_name': DEFAULT_COHORT_NAME, 'list_instructor_tasks_url': reverse( 'list_instructor_tasks', kwargs={'course_id': unicode(course_key)} ), 'email_background_tasks_url': reverse( 'list_background_email_tasks', kwargs={'course_id': unicode(course_key)} ), 'email_content_history_url': reverse( 'list_email_content', kwargs={'course_id': unicode(course_key)} ), } return section_data def _get_dashboard_link(course_key): """ Construct a URL to the external analytics dashboard """ analytics_dashboard_url = '{0}/courses/{1}'.format(settings.ANALYTICS_DASHBOARD_URL, unicode(course_key)) link = HTML(u"<a href=\"{0}\" target=\"_blank\">{1}</a>").format( analytics_dashboard_url, settings.ANALYTICS_DASHBOARD_NAME ) return link def _section_analytics(course, access): """ Provide data for the corresponding dashboard section """ section_data = { 'section_key': 'instructor_analytics', 'section_display_name': _('Analytics'), 'access': access, 'course_id': unicode(course.id), } return section_data def _section_metrics(course, access): """Provide data for the corresponding dashboard section """ course_key = course.id section_data = { 'section_key': 'metrics', 'section_display_name': _('Metrics'), 'access': access, 'course_id': unicode(course_key), 'sub_section_display_name': get_section_display_name(course_key), 'section_has_problem': get_array_section_has_problem(course_key), 'get_students_opened_subsection_url': reverse('get_students_opened_subsection'), 'get_students_problem_grades_url': reverse('get_students_problem_grades'), 'post_metrics_data_csv_url': reverse('post_metrics_data_csv'), } return section_data # GEOFFREY STAT DASHBOARD # GEOFFREY STAT DASHBOARD # GEOFFREY STAT DASHBOARD # GEOFFREY STAT DASHBOARD @login_required def stat_dashboard(request, course_id): #GET course_key course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course_key_modulestore = CourseKey.from_string(course_id) #course_module course_module = modulestore().get_course(course_key, depth=0) #course cutoff course_cutoff = course_module.grade_cutoffs['Pass'] #GET COURSE course = get_course_by_id(course_key) #overview overview = CourseOverview.get_from_id(course_key) #Get all course-enrollment row = User.objects.raw('SELECT a.id ,a.email FROM auth_user a,student_courseenrollment b WHERE a.id=b.user_id AND b.course_id=%s' ,[course_id]) invite = CourseEnrollmentAllowed.objects.all().filter(course_id=course_key) participant_list = [] all_user = 0 for _user in row: participant_list.append(_user.email) all_user = all_user + 1 for _u in invite: if not str(_u.email) in str(participant_list): all_user = all_user + 1 #number of user who started the course user_course_started = 0 #number of users who completed the entire quiz users_completed_quiz = 0 #count passed num_passed = 0 #add course average grade course_average_grade = 0 course_average_grade_global = 0 #number of user who finished the course user_finished = 0 # Users who completed the quiz entirely user_completed_quiz = 0 user_completed_quiz_list = [] #course_structure course_structure = get_course_structure(request,course_id) course_usage_key = modulestore().make_course_usage_key(course_key) blocks = get_blocks(request,course_usage_key,depth='all',requested_fields=['display_name','children']) # Users who completed the quiz (overall_progress equals 100.0 only if user completed the quiz) for user in row: overall_progress = get_overall_progress(user.id, course_key) if overall_progress == 100.0: users_completed_quiz = users_completed_quiz + 1 user_completed_quiz_list.append(user.username) # connect mongodb return values: mongo_persist = dashboardStats() collection = mongo_persist.connect() find_mongo_persist_course = mongo_persist.find_by_course_id(collection,course_id) for n in row: user_id = n.id users = User.objects.get(pk=user_id) try: users_info = find_mongo_persist_course['users_info'] for key, value in users_info.iteritems(): #log.info("user_info key:"+pformat(key)+" value"+pformat(value)) _passed = value['passed'] _percent = value['percent'] user_course_started = user_course_started + 1 # Average grade of all users who completed the quiz _username = value['username'] if _username in user_completed_quiz_list: course_average_grade_global = course_average_grade_global + (_percent * 100) # Average grade of users who passed the quiz if _passed: course_average_grade = course_average_grade + (_percent * 100) user_finished = user_finished + 1 if _percent >= course_cutoff: num_passed = num_passed + 1 except: pass #return context if user_finished != 0: final_course_average_grade = round((course_average_grade / user_finished),1) else : final_course_average_grade=0.0 if users_completed_quiz !=0: course_average_grade_global = round((course_average_grade_global / users_completed_quiz), 1) else : course_average_grade_global=0.0 #store problems components order problem_components=[] for chapter in course_structure: for section in chapter['children']: for vertical in section['children']: for component in vertical['children']: if 'problem' in str(component): problem_components.append(str(component)) context = { "course_id":course_id, "course":course, "row":row, 'course_module':course_module, "all_user":all_user, "num_passed":num_passed, "user_course_started":user_course_started, 'course_average_grade':final_course_average_grade, 'course_average_grade_global': course_average_grade_global, 'user_finished':user_finished, 'course_structure':course_structure, 'overview':overview, 'language_course':get_course_langue(course.language), 'problem_components':problem_components } return render_to_response('courseware/stat.html', context) @ensure_csrf_cookie @login_required def get_dashboard_username(request,course_id,email): course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) row = User.objects.raw('SELECT a.id,a.email,a.first_name,a.last_name FROM auth_user a,student_courseenrollment b WHERE a.id=b.user_id AND b.course_id=%s' ,[course_id]) emails = [] email = str(email).lower() for n in row: low = [ n.email.lower(), n.first_name.lower(), n.last_name.lower() ] if email in str(low).lower(): q = { "values" : [ n.email, n.first_name, n.last_name ], "id":n.email } emails.append(q) response = JsonResponse({ "usernames":emails, "email":email }) return response @ensure_csrf_cookie @login_required def stat_dashboard_username(request, course_id, email): try: # get users info users = User.objects.get(email=email) #user_email user_email = users.email lvl_1 = '' lvl_2 = '' lvl_3 = '' lvl_4 = '' try: preprofile = UserPreprofile.objects.filter(email=user_email).first() lvl_1 = preprofile.level_1 lvl_2 = preprofile.level_2 lvl_3 = preprofile.level_3 lvl_4 = preprofile.level_4 except: pass #ordered course course_grade = [] ordered_course_grade=[] quiz_order=get_quiz_structure(request, course_id) # get user id user_id= users.id # get course_key from url's param course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) # get course from course_key course = get_course_by_id(course_key) # get all courses block of the site course_block = StudentModule.objects.all().filter(student_id=user_id,course_id=course_key,max_grade__isnull=False) # var of grades / course_structure course_grade = [] # get course_users_info course_user_info = CourseGradeFactory().create(users, course) # user info responses user_info = [ {'Score':str(course_user_info.percent * 100)+'%'}, {'First_name':users.first_name}, {'Last_name':users.last_name}, {'Email':users.email}, {'Niveau_1':lvl_1}, {'Niveau_2':lvl_2}, {'Niveau_3':lvl_3}, {'Niveau_4':lvl_4} ] for n in course_block: q = {} usage_key = n.module_state_key block_view = BlocksView() block_name = get_blocks(request,usage_key,depth='all',requested_fields=['display_name']) root = block_name['root'] display_name = block_name['blocks'][root]['display_name'] q['earned'] = n.grade q['possible'] = n.max_grade q['display_name'] = display_name q['root'] = root course_grade.append(q) #Order blocks for id in quiz_order: for block in course_grade : if block['root']==str(id): ordered_course_grade.append(block) return JsonResponse({ "course_id":course_id, "email":email, "user_id":user_id, "course_grade": ordered_course_grade, "user_info": user_info, "quiz_order":quiz_order }) except: return JsonResponse({ "course_id":course_id, "username":username, "user_id": '', "course_grade": [], "user_info": '', }) @login_required def get_course_structure(request, course_id): course_key = CourseKey.from_string(course_id) course_usage_key = modulestore().make_course_usage_key(course_key) blocks = get_blocks(request,course_usage_key,depth='all',requested_fields=['display_name','children']) root = blocks['root'] blocks_overviews = [] try: children = blocks['blocks'][root]['children'] for z in children: q = {} child = blocks['blocks'][z] q['display_name'] = child['display_name'] q['id'] = child['id'] try: sub_section = child['children'] q['children'] = [] for s in sub_section: sub_ = blocks['blocks'][s] a = {} a['id'] = sub_['id'] a['display_name'] = sub_['display_name'] vertical = sub_['children'] try: a['children'] = [] for v in vertical: unit = blocks['blocks'][v] w = {} w['id'] = unit['id'] w['display_name'] = unit['display_name'] try: w['children'] = unit['children'] except: w['children'] = [] a['children'].append(w) except: a['children'] = [] q['children'].append(a) except: q['children'] = [] blocks_overviews.append(q) except: children = '' return blocks_overviews @ensure_csrf_cookie @login_required @require_POST def get_course_blocks_grade(request,course_id): data = json.loads(request.body) data_id = data.get('data_id') course_block = StudentModule.objects.raw("SELECT id,AVG(grade) AS moyenne,count(id) AS total,MAX(max_grade) AS max_grade,course_id,module_id FROM courseware_studentmodule WHERE course_id = %s AND max_grade IS NOT NULL AND grade <= max_grade GROUP BY module_id", [course_id]) course_grade = {} for n in course_block: usage_key = n.module_state_key block_view = BlocksView() try: block_name = get_blocks(request,usage_key,depth='all',requested_fields=['display_name']) root = block_name['root'] for z in data_id: if root in z.get('id'): if not root in course_grade: course_grade[root] = {} course_grade[root]['moyenne'] = n.moyenne course_grade[root]['total'] = n.total course_grade[root]['max_grade'] = n.max_grade course_grade[root]['course_id'] = str(n.course_id) course_grade[root]['module_id'] = str(n.module_state_key) course_grade[root]['display_name'] = block_name['blocks'][root]['display_name'] course_grade[root]['vertical_name'] = z.get('title') except: pass return JsonResponse({'course_grade':course_grade}) def get_result_page_info(request,course_id): response = JsonResponse({ "course_id":course_id }) return response @ensure_csrf_cookie @login_required @require_GET def get_course_users(request,course_id): #Get all course-enrollment """ UserPreprofile CourseEnrollment CourseEnrollmentAllowed """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) invite = CourseEnrollmentAllowed.objects.all().filter(course_id=course_key) enroll = CourseEnrollment.objects.all().filter(course_id=course_key) users = [] for _ui in invite: email = _ui.email if not str(email) in str(users): q = {} q['email'] = email q['statut'] = 'sent' q['Nom'] = '' q['Prenom'] = '' q['Niveau 1'] = '' q['Niveau 2'] = '' q['Niveau 3'] = '' q['Niveau 4'] = '' users.append(q) for _ue in enroll: try: email = User.objects.get(pk=_ue.user_id).email if not str(email) in str(users): q = {} q['email'] = email q['statut'] = 'accepted' q['Nom'] = '' q['Prenom'] = '' q['Niveau 1'] = '' q['Niveau 2'] = '' q['Niveau 3'] = '' q['Niveau 4'] = '' users.append(q) else: for user in users: if user['email'] == email: user['statut'] = 'accepted' except: pass for user in users: try: email = user['email'] profile = UserPreprofile.objects.filter(email=email).first() user['Nom'] = profile.last_name user['Prenom'] = profile.first_name user['Niveau 1'] = profile.level_1 user['Niveau 2'] = profile.level_2 user['Niveau 3'] = profile.level_3 user['Niveau 4'] = profile.level_4 except: pass filename = '{}_registered_users.xls'.format(course_id).replace('+','_') filepath = '/edx/var/edxapp/'+filename HEADERS = (u"Nom",u"Prenom",u"Adresse email",u"Niveau 1",u"Niveau 2",u"Niveau 3",u"Niveau 4",u"Statut") wb = Workbook(encoding='utf-8') sheet = wb.add_sheet('Users') for i, header in enumerate(HEADERS): sheet.write(0, i, header) j = 0 for i in range(len(users)): j=j+1 try: sheet.write(j, 0, users[i]['Nom']) except: sheet.write(j, 0, ' ') try: sheet.write(j, 1, users[i]['Prenom']) except: sheet.write(j, 1, ' ') try: sheet.write(j, 2, users[i]['email']) except: sheet.write(j, 2, ' ') try: sheet.write(j, 3, users[i]['Niveau 1']) except: sheet.write(j, 3, ' ') try: sheet.write(j, 4, users[i]['Niveau 2']) except: sheet.write(j, 4, ' ') try: sheet.write(j, 5, users[i]['Niveau 3']) except: sheet.write(j, 5, ' ') try: sheet.write(j, 6, users[i]['Niveau 4']) except: sheet.write(j, 6, ' ') try: sheet.write(j, 7, users[i]['statut']) except: sheet.write(j, 7, ' ') wb.save(filepath) context = { 'filename':filename, 'users':str(users) } return JsonResponse(context) def download_xls(request,filename): full_path = '/edx/var/edxapp/'+filename _file = open(full_path,'r') _content = _file.read() response = HttpResponse(_content, content_type="application/vnd.ms-excel") response['Content-Disposition'] = "attachment; filename="+filename os.remove(full_path) return response #generate current_course grade reports @ensure_csrf_cookie @login_required @require_GET def get_course_users_grades(request,course_id): # connect mongodb return values: mongo_persist = dashboardStats() collection = mongo_persist.connect() find_mongo_persist_course = mongo_persist.find_by_course_id(collection,course_id) # get users saved data users_info = find_mongo_persist_course.get('users_info') #get users id users_id = users_info.keys() q = { 'title': [ 'email','first name','last name' ], 'users': [] } k = 0 for _user_id in users_id: #try: current = users_info[_user_id] user = User.objects.get(pk=users_info[str(_user_id)]["user_id"]) percent = str(current["percent"] * 100)+'%' summary = current["summary"]["section_breakdown"] user_info = { 'email':user.email, 'first_name':user.first_name, 'last_name':user.last_name, 'percent': percent, 'grades':[] } for section in summary: if k == 0: if not section['label'] in q['title']: q['title'].append(section['label']) _section = { 'label':section['label'], 'percent':str(section['percent'] * 100)+'%' } user_info['grades'].append(_section) q['users'].append(user_info) k = k + 1 """ except: pass """ if not 'final grade' in q['title']: q['title'].append('final grade') filename = '{}_grades_reports.xls'.format(course_id).replace('+','_') filepath = '/edx/var/edxapp/'+filename HEADERS = q['title'] wb = Workbook(encoding='utf-8') sheet = wb.add_sheet('Grades') for i, header in enumerate(HEADERS): sheet.write(0, i, header) j = 0 for i in range(len(q['users'])): j=j+1 try: sheet.write(j, 0, q['users'][i]['email']) except: sheet.write(j, 0, ' ') try: sheet.write(j, 1, q['users'][i]['first_name']) except: sheet.write(j, 1, ' ') try: sheet.write(j, 2, q['users'][i]['last_name']) except: sheet.write(j, 2, ' ') d = 2 for grade in q['users'][i]['grades']: d = d + 1 try: sheet.write(j, d, grade['percent']) except: sheet.write(j, d, ' ') d = d + 1 sheet.write(j, d, q['users'][i]['percent']) wb.save(filepath) context = { 'filename':filename, 'course_id':course_id } return JsonResponse(context) def download_grades(request,filename): full_path = '/edx/var/edxapp/'+filename _file = open(full_path,'r') _content = _file.read() response = HttpResponse(_content, content_type="application/vnd.ms-excel") response['Content-Disposition'] = "attachment; filename="+filename os.remove(full_path) return response def get_list_lang(): language_options_tulp=settings.ALL_LANGUAGES language_options_dict={} for lang, label in language_options_tulp: language_options_dict[lang]=label return language_options_dict def get_course_langue(lang_code): language_options_dict=get_list_lang() course_language=language_options_dict[lang_code] return course_language def get_quiz_structure(request, course_id): course_key = CourseKey.from_string(course_id) course_usage_key = modulestore().make_course_usage_key(course_key) course_blocks = get_blocks(request,course_usage_key,depth='all',requested_fields=['display_name','children']) blocks_overviews = [] quiz_elements=[] blocks_list=[] for block in course_blocks['blocks'] : if course_blocks['blocks'][block].get('children') and "problem" in course_blocks['blocks'][block].get('children')[0]: blocks_list=course_blocks['blocks'][block]['children'] return blocks_list
Your trip to England’s capital won’t be perfect sans this list of visitor-flocked attractions. Tick these top eight views off your list for a personal glimpse and be one of the millions enjoying their London tour. A hub of historic man-made works since the prehistoric era, this famous landmark highlights the more than two million years of our antiquity. Enjoy the treasures and sights of the world’s cultures and evolution like the Egyptian mummies and Rosetta Stone. An estimate of six million people never misses to drop by this popular heritage on a yearly basis. Entrance is absolutely free for all ages, though special exhibitions require charges. A donation however, is highly recommended. If you want to discover works of acclaimed artists like Leonardo da Vinci and Rembrandt for free, this is the right place for you! The National Gallery is a haven to over 2,000 paintings from the past (as early as the Middle Ages) until the 20th century. These artwork collections are of public ownership, with audio guides translated in various languages. There are also fun family activities which you can join for free. Another free and fun way of exploring London is paying a visit to this renowned world gallery. The National History Museum houses hundreds of interactive displays in a state-of-the-art building. Among them are the popular dinosaurs, the mammals exhibit highlighting its blue whale model and the impressive Central Hall – the residence of the iconic Diplodocus skeleton. Its wide-range program of events and exhibitions provides avenues for interactive and topical discussions about nature and science. Please take note that special exhibitions may come with charges, so have extra cash as much as possible. A metropolitan arts center of the famous Thames, this is the best avenue for diverse kinds of cultural events, covering music, art, dance, performances and spoken language. Free events and activities are also offered here, with a wide range of food centers, shops and markets to ease your other needs. London serviced apartments also thrive nearby, giving additional perks for your comfortable stay and sight-seeing. Conveniently located on the Thames’ banks, Britain’s national museum will truly give you the art goose bumps you want for free! Modern and contemporary art across the globe are both showcased in its awe-inspiring Turbine Hall. Special exhibitions may also come with charges. Dubbed as the world’s tallest observation wheel at its 135-meter height, the London Eye is located in its heart, opposite the Big Ben and Houses of Parliament and spinning over Thames. This global icon of design and engineering is a modern epitome of London, offering a breath-taking 360-degree sight of the city. The famous 4D experience it gives makes it the top visitor landmark in the past ten years. Another dramatic gallery of art and design, this museum gives you a glimpse of unrivaled diversity and scope in terms of its displays. Admission is for free! Known as Europe’s most visited museum of science and technology, its building boasts of the more than 15,000 interactive displays that bring to life scientific principles as well as contemporary since debates. General entrance is for free! While other destinations are available, you might as well try this list first as most tourists choose these spots over others. Time and efforts will be definitely worth it!
# engine/base.py # Copyright (C) 2005-2015 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from __future__ import with_statement """Defines :class:`.Connection` and :class:`.Engine`. """ import sys from .. import exc, util, log, interfaces from ..sql import util as sql_util from .interfaces import Connectable, ExceptionContext from .util import _distill_params import contextlib class Connection(Connectable): """Provides high-level functionality for a wrapped DB-API connection. Provides execution support for string-based SQL statements as well as :class:`.ClauseElement`, :class:`.Compiled` and :class:`.DefaultGenerator` objects. Provides a :meth:`begin` method to return :class:`.Transaction` objects. The Connection object is **not** thread-safe. While a Connection can be shared among threads using properly synchronized access, it is still possible that the underlying DBAPI connection may not support shared access between threads. Check the DBAPI documentation for details. The Connection object represents a single dbapi connection checked out from the connection pool. In this state, the connection pool has no affect upon the connection, including its expiration or timeout state. For the connection pool to properly manage connections, connections should be returned to the connection pool (i.e. ``connection.close()``) whenever the connection is not in use. .. index:: single: thread safety; Connection """ def __init__(self, engine, connection=None, close_with_result=False, _branch_from=None, _execution_options=None, _dispatch=None, _has_events=None): """Construct a new Connection. The constructor here is not public and is only called only by an :class:`.Engine`. See :meth:`.Engine.connect` and :meth:`.Engine.contextual_connect` methods. """ self.engine = engine self.dialect = engine.dialect self.__branch_from = _branch_from self.__branch = _branch_from is not None if _branch_from: self.__connection = connection self._execution_options = _execution_options self._echo = _branch_from._echo self.should_close_with_result = False self.dispatch = _dispatch self._has_events = _branch_from._has_events else: self.__connection = connection \ if connection is not None else engine.raw_connection() self.__transaction = None self.__savepoint_seq = 0 self.should_close_with_result = close_with_result self.__invalid = False self.__can_reconnect = True self._echo = self.engine._should_log_info() if _has_events is None: # if _has_events is sent explicitly as False, # then don't join the dispatch of the engine; we don't # want to handle any of the engine's events in that case. self.dispatch = self.dispatch._join(engine.dispatch) self._has_events = _has_events or ( _has_events is None and engine._has_events) assert not _execution_options self._execution_options = engine._execution_options if self._has_events or self.engine._has_events: self.dispatch.engine_connect(self, self.__branch) def _branch(self): """Return a new Connection which references this Connection's engine and connection; but does not have close_with_result enabled, and also whose close() method does nothing. The Core uses this very sparingly, only in the case of custom SQL default functions that are to be INSERTed as the primary key of a row where we need to get the value back, so we have to invoke it distinctly - this is a very uncommon case. Userland code accesses _branch() when the connect() or contextual_connect() methods are called. The branched connection acts as much as possible like the parent, except that it stays connected when a close() event occurs. """ if self.__branch_from: return self.__branch_from._branch() else: return self.engine._connection_cls( self.engine, self.__connection, _branch_from=self, _execution_options=self._execution_options, _has_events=self._has_events, _dispatch=self.dispatch) @property def _root(self): """return the 'root' connection. Returns 'self' if this connection is not a branch, else returns the root connection from which we ultimately branched. """ if self.__branch_from: return self.__branch_from else: return self def _clone(self): """Create a shallow copy of this Connection. """ c = self.__class__.__new__(self.__class__) c.__dict__ = self.__dict__.copy() return c def __enter__(self): return self def __exit__(self, type, value, traceback): self.close() def execution_options(self, **opt): """ Set non-SQL options for the connection which take effect during execution. The method returns a copy of this :class:`.Connection` which references the same underlying DBAPI connection, but also defines the given execution options which will take effect for a call to :meth:`execute`. As the new :class:`.Connection` references the same underlying resource, it's usually a good idea to ensure that the copies will be discarded immediately, which is implicit if used as in:: result = connection.execution_options(stream_results=True).\\ execute(stmt) Note that any key/value can be passed to :meth:`.Connection.execution_options`, and it will be stored in the ``_execution_options`` dictionary of the :class:`.Connection`. It is suitable for usage by end-user schemes to communicate with event listeners, for example. The keywords that are currently recognized by SQLAlchemy itself include all those listed under :meth:`.Executable.execution_options`, as well as others that are specific to :class:`.Connection`. :param autocommit: Available on: Connection, statement. When True, a COMMIT will be invoked after execution when executed in 'autocommit' mode, i.e. when an explicit transaction is not begun on the connection. Note that DBAPI connections by default are always in a transaction - SQLAlchemy uses rules applied to different kinds of statements to determine if COMMIT will be invoked in order to provide its "autocommit" feature. Typically, all INSERT/UPDATE/DELETE statements as well as CREATE/DROP statements have autocommit behavior enabled; SELECT constructs do not. Use this option when invoking a SELECT or other specific SQL construct where COMMIT is desired (typically when calling stored procedures and such), and an explicit transaction is not in progress. :param compiled_cache: Available on: Connection. A dictionary where :class:`.Compiled` objects will be cached when the :class:`.Connection` compiles a clause expression into a :class:`.Compiled` object. It is the user's responsibility to manage the size of this dictionary, which will have keys corresponding to the dialect, clause element, the column names within the VALUES or SET clause of an INSERT or UPDATE, as well as the "batch" mode for an INSERT or UPDATE statement. The format of this dictionary is not guaranteed to stay the same in future releases. Note that the ORM makes use of its own "compiled" caches for some operations, including flush operations. The caching used by the ORM internally supersedes a cache dictionary specified here. :param isolation_level: Available on: :class:`.Connection`. Set the transaction isolation level for the lifespan of this :class:`.Connection` object (*not* the underyling DBAPI connection, for which the level is reset to its original setting upon termination of this :class:`.Connection` object). Valid values include those string values accepted by the :paramref:`.create_engine.isolation_level` parameter passed to :func:`.create_engine`. These levels are semi-database specific; see individual dialect documentation for valid levels. Note that this option necessarily affects the underlying DBAPI connection for the lifespan of the originating :class:`.Connection`, and is not per-execution. This setting is not removed until the underlying DBAPI connection is returned to the connection pool, i.e. the :meth:`.Connection.close` method is called. .. warning:: The ``isolation_level`` execution option should **not** be used when a transaction is already established, that is, the :meth:`.Connection.begin` method or similar has been called. A database cannot change the isolation level on a transaction in progress, and different DBAPIs and/or SQLAlchemy dialects may implicitly roll back or commit the transaction, or not affect the connection at all. .. versionchanged:: 0.9.9 A warning is emitted when the ``isolation_level`` execution option is used after a transaction has been started with :meth:`.Connection.begin` or similar. .. note:: The ``isolation_level`` execution option is implicitly reset if the :class:`.Connection` is invalidated, e.g. via the :meth:`.Connection.invalidate` method, or if a disconnection error occurs. The new connection produced after the invalidation will not have the isolation level re-applied to it automatically. .. seealso:: :paramref:`.create_engine.isolation_level` - set per :class:`.Engine` isolation level :meth:`.Connection.get_isolation_level` - view current level :ref:`SQLite Transaction Isolation <sqlite_isolation_level>` :ref:`Postgresql Transaction Isolation <postgresql_isolation_level>` :ref:`MySQL Transaction Isolation <mysql_isolation_level>` :ref:`session_transaction_isolation` - for the ORM :param no_parameters: When ``True``, if the final parameter list or dictionary is totally empty, will invoke the statement on the cursor as ``cursor.execute(statement)``, not passing the parameter collection at all. Some DBAPIs such as psycopg2 and mysql-python consider percent signs as significant only when parameters are present; this option allows code to generate SQL containing percent signs (and possibly other characters) that is neutral regarding whether it's executed by the DBAPI or piped into a script that's later invoked by command line tools. .. versionadded:: 0.7.6 :param stream_results: Available on: Connection, statement. Indicate to the dialect that results should be "streamed" and not pre-buffered, if possible. This is a limitation of many DBAPIs. The flag is currently understood only by the psycopg2 dialect. """ c = self._clone() c._execution_options = c._execution_options.union(opt) if self._has_events or self.engine._has_events: self.dispatch.set_connection_execution_options(c, opt) self.dialect.set_connection_execution_options(c, opt) return c @property def closed(self): """Return True if this connection is closed.""" return '_Connection__connection' not in self.__dict__ \ and not self.__can_reconnect @property def invalidated(self): """Return True if this connection was invalidated.""" return self._root.__invalid @property def connection(self): """The underlying DB-API connection managed by this Connection. .. seealso:: :ref:`dbapi_connections` """ try: return self.__connection except AttributeError: try: return self._revalidate_connection() except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) def get_isolation_level(self): """Return the current isolation level assigned to this :class:`.Connection`. This will typically be the default isolation level as determined by the dialect, unless if the :paramref:`.Connection.execution_options.isolation_level` feature has been used to alter the isolation level on a per-:class:`.Connection` basis. This attribute will typically perform a live SQL operation in order to procure the current isolation level, so the value returned is the actual level on the underlying DBAPI connection regardless of how this state was set. Compare to the :attr:`.Connection.default_isolation_level` accessor which returns the dialect-level setting without performing a SQL query. .. versionadded:: 0.9.9 .. seealso:: :attr:`.Connection.default_isolation_level` - view default level :paramref:`.create_engine.isolation_level` - set per :class:`.Engine` isolation level :paramref:`.Connection.execution_options.isolation_level` - set per :class:`.Connection` isolation level """ try: return self.dialect.get_isolation_level(self.connection) except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) @property def default_isolation_level(self): """The default isolation level assigned to this :class:`.Connection`. This is the isolation level setting that the :class:`.Connection` has when first procured via the :meth:`.Engine.connect` method. This level stays in place until the :paramref:`.Connection.execution_options.isolation_level` is used to change the setting on a per-:class:`.Connection` basis. Unlike :meth:`.Connection.get_isolation_level`, this attribute is set ahead of time from the first connection procured by the dialect, so SQL query is not invoked when this accessor is called. .. versionadded:: 0.9.9 .. seealso:: :meth:`.Connection.get_isolation_level` - view current level :paramref:`.create_engine.isolation_level` - set per :class:`.Engine` isolation level :paramref:`.Connection.execution_options.isolation_level` - set per :class:`.Connection` isolation level """ return self.dialect.default_isolation_level def _revalidate_connection(self): if self.__branch_from: return self.__branch_from._revalidate_connection() if self.__can_reconnect and self.__invalid: if self.__transaction is not None: raise exc.InvalidRequestError( "Can't reconnect until invalid " "transaction is rolled back") self.__connection = self.engine.raw_connection(_connection=self) self.__invalid = False return self.__connection raise exc.ResourceClosedError("This Connection is closed") @property def _connection_is_valid(self): # use getattr() for is_valid to support exceptions raised in # dialect initializer, where the connection is not wrapped in # _ConnectionFairy return getattr(self.__connection, 'is_valid', False) @property def _still_open_and_connection_is_valid(self): return \ not self.closed and \ not self.invalidated and \ getattr(self.__connection, 'is_valid', False) @property def info(self): """Info dictionary associated with the underlying DBAPI connection referred to by this :class:`.Connection`, allowing user-defined data to be associated with the connection. The data here will follow along with the DBAPI connection including after it is returned to the connection pool and used again in subsequent instances of :class:`.Connection`. """ return self.connection.info def connect(self): """Returns a branched version of this :class:`.Connection`. The :meth:`.Connection.close` method on the returned :class:`.Connection` can be called and this :class:`.Connection` will remain open. This method provides usage symmetry with :meth:`.Engine.connect`, including for usage with context managers. """ return self._branch() def contextual_connect(self, **kwargs): """Returns a branched version of this :class:`.Connection`. The :meth:`.Connection.close` method on the returned :class:`.Connection` can be called and this :class:`.Connection` will remain open. This method provides usage symmetry with :meth:`.Engine.contextual_connect`, including for usage with context managers. """ return self._branch() def invalidate(self, exception=None): """Invalidate the underlying DBAPI connection associated with this :class:`.Connection`. The underlying DBAPI connection is literally closed (if possible), and is discarded. Its source connection pool will typically lazily create a new connection to replace it. Upon the next use (where "use" typically means using the :meth:`.Connection.execute` method or similar), this :class:`.Connection` will attempt to procure a new DBAPI connection using the services of the :class:`.Pool` as a source of connectivty (e.g. a "reconnection"). If a transaction was in progress (e.g. the :meth:`.Connection.begin` method has been called) when :meth:`.Connection.invalidate` method is called, at the DBAPI level all state associated with this transaction is lost, as the DBAPI connection is closed. The :class:`.Connection` will not allow a reconnection to proceed until the :class:`.Transaction` object is ended, by calling the :meth:`.Transaction.rollback` method; until that point, any attempt at continuing to use the :class:`.Connection` will raise an :class:`~sqlalchemy.exc.InvalidRequestError`. This is to prevent applications from accidentally continuing an ongoing transactional operations despite the fact that the transaction has been lost due to an invalidation. The :meth:`.Connection.invalidate` method, just like auto-invalidation, will at the connection pool level invoke the :meth:`.PoolEvents.invalidate` event. .. seealso:: :ref:`pool_connection_invalidation` """ if self.invalidated: return if self.closed: raise exc.ResourceClosedError("This Connection is closed") if self._root._connection_is_valid: self._root.__connection.invalidate(exception) del self._root.__connection self._root.__invalid = True def detach(self): """Detach the underlying DB-API connection from its connection pool. E.g.:: with engine.connect() as conn: conn.detach() conn.execute("SET search_path TO schema1, schema2") # work with connection # connection is fully closed (since we used "with:", can # also call .close()) This :class:`.Connection` instance will remain usable. When closed (or exited from a context manager context as above), the DB-API connection will be literally closed and not returned to its originating pool. This method can be used to insulate the rest of an application from a modified state on a connection (such as a transaction isolation level or similar). """ self.__connection.detach() def begin(self): """Begin a transaction and return a transaction handle. The returned object is an instance of :class:`.Transaction`. This object represents the "scope" of the transaction, which completes when either the :meth:`.Transaction.rollback` or :meth:`.Transaction.commit` method is called. Nested calls to :meth:`.begin` on the same :class:`.Connection` will return new :class:`.Transaction` objects that represent an emulated transaction within the scope of the enclosing transaction, that is:: trans = conn.begin() # outermost transaction trans2 = conn.begin() # "nested" trans2.commit() # does nothing trans.commit() # actually commits Calls to :meth:`.Transaction.commit` only have an effect when invoked via the outermost :class:`.Transaction` object, though the :meth:`.Transaction.rollback` method of any of the :class:`.Transaction` objects will roll back the transaction. See also: :meth:`.Connection.begin_nested` - use a SAVEPOINT :meth:`.Connection.begin_twophase` - use a two phase /XID transaction :meth:`.Engine.begin` - context manager available from :class:`.Engine`. """ if self.__branch_from: return self.__branch_from.begin() if self.__transaction is None: self.__transaction = RootTransaction(self) return self.__transaction else: return Transaction(self, self.__transaction) def begin_nested(self): """Begin a nested transaction and return a transaction handle. The returned object is an instance of :class:`.NestedTransaction`. Nested transactions require SAVEPOINT support in the underlying database. Any transaction in the hierarchy may ``commit`` and ``rollback``, however the outermost transaction still controls the overall ``commit`` or ``rollback`` of the transaction of a whole. See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ if self.__branch_from: return self.__branch_from.begin_nested() if self.__transaction is None: self.__transaction = RootTransaction(self) else: self.__transaction = NestedTransaction(self, self.__transaction) return self.__transaction def begin_twophase(self, xid=None): """Begin a two-phase or XA transaction and return a transaction handle. The returned object is an instance of :class:`.TwoPhaseTransaction`, which in addition to the methods provided by :class:`.Transaction`, also provides a :meth:`~.TwoPhaseTransaction.prepare` method. :param xid: the two phase transaction id. If not supplied, a random id will be generated. See also :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`. """ if self.__branch_from: return self.__branch_from.begin_twophase(xid=xid) if self.__transaction is not None: raise exc.InvalidRequestError( "Cannot start a two phase transaction when a transaction " "is already in progress.") if xid is None: xid = self.engine.dialect.create_xid() self.__transaction = TwoPhaseTransaction(self, xid) return self.__transaction def recover_twophase(self): return self.engine.dialect.do_recover_twophase(self) def rollback_prepared(self, xid, recover=False): self.engine.dialect.do_rollback_twophase(self, xid, recover=recover) def commit_prepared(self, xid, recover=False): self.engine.dialect.do_commit_twophase(self, xid, recover=recover) def in_transaction(self): """Return True if a transaction is in progress.""" return self._root.__transaction is not None def _begin_impl(self, transaction): assert not self.__branch_from if self._echo: self.engine.logger.info("BEGIN (implicit)") if self._has_events or self.engine._has_events: self.dispatch.begin(self) try: self.engine.dialect.do_begin(self.connection) if self.connection._reset_agent is None: self.connection._reset_agent = transaction except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) def _rollback_impl(self): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.rollback(self) if self._still_open_and_connection_is_valid: if self._echo: self.engine.logger.info("ROLLBACK") try: self.engine.dialect.do_rollback(self.connection) except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) finally: if not self.__invalid and \ self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None self.__transaction = None else: self.__transaction = None def _commit_impl(self, autocommit=False): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.commit(self) if self._echo: self.engine.logger.info("COMMIT") try: self.engine.dialect.do_commit(self.connection) except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) finally: if not self.__invalid and \ self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None self.__transaction = None def _savepoint_impl(self, name=None): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.savepoint(self, name) if name is None: self.__savepoint_seq += 1 name = 'sa_savepoint_%s' % self.__savepoint_seq if self._still_open_and_connection_is_valid: self.engine.dialect.do_savepoint(self, name) return name def _rollback_to_savepoint_impl(self, name, context): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.rollback_savepoint(self, name, context) if self._still_open_and_connection_is_valid: self.engine.dialect.do_rollback_to_savepoint(self, name) self.__transaction = context def _release_savepoint_impl(self, name, context): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.release_savepoint(self, name, context) if self._still_open_and_connection_is_valid: self.engine.dialect.do_release_savepoint(self, name) self.__transaction = context def _begin_twophase_impl(self, transaction): assert not self.__branch_from if self._echo: self.engine.logger.info("BEGIN TWOPHASE (implicit)") if self._has_events or self.engine._has_events: self.dispatch.begin_twophase(self, transaction.xid) if self._still_open_and_connection_is_valid: self.engine.dialect.do_begin_twophase(self, transaction.xid) if self.connection._reset_agent is None: self.connection._reset_agent = transaction def _prepare_twophase_impl(self, xid): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.prepare_twophase(self, xid) if self._still_open_and_connection_is_valid: assert isinstance(self.__transaction, TwoPhaseTransaction) self.engine.dialect.do_prepare_twophase(self, xid) def _rollback_twophase_impl(self, xid, is_prepared): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.rollback_twophase(self, xid, is_prepared) if self._still_open_and_connection_is_valid: assert isinstance(self.__transaction, TwoPhaseTransaction) try: self.engine.dialect.do_rollback_twophase( self, xid, is_prepared) finally: if self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None self.__transaction = None else: self.__transaction = None def _commit_twophase_impl(self, xid, is_prepared): assert not self.__branch_from if self._has_events or self.engine._has_events: self.dispatch.commit_twophase(self, xid, is_prepared) if self._still_open_and_connection_is_valid: assert isinstance(self.__transaction, TwoPhaseTransaction) try: self.engine.dialect.do_commit_twophase(self, xid, is_prepared) finally: if self.connection._reset_agent is self.__transaction: self.connection._reset_agent = None self.__transaction = None else: self.__transaction = None def _autorollback(self): if not self._root.in_transaction(): self._root._rollback_impl() def close(self): """Close this :class:`.Connection`. This results in a release of the underlying database resources, that is, the DBAPI connection referenced internally. The DBAPI connection is typically restored back to the connection-holding :class:`.Pool` referenced by the :class:`.Engine` that produced this :class:`.Connection`. Any transactional state present on the DBAPI connection is also unconditionally released via the DBAPI connection's ``rollback()`` method, regardless of any :class:`.Transaction` object that may be outstanding with regards to this :class:`.Connection`. After :meth:`~.Connection.close` is called, the :class:`.Connection` is permanently in a closed state, and will allow no further operations. """ if self.__branch_from: try: del self.__connection except AttributeError: pass finally: self.__can_reconnect = False return try: conn = self.__connection except AttributeError: pass else: conn.close() if conn._reset_agent is self.__transaction: conn._reset_agent = None # the close() process can end up invalidating us, # as the pool will call our transaction as the "reset_agent" # for rollback(), which can then cause an invalidation if not self.__invalid: del self.__connection self.__can_reconnect = False self.__transaction = None def scalar(self, object, *multiparams, **params): """Executes and returns the first column of the first row. The underlying result/cursor is closed after execution. """ return self.execute(object, *multiparams, **params).scalar() def execute(self, object, *multiparams, **params): """Executes the a SQL statement construct and returns a :class:`.ResultProxy`. :param object: The statement to be executed. May be one of: * a plain string * any :class:`.ClauseElement` construct that is also a subclass of :class:`.Executable`, such as a :func:`~.expression.select` construct * a :class:`.FunctionElement`, such as that generated by :data:`.func`, will be automatically wrapped in a SELECT statement, which is then executed. * a :class:`.DDLElement` object * a :class:`.DefaultGenerator` object * a :class:`.Compiled` object :param \*multiparams/\**params: represent bound parameter values to be used in the execution. Typically, the format is either a collection of one or more dictionaries passed to \*multiparams:: conn.execute( table.insert(), {"id":1, "value":"v1"}, {"id":2, "value":"v2"} ) ...or individual key/values interpreted by \**params:: conn.execute( table.insert(), id=1, value="v1" ) In the case that a plain SQL string is passed, and the underlying DBAPI accepts positional bind parameters, a collection of tuples or individual values in \*multiparams may be passed:: conn.execute( "INSERT INTO table (id, value) VALUES (?, ?)", (1, "v1"), (2, "v2") ) conn.execute( "INSERT INTO table (id, value) VALUES (?, ?)", 1, "v1" ) Note above, the usage of a question mark "?" or other symbol is contingent upon the "paramstyle" accepted by the DBAPI in use, which may be any of "qmark", "named", "pyformat", "format", "numeric". See `pep-249 <http://www.python.org/dev/peps/pep-0249/>`_ for details on paramstyle. To execute a textual SQL statement which uses bound parameters in a DBAPI-agnostic way, use the :func:`~.expression.text` construct. """ if isinstance(object, util.string_types[0]): return self._execute_text(object, multiparams, params) try: meth = object._execute_on_connection except AttributeError: raise exc.InvalidRequestError( "Unexecutable object type: %s" % type(object)) else: return meth(self, multiparams, params) def _execute_function(self, func, multiparams, params): """Execute a sql.FunctionElement object.""" return self._execute_clauseelement(func.select(), multiparams, params) def _execute_default(self, default, multiparams, params): """Execute a schema.ColumnDefault object.""" if self._has_events or self.engine._has_events: for fn in self.dispatch.before_execute: default, multiparams, params = \ fn(self, default, multiparams, params) try: try: conn = self.__connection except AttributeError: conn = self._revalidate_connection() dialect = self.dialect ctx = dialect.execution_ctx_cls._init_default( dialect, self, conn) except Exception as e: self._handle_dbapi_exception(e, None, None, None, None) ret = ctx._exec_default(default, None) if self.should_close_with_result: self.close() if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, default, multiparams, params, ret) return ret def _execute_ddl(self, ddl, multiparams, params): """Execute a schema.DDL object.""" if self._has_events or self.engine._has_events: for fn in self.dispatch.before_execute: ddl, multiparams, params = \ fn(self, ddl, multiparams, params) dialect = self.dialect compiled = ddl.compile(dialect=dialect) ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_ddl, compiled, None, compiled ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, ddl, multiparams, params, ret) return ret def _execute_clauseelement(self, elem, multiparams, params): """Execute a sql.ClauseElement object.""" if self._has_events or self.engine._has_events: for fn in self.dispatch.before_execute: elem, multiparams, params = \ fn(self, elem, multiparams, params) distilled_params = _distill_params(multiparams, params) if distilled_params: # note this is usually dict but we support RowProxy # as well; but dict.keys() as an iterable is OK keys = distilled_params[0].keys() else: keys = [] dialect = self.dialect if 'compiled_cache' in self._execution_options: key = dialect, elem, tuple(sorted(keys)), len(distilled_params) > 1 compiled_sql = self._execution_options['compiled_cache'].get(key) if compiled_sql is None: compiled_sql = elem.compile( dialect=dialect, column_keys=keys, inline=len(distilled_params) > 1) self._execution_options['compiled_cache'][key] = compiled_sql else: compiled_sql = elem.compile( dialect=dialect, column_keys=keys, inline=len(distilled_params) > 1) ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_compiled, compiled_sql, distilled_params, compiled_sql, distilled_params ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, elem, multiparams, params, ret) return ret def _execute_compiled(self, compiled, multiparams, params): """Execute a sql.Compiled object.""" if self._has_events or self.engine._has_events: for fn in self.dispatch.before_execute: compiled, multiparams, params = \ fn(self, compiled, multiparams, params) dialect = self.dialect parameters = _distill_params(multiparams, params) ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_compiled, compiled, parameters, compiled, parameters ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, compiled, multiparams, params, ret) return ret def _execute_text(self, statement, multiparams, params): """Execute a string SQL statement.""" if self._has_events or self.engine._has_events: for fn in self.dispatch.before_execute: statement, multiparams, params = \ fn(self, statement, multiparams, params) dialect = self.dialect parameters = _distill_params(multiparams, params) ret = self._execute_context( dialect, dialect.execution_ctx_cls._init_statement, statement, parameters, statement, parameters ) if self._has_events or self.engine._has_events: self.dispatch.after_execute(self, statement, multiparams, params, ret) return ret def _execute_context(self, dialect, constructor, statement, parameters, *args): """Create an :class:`.ExecutionContext` and execute, returning a :class:`.ResultProxy`.""" try: try: conn = self.__connection except AttributeError: conn = self._revalidate_connection() context = constructor(dialect, self, conn, *args) except Exception as e: self._handle_dbapi_exception( e, util.text_type(statement), parameters, None, None) if context.compiled: context.pre_exec() cursor, statement, parameters = context.cursor, \ context.statement, \ context.parameters if not context.executemany: parameters = parameters[0] if self._has_events or self.engine._has_events: for fn in self.dispatch.before_cursor_execute: statement, parameters = \ fn(self, cursor, statement, parameters, context, context.executemany) if self._echo: self.engine.logger.info(statement) self.engine.logger.info( "%r", sql_util._repr_params(parameters, batches=10) ) evt_handled = False try: if context.executemany: if self.dialect._has_events: for fn in self.dialect.dispatch.do_executemany: if fn(cursor, statement, parameters, context): evt_handled = True break if not evt_handled: self.dialect.do_executemany( cursor, statement, parameters, context) elif not parameters and context.no_parameters: if self.dialect._has_events: for fn in self.dialect.dispatch.do_execute_no_params: if fn(cursor, statement, context): evt_handled = True break if not evt_handled: self.dialect.do_execute_no_params( cursor, statement, context) else: if self.dialect._has_events: for fn in self.dialect.dispatch.do_execute: if fn(cursor, statement, parameters, context): evt_handled = True break if not evt_handled: self.dialect.do_execute( cursor, statement, parameters, context) except Exception as e: self._handle_dbapi_exception( e, statement, parameters, cursor, context) if self._has_events or self.engine._has_events: self.dispatch.after_cursor_execute(self, cursor, statement, parameters, context, context.executemany) if context.compiled: context.post_exec() if context.is_crud: result = context._setup_crud_result_proxy() else: result = context.get_result_proxy() if result._metadata is None: result._soft_close(_autoclose_connection=False) if context.should_autocommit and self._root.__transaction is None: self._root._commit_impl(autocommit=True) if result._soft_closed and self.should_close_with_result: self.close() return result def _cursor_execute(self, cursor, statement, parameters, context=None): """Execute a statement + params on the given cursor. Adds appropriate logging and exception handling. This method is used by DefaultDialect for special-case executions, such as for sequences and column defaults. The path of statement execution in the majority of cases terminates at _execute_context(). """ if self._has_events or self.engine._has_events: for fn in self.dispatch.before_cursor_execute: statement, parameters = \ fn(self, cursor, statement, parameters, context, False) if self._echo: self.engine.logger.info(statement) self.engine.logger.info("%r", parameters) try: for fn in () if not self.dialect._has_events \ else self.dialect.dispatch.do_execute: if fn(cursor, statement, parameters, context): break else: self.dialect.do_execute( cursor, statement, parameters, context) except Exception as e: self._handle_dbapi_exception( e, statement, parameters, cursor, context) if self._has_events or self.engine._has_events: self.dispatch.after_cursor_execute(self, cursor, statement, parameters, context, False) def _safe_close_cursor(self, cursor): """Close the given cursor, catching exceptions and turning into log warnings. """ try: cursor.close() except Exception: # log the error through the connection pool's logger. self.engine.pool.logger.error( "Error closing cursor", exc_info=True) _reentrant_error = False _is_disconnect = False def _handle_dbapi_exception(self, e, statement, parameters, cursor, context): exc_info = sys.exc_info() if context and context.exception is None: context.exception = e if not self._is_disconnect: self._is_disconnect = \ isinstance(e, self.dialect.dbapi.Error) and \ not self.closed and \ self.dialect.is_disconnect( e, self.__connection if not self.invalidated else None, cursor) if context: context.is_disconnect = self._is_disconnect invalidate_pool_on_disconnect = True if self._reentrant_error: util.raise_from_cause( exc.DBAPIError.instance(statement, parameters, e, self.dialect.dbapi.Error, dialect=self.dialect), exc_info ) self._reentrant_error = True try: # non-DBAPI error - if we already got a context, # or there's no string statement, don't wrap it should_wrap = isinstance(e, self.dialect.dbapi.Error) or \ (statement is not None and context is None) if should_wrap: sqlalchemy_exception = exc.DBAPIError.instance( statement, parameters, e, self.dialect.dbapi.Error, connection_invalidated=self._is_disconnect, dialect=self.dialect) else: sqlalchemy_exception = None newraise = None if (self._has_events or self.engine._has_events) and \ not self._execution_options.get( 'skip_user_error_events', False): # legacy dbapi_error event if should_wrap and context: self.dispatch.dbapi_error(self, cursor, statement, parameters, context, e) # new handle_error event ctx = ExceptionContextImpl( e, sqlalchemy_exception, self.engine, self, cursor, statement, parameters, context, self._is_disconnect) for fn in self.dispatch.handle_error: try: # handler returns an exception; # call next handler in a chain per_fn = fn(ctx) if per_fn is not None: ctx.chained_exception = newraise = per_fn except Exception as _raised: # handler raises an exception - stop processing newraise = _raised break if sqlalchemy_exception and \ self._is_disconnect != ctx.is_disconnect: sqlalchemy_exception.connection_invalidated = \ self._is_disconnect = ctx.is_disconnect # set up potentially user-defined value for # invalidate pool. invalidate_pool_on_disconnect = \ ctx.invalidate_pool_on_disconnect if should_wrap and context: context.handle_dbapi_exception(e) if not self._is_disconnect: if cursor: self._safe_close_cursor(cursor) self._autorollback() if newraise: util.raise_from_cause(newraise, exc_info) elif should_wrap: util.raise_from_cause( sqlalchemy_exception, exc_info ) else: util.reraise(*exc_info) finally: del self._reentrant_error if self._is_disconnect: del self._is_disconnect if not self.invalidated: dbapi_conn_wrapper = self.__connection if invalidate_pool_on_disconnect: self.engine.pool._invalidate(dbapi_conn_wrapper, e) self.invalidate(e) if self.should_close_with_result: self.close() @classmethod def _handle_dbapi_exception_noconnection(cls, e, dialect, engine): exc_info = sys.exc_info() is_disconnect = dialect.is_disconnect(e, None, None) should_wrap = isinstance(e, dialect.dbapi.Error) if should_wrap: sqlalchemy_exception = exc.DBAPIError.instance( None, None, e, dialect.dbapi.Error, connection_invalidated=is_disconnect) else: sqlalchemy_exception = None newraise = None if engine._has_events: ctx = ExceptionContextImpl( e, sqlalchemy_exception, engine, None, None, None, None, None, is_disconnect) for fn in engine.dispatch.handle_error: try: # handler returns an exception; # call next handler in a chain per_fn = fn(ctx) if per_fn is not None: ctx.chained_exception = newraise = per_fn except Exception as _raised: # handler raises an exception - stop processing newraise = _raised break if sqlalchemy_exception and \ is_disconnect != ctx.is_disconnect: sqlalchemy_exception.connection_invalidated = \ is_disconnect = ctx.is_disconnect if newraise: util.raise_from_cause(newraise, exc_info) elif should_wrap: util.raise_from_cause( sqlalchemy_exception, exc_info ) else: util.reraise(*exc_info) def default_schema_name(self): return self.engine.dialect.get_default_schema_name(self) def transaction(self, callable_, *args, **kwargs): """Execute the given function within a transaction boundary. The function is passed this :class:`.Connection` as the first argument, followed by the given \*args and \**kwargs, e.g.:: def do_something(conn, x, y): conn.execute("some statement", {'x':x, 'y':y}) conn.transaction(do_something, 5, 10) The operations inside the function are all invoked within the context of a single :class:`.Transaction`. Upon success, the transaction is committed. If an exception is raised, the transaction is rolled back before propagating the exception. .. note:: The :meth:`.transaction` method is superseded by the usage of the Python ``with:`` statement, which can be used with :meth:`.Connection.begin`:: with conn.begin(): conn.execute("some statement", {'x':5, 'y':10}) As well as with :meth:`.Engine.begin`:: with engine.begin() as conn: conn.execute("some statement", {'x':5, 'y':10}) See also: :meth:`.Engine.begin` - engine-level transactional context :meth:`.Engine.transaction` - engine-level version of :meth:`.Connection.transaction` """ trans = self.begin() try: ret = self.run_callable(callable_, *args, **kwargs) trans.commit() return ret except: with util.safe_reraise(): trans.rollback() def run_callable(self, callable_, *args, **kwargs): """Given a callable object or function, execute it, passing a :class:`.Connection` as the first argument. The given \*args and \**kwargs are passed subsequent to the :class:`.Connection` argument. This function, along with :meth:`.Engine.run_callable`, allows a function to be run with a :class:`.Connection` or :class:`.Engine` object without the need to know which one is being dealt with. """ return callable_(self, *args, **kwargs) def _run_visitor(self, visitorcallable, element, **kwargs): visitorcallable(self.dialect, self, **kwargs).traverse_single(element) class ExceptionContextImpl(ExceptionContext): """Implement the :class:`.ExceptionContext` interface.""" def __init__(self, exception, sqlalchemy_exception, engine, connection, cursor, statement, parameters, context, is_disconnect): self.engine = engine self.connection = connection self.sqlalchemy_exception = sqlalchemy_exception self.original_exception = exception self.execution_context = context self.statement = statement self.parameters = parameters self.is_disconnect = is_disconnect class Transaction(object): """Represent a database transaction in progress. The :class:`.Transaction` object is procured by calling the :meth:`~.Connection.begin` method of :class:`.Connection`:: from sqlalchemy import create_engine engine = create_engine("postgresql://scott:tiger@localhost/test") connection = engine.connect() trans = connection.begin() connection.execute("insert into x (a, b) values (1, 2)") trans.commit() The object provides :meth:`.rollback` and :meth:`.commit` methods in order to control transaction boundaries. It also implements a context manager interface so that the Python ``with`` statement can be used with the :meth:`.Connection.begin` method:: with connection.begin(): connection.execute("insert into x (a, b) values (1, 2)") The Transaction object is **not** threadsafe. See also: :meth:`.Connection.begin`, :meth:`.Connection.begin_twophase`, :meth:`.Connection.begin_nested`. .. index:: single: thread safety; Transaction """ def __init__(self, connection, parent): self.connection = connection self._actual_parent = parent self.is_active = True @property def _parent(self): return self._actual_parent or self def close(self): """Close this :class:`.Transaction`. If this transaction is the base transaction in a begin/commit nesting, the transaction will rollback(). Otherwise, the method returns. This is used to cancel a Transaction without affecting the scope of an enclosing transaction. """ if not self._parent.is_active: return if self._parent is self: self.rollback() def rollback(self): """Roll back this :class:`.Transaction`. """ if not self._parent.is_active: return self._do_rollback() self.is_active = False def _do_rollback(self): self._parent.rollback() def commit(self): """Commit this :class:`.Transaction`.""" if not self._parent.is_active: raise exc.InvalidRequestError("This transaction is inactive") self._do_commit() self.is_active = False def _do_commit(self): pass def __enter__(self): return self def __exit__(self, type, value, traceback): if type is None and self.is_active: try: self.commit() except: with util.safe_reraise(): self.rollback() else: self.rollback() class RootTransaction(Transaction): def __init__(self, connection): super(RootTransaction, self).__init__(connection, None) self.connection._begin_impl(self) def _do_rollback(self): if self.is_active: self.connection._rollback_impl() def _do_commit(self): if self.is_active: self.connection._commit_impl() class NestedTransaction(Transaction): """Represent a 'nested', or SAVEPOINT transaction. A new :class:`.NestedTransaction` object may be procured using the :meth:`.Connection.begin_nested` method. The interface is the same as that of :class:`.Transaction`. """ def __init__(self, connection, parent): super(NestedTransaction, self).__init__(connection, parent) self._savepoint = self.connection._savepoint_impl() def _do_rollback(self): if self.is_active: self.connection._rollback_to_savepoint_impl( self._savepoint, self._parent) def _do_commit(self): if self.is_active: self.connection._release_savepoint_impl( self._savepoint, self._parent) class TwoPhaseTransaction(Transaction): """Represent a two-phase transaction. A new :class:`.TwoPhaseTransaction` object may be procured using the :meth:`.Connection.begin_twophase` method. The interface is the same as that of :class:`.Transaction` with the addition of the :meth:`prepare` method. """ def __init__(self, connection, xid): super(TwoPhaseTransaction, self).__init__(connection, None) self._is_prepared = False self.xid = xid self.connection._begin_twophase_impl(self) def prepare(self): """Prepare this :class:`.TwoPhaseTransaction`. After a PREPARE, the transaction can be committed. """ if not self._parent.is_active: raise exc.InvalidRequestError("This transaction is inactive") self.connection._prepare_twophase_impl(self.xid) self._is_prepared = True def _do_rollback(self): self.connection._rollback_twophase_impl(self.xid, self._is_prepared) def _do_commit(self): self.connection._commit_twophase_impl(self.xid, self._is_prepared) class Engine(Connectable, log.Identified): """ Connects a :class:`~sqlalchemy.pool.Pool` and :class:`~sqlalchemy.engine.interfaces.Dialect` together to provide a source of database connectivity and behavior. An :class:`.Engine` object is instantiated publicly using the :func:`~sqlalchemy.create_engine` function. See also: :doc:`/core/engines` :ref:`connections_toplevel` """ _execution_options = util.immutabledict() _has_events = False _connection_cls = Connection def __init__(self, pool, dialect, url, logging_name=None, echo=None, proxy=None, execution_options=None ): self.pool = pool self.url = url self.dialect = dialect self.pool._dialect = dialect if logging_name: self.logging_name = logging_name self.echo = echo self.engine = self log.instance_logger(self, echoflag=echo) if proxy: interfaces.ConnectionProxy._adapt_listener(self, proxy) if execution_options: self.update_execution_options(**execution_options) def update_execution_options(self, **opt): """Update the default execution_options dictionary of this :class:`.Engine`. The given keys/values in \**opt are added to the default execution options that will be used for all connections. The initial contents of this dictionary can be sent via the ``execution_options`` parameter to :func:`.create_engine`. .. seealso:: :meth:`.Connection.execution_options` :meth:`.Engine.execution_options` """ self._execution_options = \ self._execution_options.union(opt) self.dispatch.set_engine_execution_options(self, opt) self.dialect.set_engine_execution_options(self, opt) def execution_options(self, **opt): """Return a new :class:`.Engine` that will provide :class:`.Connection` objects with the given execution options. The returned :class:`.Engine` remains related to the original :class:`.Engine` in that it shares the same connection pool and other state: * The :class:`.Pool` used by the new :class:`.Engine` is the same instance. The :meth:`.Engine.dispose` method will replace the connection pool instance for the parent engine as well as this one. * Event listeners are "cascaded" - meaning, the new :class:`.Engine` inherits the events of the parent, and new events can be associated with the new :class:`.Engine` individually. * The logging configuration and logging_name is copied from the parent :class:`.Engine`. The intent of the :meth:`.Engine.execution_options` method is to implement "sharding" schemes where multiple :class:`.Engine` objects refer to the same connection pool, but are differentiated by options that would be consumed by a custom event:: primary_engine = create_engine("mysql://") shard1 = primary_engine.execution_options(shard_id="shard1") shard2 = primary_engine.execution_options(shard_id="shard2") Above, the ``shard1`` engine serves as a factory for :class:`.Connection` objects that will contain the execution option ``shard_id=shard1``, and ``shard2`` will produce :class:`.Connection` objects that contain the execution option ``shard_id=shard2``. An event handler can consume the above execution option to perform a schema switch or other operation, given a connection. Below we emit a MySQL ``use`` statement to switch databases, at the same time keeping track of which database we've established using the :attr:`.Connection.info` dictionary, which gives us a persistent storage space that follows the DBAPI connection:: from sqlalchemy import event from sqlalchemy.engine import Engine shards = {"default": "base", shard_1: "db1", "shard_2": "db2"} @event.listens_for(Engine, "before_cursor_execute") def _switch_shard(conn, cursor, stmt, params, context, executemany): shard_id = conn._execution_options.get('shard_id', "default") current_shard = conn.info.get("current_shard", None) if current_shard != shard_id: cursor.execute("use %s" % shards[shard_id]) conn.info["current_shard"] = shard_id .. versionadded:: 0.8 .. seealso:: :meth:`.Connection.execution_options` - update execution options on a :class:`.Connection` object. :meth:`.Engine.update_execution_options` - update the execution options for a given :class:`.Engine` in place. """ return OptionEngine(self, opt) @property def name(self): """String name of the :class:`~sqlalchemy.engine.interfaces.Dialect` in use by this :class:`Engine`.""" return self.dialect.name @property def driver(self): """Driver name of the :class:`~sqlalchemy.engine.interfaces.Dialect` in use by this :class:`Engine`.""" return self.dialect.driver echo = log.echo_property() def __repr__(self): return 'Engine(%r)' % self.url def dispose(self): """Dispose of the connection pool used by this :class:`.Engine`. This has the effect of fully closing all **currently checked in** database connections. Connections that are still checked out will **not** be closed, however they will no longer be associated with this :class:`.Engine`, so when they are closed individually, eventually the :class:`.Pool` which they are associated with will be garbage collected and they will be closed out fully, if not already closed on checkin. A new connection pool is created immediately after the old one has been disposed. This new pool, like all SQLAlchemy connection pools, does not make any actual connections to the database until one is first requested, so as long as the :class:`.Engine` isn't used again, no new connections will be made. .. seealso:: :ref:`engine_disposal` """ self.pool.dispose() self.pool = self.pool.recreate() self.dispatch.engine_disposed(self) def _execute_default(self, default): with self.contextual_connect() as conn: return conn._execute_default(default, (), {}) @contextlib.contextmanager def _optional_conn_ctx_manager(self, connection=None): if connection is None: with self.contextual_connect() as conn: yield conn else: yield connection def _run_visitor(self, visitorcallable, element, connection=None, **kwargs): with self._optional_conn_ctx_manager(connection) as conn: conn._run_visitor(visitorcallable, element, **kwargs) class _trans_ctx(object): def __init__(self, conn, transaction, close_with_result): self.conn = conn self.transaction = transaction self.close_with_result = close_with_result def __enter__(self): return self.conn def __exit__(self, type, value, traceback): if type is not None: self.transaction.rollback() else: self.transaction.commit() if not self.close_with_result: self.conn.close() def begin(self, close_with_result=False): """Return a context manager delivering a :class:`.Connection` with a :class:`.Transaction` established. E.g.:: with engine.begin() as conn: conn.execute("insert into table (x, y, z) values (1, 2, 3)") conn.execute("my_special_procedure(5)") Upon successful operation, the :class:`.Transaction` is committed. If an error is raised, the :class:`.Transaction` is rolled back. The ``close_with_result`` flag is normally ``False``, and indicates that the :class:`.Connection` will be closed when the operation is complete. When set to ``True``, it indicates the :class:`.Connection` is in "single use" mode, where the :class:`.ResultProxy` returned by the first call to :meth:`.Connection.execute` will close the :class:`.Connection` when that :class:`.ResultProxy` has exhausted all result rows. .. versionadded:: 0.7.6 See also: :meth:`.Engine.connect` - procure a :class:`.Connection` from an :class:`.Engine`. :meth:`.Connection.begin` - start a :class:`.Transaction` for a particular :class:`.Connection`. """ conn = self.contextual_connect(close_with_result=close_with_result) try: trans = conn.begin() except: with util.safe_reraise(): conn.close() return Engine._trans_ctx(conn, trans, close_with_result) def transaction(self, callable_, *args, **kwargs): """Execute the given function within a transaction boundary. The function is passed a :class:`.Connection` newly procured from :meth:`.Engine.contextual_connect` as the first argument, followed by the given \*args and \**kwargs. e.g.:: def do_something(conn, x, y): conn.execute("some statement", {'x':x, 'y':y}) engine.transaction(do_something, 5, 10) The operations inside the function are all invoked within the context of a single :class:`.Transaction`. Upon success, the transaction is committed. If an exception is raised, the transaction is rolled back before propagating the exception. .. note:: The :meth:`.transaction` method is superseded by the usage of the Python ``with:`` statement, which can be used with :meth:`.Engine.begin`:: with engine.begin() as conn: conn.execute("some statement", {'x':5, 'y':10}) See also: :meth:`.Engine.begin` - engine-level transactional context :meth:`.Connection.transaction` - connection-level version of :meth:`.Engine.transaction` """ with self.contextual_connect() as conn: return conn.transaction(callable_, *args, **kwargs) def run_callable(self, callable_, *args, **kwargs): """Given a callable object or function, execute it, passing a :class:`.Connection` as the first argument. The given \*args and \**kwargs are passed subsequent to the :class:`.Connection` argument. This function, along with :meth:`.Connection.run_callable`, allows a function to be run with a :class:`.Connection` or :class:`.Engine` object without the need to know which one is being dealt with. """ with self.contextual_connect() as conn: return conn.run_callable(callable_, *args, **kwargs) def execute(self, statement, *multiparams, **params): """Executes the given construct and returns a :class:`.ResultProxy`. The arguments are the same as those used by :meth:`.Connection.execute`. Here, a :class:`.Connection` is acquired using the :meth:`~.Engine.contextual_connect` method, and the statement executed with that connection. The returned :class:`.ResultProxy` is flagged such that when the :class:`.ResultProxy` is exhausted and its underlying cursor is closed, the :class:`.Connection` created here will also be closed, which allows its associated DBAPI connection resource to be returned to the connection pool. """ connection = self.contextual_connect(close_with_result=True) return connection.execute(statement, *multiparams, **params) def scalar(self, statement, *multiparams, **params): return self.execute(statement, *multiparams, **params).scalar() def _execute_clauseelement(self, elem, multiparams=None, params=None): connection = self.contextual_connect(close_with_result=True) return connection._execute_clauseelement(elem, multiparams, params) def _execute_compiled(self, compiled, multiparams, params): connection = self.contextual_connect(close_with_result=True) return connection._execute_compiled(compiled, multiparams, params) def connect(self, **kwargs): """Return a new :class:`.Connection` object. The :class:`.Connection` object is a facade that uses a DBAPI connection internally in order to communicate with the database. This connection is procured from the connection-holding :class:`.Pool` referenced by this :class:`.Engine`. When the :meth:`~.Connection.close` method of the :class:`.Connection` object is called, the underlying DBAPI connection is then returned to the connection pool, where it may be used again in a subsequent call to :meth:`~.Engine.connect`. """ return self._connection_cls(self, **kwargs) def contextual_connect(self, close_with_result=False, **kwargs): """Return a :class:`.Connection` object which may be part of some ongoing context. By default, this method does the same thing as :meth:`.Engine.connect`. Subclasses of :class:`.Engine` may override this method to provide contextual behavior. :param close_with_result: When True, the first :class:`.ResultProxy` created by the :class:`.Connection` will call the :meth:`.Connection.close` method of that connection as soon as any pending result rows are exhausted. This is used to supply the "connectionless execution" behavior provided by the :meth:`.Engine.execute` method. """ return self._connection_cls( self, self._wrap_pool_connect(self.pool.connect, None), close_with_result=close_with_result, **kwargs) def table_names(self, schema=None, connection=None): """Return a list of all table names available in the database. :param schema: Optional, retrieve names from a non-default schema. :param connection: Optional, use a specified connection. Default is the ``contextual_connect`` for this ``Engine``. """ with self._optional_conn_ctx_manager(connection) as conn: if not schema: schema = self.dialect.default_schema_name return self.dialect.get_table_names(conn, schema) def has_table(self, table_name, schema=None): """Return True if the given backend has a table of the given name. .. seealso:: :ref:`metadata_reflection_inspector` - detailed schema inspection using the :class:`.Inspector` interface. :class:`.quoted_name` - used to pass quoting information along with a schema identifier. """ return self.run_callable(self.dialect.has_table, table_name, schema) def _wrap_pool_connect(self, fn, connection): dialect = self.dialect try: return fn() except dialect.dbapi.Error as e: if connection is None: Connection._handle_dbapi_exception_noconnection( e, dialect, self) else: util.reraise(*sys.exc_info()) def raw_connection(self, _connection=None): """Return a "raw" DBAPI connection from the connection pool. The returned object is a proxied version of the DBAPI connection object used by the underlying driver in use. The object will have all the same behavior as the real DBAPI connection, except that its ``close()`` method will result in the connection being returned to the pool, rather than being closed for real. This method provides direct DBAPI connection access for special situations when the API provided by :class:`.Connection` is not needed. When a :class:`.Connection` object is already present, the DBAPI connection is available using the :attr:`.Connection.connection` accessor. .. seealso:: :ref:`dbapi_connections` """ return self._wrap_pool_connect( self.pool.unique_connection, _connection) class OptionEngine(Engine): def __init__(self, proxied, execution_options): self._proxied = proxied self.url = proxied.url self.dialect = proxied.dialect self.logging_name = proxied.logging_name self.echo = proxied.echo log.instance_logger(self, echoflag=self.echo) self.dispatch = self.dispatch._join(proxied.dispatch) self._execution_options = proxied._execution_options self.update_execution_options(**execution_options) def _get_pool(self): return self._proxied.pool def _set_pool(self, pool): self._proxied.pool = pool pool = property(_get_pool, _set_pool) def _get_has_events(self): return self._proxied._has_events or \ self.__dict__.get('_has_events', False) def _set_has_events(self, value): self.__dict__['_has_events'] = value _has_events = property(_get_has_events, _set_has_events)
When it comes to music, sounds as a general sort of array, you're either trying to preserve and present the sound with fidelity or you're attempting to change the sound in one or many ways and create something that's hopefully unique as much as it's familiar enough to resonate with the listener. Most folk musicians are in the former camp, keeping gentle company with acoustic music without amplification or amplified in such a way that it retains as much of its natural quality as possible. I'm down with that. When it boils right down to it, nothing beats the sound of an instrument au naturel. But face it - sometimes, it's helpful to plug in so that you may be heard. Proper amplification takes whatever it is that you are doing and presents it to the listener as you intended it to be heard. Anything less and you might be settling, but there are many fine reasons for that too. Take, for example, Audio 2000's AWP6040. I got this from Mike Clemmer at Wood 'N' Strings after he told me about how he uses it when teaching workshops. It's a little 25 watt battery-powered wireless P.A. system that comes with both a lavalier and a lapel mic plus inputs for guitar cable both 1/4" and 3.5" - aux MP3 player jack - you can get one of the mics on you, clip the transmitter to your belt, plug your instrument directly into the box or splitter into the pack, set the tone, individual channel volumes, add a little slapback to the voice and, if you're strolling - you've got 100 feet to wander while you play and sing or speak. I use this when I teach workshops at festivals and also in the past for ceremonies, house concerts and, tonight, a birthday party. It's truly surprising just how well this unit punches the sound out there. Better still: I don't know what Mike's selling these for now, but last summer, it was a steal at under $100. Right? This is the *gear* post, so it's going to get tweaky in here real quick. It'll all end with the story of why I'm expecting a delivery in about twelve hours. Here now, the beginning of the past four month's deeper trip to get into the sound. If you haven't seen them already (and you can always access these kinds of posts by clicking on the "gear" label in the label cloud on the right) the Tube Pac probably started this whole move to upgrade, so this has been over the course of a year then. Recording mostly with dynamic microphones (Shure SM-57, SM-58Beta) left me wanting to capture more depth and richness in the studio, which led to this single-channel tube pre-amplification and compression unit. Right away, the difference in sound quality was amazing. Then came the Digitech Vocalist 4, which is really quite funny to sneak into tunes and watch as people try to figure out where your other singers are. Finally had to break down and get another audio interface. With one fried channel, the PreSonus Firebox was only half useful, not to mention glitchy. After a horrible side-trip into the world of M-Audio, I settled (a-ha, that word again) for a PreSonus Firestudio Mobile because they didn't have the Saffire Pro 40 at the Guitar Center near the house. Let's not chalk this up to impatience - album deadlines dictated that a glitching interface needed replaced, like, right then. And so you do. There's still the occasional glitch - but nothing that harms recording quality. Just quality of non-frustration levels while waiting for Apple's Audio/MIDI program to connect the dots. No big. Play a round of Words With Friends. Right? By the way, that P.A. pictured at the top of this post? That was mine - Fender Passport PD150. Nice unit - 4 channels (well, 3 channels and a stereo auxiliary), great for vocals and light duty work, but don't try and put anything that resembles bass through it. Nuh-uh. And unfortunately, that's exactly where I was attempting to go through all of this tone building and developing performance additions. As much as there is a time to strum the mountain dulcimer unaffected and free of cables, there is also much rejoicing to be had when you can really throw it through a heap of processors and turn it into the paintbrushes of your musical dreams.